From f6c868d8a2df4d2961983d4e52095d6e7551e9cb Mon Sep 17 00:00:00 2001 From: Liam Mitchell Date: Tue, 23 Sep 2025 23:02:02 +0200 Subject: [PATCH 01/63] fix: calculate omit in diff (#8566) Discovered while investigating https://github.com/npm/cli/issues/8535 `npm install --omit` output doesn't show any removed packages. This PR moves the omit calc into the diff calc so omits are handled like other resolution logic. Improvements: * we see removals in CLI output * _createSparseTree no longer creates dirs that will only be cleaned later * no duplicate filterSet calculation, omit is calculated right after filters I removed the trashList check on reifying node. Code coverage complained that this branch wasn't hit in any tests so I assume it should always be empty. I think this uncovered one bug with workspaces in the test `workspaces > reify workspaces omit dev dependencies > workspaces only`. From my understanding of the [docs](https://docs.npmjs.com/cli/v11/commands/npm-install#include-workspace-root), only workspace `a` should be touched, `root` and workspace `b` should still have their packages. I updated the test. Co-authored-by: Liam Mitchell --- workspaces/arborist/lib/arborist/reify.js | 47 ++++--------------- workspaces/arborist/lib/diff.js | 32 +++++++++++-- workspaces/arborist/lib/node.js | 12 +++++ .../test/arborist/reify.js.test.cjs | 1 - workspaces/arborist/test/arborist/reify.js | 17 ++++++- 5 files changed, 63 insertions(+), 46 deletions(-) diff --git a/workspaces/arborist/lib/arborist/reify.js b/workspaces/arborist/lib/arborist/reify.js index 5da8e72bfa567..ee381e809216f 100644 --- a/workspaces/arborist/lib/arborist/reify.js +++ b/workspaces/arborist/lib/arborist/reify.js @@ -42,7 +42,6 @@ const { defaultLockfileVersion } = Shrinkwrap const _retireShallowNodes = Symbol.for('retireShallowNodes') const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees') const _submitQuickAudit = Symbol('submitQuickAudit') -const _addOmitsToTrashList = Symbol('addOmitsToTrashList') const _unpackNewModules = Symbol.for('unpackNewModules') const _build = Symbol.for('build') @@ -85,6 +84,7 @@ module.exports = cls => class Reifier extends cls { #dryRun #nmValidated = new Set() #omit + #omitted #retiredPaths = {} #retiredUnchanged = {} #savePrefix @@ -109,6 +109,7 @@ module.exports = cls => class Reifier extends cls { } this.#omit = new Set(options.omit) + this.#omitted = new Set() // start tracker block this.addTracker('reify') @@ -141,6 +142,10 @@ module.exports = cls => class Reifier extends cls { this.idealTree = oldTree } await this[_saveIdealTree](options) + // clean omitted + for (const node of this.#omitted) { + node.parent = null + } // clean up any trash that is still in the tree for (const path of this[_trashList]) { const loc = relpath(this.idealTree.realpath, path) @@ -315,7 +320,6 @@ module.exports = cls => class Reifier extends cls { ]], [_rollbackCreateSparseTree, [ _createSparseTree, - _addOmitsToTrashList, _loadShrinkwrapsAndUpdateTrees, _loadBundlesAndUpdateTrees, _submitQuickAudit, @@ -470,6 +474,8 @@ module.exports = cls => class Reifier extends cls { // find all the nodes that need to change between the actual // and ideal trees. 
this.diff = Diff.calculate({ + omit: this.#omit, + omitted: this.#omitted, shrinkwrapInflated: this.#shrinkwrapInflated, filterNodes, actual: this.actualTree, @@ -554,37 +560,6 @@ module.exports = cls => class Reifier extends cls { }) } - // adding to the trash list will skip reifying, and delete them - // if they are currently in the tree and otherwise untouched. - [_addOmitsToTrashList] () { - if (!this.#omit.size) { - return - } - - const timeEnd = time.start('reify:trashOmits') - for (const node of this.idealTree.inventory.values()) { - const { top } = node - - // if the top is not the root or workspace then we do not want to omit it - if (!top.isProjectRoot && !top.isWorkspace) { - continue - } - - // if a diff filter has been created, then we do not omit the node if the - // top node is not in that set - if (this.diff?.filterSet?.size && !this.diff.filterSet.has(top)) { - continue - } - - // omit node if the dep type matches any omit flags that were set - if (node.shouldOmit(this.#omit)) { - this[_addNodeToTrashList](node) - } - } - - timeEnd() - } - [_createSparseTree] () { const timeEnd = time.start('reify:createSparse') // if we call this fn again, we look for the previous list @@ -683,7 +658,6 @@ module.exports = cls => class Reifier extends cls { // reload the diff and sparse tree because the ideal tree changed .then(() => this[_diffTrees]()) .then(() => this[_createSparseTree]()) - .then(() => this[_addOmitsToTrashList]()) .then(() => this[_loadShrinkwrapsAndUpdateTrees]()) .then(timeEnd) } @@ -691,15 +665,10 @@ module.exports = cls => class Reifier extends cls { // create a symlink for Links, extract for Nodes // return the node object, since we usually want that // handle optional dep failures here - // If node is in trash list, skip it // If reifying fails, and the node is optional, add it and its optionalSet // to the trash list // Always return the node. [_reifyNode] (node) { - if (this[_trashList].has(node.path)) { - return node - } - const timeEnd = time.start(`reifyNode:${node.location}`) this.addTracker('reify', node.name, node.location) diff --git a/workspaces/arborist/lib/diff.js b/workspaces/arborist/lib/diff.js index fb94407bb0166..9f2d5aed47d07 100644 --- a/workspaces/arborist/lib/diff.js +++ b/workspaces/arborist/lib/diff.js @@ -11,7 +11,9 @@ const { existsSync } = require('node:fs') const ssri = require('ssri') class Diff { - constructor ({ actual, ideal, filterSet, shrinkwrapInflated }) { + constructor ({ actual, ideal, filterSet, shrinkwrapInflated, omit, omitted }) { + this.omit = omit + this.omitted = omitted this.filterSet = filterSet this.shrinkwrapInflated = shrinkwrapInflated this.children = [] @@ -36,6 +38,8 @@ class Diff { ideal, filterNodes = [], shrinkwrapInflated = new Set(), + omit = new Set(), + omitted = new Set(), }) { // if there's a filterNode, then: // - get the path from the root to the filterNode. The root or @@ -94,18 +98,28 @@ class Diff { } return depth({ - tree: new Diff({ actual, ideal, filterSet, shrinkwrapInflated }), + tree: new Diff({ actual, ideal, filterSet, shrinkwrapInflated, omit, omitted }), getChildren, leave, }) } } -const getAction = ({ actual, ideal }) => { +const getAction = ({ actual, ideal, omit, omitted }) => { if (!ideal) { return 'REMOVE' } + if (ideal.shouldOmit?.(omit)) { + omitted.add(ideal) + + if (actual) { + return 'REMOVE' + } + + return null + } + // bundled meta-deps are copied over to the ideal tree when we visit it, // so they'll appear to be missing here. 
There's no need to handle them // in the diff, though, because they'll be replaced at reify time anyway @@ -184,6 +198,8 @@ const getChildren = diff => { removed, filterSet, shrinkwrapInflated, + omit, + omitted, } = diff // Note: we DON'T diff fsChildren themselves, because they are either @@ -214,6 +230,8 @@ const getChildren = diff => { removed, filterSet, shrinkwrapInflated, + omit, + omitted, }) } @@ -232,12 +250,14 @@ const diffNode = ({ removed, filterSet, shrinkwrapInflated, + omit, + omitted, }) => { if (filterSet.size && !(filterSet.has(ideal) || filterSet.has(actual))) { return } - const action = getAction({ actual, ideal }) + const action = getAction({ actual, ideal, omit, omitted }) // if it's a match, then get its children // otherwise, this is the child diff node @@ -245,7 +265,7 @@ const diffNode = ({ if (action === 'REMOVE') { removed.push(actual) } - children.push(new Diff({ actual, ideal, filterSet, shrinkwrapInflated })) + children.push(new Diff({ actual, ideal, filterSet, shrinkwrapInflated, omit, omitted })) } else { unchanged.push(ideal) // !*! Weird dirty hack warning !*! @@ -285,6 +305,8 @@ const diffNode = ({ removed, filterSet, shrinkwrapInflated, + omit, + omitted, })) } } diff --git a/workspaces/arborist/lib/node.js b/workspaces/arborist/lib/node.js index 91c61fa09b414..1f67708a41ced 100644 --- a/workspaces/arborist/lib/node.js +++ b/workspaces/arborist/lib/node.js @@ -490,6 +490,18 @@ class Node { } shouldOmit (omitSet) { + if (!omitSet.size) { + return false + } + + const { top } = this + + // if the top is not the root or workspace then we do not want to omit it + if (!top.isProjectRoot && !top.isWorkspace) { + return false + } + + // omit node if the dep type matches any omit flags that were set return ( this.peer && omitSet.has('peer') || this.dev && omitSet.has('dev') || diff --git a/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs b/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs index 28d5e4c789fa0..210ec999e6dff 100644 --- a/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs +++ b/workspaces/arborist/tap-snapshots/test/arborist/reify.js.test.cjs @@ -17365,7 +17365,6 @@ Array [ "reify:retireShallow", "reify:save", "reify:trash", - "reify:trashOmits", "reify:unpack", "reify:unretire", "reifyNode:node_modules/@isaacs/testing-peer-deps-b", diff --git a/workspaces/arborist/test/arborist/reify.js b/workspaces/arborist/test/arborist/reify.js index a2944ceff4e62..eb805d3245933 100644 --- a/workspaces/arborist/test/arborist/reify.js +++ b/workspaces/arborist/test/arborist/reify.js @@ -394,6 +394,21 @@ t.test('dev, optional, devOptional flags and omissions', t => { })) }) +t.test('omit reports no diff on second run', async t => { + const path = fixture(t, 'testing-dev-optional-flags') + createRegistry(t, true) + const arb = newArb({ path }) + await arb.reify({ omit: ['dev'] }) + t.equal(arb.actualTree.children.get('once'), undefined, 'no once in tree') + t.ok(arb.diff.children.length, 'first reify has changes') + await arb.reify({ omit: ['dev'] }) + t.equal(arb.actualTree.children.get('once'), undefined, 'no once in tree') + t.notOk(arb.diff.children.length, 'second reify has no changes') + await arb.reify({}) + t.ok(arb.actualTree.children.get('once'), 'once in tree') + t.ok(arb.diff.children.length, 'removing omit has changes') +}) + t.test('omits when both dev and optional flags are set', t => { const path = 'testing-dev-optional-flags-2' const omits = [['dev'], ['optional']] @@ -1329,7 +1344,7 @@ 
t.test('workspaces', async t => { await t.test('workspaces only', async t => { createRegistry(t, false) const { root, a, b } = await runCase(t, { workspaces: ['a'] }) - t.equal(root.exists(), false, 'root') + t.equal(root.exists(), true, 'root') t.equal(a.exists(), false, 'a') t.equal(b.exists(), true, 'b') }) From 4f37534300553e9ddfbc413c14d1ef15b02b46f2 Mon Sep 17 00:00:00 2001 From: Gar Date: Fri, 6 Dec 2024 09:27:08 -0800 Subject: [PATCH 02/63] deps: remove read-package-json-fast --- package-lock.json | 3 +-- workspaces/arborist/package.json | 1 - workspaces/libnpmexec/package.json | 1 - 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/package-lock.json b/package-lock.json index d8680210d0047..460e9d9e1dd26 100644 --- a/package-lock.json +++ b/package-lock.json @@ -13406,6 +13406,7 @@ "version": "4.0.0", "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-4.0.0.tgz", "integrity": "sha512-qpt8EwugBWDw2cgE2W+/3oxC+KTez2uSVR8JU9Q36TXPAGCaozfQUs59v4j4GFpWTaw0i6hAZSvOmu1J0uOEUg==", + "dev": true, "license": "ISC", "dependencies": { "json-parse-even-better-errors": "^4.0.0", @@ -18690,7 +18691,6 @@ "proggy": "^3.0.0", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^3.0.1", - "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", "ssri": "^12.0.0", "treeverse": "^3.0.0", @@ -18790,7 +18790,6 @@ "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "read": "^4.0.0", - "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", "signal-exit": "^4.1.0", "walk-up-path": "^4.0.0" diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json index 7e98d0e7d7571..30f8a2b995cad 100644 --- a/workspaces/arborist/package.json +++ b/workspaces/arborist/package.json @@ -32,7 +32,6 @@ "proggy": "^3.0.0", "promise-all-reject-late": "^1.0.0", "promise-call-limit": "^3.0.1", - "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", "ssri": "^12.0.0", "treeverse": "^3.0.0", diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json index 706f6db5d4794..827b1f38a73b0 100644 --- a/workspaces/libnpmexec/package.json +++ b/workspaces/libnpmexec/package.json @@ -69,7 +69,6 @@ "proc-log": "^5.0.0", "promise-retry": "^2.0.1", "read": "^4.0.0", - "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", "signal-exit": "^4.1.0", "walk-up-path": "^4.0.0" From eed8a10f09831cc01bdc7d07c4fae5c27dcf966c Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 7 Jul 2025 09:07:58 -0700 Subject: [PATCH 03/63] chore: use latest/local arborist in mock-registry --- mock-registry/package.json | 2 +- node_modules/read-package-json-fast/LICENSE | 15 -- .../read-package-json-fast/lib/index.js | 141 ------------------ .../read-package-json-fast/package.json | 49 ------ package-lock.json | 16 +- 5 files changed, 2 insertions(+), 221 deletions(-) delete mode 100644 node_modules/read-package-json-fast/LICENSE delete mode 100644 node_modules/read-package-json-fast/lib/index.js delete mode 100644 node_modules/read-package-json-fast/package.json diff --git a/mock-registry/package.json b/mock-registry/package.json index af7faf3c58749..3f43061223f52 100644 --- a/mock-registry/package.json +++ b/mock-registry/package.json @@ -46,7 +46,7 @@ ] }, "devDependencies": { - "@npmcli/arborist": "^9.0.0", + "@npmcli/arborist": "^9.1.2", "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.24.4", "json-stringify-safe": "^5.0.1", diff --git a/node_modules/read-package-json-fast/LICENSE b/node_modules/read-package-json-fast/LICENSE deleted file mode 100644 index 
20a4762540923..0000000000000 --- a/node_modules/read-package-json-fast/LICENSE +++ /dev/null @@ -1,15 +0,0 @@ -The ISC License - -Copyright (c) npm, Inc. and Contributors - -Permission to use, copy, modify, and/or distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR -IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/read-package-json-fast/lib/index.js b/node_modules/read-package-json-fast/lib/index.js deleted file mode 100644 index beb089db8d53e..0000000000000 --- a/node_modules/read-package-json-fast/lib/index.js +++ /dev/null @@ -1,141 +0,0 @@ -const { readFile, lstat, readdir } = require('fs/promises') -const parse = require('json-parse-even-better-errors') -const normalizePackageBin = require('npm-normalize-package-bin') -const { resolve, dirname, join, relative } = require('path') - -const rpj = path => readFile(path, 'utf8') - .then(data => readBinDir(path, normalize(stripUnderscores(parse(data))))) - .catch(er => { - er.path = path - throw er - }) - -// load the directories.bin folder as a 'bin' object -const readBinDir = async (path, data) => { - if (data.bin) { - return data - } - - const m = data.directories && data.directories.bin - if (!m || typeof m !== 'string') { - return data - } - - // cut off any monkey business, like setting directories.bin - // to ../../../etc/passwd or /etc/passwd or something like that. - const root = dirname(path) - const dir = join('.', join('/', m)) - data.bin = await walkBinDir(root, dir, {}) - return data -} - -const walkBinDir = async (root, dir, obj) => { - const entries = await readdir(resolve(root, dir)).catch(() => []) - for (const entry of entries) { - if (entry.charAt(0) === '.') { - continue - } - const f = resolve(root, dir, entry) - // ignore stat errors, weird file types, symlinks, etc. - const st = await lstat(f).catch(() => null) - if (!st) { - continue - } else if (st.isFile()) { - obj[entry] = relative(root, f) - } else if (st.isDirectory()) { - await walkBinDir(root, join(dir, entry), obj) - } - } - return obj -} - -// do not preserve _fields set in files, they are sus -const stripUnderscores = data => { - for (const key of Object.keys(data).filter(k => /^_/.test(k))) { - delete data[key] - } - return data -} - -const normalize = data => { - addId(data) - fixBundled(data) - pruneRepeatedOptionals(data) - fixScripts(data) - fixFunding(data) - normalizePackageBin(data) - return data -} - -rpj.normalize = normalize - -const addId = data => { - if (data.name && data.version) { - data._id = `${data.name}@${data.version}` - } - return data -} - -// it was once common practice to list deps both in optionalDependencies -// and in dependencies, to support npm versions that did not know abbout -// optionalDependencies. This is no longer a relevant need, so duplicating -// the deps in two places is unnecessary and excessive. 
-const pruneRepeatedOptionals = data => { - const od = data.optionalDependencies - const dd = data.dependencies || {} - if (od && typeof od === 'object') { - for (const name of Object.keys(od)) { - delete dd[name] - } - } - if (Object.keys(dd).length === 0) { - delete data.dependencies - } - return data -} - -const fixBundled = data => { - const bdd = data.bundledDependencies - const bd = data.bundleDependencies === undefined ? bdd - : data.bundleDependencies - - if (bd === false) { - data.bundleDependencies = [] - } else if (bd === true) { - data.bundleDependencies = Object.keys(data.dependencies || {}) - } else if (bd && typeof bd === 'object') { - if (!Array.isArray(bd)) { - data.bundleDependencies = Object.keys(bd) - } else { - data.bundleDependencies = bd - } - } else { - delete data.bundleDependencies - } - - delete data.bundledDependencies - return data -} - -const fixScripts = data => { - if (!data.scripts || typeof data.scripts !== 'object') { - delete data.scripts - return data - } - - for (const [name, script] of Object.entries(data.scripts)) { - if (typeof script !== 'string') { - delete data.scripts[name] - } - } - return data -} - -const fixFunding = data => { - if (data.funding && typeof data.funding === 'string') { - data.funding = { url: data.funding } - } - return data -} - -module.exports = rpj diff --git a/node_modules/read-package-json-fast/package.json b/node_modules/read-package-json-fast/package.json deleted file mode 100644 index 20208329e24be..0000000000000 --- a/node_modules/read-package-json-fast/package.json +++ /dev/null @@ -1,49 +0,0 @@ -{ - "name": "read-package-json-fast", - "version": "4.0.0", - "description": "Like read-package-json, but faster", - "main": "lib/index.js", - "author": "GitHub Inc.", - "license": "ISC", - "scripts": { - "test": "tap", - "snap": "tap", - "lint": "npm run eslint", - "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run eslint -- --fix", - "posttest": "npm run lint", - "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - }, - "devDependencies": { - "@npmcli/eslint-config": "^5.0.0", - "@npmcli/template-oss": "4.23.3", - "tap": "^16.3.0" - }, - "dependencies": { - "json-parse-even-better-errors": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" - }, - "repository": { - "type": "git", - "url": "git+https://github.com/npm/read-package-json-fast.git" - }, - "files": [ - "bin/", - "lib/" - ], - "templateOSS": { - "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.3", - "publish": true - }, - "tap": { - "nyc-arg": [ - "--exclude", - "tap-snapshots/**" - ] - } -} diff --git a/package-lock.json b/package-lock.json index 460e9d9e1dd26..a4c6653add2d8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2007,7 +2007,7 @@ "version": "1.0.0", "license": "ISC", "devDependencies": { - "@npmcli/arborist": "^9.0.0", + "@npmcli/arborist": "^9.1.2", "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.24.4", "json-stringify-safe": "^5.0.1", @@ -13402,20 +13402,6 @@ "node": "^18.17.0 || >=20.5.0" } }, - "node_modules/read-package-json-fast": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-4.0.0.tgz", - "integrity": "sha512-qpt8EwugBWDw2cgE2W+/3oxC+KTez2uSVR8JU9Q36TXPAGCaozfQUs59v4j4GFpWTaw0i6hAZSvOmu1J0uOEUg==", - "dev": true, - "license": "ISC", - "dependencies": { - "json-parse-even-better-errors": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, "node_modules/read-pkg": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz", From 1eedf82f2a36df193a51dca2c07fdc82dcb18a68 Mon Sep 17 00:00:00 2001 From: Gar Date: Mon, 7 Jul 2025 08:39:51 -0700 Subject: [PATCH 04/63] fix: use @npmcli/package-json to parse package.json --- .../arborist/lib/arborist/build-ideal-tree.js | 16 ++-- .../arborist/lib/arborist/load-actual.js | 6 +- .../arborist/lib/arborist/load-virtual.js | 24 ++---- workspaces/arborist/lib/arborist/rebuild.js | 15 ++-- workspaces/arborist/lib/arborist/reify.js | 57 ++++++-------- workspaces/arborist/lib/node.js | 75 ++++++++++--------- .../test/arborist/load-actual-ctor-throw.js | 22 ------ workspaces/libnpmexec/lib/run-script.js | 7 +- 8 files changed, 94 insertions(+), 128 deletions(-) delete mode 100644 workspaces/arborist/test/arborist/load-actual-ctor-throw.js diff --git a/workspaces/arborist/lib/arborist/build-ideal-tree.js b/workspaces/arborist/lib/arborist/build-ideal-tree.js index 281f62b116bd3..9eff905ffa39c 100644 --- a/workspaces/arborist/lib/arborist/build-ideal-tree.js +++ b/workspaces/arborist/lib/arborist/build-ideal-tree.js @@ -1,6 +1,6 @@ // mixin implementing the buildIdealTree method const localeCompare = require('@isaacs/string-locale-compare')('en') -const rpj = require('read-package-json-fast') +const PackageJson = require('@npmcli/package-json') const npa = require('npm-package-arg') const pacote = require('pacote') const cacache = require('cacache') @@ -268,7 +268,7 @@ module.exports = cls => class IdealTreeBuilder extends cls { root = await this.#globalRootNode() } else { try { - const pkg = await rpj(this.path + '/package.json') + const { content: pkg } = await PackageJson.normalize(this.path) root = await this.#rootNodeFromPackage(pkg) } catch (err) { if (err.code === 'EJSONPARSE') { @@ -448,7 +448,6 @@ module.exports = cls => class IdealTreeBuilder extends cls { const paths = await readdirScoped(nm).catch(() => []) for (const p of paths) { const name = p.replace(/\\/g, '/') - tree.package.dependencies = tree.package.dependencies || {} const updateName = this[_updateNames].includes(name) if (this[_updateAll] || updateName) { if (updateName) { @@ -1288,14 +1287,15 @@ This is a one-time fix-up, please be patient... 
}) } - #linkFromSpec (name, spec, parent) { + async #linkFromSpec (name, spec, parent) { const realpath = spec.fetchSpec const { installLinks, legacyPeerDeps } = this - return rpj(realpath + '/package.json').catch(() => ({})).then(pkg => { - const link = new Link({ name, parent, realpath, pkg, installLinks, legacyPeerDeps }) - this.#linkNodes.add(link) - return link + const { content: pkg } = await PackageJson.normalize(realpath).catch(() => { + return { content: {} } }) + const link = new Link({ name, parent, realpath, pkg, installLinks, legacyPeerDeps }) + this.#linkNodes.add(link) + return link } // load all peer deps and meta-peer deps into the node's parent diff --git a/workspaces/arborist/lib/arborist/load-actual.js b/workspaces/arborist/lib/arborist/load-actual.js index 2add9553688a4..75836d2fbe4a5 100644 --- a/workspaces/arborist/lib/arborist/load-actual.js +++ b/workspaces/arborist/lib/arborist/load-actual.js @@ -1,8 +1,8 @@ // mix-in implementing the loadActual method -const { relative, dirname, resolve, join, normalize } = require('node:path') +const { relative, dirname, resolve, normalize } = require('node:path') -const rpj = require('read-package-json-fast') +const PackageJson = require('@npmcli/package-json') const { readdirScoped } = require('@npmcli/fs') const { walkUp } = require('walk-up-path') const ancestorPath = require('common-ancestor-path') @@ -279,7 +279,7 @@ module.exports = cls => class ActualLoader extends cls { } try { - const pkg = await rpj(join(real, 'package.json')) + const { content: pkg } = await PackageJson.normalize(real) params.pkg = pkg if (useRootOverrides && root.overrides) { params.overrides = root.overrides.getNodeRule({ name: pkg.name, version: pkg.version }) diff --git a/workspaces/arborist/lib/arborist/load-virtual.js b/workspaces/arborist/lib/arborist/load-virtual.js index 92626d8707006..fb0e5e8c60c6f 100644 --- a/workspaces/arborist/lib/arborist/load-virtual.js +++ b/workspaces/arborist/lib/arborist/load-virtual.js @@ -1,16 +1,15 @@ +const { resolve } = require('node:path') // mixin providing the loadVirtual method const mapWorkspaces = require('@npmcli/map-workspaces') - -const { resolve } = require('node:path') - +const PackageJson = require('@npmcli/package-json') const nameFromFolder = require('@npmcli/name-from-folder') + const consistentResolve = require('../consistent-resolve.js') const Shrinkwrap = require('../shrinkwrap.js') const Node = require('../node.js') const Link = require('../link.js') const relpath = require('../relpath.js') const calcDepFlags = require('../calc-dep-flags.js') -const rpj = require('read-package-json-fast') const treeCheck = require('../tree-check.js') const flagsSuspect = Symbol.for('flagsSuspect') @@ -54,10 +53,11 @@ module.exports = cls => class VirtualLoader extends cls { // when building the ideal tree, we pass in a root node to this function // otherwise, load it from the root package json or the lockfile + const pkg = await PackageJson.normalize(this.path).then(p => p.content).catch(() => s.data.packages[''] || {}) + // TODO clean this up const { - root = await this.#loadRoot(s), + root = await this[setWorkspaces](this.#loadNode('', pkg, true)), } = options - this.#rootOptionProvided = options.root await this.#loadFromShrinkwrap(s, root) @@ -65,12 +65,6 @@ module.exports = cls => class VirtualLoader extends cls { return treeCheck(this.virtualTree) } - async #loadRoot (s) { - const pj = this.path + '/package.json' - const pkg = await rpj(pj).catch(() => s.data.packages['']) || {} - return 
this[setWorkspaces](this.#loadNode('', pkg, true)) - } - async #loadFromShrinkwrap (s, root) { if (!this.#rootOptionProvided) { // root is never any of these things, but might be a brand new @@ -219,11 +213,7 @@ To fix: // we always need to read the package.json for link targets // outside node_modules because they can be changed by the local user if (!link.target.parent) { - const pj = link.realpath + '/package.json' - const pkg = await rpj(pj).catch(() => null) - if (pkg) { - link.target.package = pkg - } + await PackageJson.normalize(link.realpath).then(p => link.target.package = p.content).catch(() => null) } } } diff --git a/workspaces/arborist/lib/arborist/rebuild.js b/workspaces/arborist/lib/arborist/rebuild.js index 3340ddaa67067..272d6a4122aef 100644 --- a/workspaces/arborist/lib/arborist/rebuild.js +++ b/workspaces/arborist/lib/arborist/rebuild.js @@ -1,20 +1,19 @@ // Arborist.rebuild({path = this.path}) will do all the binlinks and // bundle building needed. Called by reify, and by `npm rebuild`. +const PackageJson = require('@npmcli/package-json') +const binLinks = require('bin-links') const localeCompare = require('@isaacs/string-locale-compare')('en') -const { depth: dfwalk } = require('treeverse') const promiseAllRejectLate = require('promise-all-reject-late') -const rpj = require('read-package-json-fast') -const binLinks = require('bin-links') const runScript = require('@npmcli/run-script') const { callLimit: promiseCallLimit } = require('promise-call-limit') -const { resolve } = require('node:path') +const { depth: dfwalk } = require('treeverse') const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp') const { log, time } = require('proc-log') +const { resolve } = require('node:path') const boolEnv = b => b ? '1' : '' -const sortNodes = (a, b) => - (a.depth - b.depth) || localeCompare(a.path, b.path) +const sortNodes = (a, b) => (a.depth - b.depth) || localeCompare(a.path, b.path) const _checkBins = Symbol.for('checkBins') @@ -250,7 +249,9 @@ module.exports = cls => class Builder extends cls { // add to the set then remove while we're reading the pj, so we // don't accidentally hit it multiple times. 
set.add(node) - const pkg = await rpj(node.path + '/package.json').catch(() => ({})) + const { content: pkg } = await PackageJson.normalize(node.path).catch(() => { + return { content: {} } + }) set.delete(node) const { scripts = {} } = pkg diff --git a/workspaces/arborist/lib/arborist/reify.js b/workspaces/arborist/lib/arborist/reify.js index ee381e809216f..8591e0b0db96e 100644 --- a/workspaces/arborist/lib/arborist/reify.js +++ b/workspaces/arborist/lib/arborist/reify.js @@ -1,43 +1,33 @@ // mixin implementing the reify method -const onExit = require('../signal-handling.js') -const pacote = require('pacote') -const AuditReport = require('../audit-report.js') -const { subset, intersects } = require('semver') -const npa = require('npm-package-arg') -const semver = require('semver') -const debug = require('../debug.js') -const { walkUp } = require('walk-up-path') -const { log, time } = require('proc-log') -const rpj = require('read-package-json-fast') -const hgi = require('hosted-git-info') - -const { dirname, resolve, relative, join } = require('node:path') -const { depth: dfwalk } = require('treeverse') -const { - lstat, - mkdir, - rm, - symlink, -} = require('node:fs/promises') -const { moveFile } = require('@npmcli/fs') const PackageJson = require('@npmcli/package-json') +const hgi = require('hosted-git-info') +const npa = require('npm-package-arg') const packageContents = require('@npmcli/installed-package-contents') +const pacote = require('pacote') +const promiseAllRejectLate = require('promise-all-reject-late') const runScript = require('@npmcli/run-script') +const { callLimit: promiseCallLimit } = require('promise-call-limit') const { checkEngine, checkPlatform } = require('npm-install-checks') +const { depth: dfwalk } = require('treeverse') +const { dirname, resolve, relative, join } = require('node:path') +const { log, time } = require('proc-log') +const { lstat, mkdir, rm, symlink } = require('node:fs/promises') +const { moveFile } = require('@npmcli/fs') +const { subset, intersects } = require('semver') +const { walkUp } = require('walk-up-path') -const treeCheck = require('../tree-check.js') -const relpath = require('../relpath.js') +const AuditReport = require('../audit-report.js') const Diff = require('../diff.js') -const retirePath = require('../retire-path.js') -const promiseAllRejectLate = require('promise-all-reject-late') -const { callLimit: promiseCallLimit } = require('promise-call-limit') -const optionalSet = require('../optional-set.js') const calcDepFlags = require('../calc-dep-flags.js') +const debug = require('../debug.js') +const onExit = require('../signal-handling.js') +const optionalSet = require('../optional-set.js') +const relpath = require('../relpath.js') +const retirePath = require('../retire-path.js') +const treeCheck = require('../tree-check.js') +const { defaultLockfileVersion } = require('../shrinkwrap.js') const { saveTypeMap, hasSubKey } = require('../add-rm-pkg-deps.js') -const Shrinkwrap = require('../shrinkwrap.js') -const { defaultLockfileVersion } = Shrinkwrap - // Part of steps (steps need refactoring before we can do anything about these) const _retireShallowNodes = Symbol.for('retireShallowNodes') const _loadBundlesAndUpdateTrees = Symbol.for('loadBundlesAndUpdateTrees') @@ -772,7 +762,7 @@ module.exports = cls => class Reifier extends cls { }) // store nodes don't use Node class so node.package doesn't get updated if (node.isInStore) { - const pkg = await rpj(join(node.path, 'package.json')) + const { content: pkg } = await 
PackageJson.normalize(node.path) node.package.scripts = pkg.scripts } return @@ -1401,8 +1391,7 @@ module.exports = cls => class Reifier extends cls { if (options.saveType) { const depType = saveTypeMap.get(options.saveType) pkg[depType][name] = newSpec - // rpj will have moved it here if it was in both - // if it is empty it will be deleted later + // PackageJson.normalize will have moved it here if it was in both, if it is empty it will be deleted later if (options.saveType === 'prod' && pkg.optionalDependencies) { delete pkg.optionalDependencies[name] } @@ -1443,7 +1432,7 @@ module.exports = cls => class Reifier extends cls { const exactVersion = node => { for (const edge of node.edgesIn) { try { - if (semver.subset(edge.spec, node.version)) { + if (subset(edge.spec, node.version)) { return false } } catch { diff --git a/workspaces/arborist/lib/node.js b/workspaces/arborist/lib/node.js index 1f67708a41ced..1b75e60660927 100644 --- a/workspaces/arborist/lib/node.js +++ b/workspaces/arborist/lib/node.js @@ -28,22 +28,28 @@ // where we need to quickly find all instances of a given package name within a // tree. -const semver = require('semver') +const PackageJson = require('@npmcli/package-json') const nameFromFolder = require('@npmcli/name-from-folder') +const npa = require('npm-package-arg') +const semver = require('semver') +const util = require('node:util') +const { getPaths: getBinPaths } = require('bin-links') +const { log } = require('proc-log') +const { resolve, relative, dirname, basename } = require('node:path') +const { walkUp } = require('walk-up-path') + +const CaseInsensitiveMap = require('./case-insensitive-map.js') const Edge = require('./edge.js') const Inventory = require('./inventory.js') const OverrideSet = require('./override-set.js') -const { normalize } = require('read-package-json-fast') -const { getPaths: getBinPaths } = require('bin-links') -const npa = require('npm-package-arg') +const consistentResolve = require('./consistent-resolve.js') const debug = require('./debug.js') const gatherDepSet = require('./gather-dep-set.js') +const printableTree = require('./printable.js') +const querySelectorAll = require('./query-selector-all.js') +const relpath = require('./relpath.js') const treeCheck = require('./tree-check.js') -const { walkUp } = require('walk-up-path') -const { log } = require('proc-log') -const { resolve, relative, dirname, basename } = require('node:path') -const util = require('node:util') const _package = Symbol('_package') const _parent = Symbol('_parent') const _target = Symbol.for('_target') @@ -58,14 +64,6 @@ const _delistFromMeta = Symbol.for('_delistFromMeta') const _explain = Symbol('_explain') const _explanation = Symbol('_explanation') -const relpath = require('./relpath.js') -const consistentResolve = require('./consistent-resolve.js') - -const printableTree = require('./printable.js') -const CaseInsensitiveMap = require('./case-insensitive-map.js') - -const querySelectorAll = require('./query-selector-all.js') - class Node { #global #meta @@ -121,14 +119,25 @@ class Node { // package's dependencies in a virtual root. this.sourceReference = sourceReference - // TODO if this came from pacote.manifest we don't have to do this, - // we can be told to skip this step - const pkg = sourceReference ? 
sourceReference.package - : normalize(options.pkg || {}) + // have to set the internal package ref before assigning the parent, because this.package is read when adding to inventory + if (sourceReference) { + this[_package] = sourceReference.package + } else { + // TODO if this came from pacote.manifest we don't have to do this, we can be told to skip this step + const pkg = new PackageJson() + let content = {} + // TODO this is overly guarded. If pkg is not an object we should not allow it at all. + if (options.pkg && typeof options.pkg === 'object') { + content = options.pkg + } + pkg.fromContent(content) + pkg.syncNormalize() + this[_package] = pkg.content + } this.name = name || - nameFromFolder(path || pkg.name || realpath) || - pkg.name || + nameFromFolder(path || this.package.name || realpath) || + this.package.name || null // should be equal if not a link @@ -156,13 +165,13 @@ class Node { // probably what we're getting from pacote, which IS trustworthy. // // Otherwise, hopefully a shrinkwrap will help us out. - const resolved = consistentResolve(pkg._resolved) - if (resolved && !(/^file:/.test(resolved) && pkg._where)) { + const resolved = consistentResolve(this.package._resolved) + if (resolved && !(/^file:/.test(resolved) && this.package._where)) { this.resolved = resolved } } - this.integrity = integrity || pkg._integrity || null - this.hasShrinkwrap = hasShrinkwrap || pkg._hasShrinkwrap || false + this.integrity = integrity || this.package._integrity || null + this.hasShrinkwrap = hasShrinkwrap || this.package._hasShrinkwrap || false this.installLinks = installLinks this.legacyPeerDeps = legacyPeerDeps @@ -203,17 +212,13 @@ class Node { this.edgesIn = new Set() this.edgesOut = new CaseInsensitiveMap() - // have to set the internal package ref before assigning the parent, - // because this.package is read when adding to inventory - this[_package] = pkg && typeof pkg === 'object' ? 
pkg : {} - if (overrides) { this.overrides = overrides } else if (loadOverrides) { - const overrides = this[_package].overrides || {} + const overrides = this.package.overrides || {} if (Object.keys(overrides).length > 0) { this.overrides = new OverrideSet({ - overrides: this[_package].overrides, + overrides: this.package.overrides, }) } } @@ -314,7 +319,7 @@ class Node { } return getBinPaths({ - pkg: this[_package], + pkg: this.package, path: this.path, global: this.global, top: this.globalTop, @@ -328,11 +333,11 @@ class Node { } get version () { - return this[_package].version || '' + return this.package.version || '' } get packageName () { - return this[_package].name || null + return this.package.name || null } get pkgid () { diff --git a/workspaces/arborist/test/arborist/load-actual-ctor-throw.js b/workspaces/arborist/test/arborist/load-actual-ctor-throw.js deleted file mode 100644 index 82569f1311cfa..0000000000000 --- a/workspaces/arborist/test/arborist/load-actual-ctor-throw.js +++ /dev/null @@ -1,22 +0,0 @@ -const rpj = require('read-package-json-fast') -const t = require('tap') -const rpjMock = Object.assign((...args) => rpj(...args), { - ...rpj, - normalize: () => { - throw new Error('boom') - }, -}) -const Arborist = t.mock('../../lib/arborist', { - 'read-package-json-fast': rpjMock, -}) - -const { resolve } = require('node:path') -const { fixtures } = require('../fixtures/index.js') - -t.test('blow up and catch error if Node ctor blows up', t => { - // mock rpj so that we can blow up on the 'normalize' method called - // in the Node constructor, because it's (by design) extremely hard - // to make the ctor throw. - const path = resolve(fixtures, 'root') - return t.rejects(new Arborist({ path }).loadActual(), { message: 'boom' }) -}) diff --git a/workspaces/libnpmexec/lib/run-script.js b/workspaces/libnpmexec/lib/run-script.js index aa4f0525e9d2f..13f16a74eb8a0 100644 --- a/workspaces/libnpmexec/lib/run-script.js +++ b/workspaces/libnpmexec/lib/run-script.js @@ -1,6 +1,6 @@ const ciInfo = require('ci-info') const runScript = require('@npmcli/run-script') -const readPackageJson = require('read-package-json-fast') +const pkgJson = require('@npmcli/package-json') const { log, output } = require('proc-log') const noTTY = require('./no-tty.js') const isWindowsShell = require('./is-windows.js') @@ -28,7 +28,10 @@ const run = async ({ // do the fakey runScript dance // still should work if no package.json in cwd - const realPkg = await readPackageJson(`${path}/package.json`).catch(() => ({})) + const { content: realPkg } = await pkgJson.normalize(path, { steps: [ + 'binDir', + ...pkgJson.normalizeSteps, + ] }).catch(() => ({ content: {} })) const pkg = { ...realPkg, scripts: { From ceae674c32a080b81e62d79003c2d537d7ca93d2 Mon Sep 17 00:00:00 2001 From: Gar Date: Wed, 17 Sep 2025 10:07:33 -0700 Subject: [PATCH 05/63] deps: @npmcli/package-json@7.0.1 --- DEPENDENCIES.json | 1 - DEPENDENCIES.md | 10 +- node_modules/.gitignore | 36 +- .../@isaacs/balanced-match/LICENSE.md | 23 + .../balanced-match/dist/commonjs/index.js | 59 + .../balanced-match/dist/commonjs/package.json | 3 + .../@isaacs/balanced-match/dist/esm/index.js | 54 + .../balanced-match/dist/esm/package.json | 3 + .../@isaacs/balanced-match/package.json | 79 + node_modules/@isaacs/brace-expansion/LICENSE | 23 + .../brace-expansion/dist/commonjs/index.js | 196 ++ .../dist/commonjs/package.json | 3 + .../@isaacs/brace-expansion/dist/esm/index.js | 193 ++ .../brace-expansion/dist/esm/package.json | 3 + 
.../@isaacs/brace-expansion/package.json | 71 + .../node_modules/@npmcli/package-json/LICENSE | 18 + .../@npmcli/package-json/lib/index.js | 286 +++ .../package-json/lib/normalize-data.js | 257 +++ .../@npmcli/package-json/lib/normalize.js | 601 +++++ .../@npmcli/package-json/lib/read-package.js | 39 + .../@npmcli/package-json/lib/sort.js | 101 + .../package-json/lib/update-dependencies.js | 75 + .../package-json/lib/update-scripts.js | 29 + .../package-json/lib/update-workspaces.js | 26 + .../@npmcli/package-json/package.json | 61 + .../@npmcli/package-json/lib/index.js | 38 +- .../package-json/lib/normalize-data.js | 11 +- .../@npmcli/package-json/lib/normalize.js | 301 +-- .../node_modules/@npmcli/git/LICENSE | 15 + .../node_modules/@npmcli/git/lib/clone.js | 172 ++ .../node_modules/@npmcli/git/lib/errors.js | 36 + .../node_modules/@npmcli/git/lib/find.js | 15 + .../node_modules/@npmcli/git/lib/index.js | 9 + .../node_modules/@npmcli/git/lib/is-clean.js | 6 + .../node_modules/@npmcli/git/lib/is.js | 4 + .../@npmcli/git/lib/lines-to-revs.js | 147 ++ .../@npmcli/git/lib/make-error.js | 33 + .../node_modules/@npmcli/git/lib/opts.js | 57 + .../node_modules/@npmcli/git/lib/revs.js | 22 + .../node_modules/@npmcli/git/lib/spawn.js | 44 + .../node_modules/@npmcli/git/lib/utils.js | 3 + .../node_modules/@npmcli/git/lib/which.js | 18 + .../node_modules/@npmcli/git/package.json | 58 + .../package-json/node_modules/glob/LICENSE | 15 + .../node_modules/glob/dist/commonjs/glob.js | 247 ++ .../glob/dist/commonjs/has-magic.js | 27 + .../node_modules/glob/dist/commonjs/ignore.js | 119 + .../node_modules/glob/dist/commonjs/index.js | 68 + .../glob/dist/commonjs/package.json | 3 + .../glob/dist/commonjs/pattern.js | 219 ++ .../glob/dist/commonjs/processor.js | 301 +++ .../node_modules/glob/dist/commonjs/walker.js | 387 ++++ .../node_modules/glob/dist/esm/bin.d.mts | 3 + .../node_modules/glob/dist/esm/bin.mjs | 276 +++ .../node_modules/glob/dist/esm/glob.js | 243 ++ .../node_modules/glob/dist/esm/has-magic.js | 23 + .../node_modules/glob/dist/esm/ignore.js | 115 + .../node_modules/glob/dist/esm/index.js | 55 + .../node_modules/glob/dist/esm/package.json | 3 + .../node_modules/glob/dist/esm/pattern.js | 215 ++ .../node_modules/glob/dist/esm/processor.js | 294 +++ .../node_modules/glob/dist/esm/walker.js | 381 ++++ .../node_modules/glob/package.json | 97 + .../node_modules/hosted-git-info/LICENSE | 13 + .../hosted-git-info/lib/from-url.js | 122 + .../node_modules/hosted-git-info/lib/hosts.js | 231 ++ .../node_modules/hosted-git-info/lib/index.js | 227 ++ .../hosted-git-info/lib/parse-url.js | 78 + .../node_modules/hosted-git-info/package.json | 61 + .../node_modules/jackspeak/LICENSE.md | 55 + .../jackspeak/dist/commonjs/index.js | 947 ++++++++ .../jackspeak/dist/commonjs/package.json | 3 + .../node_modules/jackspeak/dist/esm/index.js | 936 ++++++++ .../jackspeak/dist/esm/package.json | 3 + .../node_modules/jackspeak/package.json | 94 + .../node_modules/lru-cache/LICENSE | 15 + .../lru-cache/dist/commonjs/index.js | 1564 +++++++++++++ .../lru-cache/dist/commonjs/index.min.js | 2 + .../lru-cache/dist/commonjs/package.json | 3 + .../node_modules/lru-cache/dist/esm/index.js | 1560 +++++++++++++ .../lru-cache/dist/esm/index.min.js | 2 + .../lru-cache/dist/esm/package.json | 3 + .../node_modules/lru-cache/package.json | 113 + .../node_modules/minimatch/LICENSE | 15 + .../dist/commonjs/assert-valid-pattern.js | 14 + .../minimatch/dist/commonjs/ast.js | 592 +++++ .../dist/commonjs/brace-expressions.js | 152 ++ 
.../minimatch/dist/commonjs/escape.js | 22 + .../minimatch/dist/commonjs/index.js | 1014 +++++++++ .../minimatch/dist/commonjs/package.json | 3 + .../minimatch/dist/commonjs/unescape.js | 24 + .../dist/esm/assert-valid-pattern.js | 10 + .../node_modules/minimatch/dist/esm/ast.js | 588 +++++ .../minimatch/dist/esm/brace-expressions.js | 148 ++ .../node_modules/minimatch/dist/esm/escape.js | 18 + .../node_modules/minimatch/dist/esm/index.js | 1001 ++++++++ .../minimatch/dist/esm/package.json | 3 + .../minimatch/dist/esm/unescape.js | 20 + .../node_modules/minimatch/package.json | 79 + .../node_modules/npm-package-arg/LICENSE | 15 + .../node_modules/npm-package-arg/lib/npa.js | 481 ++++ .../node_modules/npm-package-arg/package.json | 61 + .../node_modules/npm-pick-manifest/LICENSE.md | 16 + .../npm-pick-manifest/lib/index.js | 219 ++ .../npm-pick-manifest/package.json | 58 + .../node_modules/path-scurry/LICENSE.md | 55 + .../path-scurry/dist/commonjs/index.js | 2016 +++++++++++++++++ .../path-scurry/dist/commonjs/package.json | 3 + .../path-scurry/dist/esm/index.js | 1981 ++++++++++++++++ .../path-scurry/dist/esm/package.json | 3 + .../node_modules/path-scurry/package.json | 88 + .../@npmcli/package-json/package.json | 16 +- .../node_modules/@npmcli/package-json/LICENSE | 18 + .../@npmcli/package-json/lib/index.js | 286 +++ .../package-json/lib/normalize-data.js | 257 +++ .../@npmcli/package-json/lib/normalize.js | 601 +++++ .../@npmcli/package-json/lib/read-package.js | 39 + .../@npmcli/package-json/lib/sort.js | 101 + .../package-json/lib/update-dependencies.js | 75 + .../package-json/lib/update-scripts.js | 29 + .../package-json/lib/update-workspaces.js | 26 + .../@npmcli/package-json/package.json | 61 + .../node_modules/@npmcli/package-json/LICENSE | 18 + .../@npmcli/package-json/lib/index.js | 286 +++ .../package-json/lib/normalize-data.js | 257 +++ .../@npmcli/package-json/lib/normalize.js | 601 +++++ .../@npmcli/package-json/lib/read-package.js | 39 + .../@npmcli/package-json/lib/sort.js | 101 + .../package-json/lib/update-dependencies.js | 75 + .../package-json/lib/update-scripts.js | 29 + .../package-json/lib/update-workspaces.js | 26 + .../@npmcli/package-json/package.json | 61 + .../node_modules/@npmcli/package-json/LICENSE | 18 + .../@npmcli/package-json/lib/index.js | 286 +++ .../package-json/lib/normalize-data.js | 257 +++ .../@npmcli/package-json/lib/normalize.js | 601 +++++ .../@npmcli/package-json/lib/read-package.js | 39 + .../@npmcli/package-json/lib/sort.js | 101 + .../package-json/lib/update-dependencies.js | 75 + .../package-json/lib/update-scripts.js | 29 + .../package-json/lib/update-workspaces.js | 26 + .../@npmcli/package-json/package.json | 61 + package-lock.json | 488 +++- package.json | 2 +- workspaces/arborist/package.json | 4 +- workspaces/config/package.json | 2 +- workspaces/libnpmexec/package.json | 2 +- workspaces/libnpmpublish/package.json | 2 +- 148 files changed, 25840 insertions(+), 216 deletions(-) create mode 100644 node_modules/@isaacs/balanced-match/LICENSE.md create mode 100644 node_modules/@isaacs/balanced-match/dist/commonjs/index.js create mode 100644 node_modules/@isaacs/balanced-match/dist/commonjs/package.json create mode 100644 node_modules/@isaacs/balanced-match/dist/esm/index.js create mode 100644 node_modules/@isaacs/balanced-match/dist/esm/package.json create mode 100644 node_modules/@isaacs/balanced-match/package.json create mode 100644 node_modules/@isaacs/brace-expansion/LICENSE create mode 100644 
node_modules/@isaacs/brace-expansion/dist/commonjs/index.js create mode 100644 node_modules/@isaacs/brace-expansion/dist/commonjs/package.json create mode 100644 node_modules/@isaacs/brace-expansion/dist/esm/index.js create mode 100644 node_modules/@isaacs/brace-expansion/dist/esm/package.json create mode 100644 node_modules/@isaacs/brace-expansion/package.json create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js create mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js 
create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts create mode 100755 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js create mode 100644 node_modules/@npmcli/package-json/node_modules/glob/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js create mode 100644 node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md create mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/ast.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/brace-expressions.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/escape.js create mode 100644 
node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/unescape.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/assert-valid-pattern.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/ast.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/brace-expressions.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/escape.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/unescape.js create mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/npm-package-arg/LICENSE create mode 100644 node_modules/@npmcli/package-json/node_modules/npm-package-arg/lib/npa.js create mode 100644 node_modules/@npmcli/package-json/node_modules/npm-package-arg/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/LICENSE.md create mode 100644 node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md create mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js create mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json create mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/package.json create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js create mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js create mode 
100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js create mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/package.json create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/LICENSE create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js create mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/package.json diff --git a/DEPENDENCIES.json b/DEPENDENCIES.json index b8f4c8d2d2cfd..51a1b4c234b1b 100644 --- a/DEPENDENCIES.json +++ b/DEPENDENCIES.json @@ -58,7 +58,6 @@ "bin-links", "nopt", "parse-conflict-json", - "read-package-json-fast", "@npmcli/mock-globals", "read", "normalize-package-data" diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md index 5c3c3caacd0ae..68de2df464a6c 100644 --- a/DEPENDENCIES.md +++ b/DEPENDENCIES.md @@ -38,7 +38,6 @@ graph LR; libnpmexec-->npmcli-template-oss["@npmcli/template-oss"]; libnpmexec-->pacote; libnpmexec-->proc-log; - libnpmexec-->read-package-json-fast; libnpmexec-->read; libnpmexec-->semver; libnpmfund-->npmcli-arborist["@npmcli/arborist"]; @@ -178,7 +177,6 @@ graph LR; npmcli-arborist-->parse-conflict-json; npmcli-arborist-->proc-log; npmcli-arborist-->proggy; - npmcli-arborist-->read-package-json-fast; npmcli-arborist-->semver; npmcli-arborist-->ssri; npmcli-config-->ini; @@ -248,8 +246,6 @@ graph LR; parse-conflict-json-->json-parse-even-better-errors; promzard-->read; read-->mute-stream; - read-package-json-fast-->json-parse-even-better-errors; - read-package-json-fast-->npm-normalize-package-bin; unique-filename-->unique-slug; ``` @@ -350,7 +346,6 @@ graph LR; libnpmexec-->pacote; libnpmexec-->proc-log; libnpmexec-->promise-retry; - libnpmexec-->read-package-json-fast; libnpmexec-->read; libnpmexec-->semver; libnpmexec-->signal-exit; @@ -593,7 +588,6 @@ graph LR; npmcli-arborist-->proggy; npmcli-arborist-->promise-all-reject-late; npmcli-arborist-->promise-call-limit; - npmcli-arborist-->read-package-json-fast; npmcli-arborist-->semver; npmcli-arborist-->ssri; npmcli-arborist-->tap; @@ -710,8 +704,6 @@ graph LR; promise-retry-->retry; promzard-->read; read-->mute-stream; - read-package-json-fast-->json-parse-even-better-errors; - read-package-json-fast-->npm-normalize-package-bin; shebang-command-->shebang-regex; sigstore-->sigstore-bundle["@sigstore/bundle"]; 
sigstore-->sigstore-core["@sigstore/core"]; @@ -787,5 +779,5 @@ packages higher up the chain. - @npmcli/package-json, npm-registry-fetch - @npmcli/git, make-fetch-happen - @npmcli/smoke-tests, @npmcli/installed-package-contents, npm-pick-manifest, cacache, promzard - - @npmcli/docs, @npmcli/fs, npm-bundled, @npmcli/promise-spawn, npm-install-checks, npm-package-arg, unique-filename, npm-packlist, bin-links, nopt, parse-conflict-json, read-package-json-fast, @npmcli/mock-globals, read, normalize-package-data + - @npmcli/docs, @npmcli/fs, npm-bundled, @npmcli/promise-spawn, npm-install-checks, npm-package-arg, unique-filename, npm-packlist, bin-links, nopt, parse-conflict-json, @npmcli/mock-globals, read, normalize-package-data - @npmcli/eslint-config, @npmcli/template-oss, ignore-walk, semver, npm-normalize-package-bin, @npmcli/name-from-folder, which, ini, hosted-git-info, proc-log, validate-npm-package-name, json-parse-even-better-errors, ssri, unique-slug, @npmcli/node-gyp, @npmcli/redact, @npmcli/agent, minipass-fetch, @npmcli/query, cmd-shim, read-cmd-shim, write-file-atomic, abbrev, proggy, minify-registry-metadata, mute-stream, npm-audit-report, npm-user-validate diff --git a/node_modules/.gitignore b/node_modules/.gitignore index 8451947e5f73b..514ff1c417f92 100644 --- a/node_modules/.gitignore +++ b/node_modules/.gitignore @@ -5,6 +5,8 @@ # Allow all bundled deps !/@isaacs/ /@isaacs/* +!/@isaacs/balanced-match +!/@isaacs/brace-expansion !/@isaacs/cliui !/@isaacs/cliui/node_modules/ /@isaacs/cliui/node_modules/* @@ -21,14 +23,37 @@ !/@npmcli/git !/@npmcli/installed-package-contents !/@npmcli/map-workspaces +!/@npmcli/map-workspaces/node_modules/ +/@npmcli/map-workspaces/node_modules/* +!/@npmcli/map-workspaces/node_modules/@npmcli/ +/@npmcli/map-workspaces/node_modules/@npmcli/* +!/@npmcli/map-workspaces/node_modules/@npmcli/package-json !/@npmcli/metavuln-calculator !/@npmcli/name-from-folder !/@npmcli/node-gyp !/@npmcli/package-json +!/@npmcli/package-json/node_modules/ +/@npmcli/package-json/node_modules/* +!/@npmcli/package-json/node_modules/@npmcli/ +/@npmcli/package-json/node_modules/@npmcli/* +!/@npmcli/package-json/node_modules/@npmcli/git +!/@npmcli/package-json/node_modules/glob +!/@npmcli/package-json/node_modules/hosted-git-info +!/@npmcli/package-json/node_modules/jackspeak +!/@npmcli/package-json/node_modules/lru-cache +!/@npmcli/package-json/node_modules/minimatch +!/@npmcli/package-json/node_modules/npm-package-arg +!/@npmcli/package-json/node_modules/npm-pick-manifest +!/@npmcli/package-json/node_modules/path-scurry !/@npmcli/promise-spawn !/@npmcli/query !/@npmcli/redact !/@npmcli/run-script +!/@npmcli/run-script/node_modules/ +/@npmcli/run-script/node_modules/* +!/@npmcli/run-script/node_modules/@npmcli/ +/@npmcli/run-script/node_modules/@npmcli/* +!/@npmcli/run-script/node_modules/@npmcli/package-json !/@pkgjs/ /@pkgjs/* !/@pkgjs/parseargs @@ -98,6 +123,11 @@ !/imurmurhash !/ini !/init-package-json +!/init-package-json/node_modules/ +/init-package-json/node_modules/* +!/init-package-json/node_modules/@npmcli/ +/init-package-json/node_modules/@npmcli/* +!/init-package-json/node_modules/@npmcli/package-json !/ip-address !/ip-regex !/is-cidr @@ -167,6 +197,11 @@ !/p-map !/package-json-from-dist !/pacote +!/pacote/node_modules/ +/pacote/node_modules/* +!/pacote/node_modules/@npmcli/ +/pacote/node_modules/@npmcli/* +!/pacote/node_modules/@npmcli/package-json !/parse-conflict-json !/path-key !/path-scurry @@ -179,7 +214,6 @@ !/promzard !/qrcode-terminal !/read-cmd-shim 
-!/read-package-json-fast !/read !/retry !/safer-buffer diff --git a/node_modules/@isaacs/balanced-match/LICENSE.md b/node_modules/@isaacs/balanced-match/LICENSE.md new file mode 100644 index 0000000000000..61ece8cc92afb --- /dev/null +++ b/node_modules/@isaacs/balanced-match/LICENSE.md @@ -0,0 +1,23 @@ +(MIT) + +Original code Copyright Julian Gruber + +Port to TypeScript Copyright Isaac Z. Schlueter + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@isaacs/balanced-match/dist/commonjs/index.js b/node_modules/@isaacs/balanced-match/dist/commonjs/index.js new file mode 100644 index 0000000000000..0c9014bac1531 --- /dev/null +++ b/node_modules/@isaacs/balanced-match/dist/commonjs/index.js @@ -0,0 +1,59 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.range = exports.balanced = void 0; +const balanced = (a, b, str) => { + const ma = a instanceof RegExp ? maybeMatch(a, str) : a; + const mb = b instanceof RegExp ? maybeMatch(b, str) : b; + const r = ma !== null && mb != null && (0, exports.range)(ma, mb, str); + return (r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + ma.length, r[1]), + post: str.slice(r[1] + mb.length), + }); +}; +exports.balanced = balanced; +const maybeMatch = (reg, str) => { + const m = str.match(reg); + return m ? m[0] : null; +}; +const range = (a, b, str) => { + let begs, beg, left, right = undefined, result; + let ai = str.indexOf(a); + let bi = str.indexOf(b, ai + 1); + let i = ai; + if (ai >= 0 && bi > 0) { + if (a === b) { + return [ai, bi]; + } + begs = []; + left = str.length; + while (i >= 0 && !result) { + if (i === ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } + else if (begs.length === 1) { + const r = begs.pop(); + if (r !== undefined) + result = [r, bi]; + } + else { + beg = begs.pop(); + if (beg !== undefined && beg < left) { + left = beg; + right = bi; + } + bi = str.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? 
ai : bi; + } + if (begs.length && right !== undefined) { + result = [left, right]; + } + } + return result; +}; +exports.range = range; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@isaacs/balanced-match/dist/commonjs/package.json b/node_modules/@isaacs/balanced-match/dist/commonjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@isaacs/balanced-match/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@isaacs/balanced-match/dist/esm/index.js b/node_modules/@isaacs/balanced-match/dist/esm/index.js new file mode 100644 index 0000000000000..fe81200f9d676 --- /dev/null +++ b/node_modules/@isaacs/balanced-match/dist/esm/index.js @@ -0,0 +1,54 @@ +export const balanced = (a, b, str) => { + const ma = a instanceof RegExp ? maybeMatch(a, str) : a; + const mb = b instanceof RegExp ? maybeMatch(b, str) : b; + const r = ma !== null && mb != null && range(ma, mb, str); + return (r && { + start: r[0], + end: r[1], + pre: str.slice(0, r[0]), + body: str.slice(r[0] + ma.length, r[1]), + post: str.slice(r[1] + mb.length), + }); +}; +const maybeMatch = (reg, str) => { + const m = str.match(reg); + return m ? m[0] : null; +}; +export const range = (a, b, str) => { + let begs, beg, left, right = undefined, result; + let ai = str.indexOf(a); + let bi = str.indexOf(b, ai + 1); + let i = ai; + if (ai >= 0 && bi > 0) { + if (a === b) { + return [ai, bi]; + } + begs = []; + left = str.length; + while (i >= 0 && !result) { + if (i === ai) { + begs.push(i); + ai = str.indexOf(a, i + 1); + } + else if (begs.length === 1) { + const r = begs.pop(); + if (r !== undefined) + result = [r, bi]; + } + else { + beg = begs.pop(); + if (beg !== undefined && beg < left) { + left = beg; + right = bi; + } + bi = str.indexOf(b, i + 1); + } + i = ai < bi && ai >= 0 ? ai : bi; + } + if (begs.length && right !== undefined) { + result = [left, right]; + } + } + return result; +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@isaacs/balanced-match/dist/esm/package.json b/node_modules/@isaacs/balanced-match/dist/esm/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@isaacs/balanced-match/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@isaacs/balanced-match/package.json b/node_modules/@isaacs/balanced-match/package.json new file mode 100644 index 0000000000000..49296e6af443c --- /dev/null +++ b/node_modules/@isaacs/balanced-match/package.json @@ -0,0 +1,79 @@ +{ + "name": "@isaacs/balanced-match", + "description": "Match balanced character pairs, like \"{\" and \"}\"", + "version": "4.0.1", + "files": [ + "dist" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/balanced-match.git" + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "type": "module", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . 
--loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.2", + "@types/node": "^24.0.0", + "mkdirp": "^3.0.1", + "prettier": "^3.3.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.5" + }, + "keywords": [ + "match", + "regexp", + "test", + "balanced", + "parse" + ], + "license": "MIT", + "engines": { + "node": "20 || >=22" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "module": "./dist/esm/index.js" +} diff --git a/node_modules/@isaacs/brace-expansion/LICENSE b/node_modules/@isaacs/brace-expansion/LICENSE new file mode 100644 index 0000000000000..46e7b75c91ced --- /dev/null +++ b/node_modules/@isaacs/brace-expansion/LICENSE @@ -0,0 +1,23 @@ +MIT License + +Copyright Julian Gruber + +TypeScript port Copyright Isaac Z. Schlueter + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/@isaacs/brace-expansion/dist/commonjs/index.js b/node_modules/@isaacs/brace-expansion/dist/commonjs/index.js new file mode 100644 index 0000000000000..99cee69d560e2 --- /dev/null +++ b/node_modules/@isaacs/brace-expansion/dist/commonjs/index.js @@ -0,0 +1,196 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.expand = expand; +const balanced_match_1 = require("@isaacs/balanced-match"); +const escSlash = '\0SLASH' + Math.random() + '\0'; +const escOpen = '\0OPEN' + Math.random() + '\0'; +const escClose = '\0CLOSE' + Math.random() + '\0'; +const escComma = '\0COMMA' + Math.random() + '\0'; +const escPeriod = '\0PERIOD' + Math.random() + '\0'; +const escSlashPattern = new RegExp(escSlash, 'g'); +const escOpenPattern = new RegExp(escOpen, 'g'); +const escClosePattern = new RegExp(escClose, 'g'); +const escCommaPattern = new RegExp(escComma, 'g'); +const escPeriodPattern = new RegExp(escPeriod, 'g'); +const slashPattern = /\\\\/g; +const openPattern = /\\{/g; +const closePattern = /\\}/g; +const commaPattern = /\\,/g; +const periodPattern = /\\./g; +function numeric(str) { + return !isNaN(str) ? 
parseInt(str, 10) : str.charCodeAt(0); +} +function escapeBraces(str) { + return str + .replace(slashPattern, escSlash) + .replace(openPattern, escOpen) + .replace(closePattern, escClose) + .replace(commaPattern, escComma) + .replace(periodPattern, escPeriod); +} +function unescapeBraces(str) { + return str + .replace(escSlashPattern, '\\') + .replace(escOpenPattern, '{') + .replace(escClosePattern, '}') + .replace(escCommaPattern, ',') + .replace(escPeriodPattern, '.'); +} +/** + * Basically just str.split(","), but handling cases + * where we have nested braced sections, which should be + * treated as individual members, like {a,{b,c},d} + */ +function parseCommaParts(str) { + if (!str) { + return ['']; + } + const parts = []; + const m = (0, balanced_match_1.balanced)('{', '}', str); + if (!m) { + return str.split(','); + } + const { pre, body, post } = m; + const p = pre.split(','); + p[p.length - 1] += '{' + body + '}'; + const postParts = parseCommaParts(post); + if (post.length) { + ; + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); + } + parts.push.apply(parts, p); + return parts; +} +function expand(str) { + if (!str) { + return []; + } + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.slice(0, 2) === '{}') { + str = '\\{\\}' + str.slice(2); + } + return expand_(escapeBraces(str), true).map(unescapeBraces); +} +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} +function expand_(str, isTop) { + /** @type {string[]} */ + const expansions = []; + const m = (0, balanced_match_1.balanced)('{', '}', str); + if (!m) + return [str]; + // no need to expand pre, since it is guaranteed to be free of brace-sets + const pre = m.pre; + const post = m.post.length ? expand_(m.post, false) : ['']; + if (/\$$/.test(m.pre)) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } + else { + const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + const isSequence = isNumericSequence || isAlphaSequence; + const isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,(?!,).*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand_(str); + } + return [str]; + } + let n; + if (isSequence) { + n = m.body.split(/\.\./); + } + else { + n = parseCommaParts(m.body); + if (n.length === 1 && n[0] !== undefined) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand_(n[0], false).map(embrace); + //XXX is this necessary? Can't seem to hit it in tests. + /* c8 ignore start */ + if (n.length === 1) { + return post.map(p => m.pre + n[0] + p); + } + /* c8 ignore stop */ + } + } + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. 
+ let N; + if (isSequence && n[0] !== undefined && n[1] !== undefined) { + const x = numeric(n[0]); + const y = numeric(n[1]); + const width = Math.max(n[0].length, n[1].length); + let incr = n.length === 3 && n[2] !== undefined ? Math.abs(numeric(n[2])) : 1; + let test = lte; + const reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + const pad = n.some(isPadded); + N = []; + for (let i = x; test(i, y); i += incr) { + let c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') { + c = ''; + } + } + else { + c = String(i); + if (pad) { + const need = width - c.length; + if (need > 0) { + const z = new Array(need + 1).join('0'); + if (i < 0) { + c = '-' + z + c.slice(1); + } + else { + c = z + c; + } + } + } + } + N.push(c); + } + } + else { + N = []; + for (let j = 0; j < n.length; j++) { + N.push.apply(N, expand_(n[j], false)); + } + } + for (let j = 0; j < N.length; j++) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) { + expansions.push(expansion); + } + } + } + } + return expansions; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@isaacs/brace-expansion/dist/commonjs/package.json b/node_modules/@isaacs/brace-expansion/dist/commonjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@isaacs/brace-expansion/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@isaacs/brace-expansion/dist/esm/index.js b/node_modules/@isaacs/brace-expansion/dist/esm/index.js new file mode 100644 index 0000000000000..ebb88ed4117c8 --- /dev/null +++ b/node_modules/@isaacs/brace-expansion/dist/esm/index.js @@ -0,0 +1,193 @@ +import { balanced } from '@isaacs/balanced-match'; +const escSlash = '\0SLASH' + Math.random() + '\0'; +const escOpen = '\0OPEN' + Math.random() + '\0'; +const escClose = '\0CLOSE' + Math.random() + '\0'; +const escComma = '\0COMMA' + Math.random() + '\0'; +const escPeriod = '\0PERIOD' + Math.random() + '\0'; +const escSlashPattern = new RegExp(escSlash, 'g'); +const escOpenPattern = new RegExp(escOpen, 'g'); +const escClosePattern = new RegExp(escClose, 'g'); +const escCommaPattern = new RegExp(escComma, 'g'); +const escPeriodPattern = new RegExp(escPeriod, 'g'); +const slashPattern = /\\\\/g; +const openPattern = /\\{/g; +const closePattern = /\\}/g; +const commaPattern = /\\,/g; +const periodPattern = /\\./g; +function numeric(str) { + return !isNaN(str) ? 
parseInt(str, 10) : str.charCodeAt(0); +} +function escapeBraces(str) { + return str + .replace(slashPattern, escSlash) + .replace(openPattern, escOpen) + .replace(closePattern, escClose) + .replace(commaPattern, escComma) + .replace(periodPattern, escPeriod); +} +function unescapeBraces(str) { + return str + .replace(escSlashPattern, '\\') + .replace(escOpenPattern, '{') + .replace(escClosePattern, '}') + .replace(escCommaPattern, ',') + .replace(escPeriodPattern, '.'); +} +/** + * Basically just str.split(","), but handling cases + * where we have nested braced sections, which should be + * treated as individual members, like {a,{b,c},d} + */ +function parseCommaParts(str) { + if (!str) { + return ['']; + } + const parts = []; + const m = balanced('{', '}', str); + if (!m) { + return str.split(','); + } + const { pre, body, post } = m; + const p = pre.split(','); + p[p.length - 1] += '{' + body + '}'; + const postParts = parseCommaParts(post); + if (post.length) { + ; + p[p.length - 1] += postParts.shift(); + p.push.apply(p, postParts); + } + parts.push.apply(parts, p); + return parts; +} +export function expand(str) { + if (!str) { + return []; + } + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.slice(0, 2) === '{}') { + str = '\\{\\}' + str.slice(2); + } + return expand_(escapeBraces(str), true).map(unescapeBraces); +} +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} +function expand_(str, isTop) { + /** @type {string[]} */ + const expansions = []; + const m = balanced('{', '}', str); + if (!m) + return [str]; + // no need to expand pre, since it is guaranteed to be free of brace-sets + const pre = m.pre; + const post = m.post.length ? expand_(m.post, false) : ['']; + if (/\$$/.test(m.pre)) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + '{' + m.body + '}' + post[k]; + expansions.push(expansion); + } + } + else { + const isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + const isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + const isSequence = isNumericSequence || isAlphaSequence; + const isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,(?!,).*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand_(str); + } + return [str]; + } + let n; + if (isSequence) { + n = m.body.split(/\.\./); + } + else { + n = parseCommaParts(m.body); + if (n.length === 1 && n[0] !== undefined) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand_(n[0], false).map(embrace); + //XXX is this necessary? Can't seem to hit it in tests. + /* c8 ignore start */ + if (n.length === 1) { + return post.map(p => m.pre + n[0] + p); + } + /* c8 ignore stop */ + } + } + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + let N; + if (isSequence && n[0] !== undefined && n[1] !== undefined) { + const x = numeric(n[0]); + const y = numeric(n[1]); + const width = Math.max(n[0].length, n[1].length); + let incr = n.length === 3 && n[2] !== undefined ? 
Math.abs(numeric(n[2])) : 1; + let test = lte; + const reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + const pad = n.some(isPadded); + N = []; + for (let i = x; test(i, y); i += incr) { + let c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') { + c = ''; + } + } + else { + c = String(i); + if (pad) { + const need = width - c.length; + if (need > 0) { + const z = new Array(need + 1).join('0'); + if (i < 0) { + c = '-' + z + c.slice(1); + } + else { + c = z + c; + } + } + } + } + N.push(c); + } + } + else { + N = []; + for (let j = 0; j < n.length; j++) { + N.push.apply(N, expand_(n[j], false)); + } + } + for (let j = 0; j < N.length; j++) { + for (let k = 0; k < post.length; k++) { + const expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) { + expansions.push(expansion); + } + } + } + } + return expansions; +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@isaacs/brace-expansion/dist/esm/package.json b/node_modules/@isaacs/brace-expansion/dist/esm/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@isaacs/brace-expansion/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@isaacs/brace-expansion/package.json b/node_modules/@isaacs/brace-expansion/package.json new file mode 100644 index 0000000000000..cf1035688398b --- /dev/null +++ b/node_modules/@isaacs/brace-expansion/package.json @@ -0,0 +1,71 @@ +{ + "name": "@isaacs/brace-expansion", + "description": "Brace expansion as known from sh/bash", + "version": "5.0.0", + "files": [ + "dist" + ], + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "type": "module", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.2", + "@types/node": "^24.0.0", + "mkdirp": "^3.0.1", + "prettier": "^3.3.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.5" + }, + "dependencies": { + "@isaacs/balanced-match": "^4.0.1" + }, + "license": "MIT", + "engines": { + "node": "20 || >=22" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "module": "./dist/esm/index.js" +} diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE new file mode 100644 index 0000000000000..6a1f3708f6d70 --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE @@ -0,0 +1,18 @@ +ISC License + +Copyright GitHub Inc. 
+ +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js new file mode 100644 index 0000000000000..7eff602d73a3f --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js @@ -0,0 +1,286 @@ +const { readFile, writeFile } = require('node:fs/promises') +const { resolve } = require('node:path') +const parseJSON = require('json-parse-even-better-errors') + +const updateDeps = require('./update-dependencies.js') +const updateScripts = require('./update-scripts.js') +const updateWorkspaces = require('./update-workspaces.js') +const normalize = require('./normalize.js') +const { read, parse } = require('./read-package.js') +const { packageSort } = require('./sort.js') + +// a list of handy specialized helper functions that take +// care of special cases that are handled by the npm cli +const knownSteps = new Set([ + updateDeps, + updateScripts, + updateWorkspaces, +]) + +// list of all keys that are handled by "knownSteps" helpers +const knownKeys = new Set([ + ...updateDeps.knownKeys, + 'scripts', + 'workspaces', +]) + +class PackageJson { + static normalizeSteps = Object.freeze([ + '_id', + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'optionalDedupe', + 'scripts', + 'funding', + 'bin', + ]) + + // npm pkg fix + static fixSteps = Object.freeze([ + 'binRefs', + 'bundleDependencies', + 'bundleDependenciesFalse', + 'fixName', + 'fixNameField', + 'fixVersionField', + 'fixRepositoryField', + 'fixDependencies', + 'devDependencies', + 'scriptpath', + ]) + + static prepareSteps = Object.freeze([ + '_id', + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'bundleDependenciesDeleteFalse', + 'gypfile', + 'serverjs', + 'scriptpath', + 'authors', + 'readme', + 'mans', + 'binDir', + 'gitHead', + 'fillTypes', + 'normalizeData', + 'binRefs', + ]) + + // create a new empty package.json, so we can save at the given path even + // though we didn't start from a parsed file + static async create (path, opts = {}) { + const p = new PackageJson() + await p.create(path) + if (opts.data) { + return p.update(opts.data) + } + return p + } + + // Loads a package.json at given path and JSON parses + static async load (path, opts = {}) { + const p = new PackageJson() + // Avoid try/catch if we aren't going to create + if (!opts.create) { + return p.load(path) + } + + try { + return await p.load(path) + } catch (err) { + if (!err.message.startsWith('Could not read package.json')) { + throw err + } + return await p.create(path) + } + } + + // npm pkg fix + static async fix (path, opts) { + const p = new PackageJson() + await p.load(path, true) + return p.fix(opts) + } + + // read-package-json compatible behavior + static async prepare 
(path, opts) { + const p = new PackageJson() + await p.load(path, true) + return p.prepare(opts) + } + + // read-package-json-fast compatible behavior + static async normalize (path, opts) { + const p = new PackageJson() + await p.load(path) + return p.normalize(opts) + } + + #path + #manifest + #readFileContent = '' + #canSave = true + + // Load content from given path + async load (path, parseIndex) { + this.#path = path + let parseErr + try { + this.#readFileContent = await read(this.filename) + } catch (err) { + if (!parseIndex) { + throw err + } + parseErr = err + } + + if (parseErr) { + const indexFile = resolve(this.path, 'index.js') + let indexFileContent + try { + indexFileContent = await readFile(indexFile, 'utf8') + } catch (err) { + throw parseErr + } + try { + this.fromComment(indexFileContent) + } catch (err) { + throw parseErr + } + // This wasn't a package.json so prevent saving + this.#canSave = false + return this + } + + return this.fromJSON(this.#readFileContent) + } + + // Load data from a JSON string/buffer + fromJSON (data) { + this.#manifest = parse(data) + return this + } + + fromContent (data) { + this.#manifest = data + this.#canSave = false + return this + } + + // Load data from a comment + // /**package { "name": "foo", "version": "1.2.3", ... } **/ + fromComment (data) { + data = data.split(/^\/\*\*package(?:\s|$)/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[1] + data = data.split(/\*\*\/$/m) + + if (data.length < 2) { + throw new Error('File has no package in comments') + } + data = data[0] + data = data.replace(/^\s*\*/mg, '') + + this.#manifest = parseJSON(data) + return this + } + + get content () { + return this.#manifest + } + + get path () { + return this.#path + } + + get filename () { + if (this.path) { + return resolve(this.path, 'package.json') + } + return undefined + } + + create (path) { + this.#path = path + this.#manifest = {} + return this + } + + // This should be the ONLY way to set content in the manifest + update (content) { + if (!this.content) { + throw new Error('Can not update without content. Please `load` or `create`') + } + + for (const step of knownSteps) { + this.#manifest = step({ content, originalContent: this.content }) + } + + // unknown properties will just be overwitten + for (const [key, value] of Object.entries(content)) { + if (!knownKeys.has(key)) { + this.content[key] = value + } + } + + return this + } + + async save ({ sort } = {}) { + if (!this.#canSave) { + throw new Error('No package.json to save to') + } + const { + [Symbol.for('indent')]: indent, + [Symbol.for('newline')]: newline, + ...rest + } = this.content + + const format = indent === undefined ? ' ' : indent + const eol = newline === undefined ? '\n' : newline + + const content = sort ? 
packageSort(rest) : rest + + const fileContent = `${ + JSON.stringify(content, null, format) + }\n` + .replace(/\n/g, eol) + + if (fileContent.trim() !== this.#readFileContent.trim()) { + const written = await writeFile(this.filename, fileContent) + this.#readFileContent = fileContent + return written + } + } + + async normalize (opts = {}) { + if (!opts.steps) { + opts.steps = this.constructor.normalizeSteps + } + await normalize(this, opts) + return this + } + + async prepare (opts = {}) { + if (!opts.steps) { + opts.steps = this.constructor.prepareSteps + } + await normalize(this, opts) + return this + } + + async fix (opts = {}) { + // This one is not overridable + opts.steps = this.constructor.fixSteps + await normalize(this, opts) + return this + } +} + +module.exports = PackageJson diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js new file mode 100644 index 0000000000000..79b0bafbcd3a4 --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js @@ -0,0 +1,257 @@ +// Originally normalize-package-data + +const url = require('node:url') +const hostedGitInfo = require('hosted-git-info') +const validateLicense = require('validate-npm-package-license') + +const typos = { + dependancies: 'dependencies', + dependecies: 'dependencies', + depdenencies: 'dependencies', + devEependencies: 'devDependencies', + depends: 'dependencies', + 'dev-dependencies': 'devDependencies', + devDependences: 'devDependencies', + devDepenencies: 'devDependencies', + devdependencies: 'devDependencies', + repostitory: 'repository', + repo: 'repository', + prefereGlobal: 'preferGlobal', + hompage: 'homepage', + hampage: 'homepage', + autohr: 'author', + autor: 'author', + contributers: 'contributors', + publicationConfig: 'publishConfig', + script: 'scripts', +} + +const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.')) + +// Extracts description from contents of a readme file in markdown format +function extractDescription (description) { + // the first block of text before the first heading that isn't the first line heading + const lines = description.trim().split('\n') + let start = 0 + // skip initial empty lines and lines that start with # + while (lines[start]?.trim().match(/^(#|$)/)) { + start++ + } + let end = start + 1 + // keep going till we get to the end or an empty line + while (end < lines.length && lines[end].trim()) { + end++ + } + return lines.slice(start, end).join(' ').trim() +} + +function stringifyPerson (person) { + if (typeof person !== 'string') { + const name = person.name || '' + const u = person.url || person.web + const wrappedUrl = u ? (' (' + u + ')') : '' + const e = person.email || person.mail + const wrappedEmail = e ? 
(' <' + e + '>') : '' + person = name + wrappedEmail + wrappedUrl + } + const matchedName = person.match(/^([^(<]+)/) + const matchedUrl = person.match(/\(([^()]+)\)/) + const matchedEmail = person.match(/<([^<>]+)>/) + const parsed = {} + if (matchedName?.[0].trim()) { + parsed.name = matchedName[0].trim() + } + if (matchedEmail) { + parsed.email = matchedEmail[1] + } + if (matchedUrl) { + parsed.url = matchedUrl[1] + } + return parsed +} + +function normalizeData (data, changes) { + // fixDescriptionField + if (data.description && typeof data.description !== 'string') { + changes?.push(`'description' field should be a string`) + delete data.description + } + if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') { + data.description = extractDescription(data.readme) + } + if (data.description === undefined) { + delete data.description + } + if (!data.description) { + changes?.push('No description') + } + + // fixModulesField + if (data.modules) { + changes?.push(`modules field is deprecated`) + delete data.modules + } + + // fixFilesField + const files = data.files + if (files && !Array.isArray(files)) { + changes?.push(`Invalid 'files' member`) + delete data.files + } else if (data.files) { + data.files = data.files.filter(function (file) { + if (!file || typeof file !== 'string') { + changes?.push(`Invalid filename in 'files' list: ${file}`) + return false + } else { + return true + } + }) + } + + // fixManField + if (data.man && typeof data.man === 'string') { + data.man = [data.man] + } + + // fixBugsField + if (!data.bugs && data.repository?.url) { + const hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted && hosted.bugs()) { + data.bugs = { url: hosted.bugs() } + } + } else if (data.bugs) { + if (typeof data.bugs === 'string') { + if (isEmail(data.bugs)) { + data.bugs = { email: data.bugs } + /* eslint-disable-next-line node/no-deprecated-api */ + } else if (url.parse(data.bugs).protocol) { + data.bugs = { url: data.bugs } + } else { + changes?.push(`Bug string field must be url, email, or {email,url}`) + } + } else { + for (const k in data.bugs) { + if (['web', 'name'].includes(k)) { + changes?.push(`bugs['${k}'] should probably be bugs['url'].`) + data.bugs.url = data.bugs[k] + delete data.bugs[k] + } + } + const oldBugs = data.bugs + data.bugs = {} + if (oldBugs.url) { + /* eslint-disable-next-line node/no-deprecated-api */ + if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + data.bugs.url = oldBugs.url + } else { + changes?.push('bugs.url field must be a string url. Deleted.') + } + } + if (oldBugs.email) { + if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) { + data.bugs.email = oldBugs.email + } else { + changes?.push('bugs.email field must be a string email. Deleted.') + } + } + } + if (!data.bugs.email && !data.bugs.url) { + delete data.bugs + changes?.push('Normalized value of bugs field is an empty object. 
Deleted.') + } + } + // fixKeywordsField + if (typeof data.keywords === 'string') { + data.keywords = data.keywords.split(/,\s+/) + } + if (data.keywords && !Array.isArray(data.keywords)) { + delete data.keywords + changes?.push(`keywords should be an array of strings`) + } else if (data.keywords) { + data.keywords = data.keywords.filter(function (kw) { + if (typeof kw !== 'string' || !kw) { + changes?.push(`keywords should be an array of strings`) + return false + } else { + return true + } + }) + } + // fixBundleDependenciesField + const bdd = 'bundledDependencies' + const bd = 'bundleDependencies' + if (data[bdd] && !data[bd]) { + data[bd] = data[bdd] + delete data[bdd] + } + if (data[bd] && !Array.isArray(data[bd])) { + changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`) + delete data[bd] + } else if (data[bd]) { + data[bd] = data[bd].filter(function (filtered) { + if (!filtered || typeof filtered !== 'string') { + changes?.push(`Invalid bundleDependencies member: ${filtered}`) + return false + } else { + if (!data.dependencies) { + data.dependencies = {} + } + if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) { + changes?.push(`Non-dependency in bundleDependencies: ${filtered}`) + data.dependencies[filtered] = '*' + } + return true + } + }) + } + // fixHomepageField + if (!data.homepage && data.repository && data.repository.url) { + const hosted = hostedGitInfo.fromUrl(data.repository.url) + if (hosted) { + data.homepage = hosted.docs() + } + } + if (data.homepage) { + if (typeof data.homepage !== 'string') { + changes?.push('homepage field must be a string url. Deleted.') + delete data.homepage + } else { + /* eslint-disable-next-line node/no-deprecated-api */ + if (!url.parse(data.homepage).protocol) { + data.homepage = 'http://' + data.homepage + } + } + } + // fixReadmeField + if (!data.readme) { + changes?.push('No README data') + data.readme = 'ERROR: No README data found!' 
+ } + // fixLicenseField + const license = data.license || data.licence + if (!license) { + changes?.push('No license field.') + } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') { + changes?.push('license should be a valid SPDX license expression') + } else if (!validateLicense(license).validForNewPackages) { + changes?.push('license should be a valid SPDX license expression') + } + // fixPeople + if (data.author) { + data.author = stringifyPerson(data.author) + } + ['maintainers', 'contributors'].forEach(function (set) { + if (!Array.isArray(data[set])) { + return + } + data[set] = data[set].map(stringifyPerson) + }) + // fixTypos + for (const d in typos) { + if (Object.prototype.hasOwnProperty.call(data, d)) { + changes?.push(`${d} should probably be ${typos[d]}.`) + } + } +} + +module.exports = { normalizeData } diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js new file mode 100644 index 0000000000000..845f6753a9a00 --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js @@ -0,0 +1,601 @@ +const valid = require('semver/functions/valid') +const clean = require('semver/functions/clean') +const fs = require('node:fs/promises') +const path = require('node:path') +const { log } = require('proc-log') +const moduleBuiltin = require('node:module') + +/** + * @type {import('hosted-git-info')} + */ +let _hostedGitInfo +function lazyHostedGitInfo () { + if (!_hostedGitInfo) { + _hostedGitInfo = require('hosted-git-info') + } + return _hostedGitInfo +} + +/** + * @type {import('glob').glob} + */ +let _glob +function lazyLoadGlob () { + if (!_glob) { + _glob = require('glob').glob + } + return _glob +} + +// used to be npm-normalize-package-bin +function normalizePackageBin (pkg, changes) { + if (pkg.bin) { + if (typeof pkg.bin === 'string' && pkg.name) { + changes?.push('"bin" was converted to an object') + pkg.bin = { [pkg.name]: pkg.bin } + } else if (Array.isArray(pkg.bin)) { + changes?.push('"bin" was converted to an object') + pkg.bin = pkg.bin.reduce((acc, k) => { + acc[path.basename(k)] = k + return acc + }, {}) + } + if (typeof pkg.bin === 'object') { + for (const binKey in pkg.bin) { + if (typeof pkg.bin[binKey] !== 'string') { + delete pkg.bin[binKey] + changes?.push(`removed invalid "bin[${binKey}]"`) + continue + } + const base = path.basename(secureAndUnixifyPath(binKey)) + if (!base) { + delete pkg.bin[binKey] + changes?.push(`removed invalid "bin[${binKey}]"`) + continue + } + + const binTarget = secureAndUnixifyPath(pkg.bin[binKey]) + + if (!binTarget) { + delete pkg.bin[binKey] + changes?.push(`removed invalid "bin[${binKey}]"`) + continue + } + + if (base !== binKey) { + delete pkg.bin[binKey] + changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`) + } + if (binTarget !== pkg.bin[binKey]) { + changes?.push(`"bin[${base}]" script name was cleaned`) + } + pkg.bin[base] = binTarget + } + + if (Object.keys(pkg.bin).length === 0) { + changes?.push('empty "bin" was removed') + delete pkg.bin + } + + return pkg + } + } + delete pkg.bin +} + +function normalizePackageMan (pkg, changes) { + if (pkg.man) { + const mans = [] + for (const man of (Array.isArray(pkg.man) ? 
pkg.man : [pkg.man])) { + if (typeof man !== 'string') { + changes?.push(`removed invalid "man [${man}]"`) + } else { + mans.push(secureAndUnixifyPath(man)) + } + } + + if (!mans.length) { + changes?.push('empty "man" was removed') + } else { + pkg.man = mans + return pkg + } + } + delete pkg.man +} + +function isCorrectlyEncodedName (spec) { + return !spec.match(/[/@\s+%:]/) && + spec === encodeURIComponent(spec) +} + +function isValidScopedPackageName (spec) { + if (spec.charAt(0) !== '@') { + return false + } + + const rest = spec.slice(1).split('/') + if (rest.length !== 2) { + return false + } + + return rest[0] && rest[1] && + rest[0] === encodeURIComponent(rest[0]) && + rest[1] === encodeURIComponent(rest[1]) +} + +function unixifyPath (ref) { + return ref.replace(/\\|:/g, '/') +} + +function secureAndUnixifyPath (ref) { + const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref)))) + return secured.startsWith('./') ? '' : secured +} + +// We don't want the `changes` array in here by default because this is a hot +// path for parsing packuments during install. So the calling method passes it +// in if it wants to track changes. +const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => { + if (!pkg.content) { + throw new Error('Can not normalize without content') + } + const data = pkg.content + const scripts = data.scripts || {} + const pkgId = `${data.name ?? ''}@${data.version ?? ''}` + + // name and version are load bearing so we have to clean them up first + if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) { + if (!data.name && !strict) { + changes?.push('Missing "name" field was set to an empty string') + data.name = '' + } else { + if (typeof data.name !== 'string') { + throw new Error('name field must be a string.') + } + if (!strict) { + const name = data.name.trim() + if (data.name !== name) { + changes?.push(`Whitespace was trimmed from "name"`) + data.name = name + } + } + + if (data.name.startsWith('.') || + !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) || + (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) || + data.name.toLowerCase() === 'node_modules' || + data.name.toLowerCase() === 'favicon.ico') { + throw new Error('Invalid name: ' + JSON.stringify(data.name)) + } + } + } + + if (steps.includes('fixName')) { + // Check for conflicts with builtin modules + if (moduleBuiltin.builtinModules.includes(data.name)) { + log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`) + } + } + + if (steps.includes('fixVersionField') || steps.includes('normalizeData')) { + // allow "loose" semver 1.0 versions in non-strict mode + // enforce strict semver 2.0 compliance in strict mode + const loose = !strict + if (!data.version) { + data.version = '' + } else { + if (!valid(data.version, loose)) { + throw new Error(`Invalid version: "${data.version}"`) + } + const version = clean(data.version, loose) + if (version !== data.version) { + changes?.push(`"version" was cleaned and set to "${version}"`) + data.version = version + } + } + } + // remove attributes that start with "_" + if (steps.includes('_attributes')) { + for (const key in data) { + if (key.startsWith('_')) { + changes?.push(`"${key}" was removed`) + delete pkg.content[key] + } + } + } + + // build the "_id" attribute + if (steps.includes('_id')) { + if (data.name && data.version) { + changes?.push(`"_id" was set to ${pkgId}`) 
+ data._id = pkgId + } + } + + // fix bundledDependencies typo + // normalize bundleDependencies + if (steps.includes('bundledDependencies')) { + if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) { + data.bundleDependencies = data.bundledDependencies + } + changes?.push(`Deleted incorrect "bundledDependencies"`) + delete data.bundledDependencies + } + // expand "bundleDependencies: true or translate from object" + if (steps.includes('bundleDependencies')) { + const bd = data.bundleDependencies + if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) { + changes?.push(`"bundleDependencies" was changed from "false" to "[]"`) + data.bundleDependencies = [] + } else if (bd === true) { + changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`) + data.bundleDependencies = Object.keys(data.dependencies || {}) + } else if (bd && typeof bd === 'object') { + if (!Array.isArray(bd)) { + changes?.push(`"bundleDependencies" was changed from an object to an array`) + data.bundleDependencies = Object.keys(bd) + } + } else if ('bundleDependencies' in data) { + changes?.push(`"bundleDependencies" was removed`) + delete data.bundleDependencies + } + } + + // it was once common practice to list deps both in optionalDependencies and + // in dependencies, to support npm versions that did not know about + // optionalDependencies. This is no longer a relevant need, so duplicating + // the deps in two places is unnecessary and excessive. + if (steps.includes('optionalDedupe')) { + if (data.dependencies && + data.optionalDependencies && typeof data.optionalDependencies === 'object') { + for (const name in data.optionalDependencies) { + changes?.push(`optionalDependencies."${name}" was removed`) + delete data.dependencies[name] + } + if (!Object.keys(data.dependencies).length) { + changes?.push(`Empty "optionalDependencies" was removed`) + delete data.dependencies + } + } + } + + // add "install" attribute if any "*.gyp" files exist + if (steps.includes('gypfile')) { + if (!scripts.install && !scripts.preinstall && data.gypfile !== false) { + const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path }) + if (files.length) { + scripts.install = 'node-gyp rebuild' + data.scripts = scripts + data.gypfile = true + changes?.push(`"scripts.install" was set to "node-gyp rebuild"`) + changes?.push(`"gypfile" was set to "true"`) + } + } + } + + // add "start" attribute if "server.js" exists + if (steps.includes('serverjs') && !scripts.start) { + try { + await fs.access(path.join(pkg.path, 'server.js')) + scripts.start = 'node server.js' + data.scripts = scripts + changes?.push('"scripts.start" was set to "node server.js"') + } catch { + // do nothing + } + } + + // strip "node_modules/.bin" from scripts entries + // remove invalid scripts entries (non-strings) + if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) { + const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/ + if (typeof data.scripts === 'object') { + for (const name in data.scripts) { + if (typeof data.scripts[name] !== 'string') { + delete data.scripts[name] + changes?.push(`Invalid scripts."${name}" was removed`) + } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) { + data.scripts[name] = data.scripts[name].replace(spre, '') + changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`) + } + } + } else { + changes?.push(`Removed invalid "scripts"`) + delete data.scripts + } + } + + if 
(steps.includes('funding')) { + if (data.funding && typeof data.funding === 'string') { + data.funding = { url: data.funding } + changes?.push(`"funding" was changed to an object with a url attribute`) + } + } + + // populate "authors" attribute + if (steps.includes('authors') && !data.contributors) { + try { + const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8') + const authors = authorData.split(/\r?\n/g) + .map(line => line.replace(/^\s*#.*$/, '').trim()) + .filter(line => line) + data.contributors = authors + changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file') + } catch { + // do nothing + } + } + + // populate "readme" attribute + if (steps.includes('readme') && !data.readme) { + const mdre = /\.m?a?r?k?d?o?w?n?$/i + const files = await lazyLoadGlob()('{README,README.*}', { + cwd: pkg.path, + nocase: true, + mark: true, + }) + let readmeFile + for (const file of files) { + // don't accept directories. + if (!file.endsWith(path.sep)) { + if (file.match(mdre)) { + readmeFile = file + break + } + if (file.endsWith('README')) { + readmeFile = file + } + } + } + if (readmeFile) { + const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8') + data.readme = readmeData + data.readmeFilename = readmeFile + changes?.push(`"readme" was set to the contents of ${readmeFile}`) + changes?.push(`"readmeFilename" was set to ${readmeFile}`) + } + if (!data.readme) { + data.readme = 'ERROR: No README data found!' + } + } + + // expand directories.man + if (steps.includes('mans')) { + if (data.directories?.man && !data.man) { + const manDir = secureAndUnixifyPath(data.directories.man) + const cwd = path.resolve(pkg.path, manDir) + const files = await lazyLoadGlob()('**/*.[0-9]', { cwd }) + data.man = files.map(man => + path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/') + ) + } + normalizePackageMan(data, changes) + } + + if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) { + normalizePackageBin(data, changes) + } + + // expand "directories.bin" + if (steps.includes('binDir') && data.directories?.bin && !data.bin) { + const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin)) + const bins = await lazyLoadGlob()('**', { cwd: binsDir }) + data.bin = bins.reduce((acc, binFile) => { + if (binFile && !binFile.startsWith('.')) { + const binName = path.basename(binFile) + acc[binName] = path.join(data.directories.bin, binFile) + } + return acc + }, {}) + // *sigh* + normalizePackageBin(data, changes) + } + + // populate "gitHead" attribute + if (steps.includes('gitHead') && !data.gitHead) { + const git = require('@npmcli/git') + const gitRoot = await git.find({ cwd: pkg.path, root }) + let head + if (gitRoot) { + try { + head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8') + } catch (err) { + // do nothing + } + } + let headData + if (head) { + if (head.startsWith('ref: ')) { + const headRef = head.replace(/^ref: /, '').trim() + const headFile = path.resolve(gitRoot, '.git', headRef) + try { + headData = await fs.readFile(headFile, 'utf8') + headData = headData.replace(/^ref: /, '').trim() + } catch (err) { + // do nothing + } + if (!headData) { + const packFile = path.resolve(gitRoot, '.git/packed-refs') + try { + let refs = await fs.readFile(packFile, 'utf8') + if (refs) { + refs = refs.split('\n') + for (let i = 0; i < refs.length; i++) { + const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/) + if (match && match[2].trim() === headRef) { 
+ headData = match[1] + break + } + } + } + } catch { + // do nothing + } + } + } else { + headData = head.trim() + } + } + if (headData) { + data.gitHead = headData + } + } + + // populate "types" attribute + if (steps.includes('fillTypes')) { + const index = data.main || 'index.js' + + if (typeof index !== 'string') { + throw new TypeError('The "main" attribute must be of type string.') + } + + // TODO exports is much more complicated than this in verbose format + // We need to support for instance + + // "exports": { + // ".": [ + // { + // "default": "./lib/npm.js" + // }, + // "./lib/npm.js" + // ], + // "./package.json": "./package.json" + // }, + // as well as conditional exports + + // if (data.exports && typeof data.exports === 'string') { + // index = data.exports + // } + + // if (data.exports && data.exports['.']) { + // index = data.exports['.'] + // if (typeof index !== 'string') { + // } + // } + const extless = path.join(path.dirname(index), path.basename(index, path.extname(index))) + const dts = `./${extless}.d.ts` + const hasDTSFields = 'types' in data || 'typings' in data + if (!hasDTSFields) { + try { + await fs.access(path.join(pkg.path, dts)) + data.types = dts.split(path.sep).join('/') + } catch { + // do nothing + } + } + } + + // "normalizeData" from "read-package-json", which was just a call through to + // "normalize-package-data". We only call the "fixer" functions because + // outside of that it was also clobbering _id (which we already conditionally + // do) and also adding the gypfile script (which we also already + // conditionally do) + + // Some steps are isolated so we can do a limited subset of these in `fix` + if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) { + if (data.repositories) { + changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`) + data.repository = data.repositories[0] + } + if (data.repository) { + if (typeof data.repository === 'string') { + changes?.push('"repository" was changed from a string to an object') + data.repository = { + type: 'git', + url: data.repository, + } + } + if (data.repository.url) { + const hosted = lazyHostedGitInfo().fromUrl(data.repository.url) + let r + if (hosted) { + if (hosted.getDefaultRepresentation() === 'shortcut') { + r = hosted.https() + } else { + r = hosted.toString() + } + if (r !== data.repository.url) { + changes?.push(`"repository.url" was normalized to "${r}"`) + data.repository.url = r + } + } + } + } + } + + if (steps.includes('fixDependencies') || steps.includes('normalizeData')) { + // peerDependencies? 
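For reference, the `fixRepositoryField` branch above turns shortcut strings into full git URLs via `hosted-git-info`. A minimal sketch with a hypothetical manifest (the exact normalized URL is whatever `hosted.https()` returns for the host):

```js
const hostedGitInfo = require('hosted-git-info')

// hypothetical manifest using the "user/repo" shortcut form
const data = { repository: 'npm/cli' }

if (typeof data.repository === 'string') {
  data.repository = { type: 'git', url: data.repository }
}
const hosted = hostedGitInfo.fromUrl(data.repository.url)
if (hosted) {
  data.repository.url = hosted.getDefaultRepresentation() === 'shortcut'
    ? hosted.https()
    : hosted.toString()
}

console.log(data.repository)
// → { type: 'git', url: 'git+https://github.com/npm/cli.git' }
```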
+ // devDependencies is meaningless here, it's ignored on an installed package + for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) { + if (data[type]) { + let secondWarning = true + if (typeof data[type] === 'string') { + changes?.push(`"${type}" was converted from a string into an object`) + data[type] = data[type].trim().split(/[\n\r\s\t ,]+/) + secondWarning = false + } + if (Array.isArray(data[type])) { + if (secondWarning) { + changes?.push(`"${type}" was converted from an array into an object`) + } + const o = {} + for (const d of data[type]) { + if (typeof d === 'string') { + const dep = d.trim().split(/(:?[@\s><=])/) + const dn = dep.shift() + const dv = dep.join('').replace(/^@/, '').trim() + o[dn] = dv + } + } + data[type] = o + } + } + } + // normalize-package-data used to put optional dependencies BACK into + // dependencies here, we no longer do this + + for (const deps of ['dependencies', 'devDependencies']) { + if (deps in data) { + if (!data[deps] || typeof data[deps] !== 'object') { + changes?.push(`Removed invalid "${deps}"`) + delete data[deps] + } else { + for (const d in data[deps]) { + const r = data[deps][d] + if (typeof r !== 'string') { + changes?.push(`Removed invalid "${deps}.${d}"`) + delete data[deps][d] + } + const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString() + if (hosted && hosted !== data[deps][d]) { + changes?.push(`Normalized git reference to "${deps}.${d}"`) + data[deps][d] = hosted.toString() + } + } + } + } + } + } + + // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step + if (steps.includes('normalizeData')) { + const { normalizeData } = require('./normalize-data.js') + normalizeData(data, changes) + } + + // Warn if the bin references don't point to anything. This might be better + // in normalize-package-data if it had access to the file path. + if (steps.includes('binRefs') && data.bin instanceof Object) { + for (const key in data.bin) { + try { + await fs.access(path.resolve(pkg.path, data.bin[key])) + } catch { + log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`) + // XXX: should a future breaking change delete bin entries that cannot be accessed? + } + } + } +} + +module.exports = normalize diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js new file mode 100644 index 0000000000000..d6c86ce388e6c --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js @@ -0,0 +1,39 @@ +// This is JUST the code needed to open a package.json file and parse it. +// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library. + +const { readFile } = require('fs/promises') +const parseJSON = require('json-parse-even-better-errors') + +async function read (filename) { + try { + const data = await readFile(filename, 'utf8') + return data + } catch (err) { + err.message = `Could not read package.json: ${err}` + throw err + } +} + +function parse (data) { + try { + const content = parseJSON(data) + return content + } catch (err) { + err.message = `Invalid package.json: ${err}` + throw err + } +} + +// This is what most external libs will use. 
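The `fixDependencies` step above accepts two legacy shapes. A small, self-contained sketch of the same coercion with made-up package names:

```js
// legacy string and array forms are coerced into the usual object form
const coerceDeps = (value) => {
  const list = typeof value === 'string'
    ? value.trim().split(/[\n\r\s\t ,]+/)
    : value
  const out = {}
  for (const d of list) {
    if (typeof d === 'string') {
      const parts = d.trim().split(/(:?[@\s><=])/)
      const name = parts.shift()
      out[name] = parts.join('').replace(/^@/, '').trim()
    }
  }
  return out
}

console.log(coerceDeps('foo@^1.0.0 bar@~2.1.0')) // → { foo: '^1.0.0', bar: '~2.1.0' }
console.log(coerceDeps(['baz@1.x']))             // → { baz: '1.x' }
```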
+// PackageJson will call read and parse separately +async function readPackage (filename) { + const data = await read(filename) + const content = parse(data) + return content +} + +module.exports = { + read, + parse, + readPackage, +} diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js new file mode 100644 index 0000000000000..0bd0d5199da58 --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js @@ -0,0 +1,101 @@ +/** + * arbitrary sort order for package.json largely pulled from: + * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md + * + * cross checked with: + * https://github.com/npm/types/blob/main/types/index.d.ts#L104 + * https://docs.npmjs.com/cli/configuring-npm/package-json + */ +function packageSort (json) { + const { + name, + version, + private: isPrivate, + description, + keywords, + homepage, + bugs, + repository, + funding, + license, + author, + maintainers, + contributors, + type, + imports, + exports, + main, + browser, + types, + bin, + man, + directories, + files, + workspaces, + scripts, + config, + dependencies, + devDependencies, + peerDependencies, + peerDependenciesMeta, + optionalDependencies, + bundledDependencies, + bundleDependencies, + engines, + os, + cpu, + publishConfig, + devEngines, + licenses, + overrides, + ...rest + } = json + + return { + ...(typeof name !== 'undefined' ? { name } : {}), + ...(typeof version !== 'undefined' ? { version } : {}), + ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}), + ...(typeof description !== 'undefined' ? { description } : {}), + ...(typeof keywords !== 'undefined' ? { keywords } : {}), + ...(typeof homepage !== 'undefined' ? { homepage } : {}), + ...(typeof bugs !== 'undefined' ? { bugs } : {}), + ...(typeof repository !== 'undefined' ? { repository } : {}), + ...(typeof funding !== 'undefined' ? { funding } : {}), + ...(typeof license !== 'undefined' ? { license } : {}), + ...(typeof author !== 'undefined' ? { author } : {}), + ...(typeof maintainers !== 'undefined' ? { maintainers } : {}), + ...(typeof contributors !== 'undefined' ? { contributors } : {}), + ...(typeof type !== 'undefined' ? { type } : {}), + ...(typeof imports !== 'undefined' ? { imports } : {}), + ...(typeof exports !== 'undefined' ? { exports } : {}), + ...(typeof main !== 'undefined' ? { main } : {}), + ...(typeof browser !== 'undefined' ? { browser } : {}), + ...(typeof types !== 'undefined' ? { types } : {}), + ...(typeof bin !== 'undefined' ? { bin } : {}), + ...(typeof man !== 'undefined' ? { man } : {}), + ...(typeof directories !== 'undefined' ? { directories } : {}), + ...(typeof files !== 'undefined' ? { files } : {}), + ...(typeof workspaces !== 'undefined' ? { workspaces } : {}), + ...(typeof scripts !== 'undefined' ? { scripts } : {}), + ...(typeof config !== 'undefined' ? { config } : {}), + ...(typeof dependencies !== 'undefined' ? { dependencies } : {}), + ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}), + ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}), + ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}), + ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}), + ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}), + ...(typeof bundleDependencies !== 'undefined' ? 
{ bundleDependencies } : {}), + ...(typeof engines !== 'undefined' ? { engines } : {}), + ...(typeof os !== 'undefined' ? { os } : {}), + ...(typeof cpu !== 'undefined' ? { cpu } : {}), + ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}), + ...(typeof devEngines !== 'undefined' ? { devEngines } : {}), + ...(typeof licenses !== 'undefined' ? { licenses } : {}), + ...(typeof overrides !== 'undefined' ? { overrides } : {}), + ...rest, + } +} + +module.exports = { + packageSort, +} diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js new file mode 100644 index 0000000000000..7259949ab661d --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js @@ -0,0 +1,75 @@ +const depTypes = new Set([ + 'dependencies', + 'optionalDependencies', + 'devDependencies', + 'peerDependencies', +]) + +// sort alphabetically all types of deps for a given package +const orderDeps = (content) => { + for (const type of depTypes) { + if (content && content[type]) { + content[type] = Object.keys(content[type]) + .sort((a, b) => a.localeCompare(b, 'en')) + .reduce((res, key) => { + res[key] = content[type][key] + return res + }, {}) + } + } + return content +} + +const updateDependencies = ({ content, originalContent }) => { + const pkg = orderDeps({ + ...content, + }) + + // optionalDependencies don't need to be repeated in two places + if (pkg.dependencies) { + if (pkg.optionalDependencies) { + for (const name of Object.keys(pkg.optionalDependencies)) { + delete pkg.dependencies[name] + } + } + } + + const result = { ...originalContent } + + // loop through all types of dependencies and update package json pkg + for (const type of depTypes) { + if (pkg[type]) { + result[type] = pkg[type] + } + + // prune empty type props from resulting object + const emptyDepType = + pkg[type] + && typeof pkg === 'object' + && Object.keys(pkg[type]).length === 0 + if (emptyDepType) { + delete result[type] + } + } + + // if original package.json had dep in peerDeps AND deps, preserve that. 
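Two things happen in `updateDependencies` above before writing back: dep names are ordered with `localeCompare`, and anything already listed in `optionalDependencies` is dropped from `dependencies`. A tiny illustration with hypothetical packages:

```js
const content = {
  dependencies: { zlib: '^1.0.0', abbrev: '^2.0.0', 'left-pad': '^1.3.0' },
  optionalDependencies: { 'left-pad': '^1.3.0' },
}

// alphabetical ordering, as orderDeps does
const deps = Object.fromEntries(
  Object.entries(content.dependencies).sort(([a], [b]) => a.localeCompare(b, 'en'))
)

// optional deps don't need to be repeated in dependencies
for (const name of Object.keys(content.optionalDependencies)) {
  delete deps[name]
}

console.log(deps) // → { abbrev: '^2.0.0', zlib: '^1.0.0' }
```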
+ const { dependencies: origProd, peerDependencies: origPeer } = + originalContent || {} + const { peerDependencies: newPeer } = result + if (origProd && origPeer && newPeer) { + // we have original prod/peer deps, and new peer deps + // copy over any that were in both in the original + for (const name of Object.keys(origPeer)) { + if (origProd[name] !== undefined && newPeer[name] !== undefined) { + result.dependencies = result.dependencies || {} + result.dependencies[name] = newPeer[name] + } + } + } + + return result +} + +updateDependencies.knownKeys = depTypes + +module.exports = updateDependencies diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js new file mode 100644 index 0000000000000..30495e54cc3c7 --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js @@ -0,0 +1,29 @@ +const updateScripts = ({ content, originalContent = {} }) => { + const newScripts = content.scripts + + if (!newScripts) { + return originalContent + } + + // validate scripts content being appended + const hasInvalidScripts = () => + Object.entries(newScripts) + .some(([key, value]) => + typeof key !== 'string' || typeof value !== 'string') + if (hasInvalidScripts()) { + throw Object.assign( + new TypeError( + 'package.json scripts should be a key-value pair of strings.'), + { code: 'ESCRIPTSINVALID' } + ) + } + + return { + ...originalContent, + scripts: { + ...newScripts, + }, + } +} + +module.exports = updateScripts diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js new file mode 100644 index 0000000000000..04bf63230636f --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js @@ -0,0 +1,26 @@ +const updateWorkspaces = ({ content, originalContent = {} }) => { + const newWorkspaces = content.workspaces + + if (!newWorkspaces) { + return originalContent + } + + // validate workspaces content being appended + const hasInvalidWorkspaces = () => + newWorkspaces.some(w => !(typeof w === 'string')) + if (!newWorkspaces.length || hasInvalidWorkspaces()) { + throw Object.assign( + new TypeError('workspaces should be an array of strings.'), + { code: 'EWORKSPACESINVALID' } + ) + } + + return { + ...originalContent, + workspaces: [ + ...newWorkspaces, + ], + } +} + +module.exports = updateWorkspaces diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json new file mode 100644 index 0000000000000..263d67ff3bc5b --- /dev/null +++ b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json @@ -0,0 +1,61 @@ +{ + "name": "@npmcli/package-json", + "version": "6.2.0", + "description": "Programmatic API to update package.json", + "keywords": [ + "npm", + "oss" + ], + "repository": { + "type": "git", + "url": "git+https://github.com/npm/package-json.git" + }, + "license": "ISC", + "author": "GitHub Inc.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": 
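A quick usage sketch of the `updateScripts` validation shown above; the values are hypothetical, and the deep require is only for illustration:

```js
const updateScripts = require('@npmcli/package-json/lib/update-scripts.js')

const merged = updateScripts({
  content: { scripts: { test: 'tap' } },
  originalContent: { name: 'demo' },
})
console.log(merged) // → { name: 'demo', scripts: { test: 'tap' } }

try {
  updateScripts({ content: { scripts: { test: 123 } } })
} catch (err) {
  console.log(err.code) // → 'ESCRIPTSINVALID'
}
```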
"template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "dependencies": { + "@npmcli/git": "^6.0.0", + "glob": "^10.2.2", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", + "proc-log": "^5.0.0", + "semver": "^7.5.3", + "validate-npm-package-license": "^3.0.4" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.1.0", + "@npmcli/template-oss": "4.23.6", + "read-package-json": "^7.0.0", + "read-package-json-fast": "^4.0.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^18.17.0 || >=20.5.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.23.6", + "publish": "true" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/package-json/lib/index.js index 7eff602d73a3f..fabe5fbcda7bc 100644 --- a/node_modules/@npmcli/package-json/lib/index.js +++ b/node_modules/@npmcli/package-json/lib/index.js @@ -5,7 +5,7 @@ const parseJSON = require('json-parse-even-better-errors') const updateDeps = require('./update-dependencies.js') const updateScripts = require('./update-scripts.js') const updateWorkspaces = require('./update-workspaces.js') -const normalize = require('./normalize.js') +const { normalize, syncNormalize } = require('./normalize.js') const { read, parse } = require('./read-package.js') const { packageSort } = require('./sort.js') @@ -25,24 +25,11 @@ const knownKeys = new Set([ ]) class PackageJson { - static normalizeSteps = Object.freeze([ - '_id', - '_attributes', - 'bundledDependencies', - 'bundleDependencies', - 'optionalDedupe', - 'scripts', - 'funding', - 'bin', - ]) - // npm pkg fix static fixSteps = Object.freeze([ 'binRefs', 'bundleDependencies', - 'bundleDependenciesFalse', 'fixName', - 'fixNameField', 'fixVersionField', 'fixRepositoryField', 'fixDependencies', @@ -50,6 +37,18 @@ class PackageJson { 'scriptpath', ]) + static normalizeSteps = Object.freeze([ + '_id', + '_attributes', + 'bundledDependencies', + 'bundleDependencies', + 'optionalDedupe', + 'scripts', + 'funding', + 'bin', + 'binDir', + ]) + static prepareSteps = Object.freeze([ '_id', '_attributes', @@ -164,7 +163,11 @@ class PackageJson { return this } + // Manually set data from an existing object fromContent (data) { + if (!data || typeof data !== 'object') { + throw new Error('Content data must be an object') + } this.#manifest = data this.#canSave = false return this @@ -259,6 +262,13 @@ class PackageJson { } } + // steps is NOT overrideable here because this is a legacy function that's not being used in new places + syncNormalize (opts = {}) { + opts.steps = this.constructor.normalizeSteps.filter(s => s !== '_attributes') + syncNormalize(this, opts) + return this + } + async normalize (opts = {}) { if (!opts.steps) { opts.steps = this.constructor.normalizeSteps diff --git a/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/@npmcli/package-json/lib/normalize-data.js index 79b0bafbcd3a4..1c1a36984c5e9 100644 --- a/node_modules/@npmcli/package-json/lib/normalize-data.js +++ b/node_modules/@npmcli/package-json/lib/normalize-data.js @@ -1,6 +1,6 @@ // Originally normalize-package-data -const url = require('node:url') +const { URL } = require('node:url') const hostedGitInfo = require('hosted-git-info') const validateLicense = require('validate-npm-package-license') @@ -123,8 
+123,7 @@ function normalizeData (data, changes) { if (typeof data.bugs === 'string') { if (isEmail(data.bugs)) { data.bugs = { email: data.bugs } - /* eslint-disable-next-line node/no-deprecated-api */ - } else if (url.parse(data.bugs).protocol) { + } else if (URL.canParse(data.bugs)) { data.bugs = { url: data.bugs } } else { changes?.push(`Bug string field must be url, email, or {email,url}`) @@ -140,8 +139,7 @@ function normalizeData (data, changes) { const oldBugs = data.bugs data.bugs = {} if (oldBugs.url) { - /* eslint-disable-next-line node/no-deprecated-api */ - if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) { + if (URL.canParse(oldBugs.url)) { data.bugs.url = oldBugs.url } else { changes?.push('bugs.url field must be a string url. Deleted.') @@ -216,8 +214,7 @@ function normalizeData (data, changes) { changes?.push('homepage field must be a string url. Deleted.') delete data.homepage } else { - /* eslint-disable-next-line node/no-deprecated-api */ - if (!url.parse(data.homepage).protocol) { + if (!URL.canParse(data.homepage)) { data.homepage = 'http://' + data.homepage } } diff --git a/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/package-json/lib/normalize.js index 845f6753a9a00..f65e6ad7ba2c4 100644 --- a/node_modules/@npmcli/package-json/lib/normalize.js +++ b/node_modules/@npmcli/package-json/lib/normalize.js @@ -67,7 +67,7 @@ function normalizePackageBin (pkg, changes) { changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`) } if (binTarget !== pkg.bin[binKey]) { - changes?.push(`"bin[${base}]" script name was cleaned`) + changes?.push(`"bin[${base}]" script name ${binTarget} was invalid and removed`) } pkg.bin[base] = binTarget } @@ -133,15 +133,9 @@ function secureAndUnixifyPath (ref) { return secured.startsWith('./') ? '' : secured } -// We don't want the `changes` array in here by default because this is a hot -// path for parsing packuments during install. So the calling method passes it -// in if it wants to track changes. -const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => { - if (!pkg.content) { - throw new Error('Can not normalize without content') - } +// Only steps that can be ran synchronously. There are some object constructors (i.e. Aborist Node) that need synchronous normalization so here we are. +function syncSteps (pkg, { strict, steps, changes, allowLegacyCase }) { const data = pkg.content - const scripts = data.scripts || {} const pkgId = `${data.name ?? ''}@${data.version ?? 
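The `normalize-data.js` hunks above swap the deprecated `url.parse()` protocol check for `URL.canParse()` (available on the global `URL` since Node 18.17). A standalone sketch of the same `bugs` classification; `isEmail` here is a simplified stand-in for the module's helper:

```js
const isEmail = (str) => str.includes('@') && str.indexOf('@') < str.lastIndexOf('.')

const classifyBugs = (bugs) => {
  if (isEmail(bugs)) {
    return { email: bugs }
  }
  if (URL.canParse(bugs)) {
    return { url: bugs }
  }
  return null // would be reported: bug string must be url, email, or {email,url}
}

console.log(classifyBugs('bugs@example.com'))                  // → { email: 'bugs@example.com' }
console.log(classifyBugs('https://github.com/npm/cli/issues')) // → { url: 'https://github.com/npm/cli/issues' }
console.log(classifyBugs('not a url'))                         // → null
```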
''}` // name and version are load bearing so we have to clean them up first @@ -195,6 +189,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) } } } + // remove attributes that start with "_" if (steps.includes('_attributes')) { for (const key in data) { @@ -214,14 +209,14 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) } // fix bundledDependencies typo - // normalize bundleDependencies if (steps.includes('bundledDependencies')) { if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) { data.bundleDependencies = data.bundledDependencies + changes?.push(`Deleted incorrect "bundledDependencies"`) } - changes?.push(`Deleted incorrect "bundledDependencies"`) delete data.bundledDependencies } + // expand "bundleDependencies: true or translate from object" if (steps.includes('bundleDependencies')) { const bd = data.bundleDependencies @@ -260,32 +255,6 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) } } - // add "install" attribute if any "*.gyp" files exist - if (steps.includes('gypfile')) { - if (!scripts.install && !scripts.preinstall && data.gypfile !== false) { - const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path }) - if (files.length) { - scripts.install = 'node-gyp rebuild' - data.scripts = scripts - data.gypfile = true - changes?.push(`"scripts.install" was set to "node-gyp rebuild"`) - changes?.push(`"gypfile" was set to "true"`) - } - } - } - - // add "start" attribute if "server.js" exists - if (steps.includes('serverjs') && !scripts.start) { - try { - await fs.access(path.join(pkg.path, 'server.js')) - scripts.start = 'node server.js' - data.scripts = scripts - changes?.push('"scripts.start" was set to "node server.js"') - } catch { - // do nothing - } - } - // strip "node_modules/.bin" from scripts entries // remove invalid scripts entries (non-strings) if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) { @@ -313,6 +282,137 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) } } + // "normalizeData" from "read-package-json", which was just a call through to + // "normalize-package-data". We only call the "fixer" functions because + // outside of that it was also clobbering _id (which we already conditionally + // do) and also adding the gypfile script (which we also already + // conditionally do) + + // Some steps are isolated so we can do a limited subset of these in `fix` + if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) { + if (data.repositories) { + changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`) + data.repository = data.repositories[0] + } + if (data.repository) { + if (typeof data.repository === 'string') { + changes?.push('"repository" was changed from a string to an object') + data.repository = { + type: 'git', + url: data.repository, + } + } + if (data.repository.url) { + const hosted = lazyHostedGitInfo().fromUrl(data.repository.url) + let r + if (hosted) { + if (hosted.getDefaultRepresentation() === 'shortcut') { + r = hosted.https() + } else { + r = hosted.toString() + } + if (r !== data.repository.url) { + changes?.push(`"repository.url" was normalized to "${r}"`) + data.repository.url = r + } + } + } + } + } + + if (steps.includes('fixDependencies') || steps.includes('normalizeData')) { + // peerDependencies? 
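One behavioral fix in the `bundledDependencies` hunk above: the change note is now recorded only when the legacy spelling was actually present. A sketch of both bundle steps with a hypothetical manifest (the `true` expansion follows the comment above; that expansion code sits outside this hunk):

```js
const data = {
  dependencies: { abbrev: '^2.0.0', semver: '^7.5.3' },
  bundledDependencies: true, // legacy spelling
}

// copy the typo'd field over, then delete it
if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
  data.bundleDependencies = data.bundledDependencies
}
delete data.bundledDependencies

// `true` expands to the names of everything in dependencies
if (data.bundleDependencies === true) {
  data.bundleDependencies = Object.keys(data.dependencies || {})
}

console.log(data.bundleDependencies) // → [ 'abbrev', 'semver' ]
```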
+ // devDependencies is meaningless here, it's ignored on an installed package + for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) { + if (data[type]) { + let secondWarning = true + if (typeof data[type] === 'string') { + changes?.push(`"${type}" was converted from a string into an object`) + data[type] = data[type].trim().split(/[\n\r\s\t ,]+/) + secondWarning = false + } + if (Array.isArray(data[type])) { + if (secondWarning) { + changes?.push(`"${type}" was converted from an array into an object`) + } + const o = {} + for (const d of data[type]) { + if (typeof d === 'string') { + const dep = d.trim().split(/(:?[@\s><=])/) + const dn = dep.shift() + const dv = dep.join('').replace(/^@/, '').trim() + o[dn] = dv + } + } + data[type] = o + } + } + } + // normalize-package-data used to put optional dependencies BACK into + // dependencies here, we no longer do this + + for (const deps of ['dependencies', 'devDependencies']) { + if (deps in data) { + if (!data[deps] || typeof data[deps] !== 'object') { + changes?.push(`Removed invalid "${deps}"`) + delete data[deps] + } else { + for (const d in data[deps]) { + const r = data[deps][d] + if (typeof r !== 'string') { + changes?.push(`Removed invalid "${deps}.${d}"`) + delete data[deps][d] + } + const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString() + if (hosted && hosted !== data[deps][d]) { + changes?.push(`Normalized git reference to "${deps}.${d}"`) + data[deps][d] = hosted.toString() + } + } + } + } + } + } + + // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step + if (steps.includes('normalizeData')) { + const { normalizeData } = require('./normalize-data.js') + normalizeData(data, changes) + } +} + +// Steps that require await, distinct from sync-steps.js +async function asyncSteps (pkg, { steps, root, changes }) { + const data = pkg.content + const scripts = data.scripts || {} + const pkgId = `${data.name ?? ''}@${data.version ?? 
''}` + + // add "install" attribute if any "*.gyp" files exist + if (steps.includes('gypfile')) { + if (!scripts.install && !scripts.preinstall && data.gypfile !== false) { + const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path }) + if (files.length) { + scripts.install = 'node-gyp rebuild' + data.scripts = scripts + data.gypfile = true + changes?.push(`"scripts.install" was set to "node-gyp rebuild"`) + changes?.push(`"gypfile" was set to "true"`) + } + } + } + + // add "start" attribute if "server.js" exists + if (steps.includes('serverjs') && !scripts.start) { + try { + await fs.access(path.join(pkg.path, 'server.js')) + scripts.start = 'node server.js' + data.scripts = scripts + changes?.push('"scripts.start" was set to "node server.js"') + } catch { + // do nothing + } + } + // populate "authors" attribute if (steps.includes('authors') && !data.contributors) { try { @@ -373,22 +473,19 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) normalizePackageMan(data, changes) } - if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) { - normalizePackageBin(data, changes) - } - // expand "directories.bin" if (steps.includes('binDir') && data.directories?.bin && !data.bin) { - const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin)) - const bins = await lazyLoadGlob()('**', { cwd: binsDir }) + const binPath = secureAndUnixifyPath(data.directories.bin) + const bins = await lazyLoadGlob()('**', { cwd: path.resolve(pkg.path, binPath) }) data.bin = bins.reduce((acc, binFile) => { if (binFile && !binFile.startsWith('.')) { const binName = path.basename(binFile) - acc[binName] = path.join(data.directories.bin, binFile) + // binPath is already cleaned and unixified, no need to path.join here. + acc[binName] = `${binPath}/${secureAndUnixifyPath(binFile)}` } return acc }, {}) - // *sigh* + } else if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) { normalizePackageBin(data, changes) } @@ -486,104 +583,6 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) } } - // "normalizeData" from "read-package-json", which was just a call through to - // "normalize-package-data". We only call the "fixer" functions because - // outside of that it was also clobbering _id (which we already conditionally - // do) and also adding the gypfile script (which we also already - // conditionally do) - - // Some steps are isolated so we can do a limited subset of these in `fix` - if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) { - if (data.repositories) { - changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`) - data.repository = data.repositories[0] - } - if (data.repository) { - if (typeof data.repository === 'string') { - changes?.push('"repository" was changed from a string to an object') - data.repository = { - type: 'git', - url: data.repository, - } - } - if (data.repository.url) { - const hosted = lazyHostedGitInfo().fromUrl(data.repository.url) - let r - if (hosted) { - if (hosted.getDefaultRepresentation() === 'shortcut') { - r = hosted.https() - } else { - r = hosted.toString() - } - if (r !== data.repository.url) { - changes?.push(`"repository.url" was normalized to "${r}"`) - data.repository.url = r - } - } - } - } - } - - if (steps.includes('fixDependencies') || steps.includes('normalizeData')) { - // peerDependencies? 
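In the `binDir` rewrite above, `binPath` has already been cleaned and unixified, so bin entries are assembled with a plain template string; `path.join()` would reintroduce the platform separator (backslashes on Windows). A small comparison with hypothetical paths:

```js
const path = require('node:path')

const binPath = 'bin' // already run through secureAndUnixifyPath
const binFile = 'cli.js'

const oldEntry = path.join(binPath, binFile) // 'bin\\cli.js' on Windows
const newEntry = `${binPath}/${binFile}`     // 'bin/cli.js' everywhere

console.log({ oldEntry, newEntry })
```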
- // devDependencies is meaningless here, it's ignored on an installed package - for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) { - if (data[type]) { - let secondWarning = true - if (typeof data[type] === 'string') { - changes?.push(`"${type}" was converted from a string into an object`) - data[type] = data[type].trim().split(/[\n\r\s\t ,]+/) - secondWarning = false - } - if (Array.isArray(data[type])) { - if (secondWarning) { - changes?.push(`"${type}" was converted from an array into an object`) - } - const o = {} - for (const d of data[type]) { - if (typeof d === 'string') { - const dep = d.trim().split(/(:?[@\s><=])/) - const dn = dep.shift() - const dv = dep.join('').replace(/^@/, '').trim() - o[dn] = dv - } - } - data[type] = o - } - } - } - // normalize-package-data used to put optional dependencies BACK into - // dependencies here, we no longer do this - - for (const deps of ['dependencies', 'devDependencies']) { - if (deps in data) { - if (!data[deps] || typeof data[deps] !== 'object') { - changes?.push(`Removed invalid "${deps}"`) - delete data[deps] - } else { - for (const d in data[deps]) { - const r = data[deps][d] - if (typeof r !== 'string') { - changes?.push(`Removed invalid "${deps}.${d}"`) - delete data[deps][d] - } - const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString() - if (hosted && hosted !== data[deps][d]) { - changes?.push(`Normalized git reference to "${deps}.${d}"`) - data[deps][d] = hosted.toString() - } - } - } - } - } - } - - // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step - if (steps.includes('normalizeData')) { - const { normalizeData } = require('./normalize-data.js') - normalizeData(data, changes) - } - // Warn if the bin references don't point to anything. This might be better // in normalize-package-data if it had access to the file path. if (steps.includes('binRefs') && data.bin instanceof Object) { @@ -598,4 +597,18 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) } } -module.exports = normalize +// We don't want the `changes` array in here by default because this is a hot path for parsing packuments during install. The calling method passes it in if it wants to track changes. +async function normalize (pkg, opts) { + if (!pkg.content) { + throw new Error('Can not normalize without content') + } + await asyncSteps(pkg, opts) + // the normalizeData part of this needs to be the last thing ran, so sync comes second + syncSteps(pkg, opts) +} + +function syncNormalize (pkg, opts) { + syncSteps(pkg, opts) +} + +module.exports = { normalize, syncNormalize } diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE new file mode 100644 index 0000000000000..8f90f96f4c6c5 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. 
IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, +OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, +DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS +ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS +SOFTWARE. diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js new file mode 100644 index 0000000000000..e25a4d1426821 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js @@ -0,0 +1,172 @@ +// The goal here is to minimize both git workload and +// the number of refs we download over the network. +// +// Every method ends up with the checked out working dir +// at the specified ref, and resolves with the git sha. + +// Only certain whitelisted hosts get shallow cloning. +// Many hosts (including GHE) don't always support it. +// A failed shallow fetch takes a LOT longer than a full +// fetch in most cases, so we skip it entirely. +// Set opts.gitShallow = true/false to force this behavior +// one way or the other. +const shallowHosts = new Set([ + 'github.com', + 'gist.github.com', + 'gitlab.com', + 'bitbucket.com', + 'bitbucket.org', +]) +// we have to use url.parse until we add the same shim that hosted-git-info has +// to handle scp:// urls +const { parse } = require('url') // eslint-disable-line node/no-deprecated-api +const path = require('path') + +const getRevs = require('./revs.js') +const spawn = require('./spawn.js') +const { isWindows } = require('./utils.js') + +const pickManifest = require('npm-pick-manifest') +const fs = require('fs/promises') + +module.exports = (repo, ref = 'HEAD', target = null, opts = {}) => + getRevs(repo, opts).then(revs => clone( + repo, + revs, + ref, + resolveRef(revs, ref, opts), + target || defaultTarget(repo, opts.cwd), + opts + )) + +const maybeShallow = (repo, opts) => { + if (opts.gitShallow === false || opts.gitShallow) { + return opts.gitShallow + } + return shallowHosts.has(parse(repo).host) +} + +const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) => + path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, ''))) + +const clone = (repo, revs, ref, revDoc, target, opts) => { + if (!revDoc) { + return unresolved(repo, ref, target, opts) + } + if (revDoc.sha === revs.refs.HEAD.sha) { + return plain(repo, revDoc, target, opts) + } + if (revDoc.type === 'tag' || revDoc.type === 'branch') { + return branch(repo, revDoc, target, opts) + } + return other(repo, revDoc, target, opts) +} + +const resolveRef = (revs, ref, opts) => { + const { spec = {} } = opts + ref = spec.gitCommittish || ref + /* istanbul ignore next - will fail anyway, can't pull */ + if (!revs) { + return null + } + if (spec.gitRange) { + return pickManifest(revs, spec.gitRange, opts) + } + if (!ref) { + return revs.refs.HEAD + } + if (revs.refs[ref]) { + return revs.refs[ref] + } + if (revs.shas[ref]) { + return revs.refs[revs.shas[ref][0]] + } + return null +} + +// pull request or some other kind of advertised ref +const other = (repo, revDoc, target, opts) => { + const shallow = maybeShallow(repo, opts) + + const fetchOrigin = ['fetch', 'origin', revDoc.rawRef] + .concat(shallow ? ['--depth=1'] : []) + + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(['init'])) + .then(() => isWindows(opts) + ? 
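The shallow-clone decision above is worth reading on its own: `opts.gitShallow` forces the behavior either way, otherwise only the allow-listed hosts get `--depth=1`. The same logic as a runnable sketch (repo URLs are hypothetical):

```js
// eslint-disable-next-line node/no-deprecated-api
const { parse } = require('node:url')

const shallowHosts = new Set([
  'github.com', 'gist.github.com', 'gitlab.com', 'bitbucket.com', 'bitbucket.org',
])

const maybeShallow = (repo, opts = {}) => {
  if (opts.gitShallow === false || opts.gitShallow) {
    return opts.gitShallow
  }
  return shallowHosts.has(parse(repo).host)
}

console.log(maybeShallow('https://github.com/npm/cli.git'))        // → true
console.log(maybeShallow('https://git.example.com/some/repo.git')) // → false
console.log(maybeShallow('https://git.example.com/repo.git', { gitShallow: true })) // → true
```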
git(['config', '--local', '--add', 'core.longpaths', 'true']) + : null) + .then(() => git(['remote', 'add', 'origin', repo])) + .then(() => git(fetchOrigin)) + .then(() => git(['checkout', revDoc.sha])) + .then(() => updateSubmodules(target, opts)) + .then(() => revDoc.sha) +} + +// tag or branches. use -b +const branch = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + '-b', + revDoc.ref, + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +// just the head. clone it +const plain = (repo, revDoc, target, opts) => { + const args = [ + 'clone', + repo, + target, + '--recurse-submodules', + ] + if (maybeShallow(repo, opts)) { + args.push('--depth=1') + } + if (isWindows(opts)) { + args.push('--config', 'core.longpaths=true') + } + return spawn(args, opts).then(() => revDoc.sha) +} + +const updateSubmodules = async (target, opts) => { + const hasSubmodules = await fs.stat(`${target}/.gitmodules`) + .then(() => true) + .catch(() => false) + if (!hasSubmodules) { + return null + } + return spawn([ + 'submodule', + 'update', + '-q', + '--init', + '--recursive', + ], { ...opts, cwd: target }) +} + +const unresolved = (repo, ref, target, opts) => { + // can't do this one shallowly, because the ref isn't advertised + // but we can avoid checking out the working dir twice, at least + const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : [] + const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git'] + const git = (args) => spawn(args, { ...opts, cwd: target }) + return fs.mkdir(target, { recursive: true }) + .then(() => git(cloneArgs.concat(lp))) + .then(() => git(['init'])) + .then(() => git(['checkout', ref])) + .then(() => updateSubmodules(target, opts)) + .then(() => git(['rev-parse', '--revs-only', 'HEAD'])) + .then(({ stdout }) => stdout.trim()) +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js new file mode 100644 index 0000000000000..3ceaa45811669 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js @@ -0,0 +1,36 @@ + +const maxRetry = 3 + +class GitError extends Error { + shouldRetry () { + return false + } +} + +class GitConnectionError extends GitError { + constructor () { + super('A git connection error occurred') + } + + shouldRetry (number) { + return number < maxRetry + } +} + +class GitPathspecError extends GitError { + constructor () { + super('The git reference could not be found') + } +} + +class GitUnknownError extends GitError { + constructor () { + super('An unknown git error occurred') + } +} + +module.exports = { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js new file mode 100644 index 0000000000000..34bd310b88e5d --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js @@ -0,0 +1,15 @@ +const is = require('./is.js') +const { dirname } = require('path') + +module.exports = async ({ cwd = process.cwd(), root } = {}) => { + while (true) { + if (await is({ cwd })) { + return cwd + } + const next = dirname(cwd) + if (cwd === root || cwd === next) { + return null + } + cwd = next + } +} diff --git 
a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js new file mode 100644 index 0000000000000..10a65f782e6da --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js @@ -0,0 +1,9 @@ +module.exports = { + clone: require('./clone.js'), + revs: require('./revs.js'), + spawn: require('./spawn.js'), + is: require('./is.js'), + find: require('./find.js'), + isClean: require('./is-clean.js'), + errors: require('./errors.js'), +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js new file mode 100644 index 0000000000000..182373be94193 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js @@ -0,0 +1,6 @@ +const spawn = require('./spawn.js') + +module.exports = (opts = {}) => + spawn(['status', '--porcelain=v1', '-uno'], opts) + .then(res => !res.stdout.trim().split(/\r?\n+/) + .map(l => l.trim()).filter(l => l).length) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js new file mode 100644 index 0000000000000..f5a0e8754f10d --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js @@ -0,0 +1,4 @@ +// not an airtight indicator, but a good gut-check to even bother trying +const { stat } = require('fs/promises') +module.exports = ({ cwd = process.cwd() } = {}) => + stat(cwd + '/.git').then(() => true, () => false) diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js new file mode 100644 index 0000000000000..6bd7e7a4c1531 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js @@ -0,0 +1,147 @@ +// turn an array of lines from `git ls-remote` into a thing +// vaguely resembling a packument, where docs are a resolved ref + +const semver = require('semver') + +module.exports = lines => finish(lines.reduce(linesToRevsReducer, { + versions: {}, + 'dist-tags': {}, + refs: {}, + shas: {}, +})) + +const finish = revs => distTags(shaList(peelTags(revs))) + +// We can check out shallow clones on specific SHAs if we have a ref +const shaList = revs => { + Object.keys(revs.refs).forEach(ref => { + const doc = revs.refs[ref] + if (!revs.shas[doc.sha]) { + revs.shas[doc.sha] = [ref] + } else { + revs.shas[doc.sha].push(ref) + } + }) + return revs +} + +// Replace any tags with their ^{} counterparts, if those exist +const peelTags = revs => { + Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => { + const peeled = revs.refs[ref] + const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')] + if (unpeeled) { + unpeeled.sha = peeled.sha + delete revs.refs[ref] + } + }) + return revs +} + +const distTags = revs => { + // not entirely sure what situations would result in an + // ichabod repo, but best to be careful in Sleepy Hollow anyway + const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {} + const versions = Object.keys(revs.versions) + versions.forEach(v => { + // simulate a dist-tags with latest pointing at the + // 'latest' branch if one exists and is a version, + // or HEAD if not. 
+ const ver = revs.versions[v] + if (revs.refs.latest && ver.sha === revs.refs.latest.sha) { + revs['dist-tags'].latest = v + } else if (ver.sha === HEAD.sha) { + revs['dist-tags'].HEAD = v + if (!revs.refs.latest) { + revs['dist-tags'].latest = v + } + } + }) + return revs +} + +const refType = ref => { + if (ref.startsWith('refs/tags/')) { + return 'tag' + } + if (ref.startsWith('refs/heads/')) { + return 'branch' + } + if (ref.startsWith('refs/pull/')) { + return 'pull' + } + if (ref === 'HEAD') { + return 'head' + } + // Could be anything, ignore for now + /* istanbul ignore next */ + return 'other' +} + +// return the doc, or null if we should ignore it. +const lineToRevDoc = line => { + const split = line.trim().split(/\s+/, 2) + if (split.length < 2) { + return null + } + + const sha = split[0].trim() + const rawRef = split[1].trim() + const type = refType(rawRef) + + if (type === 'tag') { + // refs/tags/foo^{} is the 'peeled tag', ie the commit + // that is tagged by refs/tags/foo they resolve to the same + // content, just different objects in git's data structure. + // But, we care about the thing the tag POINTS to, not the tag + // object itself, so we only look at the peeled tag refs, and + // ignore the pointer. + // For now, though, we have to save both, because some tags + // don't have peels, if they were not annotated. + const ref = rawRef.slice('refs/tags/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'branch') { + const ref = rawRef.slice('refs/heads/'.length) + return { sha, ref, rawRef, type } + } + + if (type === 'pull') { + // NB: merged pull requests installable with #pull/123/merge + // for the merged pr, or #pull/123 for the PR head + const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '') + return { sha, ref, rawRef, type } + } + + if (type === 'head') { + const ref = 'HEAD' + return { sha, ref, rawRef, type } + } + + // at this point, all we can do is leave the ref un-munged + return { sha, ref: rawRef, rawRef, type } +} + +const linesToRevsReducer = (revs, line) => { + const doc = lineToRevDoc(line) + + if (!doc) { + return revs + } + + revs.refs[doc.ref] = doc + revs.refs[doc.rawRef] = doc + + if (doc.type === 'tag') { + // try to pull a semver value out of tags like `release-v1.2.3` + // which is a pretty common pattern. 
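To see what `lines-to-revs` produces, here is a sketch that feeds it hypothetical `git ls-remote` output (the deep require into the helper is only for illustration):

```js
const linesToRevs = require('@npmcli/git/lib/lines-to-revs.js')

const sha = 'edf6e1903d1d20b0f2f4bd2d8b7bbbbf55e69b20'
const revs = linesToRevs([
  `${sha}\tHEAD`,
  `${sha}\trefs/heads/main`,
  `1111111111111111111111111111111111111111\trefs/tags/v1.2.3`,
  `${sha}\trefs/tags/v1.2.3^{}`, // peeled tag: the commit the tag points at
])

console.log(revs.refs.main.type)                // → 'branch'
console.log(revs.versions['1.2.3'].sha === sha) // → true (peeled sha wins)
console.log(revs['dist-tags'])                  // → { HEAD: '1.2.3', latest: '1.2.3' }
```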
+ const match = !doc.ref.endsWith('^{}') && + doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/) + if (match && semver.valid(match[1], true)) { + revs.versions[semver.clean(match[1], true)] = doc + } + } + + return revs +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js new file mode 100644 index 0000000000000..7540ec7c8b9f7 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js @@ -0,0 +1,33 @@ +const { + GitConnectionError, + GitPathspecError, + GitUnknownError, +} = require('./errors.js') + +const connectionErrorRe = new RegExp([ + 'remote error: Internal Server Error', + 'The remote end hung up unexpectedly', + 'Connection timed out', + 'Operation timed out', + 'Failed to connect to .* Timed out', + 'Connection reset by peer', + 'SSL_ERROR_SYSCALL', + 'The requested URL returned error: 503', +].join('|')) + +const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/ + +function makeError (er) { + const message = er.stderr + let gitEr + if (connectionErrorRe.test(message)) { + gitEr = new GitConnectionError(message) + } else if (missingPathspecRe.test(message)) { + gitEr = new GitPathspecError(message) + } else { + gitEr = new GitUnknownError(message) + } + return Object.assign(gitEr, er) +} + +module.exports = makeError diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js new file mode 100644 index 0000000000000..1e80e9efe4989 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js @@ -0,0 +1,57 @@ +const fs = require('node:fs') +const os = require('node:os') +const path = require('node:path') +const ini = require('ini') + +const gitConfigPath = path.join(os.homedir(), '.gitconfig') + +let cachedConfig = null + +// Function to load and cache the git config +const loadGitConfig = () => { + if (cachedConfig === null) { + try { + cachedConfig = {} + if (fs.existsSync(gitConfigPath)) { + const configContent = fs.readFileSync(gitConfigPath, 'utf-8') + cachedConfig = ini.parse(configContent) + } + } catch (error) { + cachedConfig = {} + } + } + return cachedConfig +} + +const checkGitConfigs = () => { + const config = loadGitConfig() + return { + sshCommandSetInConfig: config?.core?.sshCommand !== undefined, + askPassSetInConfig: config?.core?.askpass !== undefined, + } +} + +const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined +const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined +const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs() + +// Values we want to set if they're not already defined by the end user +// This defaults to accepting new ssh host key fingerprints +const finalGitEnv = { + ...(askPassSetInEnv || askPassSetInConfig ? {} : { + GIT_ASKPASS: 'echo', + }), + ...(sshCommandSetInEnv || sshCommandSetInConfig ? 
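A quick check of the `make-error` classification above: connection-looking stderr yields a retryable error, a missing pathspec does not. The stderr strings are hypothetical:

```js
const makeError = require('@npmcli/git/lib/make-error.js')

const netErr = makeError({ stderr: 'fatal: The remote end hung up unexpectedly' })
console.log(netErr.constructor.name, netErr.shouldRetry(1)) // → GitConnectionError true

const refErr = makeError({ stderr: "error: pathspec 'nope' did not match any file(s) known to git" })
console.log(refErr.constructor.name, refErr.shouldRetry(1)) // → GitPathspecError false
```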
{} : { + GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new', + }), +} + +module.exports = (opts = {}) => ({ + stdioString: true, + ...opts, + shell: false, + env: opts.env || { ...finalGitEnv, ...process.env }, +}) + +// Export the loadGitConfig function for testing +module.exports.loadGitConfig = loadGitConfig diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js new file mode 100644 index 0000000000000..ebcc848fa3458 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js @@ -0,0 +1,22 @@ +const spawn = require('./spawn.js') +const { LRUCache } = require('lru-cache') +const linesToRevs = require('./lines-to-revs.js') + +const revsCache = new LRUCache({ + max: 100, + ttl: 5 * 60 * 1000, +}) + +module.exports = async (repo, opts = {}) => { + if (!opts.noGitRevCache) { + const cached = revsCache.get(repo) + if (cached) { + return cached + } + } + + const { stdout } = await spawn(['ls-remote', repo], opts) + const revs = linesToRevs(stdout.trim().split('\n')) + revsCache.set(repo, revs) + return revs +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js new file mode 100644 index 0000000000000..03c1cbde21547 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js @@ -0,0 +1,44 @@ +const spawn = require('@npmcli/promise-spawn') +const promiseRetry = require('promise-retry') +const { log } = require('proc-log') +const makeError = require('./make-error.js') +const makeOpts = require('./opts.js') + +module.exports = (gitArgs, opts = {}) => { + const whichGit = require('./which.js') + const gitPath = whichGit(opts) + + if (gitPath instanceof Error) { + return Promise.reject(gitPath) + } + + // undocumented option, mostly only here for tests + const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects' + ? 
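Usage sketch of the `revs()` cache shown above: the first call shells out to `git ls-remote`, repeat calls within the 5-minute TTL return the same cached object, and `noGitRevCache` forces a refresh. The repo URL is hypothetical and the calls need network access plus a git binary:

```js
const git = require('@npmcli/git')

async function main () {
  const repo = 'https://github.com/npm/cli.git'
  const first = await git.revs(repo)  // spawns `git ls-remote <repo>`
  const second = await git.revs(repo) // served from the in-memory LRU cache
  console.log(first === second)       // → true, same cached object

  await git.revs(repo, { noGitRevCache: true }) // bypass the cache
}

main()
```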
gitArgs + : ['--no-replace-objects', ...gitArgs] + + let retryOpts = opts.retry + if (retryOpts === null || retryOpts === undefined) { + retryOpts = { + retries: opts.fetchRetries || 2, + factor: opts.fetchRetryFactor || 10, + maxTimeout: opts.fetchRetryMaxtimeout || 60000, + minTimeout: opts.fetchRetryMintimeout || 1000, + } + } + return promiseRetry((retryFn, number) => { + if (number !== 1) { + log.silly('git', `Retrying git command: ${ + args.join(' ')} attempt # ${number}`) + } + + return spawn(gitPath, args, makeOpts(opts)) + .catch(er => { + const gitError = makeError(er) + if (!gitError.shouldRetry(number)) { + throw gitError + } + retryFn(gitError) + }) + }, retryOpts) +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js new file mode 100644 index 0000000000000..fcd9578a19597 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js @@ -0,0 +1,3 @@ +const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32' + +exports.isWindows = isWindows diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js new file mode 100644 index 0000000000000..dc2a1ad212166 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js @@ -0,0 +1,18 @@ +const which = require('which') + +let gitPath +try { + gitPath = which.sync('git') +} catch { + // ignore errors +} + +module.exports = (opts = {}) => { + if (opts.git) { + return opts.git + } + if (!gitPath || opts.git === false) { + return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' }) + } + return gitPath +} diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json new file mode 100644 index 0000000000000..f4e844bccab0d --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json @@ -0,0 +1,58 @@ +{ + "name": "@npmcli/git", + "version": "7.0.0", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "description": "a util for spawning git from npm CLI contexts", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/git.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "lint": "npm run eslint", + "snap": "tap", + "test": "tap", + "posttest": "npm run lint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "tap": { + "timeout": 600, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.24.1", + "npm-package-arg": "^13.0.0", + "slash": "^3.0.0", + "tap": "^16.0.1" + }, + "dependencies": { + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", + "lru-cache": "^11.2.1", + "npm-pick-manifest": "^11.0.1", + "proc-log": "^5.0.0", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^5.0.0" + }, + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.24.1", + "publish": true + } +} diff --git a/node_modules/@npmcli/package-json/node_modules/glob/LICENSE b/node_modules/@npmcli/package-json/node_modules/glob/LICENSE new file mode 100644 index 0000000000000..ec7df93329abf --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js new file mode 100644 index 0000000000000..e1339bbbcf57f --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js @@ -0,0 +1,247 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Glob = void 0; +const minimatch_1 = require("minimatch"); +const node_url_1 = require("node:url"); +const path_scurry_1 = require("path-scurry"); +const pattern_js_1 = require("./pattern.js"); +const walker_js_1 = require("./walker.js"); +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32 + : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin + : opts.platform ? path_scurry_1.PathScurryPosix + : path_scurry_1.PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. 
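The constructor shown here normalizes the cwd and pattern options, picks a platform-appropriate PathScurry, and (just below) compiles each pattern through Minimatch. A minimal usage sketch of the finished class, assuming a resolvable `glob` install rather than this vendored path; the pattern and cwd are illustrative, not part of the patch:

const { Glob } = require('glob')

// build a walker; nodir restricts results to files
const g = new Glob('src/**/*.js', { cwd: process.cwd(), nodir: true })
console.log(g.walkSync())

// per the doc comment above, a previous Glob can be passed as the options
// to reuse its settings and path-scurry caches with a new pattern
const g2 = new Glob('test/**/*.js', g)
console.log(g2.walkSync().length)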
+ const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new pattern_js_1.Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. 
+ */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +exports.Glob = Glob; +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js new file mode 100644 index 0000000000000..0918bd57e0f1c --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js @@ -0,0 +1,27 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.hasMagic = void 0; +const minimatch_1 = require("minimatch"); +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. + */ +const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new minimatch_1.Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +exports.hasMagic = hasMagic; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js new file mode 100644 index 0000000000000..5f1fde0680dea --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js @@ -0,0 +1,119 @@ +"use strict"; +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Ignore = void 0; +const minimatch_1 = require("minimatch"); +const pattern_js_1 = require("./pattern.js"); +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. 
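The has-magic.js module above treats brace expansion as non-magic unless `magicalBraces` is set. A short hedged illustration of that behavior, assuming a resolvable `glob` install rather than this vendored path:

const { hasMagic } = require('glob')

console.log(hasMagic('x{a,b}y'))                          // false: braces alone are not magic
console.log(hasMagic('x{a,b}y', { magicalBraces: true })) // true once magicalBraces is set
console.log(hasMagic('src/*.js'))                         // true: '*' is a magic character
console.log(hasMagic(['plain.txt', 'README.md']))         // false: no entry in the array has magic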
+ // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + const mm = new minimatch_1.Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' && globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform); + const m = new minimatch_1.Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +exports.Ignore = Ignore; +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js new file mode 100644 index 0000000000000..151495d170efa --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js @@ -0,0 +1,68 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0; +exports.globStreamSync = globStreamSync; +exports.globStream = globStream; +exports.globSync = globSync; +exports.globIterateSync = globIterateSync; +exports.globIterate = globIterate; +const minimatch_1 = require("minimatch"); +const glob_js_1 = require("./glob.js"); +const has_magic_js_1 = require("./has-magic.js"); +var minimatch_2 = require("minimatch"); +Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } }); +Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } }); +var glob_js_2 = require("./glob.js"); +Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } }); +var has_magic_js_2 = require("./has-magic.js"); +Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } }); +var ignore_js_1 = require("./ignore.js"); 
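The Ignore class above is the machinery behind the public `ignore` option: ignore patterns are parsed in dot:true mode, and a pattern ending in /** also prunes everything beneath the matched directory. A hedged sketch of that option (patterns are illustrative), assuming a resolvable `glob` install:

const { globSync } = require('glob')

const files = globSync('**/*.js', {
  // node_modules/** also skips children of matched dirs; *.test.js just filters matches
  ignore: ['node_modules/**', '**/*.test.js'],
})
console.log(files)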
+Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } }); +function globStreamSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).streamSync(); +} +function globStream(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).stream(); +} +function globSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).walk(); +} +function globIterateSync(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterateSync(); +} +function globIterate(pattern, options = {}) { + return new glob_js_1.Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +exports.streamSync = globStreamSync; +exports.stream = Object.assign(globStream, { sync: globStreamSync }); +exports.iterateSync = globIterateSync; +exports.iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +exports.sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +exports.glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync: exports.sync, + globStream, + stream: exports.stream, + globStreamSync, + streamSync: exports.streamSync, + globIterate, + iterate: exports.iterate, + globIterateSync, + iterateSync: exports.iterateSync, + Glob: glob_js_1.Glob, + hasMagic: has_magic_js_1.hasMagic, + escape: minimatch_1.escape, + unescape: minimatch_1.unescape, +}); +exports.glob.glob = exports.glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js new file mode 100644 index 0000000000000..f0de35fb5bed9 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js @@ -0,0 +1,219 @@ +"use strict"; +// this is just a very light wrapper around 2 arrays with an offset index +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pattern = void 0; +const minimatch_1 = require("minimatch"); +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root 
entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === minimatch_1.GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? + this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? + this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' 
+ // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? + this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. + */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +exports.Pattern = Pattern; +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js new file mode 100644 index 0000000000000..ee3bb4397e0b2 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js @@ -0,0 +1,301 @@ +"use strict"; +// synchronous utility for filtering entries and calculating subwalks +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0; +const minimatch_1 = require("minimatch"); +/** + * A cache of which patterns have been processed for a given Path + */ +class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +exports.HasWalkedCache = HasWalkedCache; +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +exports.MatchRecord = MatchRecord; +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. 
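pattern.js wraps minimatch's parsed output in an indexed, immutable view that the walker consumes one portion at a time via rest(). A hedged sketch of building one the same way the Glob constructor shown earlier does; the relative require path and the example pattern are assumptions for illustration only:

const { Minimatch, GLOBSTAR } = require('minimatch')
const { Pattern } = require('./pattern.js')

const mm = new Minimatch('src/**/*.js', {})
// set[0] holds the parsed parts (strings, GLOBSTAR, RegExps); globParts[0] the raw strings
const p = new Pattern(mm.set[0], mm.globParts[0], 0, process.platform)

console.log(p.pattern())                     // 'src', the first (literal) portion
console.log(p.hasMore())                     // true, more portions follow
console.log(p.rest().pattern() === GLOBSTAR) // true, the next portion is the ** globstar
console.log(p.globString())                  // 'src/**/*.js'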
+ */ +class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +exports.SubWalks = SubWalks; +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? + this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === minimatch_1.GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. 
at the fs root, + // and no thanks, I'm not gonna test that specific case. + /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === minimatch_1.GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? 
+ if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +exports.Processor = Processor; +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js new file mode 100644 index 0000000000000..cb15946d9a852 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js @@ -0,0 +1,387 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0; +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. + * + * @module + */ +const minipass_1 = require("minipass"); +const ignore_js_1 = require("./ignore.js"); +const processor_js_1 = require("./processor.js"); +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts) + : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? [], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? 
await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
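The walk callbacks in this file coordinate their async fan-out with a plain counter join rather than promises: `tasks` starts at 1, each pending match or subwalk bumps it, and the completion callback fires when the count drops back to zero. A generic, hedged sketch of that same pattern (the names here are illustrative, not taken from the patch):

function runAll (jobs, done) {
  let tasks = 1
  const next = () => {
    if (--tasks === 0) {
      done()
    }
  }
  for (const job of jobs) {
    tasks++
    job().then(() => next())
  }
  // the initial 1 keeps done() from firing before every job has been scheduled
  next()
}

runAll([() => Promise.resolve('a'), () => Promise.resolve('b')], () => console.log('all settled'))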
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
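matchFinish() earlier in this file decides what each match looks like on the way out, based on withFileTypes, absolute, posix, dotRelative, and mark. A hedged sketch of those public options (the pattern is illustrative), assuming a resolvable `glob` install:

const { globSync } = require('glob')

console.log(globSync('src/*', { mark: true }))          // directories get a trailing separator
console.log(globSync('src/*', { absolute: true }))      // fully resolved absolute paths
console.log(globSync('src/*', { dotRelative: true }))   // relative matches prefixed with './'
console.log(globSync('src/*', { withFileTypes: true })) // path-scurry Path objects, not strings
// note: absolute and withFileTypes:true are mutually exclusive; the Glob constructor throws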
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +exports.GlobUtil = GlobUtil; +class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +exports.GlobWalker = GlobWalker; +class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new minipass_1.Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +exports.GlobStream = GlobStream; +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts new file mode 100644 index 0000000000000..77298e4770817 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts @@ -0,0 +1,3 @@ +#!/usr/bin/env node +export {}; +//# sourceMappingURL=bin.d.mts.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs new file mode 100755 index 0000000000000..553bb79303d90 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs 
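The GlobWalker and GlobStream classes above back the collect-everything and streaming entry points respectively; GlobStream writes matches into a Minipass stream and pauses the walk whenever the stream stops flowing. A hedged usage sketch (pattern and handlers are illustrative), assuming a resolvable `glob` install:

const { globStream } = require('glob')

const stream = globStream('**/*.md', { nodir: true })
stream.on('data', file => console.log('match:', file))
stream.on('end', () => console.log('walk complete'))
// backpressure: if the consumer stops reading, the walker pauses and resumes on drain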
@@ -0,0 +1,276 @@ +#!/usr/bin/env node +import { foregroundChild } from 'foreground-child'; +import { existsSync } from 'fs'; +import { jack } from 'jackspeak'; +import { loadPackageJson } from 'package-json-from-dist'; +import { join } from 'path'; +import { globStream } from './index.js'; +const { version } = loadPackageJson(import.meta.url, '../package.json'); +const j = jack({ + usage: 'glob [options] [ [ ...]]', +}) + .description(` + Glob v${version} + + Expand the positional glob expression arguments into any matching file + system paths found. + `) + .opt({ + cmd: { + short: 'c', + hint: 'command', + description: `Run the command provided, passing the glob expression + matches as arguments.`, + }, +}) + .opt({ + default: { + short: 'p', + hint: 'pattern', + description: `If no positional arguments are provided, glob will use + this pattern`, + }, +}) + .flag({ + all: { + short: 'A', + description: `By default, the glob cli command will not expand any + arguments that are an exact match to a file on disk. + + This prevents double-expanding, in case the shell expands + an argument whose filename is a glob expression. + + For example, if 'app/*.ts' would match 'app/[id].ts', then + on Windows powershell or cmd.exe, 'glob app/*.ts' will + expand to 'app/[id].ts', as expected. However, in posix + shells such as bash or zsh, the shell will first expand + 'app/*.ts' to a list of filenames. Then glob will look + for a file matching 'app/[id].ts' (ie, 'app/i.ts' or + 'app/d.ts'), which is unexpected. + + Setting '--all' prevents this behavior, causing glob + to treat ALL patterns as glob expressions to be expanded, + even if they are an exact match to a file on disk. + + When setting this option, be sure to enquote arguments + so that the shell will not expand them prior to passing + them to the glob command process. + `, + }, + absolute: { + short: 'a', + description: 'Expand to absolute paths', + }, + 'dot-relative': { + short: 'd', + description: `Prepend './' on relative matches`, + }, + mark: { + short: 'm', + description: `Append a / on any directories matched`, + }, + posix: { + short: 'x', + description: `Always resolve to posix style paths, using '/' as the + directory separator, even on Windows. Drive letter + absolute matches on Windows will be expanded to their + full resolved UNC maths, eg instead of 'C:\\foo\\bar', + it will expand to '//?/C:/foo/bar'. + `, + }, + follow: { + short: 'f', + description: `Follow symlinked directories when expanding '**'`, + }, + realpath: { + short: 'R', + description: `Call 'fs.realpath' on all of the results. In the case + of an entry that cannot be resolved, the entry is + omitted. This incurs a slight performance penalty, of + course, because of the added system calls.`, + }, + stat: { + short: 's', + description: `Call 'fs.lstat' on all entries, whether required or not + to determine if it's a valid match.`, + }, + 'match-base': { + short: 'b', + description: `Perform a basename-only match if the pattern does not + contain any slash characters. That is, '*.js' would be + treated as equivalent to '**/*.js', matching js files + in all directories. + `, + }, + dot: { + description: `Allow patterns to match files/directories that start + with '.', even if the pattern does not start with '.' + `, + }, + nobrace: { + description: 'Do not expand {...} patterns', + }, + nocase: { + description: `Perform a case-insensitive match. This defaults to + 'true' on macOS and Windows platforms, and false on + all others. 
+ + Note: 'nocase' should only be explicitly set when it is + known that the filesystem's case sensitivity differs + from the platform default. If set 'true' on + case-insensitive file systems, then the walk may return + more or less results than expected. + `, + }, + nodir: { + description: `Do not match directories, only files. + + Note: to *only* match directories, append a '/' at the + end of the pattern. + `, + }, + noext: { + description: `Do not expand extglob patterns, such as '+(a|b)'`, + }, + noglobstar: { + description: `Do not expand '**' against multiple path portions. + Ie, treat it as a normal '*' instead.`, + }, + 'windows-path-no-escape': { + description: `Use '\\' as a path separator *only*, and *never* as an + escape character. If set, all '\\' characters are + replaced with '/' in the pattern.`, + }, +}) + .num({ + 'max-depth': { + short: 'D', + description: `Maximum depth to traverse from the current + working directory`, + }, +}) + .opt({ + cwd: { + short: 'C', + description: 'Current working directory to execute/match in', + default: process.cwd(), + }, + root: { + short: 'r', + description: `A string path resolved against the 'cwd', which is + used as the starting point for absolute patterns that + start with '/' (but not drive letters or UNC paths + on Windows). + + Note that this *doesn't* necessarily limit the walk to + the 'root' directory, and doesn't affect the cwd + starting point for non-absolute patterns. A pattern + containing '..' will still be able to traverse out of + the root directory, if it is not an actual root directory + on the filesystem, and any non-absolute patterns will + still be matched in the 'cwd'. + + To start absolute and non-absolute patterns in the same + path, you can use '--root=' to set it to the empty + string. However, be aware that on Windows systems, a + pattern like 'x:/*' or '//host/share/*' will *always* + start in the 'x:/' or '//host/share/' directory, + regardless of the --root setting. + `, + }, + platform: { + description: `Defaults to the value of 'process.platform' if + available, or 'linux' if not. Setting --platform=win32 + on non-Windows systems may cause strange behavior!`, + validOptions: [ + 'aix', + 'android', + 'darwin', + 'freebsd', + 'haiku', + 'linux', + 'openbsd', + 'sunos', + 'win32', + 'cygwin', + 'netbsd', + ], + }, +}) + .optList({ + ignore: { + short: 'i', + description: `Glob patterns to ignore`, + }, +}) + .flag({ + debug: { + short: 'v', + description: `Output a huge amount of noisy debug information about + patterns as they are parsed and used to match files.`, + }, + version: { + short: 'V', + description: `Output the version (${version})`, + }, + help: { + short: 'h', + description: 'Show this usage information', + }, +}); +try { + const { positionals, values } = j.parse(); + if (values.version) { + console.log(version); + process.exit(0); + } + if (values.help) { + console.log(j.usage()); + process.exit(0); + } + if (positionals.length === 0 && !values.default) + throw 'No patterns provided'; + if (positionals.length === 0 && values.default) + positionals.push(values.default); + const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p)); + const matches = values.all ? 
+ [] + : positionals.filter(p => existsSync(p)).map(p => join(p)); + const stream = globStream(patterns, { + absolute: values.absolute, + cwd: values.cwd, + dot: values.dot, + dotRelative: values['dot-relative'], + follow: values.follow, + ignore: values.ignore, + mark: values.mark, + matchBase: values['match-base'], + maxDepth: values['max-depth'], + nobrace: values.nobrace, + nocase: values.nocase, + nodir: values.nodir, + noext: values.noext, + noglobstar: values.noglobstar, + platform: values.platform, + realpath: values.realpath, + root: values.root, + stat: values.stat, + debug: values.debug, + posix: values.posix, + }); + const cmd = values.cmd; + if (!cmd) { + matches.forEach(m => console.log(m)); + stream.on('data', f => console.log(f)); + } + else { + stream.on('data', f => matches.push(f)); + stream.on('end', () => foregroundChild(cmd, matches, { shell: true })); + } +} +catch (e) { + console.error(j.usage()); + console.error(e instanceof Error ? e.message : String(e)); + process.exit(1); +} +//# sourceMappingURL=bin.mjs.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js new file mode 100644 index 0000000000000..c9ff3b0036d94 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js @@ -0,0 +1,243 @@ +import { Minimatch } from 'minimatch'; +import { fileURLToPath } from 'node:url'; +import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry'; +import { Pattern } from './pattern.js'; +import { GlobStream, GlobWalker } from './walker.js'; +// if no process global, just call it linux. +// so we default to case-sensitive, / separators +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * An object that can perform glob pattern traversals. + */ +export class Glob { + absolute; + cwd; + root; + dot; + dotRelative; + follow; + ignore; + magicalBraces; + mark; + matchBase; + maxDepth; + nobrace; + nocase; + nodir; + noext; + noglobstar; + pattern; + platform; + realpath; + scurry; + stat; + signal; + windowsPathsNoEscape; + withFileTypes; + includeChildMatches; + /** + * The options provided to the constructor. + */ + opts; + /** + * An array of parsed immutable {@link Pattern} objects. + */ + patterns; + /** + * All options are stored as properties on the `Glob` object. + * + * See {@link GlobOptions} for full options descriptions. + * + * Note that a previous `Glob` object can be passed as the + * `GlobOptions` to another `Glob` instantiation to re-use settings + * and caches with a new pattern. + * + * Traversal functions can be called multiple times to run the walk + * again. 
+ */ + constructor(pattern, opts) { + /* c8 ignore start */ + if (!opts) + throw new TypeError('glob options required'); + /* c8 ignore stop */ + this.withFileTypes = !!opts.withFileTypes; + this.signal = opts.signal; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.dotRelative = !!opts.dotRelative; + this.nodir = !!opts.nodir; + this.mark = !!opts.mark; + if (!opts.cwd) { + this.cwd = ''; + } + else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) { + opts.cwd = fileURLToPath(opts.cwd); + } + this.cwd = opts.cwd || ''; + this.root = opts.root; + this.magicalBraces = !!opts.magicalBraces; + this.nobrace = !!opts.nobrace; + this.noext = !!opts.noext; + this.realpath = !!opts.realpath; + this.absolute = opts.absolute; + this.includeChildMatches = opts.includeChildMatches !== false; + this.noglobstar = !!opts.noglobstar; + this.matchBase = !!opts.matchBase; + this.maxDepth = + typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity; + this.stat = !!opts.stat; + this.ignore = opts.ignore; + if (this.withFileTypes && this.absolute !== undefined) { + throw new Error('cannot set absolute and withFileTypes:true'); + } + if (typeof pattern === 'string') { + pattern = [pattern]; + } + this.windowsPathsNoEscape = + !!opts.windowsPathsNoEscape || + opts.allowWindowsEscape === + false; + if (this.windowsPathsNoEscape) { + pattern = pattern.map(p => p.replace(/\\/g, '/')); + } + if (this.matchBase) { + if (opts.noglobstar) { + throw new TypeError('base matching requires globstar'); + } + pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`)); + } + this.pattern = pattern; + this.platform = opts.platform || defaultPlatform; + this.opts = { ...opts, platform: this.platform }; + if (opts.scurry) { + this.scurry = opts.scurry; + if (opts.nocase !== undefined && + opts.nocase !== opts.scurry.nocase) { + throw new Error('nocase option contradicts provided scurry option'); + } + } + else { + const Scurry = opts.platform === 'win32' ? PathScurryWin32 + : opts.platform === 'darwin' ? PathScurryDarwin + : opts.platform ? PathScurryPosix + : PathScurry; + this.scurry = new Scurry(this.cwd, { + nocase: opts.nocase, + fs: opts.fs, + }); + } + this.nocase = this.scurry.nocase; + // If you do nocase:true on a case-sensitive file system, then + // we need to use regexps instead of strings for non-magic + // path portions, because statting `aBc` won't return results + // for the file `AbC` for example. + const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32'; + const mmo = { + // default nocase based on platform + ...opts, + dot: this.dot, + matchBase: this.matchBase, + nobrace: this.nobrace, + nocase: this.nocase, + nocaseMagicOnly, + nocomment: true, + noext: this.noext, + nonegate: true, + optimizationLevel: 2, + platform: this.platform, + windowsPathsNoEscape: this.windowsPathsNoEscape, + debug: !!this.opts.debug, + }; + const mms = this.pattern.map(p => new Minimatch(p, mmo)); + const [matchSet, globParts] = mms.reduce((set, m) => { + set[0].push(...m.set); + set[1].push(...m.globParts); + return set; + }, [[], []]); + this.patterns = matchSet.map((set, i) => { + const g = globParts[i]; + /* c8 ignore start */ + if (!g) + throw new Error('invalid pattern object'); + /* c8 ignore stop */ + return new Pattern(set, g, 0, this.platform); + }); + } + async walk() { + // Walkers always return array of Path objects, so we just have to + // coerce them into the right shape. 
It will have already called + // realpath() if the option was set to do so, so we know that's cached. + // start out knowing the cwd, at least + return [ + ...(await new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walk()), + ]; + } + walkSync() { + return [ + ...new GlobWalker(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).walkSync(), + ]; + } + stream() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).stream(); + } + streamSync() { + return new GlobStream(this.patterns, this.scurry.cwd, { + ...this.opts, + maxDepth: this.maxDepth !== Infinity ? + this.maxDepth + this.scurry.cwd.depth() + : Infinity, + platform: this.platform, + nocase: this.nocase, + includeChildMatches: this.includeChildMatches, + }).streamSync(); + } + /** + * Default sync iteration function. Returns a Generator that + * iterates over the results. + */ + iterateSync() { + return this.streamSync()[Symbol.iterator](); + } + [Symbol.iterator]() { + return this.iterateSync(); + } + /** + * Default async iteration function. Returns an AsyncGenerator that + * iterates over the results. + */ + iterate() { + return this.stream()[Symbol.asyncIterator](); + } + [Symbol.asyncIterator]() { + return this.iterate(); + } +} +//# sourceMappingURL=glob.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js new file mode 100644 index 0000000000000..ba2321ab868d0 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js @@ -0,0 +1,23 @@ +import { Minimatch } from 'minimatch'; +/** + * Return true if the patterns provided contain any magic glob characters, + * given the options provided. + * + * Brace expansion is not considered "magic" unless the `magicalBraces` option + * is set, as brace expansion just turns one string into an array of strings. + * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and + * `'xby'` both do not contain any magic glob characters, and it's treated the + * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true` + * is in the options, brace expansion _is_ treated as a pattern having magic. 
+ */ +export const hasMagic = (pattern, options = {}) => { + if (!Array.isArray(pattern)) { + pattern = [pattern]; + } + for (const p of pattern) { + if (new Minimatch(p, options).hasMagic()) + return true; + } + return false; +}; +//# sourceMappingURL=has-magic.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js new file mode 100644 index 0000000000000..539c4a4fdebc4 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js @@ -0,0 +1,115 @@ +// give it a pattern, and it'll be able to tell you if +// a given path should be ignored. +// Ignoring a path ignores its children if the pattern ends in /** +// Ignores are always parsed in dot:true mode +import { Minimatch } from 'minimatch'; +import { Pattern } from './pattern.js'; +const defaultPlatform = (typeof process === 'object' && + process && + typeof process.platform === 'string') ? + process.platform + : 'linux'; +/** + * Class used to process ignored patterns + */ +export class Ignore { + relative; + relativeChildren; + absolute; + absoluteChildren; + platform; + mmopts; + constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) { + this.relative = []; + this.absolute = []; + this.relativeChildren = []; + this.absoluteChildren = []; + this.platform = platform; + this.mmopts = { + dot: true, + nobrace, + nocase, + noext, + noglobstar, + optimizationLevel: 2, + platform, + nocomment: true, + nonegate: true, + }; + for (const ign of ignored) + this.add(ign); + } + add(ign) { + // this is a little weird, but it gives us a clean set of optimized + // minimatch matchers, without getting tripped up if one of them + // ends in /** inside a brace section, and it's only inefficient at + // the start of the walk, not along it. + // It'd be nice if the Pattern class just had a .test() method, but + // handling globstars is a bit of a pita, and that code already lives + // in minimatch anyway. + // Another way would be if maybe Minimatch could take its set/globParts + // as an option, and then we could at least just use Pattern to test + // for absolute-ness. + // Yet another way, Minimatch could take an array of glob strings, and + // a cwd option, and do the right thing. + const mm = new Minimatch(ign, this.mmopts); + for (let i = 0; i < mm.set.length; i++) { + const parsed = mm.set[i]; + const globParts = mm.globParts[i]; + /* c8 ignore start */ + if (!parsed || !globParts) { + throw new Error('invalid pattern object'); + } + // strip off leading ./ portions + // https://github.com/isaacs/node-glob/issues/570 + while (parsed[0] === '.' 
&& globParts[0] === '.') { + parsed.shift(); + globParts.shift(); + } + /* c8 ignore stop */ + const p = new Pattern(parsed, globParts, 0, this.platform); + const m = new Minimatch(p.globString(), this.mmopts); + const children = globParts[globParts.length - 1] === '**'; + const absolute = p.isAbsolute(); + if (absolute) + this.absolute.push(m); + else + this.relative.push(m); + if (children) { + if (absolute) + this.absoluteChildren.push(m); + else + this.relativeChildren.push(m); + } + } + } + ignored(p) { + const fullpath = p.fullpath(); + const fullpaths = `${fullpath}/`; + const relative = p.relative() || '.'; + const relatives = `${relative}/`; + for (const m of this.relative) { + if (m.match(relative) || m.match(relatives)) + return true; + } + for (const m of this.absolute) { + if (m.match(fullpath) || m.match(fullpaths)) + return true; + } + return false; + } + childrenIgnored(p) { + const fullpath = p.fullpath() + '/'; + const relative = (p.relative() || '.') + '/'; + for (const m of this.relativeChildren) { + if (m.match(relative)) + return true; + } + for (const m of this.absoluteChildren) { + if (m.match(fullpath)) + return true; + } + return false; + } +} +//# sourceMappingURL=ignore.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js new file mode 100644 index 0000000000000..e15c1f9c4cb03 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js @@ -0,0 +1,55 @@ +import { escape, unescape } from 'minimatch'; +import { Glob } from './glob.js'; +import { hasMagic } from './has-magic.js'; +export { escape, unescape } from 'minimatch'; +export { Glob } from './glob.js'; +export { hasMagic } from './has-magic.js'; +export { Ignore } from './ignore.js'; +export function globStreamSync(pattern, options = {}) { + return new Glob(pattern, options).streamSync(); +} +export function globStream(pattern, options = {}) { + return new Glob(pattern, options).stream(); +} +export function globSync(pattern, options = {}) { + return new Glob(pattern, options).walkSync(); +} +async function glob_(pattern, options = {}) { + return new Glob(pattern, options).walk(); +} +export function globIterateSync(pattern, options = {}) { + return new Glob(pattern, options).iterateSync(); +} +export function globIterate(pattern, options = {}) { + return new Glob(pattern, options).iterate(); +} +// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc +export const streamSync = globStreamSync; +export const stream = Object.assign(globStream, { sync: globStreamSync }); +export const iterateSync = globIterateSync; +export const iterate = Object.assign(globIterate, { + sync: globIterateSync, +}); +export const sync = Object.assign(globSync, { + stream: globStreamSync, + iterate: globIterateSync, +}); +export const glob = Object.assign(glob_, { + glob: glob_, + globSync, + sync, + globStream, + stream, + globStreamSync, + streamSync, + globIterate, + iterate, + globIterateSync, + iterateSync, + Glob, + hasMagic, + escape, + unescape, +}); +glob.glob = glob; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/package.json @@ -0,0 +1,3 
@@ +{ + "type": "module" +} diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js new file mode 100644 index 0000000000000..b41defa10c6a3 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js @@ -0,0 +1,215 @@ +// this is just a very light wrapper around 2 arrays with an offset index +import { GLOBSTAR } from 'minimatch'; +const isPatternList = (pl) => pl.length >= 1; +const isGlobList = (gl) => gl.length >= 1; +/** + * An immutable-ish view on an array of glob parts and their parsed + * results + */ +export class Pattern { + #patternList; + #globList; + #index; + length; + #platform; + #rest; + #globString; + #isDrive; + #isUNC; + #isAbsolute; + #followGlobstar = true; + constructor(patternList, globList, index, platform) { + if (!isPatternList(patternList)) { + throw new TypeError('empty pattern list'); + } + if (!isGlobList(globList)) { + throw new TypeError('empty glob list'); + } + if (globList.length !== patternList.length) { + throw new TypeError('mismatched pattern list and glob list lengths'); + } + this.length = patternList.length; + if (index < 0 || index >= this.length) { + throw new TypeError('index out of range'); + } + this.#patternList = patternList; + this.#globList = globList; + this.#index = index; + this.#platform = platform; + // normalize root entries of absolute patterns on initial creation. + if (this.#index === 0) { + // c: => ['c:/'] + // C:/ => ['C:/'] + // C:/x => ['C:/', 'x'] + // //host/share => ['//host/share/'] + // //host/share/ => ['//host/share/'] + // //host/share/x => ['//host/share/', 'x'] + // /etc => ['/', 'etc'] + // / => ['/'] + if (this.isUNC()) { + // '' / '' / 'host' / 'share' + const [p0, p1, p2, p3, ...prest] = this.#patternList; + const [g0, g1, g2, g3, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = [p0, p1, p2, p3, ''].join('/'); + const g = [g0, g1, g2, g3, ''].join('/'); + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + else if (this.isDrive() || this.isAbsolute()) { + const [p1, ...prest] = this.#patternList; + const [g1, ...grest] = this.#globList; + if (prest[0] === '') { + // ends in / + prest.shift(); + grest.shift(); + } + const p = p1 + '/'; + const g = g1 + '/'; + this.#patternList = [p, ...prest]; + this.#globList = [g, ...grest]; + this.length = this.#patternList.length; + } + } + } + /** + * The first entry in the parsed list of patterns + */ + pattern() { + return this.#patternList[this.#index]; + } + /** + * true of if pattern() returns a string + */ + isString() { + return typeof this.#patternList[this.#index] === 'string'; + } + /** + * true of if pattern() returns GLOBSTAR + */ + isGlobstar() { + return this.#patternList[this.#index] === GLOBSTAR; + } + /** + * true if pattern() returns a regexp + */ + isRegExp() { + return this.#patternList[this.#index] instanceof RegExp; + } + /** + * The /-joined set of glob parts that make up this pattern + */ + globString() { + return (this.#globString = + this.#globString || + (this.#index === 0 ? + this.isAbsolute() ? 
+ this.#globList[0] + this.#globList.slice(1).join('/') + : this.#globList.join('/') + : this.#globList.slice(this.#index).join('/'))); + } + /** + * true if there are more pattern parts after this one + */ + hasMore() { + return this.length > this.#index + 1; + } + /** + * The rest of the pattern after this part, or null if this is the end + */ + rest() { + if (this.#rest !== undefined) + return this.#rest; + if (!this.hasMore()) + return (this.#rest = null); + this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform); + this.#rest.#isAbsolute = this.#isAbsolute; + this.#rest.#isUNC = this.#isUNC; + this.#rest.#isDrive = this.#isDrive; + return this.#rest; + } + /** + * true if the pattern represents a //unc/path/ on windows + */ + isUNC() { + const pl = this.#patternList; + return this.#isUNC !== undefined ? + this.#isUNC + : (this.#isUNC = + this.#platform === 'win32' && + this.#index === 0 && + pl[0] === '' && + pl[1] === '' && + typeof pl[2] === 'string' && + !!pl[2] && + typeof pl[3] === 'string' && + !!pl[3]); + } + // pattern like C:/... + // split = ['C:', ...] + // XXX: would be nice to handle patterns like `c:*` to test the cwd + // in c: for *, but I don't know of a way to even figure out what that + // cwd is without actually chdir'ing into it? + /** + * True if the pattern starts with a drive letter on Windows + */ + isDrive() { + const pl = this.#patternList; + return this.#isDrive !== undefined ? + this.#isDrive + : (this.#isDrive = + this.#platform === 'win32' && + this.#index === 0 && + this.length > 1 && + typeof pl[0] === 'string' && + /^[a-z]:$/i.test(pl[0])); + } + // pattern = '/' or '/...' or '/x/...' + // split = ['', ''] or ['', ...] or ['', 'x', ...] + // Drive and UNC both considered absolute on windows + /** + * True if the pattern is rooted on an absolute path + */ + isAbsolute() { + const pl = this.#patternList; + return this.#isAbsolute !== undefined ? + this.#isAbsolute + : (this.#isAbsolute = + (pl[0] === '' && pl.length > 1) || + this.isDrive() || + this.isUNC()); + } + /** + * consume the root of the pattern, and return it + */ + root() { + const p = this.#patternList[0]; + return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ? + p + : ''; + } + /** + * Check to see if the current globstar pattern is allowed to follow + * a symbolic link. 
+ */ + checkFollowGlobstar() { + return !(this.#index === 0 || + !this.isGlobstar() || + !this.#followGlobstar); + } + /** + * Mark that the current globstar pattern is following a symbolic link + */ + markFollowGlobstar() { + if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar) + return false; + this.#followGlobstar = false; + return true; + } +} +//# sourceMappingURL=pattern.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js new file mode 100644 index 0000000000000..f874892ffed0c --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js @@ -0,0 +1,294 @@ +// synchronous utility for filtering entries and calculating subwalks +import { GLOBSTAR } from 'minimatch'; +/** + * A cache of which patterns have been processed for a given Path + */ +export class HasWalkedCache { + store; + constructor(store = new Map()) { + this.store = store; + } + copy() { + return new HasWalkedCache(new Map(this.store)); + } + hasWalked(target, pattern) { + return this.store.get(target.fullpath())?.has(pattern.globString()); + } + storeWalked(target, pattern) { + const fullpath = target.fullpath(); + const cached = this.store.get(fullpath); + if (cached) + cached.add(pattern.globString()); + else + this.store.set(fullpath, new Set([pattern.globString()])); + } +} +/** + * A record of which paths have been matched in a given walk step, + * and whether they only are considered a match if they are a directory, + * and whether their absolute or relative path should be returned. + */ +export class MatchRecord { + store = new Map(); + add(target, absolute, ifDir) { + const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0); + const current = this.store.get(target); + this.store.set(target, current === undefined ? n : n & current); + } + // match, absolute, ifdir + entries() { + return [...this.store.entries()].map(([path, n]) => [ + path, + !!(n & 2), + !!(n & 1), + ]); + } +} +/** + * A collection of patterns that must be processed in a subsequent step + * for a given path. + */ +export class SubWalks { + store = new Map(); + add(target, pattern) { + if (!target.canReaddir()) { + return; + } + const subs = this.store.get(target); + if (subs) { + if (!subs.find(p => p.globString() === pattern.globString())) { + subs.push(pattern); + } + } + else + this.store.set(target, [pattern]); + } + get(target) { + const subs = this.store.get(target); + /* c8 ignore start */ + if (!subs) { + throw new Error('attempting to walk unknown path'); + } + /* c8 ignore stop */ + return subs; + } + entries() { + return this.keys().map(k => [k, this.store.get(k)]); + } + keys() { + return [...this.store.keys()].filter(t => t.canReaddir()); + } +} +/** + * The class that processes patterns for a given path. + * + * Handles child entry filtering, and determining whether a path's + * directory contents must be read. + */ +export class Processor { + hasWalkedCache; + matches = new MatchRecord(); + subwalks = new SubWalks(); + patterns; + follow; + dot; + opts; + constructor(opts, hasWalkedCache) { + this.opts = opts; + this.follow = !!opts.follow; + this.dot = !!opts.dot; + this.hasWalkedCache = + hasWalkedCache ? 
hasWalkedCache.copy() : new HasWalkedCache(); + } + processPatterns(target, patterns) { + this.patterns = patterns; + const processingSet = patterns.map(p => [target, p]); + // map of paths to the magic-starting subwalks they need to walk + // first item in patterns is the filter + for (let [t, pattern] of processingSet) { + this.hasWalkedCache.storeWalked(t, pattern); + const root = pattern.root(); + const absolute = pattern.isAbsolute() && this.opts.absolute !== false; + // start absolute patterns at root + if (root) { + t = t.resolve(root === '/' && this.opts.root !== undefined ? + this.opts.root + : root); + const rest = pattern.rest(); + if (!rest) { + this.matches.add(t, true, false); + continue; + } + else { + pattern = rest; + } + } + if (t.isENOENT()) + continue; + let p; + let rest; + let changed = false; + while (typeof (p = pattern.pattern()) === 'string' && + (rest = pattern.rest())) { + const c = t.resolve(p); + t = c; + pattern = rest; + changed = true; + } + p = pattern.pattern(); + rest = pattern.rest(); + if (changed) { + if (this.hasWalkedCache.hasWalked(t, pattern)) + continue; + this.hasWalkedCache.storeWalked(t, pattern); + } + // now we have either a final string for a known entry, + // more strings for an unknown entry, + // or a pattern starting with magic, mounted on t. + if (typeof p === 'string') { + // must not be final entry, otherwise we would have + // concatenated it earlier. + const ifDir = p === '..' || p === '' || p === '.'; + this.matches.add(t.resolve(p), absolute, ifDir); + continue; + } + else if (p === GLOBSTAR) { + // if no rest, match and subwalk pattern + // if rest, process rest and subwalk pattern + // if it's a symlink, but we didn't get here by way of a + // globstar match (meaning it's the first time THIS globstar + // has traversed a symlink), then we follow it. Otherwise, stop. + if (!t.isSymbolicLink() || + this.follow || + pattern.checkFollowGlobstar()) { + this.subwalks.add(t, pattern); + } + const rp = rest?.pattern(); + const rrest = rest?.rest(); + if (!rest || ((rp === '' || rp === '.') && !rrest)) { + // only HAS to be a dir if it ends in **/ or **/. + // but ending in ** will match files as well. + this.matches.add(t, absolute, rp === '' || rp === '.'); + } + else { + if (rp === '..') { + // this would mean you're matching **/.. at the fs root, + // and no thanks, I'm not gonna test that specific case. 
+ /* c8 ignore start */ + const tp = t.parent || t; + /* c8 ignore stop */ + if (!rrest) + this.matches.add(tp, absolute, true); + else if (!this.hasWalkedCache.hasWalked(tp, rrest)) { + this.subwalks.add(tp, rrest); + } + } + } + } + else if (p instanceof RegExp) { + this.subwalks.add(t, pattern); + } + } + return this; + } + subwalkTargets() { + return this.subwalks.keys(); + } + child() { + return new Processor(this.opts, this.hasWalkedCache); + } + // return a new Processor containing the subwalks for each + // child entry, and a set of matches, and + // a hasWalkedCache that's a copy of this one + // then we're going to call + filterEntries(parent, entries) { + const patterns = this.subwalks.get(parent); + // put matches and entry walks into the results processor + const results = this.child(); + for (const e of entries) { + for (const pattern of patterns) { + const absolute = pattern.isAbsolute(); + const p = pattern.pattern(); + const rest = pattern.rest(); + if (p === GLOBSTAR) { + results.testGlobstar(e, pattern, rest, absolute); + } + else if (p instanceof RegExp) { + results.testRegExp(e, p, rest, absolute); + } + else { + results.testString(e, p, rest, absolute); + } + } + } + return results; + } + testGlobstar(e, pattern, rest, absolute) { + if (this.dot || !e.name.startsWith('.')) { + if (!pattern.hasMore()) { + this.matches.add(e, absolute, false); + } + if (e.canReaddir()) { + // if we're in follow mode or it's not a symlink, just keep + // testing the same pattern. If there's more after the globstar, + // then this symlink consumes the globstar. If not, then we can + // follow at most ONE symlink along the way, so we mark it, which + // also checks to ensure that it wasn't already marked. + if (this.follow || !e.isSymbolicLink()) { + this.subwalks.add(e, pattern); + } + else if (e.isSymbolicLink()) { + if (rest && pattern.checkFollowGlobstar()) { + this.subwalks.add(e, rest); + } + else if (pattern.markFollowGlobstar()) { + this.subwalks.add(e, pattern); + } + } + } + } + // if the NEXT thing matches this entry, then also add + // the rest. + if (rest) { + const rp = rest.pattern(); + if (typeof rp === 'string' && + // dots and empty were handled already + rp !== '..' && + rp !== '' && + rp !== '.') { + this.testString(e, rp, rest.rest(), absolute); + } + else if (rp === '..') { + /* c8 ignore start */ + const ep = e.parent || e; + /* c8 ignore stop */ + this.subwalks.add(ep, rest); + } + else if (rp instanceof RegExp) { + this.testRegExp(e, rp, rest.rest(), absolute); + } + } + } + testRegExp(e, p, rest, absolute) { + if (!p.test(e.name)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } + testString(e, p, rest, absolute) { + // should never happen? + if (!e.isNamed(p)) + return; + if (!rest) { + this.matches.add(e, absolute, false); + } + else { + this.subwalks.add(e, rest); + } + } +} +//# sourceMappingURL=processor.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js new file mode 100644 index 0000000000000..3d68196c4f175 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js @@ -0,0 +1,381 @@ +/** + * Single-use utility classes to provide functionality to the {@link Glob} + * methods. 
+ * + * @module + */ +import { Minipass } from 'minipass'; +import { Ignore } from './ignore.js'; +import { Processor } from './processor.js'; +const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts) + : Array.isArray(ignore) ? new Ignore(ignore, opts) + : ignore; +/** + * basic walking utilities that all the glob walker types use + */ +export class GlobUtil { + path; + patterns; + opts; + seen = new Set(); + paused = false; + aborted = false; + #onResume = []; + #ignore; + #sep; + signal; + maxDepth; + includeChildMatches; + constructor(patterns, path, opts) { + this.patterns = patterns; + this.path = path; + this.opts = opts; + this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/'; + this.includeChildMatches = opts.includeChildMatches !== false; + if (opts.ignore || !this.includeChildMatches) { + this.#ignore = makeIgnore(opts.ignore ?? [], opts); + if (!this.includeChildMatches && + typeof this.#ignore.add !== 'function') { + const m = 'cannot ignore child matches, ignore lacks add() method.'; + throw new Error(m); + } + } + // ignore, always set with maxDepth, but it's optional on the + // GlobOptions type + /* c8 ignore start */ + this.maxDepth = opts.maxDepth || Infinity; + /* c8 ignore stop */ + if (opts.signal) { + this.signal = opts.signal; + this.signal.addEventListener('abort', () => { + this.#onResume.length = 0; + }); + } + } + #ignored(path) { + return this.seen.has(path) || !!this.#ignore?.ignored?.(path); + } + #childrenIgnored(path) { + return !!this.#ignore?.childrenIgnored?.(path); + } + // backpressure mechanism + pause() { + this.paused = true; + } + resume() { + /* c8 ignore start */ + if (this.signal?.aborted) + return; + /* c8 ignore stop */ + this.paused = false; + let fn = undefined; + while (!this.paused && (fn = this.#onResume.shift())) { + fn(); + } + } + onResume(fn) { + if (this.signal?.aborted) + return; + /* c8 ignore start */ + if (!this.paused) { + fn(); + } + else { + /* c8 ignore stop */ + this.#onResume.push(fn); + } + } + // do the requisite realpath/stat checking, and return the path + // to add or undefined to filter it out. + async matchCheck(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || (await e.realpath()); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? await e.lstat() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = await s.realpath(); + /* c8 ignore start */ + if (target && (target.isUnknown() || this.opts.stat)) { + await target.lstat(); + } + /* c8 ignore stop */ + } + return this.matchCheckTest(s, ifDir); + } + matchCheckTest(e, ifDir) { + return (e && + (this.maxDepth === Infinity || e.depth() <= this.maxDepth) && + (!ifDir || e.canReaddir()) && + (!this.opts.nodir || !e.isDirectory()) && + (!this.opts.nodir || + !this.opts.follow || + !e.isSymbolicLink() || + !e.realpathCached()?.isDirectory()) && + !this.#ignored(e)) ? + e + : undefined; + } + matchCheckSync(e, ifDir) { + if (ifDir && this.opts.nodir) + return undefined; + let rpc; + if (this.opts.realpath) { + rpc = e.realpathCached() || e.realpathSync(); + if (!rpc) + return undefined; + e = rpc; + } + const needStat = e.isUnknown() || this.opts.stat; + const s = needStat ? 
e.lstatSync() : e; + if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) { + const target = s.realpathSync(); + if (target && (target?.isUnknown() || this.opts.stat)) { + target.lstatSync(); + } + } + return this.matchCheckTest(s, ifDir); + } + matchFinish(e, absolute) { + if (this.#ignored(e)) + return; + // we know we have an ignore if this is false, but TS doesn't + if (!this.includeChildMatches && this.#ignore?.add) { + const ign = `${e.relativePosix()}/**`; + this.#ignore.add(ign); + } + const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute; + this.seen.add(e); + const mark = this.opts.mark && e.isDirectory() ? this.#sep : ''; + // ok, we have what we need! + if (this.opts.withFileTypes) { + this.matchEmit(e); + } + else if (abs) { + const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath(); + this.matchEmit(abs + mark); + } + else { + const rel = this.opts.posix ? e.relativePosix() : e.relative(); + const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ? + '.' + this.#sep + : ''; + this.matchEmit(!rel ? '.' + mark : pre + rel + mark); + } + } + async match(e, absolute, ifDir) { + const p = await this.matchCheck(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + matchSync(e, absolute, ifDir) { + const p = this.matchCheckSync(e, ifDir); + if (p) + this.matchFinish(p, absolute); + } + walkCB(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2(target, patterns, new Processor(this.opts), cb); + } + walkCB2(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const childrenCached = t.readdirCached(); + if (t.calledReaddir()) + this.walkCB3(t, childrenCached, processor, next); + else { + t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true); + } + } + next(); + } + walkCB3(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + tasks++; + this.match(m, absolute, ifDir).then(() => next()); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2(target, patterns, processor.child(), next); + } + next(); + } + walkCBSync(target, patterns, cb) { + /* c8 ignore start */ + if (this.signal?.aborted) + cb(); + /* c8 ignore stop */ + this.walkCB2Sync(target, patterns, new Processor(this.opts), cb); + } + walkCB2Sync(target, patterns, processor, cb) { + if (this.#childrenIgnored(target)) + return cb(); + if (this.signal?.aborted) + cb(); + if (this.paused) { + this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb)); + return; + } + processor.processPatterns(target, patterns); + // done processing. all of the above is sync, can be abstracted out. + // subwalks is a map of paths to the entry filters they need + // matches is a map of paths to [absolute, ifDir] tuples. 
+ let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const t of processor.subwalkTargets()) { + if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) { + continue; + } + tasks++; + const children = t.readdirSync(); + this.walkCB3Sync(t, children, processor, next); + } + next(); + } + walkCB3Sync(target, entries, processor, cb) { + processor = processor.filterEntries(target, entries); + let tasks = 1; + const next = () => { + if (--tasks === 0) + cb(); + }; + for (const [m, absolute, ifDir] of processor.matches.entries()) { + if (this.#ignored(m)) + continue; + this.matchSync(m, absolute, ifDir); + } + for (const [target, patterns] of processor.subwalks.entries()) { + tasks++; + this.walkCB2Sync(target, patterns, processor.child(), next); + } + next(); + } +} +export class GlobWalker extends GlobUtil { + matches = new Set(); + constructor(patterns, path, opts) { + super(patterns, path, opts); + } + matchEmit(e) { + this.matches.add(e); + } + async walk() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + await this.path.lstat(); + } + await new Promise((res, rej) => { + this.walkCB(this.path, this.patterns, () => { + if (this.signal?.aborted) { + rej(this.signal.reason); + } + else { + res(this.matches); + } + }); + }); + return this.matches; + } + walkSync() { + if (this.signal?.aborted) + throw this.signal.reason; + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + // nothing for the callback to do, because this never pauses + this.walkCBSync(this.path, this.patterns, () => { + if (this.signal?.aborted) + throw this.signal.reason; + }); + return this.matches; + } +} +export class GlobStream extends GlobUtil { + results; + constructor(patterns, path, opts) { + super(patterns, path, opts); + this.results = new Minipass({ + signal: this.signal, + objectMode: true, + }); + this.results.on('drain', () => this.resume()); + this.results.on('resume', () => this.resume()); + } + matchEmit(e) { + this.results.write(e); + if (!this.results.flowing) + this.pause(); + } + stream() { + const target = this.path; + if (target.isUnknown()) { + target.lstat().then(() => { + this.walkCB(target, this.patterns, () => this.results.end()); + }); + } + else { + this.walkCB(target, this.patterns, () => this.results.end()); + } + return this.results; + } + streamSync() { + if (this.path.isUnknown()) { + this.path.lstatSync(); + } + this.walkCBSync(this.path, this.patterns, () => this.results.end()); + return this.results; + } +} +//# sourceMappingURL=walker.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/glob/package.json b/node_modules/@npmcli/package-json/node_modules/glob/package.json new file mode 100644 index 0000000000000..7be2c53bd5c9f --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/glob/package.json @@ -0,0 +1,97 @@ +{ + "author": "Isaac Z. 
Schlueter (https://blog.izs.me/)", + "name": "glob", + "description": "the most correct and second fastest glob implementation in JavaScript", + "version": "11.0.3", + "type": "module", + "tshy": { + "main": true, + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "bin": "./dist/esm/bin.mjs", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-glob.git" + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "npm run benchclean; git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --log-level warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts", + "profclean": "rm -f v8.log profile.txt", + "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts", + "prebench": "npm run prepare", + "bench": "bash benchmark.sh", + "preprof": "npm run prepare", + "prof": "bash prof.sh", + "benchclean": "node benchclean.cjs" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "dependencies": { + "foreground-child": "^3.3.1", + "jackspeak": "^4.1.1", + "minimatch": "^10.0.3", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^2.0.0" + }, + "devDependencies": { + "@types/node": "^24.0.1", + "memfs": "^4.17.2", + "mkdirp": "^3.0.1", + "prettier": "^3.5.3", + "rimraf": "^6.0.1", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.5" + }, + "tap": { + "before": "test/00-setup.ts" + }, + "license": "ISC", + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "engines": { + "node": "20 || >=22" + }, + "module": "./dist/esm/index.js" +} diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE new file mode 100644 index 0000000000000..45055763dc838 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE @@ -0,0 +1,13 @@ +Copyright (c) 2015, Rebecca Turner + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH +REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND +FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM +LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR +OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR +PERFORMANCE OF THIS SOFTWARE. 
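As an illustrative aside (not part of the vendored files): the glob entry points added above expose the same walk in four flavors — promise, sync, stream, and async iteration. A minimal sketch of how a consumer might call them, assuming the `glob` specifier resolves to this vendored copy; the pattern strings and ignore value are made-up examples:

import { glob, globSync, globStream, globIterate } from 'glob'

// promise API: resolves to an array of matching paths
const files = await glob('src/**/*.js', { ignore: 'node_modules/**' })
console.log(files.length)

// sync API: same matches, computed synchronously
const filesSync = globSync('src/**/*.js')
console.log(filesSync.length)

// stream API: an object-mode Minipass stream emitting one match at a time
globStream('**/*.md').on('data', p => console.log(p))

// async iteration: backed by the same stream machinery
for await (const p of globIterate('**/*.ts')) {
  console.log(p)
}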
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js new file mode 100644 index 0000000000000..efc1247d59d12 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js @@ -0,0 +1,122 @@ +'use strict' + +const parseUrl = require('./parse-url') + +// look for github shorthand inputs, such as npm/cli +const isGitHubShorthand = (arg) => { + // it cannot contain whitespace before the first # + // it cannot start with a / because that's probably an absolute file path + // but it must include a slash since repos are username/repository + // it cannot start with a . because that's probably a relative file path + // it cannot start with an @ because that's a scoped package if it passes the other tests + // it cannot contain a : before a # because that tells us that there's a protocol + // a second / may not exist before a # + const firstHash = arg.indexOf('#') + const firstSlash = arg.indexOf('/') + const secondSlash = arg.indexOf('/', firstSlash + 1) + const firstColon = arg.indexOf(':') + const firstSpace = /\s/.exec(arg) + const firstAt = arg.indexOf('@') + + const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash) + const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash) + const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash) + const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash) + const hasSlash = firstSlash > 0 + // if a # is found, what we really want to know is that the character + // immediately before # is not a / + const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/') + const doesNotStartWithDot = !arg.startsWith('.') + + return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash && + doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash && + secondSlashOnlyAfterHash +} + +module.exports = (giturl, opts, { gitHosts, protocols }) => { + if (!giturl) { + return + } + + const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl + const parsed = parseUrl(correctedUrl, protocols) + if (!parsed) { + return + } + + const gitHostShortcut = gitHosts.byShortcut[parsed.protocol] + const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.') + ? parsed.hostname.slice(4) + : parsed.hostname] + const gitHostName = gitHostShortcut || gitHostDomain + if (!gitHostName) { + return + } + + const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain] + let auth = null + if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) { + auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}` + } + + let committish = null + let user = null + let project = null + let defaultRepresentation = null + + try { + if (gitHostShortcut) { + let pathname = parsed.pathname.startsWith('/') ? 
parsed.pathname.slice(1) : parsed.pathname + const firstAt = pathname.indexOf('@') + // we ignore auth for shortcuts, so just trim it out + if (firstAt > -1) { + pathname = pathname.slice(firstAt + 1) + } + + const lastSlash = pathname.lastIndexOf('/') + if (lastSlash > -1) { + user = decodeURIComponent(pathname.slice(0, lastSlash)) + // we want nulls only, never empty strings + if (!user) { + user = null + } + project = decodeURIComponent(pathname.slice(lastSlash + 1)) + } else { + project = decodeURIComponent(pathname) + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (parsed.hash) { + committish = decodeURIComponent(parsed.hash.slice(1)) + } + + defaultRepresentation = 'shortcut' + } else { + if (!gitHostInfo.protocols.includes(parsed.protocol)) { + return + } + + const segments = gitHostInfo.extract(parsed) + if (!segments) { + return + } + + user = segments.user && decodeURIComponent(segments.user) + project = decodeURIComponent(segments.project) + committish = decodeURIComponent(segments.committish) + defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1) + } + } catch (err) { + /* istanbul ignore else */ + if (err instanceof URIError) { + return + } else { + throw err + } + } + + return [gitHostName, user, auth, project, committish, defaultRepresentation, opts] +} diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js new file mode 100644 index 0000000000000..2a88e95927772 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js @@ -0,0 +1,231 @@ +/* eslint-disable max-len */ + +'use strict' + +const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : '' +const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : '' +const formatHashFragment = (f) => f.toLowerCase() + .replace(/^\W+/g, '') // strip leading non-characters + .replace(/(? 
+ `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, user, project, committish }) => + `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath, path }) => + `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`, + browsetemplate: ({ domain, user, project, committish, treepath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) => + `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`, + docstemplate: ({ domain, user, project, treepath, committish }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`, + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`, + shortcuttemplate: ({ type, user, project, committish }) => + `${type}:${user}/${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ user, project, committish }) => + `${user}/${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, user, project }) => + `https://${domain}/${user}/${project}/issues`, + hashformat: formatHashFragment, +} + +const hosts = {} +hosts.github = { + // First two are insecure and generally shouldn't be used any more, but + // they are still supported. 
+ protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'github.com', + treepath: 'tree', + blobpath: 'blob', + editpath: 'edit', + filetemplate: ({ auth, user, project, committish, path }) => + `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`, + gittemplate: ({ auth, domain, user, project, committish }) => + `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, type, committish] = url.pathname.split('/', 5) + if (type && type !== 'tree') { + return + } + + if (!type) { + committish = url.hash.slice(1) + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish } + }, +} + +hosts.bitbucket = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'bitbucket.org', + treepath: 'src', + blobpath: 'src', + editpath: '?mode=edit', + edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (['get'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gitlab = { + protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gitlab.com', + treepath: 'tree', + blobpath: 'tree', + editpath: '-/edit', + httpstemplate: ({ auth, domain, user, project, committish }) => + `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + const path = url.pathname.slice(1) + if (path.includes('/-/') || path.includes('/archive.tar.gz')) { + return + } + + const segments = path.split('/') + let project = segments.pop() + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + const user = segments.join('/') + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +hosts.gist = { + protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'], + domain: 'gist.github.com', + editpath: 'edit', + sshtemplate: ({ domain, project, committish }) => + `git@${domain}:${project}.git${maybeJoin('#', committish)}`, + sshurltemplate: ({ domain, project, committish }) => + `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`, + edittemplate: ({ domain, user, project, committish, editpath }) => + `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`, + browsetemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + browsetreetemplate: ({ domain, project, committish, path, 
hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + browseblobtemplate: ({ domain, project, committish, path, hashformat }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`, + docstemplate: ({ domain, project, committish }) => + `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`, + httpstemplate: ({ domain, project, committish }) => + `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`, + filetemplate: ({ user, project, committish, path }) => + `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`, + shortcuttemplate: ({ type, project, committish }) => + `${type}:${project}${maybeJoin('#', committish)}`, + pathtemplate: ({ project, committish }) => + `${project}${maybeJoin('#', committish)}`, + bugstemplate: ({ domain, project }) => + `https://${domain}/${project}`, + gittemplate: ({ domain, project, committish }) => + `git://${domain}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ project, committish }) => + `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + if (aux === 'raw') { + return + } + + if (!project) { + if (!user) { + return + } + + project = user + user = null + } + + if (project.endsWith('.git')) { + project = project.slice(0, -4) + } + + return { user, project, committish: url.hash.slice(1) } + }, + hashformat: function (fragment) { + return fragment && 'file-' + formatHashFragment(fragment) + }, +} + +hosts.sourcehut = { + protocols: ['git+ssh:', 'https:'], + domain: 'git.sr.ht', + treepath: 'tree', + blobpath: 'tree', + filetemplate: ({ domain, user, project, committish, path }) => + `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`, + httpstemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`, + tarballtemplate: ({ domain, user, project, committish }) => + `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`, + bugstemplate: () => null, + extract: (url) => { + let [, user, project, aux] = url.pathname.split('/', 4) + + // tarball url + if (['archive'].includes(aux)) { + return + } + + if (project && project.endsWith('.git')) { + project = project.slice(0, -4) + } + + if (!user || !project) { + return + } + + return { user, project, committish: url.hash.slice(1) } + }, +} + +for (const [name, host] of Object.entries(hosts)) { + hosts[name] = Object.assign({}, defaults, host) +} + +module.exports = hosts diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js new file mode 100644 index 0000000000000..2a7100dcee6e7 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js @@ -0,0 +1,227 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const hosts = require('./hosts.js') +const fromUrl = require('./from-url.js') +const parseUrl = require('./parse-url.js') + +const cache = new LRUCache({ max: 1000 }) + +function unknownHostedUrl (url) { + try { + const { + protocol, + hostname, + pathname, + } = new URL(url) + + if (!hostname) { + return null + } + + const proto = /(?:git\+)http:$/.test(protocol) ? 
'http:' : 'https:' + const path = pathname.replace(/\.git$/, '') + return `${proto}//${hostname}${path}` + } catch { + return null + } +} + +class GitHost { + constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) { + Object.assign(this, GitHost.#gitHosts[type], { + type, + user, + auth, + project, + committish, + default: defaultRepresentation, + opts, + }) + } + + static #gitHosts = { byShortcut: {}, byDomain: {} } + static #protocols = { + 'git+ssh:': { name: 'sshurl' }, + 'ssh:': { name: 'sshurl' }, + 'git+https:': { name: 'https', auth: true }, + 'git:': { auth: true }, + 'http:': { auth: true }, + 'https:': { auth: true }, + 'git+http:': { auth: true }, + } + + static addHost (name, host) { + GitHost.#gitHosts[name] = host + GitHost.#gitHosts.byDomain[host.domain] = name + GitHost.#gitHosts.byShortcut[`${name}:`] = name + GitHost.#protocols[`${name}:`] = { name } + } + + static fromUrl (giturl, opts) { + if (typeof giturl !== 'string') { + return + } + + const key = giturl + JSON.stringify(opts || {}) + + if (!cache.has(key)) { + const hostArgs = fromUrl(giturl, opts, { + gitHosts: GitHost.#gitHosts, + protocols: GitHost.#protocols, + }) + cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined) + } + + return cache.get(key) + } + + static fromManifest (manifest, opts = {}) { + if (!manifest || typeof manifest !== 'object') { + return + } + + const r = manifest.repository + // TODO: look into also checking the `bugs`/`homepage` URLs + + const rurl = r && ( + typeof r === 'string' + ? r + : typeof r === 'object' && typeof r.url === 'string' + ? r.url + : null + ) + + if (!rurl) { + throw new Error('no repository') + } + + const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null + if (info) { + return info + } + const unk = unknownHostedUrl(rurl) + return GitHost.fromUrl(unk, opts) || unk + } + + static parseUrl (url) { + return parseUrl(url) + } + + #fill (template, opts) { + if (typeof template !== 'function') { + return null + } + + const options = { ...this, ...this.opts, ...opts } + + // the path should always be set so we don't end up with 'undefined' in urls + if (!options.path) { + options.path = '' + } + + // template functions will insert the leading slash themselves + if (options.path.startsWith('/')) { + options.path = options.path.slice(1) + } + + if (options.noCommittish) { + options.committish = null + } + + const result = template(options) + return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result + } + + hash () { + return this.committish ? `#${this.committish}` : '' + } + + ssh (opts) { + return this.#fill(this.sshtemplate, opts) + } + + sshurl (opts) { + return this.#fill(this.sshurltemplate, opts) + } + + browse (path, ...args) { + // not a string, treat path as opts + if (typeof path !== 'string') { + return this.#fill(this.browsetemplate, path) + } + + if (typeof args[0] !== 'string') { + return this.#fill(this.browsetreetemplate, { ...args[0], path }) + } + + return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path }) + } + + // If the path is known to be a file, then browseFile should be used. For some hosts + // the url is the same as browse, but for others like GitHub a file can use both `/tree/` + // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/` + // path will redirect to a specific commit. Using the `/blob/` path avoids this and + // does not redirect to a different commit. 
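// Illustrative note from the editor, not part of the upstream file: with the
// GitHub templates defined earlier in hosts.js, the tree/blob distinction
// described above plays out roughly like this (paths are made-up examples):
//
//   GitHost.fromUrl('github:npm/cli').browse('docs/index.md')
//   // -> https://github.com/npm/cli/tree/HEAD/docs/index.md (may redirect)
//
//   GitHost.fromUrl('github:npm/cli').browseFile('docs/index.md')
//   // -> https://github.com/npm/cli/blob/HEAD/docs/index.md (stable blob view)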
+ browseFile (path, ...args) { + if (typeof args[0] !== 'string') { + return this.#fill(this.browseblobtemplate, { ...args[0], path }) + } + + return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path }) + } + + docs (opts) { + return this.#fill(this.docstemplate, opts) + } + + bugs (opts) { + return this.#fill(this.bugstemplate, opts) + } + + https (opts) { + return this.#fill(this.httpstemplate, opts) + } + + git (opts) { + return this.#fill(this.gittemplate, opts) + } + + shortcut (opts) { + return this.#fill(this.shortcuttemplate, opts) + } + + path (opts) { + return this.#fill(this.pathtemplate, opts) + } + + tarball (opts) { + return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false }) + } + + file (path, opts) { + return this.#fill(this.filetemplate, { ...opts, path }) + } + + edit (path, opts) { + return this.#fill(this.edittemplate, { ...opts, path }) + } + + getDefaultRepresentation () { + return this.default + } + + toString (opts) { + if (this.default && typeof this[this.default] === 'function') { + return this[this.default](opts) + } + + return this.sshurl(opts) + } +} + +for (const [name, host] of Object.entries(hosts)) { + GitHost.addHost(name, host) +} + +module.exports = GitHost diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js new file mode 100644 index 0000000000000..7d5489c008ab4 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js @@ -0,0 +1,78 @@ +const url = require('url') + +const lastIndexOfBefore = (str, char, beforeChar) => { + const startPosition = str.indexOf(beforeChar) + return str.lastIndexOf(char, startPosition > -1 ? 
startPosition : Infinity) +} + +const safeUrl = (u) => { + try { + return new url.URL(u) + } catch { + // this fn should never throw + } +} + +// accepts input like git:github.com:user/repo and inserts the // after the first : +const correctProtocol = (arg, protocols) => { + const firstColon = arg.indexOf(':') + const proto = arg.slice(0, firstColon + 1) + if (Object.prototype.hasOwnProperty.call(protocols, proto)) { + return arg + } + + const firstAt = arg.indexOf('@') + if (firstAt > -1) { + if (firstAt > firstColon) { + return `git+ssh://${arg}` + } else { + return arg + } + } + + const doubleSlash = arg.indexOf('//') + if (doubleSlash === firstColon + 1) { + return arg + } + + return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}` +} + +// attempt to correct an scp style url so that it will parse with `new URL()` +const correctUrl = (giturl) => { + // ignore @ that come after the first hash since the denotes the start + // of a committish which can contain @ characters + const firstAt = lastIndexOfBefore(giturl, '@', '#') + // ignore colons that come after the hash since that could include colons such as: + // git@github.com:user/package-2#semver:^1.0.0 + const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#') + + if (lastColonBeforeHash > firstAt) { + // the last : comes after the first @ (or there is no @) + // like it would in: + // proto://hostname.com:user/repo + // username@hostname.com:user/repo + // :password@hostname.com:user/repo + // username:password@hostname.com:user/repo + // proto://username@hostname.com:user/repo + // proto://:password@hostname.com:user/repo + // proto://username:password@hostname.com:user/repo + // then we replace the last : with a / to create a valid path + giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1) + } + + if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) { + // we have no : at all + // as it would be in: + // username@hostname.com/user/repo + // then we prepend a protocol + giturl = `git+ssh://${giturl}` + } + + return giturl +} + +module.exports = (giturl, protocols) => { + const withProtocol = protocols ? 
correctProtocol(giturl, protocols) : giturl + return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol)) +} diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json new file mode 100644 index 0000000000000..5883a7d308d79 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json @@ -0,0 +1,61 @@ +{ + "name": "hosted-git-info", + "version": "9.0.0", + "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", + "main": "./lib/index.js", + "repository": { + "type": "git", + "url": "git+https://github.com/npm/hosted-git-info.git" + }, + "keywords": [ + "git", + "github", + "bitbucket", + "gitlab" + ], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/hosted-git-info/issues" + }, + "homepage": "https://github.com/npm/hosted-git-info", + "scripts": { + "posttest": "npm run lint", + "snap": "tap", + "test": "tap", + "test:coverage": "tap --coverage-report=html", + "lint": "npm run eslint", + "postlint": "template-oss-check", + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" + }, + "dependencies": { + "lru-cache": "^11.1.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.25.0", + "tap": "^16.0.1" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^20.17.0 || >=22.9.0" + }, + "tap": { + "color": 1, + "coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.25.0", + "publish": "true" + } +} diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md b/node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md new file mode 100644 index 0000000000000..8cb5cc6e616c0 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md @@ -0,0 +1,55 @@ +# Blue Oak Model License + +Version 1.0.0 + +## Purpose + +This license gives everyone as much permission to work with +this software as possible, while protecting contributors +from liability. + +## Acceptance + +In order to receive this license, you must agree to its +rules. The rules of this license are both obligations +under that agreement and conditions to your license. +You must not do anything with this software that triggers +a rule that you cannot or will not follow. + +## Copyright + +Each contributor licenses you to do everything with this +software that would otherwise infringe that contributor's +copyright in it. + +## Notices + +You must ensure that everyone who gets a copy of +any part of this software from you, with or without +changes, also gets the text of this license or a link to +. + +## Excuse + +If anyone notifies you in writing that you have not +complied with [Notices](#notices), you can keep your +license by taking all practical steps to comply within 30 +days after the notice. If you do not do so, your license +ends immediately. + +## Patent + +Each contributor licenses you to do everything with this +software that would otherwise infringe any patent claims +they can license or become able to license. + +## Reliability + +No contributor can revoke this license. 
+ +## No Liability + +**_As far as the law allows, this software comes as is, +without any warranty or condition, and no contributor +will be liable to anyone for any damages related to this +software or this license, under any kind of legal claim._** diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js new file mode 100644 index 0000000000000..543412746cc8f --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js @@ -0,0 +1,947 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0; +const node_util_1 = require("node:util"); +// it's a tiny API, just cast it inline, it's fine +//@ts-ignore +const cliui_1 = __importDefault(require("@isaacs/cliui")); +const node_path_1 = require("node:path"); +const isConfigType = (t) => typeof t === 'string' && + (t === 'string' || t === 'number' || t === 'boolean'); +exports.isConfigType = isConfigType; +const isValidValue = (v, type, multi) => { + if (multi) { + if (!Array.isArray(v)) + return false; + return !v.some((v) => !isValidValue(v, type, false)); + } + if (Array.isArray(v)) + return false; + return typeof v === type; +}; +const isValidOption = (v, vo) => !!vo && + (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v)); +/** + * Determine whether an unknown object is a {@link ConfigOption} based only + * on its `type` and `multiple` property + */ +const isConfigOptionOfType = (o, type, multi) => !!o && + typeof o === 'object' && + (0, exports.isConfigType)(o.type) && + o.type === type && + !!o.multiple === multi; +exports.isConfigOptionOfType = isConfigOptionOfType; +/** + * Determine whether an unknown object is a {@link ConfigOption} based on + * it having all valid properties + */ +const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) && + undefOrType(o.short, 'string') && + undefOrType(o.description, 'string') && + undefOrType(o.hint, 'string') && + undefOrType(o.validate, 'function') && + (o.type === 'boolean' ? + o.validOptions === undefined + : undefOrTypeArray(o.validOptions, o.type)) && + (o.default === undefined || isValidValue(o.default, type, multi)); +exports.isConfigOption = isConfigOption; +const isHeading = (r) => r.type === 'heading'; +const isDescription = (r) => r.type === 'description'; +const width = Math.min(process?.stdout?.columns ?? 80, 80); +// indentation spaces from heading level +const indent = (n) => (n - 1) * 2; +const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')] + .join(' ') + .trim() + .toUpperCase() + .replace(/ /g, '_'); +const toEnvVal = (value, delim = '\n') => { + const str = typeof value === 'string' ? value + : typeof value === 'boolean' ? + value ? '1' + : '0' + : typeof value === 'number' ? String(value) + : Array.isArray(value) ? + value.map((v) => toEnvVal(v)).join(delim) + : /* c8 ignore start */ undefined; + if (typeof str !== 'string') { + throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } }); + } + /* c8 ignore stop */ + return str; +}; +const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ? + env ? 
env.split(delim).map(v => fromEnvVal(v, type, false)) + : [] + : type === 'string' ? env + : type === 'boolean' ? env === '1' + : +env.trim()); +const undefOrType = (v, t) => v === undefined || typeof v === t; +const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t)); +// print the value type, for error message reporting +const valueType = (v) => typeof v === 'string' ? 'string' + : typeof v === 'boolean' ? 'boolean' + : typeof v === 'number' ? 'number' + : Array.isArray(v) ? + `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]` + : `${v.type}${v.multiple ? '[]' : ''}`; +const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ? + types[0] + : `(${types.join('|')})`; +const validateFieldMeta = (field, fieldMeta) => { + if (fieldMeta) { + if (field.type !== undefined && field.type !== fieldMeta.type) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: [fieldMeta.type, undefined], + }, + }); + } + if (field.multiple !== undefined && + !!field.multiple !== fieldMeta.multiple) { + throw new TypeError(`invalid multiple`, { + cause: { + found: field.multiple, + wanted: [fieldMeta.multiple, undefined], + }, + }); + } + return fieldMeta; + } + if (!(0, exports.isConfigType)(field.type)) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: ['string', 'number', 'boolean'], + }, + }); + } + return { + type: field.type, + multiple: !!field.multiple, + }; +}; +const validateField = (o, type, multiple) => { + const validateValidOptions = (def, validOptions) => { + if (!undefOrTypeArray(validOptions, type)) { + throw new TypeError('invalid validOptions', { + cause: { + found: validOptions, + wanted: valueType({ type, multiple: true }), + }, + }); + } + if (def !== undefined && validOptions !== undefined) { + const valid = Array.isArray(def) ? + def.every(v => validOptions.includes(v)) + : validOptions.includes(def); + if (!valid) { + throw new TypeError('invalid default value not in validOptions', { + cause: { + found: def, + wanted: validOptions, + }, + }); + } + } + }; + if (o.default !== undefined && + !isValidValue(o.default, type, multiple)) { + throw new TypeError('invalid default value', { + cause: { + found: o.default, + wanted: valueType({ type, multiple }), + }, + }); + } + if ((0, exports.isConfigOptionOfType)(o, 'number', false) || + (0, exports.isConfigOptionOfType)(o, 'number', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if ((0, exports.isConfigOptionOfType)(o, 'string', false) || + (0, exports.isConfigOptionOfType)(o, 'string', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) || + (0, exports.isConfigOptionOfType)(o, 'boolean', true)) { + if (o.hint !== undefined) { + throw new TypeError('cannot provide hint for flag'); + } + if (o.validOptions !== undefined) { + throw new TypeError('cannot provide validOptions for flag'); + } + } + return o; +}; +const toParseArgsOptionsConfig = (options) => { + return Object.entries(options).reduce((acc, [longOption, o]) => { + const p = { + type: 'string', + multiple: !!o.multiple, + ...(typeof o.short === 'string' ? 
{ short: o.short } : undefined), + }; + const setNoBool = () => { + if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) { + acc[`no-${longOption}`] = { + type: 'boolean', + multiple: !!o.multiple, + }; + } + }; + const setDefault = (def, fn) => { + if (def !== undefined) { + p.default = fn(def); + } + }; + if ((0, exports.isConfigOption)(o, 'number', false)) { + setDefault(o.default, String); + } + else if ((0, exports.isConfigOption)(o, 'number', true)) { + setDefault(o.default, d => d.map(v => String(v))); + } + else if ((0, exports.isConfigOption)(o, 'string', false) || + (0, exports.isConfigOption)(o, 'string', true)) { + setDefault(o.default, v => v); + } + else if ((0, exports.isConfigOption)(o, 'boolean', false) || + (0, exports.isConfigOption)(o, 'boolean', true)) { + p.type = 'boolean'; + setDefault(o.default, v => v); + setNoBool(); + } + acc[longOption] = p; + return acc; + }, {}); +}; +/** + * Class returned by the {@link jack} function and all configuration + * definition methods. This is what gets chained together. + */ +class Jack { + #configSet; + #shorts; + #options; + #fields = []; + #env; + #envPrefix; + #allowPositionals; + #usage; + #usageMarkdown; + constructor(options = {}) { + this.#options = options; + this.#allowPositionals = options.allowPositionals !== false; + this.#env = + this.#options.env === undefined ? process.env : this.#options.env; + this.#envPrefix = options.envPrefix; + // We need to fib a little, because it's always the same object, but it + // starts out as having an empty config set. Then each method that adds + // fields returns `this as Jack` + this.#configSet = Object.create(null); + this.#shorts = Object.create(null); + } + /** + * Resulting definitions, suitable to be passed to Node's `util.parseArgs`, + * but also including `description` and `short` fields, if set. + */ + get definitions() { + return this.#configSet; + } + /** map of `{ : }` strings for each short name defined */ + get shorts() { + return this.#shorts; + } + /** + * options passed to the {@link Jack} constructor + */ + get jackOptions() { + return this.#options; + } + /** + * the data used to generate {@link Jack#usage} and + * {@link Jack#usageMarkdown} content. + */ + get usageFields() { + return this.#fields; + } + /** + * Set the default value (which will still be overridden by env or cli) + * as if from a parsed config file. The optional `source` param, if + * provided, will be included in error messages if a value is invalid or + * unknown. + */ + setConfigValues(values, source = '') { + try { + this.validate(values); + } + catch (er) { + if (source && er instanceof Error) { + /* c8 ignore next */ + const cause = typeof er.cause === 'object' ? er.cause : {}; + er.cause = { ...cause, path: source }; + Error.captureStackTrace(er, this.setConfigValues); + } + throw er; + } + for (const [field, value] of Object.entries(values)) { + const my = this.#configSet[field]; + // already validated, just for TS's benefit + /* c8 ignore start */ + if (!my) { + throw new Error('unexpected field in config set: ' + field, { + cause: { + code: 'JACKSPEAK', + found: field, + }, + }); + } + /* c8 ignore stop */ + my.default = value; + } + return this; + } + /** + * Parse a string of arguments, and return the resulting + * `{ values, positionals }` object. + * + * If an {@link JackOptions#envPrefix} is set, then it will read default + * values from the environment, and write the resulting values back + * to the environment as well. 
+ * + * Environment values always take precedence over any other value, except + * an explicit CLI setting. + */ + parse(args = process.argv) { + this.loadEnvDefaults(); + const p = this.parseRaw(args); + this.applyDefaults(p); + this.writeEnv(p); + return p; + } + loadEnvDefaults() { + if (this.#envPrefix) { + for (const [field, my] of Object.entries(this.#configSet)) { + const ek = toEnvKey(this.#envPrefix, field); + const env = this.#env[ek]; + if (env !== undefined) { + my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim); + } + } + } + } + applyDefaults(p) { + for (const [field, c] of Object.entries(this.#configSet)) { + if (c.default !== undefined && !(field in p.values)) { + //@ts-ignore + p.values[field] = c.default; + } + } + } + /** + * Only parse the command line arguments passed in. + * Does not strip off the `node script.js` bits, so it must be just the + * arguments you wish to have parsed. + * Does not read from or write to the environment, or set defaults. + */ + parseRaw(args) { + if (args === process.argv) { + args = args.slice(process._eval !== undefined ? 1 : 2); + } + const result = (0, node_util_1.parseArgs)({ + args, + options: toParseArgsOptionsConfig(this.#configSet), + // always strict, but using our own logic + strict: false, + allowPositionals: this.#allowPositionals, + tokens: true, + }); + const p = { + values: {}, + positionals: [], + }; + for (const token of result.tokens) { + if (token.kind === 'positional') { + p.positionals.push(token.value); + if (this.#options.stopAtPositional || + this.#options.stopAtPositionalTest?.(token.value)) { + p.positionals.push(...args.slice(token.index + 1)); + break; + } + } + else if (token.kind === 'option') { + let value = undefined; + if (token.name.startsWith('no-')) { + const my = this.#configSet[token.name]; + const pname = token.name.substring('no-'.length); + const pos = this.#configSet[pname]; + if (pos && + pos.type === 'boolean' && + (!my || + (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) { + value = false; + token.name = pname; + } + } + const my = this.#configSet[token.name]; + if (!my) { + throw new Error(`Unknown option '${token.rawName}'. ` + + `To specify a positional argument starting with a '-', ` + + `place it at the end of the command after '--', as in ` + + `'-- ${token.rawName}'`, { + cause: { + code: 'JACKSPEAK', + found: token.rawName + (token.value ? `=${token.value}` : ''), + }, + }); + } + if (value === undefined) { + if (token.value === undefined) { + if (my.type !== 'boolean') { + throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + wanted: valueType(my), + }, + }); + } + value = true; + } + else { + if (my.type === 'boolean') { + throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } }); + } + if (my.type === 'string') { + value = token.value; + } + else { + value = +token.value; + if (value !== value) { + throw new Error(`Invalid value '${token.value}' provided for ` + + `'${token.rawName}' option, expected number`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + found: token.value, + wanted: 'number', + }, + }); + } + } + } + } + if (my.multiple) { + const pv = p.values; + const tn = pv[token.name] ?? 
[]; + pv[token.name] = tn; + tn.push(value); + } + else { + const pv = p.values; + pv[token.name] = value; + } + } + } + for (const [field, value] of Object.entries(p.values)) { + const valid = this.#configSet[field]?.validate; + const validOptions = this.#configSet[field]?.validOptions; + const cause = validOptions && !isValidOption(value, validOptions) ? + { name: field, found: value, validOptions } + : valid && !valid(value) ? { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } }); + } + } + return p; + } + /** + * do not set fields as 'no-foo' if 'foo' exists and both are bools + * just set foo. + */ + #noNoFields(f, val, s = f) { + if (!f.startsWith('no-') || typeof val !== 'boolean') + return; + const yes = f.substring('no-'.length); + // recurse so we get the core config key we care about. + this.#noNoFields(yes, val, s); + if (this.#configSet[yes]?.type === 'boolean') { + throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } }); + } + } + /** + * Validate that any arbitrary object is a valid configuration `values` + * object. Useful when loading config files or other sources. + */ + validate(o) { + if (!o || typeof o !== 'object') { + throw new Error('Invalid config: not an object', { + cause: { code: 'JACKSPEAK', found: o }, + }); + } + const opts = o; + for (const field in o) { + const value = opts[field]; + /* c8 ignore next - for TS */ + if (value === undefined) + continue; + this.#noNoFields(field, value); + const config = this.#configSet[field]; + if (!config) { + throw new Error(`Unknown config option: ${field}`, { + cause: { code: 'JACKSPEAK', found: field }, + }); + } + if (!isValidValue(value, config.type, !!config.multiple)) { + throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, { + cause: { + code: 'JACKSPEAK', + name: field, + found: value, + wanted: valueType(config), + }, + }); + } + const cause = config.validOptions && !isValidOption(value, config.validOptions) ? + { name: field, found: value, validOptions: config.validOptions } + : config.validate && !config.validate(value) ? + { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid config value for ${field}: ${value}`, { + cause: { ...cause, code: 'JACKSPEAK' }, + }); + } + } + } + writeEnv(p) { + if (!this.#env || !this.#envPrefix) + return; + for (const [field, value] of Object.entries(p.values)) { + const my = this.#configSet[field]; + this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim); + } + } + /** + * Add a heading to the usage output banner + */ + heading(text, level, { pre = false } = {}) { + if (level === undefined) { + level = this.#fields.some(r => isHeading(r)) ? 2 : 1; + } + this.#fields.push({ type: 'heading', text, level, pre }); + return this; + } + /** + * Add a long-form description to the usage output at this position. + */ + description(text, { pre } = {}) { + this.#fields.push({ type: 'description', text, pre }); + return this; + } + /** + * Add one or more number fields. + */ + num(fields) { + return this.#addFieldsWith(fields, 'number', false); + } + /** + * Add one or more multiple number fields. + */ + numList(fields) { + return this.#addFieldsWith(fields, 'number', true); + } + /** + * Add one or more string option fields. 
+ */ + opt(fields) { + return this.#addFieldsWith(fields, 'string', false); + } + /** + * Add one or more multiple string option fields. + */ + optList(fields) { + return this.#addFieldsWith(fields, 'string', true); + } + /** + * Add one or more flag fields. + */ + flag(fields) { + return this.#addFieldsWith(fields, 'boolean', false); + } + /** + * Add one or more multiple flag fields. + */ + flagList(fields) { + return this.#addFieldsWith(fields, 'boolean', true); + } + /** + * Generic field definition method. Similar to flag/flagList/number/etc, + * but you must specify the `type` (and optionally `multiple` and `delim`) + * fields on each one, or Jack won't know how to define them. + */ + addFields(fields) { + return this.#addFields(this, fields); + } + #addFieldsWith(fields, type, multiple) { + return this.#addFields(this, fields, { + type, + multiple, + }); + } + #addFields(next, fields, opt) { + Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => { + this.#validateName(name, field); + const { type, multiple } = validateFieldMeta(field, opt); + const value = { ...field, type, multiple }; + validateField(value, type, multiple); + next.#fields.push({ type: 'config', name, value }); + return [name, value]; + }))); + return next; + } + #validateName(name, field) { + if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) { + throw new TypeError(`Invalid option name: ${name}, ` + + `must be '-' delimited ASCII alphanumeric`); + } + if (this.#configSet[name]) { + throw new TypeError(`Cannot redefine option ${field}`); + } + if (this.#shorts[name]) { + throw new TypeError(`Cannot redefine option ${name}, already ` + + `in use for ${this.#shorts[name]}`); + } + if (field.short) { + if (!/^[a-zA-Z0-9]$/.test(field.short)) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + 'must be 1 ASCII alphanumeric character'); + } + if (this.#shorts[field.short]) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + `already in use for ${this.#shorts[field.short]}`); + } + this.#shorts[field.short] = name; + this.#shorts[name] = name; + } + } + /** + * Return the usage banner for the given configuration + */ + usage() { + if (this.#usage) + return this.#usage; + let headingLevel = 1; + //@ts-ignore + const ui = (0, cliui_1.default)({ width }); + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + ui.div({ + padding: [0, 0, 0, 0], + text: normalize(first.text), + }); + } + ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' }); + if (this.#options.usage) { + ui.div({ + text: this.#options.usage, + padding: [0, 0, 0, 2], + }); + } + else { + const cmd = (0, node_path_1.basename)(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + ui.div({ + text: usage, + padding: [0, 0, 0, 2], + }); + } + ui.div({ padding: [0, 0, 0, 0], text: '' }); + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + const print = normalize(maybeDesc.text, maybeDesc.pre); + start++; + ui.div({ padding: [0, 0, 0, 0], text: print }); + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + const { rows, maxWidth } = this.#usageRows(start); + // every heading/description after the first gets indented by 2 + // extra spaces. + for (const row of rows) { + if (row.left) { + // If the row is too long, don't wrap it + // Bump the right-hand side down a line to make room + const configIndent = indent(Math.max(headingLevel, 2)); + if (row.left.length > maxWidth - 3) { + ui.div({ text: row.left, padding: [0, 0, 0, configIndent] }); + ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] }); + } + else { + ui.div({ + text: row.left, + padding: [0, 1, 0, configIndent], + width: maxWidth, + }, { padding: [0, 0, 0, 0], text: row.text }); + } + if (row.skipLine) { + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + } + else { + if (isHeading(row)) { + const { level } = row; + headingLevel = level; + // only h1 and h2 have bottom padding + // h3-h6 do not + const b = level <= 2 ? 1 : 0; + ui.div({ ...row, padding: [0, 0, b, indent(level)] }); + } + else { + ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] }); + } + } + } + return (this.#usage = ui.toString()); + } + /** + * Return the usage banner markdown for the given configuration + */ + usageMarkdown() { + if (this.#usageMarkdown) + return this.#usageMarkdown; + const out = []; + let headingLevel = 1; + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + out.push(`# ${normalizeOneLine(first.text)}`); + } + out.push('Usage:'); + if (this.#options.usage) { + out.push(normalizeMarkdown(this.#options.usage, true)); + } + else { + const cmd = (0, node_path_1.basename)(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + out.push(normalizeMarkdown(usage, true)); + } + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre)); + start++; + } + const { rows } = this.#usageRows(start); + // heading level in markdown is number of # ahead of text + for (const row of rows) { + if (row.left) { + out.push('#'.repeat(headingLevel + 1) + + ' ' + + normalizeOneLine(row.left, true)); + if (row.text) + out.push(normalizeMarkdown(row.text)); + } + else if (isHeading(row)) { + const { level } = row; + headingLevel = level; + out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`); + } + else { + out.push(normalizeMarkdown(row.text, !!row.pre)); + } + } + return (this.#usageMarkdown = out.join('\n\n') + '\n'); + } + #usageRows(start) { + // turn each config type into a row, and figure out the width of the + // left hand indentation for the option descriptions. + let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3))); + let maxWidth = 8; + let prev = undefined; + const rows = []; + for (const field of this.#fields.slice(start)) { + if (field.type !== 'config') { + if (prev?.type === 'config') + prev.skipLine = true; + prev = undefined; + field.text = normalize(field.text, !!field.pre); + rows.push(field); + continue; + } + const { value } = field; + const desc = value.description || ''; + const mult = value.multiple ? 'Can be set multiple times' : ''; + const opts = value.validOptions?.length ? + `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}` + : ''; + const dmDelim = desc.includes('\n') ? '\n\n' : '\n'; + const extra = [opts, mult].join(dmDelim).trim(); + const text = (normalize(desc) + dmDelim + extra).trim(); + const hint = value.hint || + (value.type === 'number' ? 'n' + : value.type === 'string' ? field.name + : undefined); + const short = !value.short ? '' + : value.type === 'boolean' ? `-${value.short} ` + : `-${value.short}<${hint}> `; + const left = value.type === 'boolean' ? + `${short}--${field.name}` + : `${short}--${field.name}=<${hint}>`; + const row = { text, left, type: 'config' }; + if (text.length > width - maxMax) { + row.skipLine = true; + } + if (prev && left.length > maxMax) + prev.skipLine = true; + prev = row; + const len = left.length + 4; + if (len > maxWidth && len < maxMax) { + maxWidth = len; + } + rows.push(row); + } + return { rows, maxWidth }; + } + /** + * Return the configuration options as a plain object + */ + toJSON() { + return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [ + field, + { + type: def.type, + ...(def.multiple ? { multiple: true } : {}), + ...(def.delim ? { delim: def.delim } : {}), + ...(def.short ? { short: def.short } : {}), + ...(def.description ? + { description: normalize(def.description) } + : {}), + ...(def.validate ? { validate: def.validate } : {}), + ...(def.validOptions ? { validOptions: def.validOptions } : {}), + ...(def.default !== undefined ? { default: def.default } : {}), + ...(def.hint ? 
{ hint: def.hint } : {}), + }, + ])); + } + /** + * Custom printer for `util.inspect` + */ + [node_util_1.inspect.custom](_, options) { + return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`; + } +} +exports.Jack = Jack; +/** + * Main entry point. Create and return a {@link Jack} object. + */ +const jack = (options = {}) => new Jack(options); +exports.jack = jack; +// Unwrap and un-indent, so we can wrap description +// strings however makes them look nice in the code. +const normalize = (s, pre = false) => { + if (pre) + // prepend a ZWSP to each line so cliui doesn't strip it. + return s + .split('\n') + .map(l => `\u200b${l}`) + .join('\n'); + return s + .split(/^\s*```\s*$/gm) + .map((s, i) => { + if (i % 2 === 1) { + if (!s.trim()) { + return `\`\`\`\n\`\`\`\n`; + } + // outdent the ``` blocks, but preserve whitespace otherwise. + const split = s.split('\n'); + // throw out the \n at the start and end + split.pop(); + split.shift(); + const si = split.reduce((shortest, l) => { + /* c8 ignore next */ + const ind = l.match(/^\s*/)?.[0] ?? ''; + if (ind.length) + return Math.min(ind.length, shortest); + else + return shortest; + }, Infinity); + /* c8 ignore next */ + const i = isFinite(si) ? si : 0; + return ('\n```\n' + + split.map(s => `\u200b${s.substring(i)}`).join('\n') + + '\n```\n'); + } + return (s + // remove single line breaks, except for lists + .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`) + // normalize mid-line whitespace + .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2') + // two line breaks are enough + .replace(/\n{3,}/g, '\n\n') + // remove any spaces at the start of a line + .replace(/\n[ \t]+/g, '\n') + .trim()); + }) + .join('\n'); +}; +// normalize for markdown printing, remove leading spaces on lines +const normalizeMarkdown = (s, pre = false) => { + const n = normalize(s, pre).replace(/\\/g, '\\\\'); + return pre ? + `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\`` + : n.replace(/\n +/g, '\n').trim(); +}; +const normalizeOneLine = (s, pre = false) => { + const n = normalize(s, pre) + .replace(/[\s\u200b]+/g, ' ') + .trim(); + return pre ? 
`\`${n}\`` : n; +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js new file mode 100644 index 0000000000000..b959f5126423c --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js @@ -0,0 +1,936 @@ +import { inspect, parseArgs, } from 'node:util'; +// it's a tiny API, just cast it inline, it's fine +//@ts-ignore +import cliui from '@isaacs/cliui'; +import { basename } from 'node:path'; +export const isConfigType = (t) => typeof t === 'string' && + (t === 'string' || t === 'number' || t === 'boolean'); +const isValidValue = (v, type, multi) => { + if (multi) { + if (!Array.isArray(v)) + return false; + return !v.some((v) => !isValidValue(v, type, false)); + } + if (Array.isArray(v)) + return false; + return typeof v === type; +}; +const isValidOption = (v, vo) => !!vo && + (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v)); +/** + * Determine whether an unknown object is a {@link ConfigOption} based only + * on its `type` and `multiple` property + */ +export const isConfigOptionOfType = (o, type, multi) => !!o && + typeof o === 'object' && + isConfigType(o.type) && + o.type === type && + !!o.multiple === multi; +/** + * Determine whether an unknown object is a {@link ConfigOption} based on + * it having all valid properties + */ +export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) && + undefOrType(o.short, 'string') && + undefOrType(o.description, 'string') && + undefOrType(o.hint, 'string') && + undefOrType(o.validate, 'function') && + (o.type === 'boolean' ? + o.validOptions === undefined + : undefOrTypeArray(o.validOptions, o.type)) && + (o.default === undefined || isValidValue(o.default, type, multi)); +const isHeading = (r) => r.type === 'heading'; +const isDescription = (r) => r.type === 'description'; +const width = Math.min(process?.stdout?.columns ?? 80, 80); +// indentation spaces from heading level +const indent = (n) => (n - 1) * 2; +const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')] + .join(' ') + .trim() + .toUpperCase() + .replace(/ /g, '_'); +const toEnvVal = (value, delim = '\n') => { + const str = typeof value === 'string' ? value + : typeof value === 'boolean' ? + value ? '1' + : '0' + : typeof value === 'number' ? String(value) + : Array.isArray(value) ? + value.map((v) => toEnvVal(v)).join(delim) + : /* c8 ignore start */ undefined; + if (typeof str !== 'string') { + throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } }); + } + /* c8 ignore stop */ + return str; +}; +const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ? + env ? env.split(delim).map(v => fromEnvVal(v, type, false)) + : [] + : type === 'string' ? env + : type === 'boolean' ? 
env === '1' + : +env.trim()); +const undefOrType = (v, t) => v === undefined || typeof v === t; +const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t)); +// print the value type, for error message reporting +const valueType = (v) => typeof v === 'string' ? 'string' + : typeof v === 'boolean' ? 'boolean' + : typeof v === 'number' ? 'number' + : Array.isArray(v) ? + `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]` + : `${v.type}${v.multiple ? '[]' : ''}`; +const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ? + types[0] + : `(${types.join('|')})`; +const validateFieldMeta = (field, fieldMeta) => { + if (fieldMeta) { + if (field.type !== undefined && field.type !== fieldMeta.type) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: [fieldMeta.type, undefined], + }, + }); + } + if (field.multiple !== undefined && + !!field.multiple !== fieldMeta.multiple) { + throw new TypeError(`invalid multiple`, { + cause: { + found: field.multiple, + wanted: [fieldMeta.multiple, undefined], + }, + }); + } + return fieldMeta; + } + if (!isConfigType(field.type)) { + throw new TypeError(`invalid type`, { + cause: { + found: field.type, + wanted: ['string', 'number', 'boolean'], + }, + }); + } + return { + type: field.type, + multiple: !!field.multiple, + }; +}; +const validateField = (o, type, multiple) => { + const validateValidOptions = (def, validOptions) => { + if (!undefOrTypeArray(validOptions, type)) { + throw new TypeError('invalid validOptions', { + cause: { + found: validOptions, + wanted: valueType({ type, multiple: true }), + }, + }); + } + if (def !== undefined && validOptions !== undefined) { + const valid = Array.isArray(def) ? + def.every(v => validOptions.includes(v)) + : validOptions.includes(def); + if (!valid) { + throw new TypeError('invalid default value not in validOptions', { + cause: { + found: def, + wanted: validOptions, + }, + }); + } + } + }; + if (o.default !== undefined && + !isValidValue(o.default, type, multiple)) { + throw new TypeError('invalid default value', { + cause: { + found: o.default, + wanted: valueType({ type, multiple }), + }, + }); + } + if (isConfigOptionOfType(o, 'number', false) || + isConfigOptionOfType(o, 'number', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if (isConfigOptionOfType(o, 'string', false) || + isConfigOptionOfType(o, 'string', true)) { + validateValidOptions(o.default, o.validOptions); + } + else if (isConfigOptionOfType(o, 'boolean', false) || + isConfigOptionOfType(o, 'boolean', true)) { + if (o.hint !== undefined) { + throw new TypeError('cannot provide hint for flag'); + } + if (o.validOptions !== undefined) { + throw new TypeError('cannot provide validOptions for flag'); + } + } + return o; +}; +const toParseArgsOptionsConfig = (options) => { + return Object.entries(options).reduce((acc, [longOption, o]) => { + const p = { + type: 'string', + multiple: !!o.multiple, + ...(typeof o.short === 'string' ? 
{ short: o.short } : undefined), + }; + const setNoBool = () => { + if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) { + acc[`no-${longOption}`] = { + type: 'boolean', + multiple: !!o.multiple, + }; + } + }; + const setDefault = (def, fn) => { + if (def !== undefined) { + p.default = fn(def); + } + }; + if (isConfigOption(o, 'number', false)) { + setDefault(o.default, String); + } + else if (isConfigOption(o, 'number', true)) { + setDefault(o.default, d => d.map(v => String(v))); + } + else if (isConfigOption(o, 'string', false) || + isConfigOption(o, 'string', true)) { + setDefault(o.default, v => v); + } + else if (isConfigOption(o, 'boolean', false) || + isConfigOption(o, 'boolean', true)) { + p.type = 'boolean'; + setDefault(o.default, v => v); + setNoBool(); + } + acc[longOption] = p; + return acc; + }, {}); +}; +/** + * Class returned by the {@link jack} function and all configuration + * definition methods. This is what gets chained together. + */ +export class Jack { + #configSet; + #shorts; + #options; + #fields = []; + #env; + #envPrefix; + #allowPositionals; + #usage; + #usageMarkdown; + constructor(options = {}) { + this.#options = options; + this.#allowPositionals = options.allowPositionals !== false; + this.#env = + this.#options.env === undefined ? process.env : this.#options.env; + this.#envPrefix = options.envPrefix; + // We need to fib a little, because it's always the same object, but it + // starts out as having an empty config set. Then each method that adds + // fields returns `this as Jack` + this.#configSet = Object.create(null); + this.#shorts = Object.create(null); + } + /** + * Resulting definitions, suitable to be passed to Node's `util.parseArgs`, + * but also including `description` and `short` fields, if set. + */ + get definitions() { + return this.#configSet; + } + /** map of `{ : }` strings for each short name defined */ + get shorts() { + return this.#shorts; + } + /** + * options passed to the {@link Jack} constructor + */ + get jackOptions() { + return this.#options; + } + /** + * the data used to generate {@link Jack#usage} and + * {@link Jack#usageMarkdown} content. + */ + get usageFields() { + return this.#fields; + } + /** + * Set the default value (which will still be overridden by env or cli) + * as if from a parsed config file. The optional `source` param, if + * provided, will be included in error messages if a value is invalid or + * unknown. + */ + setConfigValues(values, source = '') { + try { + this.validate(values); + } + catch (er) { + if (source && er instanceof Error) { + /* c8 ignore next */ + const cause = typeof er.cause === 'object' ? er.cause : {}; + er.cause = { ...cause, path: source }; + Error.captureStackTrace(er, this.setConfigValues); + } + throw er; + } + for (const [field, value] of Object.entries(values)) { + const my = this.#configSet[field]; + // already validated, just for TS's benefit + /* c8 ignore start */ + if (!my) { + throw new Error('unexpected field in config set: ' + field, { + cause: { + code: 'JACKSPEAK', + found: field, + }, + }); + } + /* c8 ignore stop */ + my.default = value; + } + return this; + } + /** + * Parse a string of arguments, and return the resulting + * `{ values, positionals }` object. + * + * If an {@link JackOptions#envPrefix} is set, then it will read default + * values from the environment, and write the resulting values back + * to the environment as well. + * + * Environment values always take precedence over any other value, except + * an explicit CLI setting. 
+ */ + parse(args = process.argv) { + this.loadEnvDefaults(); + const p = this.parseRaw(args); + this.applyDefaults(p); + this.writeEnv(p); + return p; + } + loadEnvDefaults() { + if (this.#envPrefix) { + for (const [field, my] of Object.entries(this.#configSet)) { + const ek = toEnvKey(this.#envPrefix, field); + const env = this.#env[ek]; + if (env !== undefined) { + my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim); + } + } + } + } + applyDefaults(p) { + for (const [field, c] of Object.entries(this.#configSet)) { + if (c.default !== undefined && !(field in p.values)) { + //@ts-ignore + p.values[field] = c.default; + } + } + } + /** + * Only parse the command line arguments passed in. + * Does not strip off the `node script.js` bits, so it must be just the + * arguments you wish to have parsed. + * Does not read from or write to the environment, or set defaults. + */ + parseRaw(args) { + if (args === process.argv) { + args = args.slice(process._eval !== undefined ? 1 : 2); + } + const result = parseArgs({ + args, + options: toParseArgsOptionsConfig(this.#configSet), + // always strict, but using our own logic + strict: false, + allowPositionals: this.#allowPositionals, + tokens: true, + }); + const p = { + values: {}, + positionals: [], + }; + for (const token of result.tokens) { + if (token.kind === 'positional') { + p.positionals.push(token.value); + if (this.#options.stopAtPositional || + this.#options.stopAtPositionalTest?.(token.value)) { + p.positionals.push(...args.slice(token.index + 1)); + break; + } + } + else if (token.kind === 'option') { + let value = undefined; + if (token.name.startsWith('no-')) { + const my = this.#configSet[token.name]; + const pname = token.name.substring('no-'.length); + const pos = this.#configSet[pname]; + if (pos && + pos.type === 'boolean' && + (!my || + (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) { + value = false; + token.name = pname; + } + } + const my = this.#configSet[token.name]; + if (!my) { + throw new Error(`Unknown option '${token.rawName}'. ` + + `To specify a positional argument starting with a '-', ` + + `place it at the end of the command after '--', as in ` + + `'-- ${token.rawName}'`, { + cause: { + code: 'JACKSPEAK', + found: token.rawName + (token.value ? `=${token.value}` : ''), + }, + }); + } + if (value === undefined) { + if (token.value === undefined) { + if (my.type !== 'boolean') { + throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + wanted: valueType(my), + }, + }); + } + value = true; + } + else { + if (my.type === 'boolean') { + throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } }); + } + if (my.type === 'string') { + value = token.value; + } + else { + value = +token.value; + if (value !== value) { + throw new Error(`Invalid value '${token.value}' provided for ` + + `'${token.rawName}' option, expected number`, { + cause: { + code: 'JACKSPEAK', + name: token.rawName, + found: token.value, + wanted: 'number', + }, + }); + } + } + } + } + if (my.multiple) { + const pv = p.values; + const tn = pv[token.name] ?? 
[]; + pv[token.name] = tn; + tn.push(value); + } + else { + const pv = p.values; + pv[token.name] = value; + } + } + } + for (const [field, value] of Object.entries(p.values)) { + const valid = this.#configSet[field]?.validate; + const validOptions = this.#configSet[field]?.validOptions; + const cause = validOptions && !isValidOption(value, validOptions) ? + { name: field, found: value, validOptions } + : valid && !valid(value) ? { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } }); + } + } + return p; + } + /** + * do not set fields as 'no-foo' if 'foo' exists and both are bools + * just set foo. + */ + #noNoFields(f, val, s = f) { + if (!f.startsWith('no-') || typeof val !== 'boolean') + return; + const yes = f.substring('no-'.length); + // recurse so we get the core config key we care about. + this.#noNoFields(yes, val, s); + if (this.#configSet[yes]?.type === 'boolean') { + throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } }); + } + } + /** + * Validate that any arbitrary object is a valid configuration `values` + * object. Useful when loading config files or other sources. + */ + validate(o) { + if (!o || typeof o !== 'object') { + throw new Error('Invalid config: not an object', { + cause: { code: 'JACKSPEAK', found: o }, + }); + } + const opts = o; + for (const field in o) { + const value = opts[field]; + /* c8 ignore next - for TS */ + if (value === undefined) + continue; + this.#noNoFields(field, value); + const config = this.#configSet[field]; + if (!config) { + throw new Error(`Unknown config option: ${field}`, { + cause: { code: 'JACKSPEAK', found: field }, + }); + } + if (!isValidValue(value, config.type, !!config.multiple)) { + throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, { + cause: { + code: 'JACKSPEAK', + name: field, + found: value, + wanted: valueType(config), + }, + }); + } + const cause = config.validOptions && !isValidOption(value, config.validOptions) ? + { name: field, found: value, validOptions: config.validOptions } + : config.validate && !config.validate(value) ? + { name: field, found: value } + : undefined; + if (cause) { + throw new Error(`Invalid config value for ${field}: ${value}`, { + cause: { ...cause, code: 'JACKSPEAK' }, + }); + } + } + } + writeEnv(p) { + if (!this.#env || !this.#envPrefix) + return; + for (const [field, value] of Object.entries(p.values)) { + const my = this.#configSet[field]; + this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim); + } + } + /** + * Add a heading to the usage output banner + */ + heading(text, level, { pre = false } = {}) { + if (level === undefined) { + level = this.#fields.some(r => isHeading(r)) ? 2 : 1; + } + this.#fields.push({ type: 'heading', text, level, pre }); + return this; + } + /** + * Add a long-form description to the usage output at this position. + */ + description(text, { pre } = {}) { + this.#fields.push({ type: 'description', text, pre }); + return this; + } + /** + * Add one or more number fields. + */ + num(fields) { + return this.#addFieldsWith(fields, 'number', false); + } + /** + * Add one or more multiple number fields. + */ + numList(fields) { + return this.#addFieldsWith(fields, 'number', true); + } + /** + * Add one or more string option fields. 
+ */ + opt(fields) { + return this.#addFieldsWith(fields, 'string', false); + } + /** + * Add one or more multiple string option fields. + */ + optList(fields) { + return this.#addFieldsWith(fields, 'string', true); + } + /** + * Add one or more flag fields. + */ + flag(fields) { + return this.#addFieldsWith(fields, 'boolean', false); + } + /** + * Add one or more multiple flag fields. + */ + flagList(fields) { + return this.#addFieldsWith(fields, 'boolean', true); + } + /** + * Generic field definition method. Similar to flag/flagList/number/etc, + * but you must specify the `type` (and optionally `multiple` and `delim`) + * fields on each one, or Jack won't know how to define them. + */ + addFields(fields) { + return this.#addFields(this, fields); + } + #addFieldsWith(fields, type, multiple) { + return this.#addFields(this, fields, { + type, + multiple, + }); + } + #addFields(next, fields, opt) { + Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => { + this.#validateName(name, field); + const { type, multiple } = validateFieldMeta(field, opt); + const value = { ...field, type, multiple }; + validateField(value, type, multiple); + next.#fields.push({ type: 'config', name, value }); + return [name, value]; + }))); + return next; + } + #validateName(name, field) { + if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) { + throw new TypeError(`Invalid option name: ${name}, ` + + `must be '-' delimited ASCII alphanumeric`); + } + if (this.#configSet[name]) { + throw new TypeError(`Cannot redefine option ${field}`); + } + if (this.#shorts[name]) { + throw new TypeError(`Cannot redefine option ${name}, already ` + + `in use for ${this.#shorts[name]}`); + } + if (field.short) { + if (!/^[a-zA-Z0-9]$/.test(field.short)) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + 'must be 1 ASCII alphanumeric character'); + } + if (this.#shorts[field.short]) { + throw new TypeError(`Invalid ${name} short option: ${field.short}, ` + + `already in use for ${this.#shorts[field.short]}`); + } + this.#shorts[field.short] = name; + this.#shorts[name] = name; + } + } + /** + * Return the usage banner for the given configuration + */ + usage() { + if (this.#usage) + return this.#usage; + let headingLevel = 1; + //@ts-ignore + const ui = cliui({ width }); + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + ui.div({ + padding: [0, 0, 0, 0], + text: normalize(first.text), + }); + } + ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' }); + if (this.#options.usage) { + ui.div({ + text: this.#options.usage, + padding: [0, 0, 0, 2], + }); + } + else { + const cmd = basename(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + ui.div({ + text: usage, + padding: [0, 0, 0, 2], + }); + } + ui.div({ padding: [0, 0, 0, 0], text: '' }); + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + const print = normalize(maybeDesc.text, maybeDesc.pre); + start++; + ui.div({ padding: [0, 0, 0, 0], text: print }); + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + const { rows, maxWidth } = this.#usageRows(start); + // every heading/description after the first gets indented by 2 + // extra spaces. + for (const row of rows) { + if (row.left) { + // If the row is too long, don't wrap it + // Bump the right-hand side down a line to make room + const configIndent = indent(Math.max(headingLevel, 2)); + if (row.left.length > maxWidth - 3) { + ui.div({ text: row.left, padding: [0, 0, 0, configIndent] }); + ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] }); + } + else { + ui.div({ + text: row.left, + padding: [0, 1, 0, configIndent], + width: maxWidth, + }, { padding: [0, 0, 0, 0], text: row.text }); + } + if (row.skipLine) { + ui.div({ padding: [0, 0, 0, 0], text: '' }); + } + } + else { + if (isHeading(row)) { + const { level } = row; + headingLevel = level; + // only h1 and h2 have bottom padding + // h3-h6 do not + const b = level <= 2 ? 1 : 0; + ui.div({ ...row, padding: [0, 0, b, indent(level)] }); + } + else { + ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] }); + } + } + } + return (this.#usage = ui.toString()); + } + /** + * Return the usage banner markdown for the given configuration + */ + usageMarkdown() { + if (this.#usageMarkdown) + return this.#usageMarkdown; + const out = []; + let headingLevel = 1; + const first = this.#fields[0]; + let start = first?.type === 'heading' ? 1 : 0; + if (first?.type === 'heading') { + out.push(`# ${normalizeOneLine(first.text)}`); + } + out.push('Usage:'); + if (this.#options.usage) { + out.push(normalizeMarkdown(this.#options.usage, true)); + } + else { + const cmd = basename(String(process.argv[1])); + const shortFlags = []; + const shorts = []; + const flags = []; + const opts = []; + for (const [field, config] of Object.entries(this.#configSet)) { + if (config.short) { + if (config.type === 'boolean') + shortFlags.push(config.short); + else + shorts.push([config.short, config.hint || field]); + } + else { + if (config.type === 'boolean') + flags.push(field); + else + opts.push([field, config.hint || field]); + } + } + const sf = shortFlags.length ? 
' -' + shortFlags.join('') : ''; + const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const lf = flags.map(k => ` --${k}`).join(''); + const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join(''); + const usage = `${cmd}${sf}${so}${lf}${lo}`.trim(); + out.push(normalizeMarkdown(usage, true)); + } + const maybeDesc = this.#fields[start]; + if (maybeDesc && isDescription(maybeDesc)) { + out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre)); + start++; + } + const { rows } = this.#usageRows(start); + // heading level in markdown is number of # ahead of text + for (const row of rows) { + if (row.left) { + out.push('#'.repeat(headingLevel + 1) + + ' ' + + normalizeOneLine(row.left, true)); + if (row.text) + out.push(normalizeMarkdown(row.text)); + } + else if (isHeading(row)) { + const { level } = row; + headingLevel = level; + out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`); + } + else { + out.push(normalizeMarkdown(row.text, !!row.pre)); + } + } + return (this.#usageMarkdown = out.join('\n\n') + '\n'); + } + #usageRows(start) { + // turn each config type into a row, and figure out the width of the + // left hand indentation for the option descriptions. + let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3))); + let maxWidth = 8; + let prev = undefined; + const rows = []; + for (const field of this.#fields.slice(start)) { + if (field.type !== 'config') { + if (prev?.type === 'config') + prev.skipLine = true; + prev = undefined; + field.text = normalize(field.text, !!field.pre); + rows.push(field); + continue; + } + const { value } = field; + const desc = value.description || ''; + const mult = value.multiple ? 'Can be set multiple times' : ''; + const opts = value.validOptions?.length ? + `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}` + : ''; + const dmDelim = desc.includes('\n') ? '\n\n' : '\n'; + const extra = [opts, mult].join(dmDelim).trim(); + const text = (normalize(desc) + dmDelim + extra).trim(); + const hint = value.hint || + (value.type === 'number' ? 'n' + : value.type === 'string' ? field.name + : undefined); + const short = !value.short ? '' + : value.type === 'boolean' ? `-${value.short} ` + : `-${value.short}<${hint}> `; + const left = value.type === 'boolean' ? + `${short}--${field.name}` + : `${short}--${field.name}=<${hint}>`; + const row = { text, left, type: 'config' }; + if (text.length > width - maxMax) { + row.skipLine = true; + } + if (prev && left.length > maxMax) + prev.skipLine = true; + prev = row; + const len = left.length + 4; + if (len > maxWidth && len < maxMax) { + maxWidth = len; + } + rows.push(row); + } + return { rows, maxWidth }; + } + /** + * Return the configuration options as a plain object + */ + toJSON() { + return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [ + field, + { + type: def.type, + ...(def.multiple ? { multiple: true } : {}), + ...(def.delim ? { delim: def.delim } : {}), + ...(def.short ? { short: def.short } : {}), + ...(def.description ? + { description: normalize(def.description) } + : {}), + ...(def.validate ? { validate: def.validate } : {}), + ...(def.validOptions ? { validOptions: def.validOptions } : {}), + ...(def.default !== undefined ? { default: def.default } : {}), + ...(def.hint ? { hint: def.hint } : {}), + }, + ])); + } + /** + * Custom printer for `util.inspect` + */ + [inspect.custom](_, options) { + return `Jack ${inspect(this.toJSON(), options)}`; + } +} +/** + * Main entry point. 
Create and return a {@link Jack} object. + */ +export const jack = (options = {}) => new Jack(options); +// Unwrap and un-indent, so we can wrap description +// strings however makes them look nice in the code. +const normalize = (s, pre = false) => { + if (pre) + // prepend a ZWSP to each line so cliui doesn't strip it. + return s + .split('\n') + .map(l => `\u200b${l}`) + .join('\n'); + return s + .split(/^\s*```\s*$/gm) + .map((s, i) => { + if (i % 2 === 1) { + if (!s.trim()) { + return `\`\`\`\n\`\`\`\n`; + } + // outdent the ``` blocks, but preserve whitespace otherwise. + const split = s.split('\n'); + // throw out the \n at the start and end + split.pop(); + split.shift(); + const si = split.reduce((shortest, l) => { + /* c8 ignore next */ + const ind = l.match(/^\s*/)?.[0] ?? ''; + if (ind.length) + return Math.min(ind.length, shortest); + else + return shortest; + }, Infinity); + /* c8 ignore next */ + const i = isFinite(si) ? si : 0; + return ('\n```\n' + + split.map(s => `\u200b${s.substring(i)}`).join('\n') + + '\n```\n'); + } + return (s + // remove single line breaks, except for lists + .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`) + // normalize mid-line whitespace + .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2') + // two line breaks are enough + .replace(/\n{3,}/g, '\n\n') + // remove any spaces at the start of a line + .replace(/\n[ \t]+/g, '\n') + .trim()); + }) + .join('\n'); +}; +// normalize for markdown printing, remove leading spaces on lines +const normalizeMarkdown = (s, pre = false) => { + const n = normalize(s, pre).replace(/\\/g, '\\\\'); + return pre ? + `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\`` + : n.replace(/\n +/g, '\n').trim(); +}; +const normalizeOneLine = (s, pre = false) => { + const n = normalize(s, pre) + .replace(/[\s\u200b]+/g, ' ') + .trim(); + return pre ? 
`\`${n}\`` : n; +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/package.json b/node_modules/@npmcli/package-json/node_modules/jackspeak/package.json new file mode 100644 index 0000000000000..aa85d230f6d24 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/jackspeak/package.json @@ -0,0 +1,94 @@ +{ + "name": "jackspeak", + "version": "4.1.1", + "description": "A very strict and proper argument parser.", + "tshy": { + "main": true, + "exports": { + "./package.json": "./package.json", + ".": "./src/index.js" + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "build-examples": "for i in examples/*.js ; do node $i -h > ${i/.js/.txt}; done", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --log-level warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "license": "BlueOak-1.0.0", + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^22.6.0", + "prettier": "^3.3.3", + "tap": "^21.0.1", + "tshy": "^3.0.2", + "typedoc": "^0.26.7" + }, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": "20 || >=22" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/jackspeak.git" + }, + "keywords": [ + "argument", + "parser", + "args", + "option", + "flag", + "cli", + "command", + "line", + "parse", + "parsing" + ], + "author": "Isaac Z. Schlueter ", + "tap": { + "typecheck": true + }, + "module": "./dist/esm/index.js" +} diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE new file mode 100644 index 0000000000000..f785757cd63f8 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js new file mode 100644 index 0000000000000..921b8f10f71b1 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js @@ -0,0 +1,1564 @@ +"use strict"; +/** + * @module LRUCache + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.LRUCache = void 0; +const defaultPerf = (typeof performance === 'object' && + performance && + typeof performance.now === 'function') ? + performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? + process + : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' ? + PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) ? null + : max <= Math.pow(2, 8) ? Uint8Array + : max <= Math.pow(2, 16) ? Uint16Array + : max <= Math.pow(2, 32) ? 
Uint32Array + : max <= Number.MAX_SAFE_INTEGER ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * The `K` and `V` types define the key and value types, respectively. The + * optional `FC` type defines the type of the `context` object passed to + * `cache.fetch()` and `cache.memo()`. + * + * Keys and values **must not** be `null` or `undefined`. + * + * All properties from the options object (with the exception of `max`, + * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are + * added as normal public members. (The listed options are read-only getters.) + * + * Changing any of these will alter the defaults for subsequent method calls. + */ +class LRUCache { + // options that cannot be changed without disaster + #max; + #maxSize; + #dispose; + #onInsert; + #disposeAfter; + #fetchMethod; + #memoMethod; + #perf; + /** + * {@link LRUCache.OptionsBase.perf} + */ + get perf() { + return this.#perf; + } + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + #hasOnInsert; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + get memoMethod() { + return this.#memoMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.onInsert} (read-only) + */ + get onInsert() { + return this.#onInsert; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options; + if (perf !== undefined) { + if (typeof perf?.now !== 'function') { + throw new TypeError('perf option must have a now() method if specified'); + } + } + this.#perf = perf ?? defaultPerf; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (memoMethod !== undefined && + typeof memoMethod !== 'function') { + throw new TypeError('memoMethod must be a function if defined'); + } + this.#memoMethod = memoMethod; + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof onInsert === 'function') { + this.#onInsert = onInsert; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasOnInsert = !!this.#onInsert; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 ? + ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the number of ms left in the item's TTL. If item is not in cache, + * returns `0`. 
Returns `Infinity` if item is in cache without a defined TTL. + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = this.#perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.#delete(this.#keyList[index], 'expire'); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + /* c8 ignore next */ + if (!ttl || !start) + return; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = this.#perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (!ttl || !start) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + const s = starts[index]; + const t = ttls[index]; + return !!t && !!s && (cachedNow || getNow()) - s > t; + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * A String value that is used in the creation of the default string + * description of an object. Called by the built-in method + * `Object.prototype.toString`. + */ + [Symbol.toStringTag] = 'LRUCache'; + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to `Array.find()`. fn is called as `fn(value, key, cache)`. + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from most + * recently used to least recently used. + * + * `fn` is called as `fn(value, key, cache)`. + * + * If `thisp` is provided, function will be called in the `this`-context of + * the provided object, or the cache if no `thisp` object is provided. + * + * Does not update age or recenty of use, or iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.#delete(this.#keyList[i], 'expire'); + deleted = true; + } + } + return deleted; + } + /** + * Get the extended info about a given entry, to get its value, size, and + * TTL info simultaneously. Returns `undefined` if the key is not present. + * + * Unlike {@link LRUCache#dump}, which is designed to be portable and survive + * serialization, the `start` value is always the current timestamp, and the + * `ttl` is a calculated remaining time to live (negative if expired). + * + * Always returns stale values, if their info is found in the cache, so be + * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl}) + * if relevant. + */ + info(key) { + const i = this.#keyMap.get(key); + if (i === undefined) + return undefined; + const v = this.#valList[i]; + /* c8 ignore start - this isn't tested for the info function, + * but it's the same logic as found in other places. */ + const value = this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v; + if (value === undefined) + return undefined; + /* c8 ignore end */ + const entry = { value }; + if (this.#ttls && this.#starts) { + const ttl = this.#ttls[i]; + const start = this.#starts[i]; + if (ttl && start) { + const remain = ttl - (this.#perf.now() - start); + entry.ttl = remain; + entry.start = Date.now(); + } + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + return entry; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to {@link LRUCache#load}. + * + * The `start` fields are calculated relative to a portable `Date.now()` + * timestamp, even if `performance.now()` is available. + * + * Stale entries are always included in the `dump`, even if + * {@link LRUCache.OptionsBase.allowStale} is false. + * + * Note: this returns an actual array, not a generator, so it can be more + * easily passed around. + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = this.#perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * + * The shape of the resulting cache may be different if the same options are + * not used in both caches. + * + * The `start` fields are assumed to be calculated relative to a portable + * `Date.now()` timestamp, even if `performance.now()` is available. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start; + entry.start = this.#perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + * + * Fields on the {@link LRUCache.SetOptions} options param will override + * their corresponding values in the constructor options for the scope + * of this single `set()` operation. + * + * If `start` is provided, then that will set the effective start + * time for the TTL calculation. Note that this must be a previous + * value of `performance.now()` if supported, or a previous value of + * `Date.now()` if not. + * + * Options object may also include `size`, which will prevent + * calling the `sizeCalculation` function and just use the specified + * number if it is a positive integer, and `noDisposeOnSet` which + * will prevent calling a `dispose` function in the case of + * overwrites. + * + * If the `size` (or return value of `sizeCalculation`) for a given + * entry is greater than `maxEntrySize`, then the item will not be + * added to the cache. + * + * Will update the recency of the entry. 
+ * + * If the value is `undefined`, then this is an alias for + * `cache.delete(key)`. `undefined` is never stored in the cache. + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.#delete(k, 'set'); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 ? this.#tail + : this.#free.length !== 0 ? this.#free.pop() + : this.#size === this.#max ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + if (this.#hasOnInsert) { + this.#onInsert?.(v, k, 'add'); + } + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ? + oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + if (this.#hasOnInsert) { + this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace'); + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Check if a key is in the cache, without updating the recency of + * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set + * to `true` in either the options or the constructor. + * + * Will return `false` if the item is stale, even though it is technically in + * the cache. The difference can be determined (if it matters) by using a + * `status` argument, and inspecting the `has` field. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index === undefined || + (!allowStale && this.#isStale(index))) { + return; + } + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching !== undefined) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.#delete(k, 'fetch'); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.#delete(k, 'fetch'); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + async forceFetch(k, fetchOptions = {}) { + const v = await this.fetch(k, fetchOptions); + if (v === undefined) + throw new Error('fetch() returned undefined'); + return v; + } + memo(k, memoOptions = {}) { + const memoMethod = this.#memoMethod; + if (!memoMethod) { + throw new Error('no memoMethod provided to constructor'); + } + const { context, forceRefresh, ...options } = memoOptions; + const v = this.get(k, options); + if (!forceRefresh && v !== undefined) + return v; + const vv = memoMethod(k, v, { + options, + context, + }); + this.set(k, vv, options); + return vv; + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. 
+ * + * If the key is not found, get() will return `undefined`. + */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.#delete(k, 'expire'); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + return this.#delete(k, 'delete'); + } + #delete(k, reason) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.#clear(reason); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, reason); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, reason]); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + const pi = this.#prev[index]; + this.#next[pi] = this.#next[index]; + const ni = this.#next[index]; + this.#prev[ni] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + return this.#clear('delete'); + } + #clear(reason) { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, reason); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, reason]); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +exports.LRUCache = LRUCache; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js new file mode 100644 index 0000000000000..ef5027b91650d --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js @@ -0,0 +1,2 @@ +"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if 
specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/package.json new file mode 100644 index 0000000000000..5bbefffbabee3 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js new file mode 100644 index 0000000000000..8fd8fc5f31507 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js @@ -0,0 +1,1560 @@ +/** + * @module LRUCache + */ +const defaultPerf = (typeof performance === 'object' && + performance && + typeof performance.now === 'function') ? + performance + : Date; +const warned = new Set(); +/* c8 ignore start */ +const PROCESS = (typeof process === 'object' && !!process ? + process + : {}); +/* c8 ignore start */ +const emitWarning = (msg, type, code, fn) => { + typeof PROCESS.emitWarning === 'function' ? + PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`); +}; +let AC = globalThis.AbortController; +let AS = globalThis.AbortSignal; +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort; + _onabort = []; + reason; + aborted = false; + addEventListener(_, fn) { + this._onabort.push(fn); + } + }; + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill(); + } + signal = new AS(); + abort(reason) { + if (this.signal.aborted) + return; + //@ts-ignore + this.signal.reason = reason; + //@ts-ignore + this.signal.aborted = true; + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason); + } + this.signal.onabort?.(reason); + } + }; + let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1'; + const warnACPolyfill = () => { + if (!printACPolyfillWarning) + return; + printACPolyfillWarning = false; + emitWarning('AbortController is not defined. If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill); + }; +} +/* c8 ignore stop */ +const shouldWarn = (code) => !warned.has(code); +const TYPE = Symbol('type'); +const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n); +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max) => !isPosInt(max) ? null + : max <= Math.pow(2, 8) ? Uint8Array + : max <= Math.pow(2, 16) ? 
Uint16Array + : max <= Math.pow(2, 32) ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER ? ZeroArray + : null; +/* c8 ignore stop */ +class ZeroArray extends Array { + constructor(size) { + super(size); + this.fill(0); + } +} +class Stack { + heap; + length; + // private constructor + static #constructing = false; + static create(max) { + const HeapCls = getUintArray(max); + if (!HeapCls) + return []; + Stack.#constructing = true; + const s = new Stack(max, HeapCls); + Stack.#constructing = false; + return s; + } + constructor(max, HeapCls) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)'); + } + /* c8 ignore stop */ + this.heap = new HeapCls(max); + this.length = 0; + } + push(n) { + this.heap[this.length++] = n; + } + pop() { + return this.heap[--this.length]; + } +} +/** + * Default export, the thing you're using this module to get. + * + * The `K` and `V` types define the key and value types, respectively. The + * optional `FC` type defines the type of the `context` object passed to + * `cache.fetch()` and `cache.memo()`. + * + * Keys and values **must not** be `null` or `undefined`. + * + * All properties from the options object (with the exception of `max`, + * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are + * added as normal public members. (The listed options are read-only getters.) + * + * Changing any of these will alter the defaults for subsequent method calls. + */ +export class LRUCache { + // options that cannot be changed without disaster + #max; + #maxSize; + #dispose; + #onInsert; + #disposeAfter; + #fetchMethod; + #memoMethod; + #perf; + /** + * {@link LRUCache.OptionsBase.perf} + */ + get perf() { + return this.#perf; + } + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl; + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution; + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge; + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet; + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas; + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale; + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet; + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL; + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize; + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation; + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort; + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection; + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort; + // computed properties + #size; + #calculatedSize; + #keyMap; + #keyList; + #valList; + #next; + #prev; + #head; + #tail; + #free; + #disposed; + #sizes; + #starts; + #ttls; + #hasDispose; + #hasFetchMethod; + #hasDisposeAfter; + #hasOnInsert; + /** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! 
+ * + * @internal + */ + static unsafeExposeInternals(c) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head; + }, + get tail() { + return c.#tail; + }, + free: c.#free, + // methods + isBackgroundFetch: (p) => c.#isBackgroundFetch(p), + backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context), + moveToTail: (index) => c.#moveToTail(index), + indexes: (options) => c.#indexes(options), + rindexes: (options) => c.#rindexes(options), + isStale: (index) => c.#isStale(index), + }; + } + // Protected read-only members + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max() { + return this.#max; + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize() { + return this.#maxSize; + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize() { + return this.#calculatedSize; + } + /** + * The number of items stored in the cache (read-only) + */ + get size() { + return this.#size; + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod() { + return this.#fetchMethod; + } + get memoMethod() { + return this.#memoMethod; + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ + get dispose() { + return this.#dispose; + } + /** + * {@link LRUCache.OptionsBase.onInsert} (read-only) + */ + get onInsert() { + return this.#onInsert; + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter; + } + constructor(options) { + const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options; + if (perf !== undefined) { + if (typeof perf?.now !== 'function') { + throw new TypeError('perf option must have a now() method if specified'); + } + } + this.#perf = perf ?? defaultPerf; + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer'); + } + const UintArray = max ? 
getUintArray(max) : Array; + if (!UintArray) { + throw new Error('invalid max value: ' + max); + } + this.#max = max; + this.#maxSize = maxSize; + this.maxEntrySize = maxEntrySize || this.#maxSize; + this.sizeCalculation = sizeCalculation; + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize'); + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function'); + } + } + if (memoMethod !== undefined && + typeof memoMethod !== 'function') { + throw new TypeError('memoMethod must be a function if defined'); + } + this.#memoMethod = memoMethod; + if (fetchMethod !== undefined && + typeof fetchMethod !== 'function') { + throw new TypeError('fetchMethod must be a function if specified'); + } + this.#fetchMethod = fetchMethod; + this.#hasFetchMethod = !!fetchMethod; + this.#keyMap = new Map(); + this.#keyList = new Array(max).fill(undefined); + this.#valList = new Array(max).fill(undefined); + this.#next = new UintArray(max); + this.#prev = new UintArray(max); + this.#head = 0; + this.#tail = 0; + this.#free = Stack.create(max); + this.#size = 0; + this.#calculatedSize = 0; + if (typeof dispose === 'function') { + this.#dispose = dispose; + } + if (typeof onInsert === 'function') { + this.#onInsert = onInsert; + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter; + this.#disposed = []; + } + else { + this.#disposeAfter = undefined; + this.#disposed = undefined; + } + this.#hasDispose = !!this.#dispose; + this.#hasOnInsert = !!this.#onInsert; + this.#hasDisposeAfter = !!this.#disposeAfter; + this.noDisposeOnSet = !!noDisposeOnSet; + this.noUpdateTTL = !!noUpdateTTL; + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection; + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection; + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort; + this.ignoreFetchAbort = !!ignoreFetchAbort; + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError('maxSize must be a positive integer if specified'); + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError('maxEntrySize must be a positive integer if specified'); + } + this.#initializeSizeTracking(); + } + this.allowStale = !!allowStale; + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet; + this.updateAgeOnGet = !!updateAgeOnGet; + this.updateAgeOnHas = !!updateAgeOnHas; + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 ? + ttlResolution + : 1; + this.ttlAutopurge = !!ttlAutopurge; + this.ttl = ttl || 0; + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError('ttl must be a positive integer if specified'); + } + this.#initializeTTLTracking(); + } + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError('At least one of max, maxSize, or ttl is required'); + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED'; + if (shouldWarn(code)) { + warned.add(code); + const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.'; + emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache); + } + } + } + /** + * Return the number of ms left in the item's TTL. If item is not in cache, + * returns `0`. 
Returns `Infinity` if item is in cache without a defined TTL. + */ + getRemainingTTL(key) { + return this.#keyMap.has(key) ? Infinity : 0; + } + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max); + const starts = new ZeroArray(this.#max); + this.#ttls = ttls; + this.#starts = starts; + this.#setItemTTL = (index, ttl, start = this.#perf.now()) => { + starts[index] = ttl !== 0 ? start : 0; + ttls[index] = ttl; + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.#delete(this.#keyList[index], 'expire'); + } + }, ttl + 1); + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + }; + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0; + }; + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index]; + const start = starts[index]; + /* c8 ignore next */ + if (!ttl || !start) + return; + status.ttl = ttl; + status.start = start; + status.now = cachedNow || getNow(); + const age = status.now - start; + status.remainingTTL = ttl - age; + } + }; + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. + let cachedNow = 0; + const getNow = () => { + const n = this.#perf.now(); + if (this.ttlResolution > 0) { + cachedNow = n; + const t = setTimeout(() => (cachedNow = 0), this.ttlResolution); + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref(); + } + /* c8 ignore stop */ + } + return n; + }; + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key); + if (index === undefined) { + return 0; + } + const ttl = ttls[index]; + const start = starts[index]; + if (!ttl || !start) { + return Infinity; + } + const age = (cachedNow || getNow()) - start; + return ttl - age; + }; + this.#isStale = index => { + const s = starts[index]; + const t = ttls[index]; + return !!t && !!s && (cachedNow || getNow()) - s > t; + }; + } + // conditionally set private methods related to TTL + #updateItemAge = () => { }; + #statusTTL = () => { }; + #setItemTTL = () => { }; + /* c8 ignore stop */ + #isStale = () => false; + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max); + this.#calculatedSize = 0; + this.#sizes = sizes; + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index]; + sizes[index] = 0; + }; + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. + if (this.#isBackgroundFetch(v)) { + return 0; + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function'); + } + size = sizeCalculation(v, k); + if (!isPosInt(size)) { + throw new TypeError('sizeCalculation return invalid (expect positive integer)'); + } + } + else { + throw new TypeError('invalid size value (must be positive integer). 
' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.'); + } + } + return size; + }; + this.#addItemSize = (index, size, status) => { + sizes[index] = size; + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index]; + while (this.#calculatedSize > maxSize) { + this.#evict(true); + } + } + this.#calculatedSize += sizes[index]; + if (status) { + status.entrySize = size; + status.totalCalculatedSize = this.#calculatedSize; + } + }; + } + #removeItemSize = _i => { }; + #addItemSize = (_i, _s, _st) => { }; + #requireSize = (_k, _v, size, sizeCalculation) => { + if (size || sizeCalculation) { + throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache'); + } + return 0; + }; + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#head) { + break; + } + else { + i = this.#prev[i]; + } + } + } + } + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true;) { + if (!this.#isValidIndex(i)) { + break; + } + if (allowStale || !this.#isStale(i)) { + yield i; + } + if (i === this.#tail) { + break; + } + else { + i = this.#next[i]; + } + } + } + } + #isValidIndex(index) { + return (index !== undefined && + this.#keyMap.get(this.#keyList[index]) === index); + } + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if (this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield [this.#keyList[i], this.#valList[i]]; + } + } + } + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i]; + if (k !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield k; + } + } + } + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. + */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. 
+ */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + if (v !== undefined && + !this.#isBackgroundFetch(this.#valList[i])) { + yield this.#valList[i]; + } + } + } + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries(); + } + /** + * A String value that is used in the creation of the default string + * description of an object. Called by the built-in method + * `Object.prototype.toString`. + */ + [Symbol.toStringTag] = 'LRUCache'; + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to `Array.find()`. fn is called as `fn(value, key, cache)`. + */ + find(fn, getOptions = {}) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined) + continue; + if (fn(value, this.#keyList[i], this)) { + return this.get(this.#keyList[i], getOptions); + } + } + } + /** + * Call the supplied function on each item in the cache, in order from most + * recently used to least recently used. + * + * `fn` is called as `fn(value, key, cache)`. + * + * If `thisp` is provided, function will be called in the `this`-context of + * the provided object, or the cache if no `thisp` object is provided. + * + * Does not update age or recenty of use, or iterate over stale values. + */ + forEach(fn, thisp = this) { + for (const i of this.#indexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. (ie, less recently used items are iterated over first.) + */ + rforEach(fn, thisp = this) { + for (const i of this.#rindexes()) { + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined) + continue; + fn.call(thisp, value, this.#keyList[i], this); + } + } + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false; + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.#delete(this.#keyList[i], 'expire'); + deleted = true; + } + } + return deleted; + } + /** + * Get the extended info about a given entry, to get its value, size, and + * TTL info simultaneously. Returns `undefined` if the key is not present. + * + * Unlike {@link LRUCache#dump}, which is designed to be portable and survive + * serialization, the `start` value is always the current timestamp, and the + * `ttl` is a calculated remaining time to live (negative if expired). + * + * Always returns stale values, if their info is found in the cache, so be + * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl}) + * if relevant. + */ + info(key) { + const i = this.#keyMap.get(key); + if (i === undefined) + return undefined; + const v = this.#valList[i]; + /* c8 ignore start - this isn't tested for the info function, + * but it's the same logic as found in other places. */ + const value = this.#isBackgroundFetch(v) ? 
v.__staleWhileFetching : v; + if (value === undefined) + return undefined; + /* c8 ignore end */ + const entry = { value }; + if (this.#ttls && this.#starts) { + const ttl = this.#ttls[i]; + const start = this.#starts[i]; + if (ttl && start) { + const remain = ttl - (this.#perf.now() - start); + entry.ttl = remain; + entry.start = Date.now(); + } + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + return entry; + } + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to {@link LRUCache#load}. + * + * The `start` fields are calculated relative to a portable `Date.now()` + * timestamp, even if `performance.now()` is available. + * + * Stale entries are always included in the `dump`, even if + * {@link LRUCache.OptionsBase.allowStale} is false. + * + * Note: this returns an actual array, not a generator, so it can be more + * easily passed around. + */ + dump() { + const arr = []; + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i]; + const v = this.#valList[i]; + const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + if (value === undefined || key === undefined) + continue; + const entry = { value }; + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i]; + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = this.#perf.now() - this.#starts[i]; + entry.start = Math.floor(Date.now() - age); + } + if (this.#sizes) { + entry.size = this.#sizes[i]; + } + arr.unshift([key, entry]); + } + return arr; + } + /** + * Reset the cache and load in the items in entries in the order listed. + * + * The shape of the resulting cache may be different if the same options are + * not used in both caches. + * + * The `start` fields are assumed to be calculated relative to a portable + * `Date.now()` timestamp, even if `performance.now()` is available. + */ + load(arr) { + this.clear(); + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start; + entry.start = this.#perf.now() - age; + } + this.set(key, entry.value, entry); + } + } + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + * + * Fields on the {@link LRUCache.SetOptions} options param will override + * their corresponding values in the constructor options for the scope + * of this single `set()` operation. + * + * If `start` is provided, then that will set the effective start + * time for the TTL calculation. Note that this must be a previous + * value of `performance.now()` if supported, or a previous value of + * `Date.now()` if not. + * + * Options object may also include `size`, which will prevent + * calling the `sizeCalculation` function and just use the specified + * number if it is a positive integer, and `noDisposeOnSet` which + * will prevent calling a `dispose` function in the case of + * overwrites. + * + * If the `size` (or return value of `sizeCalculation`) for a given + * entry is greater than `maxEntrySize`, then the item will not be + * added to the cache. + * + * Will update the recency of the entry. 
+ * + * If the value is `undefined`, then this is an alias for + * `cache.delete(key)`. `undefined` is never stored in the cache. + */ + set(k, v, setOptions = {}) { + if (v === undefined) { + this.delete(k); + return this; + } + const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions; + let { noUpdateTTL = this.noUpdateTTL } = setOptions; + const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation); + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss'; + status.maxEntrySizeExceeded = true; + } + // have to delete, in case something is there already. + this.#delete(k, 'set'); + return this; + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k); + if (index === undefined) { + // addition + index = (this.#size === 0 ? this.#tail + : this.#free.length !== 0 ? this.#free.pop() + : this.#size === this.#max ? this.#evict(false) + : this.#size); + this.#keyList[index] = k; + this.#valList[index] = v; + this.#keyMap.set(k, index); + this.#next[this.#tail] = index; + this.#prev[index] = this.#tail; + this.#tail = index; + this.#size++; + this.#addItemSize(index, size, status); + if (status) + status.set = 'add'; + noUpdateTTL = false; + if (this.#hasOnInsert) { + this.#onInsert?.(v, k, 'add'); + } + } + else { + // update + this.#moveToTail(index); + const oldVal = this.#valList[index]; + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')); + const { __staleWhileFetching: s } = oldVal; + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s, k, 'set']); + } + } + } + else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal, k, 'set'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal, k, 'set']); + } + } + this.#removeItemSize(index); + this.#addItemSize(index, size, status); + this.#valList[index] = v; + if (status) { + status.set = 'replace'; + const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ? + oldVal.__staleWhileFetching + : oldVal; + if (oldValue !== undefined) + status.oldValue = oldValue; + } + } + else if (status) { + status.set = 'update'; + } + if (this.#hasOnInsert) { + this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace'); + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking(); + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start); + } + if (status) + this.#statusTTL(status, index); + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return this; + } + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
+ */ + pop() { + try { + while (this.#size) { + const val = this.#valList[this.#head]; + this.#evict(true); + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching; + } + } + else if (val !== undefined) { + return val; + } + } + } + finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } + } + #evict(free) { + const head = this.#head; + const k = this.#keyList[head]; + const v = this.#valList[head]; + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict'); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']); + } + } + this.#removeItemSize(head); + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined; + this.#valList[head] = undefined; + this.#free.push(head); + } + if (this.#size === 1) { + this.#head = this.#tail = 0; + this.#free.length = 0; + } + else { + this.#head = this.#next[head]; + } + this.#keyMap.delete(k); + this.#size--; + return head; + } + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Check if a key is in the cache, without updating the recency of + * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set + * to `true` in either the options or the constructor. + * + * Will return `false` if the item is stale, even though it is technically in + * the cache. The difference can be determined (if it matters) by using a + * `status` argument, and inspecting the `has` field. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. + */ + has(k, hasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined) { + return false; + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index); + } + if (status) { + status.has = 'hit'; + this.#statusTTL(status, index); + } + return true; + } + else if (status) { + status.has = 'stale'; + this.#statusTTL(status, index); + } + } + else if (status) { + status.has = 'miss'; + } + return false; + } + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k, peekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions; + const index = this.#keyMap.get(k); + if (index === undefined || + (!allowStale && this.#isStale(index))) { + return; + } + const v = this.#valList[index]; + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v; + } + #backgroundFetch(k, index, options, context) { + const v = index === undefined ? undefined : this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + return v; + } + const ac = new AC(); + const { signal } = options; + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }); + const fetchOpts = { + signal: ac.signal, + options, + context, + }; + const cb = (v, updateCache = false) => { + const { aborted } = ac.signal; + const ignoreAbort = options.ignoreFetchAbort && v !== undefined; + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true; + options.status.fetchError = ac.signal.reason; + if (ignoreAbort) + options.status.fetchAbortIgnored = true; + } + else { + options.status.fetchResolved = true; + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason); + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p; + if (this.#valList[index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching !== undefined) { + this.#valList[index] = bf.__staleWhileFetching; + } + else { + this.#delete(k, 'fetch'); + } + } + else { + if (options.status) + options.status.fetchUpdated = true; + this.set(k, v, fetchOpts.options); + } + } + return v; + }; + const eb = (er) => { + if (options.status) { + options.status.fetchRejected = true; + options.status.fetchError = er; + } + return fetchFail(er); + }; + const fetchFail = (er) => { + const { aborted } = ac.signal; + const allowStaleAborted = aborted && options.allowStaleOnFetchAbort; + const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection; + const noDelete = allowStale || options.noDeleteOnFetchRejection; + const bf = p; + if (this.#valList[index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. + const del = !noDelete || bf.__staleWhileFetching === undefined; + if (del) { + this.#delete(k, 'fetch'); + } + else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index] = bf.__staleWhileFetching; + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true; + } + return bf.__staleWhileFetching; + } + else if (bf.__returned === bf) { + throw er; + } + }; + const pcall = (res, rej) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts); + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej); + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if (!options.ignoreFetchAbort || + options.allowStaleOnFetchAbort) { + res(undefined); + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true); + } + } + }); + }; + if (options.status) + options.status.fetchDispatched = true; + const p = new Promise(pcall).then(cb, eb); + const bf = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }); + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }); + index = this.#keyMap.get(k); + } + else { + this.#valList[index] = bf; + } + return bf; + } + #isBackgroundFetch(p) { + if (!this.#hasFetchMethod) + return false; + const b = p; + return (!!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC); + } + async fetch(k, fetchOptions = {}) { + const { + // get options + allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions; + if (!this.#hasFetchMethod) { + if (status) + status.fetch = 'get'; + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }); + } + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + }; + let index = this.#keyMap.get(k); + if (index === undefined) { + if (status) + status.fetch = 'miss'; + const p = this.#backgroundFetch(k, index, options, context); + return (p.__returned = p); + } + else { + // in cache, maybe already fetching + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + const stale = allowStale && v.__staleWhileFetching !== undefined; + if (status) { + status.fetch = 'inflight'; + if (stale) + status.returnedStale = true; + } + return stale ? v.__staleWhileFetching : (v.__returned = v); + } + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. + const isStale = this.#isStale(index); + if (!forceRefresh && !isStale) { + if (status) + status.fetch = 'hit'; + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + if (status) + this.#statusTTL(status, index); + return v; + } + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context); + const hasStale = p.__staleWhileFetching !== undefined; + const staleVal = hasStale && allowStale; + if (status) { + status.fetch = isStale ? 'stale' : 'refresh'; + if (staleVal && isStale) + status.returnedStale = true; + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p); + } + } + async forceFetch(k, fetchOptions = {}) { + const v = await this.fetch(k, fetchOptions); + if (v === undefined) + throw new Error('fetch() returned undefined'); + return v; + } + memo(k, memoOptions = {}) { + const memoMethod = this.#memoMethod; + if (!memoMethod) { + throw new Error('no memoMethod provided to constructor'); + } + const { context, forceRefresh, ...options } = memoOptions; + const v = this.get(k, options); + if (!forceRefresh && v !== undefined) + return v; + const vv = memoMethod(k, v, { + options, + context, + }); + this.set(k, vv, options); + return vv; + } + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. 
+ * + * If the key is not found, get() will return `undefined`. + */ + get(k, getOptions = {}) { + const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions; + const index = this.#keyMap.get(k); + if (index !== undefined) { + const value = this.#valList[index]; + const fetching = this.#isBackgroundFetch(value); + if (status) + this.#statusTTL(status, index); + if (this.#isStale(index)) { + if (status) + status.get = 'stale'; + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.#delete(k, 'expire'); + } + if (status && allowStale) + status.returnedStale = true; + return allowStale ? value : undefined; + } + else { + if (status && + allowStale && + value.__staleWhileFetching !== undefined) { + status.returnedStale = true; + } + return allowStale ? value.__staleWhileFetching : undefined; + } + } + else { + if (status) + status.get = 'hit'; + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. + // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching; + } + this.#moveToTail(index); + if (updateAgeOnGet) { + this.#updateItemAge(index); + } + return value; + } + } + else if (status) { + status.get = 'miss'; + } + } + #connect(p, n) { + this.#prev[n] = p; + this.#next[p] = n; + } + #moveToTail(index) { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + this.#connect(this.#prev[index], this.#next[index]); + } + this.#connect(this.#tail, index); + this.#tail = index; + } + } + /** + * Deletes a key out of the cache. + * + * Returns true if the key was deleted, false otherwise. + */ + delete(k) { + return this.#delete(k, 'delete'); + } + #delete(k, reason) { + let deleted = false; + if (this.#size !== 0) { + const index = this.#keyMap.get(k); + if (index !== undefined) { + deleted = true; + if (this.#size === 1) { + this.#clear(reason); + } + else { + this.#removeItemSize(index); + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, reason); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, reason]); + } + } + this.#keyMap.delete(k); + this.#keyList[index] = undefined; + this.#valList[index] = undefined; + if (index === this.#tail) { + this.#tail = this.#prev[index]; + } + else if (index === this.#head) { + this.#head = this.#next[index]; + } + else { + const pi = this.#prev[index]; + this.#next[pi] = this.#next[index]; + const ni = this.#next[index]; + this.#prev[ni] = this.#prev[index]; + } + this.#size--; + this.#free.push(index); + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + return deleted; + } + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + return this.#clear('delete'); + } + #clear(reason) { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index]; + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')); + } + else { + const k = this.#keyList[index]; + if (this.#hasDispose) { + this.#dispose?.(v, k, reason); + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, reason]); + } + } + } + this.#keyMap.clear(); + this.#valList.fill(undefined); + this.#keyList.fill(undefined); + if (this.#ttls && this.#starts) { + this.#ttls.fill(0); + this.#starts.fill(0); + } + if (this.#sizes) { + this.#sizes.fill(0); + } + this.#head = 0; + this.#tail = 0; + this.#free.length = 0; + this.#calculatedSize = 0; + this.#size = 0; + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed; + let task; + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task); + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js new file mode 100644 index 0000000000000..07dd8fc3c59d8 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js @@ -0,0 +1,2 @@ +var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). 
You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if 
specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache}; +//# sourceMappingURL=index.min.js.map diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/package.json new file mode 100644 index 0000000000000..3dbc1ca591c05 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json b/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json new file mode 100644 index 0000000000000..4953bdf4a7a35 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json @@ -0,0 +1,113 @@ +{ + "name": "lru-cache", + "description": "A cache object that deletes the least-recently-used items.", + "version": "11.2.1", + "author": "Isaac Z. Schlueter ", + "keywords": [ + "mru", + "lru", + "cache" + ], + "sideEffects": false, + "scripts": { + "build": "npm run prepare", + "prepare": "tshy && bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write .", + "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "tshy": { + "exports": { + ".": "./src/index.ts", + "./min": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.min.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.min.js" + } + } + } + }, + "repository": { + "type": "git", + "url": "git://github.com/isaacs/node-lru-cache.git" + }, + "devDependencies": { + "@types/node": "^24.3.0", + "benchmark": "^2.1.4", + "esbuild": "^0.25.9", + "marked": "^4.2.12", + "mkdirp": "^3.0.1", + "prettier": "^3.6.2", + "tap": "^21.1.0", + "tshy": "^3.0.2", + "typedoc": "^0.28.12" + }, + "license": "ISC", + "files": [ + "dist" + ], + "engines": { + "node": "20 || >=22" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tap": { + "node-arg": [ + "--expose-gc" + ], + "plugin": [ + "@tapjs/clock" + ] + }, + "exports": { + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + }, + "./min": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.min.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.min.js" + } + } + }, + "type": "module", + "module": "./dist/esm/index.js" +} diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/LICENSE b/node_modules/@npmcli/package-json/node_modules/minimatch/LICENSE new file mode 100644 index 0000000000000..1493534e60dce --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/minimatch/LICENSE 
@@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js new file mode 100644 index 0000000000000..5fc86bbd0116c --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js @@ -0,0 +1,14 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.assertValidPattern = void 0; +const MAX_PATTERN_LENGTH = 1024 * 64; +const assertValidPattern = (pattern) => { + if (typeof pattern !== 'string') { + throw new TypeError('invalid pattern'); + } + if (pattern.length > MAX_PATTERN_LENGTH) { + throw new TypeError('pattern is too long'); + } +}; +exports.assertValidPattern = assertValidPattern; +//# sourceMappingURL=assert-valid-pattern.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/ast.js new file mode 100644 index 0000000000000..7b2109625eaeb --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/ast.js @@ -0,0 +1,592 @@ +"use strict"; +// parse a single path portion +Object.defineProperty(exports, "__esModule", { value: true }); +exports.AST = void 0; +const brace_expressions_js_1 = require("./brace-expressions.js"); +const unescape_js_1 = require("./unescape.js"); +const types = new Set(['!', '?', '+', '*', '@']); +const isExtglobType = (c) => types.has(c); +// Patterns that get prepended to bind to the start of either the +// entire string, or just a single path portion, to prevent dots +// and/or traversal patterns, when needed. +// Exts don't need the ^ or / bit, because the root binds that already. +const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))'; +const startNoDot = '(?!\\.)'; +// characters that indicate a start of pattern needs the "no dots" bit, +// because a dot *might* be matched. ( is not in the list, because in +// the case of a child extglob, it will handle the prevention itself. +const addPatternStart = new Set(['[', '.']); +// cases where traversal is A-OK, no dot prevention needed +const justDots = new Set(['..', '.']); +const reSpecials = new Set('().*{}+?[]^$\\!'); +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// any single thing other than / +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// use + when we need to ensure that *something* matches, because the * is +// the only thing in the path portion. 
+const starNoEmpty = qmark + '+?'; +// remove the \ chars that we added if we end up doing a nonmagic compare +// const deslash = (s: string) => s.replace(/\\(.)/g, '$1') +class AST { + type; + #root; + #hasMagic; + #uflag = false; + #parts = []; + #parent; + #parentIndex; + #negs; + #filledNegs = false; + #options; + #toString; + // set to true if it's an extglob with no children + // (which really means one child of '') + #emptyExt = false; + constructor(type, parent, options = {}) { + this.type = type; + // extglobs are inherently magical + if (type) + this.#hasMagic = true; + this.#parent = parent; + this.#root = this.#parent ? this.#parent.#root : this; + this.#options = this.#root === this ? options : this.#root.#options; + this.#negs = this.#root === this ? [] : this.#root.#negs; + if (type === '!' && !this.#root.#filledNegs) + this.#negs.push(this); + this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0; + } + get hasMagic() { + /* c8 ignore start */ + if (this.#hasMagic !== undefined) + return this.#hasMagic; + /* c8 ignore stop */ + for (const p of this.#parts) { + if (typeof p === 'string') + continue; + if (p.type || p.hasMagic) + return (this.#hasMagic = true); + } + // note: will be undefined until we generate the regexp src and find out + return this.#hasMagic; + } + // reconstructs the pattern + toString() { + if (this.#toString !== undefined) + return this.#toString; + if (!this.type) { + return (this.#toString = this.#parts.map(p => String(p)).join('')); + } + else { + return (this.#toString = + this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')'); + } + } + #fillNegs() { + /* c8 ignore start */ + if (this !== this.#root) + throw new Error('should only call on root'); + if (this.#filledNegs) + return this; + /* c8 ignore stop */ + // call toString() once to fill this out + this.toString(); + this.#filledNegs = true; + let n; + while ((n = this.#negs.pop())) { + if (n.type !== '!') + continue; + // walk up the tree, appending everthing that comes AFTER parentIndex + let p = n; + let pp = p.#parent; + while (pp) { + for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) { + for (const part of n.#parts) { + /* c8 ignore start */ + if (typeof part === 'string') { + throw new Error('string part in extglob AST??'); + } + /* c8 ignore stop */ + part.copyIn(pp.#parts[i]); + } + } + p = pp; + pp = p.#parent; + } + } + return this; + } + push(...parts) { + for (const p of parts) { + if (p === '') + continue; + /* c8 ignore start */ + if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) { + throw new Error('invalid part: ' + p); + } + /* c8 ignore stop */ + this.#parts.push(p); + } + } + toJSON() { + const ret = this.type === null + ? this.#parts.slice().map(p => (typeof p === 'string' ? 
p : p.toJSON())) + : [this.type, ...this.#parts.map(p => p.toJSON())]; + if (this.isStart() && !this.type) + ret.unshift([]); + if (this.isEnd() && + (this === this.#root || + (this.#root.#filledNegs && this.#parent?.type === '!'))) { + ret.push({}); + } + return ret; + } + isStart() { + if (this.#root === this) + return true; + // if (this.type) return !!this.#parent?.isStart() + if (!this.#parent?.isStart()) + return false; + if (this.#parentIndex === 0) + return true; + // if everything AHEAD of this is a negation, then it's still the "start" + const p = this.#parent; + for (let i = 0; i < this.#parentIndex; i++) { + const pp = p.#parts[i]; + if (!(pp instanceof AST && pp.type === '!')) { + return false; + } + } + return true; + } + isEnd() { + if (this.#root === this) + return true; + if (this.#parent?.type === '!') + return true; + if (!this.#parent?.isEnd()) + return false; + if (!this.type) + return this.#parent?.isEnd(); + // if not root, it'll always have a parent + /* c8 ignore start */ + const pl = this.#parent ? this.#parent.#parts.length : 0; + /* c8 ignore stop */ + return this.#parentIndex === pl - 1; + } + copyIn(part) { + if (typeof part === 'string') + this.push(part); + else + this.push(part.clone(this)); + } + clone(parent) { + const c = new AST(this.type, parent); + for (const p of this.#parts) { + c.copyIn(p); + } + return c; + } + static #parseAST(str, ast, pos, opt) { + let escaping = false; + let inBrace = false; + let braceStart = -1; + let braceNeg = false; + if (ast.type === null) { + // outside of a extglob, append until we find a start + let i = pos; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') { + ast.push(acc); + acc = ''; + const ext = new AST(c, ast); + i = AST.#parseAST(str, ext, i, opt); + ast.push(ext); + continue; + } + acc += c; + } + ast.push(acc); + return i; + } + // some kind of extglob, pos is at the ( + // find the next | or ) + let i = pos + 1; + let part = new AST(null, ast); + const parts = []; + let acc = ''; + while (i < str.length) { + const c = str.charAt(i++); + // still accumulate escapes at this point, but we do ignore + // starts that are escaped + if (escaping || c === '\\') { + escaping = !escaping; + acc += c; + continue; + } + if (inBrace) { + if (i === braceStart + 1) { + if (c === '^' || c === '!') { + braceNeg = true; + } + } + else if (c === ']' && !(i === braceStart + 2 && braceNeg)) { + inBrace = false; + } + acc += c; + continue; + } + else if (c === '[') { + inBrace = true; + braceStart = i; + braceNeg = false; + acc += c; + continue; + } + if (isExtglobType(c) && str.charAt(i) === '(') { + part.push(acc); + acc = ''; + const ext = new AST(c, part); + part.push(ext); + i = AST.#parseAST(str, ext, i, opt); + continue; + } + if (c === '|') { + part.push(acc); + acc = ''; + parts.push(part); + part = new AST(null, ast); + continue; + } + if (c === ')') { + if (acc === '' && ast.#parts.length === 0) { + ast.#emptyExt = true; + } + 
part.push(acc); + acc = ''; + ast.push(...parts, part); + return i; + } + acc += c; + } + // unfinished extglob + // if we got here, it was a malformed extglob! not an extglob, but + // maybe something else in there. + ast.type = null; + ast.#hasMagic = undefined; + ast.#parts = [str.substring(pos - 1)]; + return i; + } + static fromGlob(pattern, options = {}) { + const ast = new AST(null, undefined, options); + AST.#parseAST(pattern, ast, 0, options); + return ast; + } + // returns the regular expression if there's magic, or the unescaped + // string if not. + toMMPattern() { + // should only be called on root + /* c8 ignore start */ + if (this !== this.#root) + return this.#root.toMMPattern(); + /* c8 ignore stop */ + const glob = this.toString(); + const [re, body, hasMagic, uflag] = this.toRegExpSource(); + // if we're in nocase mode, and not nocaseMagicOnly, then we do + // still need a regular expression if we have to case-insensitively + // match capital/lowercase characters. + const anyMagic = hasMagic || + this.#hasMagic || + (this.#options.nocase && + !this.#options.nocaseMagicOnly && + glob.toUpperCase() !== glob.toLowerCase()); + if (!anyMagic) { + return body; + } + const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : ''); + return Object.assign(new RegExp(`^${re}$`, flags), { + _src: re, + _glob: glob, + }); + } + get options() { + return this.#options; + } + // returns the string match, the regexp source, whether there's magic + // in the regexp (so a regular expression is required) and whether or + // not the uflag is needed for the regular expression (for posix classes) + // TODO: instead of injecting the start/end at this point, just return + // the BODY of the regexp, along with the start/end portions suitable + // for binding the start/end in either a joined full-path makeRe context + // (where we bind to (^|/), or a standalone matchPart context (where + // we bind to ^, and not /). Otherwise slashes get duped! + // + // In part-matching mode, the start is: + // - if not isStart: nothing + // - if traversal possible, but not allowed: ^(?!\.\.?$) + // - if dots allowed or not possible: ^ + // - if dots possible and not allowed: ^(?!\.) + // end is: + // - if not isEnd(): nothing + // - else: $ + // + // In full-path matching mode, we put the slash at the START of the + // pattern, so start is: + // - if first pattern: same as part-matching mode + // - if not isStart(): nothing + // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/)) + // - if dots allowed or not possible: / + // - if dots possible and not allowed: /(?!\.) + // end is: + // - if last pattern, same as part-matching mode + // - else nothing + // + // Always put the (?:$|/) on negated tails, though, because that has to be + // there to bind the end of the negated pattern portion, and it's easier to + // just stick it in now rather than try to inject it later in the middle of + // the pattern. + // + // We can just always return the same end, and leave it up to the caller + // to know whether it's going to be used joined or in parts. + // And, if the start is adjusted slightly, can do the same there: + // - if not isStart: nothing + // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$) + // - if dots allowed or not possible: (?:/|^) + // - if dots possible and not allowed: (?:/|^)(?!\.) + // + // But it's better to have a simpler binding without a conditional, for + // performance, so probably better to return both start options. 
+ // + // Then the caller just ignores the end if it's not the first pattern, + // and the start always gets applied. + // + // But that's always going to be $ if it's the ending pattern, or nothing, + // so the caller can just attach $ at the end of the pattern when building. + // + // So the todo is: + // - better detect what kind of start is needed + // - return both flavors of starting pattern + // - attach $ at the end of the pattern when creating the actual RegExp + // + // Ah, but wait, no, that all only applies to the root when the first pattern + // is not an extglob. If the first pattern IS an extglob, then we need all + // that dot prevention biz to live in the extglob portions, because eg + // +(*|.x*) can match .xy but not .yx. + // + // So, return the two flavors if it's #root and the first child is not an + // AST, otherwise leave it to the child AST to handle it, and there, + // use the (?:^|/) style of start binding. + // + // Even simplified further: + // - Since the start for a join is eg /(?!\.) and the start for a part + // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root + // or start or whatever) and prepend ^ or / at the Regexp construction. + toRegExpSource(allowDot) { + const dot = allowDot ?? !!this.#options.dot; + if (this.#root === this) + this.#fillNegs(); + if (!this.type) { + const noEmpty = this.isStart() && this.isEnd(); + const src = this.#parts + .map(p => { + const [re, _, hasMagic, uflag] = typeof p === 'string' + ? AST.#parseGlob(p, this.#hasMagic, noEmpty) + : p.toRegExpSource(allowDot); + this.#hasMagic = this.#hasMagic || hasMagic; + this.#uflag = this.#uflag || uflag; + return re; + }) + .join(''); + let start = ''; + if (this.isStart()) { + if (typeof this.#parts[0] === 'string') { + // this is the string that will match the start of the pattern, + // so we need to protect against dots and such. + // '.' and '..' cannot match unless the pattern is that exactly, + // even if it starts with . or dot:true is set. + const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]); + if (!dotTravAllowed) { + const aps = addPatternStart; + // check if we have a possibility of matching . or .., + // and prevent that. + const needNoTrav = + // dots are allowed, and the pattern starts with [ or . + (dot && aps.has(src.charAt(0))) || + // the pattern starts with \., and then [ or . + (src.startsWith('\\.') && aps.has(src.charAt(2))) || + // the pattern starts with \.\., and then [ or . + (src.startsWith('\\.\\.') && aps.has(src.charAt(4))); + // no need to prevent dots if it can't match a dot, or if a + // sub-pattern will be preventing it anyway. + const needNoDot = !dot && !allowDot && aps.has(src.charAt(0)); + start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : ''; + } + } + } + // append the "end of path portion" pattern to negation tails + let end = ''; + if (this.isEnd() && + this.#root.#filledNegs && + this.#parent?.type === '!') { + end = '(?:$|\\/)'; + } + const final = start + src + end; + return [ + final, + (0, unescape_js_1.unescape)(src), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + // We need to calculate the body *twice* if it's a repeat pattern + // at the start, once in nodot mode, then again in dot mode, so a + // pattern like *(?) can match 'x.y' + const repeated = this.type === '*' || this.type === '+'; + // some kind of extglob + const start = this.type === '!' ? 
'(?:(?!(?:' : '(?:'; + let body = this.#partsToRegExp(dot); + if (this.isStart() && this.isEnd() && !body && this.type !== '!') { + // invalid extglob, has to at least be *something* present, if it's + // the entire path portion. + const s = this.toString(); + this.#parts = [s]; + this.type = null; + this.#hasMagic = undefined; + return [s, (0, unescape_js_1.unescape)(this.toString()), false, false]; + } + // XXX abstract out this map method + let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot + ? '' + : this.#partsToRegExp(true); + if (bodyDotAllowed === body) { + bodyDotAllowed = ''; + } + if (bodyDotAllowed) { + body = `(?:${body})(?:${bodyDotAllowed})*?`; + } + // an empty !() is exactly equivalent to a starNoEmpty + let final = ''; + if (this.type === '!' && this.#emptyExt) { + final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty; + } + else { + const close = this.type === '!' + ? // !() must match something,but !(x) can match '' + '))' + + (this.isStart() && !dot && !allowDot ? startNoDot : '') + + star + + ')' + : this.type === '@' + ? ')' + : this.type === '?' + ? ')?' + : this.type === '+' && bodyDotAllowed + ? ')' + : this.type === '*' && bodyDotAllowed + ? `)?` + : `)${this.type}`; + final = start + body + close; + } + return [ + final, + (0, unescape_js_1.unescape)(body), + (this.#hasMagic = !!this.#hasMagic), + this.#uflag, + ]; + } + #partsToRegExp(dot) { + return this.#parts + .map(p => { + // extglob ASTs should only contain parent ASTs + /* c8 ignore start */ + if (typeof p === 'string') { + throw new Error('string type in extglob ast??'); + } + /* c8 ignore stop */ + // can ignore hasMagic, because extglobs are already always magic + const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot); + this.#uflag = this.#uflag || uflag; + return re; + }) + .filter(p => !(this.isStart() && this.isEnd()) || !!p) + .join('|'); + } + static #parseGlob(glob, hasMagic, noEmpty = false) { + let escaping = false; + let re = ''; + let uflag = false; + for (let i = 0; i < glob.length; i++) { + const c = glob.charAt(i); + if (escaping) { + escaping = false; + re += (reSpecials.has(c) ? 
'\\' : '') + c; + continue; + } + if (c === '\\') { + if (i === glob.length - 1) { + re += '\\\\'; + } + else { + escaping = true; + } + continue; + } + if (c === '[') { + const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i); + if (consumed) { + re += src; + uflag = uflag || needUflag; + i += consumed - 1; + hasMagic = hasMagic || magic; + continue; + } + } + if (c === '*') { + if (noEmpty && glob === '*') + re += starNoEmpty; + else + re += star; + hasMagic = true; + continue; + } + if (c === '?') { + re += qmark; + hasMagic = true; + continue; + } + re += regExpEscape(c); + } + return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag]; + } +} +exports.AST = AST; +//# sourceMappingURL=ast.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/brace-expressions.js new file mode 100644 index 0000000000000..0e13eefc4cfee --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/brace-expressions.js @@ -0,0 +1,152 @@ +"use strict"; +// translate the various posix character classes into unicode properties +// this works across all unicode locales +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parseClass = void 0; +// { : [, /u flag required, negated] +const posixClasses = { + '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true], + '[:alpha:]': ['\\p{L}\\p{Nl}', true], + '[:ascii:]': ['\\x' + '00-\\x' + '7f', false], + '[:blank:]': ['\\p{Zs}\\t', true], + '[:cntrl:]': ['\\p{Cc}', true], + '[:digit:]': ['\\p{Nd}', true], + '[:graph:]': ['\\p{Z}\\p{C}', true, true], + '[:lower:]': ['\\p{Ll}', true], + '[:print:]': ['\\p{C}', true], + '[:punct:]': ['\\p{P}', true], + '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true], + '[:upper:]': ['\\p{Lu}', true], + '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true], + '[:xdigit:]': ['A-Fa-f0-9', false], +}; +// only need to escape a few things inside of brace expressions +// escapes: [ \ ] - +const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&'); +// escape all regexp magic characters +const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +// everything has already been escaped, we just have to join +const rangesToString = (ranges) => ranges.join(''); +// takes a glob string at a posix brace expression, and returns +// an equivalent regular expression source, and boolean indicating +// whether the /u flag needs to be applied, and the number of chars +// consumed to parse the character class. +// This also removes out of order ranges, and returns ($.) if the +// entire class just no good. +const parseClass = (glob, position) => { + const pos = position; + /* c8 ignore start */ + if (glob.charAt(pos) !== '[') { + throw new Error('not in a brace expression'); + } + /* c8 ignore stop */ + const ranges = []; + const negs = []; + let i = pos + 1; + let sawStart = false; + let uflag = false; + let escaping = false; + let negate = false; + let endPos = pos; + let rangeStart = ''; + WHILE: while (i < glob.length) { + const c = glob.charAt(i); + if ((c === '!' 
|| c === '^') && i === pos + 1) { + negate = true; + i++; + continue; + } + if (c === ']' && sawStart && !escaping) { + endPos = i + 1; + break; + } + sawStart = true; + if (c === '\\') { + if (!escaping) { + escaping = true; + i++; + continue; + } + // escaped \ char, fall through and treat like normal char + } + if (c === '[' && !escaping) { + // either a posix class, a collation equivalent, or just a [ + for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) { + if (glob.startsWith(cls, i)) { + // invalid, [a-[] is fine, but not [a-[:alpha]] + if (rangeStart) { + return ['$.', false, glob.length - pos, true]; + } + i += cls.length; + if (neg) + negs.push(unip); + else + ranges.push(unip); + uflag = uflag || u; + continue WHILE; + } + } + } + // now it's just a normal character, effectively + escaping = false; + if (rangeStart) { + // throw this range away if it's not valid, but others + // can still match. + if (c > rangeStart) { + ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c)); + } + else if (c === rangeStart) { + ranges.push(braceEscape(c)); + } + rangeStart = ''; + i++; + continue; + } + // now might be the start of a range. + // can be either c-d or c-] or c] or c] at this point + if (glob.startsWith('-]', i + 1)) { + ranges.push(braceEscape(c + '-')); + i += 2; + continue; + } + if (glob.startsWith('-', i + 1)) { + rangeStart = c; + i += 2; + continue; + } + // not the start of a range, just a single character + ranges.push(braceEscape(c)); + i++; + } + if (endPos < i) { + // didn't see the end of the class, not a valid class, + // but might still be valid as a literal match. + return ['', false, 0, false]; + } + // if we got no ranges and no negates, then we have a range that + // cannot possibly match anything, and that poisons the whole glob + if (!ranges.length && !negs.length) { + return ['$.', false, glob.length - pos, true]; + } + // if we got one positive range, and it's a single character, then that's + // not actually a magic pattern, it's just that one literal character. + // we should not treat that as "magic", we should just return the literal + // character. [_] is a perfectly valid way to escape glob magic chars. + if (negs.length === 0 && + ranges.length === 1 && + /^\\?.$/.test(ranges[0]) && + !negate) { + const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0]; + return [regexpEscape(r), false, endPos - pos, false]; + } + const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']'; + const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']'; + const comb = ranges.length && negs.length + ? '(' + sranges + '|' + snegs + ')' + : ranges.length + ? sranges + : snegs; + return [comb, uflag, endPos - pos, true]; +}; +exports.parseClass = parseClass; +//# sourceMappingURL=brace-expressions.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/escape.js new file mode 100644 index 0000000000000..02a4f8a8e0a58 --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/escape.js @@ -0,0 +1,22 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.escape = void 0; +/** + * Escape all magic characters in a glob pattern. 
+ * + * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape} + * option is used, then characters are escaped by wrapping in `[]`, because + * a magic character wrapped in a character class can only be satisfied by + * that exact character. In this mode, `\` is _not_ escaped, because it is + * not interpreted as a magic character, but instead as a path separator. + */ +const escape = (s, { windowsPathsNoEscape = false, } = {}) => { + // don't need to escape +@! because we escape the parens + // that make those magic, and escaping ! as [!] isn't valid, + // because [!]] is a valid glob class meaning not ']'. + return windowsPathsNoEscape + ? s.replace(/[?*()[\]]/g, '[$&]') + : s.replace(/[?*()[\]\\]/g, '\\$&'); +}; +exports.escape = escape; +//# sourceMappingURL=escape.js.map \ No newline at end of file diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/index.js new file mode 100644 index 0000000000000..f58fb8616aa9a --- /dev/null +++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/index.js @@ -0,0 +1,1014 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0; +const brace_expansion_1 = require("@isaacs/brace-expansion"); +const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js"); +const ast_js_1 = require("./ast.js"); +const escape_js_1 = require("./escape.js"); +const unescape_js_1 = require("./unescape.js"); +const minimatch = (p, pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // shortcut: comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + return false; + } + return new Minimatch(pattern, options).match(p); +}; +exports.minimatch = minimatch; +// Optimized checking for the most common glob patterns. +const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/; +const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext); +const starDotExtTestDot = (ext) => (f) => f.endsWith(ext); +const starDotExtTestNocase = (ext) => { + ext = ext.toLowerCase(); + return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext); +}; +const starDotExtTestNocaseDot = (ext) => { + ext = ext.toLowerCase(); + return (f) => f.toLowerCase().endsWith(ext); +}; +const starDotStarRE = /^\*+\.\*+$/; +const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.'); +const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.'); +const dotStarRE = /^\.\*+$/; +const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.'); +const starRE = /^\*+$/; +const starTest = (f) => f.length !== 0 && !f.startsWith('.'); +const starTestDot = (f) => f.length !== 0 && f !== '.' 
&& f !== '..'; +const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/; +const qmarksTestNocase = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestNocaseDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + if (!ext) + return noext; + ext = ext.toLowerCase(); + return (f) => noext(f) && f.toLowerCase().endsWith(ext); +}; +const qmarksTestDot = ([$0, ext = '']) => { + const noext = qmarksTestNoExtDot([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTest = ([$0, ext = '']) => { + const noext = qmarksTestNoExt([$0]); + return !ext ? noext : (f) => noext(f) && f.endsWith(ext); +}; +const qmarksTestNoExt = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && !f.startsWith('.'); +}; +const qmarksTestNoExtDot = ([$0]) => { + const len = $0.length; + return (f) => f.length === len && f !== '.' && f !== '..'; +}; +/* c8 ignore start */ +const defaultPlatform = (typeof process === 'object' && process + ? (typeof process.env === 'object' && + process.env && + process.env.__MINIMATCH_TESTING_PLATFORM__) || + process.platform + : 'posix'); +const path = { + win32: { sep: '\\' }, + posix: { sep: '/' }, +}; +/* c8 ignore stop */ +exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep; +exports.minimatch.sep = exports.sep; +exports.GLOBSTAR = Symbol('globstar **'); +exports.minimatch.GLOBSTAR = exports.GLOBSTAR; +// any single thing other than / +// don't need to escape / when using new RegExp() +const qmark = '[^/]'; +// * => any number of characters +const star = qmark + '*?'; +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?'; +// not a ^ or / followed by a dot, +// followed by anything, any number of times. 
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?'; +const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options); +exports.filter = filter; +exports.minimatch.filter = exports.filter; +const ext = (a, b = {}) => Object.assign({}, a, b); +const defaults = (def) => { + if (!def || typeof def !== 'object' || !Object.keys(def).length) { + return exports.minimatch; + } + const orig = exports.minimatch; + const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options)); + return Object.assign(m, { + Minimatch: class Minimatch extends orig.Minimatch { + constructor(pattern, options = {}) { + super(pattern, ext(def, options)); + } + static defaults(options) { + return orig.defaults(ext(def, options)).Minimatch; + } + }, + AST: class AST extends orig.AST { + /* c8 ignore start */ + constructor(type, parent, options = {}) { + super(type, parent, ext(def, options)); + } + /* c8 ignore stop */ + static fromGlob(pattern, options = {}) { + return orig.AST.fromGlob(pattern, ext(def, options)); + } + }, + unescape: (s, options = {}) => orig.unescape(s, ext(def, options)), + escape: (s, options = {}) => orig.escape(s, ext(def, options)), + filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)), + defaults: (options) => orig.defaults(ext(def, options)), + makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)), + braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)), + match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)), + sep: orig.sep, + GLOBSTAR: exports.GLOBSTAR, + }); +}; +exports.defaults = defaults; +exports.minimatch.defaults = exports.defaults; +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +const braceExpand = (pattern, options = {}) => { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + // Thanks to Yeting Li for + // improving this regexp to avoid a ReDOS vulnerability. + if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { + // shortcut. no need to expand. + return [pattern]; + } + return (0, brace_expansion_1.expand)(pattern); +}; +exports.braceExpand = braceExpand; +exports.minimatch.braceExpand = exports.braceExpand; +// parse a component of the expanded set. +// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. 
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe(); +exports.makeRe = makeRe; +exports.minimatch.makeRe = exports.makeRe; +const match = (list, pattern, options = {}) => { + const mm = new Minimatch(pattern, options); + list = list.filter(f => mm.match(f)); + if (mm.options.nonull && !list.length) { + list.push(pattern); + } + return list; +}; +exports.match = match; +exports.minimatch.match = exports.match; +// replace stuff like \* with * +const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/; +const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&'); +class Minimatch { + options; + set; + pattern; + windowsPathsNoEscape; + nonegate; + negate; + comment; + empty; + preserveMultipleSlashes; + partial; + globSet; + globParts; + nocase; + isWindows; + platform; + windowsNoMagicRoot; + regexp; + constructor(pattern, options = {}) { + (0, assert_valid_pattern_js_1.assertValidPattern)(pattern); + options = options || {}; + this.options = options; + this.pattern = pattern; + this.platform = options.platform || defaultPlatform; + this.isWindows = this.platform === 'win32'; + this.windowsPathsNoEscape = + !!options.windowsPathsNoEscape || options.allowWindowsEscape === false; + if (this.windowsPathsNoEscape) { + this.pattern = this.pattern.replace(/\\/g, '/'); + } + this.preserveMultipleSlashes = !!options.preserveMultipleSlashes; + this.regexp = null; + this.negate = false; + this.nonegate = !!options.nonegate; + this.comment = false; + this.empty = false; + this.partial = !!options.partial; + this.nocase = !!this.options.nocase; + this.windowsNoMagicRoot = + options.windowsNoMagicRoot !== undefined + ? options.windowsNoMagicRoot + : !!(this.isWindows && this.nocase); + this.globSet = []; + this.globParts = []; + this.set = []; + // make the set of regexps etc. + this.make(); + } + hasMagic() { + if (this.options.magicalBraces && this.set.length > 1) { + return true; + } + for (const pattern of this.set) { + for (const part of pattern) { + if (typeof part !== 'string') + return true; + } + } + return false; + } + debug(..._) { } + make() { + const pattern = this.pattern; + const options = this.options; + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true; + return; + } + if (!pattern) { + this.empty = true; + return; + } + // step 1: figure out negation, etc. + this.parseNegate(); + // step 2: expand braces + this.globSet = [...new Set(this.braceExpand())]; + if (options.debug) { + this.debug = (...args) => console.error(...args); + } + this.debug(this.pattern, this.globSet); + // step 3: now we have a set, so turn each one into a series of + // path-portion matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + // + // First, we preprocess to make the glob pattern sets a bit simpler + // and deduped. There are some perf-killing patterns that can cause + // problems with a glob walk, but we can simplify them down a bit. + const rawGlobParts = this.globSet.map(s => this.slashSplit(s)); + this.globParts = this.preprocess(rawGlobParts); + this.debug(this.pattern, this.globParts); + // glob --> regexps + let set = this.globParts.map((s, _, __) => { + if (this.isWindows && this.windowsNoMagicRoot) { + // check if it's a drive or unc path. + const isUNC = s[0] === '' && + s[1] === '' && + (s[2] === '?' 
|| !globMagic.test(s[2])) && + !globMagic.test(s[3]); + const isDrive = /^[a-z]:/i.test(s[0]); + if (isUNC) { + return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))]; + } + else if (isDrive) { + return [s[0], ...s.slice(1).map(ss => this.parse(ss))]; + } + } + return s.map(ss => this.parse(ss)); + }); + this.debug(this.pattern, set); + // filter out everything that didn't compile properly. + this.set = set.filter(s => s.indexOf(false) === -1); + // do not treat the ? in UNC paths as magic + if (this.isWindows) { + for (let i = 0; i < this.set.length; i++) { + const p = this.set[i]; + if (p[0] === '' && + p[1] === '' && + this.globParts[i][2] === '?' && + typeof p[3] === 'string' && + /^[a-z]:$/i.test(p[3])) { + p[2] = '?'; + } + } + } + this.debug(this.pattern, this.set); + } + // various transforms to equivalent pattern sets that are + // faster to process in a filesystem walk. The goal is to + // eliminate what we can, and push all ** patterns as far + // to the right as possible, even if it increases the number + // of patterns that we have to process. + preprocess(globParts) { + // if we're not in globstar mode, then turn all ** into * + if (this.options.noglobstar) { + for (let i = 0; i < globParts.length; i++) { + for (let j = 0; j < globParts[i].length; j++) { + if (globParts[i][j] === '**') { + globParts[i][j] = '*'; + } + } + } + } + const { optimizationLevel = 1 } = this.options; + if (optimizationLevel >= 2) { + // aggressive optimization for the purpose of fs walking + globParts = this.firstPhasePreProcess(globParts); + globParts = this.secondPhasePreProcess(globParts); + } + else if (optimizationLevel >= 1) { + // just basic optimizations to remove some .. parts + globParts = this.levelOneOptimize(globParts); + } + else { + // just collapse multiple ** portions into one + globParts = this.adjascentGlobstarOptimize(globParts); + } + return globParts; + } + // just get rid of adjascent ** portions + adjascentGlobstarOptimize(globParts) { + return globParts.map(parts => { + let gs = -1; + while (-1 !== (gs = parts.indexOf('**', gs + 1))) { + let i = gs; + while (parts[i + 1] === '**') { + i++; + } + if (i !== gs) { + parts.splice(gs, i - gs); + } + } + return parts; + }); + } + // get rid of adjascent ** and resolve .. portions + levelOneOptimize(globParts) { + return globParts.map(parts => { + parts = parts.reduce((set, part) => { + const prev = set[set.length - 1]; + if (part === '**' && prev === '**') { + return set; + } + if (part === '..') { + if (prev && prev !== '..' && prev !== '.' && prev !== '**') { + set.pop(); + return set; + } + } + set.push(part); + return set; + }, []); + return parts.length === 0 ? [''] : parts; + }); + } + levelTwoFileOptimize(parts) { + if (!Array.isArray(parts)) { + parts = this.slashSplit(parts); + } + let didSomething = false; + do { + didSomething = false; + //
<pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
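+    // Illustrative sketch (not part of the upstream minimatch source),
+    // assuming the Minimatch class exported by this module:
+    //   const mm = new Minimatch('/*/b/*/d')
+    //   mm.match('/a/b', true)  // true  -- file ran out, remaining pattern still pending
+    //   mm.match('/a/b')        // false -- a full match would still need the .../d tail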
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
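+                    // Illustrative (not part of the upstream source), via the
+                    // top-level minimatch() export:
+                    //   minimatch('a/b/.c', 'a/**')                // false
+                    //   minimatch('a/b/.c', 'a/**', { dot: true }) // true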
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
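+        // Illustrative (not part of the upstream source):
+        //   minimatch('a/b/', 'a/*')    // true  -- trailing '' accepted by the rule below
+        //   minimatch('a/b/', 'a/b/*')  // false -- '' cannot satisfy the final * segment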
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
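+        // Illustrative (not part of the upstream source), using the top-level
+        // makeRe() export that wraps this method:
+        //   makeRe('a/**/b').test('a/b')      // true
+        //   makeRe('a/**/b').test('a/x/y/b')  // true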
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
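+        // Illustrative (not part of the upstream source):
+        //   win32:     '//host/share/x' -> ['', '', 'host', 'share', 'x']
+        //   otherwise: 'a//b///c'       -> ['a', 'b', 'c']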
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
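+// Illustrative usage (not part of the upstream file):
+//   unescape('\\*[?]')                              // => '*?'
+//   unescape('[*]', { windowsPathsNoEscape: true }) // => '*'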
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/ast.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 0000000000000..2d2bced6533de
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of an extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
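+// Illustrative (not part of the upstream file):
+//   parseClass('[a-c]', 0)       // => ['[a-c]', false, 5, true]
+//   parseClass('[[:alpha:]]', 0) // => ['[\\p{L}\\p{Nl}]', true, 11, true]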
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/escape.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
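+// Illustrative usage (not part of the upstream file):
+//   escape('a*b?')                                 // => 'a\\*b\\?'
+//   escape('a*b?', { windowsPathsNoEscape: true }) // => 'a[*]b[?]'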
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 0000000000000..790d6c02a2f22
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1001 @@
+import { expand } from '@isaacs/brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/ylcnfrht> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
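As an aside (not part of the vendored file above), a minimal sketch of how this ESM entry point is typically consumed; the patterns and paths are made up for illustration:

    import { minimatch, Minimatch, filter } from 'minimatch'

    // one-off match: * stays within a single path portion and skips dotfiles
    minimatch('src/index.js', 'src/*.js')       // true
    minimatch('src/.hidden.js', 'src/*.js')     // false without { dot: true }

    // reusable matcher: parse the pattern once, match many paths
    const mm = new Minimatch('**/*.test.js', { nocase: true })
    mm.match('lib/Foo.TEST.js')                 // true

    // filter() builds a predicate for Array.prototype.filter
    const onlyJs = ['a.js', 'b.ts'].filter(filter('*.js'))   // ['a.js']
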
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/unescape.js b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
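For reference (not part of the patch), the two branches above behave roughly like this; the inputs are illustrative:

    import { unescape } from 'minimatch'

    // default: both [x] character-class escapes and backslash escapes are removed
    unescape('\\*.[j]s')                                  // '*.js'

    // windowsPathsNoEscape: only the [x] form is unescaped, \ stays a separator
    unescape('[*].js', { windowsPathsNoEscape: true })    // '*.js'
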
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/package.json b/node_modules/@npmcli/package-json/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..bfa2423f50b5e
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/minimatch/package.json
@@ -0,0 +1,79 @@
+{
+  "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "10.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.2",
+    "@types/node": "^24.0.0",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.3.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js",
+  "dependencies": {
+    "@isaacs/brace-expansion": "^5.0.0"
+  }
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/package-json/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/package-json/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..d409b7f1becfc
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const isWindows = process.platform === 'win32'
+
+const { URL } = require('node:url')
+// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
+const path = isWindows ? require('node:path/win32') : require('node:path')
+const { homedir } = require('node:os')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const validatePackageName = require('validate-npm-package-name')
+const { log } = require('proc-log')
+
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFileType = /[.](?:tgz|tar.gz|tar)$/i
+const isPortNumber = /:[0-9]+(\/|$)/i
+const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
+const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  // eslint-disable-next-line max-len
+  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+function isFileSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  if (spec.toLowerCase().startsWith('file:')) {
+    return true
+  }
+  if (isWindows) {
+    return isWindowsFile.test(spec)
+  }
+  // We never hit this in windows tests, obviously
+  /* istanbul ignore next */
+  return isPosixFile.test(spec)
+}
+
+function isAliasSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  return spec.toLowerCase().startsWith('npm:')
+}
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.name = name
+  }
+
+  if (!where) {
+    where = process.cwd()
+  }
+
+  if (isFileSpec(spec)) {
+    return fromFile(res, where)
+  } else if (isAliasSpec(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+class Result {
+  constructor (opts) {
+    this.type = opts.type
+    this.registry = opts.registry
+    this.where = opts.where
+    if (opts.raw == null) {
+      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
+    } else {
+      this.raw = opts.raw
+    }
+    this.name = undefined
+    this.escapedName = undefined
+    this.scope = undefined
+    this.rawSpec = opts.rawSpec || ''
+    this.saveSpec = opts.saveSpec
+    this.fetchSpec = opts.fetchSpec
+    if (opts.name) {
+      this.setName(opts.name)
+    }
+    this.gitRange = opts.gitRange
+    this.gitCommittish = opts.gitCommittish
+    this.gitSubdir = opts.gitSubdir
+    this.hosted = opts.hosted
+  }
+
+  // TODO move this to a getter/setter in a semver major
+  setName (name) {
+    const valid = validatePackageName(name)
+    if (!valid.validForOldPackages) {
+      throw invalidPackageName(name, valid, this.raw)
+    }
+
+    this.name = name
+    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+    this.escapedName = name.replace('/', '%2f')
+    return this
+  }
+
+  toString () {
+    const full = []
+    if (this.name != null && this.name !== '') {
+      full.push(this.name)
+    }
+    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+    if (spec != null && spec !== '') {
+      full.push(spec)
+    }
+    return full.length ? full.join('@') : this.raw
+  }
+
+  toJSON () {
+    const result = Object.assign({}, this)
+    delete result.hosted
+    return result
+  }
+}
+
+// sets res.gitCommittish, res.gitRange, and res.gitSubdir
+function setGitAttrs (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+}
+
+// Taken from: EncodePathChars and lookup_table in src/node_url.cc
+// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
+// encodeURI mangles windows paths. We can't use it to encode paths.
+// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
+// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
+const encodedPathChars = new Map([
+  ['\0', '%00'],
+  ['\t', '%09'],
+  ['\n', '%0A'],
+  ['\r', '%0D'],
+  [' ', '%20'],
+  ['"', '%22'],
+  ['#', '%23'],
+  ['%', '%25'],
+  ['?', '%3F'],
+  ['[', '%5B'],
+  ['\\', isWindows ? '/' : '%5C'],
+  [']', '%5D'],
+  ['^', '%5E'],
+  ['|', '%7C'],
+  ['~', '%7E'],
+])
+
+function pathToFileURL (str) {
+  let result = ''
+  for (let i = 0; i < str.length; i++) {
+    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
+  }
+  if (result.startsWith('file:')) {
+    return result
+  }
+  return `file:${result}`
+}
+
+function fromFile (res, where) {
+  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  let rawSpec = pathToFileURL(res.rawSpec)
+
+  if (rawSpec.startsWith('file:/')) {
+    // XXX backwards compatibility lack of compliance with RFC 8089
+
+    // turn file://path into file:/path
+    if (/^file:\/\/[^/]/.test(rawSpec)) {
+      rawSpec = `file:/${rawSpec.slice(5)}`
+    }
+
+    // turn file:/../path into file:../path
+    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
+      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    }
+  }
+
+  let resolvedUrl
+  let specUrl
+  try {
+    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
+    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
+    specUrl = new URL(rawSpec)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8089')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawSpec.slice(5))) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
+  res.saveSpec = res.saveSpec.split('\\').join('/')
+  // Ignoring because this only happens in windows
+  /* istanbul ignore next */
+  if (res.saveSpec.startsWith('file://')) {
+    // normalization of \\win32\root paths can cause a double / which we don't want
+    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
+  }
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  setGitAttrs(res, hosted.committish)
+  return res
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function fromURL (res) {
+  let rawSpec = res.rawSpec
+  res.saveSpec = rawSpec
+  if (rawSpec.startsWith('git+ssh:')) {
+    // git ssh specifiers are overloaded to also use scp-style git
+    // specifiers, so we have to parse those out and treat them special.
+    // They are NOT true URIs, so we can't hand them to URL.
+
+    // This regex looks for things that look like:
+    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+    // ...and various combinations. The username in the beginning is *required*.
+    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+    // Filter out all-number "usernames" which are really port numbers
+    // They can either be :1234 :1234/ or :1234/path but not :12abc
+    if (matched && !matched[1].match(isPortNumber)) {
+      res.type = 'git'
+      setGitAttrs(res, matched[2])
+      res.fetchSpec = matched[1]
+      return res
+    }
+  } else if (rawSpec.startsWith('git+file://')) {
+    // URL can't handle windows paths
+    rawSpec = rawSpec.replace(/\\/g, '/')
+  }
+  const parsedUrl = new URL(rawSpec)
+  // check the protocol, and then see if it's git or not
+  switch (parsedUrl.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:':
+      res.type = 'git'
+      setGitAttrs(res, parsedUrl.hash.slice(1))
+      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
+        // URL can't handle drive letters on windows file paths, the host can't contain a :
+        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
+      } else {
+        parsedUrl.hash = ''
+        res.fetchSpec = parsedUrl.toString()
+      }
+      if (res.fetchSpec.startsWith('git+')) {
+        res.fetchSpec = res.fetchSpec.slice(4)
+      }
+      break
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  if (!subSpec.name) {
+    throw new Error('aliases must have a name')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components as we save based on the fetched
+  // version, not on the argument so this can't compute that.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
+
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/package-json/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..db6ce9074cfa2
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/npm-package-arg/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "npm-package-arg",
+  "version": "13.0.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^9.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^6.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.5",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.5",
+    "publish": true
+  }
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000..985c78df7a9bf
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,219 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then we use that. Otherwise, we get the highest precedence version prior to the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available but skip this if it should be avoided, in that case we have to try a little harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !restricted[defaultVer] &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    const ok = mani &&
+      isBefore(verTimes, defaultVer, time) &&
+      engineOk(mani, npmVersion, nodeVersion) &&
+      !mani.deprecated &&
+      !staged[defaultVer]
+    if (ok) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[vera]
+      const notrestrb = !restricted[verb]
+      const notstagea = !staged[vera]
+      const notstageb = !staged[verb]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
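
A quick orientation sketch (not part of the vendored file) showing the two main paths through `pickManifest` above: dist-tag lookup for tag specs versus the sorted-entries path for ranges. The packument and versions below are hypothetical.

```js
// Illustrative only.
const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',
  'dist-tags': { latest: '2.0.0' },
  versions: {
    '1.0.0': { name: 'example-pkg', version: '1.0.0' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' },
  },
}

// '^1.0.0' is a range: the default tag (2.0.0) doesn't satisfy it,
// so the entries are filtered and sorted -> 1.0.0
const mani = pickManifest(packument, '^1.0.0')

// 'latest' is a tag: resolved directly through dist-tags -> 2.0.0
const latest = pickManifest(packument, 'latest')
```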
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 0000000000000..f1ca18ed32108
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "npm-pick-manifest",
+  "version": "11.0.1",
+  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+  "main": "./lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "coverage": "tap",
+    "lint": "npm run eslint",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run eslint -- --fix",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-pick-manifest.git"
+  },
+  "keywords": [
+    "npm",
+    "semver",
+    "package manager"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "npm-install-checks": "^7.1.0",
+    "npm-normalize-package-bin": "^4.0.0",
+    "npm-package-arg": "^13.0.0",
+    "semver": "^7.3.5"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": true
+  }
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md b/node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md
new file mode 100644
index 0000000000000..c5402b9577a8c
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md
@@ -0,0 +1,55 @@
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js
new file mode 100644
index 0000000000000..af3e7595f577f
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js
@@ -0,0 +1,2016 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
+const lru_cache_1 = require("lru-cache");
+const node_path_1 = require("node:path");
+const node_url_1 = require("node:url");
+const fs_1 = require("fs");
+const actualFS = __importStar(require("node:fs"));
+const realpathSync = fs_1.realpathSync.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+const promises_1 = require("node:fs/promises");
+const minipass_1 = require("minipass");
+const defaultFS = {
+    lstatSync: fs_1.lstatSync,
+    readdir: fs_1.readdir,
+    readdirSync: fs_1.readdirSync,
+    readlinkSync: fs_1.readlinkSync,
+    realpathSync,
+    promises: {
+        lstat: promises_1.lstat,
+        readdir: promises_1.readdir,
+        readlink: promises_1.readlink,
+        realpath: promises_1.realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+class ResolveCache extends lru_cache_1.LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+exports.ResolveCache = ResolveCache;
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+class ChildrenCache extends lru_cache_1.LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+exports.ChildrenCache = ChildrenCache;
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+exports.PathBase = PathBase;
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return node_path_1.win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+exports.PathWin32 = PathWin32;
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+exports.PathPosix = PathPosix;
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = (0, node_url_1.fileURLToPath)(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
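+    // An illustrative sketch of the cached resolve() described above; the
+    // `scurry` variable here is a hypothetical PathScurry instance:
+    //
+    //   const scurry = new PathScurry(process.cwd());
+    //   scurry.resolve('src', '..', 'lib', 'index.js'); // walks the Path tree once
+    //   scurry.resolve('src', '..', 'lib', 'index.js'); // answered from #resolveCache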
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
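+    // relative(), relativePosix(), basename() and dirname() above all accept
+    // either a string (resolved against the cwd) or a Path entry; a small
+    // hypothetical sketch:
+    //
+    //   scurry.basename('src/index.js');  // 'index.js'
+    //   scurry.dirname('src/index.js');   // absolute path of the 'src' directory
+    //   scurry.relative(scurry.cwd.resolve('src/index.js')); // 'src/index.js' ('src\index.js' on Windows)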
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
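+    // An illustrative walk() sketch: `filter` decides which entries appear in
+    // the results, while `walkFilter` decides which directories are descended
+    // into (both optional; `scurry` is hypothetical):
+    //
+    //   const files = await scurry.walk('src', {
+    //     withFileTypes: false,                         // collect path strings
+    //     filter: e => !e.name.startsWith('.'),         // drop dotfiles from results
+    //     walkFilter: e => !e.isNamed('node_modules'),  // do not descend here
+    //   });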
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
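+    // Because [Symbol.asyncIterator] delegates to iterate() (which wraps
+    // stream()), a PathScurry instance can be consumed directly with
+    // `for await`; an illustrative sketch:
+    //
+    //   for await (const entry of scurry) {
+    //     if (entry.isFile()) console.log(entry.fullpath());
+    //   }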
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+exports.PathScurryBase = PathScurryBase;
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get separator as the root, and driveFromUNC will return \
+        // In that case, mount \ on the root from the cwd.
+        return node_path_1.win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+exports.PathScurryWin32 = PathScurryWin32;
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+exports.PathScurryPosix = PathScurryPosix;
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+exports.PathScurryDarwin = PathScurryDarwin;
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
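+// An illustrative consumer sketch (names here are hypothetical): most callers
+// only need the platform-default export selected above.
+//
+//   const { PathScurry } = require('path-scurry');
+//   const scurry = new PathScurry();           // rooted at process.cwd()
+//   const entries = scurry.readdirSync('.');   // Path entries for the cwd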
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js
new file mode 100644
index 0000000000000..42be74c37ad9d
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js
@@ -0,0 +1,1981 @@
+import { LRUCache } from 'lru-cache';
+import { posix, win32 } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
+import * as actualFS from 'node:fs';
+const realpathSync = rps.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
+import { Minipass } from 'minipass';
+const defaultFS = {
+    lstatSync,
+    readdir: readdirCB,
+    readdirSync,
+    readlinkSync,
+    realpathSync,
+    promises: {
+        lstat,
+        readdir,
+        readlink,
+        realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
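+// As a worked example of how these bits combine: a path that lstat() reports
+// as a symlink ends up with #type = IFLNK | LSTAT_CALLED (0b0000_0010_1010);
+// masking with IFMT recovers IFLNK, and because none of the ENOCHILD bits are
+// set, canReaddir() may still attempt to read through the link target.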
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+export class ResolveCache extends LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+export class ChildrenCache extends LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+export class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
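+    // For example, on Windows a root whose fullpath() is 'C:\' becomes
+    // '//?/C:/' here, so a child at 'C:\Users\me' reports a fullpathPosix()
+    // of '//?/C:/Users/me'; on posix systems the two forms are identical.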
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
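+    // Illustrative example: 'caf\u00e9' (composed) and 'cafe\u0301' (decomposed)
+    // normalize to the same NFKD string, so isNamed() treats them as the same
+    // name, while a direct comparison of the raw `.name` strings would not.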
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+export class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+export class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+export class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = fileURLToPath(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+export class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get separator as the root, and driveFromUNC will return \
+        // In that case, mount \ on the root from the cwd.
+        return win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+export const PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
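
For orientation, a minimal sketch of how the PathScurry walker vendored above is typically used. The option names (`withFileTypes`, `filter`) and methods (`resolve()`, `walk()`, `fullpath()`) come from the file itself; the example paths and filter predicate are illustrative:

```js
import { PathScurry } from 'path-scurry'

const pw = new PathScurry(process.cwd())

// resolve() caches resolved Path objects, so repeated lookups are cheap
const indexPath = pw.resolve('lib', '../lib/index.js')

// walk() yields Path entries (withFileTypes: true by default);
// filter controls which entries are emitted, walkFilter which dirs are descended into
const entries = await pw.walk({ filter: e => e.name !== 'node_modules' })
for (const entry of entries) {
  if (entry.isFile()) {
    console.log(entry.fullpath())
  }
}
```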
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/package.json b/node_modules/@npmcli/package-json/node_modules/path-scurry/package.json
new file mode 100644
index 0000000000000..c3cb39dced545
--- /dev/null
+++ b/node_modules/@npmcli/package-json/node_modules/path-scurry/package.json
@@ -0,0 +1,88 @@
+{
+  "name": "path-scurry",
+  "version": "2.0.0",
+  "description": "walk paths fast and efficiently",
+  "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
+  "main": "./dist/commonjs/index.js",
+  "type": "module",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "license": "BlueOak-1.0.0",
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
+    "bench": "bash ./scripts/bench.sh"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@nodelib/fs.walk": "^2.0.0",
+    "@types/node": "^20.14.10",
+    "mkdirp": "^3.0.0",
+    "prettier": "^3.3.2",
+    "rimraf": "^5.0.8",
+    "tap": "^20.0.3",
+    "ts-node": "^10.9.2",
+    "tshy": "^2.0.1",
+    "typedoc": "^0.26.3",
+    "typescript": "^5.5.3"
+  },
+  "tap": {
+    "typecheck": true
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/path-scurry"
+  },
+  "dependencies": {
+    "lru-cache": "^11.0.0",
+    "minipass": "^7.1.2"
+  },
+  "tshy": {
+    "selfLink": false,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "types": "./dist/commonjs/index.d.ts",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/package-json/package.json
index 263d67ff3bc5b..46c39c22a1900 100644
--- a/node_modules/@npmcli/package-json/package.json
+++ b/node_modules/@npmcli/package-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/package-json",
-  "version": "6.2.0",
+  "version": "7.0.1",
   "description": "Programmatic API to update package.json",
   "keywords": [
     "npm",
@@ -29,9 +29,9 @@
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
   "dependencies": {
-    "@npmcli/git": "^6.0.0",
-    "glob": "^10.2.2",
-    "hosted-git-info": "^8.0.0",
+    "@npmcli/git": "^7.0.0",
+    "glob": "^11.0.3",
+    "hosted-git-info": "^9.0.0",
     "json-parse-even-better-errors": "^4.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.5.3",
@@ -39,17 +39,15 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/template-oss": "4.23.6",
-    "read-package-json": "^7.0.0",
-    "read-package-json-fast": "^4.0.0",
+    "@npmcli/template-oss": "4.25.0",
     "tap": "^16.0.1"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.6",
+    "version": "4.25.0",
     "publish": "true"
   },
   "tap": {
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE
new file mode 100644
index 0000000000000..6a1f3708f6d70
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE
@@ -0,0 +1,18 @@
+ISC License
+
+Copyright GitHub Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js
new file mode 100644
index 0000000000000..7eff602d73a3f
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js
@@ -0,0 +1,286 @@
+const { readFile, writeFile } = require('node:fs/promises')
+const { resolve } = require('node:path')
+const parseJSON = require('json-parse-even-better-errors')
+
+const updateDeps = require('./update-dependencies.js')
+const updateScripts = require('./update-scripts.js')
+const updateWorkspaces = require('./update-workspaces.js')
+const normalize = require('./normalize.js')
+const { read, parse } = require('./read-package.js')
+const { packageSort } = require('./sort.js')
+
+// a list of handy specialized helper functions that take
+// care of special cases that are handled by the npm cli
+const knownSteps = new Set([
+  updateDeps,
+  updateScripts,
+  updateWorkspaces,
+])
+
+// list of all keys that are handled by "knownSteps" helpers
+const knownKeys = new Set([
+  ...updateDeps.knownKeys,
+  'scripts',
+  'workspaces',
+])
+
+class PackageJson {
+  static normalizeSteps = Object.freeze([
+    '_id',
+    '_attributes',
+    'bundledDependencies',
+    'bundleDependencies',
+    'optionalDedupe',
+    'scripts',
+    'funding',
+    'bin',
+  ])
+
+  // npm pkg fix
+  static fixSteps = Object.freeze([
+    'binRefs',
+    'bundleDependencies',
+    'bundleDependenciesFalse',
+    'fixName',
+    'fixNameField',
+    'fixVersionField',
+    'fixRepositoryField',
+    'fixDependencies',
+    'devDependencies',
+    'scriptpath',
+  ])
+
+  static prepareSteps = Object.freeze([
+    '_id',
+    '_attributes',
+    'bundledDependencies',
+    'bundleDependencies',
+    'bundleDependenciesDeleteFalse',
+    'gypfile',
+    'serverjs',
+    'scriptpath',
+    'authors',
+    'readme',
+    'mans',
+    'binDir',
+    'gitHead',
+    'fillTypes',
+    'normalizeData',
+    'binRefs',
+  ])
+
+  // create a new empty package.json, so we can save at the given path even
+  // though we didn't start from a parsed file
+  static async create (path, opts = {}) {
+    const p = new PackageJson()
+    await p.create(path)
+    if (opts.data) {
+      return p.update(opts.data)
+    }
+    return p
+  }
+
+  // Loads a package.json at given path and JSON parses
+  static async load (path, opts = {}) {
+    const p = new PackageJson()
+    // Avoid try/catch if we aren't going to create
+    if (!opts.create) {
+      return p.load(path)
+    }
+
+    try {
+      return await p.load(path)
+    } catch (err) {
+      if (!err.message.startsWith('Could not read package.json')) {
+        throw err
+      }
+      return await p.create(path)
+    }
+  }
+
+  // npm pkg fix
+  static async fix (path, opts) {
+    const p = new PackageJson()
+    await p.load(path, true)
+    return p.fix(opts)
+  }
+
+  // read-package-json compatible behavior
+  static async prepare (path, opts) {
+    const p = new PackageJson()
+    await p.load(path, true)
+    return p.prepare(opts)
+  }
+
+  // read-package-json-fast compatible behavior
+  static async normalize (path, opts) {
+    const p = new PackageJson()
+    await p.load(path)
+    return p.normalize(opts)
+  }
+
+  #path
+  #manifest
+  #readFileContent = ''
+  #canSave = true
+
+  // Load content from given path
+  async load (path, parseIndex) {
+    this.#path = path
+    let parseErr
+    try {
+      this.#readFileContent = await read(this.filename)
+    } catch (err) {
+      if (!parseIndex) {
+        throw err
+      }
+      parseErr = err
+    }
+
+    if (parseErr) {
+      const indexFile = resolve(this.path, 'index.js')
+      let indexFileContent
+      try {
+        indexFileContent = await readFile(indexFile, 'utf8')
+      } catch (err) {
+        throw parseErr
+      }
+      try {
+        this.fromComment(indexFileContent)
+      } catch (err) {
+        throw parseErr
+      }
+      // This wasn't a package.json so prevent saving
+      this.#canSave = false
+      return this
+    }
+
+    return this.fromJSON(this.#readFileContent)
+  }
+
+  // Load data from a JSON string/buffer
+  fromJSON (data) {
+    this.#manifest = parse(data)
+    return this
+  }
+
+  fromContent (data) {
+    this.#manifest = data
+    this.#canSave = false
+    return this
+  }
+
+  // Load data from a comment
+  // /**package { "name": "foo", "version": "1.2.3", ... } **/
+  fromComment (data) {
+    data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+    if (data.length < 2) {
+      throw new Error('File has no package in comments')
+    }
+    data = data[1]
+    data = data.split(/\*\*\/$/m)
+
+    if (data.length < 2) {
+      throw new Error('File has no package in comments')
+    }
+    data = data[0]
+    data = data.replace(/^\s*\*/mg, '')
+
+    this.#manifest = parseJSON(data)
+    return this
+  }
+
+  get content () {
+    return this.#manifest
+  }
+
+  get path () {
+    return this.#path
+  }
+
+  get filename () {
+    if (this.path) {
+      return resolve(this.path, 'package.json')
+    }
+    return undefined
+  }
+
+  create (path) {
+    this.#path = path
+    this.#manifest = {}
+    return this
+  }
+
+  // This should be the ONLY way to set content in the manifest
+  update (content) {
+    if (!this.content) {
+      throw new Error('Can not update without content.  Please `load` or `create`')
+    }
+
+    for (const step of knownSteps) {
+      this.#manifest = step({ content, originalContent: this.content })
+    }
+
+    // unknown properties will just be overwritten
+    for (const [key, value] of Object.entries(content)) {
+      if (!knownKeys.has(key)) {
+        this.content[key] = value
+      }
+    }
+
+    return this
+  }
+
+  async save ({ sort } = {}) {
+    if (!this.#canSave) {
+      throw new Error('No package.json to save to')
+    }
+    const {
+      [Symbol.for('indent')]: indent,
+      [Symbol.for('newline')]: newline,
+      ...rest
+    } = this.content
+
+    const format = indent === undefined ? '  ' : indent
+    const eol = newline === undefined ? '\n' : newline
+
+    const content = sort ? packageSort(rest) : rest
+
+    const fileContent = `${
+      JSON.stringify(content, null, format)
+    }\n`
+      .replace(/\n/g, eol)
+
+    if (fileContent.trim() !== this.#readFileContent.trim()) {
+      const written = await writeFile(this.filename, fileContent)
+      this.#readFileContent = fileContent
+      return written
+    }
+  }
+
+  async normalize (opts = {}) {
+    if (!opts.steps) {
+      opts.steps = this.constructor.normalizeSteps
+    }
+    await normalize(this, opts)
+    return this
+  }
+
+  async prepare (opts = {}) {
+    if (!opts.steps) {
+      opts.steps = this.constructor.prepareSteps
+    }
+    await normalize(this, opts)
+    return this
+  }
+
+  async fix (opts = {}) {
+    // This one is not overridable
+    opts.steps = this.constructor.fixSteps
+    await normalize(this, opts)
+    return this
+  }
+}
+
+module.exports = PackageJson
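
A short sketch of the PackageJson class defined above in use; `load()`, `update()`, and `save()` are the methods from this file, while the directory and field values are illustrative:

```js
const PackageJson = require('@npmcli/package-json')

async function setDescription (dir) {
  // load() reads and parses <dir>/package.json; { create: true } falls back
  // to an empty manifest when the file does not exist yet
  const pkg = await PackageJson.load(dir, { create: true })

  // update() routes known keys (dependencies, scripts, workspaces) through
  // the specialized steps and copies any other keys over verbatim
  pkg.update({ description: 'programmatic package.json edits' })

  // save() only writes when the serialized content actually changed
  await pkg.save()
}
```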
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js
new file mode 100644
index 0000000000000..79b0bafbcd3a4
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js
@@ -0,0 +1,257 @@
+// Originally normalize-package-data
+
+const url = require('node:url')
+const hostedGitInfo = require('hosted-git-info')
+const validateLicense = require('validate-npm-package-license')
+
+const typos = {
+  dependancies: 'dependencies',
+  dependecies: 'dependencies',
+  depdenencies: 'dependencies',
+  devEependencies: 'devDependencies',
+  depends: 'dependencies',
+  'dev-dependencies': 'devDependencies',
+  devDependences: 'devDependencies',
+  devDepenencies: 'devDependencies',
+  devdependencies: 'devDependencies',
+  repostitory: 'repository',
+  repo: 'repository',
+  prefereGlobal: 'preferGlobal',
+  hompage: 'homepage',
+  hampage: 'homepage',
+  autohr: 'author',
+  autor: 'author',
+  contributers: 'contributors',
+  publicationConfig: 'publishConfig',
+  script: 'scripts',
+}
+
+const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
+
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (description) {
+  // the first block of text before the first heading that isn't the first line heading
+  const lines = description.trim().split('\n')
+  let start = 0
+  // skip initial empty lines and lines that start with #
+  while (lines[start]?.trim().match(/^(#|$)/)) {
+    start++
+  }
+  let end = start + 1
+  // keep going till we get to the end or an empty line
+  while (end < lines.length && lines[end].trim()) {
+    end++
+  }
+  return lines.slice(start, end).join(' ').trim()
+}
+
+function stringifyPerson (person) {
+  if (typeof person !== 'string') {
+    const name = person.name || ''
+    const u = person.url || person.web
+    const wrappedUrl = u ? (' (' + u + ')') : ''
+    const e = person.email || person.mail
+    const wrappedEmail = e ? (' <' + e + '>') : ''
+    person = name + wrappedEmail + wrappedUrl
+  }
+  const matchedName = person.match(/^([^(<]+)/)
+  const matchedUrl = person.match(/\(([^()]+)\)/)
+  const matchedEmail = person.match(/<([^<>]+)>/)
+  const parsed = {}
+  if (matchedName?.[0].trim()) {
+    parsed.name = matchedName[0].trim()
+  }
+  if (matchedEmail) {
+    parsed.email = matchedEmail[1]
+  }
+  if (matchedUrl) {
+    parsed.url = matchedUrl[1]
+  }
+  return parsed
+}
+
+function normalizeData (data, changes) {
+  // fixDescriptionField
+  if (data.description && typeof data.description !== 'string') {
+    changes?.push(`'description' field should be a string`)
+    delete data.description
+  }
+  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
+    data.description = extractDescription(data.readme)
+  }
+  if (data.description === undefined) {
+    delete data.description
+  }
+  if (!data.description) {
+    changes?.push('No description')
+  }
+
+  // fixModulesField
+  if (data.modules) {
+    changes?.push(`modules field is deprecated`)
+    delete data.modules
+  }
+
+  // fixFilesField
+  const files = data.files
+  if (files && !Array.isArray(files)) {
+    changes?.push(`Invalid 'files' member`)
+    delete data.files
+  } else if (data.files) {
+    data.files = data.files.filter(function (file) {
+      if (!file || typeof file !== 'string') {
+        changes?.push(`Invalid filename in 'files' list: ${file}`)
+        return false
+      } else {
+        return true
+      }
+    })
+  }
+
+  // fixManField
+  if (data.man && typeof data.man === 'string') {
+    data.man = [data.man]
+  }
+
+  // fixBugsField
+  if (!data.bugs && data.repository?.url) {
+    const hosted = hostedGitInfo.fromUrl(data.repository.url)
+    if (hosted && hosted.bugs()) {
+      data.bugs = { url: hosted.bugs() }
+    }
+  } else if (data.bugs) {
+    if (typeof data.bugs === 'string') {
+      if (isEmail(data.bugs)) {
+        data.bugs = { email: data.bugs }
+        /* eslint-disable-next-line node/no-deprecated-api */
+      } else if (url.parse(data.bugs).protocol) {
+        data.bugs = { url: data.bugs }
+      } else {
+        changes?.push(`Bug string field must be url, email, or {email,url}`)
+      }
+    } else {
+      for (const k in data.bugs) {
+        if (['web', 'name'].includes(k)) {
+          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
+          data.bugs.url = data.bugs[k]
+          delete data.bugs[k]
+        }
+      }
+      const oldBugs = data.bugs
+      data.bugs = {}
+      if (oldBugs.url) {
+        /* eslint-disable-next-line node/no-deprecated-api */
+        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+          data.bugs.url = oldBugs.url
+        } else {
+          changes?.push('bugs.url field must be a string url. Deleted.')
+        }
+      }
+      if (oldBugs.email) {
+        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
+          data.bugs.email = oldBugs.email
+        } else {
+          changes?.push('bugs.email field must be a string email. Deleted.')
+        }
+      }
+    }
+    if (!data.bugs.email && !data.bugs.url) {
+      delete data.bugs
+      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
+    }
+  }
+  // fixKeywordsField
+  if (typeof data.keywords === 'string') {
+    data.keywords = data.keywords.split(/,\s+/)
+  }
+  if (data.keywords && !Array.isArray(data.keywords)) {
+    delete data.keywords
+    changes?.push(`keywords should be an array of strings`)
+  } else if (data.keywords) {
+    data.keywords = data.keywords.filter(function (kw) {
+      if (typeof kw !== 'string' || !kw) {
+        changes?.push(`keywords should be an array of strings`)
+        return false
+      } else {
+        return true
+      }
+    })
+  }
+  // fixBundleDependenciesField
+  const bdd = 'bundledDependencies'
+  const bd = 'bundleDependencies'
+  if (data[bdd] && !data[bd]) {
+    data[bd] = data[bdd]
+    delete data[bdd]
+  }
+  if (data[bd] && !Array.isArray(data[bd])) {
+    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
+    delete data[bd]
+  } else if (data[bd]) {
+    data[bd] = data[bd].filter(function (filtered) {
+      if (!filtered || typeof filtered !== 'string') {
+        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
+        return false
+      } else {
+        if (!data.dependencies) {
+          data.dependencies = {}
+        }
+        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
+          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
+          data.dependencies[filtered] = '*'
+        }
+        return true
+      }
+    })
+  }
+  // fixHomepageField
+  if (!data.homepage && data.repository && data.repository.url) {
+    const hosted = hostedGitInfo.fromUrl(data.repository.url)
+    if (hosted) {
+      data.homepage = hosted.docs()
+    }
+  }
+  if (data.homepage) {
+    if (typeof data.homepage !== 'string') {
+      changes?.push('homepage field must be a string url. Deleted.')
+      delete data.homepage
+    } else {
+      /* eslint-disable-next-line node/no-deprecated-api */
+      if (!url.parse(data.homepage).protocol) {
+        data.homepage = 'http://' + data.homepage
+      }
+    }
+  }
+  // fixReadmeField
+  if (!data.readme) {
+    changes?.push('No README data')
+    data.readme = 'ERROR: No README data found!'
+  }
+  // fixLicenseField
+  const license = data.license || data.licence
+  if (!license) {
+    changes?.push('No license field.')
+  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
+    changes?.push('license should be a valid SPDX license expression')
+  } else if (!validateLicense(license).validForNewPackages) {
+    changes?.push('license should be a valid SPDX license expression')
+  }
+  // fixPeople
+  if (data.author) {
+    data.author = stringifyPerson(data.author)
+  }
+  ['maintainers', 'contributors'].forEach(function (set) {
+    if (!Array.isArray(data[set])) {
+      return
+    }
+    data[set] = data[set].map(stringifyPerson)
+  })
+  // fixTypos
+  for (const d in typos) {
+    if (Object.prototype.hasOwnProperty.call(data, d)) {
+      changes?.push(`${d} should probably be ${typos[d]}.`)
+    }
+  }
+}
+
+module.exports = { normalizeData }
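
A small sketch of what the normalizeData() step above does to a manifest; the require path and input values are made up for illustration, the resulting transformations follow directly from the code:

```js
const { normalizeData } = require('./normalize-data.js')

const data = {
  name: 'example-pkg',
  bugs: 'https://github.com/example/example-pkg/issues',
  keywords: 'walk, paths',
  author: { name: 'Jane Doe', email: 'jane@example.com' },
}
const changes = []
normalizeData(data, changes)

// data.bugs     -> { url: 'https://github.com/example/example-pkg/issues' }
// data.keywords -> ['walk', 'paths']
// data.author   -> { name: 'Jane Doe', email: 'jane@example.com' }
// data.readme   -> 'ERROR: No README data found!'
// changes collects notes such as 'No description' and 'No license field.'
```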
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js
new file mode 100644
index 0000000000000..845f6753a9a00
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js
@@ -0,0 +1,601 @@
+const valid = require('semver/functions/valid')
+const clean = require('semver/functions/clean')
+const fs = require('node:fs/promises')
+const path = require('node:path')
+const { log } = require('proc-log')
+const moduleBuiltin = require('node:module')
+
+/**
+ * @type {import('hosted-git-info')}
+ */
+let _hostedGitInfo
+function lazyHostedGitInfo () {
+  if (!_hostedGitInfo) {
+    _hostedGitInfo = require('hosted-git-info')
+  }
+  return _hostedGitInfo
+}
+
+/**
+ * @type {import('glob').glob}
+ */
+let _glob
+function lazyLoadGlob () {
+  if (!_glob) {
+    _glob = require('glob').glob
+  }
+  return _glob
+}
+
+// used to be npm-normalize-package-bin
+function normalizePackageBin (pkg, changes) {
+  if (pkg.bin) {
+    if (typeof pkg.bin === 'string' && pkg.name) {
+      changes?.push('"bin" was converted to an object')
+      pkg.bin = { [pkg.name]: pkg.bin }
+    } else if (Array.isArray(pkg.bin)) {
+      changes?.push('"bin" was converted to an object')
+      pkg.bin = pkg.bin.reduce((acc, k) => {
+        acc[path.basename(k)] = k
+        return acc
+      }, {})
+    }
+    if (typeof pkg.bin === 'object') {
+      for (const binKey in pkg.bin) {
+        if (typeof pkg.bin[binKey] !== 'string') {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+        const base = path.basename(secureAndUnixifyPath(binKey))
+        if (!base) {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+
+        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
+
+        if (!binTarget) {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+
+        if (base !== binKey) {
+          delete pkg.bin[binKey]
+          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
+        }
+        if (binTarget !== pkg.bin[binKey]) {
+          changes?.push(`"bin[${base}]" script name was cleaned`)
+        }
+        pkg.bin[base] = binTarget
+      }
+
+      if (Object.keys(pkg.bin).length === 0) {
+        changes?.push('empty "bin" was removed')
+        delete pkg.bin
+      }
+
+      return pkg
+    }
+  }
+  delete pkg.bin
+}
+
+function normalizePackageMan (pkg, changes) {
+  if (pkg.man) {
+    const mans = []
+    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
+      if (typeof man !== 'string') {
+        changes?.push(`removed invalid "man [${man}]"`)
+      } else {
+        mans.push(secureAndUnixifyPath(man))
+      }
+    }
+
+    if (!mans.length) {
+      changes?.push('empty "man" was removed')
+    } else {
+      pkg.man = mans
+      return pkg
+    }
+  }
+  delete pkg.man
+}
+
+function isCorrectlyEncodedName (spec) {
+  return !spec.match(/[/@\s+%:]/) &&
+    spec === encodeURIComponent(spec)
+}
+
+function isValidScopedPackageName (spec) {
+  if (spec.charAt(0) !== '@') {
+    return false
+  }
+
+  const rest = spec.slice(1).split('/')
+  if (rest.length !== 2) {
+    return false
+  }
+
+  return rest[0] && rest[1] &&
+    rest[0] === encodeURIComponent(rest[0]) &&
+    rest[1] === encodeURIComponent(rest[1])
+}
+
+function unixifyPath (ref) {
+  return ref.replace(/\\|:/g, '/')
+}
+
+function secureAndUnixifyPath (ref) {
+  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
+  return secured.startsWith('./') ? '' : secured
+}
+
+// We don't want the `changes` array in here by default because this is a hot
+// path for parsing packuments during install.  So the calling method passes it
+// in if it wants to track changes.
+const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
+  if (!pkg.content) {
+    throw new Error('Can not normalize without content')
+  }
+  const data = pkg.content
+  const scripts = data.scripts || {}
+  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
+
+  // name and version are load bearing so we have to clean them up first
+  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
+    if (!data.name && !strict) {
+      changes?.push('Missing "name" field was set to an empty string')
+      data.name = ''
+    } else {
+      if (typeof data.name !== 'string') {
+        throw new Error('name field must be a string.')
+      }
+      if (!strict) {
+        const name = data.name.trim()
+        if (data.name !== name) {
+          changes?.push(`Whitespace was trimmed from "name"`)
+          data.name = name
+        }
+      }
+
+      if (data.name.startsWith('.') ||
+        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
+        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
+        data.name.toLowerCase() === 'node_modules' ||
+        data.name.toLowerCase() === 'favicon.ico') {
+        throw new Error('Invalid name: ' + JSON.stringify(data.name))
+      }
+    }
+  }
+
+  if (steps.includes('fixName')) {
+    // Check for conflicts with builtin modules
+    if (moduleBuiltin.builtinModules.includes(data.name)) {
+      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
+    }
+  }
+
+  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
+    // allow "loose" semver 1.0 versions in non-strict mode
+    // enforce strict semver 2.0 compliance in strict mode
+    const loose = !strict
+    if (!data.version) {
+      data.version = ''
+    } else {
+      if (!valid(data.version, loose)) {
+        throw new Error(`Invalid version: "${data.version}"`)
+      }
+      const version = clean(data.version, loose)
+      if (version !== data.version) {
+        changes?.push(`"version" was cleaned and set to "${version}"`)
+        data.version = version
+      }
+    }
+  }
+  // remove attributes that start with "_"
+  if (steps.includes('_attributes')) {
+    for (const key in data) {
+      if (key.startsWith('_')) {
+        changes?.push(`"${key}" was removed`)
+        delete pkg.content[key]
+      }
+    }
+  }
+
+  // build the "_id" attribute
+  if (steps.includes('_id')) {
+    if (data.name && data.version) {
+      changes?.push(`"_id" was set to ${pkgId}`)
+      data._id = pkgId
+    }
+  }
+
+  // fix bundledDependencies typo
+  // normalize bundleDependencies
+  if (steps.includes('bundledDependencies')) {
+    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
+      data.bundleDependencies = data.bundledDependencies
+    }
+    changes?.push(`Deleted incorrect "bundledDependencies"`)
+    delete data.bundledDependencies
+  }
+  // expand "bundleDependencies: true" or translate from object
+  if (steps.includes('bundleDependencies')) {
+    const bd = data.bundleDependencies
+    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
+      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
+      data.bundleDependencies = []
+    } else if (bd === true) {
+      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
+      data.bundleDependencies = Object.keys(data.dependencies || {})
+    } else if (bd && typeof bd === 'object') {
+      if (!Array.isArray(bd)) {
+        changes?.push(`"bundleDependencies" was changed from an object to an array`)
+        data.bundleDependencies = Object.keys(bd)
+      }
+    } else if ('bundleDependencies' in data) {
+      changes?.push(`"bundleDependencies" was removed`)
+      delete data.bundleDependencies
+    }
+  }
+
+  // it was once common practice to list deps both in optionalDependencies and
+  // in dependencies, to support npm versions that did not know about
+  // optionalDependencies.  This is no longer a relevant need, so duplicating
+  // the deps in two places is unnecessary and excessive.
+  if (steps.includes('optionalDedupe')) {
+    if (data.dependencies &&
+      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
+      for (const name in data.optionalDependencies) {
+        changes?.push(`optionalDependencies."${name}" was removed`)
+        delete data.dependencies[name]
+      }
+      if (!Object.keys(data.dependencies).length) {
+        changes?.push(`Empty "dependencies" was removed`)
+        delete data.dependencies
+      }
+    }
+  }
+
+  // add "install" attribute if any "*.gyp" files exist
+  if (steps.includes('gypfile')) {
+    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
+      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
+      if (files.length) {
+        scripts.install = 'node-gyp rebuild'
+        data.scripts = scripts
+        data.gypfile = true
+        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
+        changes?.push(`"gypfile" was set to "true"`)
+      }
+    }
+  }
+
+  // add "start" attribute if "server.js" exists
+  if (steps.includes('serverjs') && !scripts.start) {
+    try {
+      await fs.access(path.join(pkg.path, 'server.js'))
+      scripts.start = 'node server.js'
+      data.scripts = scripts
+      changes?.push('"scripts.start" was set to "node server.js"')
+    } catch {
+      // do nothing
+    }
+  }
+
+  // strip "node_modules/.bin" from scripts entries
+  // remove invalid scripts entries (non-strings)
+  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
+    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
+    if (typeof data.scripts === 'object') {
+      for (const name in data.scripts) {
+        if (typeof data.scripts[name] !== 'string') {
+          delete data.scripts[name]
+          changes?.push(`Invalid scripts."${name}" was removed`)
+        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
+          data.scripts[name] = data.scripts[name].replace(spre, '')
+          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
+        }
+      }
+    } else {
+      changes?.push(`Removed invalid "scripts"`)
+      delete data.scripts
+    }
+  }
+
+  if (steps.includes('funding')) {
+    if (data.funding && typeof data.funding === 'string') {
+      data.funding = { url: data.funding }
+      changes?.push(`"funding" was changed to an object with a url attribute`)
+    }
+  }
+
+  // populate "authors" attribute
+  if (steps.includes('authors') && !data.contributors) {
+    try {
+      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
+      const authors = authorData.split(/\r?\n/g)
+        .map(line => line.replace(/^\s*#.*$/, '').trim())
+        .filter(line => line)
+      data.contributors = authors
+      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
+    } catch {
+      // do nothing
+    }
+  }
+
+  // populate "readme" attribute
+  if (steps.includes('readme') && !data.readme) {
+    const mdre = /\.m?a?r?k?d?o?w?n?$/i
+    const files = await lazyLoadGlob()('{README,README.*}', {
+      cwd: pkg.path,
+      nocase: true,
+      mark: true,
+    })
+    let readmeFile
+    for (const file of files) {
+      // don't accept directories.
+      if (!file.endsWith(path.sep)) {
+        if (file.match(mdre)) {
+          readmeFile = file
+          break
+        }
+        if (file.endsWith('README')) {
+          readmeFile = file
+        }
+      }
+    }
+    if (readmeFile) {
+      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
+      data.readme = readmeData
+      data.readmeFilename = readmeFile
+      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
+      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
+    }
+    if (!data.readme) {
+      data.readme = 'ERROR: No README data found!'
+    }
+  }
+
+  // expand directories.man
+  if (steps.includes('mans')) {
+    if (data.directories?.man && !data.man) {
+      const manDir = secureAndUnixifyPath(data.directories.man)
+      const cwd = path.resolve(pkg.path, manDir)
+      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
+      data.man = files.map(man =>
+        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
+      )
+    }
+    normalizePackageMan(data, changes)
+  }
+
+  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
+    normalizePackageBin(data, changes)
+  }
+
+  // expand "directories.bin"
+  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
+    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
+    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
+    data.bin = bins.reduce((acc, binFile) => {
+      if (binFile && !binFile.startsWith('.')) {
+        const binName = path.basename(binFile)
+        acc[binName] = path.join(data.directories.bin, binFile)
+      }
+      return acc
+    }, {})
+    // *sigh*
+    normalizePackageBin(data, changes)
+  }
+
+  // populate "gitHead" attribute
+  if (steps.includes('gitHead') && !data.gitHead) {
+    const git = require('@npmcli/git')
+    const gitRoot = await git.find({ cwd: pkg.path, root })
+    let head
+    if (gitRoot) {
+      try {
+        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
+      } catch (err) {
+      // do nothing
+      }
+    }
+    let headData
+    if (head) {
+      if (head.startsWith('ref: ')) {
+        const headRef = head.replace(/^ref: /, '').trim()
+        const headFile = path.resolve(gitRoot, '.git', headRef)
+        try {
+          headData = await fs.readFile(headFile, 'utf8')
+          headData = headData.replace(/^ref: /, '').trim()
+        } catch (err) {
+          // do nothing
+        }
+        if (!headData) {
+          const packFile = path.resolve(gitRoot, '.git/packed-refs')
+          try {
+            let refs = await fs.readFile(packFile, 'utf8')
+            if (refs) {
+              refs = refs.split('\n')
+              for (let i = 0; i < refs.length; i++) {
+                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
+                if (match && match[2].trim() === headRef) {
+                  headData = match[1]
+                  break
+                }
+              }
+            }
+          } catch {
+            // do nothing
+          }
+        }
+      } else {
+        headData = head.trim()
+      }
+    }
+    if (headData) {
+      data.gitHead = headData
+    }
+  }
+
+  // populate "types" attribute
+  if (steps.includes('fillTypes')) {
+    const index = data.main || 'index.js'
+
+    if (typeof index !== 'string') {
+      throw new TypeError('The "main" attribute must be of type string.')
+    }
+
+    // TODO exports is much more complicated than this in verbose format
+    // We need to support for instance
+
+    // "exports": {
+    //   ".": [
+    //     {
+    //       "default": "./lib/npm.js"
+    //     },
+    //     "./lib/npm.js"
+    //   ],
+    //   "./package.json": "./package.json"
+    // },
+    // as well as conditional exports
+
+    // if (data.exports && typeof data.exports === 'string') {
+    //   index = data.exports
+    // }
+
+    // if (data.exports && data.exports['.']) {
+    //   index = data.exports['.']
+    //   if (typeof index !== 'string') {
+    //   }
+    // }
+    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
+    const dts = `./${extless}.d.ts`
+    const hasDTSFields = 'types' in data || 'typings' in data
+    if (!hasDTSFields) {
+      try {
+        await fs.access(path.join(pkg.path, dts))
+        data.types = dts.split(path.sep).join('/')
+      } catch {
+        // do nothing
+      }
+    }
+  }
+
+  // "normalizeData" from "read-package-json", which was just a call through to
+  // "normalize-package-data".  We only call the "fixer" functions because
+  // outside of that it was also clobbering _id (which we already conditionally
+  // do) and also adding the gypfile script (which we also already
+  // conditionally do)
+
+  // Some steps are isolated so we can do a limited subset of these in `fix`
+  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
+    if (data.repositories) {
+      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
+      data.repository = data.repositories[0]
+    }
+    if (data.repository) {
+      if (typeof data.repository === 'string') {
+        changes?.push('"repository" was changed from a string to an object')
+        data.repository = {
+          type: 'git',
+          url: data.repository,
+        }
+      }
+      if (data.repository.url) {
+        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
+        let r
+        if (hosted) {
+          if (hosted.getDefaultRepresentation() === 'shortcut') {
+            r = hosted.https()
+          } else {
+            r = hosted.toString()
+          }
+          if (r !== data.repository.url) {
+            changes?.push(`"repository.url" was normalized to "${r}"`)
+            data.repository.url = r
+          }
+        }
+      }
+    }
+  }
+
+  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
+    // peerDependencies?
+    // devDependencies is meaningless here, it's ignored on an installed package
+    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
+      if (data[type]) {
+        let secondWarning = true
+        if (typeof data[type] === 'string') {
+          changes?.push(`"${type}" was converted from a string into an object`)
+          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
+          secondWarning = false
+        }
+        if (Array.isArray(data[type])) {
+          if (secondWarning) {
+            changes?.push(`"${type}" was converted from an array into an object`)
+          }
+          const o = {}
+          for (const d of data[type]) {
+            if (typeof d === 'string') {
+              const dep = d.trim().split(/(:?[@\s><=])/)
+              const dn = dep.shift()
+              const dv = dep.join('').replace(/^@/, '').trim()
+              o[dn] = dv
+            }
+          }
+          data[type] = o
+        }
+      }
+    }
+    // normalize-package-data used to put optional dependencies BACK into
+    // dependencies here, we no longer do this
+
+    for (const deps of ['dependencies', 'devDependencies']) {
+      if (deps in data) {
+        if (!data[deps] || typeof data[deps] !== 'object') {
+          changes?.push(`Removed invalid "${deps}"`)
+          delete data[deps]
+        } else {
+          for (const d in data[deps]) {
+            const r = data[deps][d]
+            if (typeof r !== 'string') {
+              changes?.push(`Removed invalid "${deps}.${d}"`)
+              delete data[deps][d]
+            }
+            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
+            if (hosted && hosted !== data[deps][d]) {
+              changes?.push(`Normalized git reference to "${deps}.${d}"`)
+              data[deps][d] = hosted.toString()
+            }
+          }
+        }
+      }
+    }
+  }
+
+  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
+  if (steps.includes('normalizeData')) {
+    const { normalizeData } = require('./normalize-data.js')
+    normalizeData(data, changes)
+  }
+
+  // Warn if the bin references don't point to anything.  This might be better
+  // in normalize-package-data if it had access to the file path.
+  if (steps.includes('binRefs') && data.bin instanceof Object) {
+    for (const key in data.bin) {
+      try {
+        await fs.access(path.resolve(pkg.path, data.bin[key]))
+      } catch {
+        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
+        // XXX: should a future breaking change delete bin entries that cannot be accessed?
+      }
+    }
+  }
+}
+
+module.exports = normalize
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js
new file mode 100644
index 0000000000000..d6c86ce388e6c
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js
@@ -0,0 +1,39 @@
+// This is JUST the code needed to open a package.json file and parse it.
+// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
+
+const { readFile } = require('fs/promises')
+const parseJSON = require('json-parse-even-better-errors')
+
+async function read (filename) {
+  try {
+    const data = await readFile(filename, 'utf8')
+    return data
+  } catch (err) {
+    err.message = `Could not read package.json: ${err}`
+    throw err
+  }
+}
+
+function parse (data) {
+  try {
+    const content = parseJSON(data)
+    return content
+  } catch (err) {
+    err.message = `Invalid package.json: ${err}`
+    throw err
+  }
+}
+
+// This is what most external libs will use.
+// PackageJson will call read and parse separately
+async function readPackage (filename) {
+  const data = await read(filename)
+  const content = parse(data)
+  return content
+}
+
+module.exports = {
+  read,
+  parse,
+  readPackage,
+}
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js
new file mode 100644
index 0000000000000..0bd0d5199da58
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js
@@ -0,0 +1,101 @@
+/**
+ * arbitrary sort order for package.json largely pulled from:
+ * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
+ *
+ * cross checked with:
+ * https://github.com/npm/types/blob/main/types/index.d.ts#L104
+ * https://docs.npmjs.com/cli/configuring-npm/package-json
+ */
+function packageSort (json) {
+  const {
+    name,
+    version,
+    private: isPrivate,
+    description,
+    keywords,
+    homepage,
+    bugs,
+    repository,
+    funding,
+    license,
+    author,
+    maintainers,
+    contributors,
+    type,
+    imports,
+    exports,
+    main,
+    browser,
+    types,
+    bin,
+    man,
+    directories,
+    files,
+    workspaces,
+    scripts,
+    config,
+    dependencies,
+    devDependencies,
+    peerDependencies,
+    peerDependenciesMeta,
+    optionalDependencies,
+    bundledDependencies,
+    bundleDependencies,
+    engines,
+    os,
+    cpu,
+    publishConfig,
+    devEngines,
+    licenses,
+    overrides,
+    ...rest
+  } = json
+
+  return {
+    ...(typeof name !== 'undefined' ? { name } : {}),
+    ...(typeof version !== 'undefined' ? { version } : {}),
+    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
+    ...(typeof description !== 'undefined' ? { description } : {}),
+    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
+    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
+    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
+    ...(typeof repository !== 'undefined' ? { repository } : {}),
+    ...(typeof funding !== 'undefined' ? { funding } : {}),
+    ...(typeof license !== 'undefined' ? { license } : {}),
+    ...(typeof author !== 'undefined' ? { author } : {}),
+    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
+    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
+    ...(typeof type !== 'undefined' ? { type } : {}),
+    ...(typeof imports !== 'undefined' ? { imports } : {}),
+    ...(typeof exports !== 'undefined' ? { exports } : {}),
+    ...(typeof main !== 'undefined' ? { main } : {}),
+    ...(typeof browser !== 'undefined' ? { browser } : {}),
+    ...(typeof types !== 'undefined' ? { types } : {}),
+    ...(typeof bin !== 'undefined' ? { bin } : {}),
+    ...(typeof man !== 'undefined' ? { man } : {}),
+    ...(typeof directories !== 'undefined' ? { directories } : {}),
+    ...(typeof files !== 'undefined' ? { files } : {}),
+    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
+    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
+    ...(typeof config !== 'undefined' ? { config } : {}),
+    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
+    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
+    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
+    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
+    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
+    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
+    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
+    ...(typeof engines !== 'undefined' ? { engines } : {}),
+    ...(typeof os !== 'undefined' ? { os } : {}),
+    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
+    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
+    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
+    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
+    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
+    ...rest,
+  }
+}
+
+module.exports = {
+  packageSort,
+}
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js
new file mode 100644
index 0000000000000..7259949ab661d
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js
@@ -0,0 +1,75 @@
+const depTypes = new Set([
+  'dependencies',
+  'optionalDependencies',
+  'devDependencies',
+  'peerDependencies',
+])
+
+// sort alphabetically all types of deps for a given package
+const orderDeps = (content) => {
+  for (const type of depTypes) {
+    if (content && content[type]) {
+      content[type] = Object.keys(content[type])
+        .sort((a, b) => a.localeCompare(b, 'en'))
+        .reduce((res, key) => {
+          res[key] = content[type][key]
+          return res
+        }, {})
+    }
+  }
+  return content
+}
+
+const updateDependencies = ({ content, originalContent }) => {
+  const pkg = orderDeps({
+    ...content,
+  })
+
+  // optionalDependencies don't need to be repeated in two places
+  if (pkg.dependencies) {
+    if (pkg.optionalDependencies) {
+      for (const name of Object.keys(pkg.optionalDependencies)) {
+        delete pkg.dependencies[name]
+      }
+    }
+  }
+
+  const result = { ...originalContent }
+
+  // loop through all types of dependencies and update package json pkg
+  for (const type of depTypes) {
+    if (pkg[type]) {
+      result[type] = pkg[type]
+    }
+
+    // prune empty type props from resulting object
+    const emptyDepType =
+      pkg[type]
+      && typeof pkg === 'object'
+      && Object.keys(pkg[type]).length === 0
+    if (emptyDepType) {
+      delete result[type]
+    }
+  }
+
+  // if original package.json had dep in peerDeps AND deps, preserve that.
+  const { dependencies: origProd, peerDependencies: origPeer } =
+    originalContent || {}
+  const { peerDependencies: newPeer } = result
+  if (origProd && origPeer && newPeer) {
+    // we have original prod/peer deps, and new peer deps
+    // copy over any that were in both in the original
+    for (const name of Object.keys(origPeer)) {
+      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
+        result.dependencies = result.dependencies || {}
+        result.dependencies[name] = newPeer[name]
+      }
+    }
+  }
+
+  return result
+}
+
+updateDependencies.knownKeys = depTypes
+
+module.exports = updateDependencies
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js
new file mode 100644
index 0000000000000..30495e54cc3c7
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js
@@ -0,0 +1,29 @@
+const updateScripts = ({ content, originalContent = {} }) => {
+  const newScripts = content.scripts
+
+  if (!newScripts) {
+    return originalContent
+  }
+
+  // validate scripts content being appended
+  const hasInvalidScripts = () =>
+    Object.entries(newScripts)
+      .some(([key, value]) =>
+        typeof key !== 'string' || typeof value !== 'string')
+  if (hasInvalidScripts()) {
+    throw Object.assign(
+      new TypeError(
+        'package.json scripts should be a key-value pair of strings.'),
+      { code: 'ESCRIPTSINVALID' }
+    )
+  }
+
+  return {
+    ...originalContent,
+    scripts: {
+      ...newScripts,
+    },
+  }
+}
+
+module.exports = updateScripts
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js
new file mode 100644
index 0000000000000..04bf63230636f
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js
@@ -0,0 +1,26 @@
+const updateWorkspaces = ({ content, originalContent = {} }) => {
+  const newWorkspaces = content.workspaces
+
+  if (!newWorkspaces) {
+    return originalContent
+  }
+
+  // validate workspaces content being appended
+  const hasInvalidWorkspaces = () =>
+    newWorkspaces.some(w => !(typeof w === 'string'))
+  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
+    throw Object.assign(
+      new TypeError('workspaces should be an array of strings.'),
+      { code: 'EWORKSPACESINVALID' }
+    )
+  }
+
+  return {
+    ...originalContent,
+    workspaces: [
+      ...newWorkspaces,
+    ],
+  }
+}
+
+module.exports = updateWorkspaces
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json
new file mode 100644
index 0000000000000..263d67ff3bc5b
--- /dev/null
+++ b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "@npmcli/package-json",
+  "version": "6.2.0",
+  "description": "Programmatic API to update package.json",
+  "keywords": [
+    "npm",
+    "oss"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/package-json.git"
+  },
+  "license": "ISC",
+  "author": "GitHub Inc.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "snap": "tap",
+    "test": "tap",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "dependencies": {
+    "@npmcli/git": "^6.0.0",
+    "glob": "^10.2.2",
+    "hosted-git-info": "^8.0.0",
+    "json-parse-even-better-errors": "^4.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.5.3",
+    "validate-npm-package-license": "^3.0.4"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.1.0",
+    "@npmcli/template-oss": "4.23.6",
+    "read-package-json": "^7.0.0",
+    "read-package-json-fast": "^4.0.0",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.6",
+    "publish": "true"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE b/node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE
new file mode 100644
index 0000000000000..6a1f3708f6d70
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE
@@ -0,0 +1,18 @@
+ISC License
+
+Copyright GitHub Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js
new file mode 100644
index 0000000000000..7eff602d73a3f
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js
@@ -0,0 +1,286 @@
+const { readFile, writeFile } = require('node:fs/promises')
+const { resolve } = require('node:path')
+const parseJSON = require('json-parse-even-better-errors')
+
+const updateDeps = require('./update-dependencies.js')
+const updateScripts = require('./update-scripts.js')
+const updateWorkspaces = require('./update-workspaces.js')
+const normalize = require('./normalize.js')
+const { read, parse } = require('./read-package.js')
+const { packageSort } = require('./sort.js')
+
+// a list of handy specialized helper functions that take
+// care of special cases that are handled by the npm cli
+const knownSteps = new Set([
+  updateDeps,
+  updateScripts,
+  updateWorkspaces,
+])
+
+// list of all keys that are handled by "knownSteps" helpers
+const knownKeys = new Set([
+  ...updateDeps.knownKeys,
+  'scripts',
+  'workspaces',
+])
+
+class PackageJson {
+  static normalizeSteps = Object.freeze([
+    '_id',
+    '_attributes',
+    'bundledDependencies',
+    'bundleDependencies',
+    'optionalDedupe',
+    'scripts',
+    'funding',
+    'bin',
+  ])
+
+  // npm pkg fix
+  static fixSteps = Object.freeze([
+    'binRefs',
+    'bundleDependencies',
+    'bundleDependenciesFalse',
+    'fixName',
+    'fixNameField',
+    'fixVersionField',
+    'fixRepositoryField',
+    'fixDependencies',
+    'devDependencies',
+    'scriptpath',
+  ])
+
+  static prepareSteps = Object.freeze([
+    '_id',
+    '_attributes',
+    'bundledDependencies',
+    'bundleDependencies',
+    'bundleDependenciesDeleteFalse',
+    'gypfile',
+    'serverjs',
+    'scriptpath',
+    'authors',
+    'readme',
+    'mans',
+    'binDir',
+    'gitHead',
+    'fillTypes',
+    'normalizeData',
+    'binRefs',
+  ])
+
+  // create a new empty package.json, so we can save at the given path even
+  // though we didn't start from a parsed file
+  static async create (path, opts = {}) {
+    const p = new PackageJson()
+    await p.create(path)
+    if (opts.data) {
+      return p.update(opts.data)
+    }
+    return p
+  }
+
+  // Loads a package.json at given path and JSON parses
+  static async load (path, opts = {}) {
+    const p = new PackageJson()
+    // Avoid try/catch if we aren't going to create
+    if (!opts.create) {
+      return p.load(path)
+    }
+
+    try {
+      return await p.load(path)
+    } catch (err) {
+      if (!err.message.startsWith('Could not read package.json')) {
+        throw err
+      }
+      return await p.create(path)
+    }
+  }
+
+  // npm pkg fix
+  static async fix (path, opts) {
+    const p = new PackageJson()
+    await p.load(path, true)
+    return p.fix(opts)
+  }
+
+  // read-package-json compatible behavior
+  static async prepare (path, opts) {
+    const p = new PackageJson()
+    await p.load(path, true)
+    return p.prepare(opts)
+  }
+
+  // read-package-json-fast compatible behavior
+  static async normalize (path, opts) {
+    const p = new PackageJson()
+    await p.load(path)
+    return p.normalize(opts)
+  }
+
+  #path
+  #manifest
+  #readFileContent = ''
+  #canSave = true
+
+  // Load content from given path
+  async load (path, parseIndex) {
+    this.#path = path
+    let parseErr
+    try {
+      this.#readFileContent = await read(this.filename)
+    } catch (err) {
+      if (!parseIndex) {
+        throw err
+      }
+      parseErr = err
+    }
+
+    if (parseErr) {
+      const indexFile = resolve(this.path, 'index.js')
+      let indexFileContent
+      try {
+        indexFileContent = await readFile(indexFile, 'utf8')
+      } catch (err) {
+        throw parseErr
+      }
+      try {
+        this.fromComment(indexFileContent)
+      } catch (err) {
+        throw parseErr
+      }
+      // This wasn't a package.json so prevent saving
+      this.#canSave = false
+      return this
+    }
+
+    return this.fromJSON(this.#readFileContent)
+  }
+
+  // Load data from a JSON string/buffer
+  fromJSON (data) {
+    this.#manifest = parse(data)
+    return this
+  }
+
+  fromContent (data) {
+    this.#manifest = data
+    this.#canSave = false
+    return this
+  }
+
+  // Load data from a comment
+  // /**package { "name": "foo", "version": "1.2.3", ... } **/
+  fromComment (data) {
+    data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+    if (data.length < 2) {
+      throw new Error('File has no package in comments')
+    }
+    data = data[1]
+    data = data.split(/\*\*\/$/m)
+
+    if (data.length < 2) {
+      throw new Error('File has no package in comments')
+    }
+    data = data[0]
+    data = data.replace(/^\s*\*/mg, '')
+
+    this.#manifest = parseJSON(data)
+    return this
+  }
+
+  get content () {
+    return this.#manifest
+  }
+
+  get path () {
+    return this.#path
+  }
+
+  get filename () {
+    if (this.path) {
+      return resolve(this.path, 'package.json')
+    }
+    return undefined
+  }
+
+  create (path) {
+    this.#path = path
+    this.#manifest = {}
+    return this
+  }
+
+  // This should be the ONLY way to set content in the manifest
+  update (content) {
+    if (!this.content) {
+      throw new Error('Can not update without content.  Please `load` or `create`')
+    }
+
+    for (const step of knownSteps) {
+      this.#manifest = step({ content, originalContent: this.content })
+    }
+
+    // unknown properties will just be overwritten
+    for (const [key, value] of Object.entries(content)) {
+      if (!knownKeys.has(key)) {
+        this.content[key] = value
+      }
+    }
+
+    return this
+  }
+
+  async save ({ sort } = {}) {
+    if (!this.#canSave) {
+      throw new Error('No package.json to save to')
+    }
+    const {
+      [Symbol.for('indent')]: indent,
+      [Symbol.for('newline')]: newline,
+      ...rest
+    } = this.content
+
+    const format = indent === undefined ? '  ' : indent
+    const eol = newline === undefined ? '\n' : newline
+
+    const content = sort ? packageSort(rest) : rest
+
+    const fileContent = `${
+      JSON.stringify(content, null, format)
+    }\n`
+      .replace(/\n/g, eol)
+
+    if (fileContent.trim() !== this.#readFileContent.trim()) {
+      const written = await writeFile(this.filename, fileContent)
+      this.#readFileContent = fileContent
+      return written
+    }
+  }
+
+  async normalize (opts = {}) {
+    if (!opts.steps) {
+      opts.steps = this.constructor.normalizeSteps
+    }
+    await normalize(this, opts)
+    return this
+  }
+
+  async prepare (opts = {}) {
+    if (!opts.steps) {
+      opts.steps = this.constructor.prepareSteps
+    }
+    await normalize(this, opts)
+    return this
+  }
+
+  async fix (opts = {}) {
+    // This one is not overridable
+    opts.steps = this.constructor.fixSteps
+    await normalize(this, opts)
+    return this
+  }
+}
+
+module.exports = PackageJson
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js
new file mode 100644
index 0000000000000..79b0bafbcd3a4
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js
@@ -0,0 +1,257 @@
+// Originally normalize-package-data
+
+const url = require('node:url')
+const hostedGitInfo = require('hosted-git-info')
+const validateLicense = require('validate-npm-package-license')
+
+const typos = {
+  dependancies: 'dependencies',
+  dependecies: 'dependencies',
+  depdenencies: 'dependencies',
+  devEependencies: 'devDependencies',
+  depends: 'dependencies',
+  'dev-dependencies': 'devDependencies',
+  devDependences: 'devDependencies',
+  devDepenencies: 'devDependencies',
+  devdependencies: 'devDependencies',
+  repostitory: 'repository',
+  repo: 'repository',
+  prefereGlobal: 'preferGlobal',
+  hompage: 'homepage',
+  hampage: 'homepage',
+  autohr: 'author',
+  autor: 'author',
+  contributers: 'contributors',
+  publicationConfig: 'publishConfig',
+  script: 'scripts',
+}
+
+const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
+
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (description) {
+  // the first block of text after any leading blank lines or headings, up to the next blank line
+  const lines = description.trim().split('\n')
+  let start = 0
+  // skip initial empty lines and lines that start with #
+  while (lines[start]?.trim().match(/^(#|$)/)) {
+    start++
+  }
+  let end = start + 1
+  // keep going till we get to the end or an empty line
+  while (end < lines.length && lines[end].trim()) {
+    end++
+  }
+  return lines.slice(start, end).join(' ').trim()
+}
+
+function stringifyPerson (person) {
+  if (typeof person !== 'string') {
+    const name = person.name || ''
+    const u = person.url || person.web
+    const wrappedUrl = u ? (' (' + u + ')') : ''
+    const e = person.email || person.mail
+    const wrappedEmail = e ? (' <' + e + '>') : ''
+    person = name + wrappedEmail + wrappedUrl
+  }
+  const matchedName = person.match(/^([^(<]+)/)
+  const matchedUrl = person.match(/\(([^()]+)\)/)
+  const matchedEmail = person.match(/<([^<>]+)>/)
+  const parsed = {}
+  if (matchedName?.[0].trim()) {
+    parsed.name = matchedName[0].trim()
+  }
+  if (matchedEmail) {
+    parsed.email = matchedEmail[1]
+  }
+  if (matchedUrl) {
+    parsed.url = matchedUrl[1]
+  }
+  return parsed
+}
+
+function normalizeData (data, changes) {
+  // fixDescriptionField
+  if (data.description && typeof data.description !== 'string') {
+    changes?.push(`'description' field should be a string`)
+    delete data.description
+  }
+  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
+    data.description = extractDescription(data.readme)
+  }
+  if (data.description === undefined) {
+    delete data.description
+  }
+  if (!data.description) {
+    changes?.push('No description')
+  }
+
+  // fixModulesField
+  if (data.modules) {
+    changes?.push(`modules field is deprecated`)
+    delete data.modules
+  }
+
+  // fixFilesField
+  const files = data.files
+  if (files && !Array.isArray(files)) {
+    changes?.push(`Invalid 'files' member`)
+    delete data.files
+  } else if (data.files) {
+    data.files = data.files.filter(function (file) {
+      if (!file || typeof file !== 'string') {
+        changes?.push(`Invalid filename in 'files' list: ${file}`)
+        return false
+      } else {
+        return true
+      }
+    })
+  }
+
+  // fixManField
+  if (data.man && typeof data.man === 'string') {
+    data.man = [data.man]
+  }
+
+  // fixBugsField
+  if (!data.bugs && data.repository?.url) {
+    const hosted = hostedGitInfo.fromUrl(data.repository.url)
+    if (hosted && hosted.bugs()) {
+      data.bugs = { url: hosted.bugs() }
+    }
+  } else if (data.bugs) {
+    if (typeof data.bugs === 'string') {
+      if (isEmail(data.bugs)) {
+        data.bugs = { email: data.bugs }
+        /* eslint-disable-next-line node/no-deprecated-api */
+      } else if (url.parse(data.bugs).protocol) {
+        data.bugs = { url: data.bugs }
+      } else {
+        changes?.push(`Bug string field must be url, email, or {email,url}`)
+      }
+    } else {
+      for (const k in data.bugs) {
+        if (['web', 'name'].includes(k)) {
+          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
+          data.bugs.url = data.bugs[k]
+          delete data.bugs[k]
+        }
+      }
+      const oldBugs = data.bugs
+      data.bugs = {}
+      if (oldBugs.url) {
+        /* eslint-disable-next-line node/no-deprecated-api */
+        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+          data.bugs.url = oldBugs.url
+        } else {
+          changes?.push('bugs.url field must be a string url. Deleted.')
+        }
+      }
+      if (oldBugs.email) {
+        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
+          data.bugs.email = oldBugs.email
+        } else {
+          changes?.push('bugs.email field must be a string email. Deleted.')
+        }
+      }
+    }
+    if (!data.bugs.email && !data.bugs.url) {
+      delete data.bugs
+      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
+    }
+  }
+  // fixKeywordsField
+  if (typeof data.keywords === 'string') {
+    data.keywords = data.keywords.split(/,\s+/)
+  }
+  if (data.keywords && !Array.isArray(data.keywords)) {
+    delete data.keywords
+    changes?.push(`keywords should be an array of strings`)
+  } else if (data.keywords) {
+    data.keywords = data.keywords.filter(function (kw) {
+      if (typeof kw !== 'string' || !kw) {
+        changes?.push(`keywords should be an array of strings`)
+        return false
+      } else {
+        return true
+      }
+    })
+  }
+  // fixBundleDependenciesField
+  const bdd = 'bundledDependencies'
+  const bd = 'bundleDependencies'
+  if (data[bdd] && !data[bd]) {
+    data[bd] = data[bdd]
+    delete data[bdd]
+  }
+  if (data[bd] && !Array.isArray(data[bd])) {
+    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
+    delete data[bd]
+  } else if (data[bd]) {
+    data[bd] = data[bd].filter(function (filtered) {
+      if (!filtered || typeof filtered !== 'string') {
+        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
+        return false
+      } else {
+        if (!data.dependencies) {
+          data.dependencies = {}
+        }
+        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
+          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
+          data.dependencies[filtered] = '*'
+        }
+        return true
+      }
+    })
+  }
+  // fixHomepageField
+  if (!data.homepage && data.repository && data.repository.url) {
+    const hosted = hostedGitInfo.fromUrl(data.repository.url)
+    if (hosted) {
+      data.homepage = hosted.docs()
+    }
+  }
+  if (data.homepage) {
+    if (typeof data.homepage !== 'string') {
+      changes?.push('homepage field must be a string url. Deleted.')
+      delete data.homepage
+    } else {
+      /* eslint-disable-next-line node/no-deprecated-api */
+      if (!url.parse(data.homepage).protocol) {
+        data.homepage = 'http://' + data.homepage
+      }
+    }
+  }
+  // fixReadmeField
+  if (!data.readme) {
+    changes?.push('No README data')
+    data.readme = 'ERROR: No README data found!'
+  }
+  // fixLicenseField
+  const license = data.license || data.licence
+  if (!license) {
+    changes?.push('No license field.')
+  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
+    changes?.push('license should be a valid SPDX license expression')
+  } else if (!validateLicense(license).validForNewPackages) {
+    changes?.push('license should be a valid SPDX license expression')
+  }
+  // fixPeople
+  if (data.author) {
+    data.author = stringifyPerson(data.author)
+  }
+  ['maintainers', 'contributors'].forEach(function (set) {
+    if (!Array.isArray(data[set])) {
+      return
+    }
+    data[set] = data[set].map(stringifyPerson)
+  })
+  // fixTypos
+  for (const d in typos) {
+    if (Object.prototype.hasOwnProperty.call(data, d)) {
+      changes?.push(`${d} should probably be ${typos[d]}.`)
+    }
+  }
+}
+
+module.exports = { normalizeData }
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js
new file mode 100644
index 0000000000000..845f6753a9a00
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js
@@ -0,0 +1,601 @@
+const valid = require('semver/functions/valid')
+const clean = require('semver/functions/clean')
+const fs = require('node:fs/promises')
+const path = require('node:path')
+const { log } = require('proc-log')
+const moduleBuiltin = require('node:module')
+
+/**
+ * @type {import('hosted-git-info')}
+ */
+let _hostedGitInfo
+function lazyHostedGitInfo () {
+  if (!_hostedGitInfo) {
+    _hostedGitInfo = require('hosted-git-info')
+  }
+  return _hostedGitInfo
+}
+
+/**
+ * @type {import('glob').glob}
+ */
+let _glob
+function lazyLoadGlob () {
+  if (!_glob) {
+    _glob = require('glob').glob
+  }
+  return _glob
+}
+
+// used to be npm-normalize-package-bin
+function normalizePackageBin (pkg, changes) {
+  if (pkg.bin) {
+    if (typeof pkg.bin === 'string' && pkg.name) {
+      changes?.push('"bin" was converted to an object')
+      pkg.bin = { [pkg.name]: pkg.bin }
+    } else if (Array.isArray(pkg.bin)) {
+      changes?.push('"bin" was converted to an object')
+      pkg.bin = pkg.bin.reduce((acc, k) => {
+        acc[path.basename(k)] = k
+        return acc
+      }, {})
+    }
+    if (typeof pkg.bin === 'object') {
+      for (const binKey in pkg.bin) {
+        if (typeof pkg.bin[binKey] !== 'string') {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+        const base = path.basename(secureAndUnixifyPath(binKey))
+        if (!base) {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+
+        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
+
+        if (!binTarget) {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+
+        if (base !== binKey) {
+          delete pkg.bin[binKey]
+          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
+        }
+        if (binTarget !== pkg.bin[binKey]) {
+          changes?.push(`"bin[${base}]" script name was cleaned`)
+        }
+        pkg.bin[base] = binTarget
+      }
+
+      if (Object.keys(pkg.bin).length === 0) {
+        changes?.push('empty "bin" was removed')
+        delete pkg.bin
+      }
+
+      return pkg
+    }
+  }
+  delete pkg.bin
+}
+
+function normalizePackageMan (pkg, changes) {
+  if (pkg.man) {
+    const mans = []
+    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
+      if (typeof man !== 'string') {
+        changes?.push(`removed invalid "man [${man}]"`)
+      } else {
+        mans.push(secureAndUnixifyPath(man))
+      }
+    }
+
+    if (!mans.length) {
+      changes?.push('empty "man" was removed')
+    } else {
+      pkg.man = mans
+      return pkg
+    }
+  }
+  delete pkg.man
+}
+
+function isCorrectlyEncodedName (spec) {
+  return !spec.match(/[/@\s+%:]/) &&
+    spec === encodeURIComponent(spec)
+}
+
+function isValidScopedPackageName (spec) {
+  if (spec.charAt(0) !== '@') {
+    return false
+  }
+
+  const rest = spec.slice(1).split('/')
+  if (rest.length !== 2) {
+    return false
+  }
+
+  return rest[0] && rest[1] &&
+    rest[0] === encodeURIComponent(rest[0]) &&
+    rest[1] === encodeURIComponent(rest[1])
+}
+
+function unixifyPath (ref) {
+  return ref.replace(/\\|:/g, '/')
+}
+
+function secureAndUnixifyPath (ref) {
+  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
+  return secured.startsWith('./') ? '' : secured
+}
+
+// We don't want the `changes` array in here by default because this is a hot
+// path for parsing packuments during install.  So the calling method passes it
+// in if it wants to track changes.
+const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
+  if (!pkg.content) {
+    throw new Error('Can not normalize without content')
+  }
+  const data = pkg.content
+  const scripts = data.scripts || {}
+  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
+
+  // name and version are load bearing so we have to clean them up first
+  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
+    if (!data.name && !strict) {
+      changes?.push('Missing "name" field was set to an empty string')
+      data.name = ''
+    } else {
+      if (typeof data.name !== 'string') {
+        throw new Error('name field must be a string.')
+      }
+      if (!strict) {
+        const name = data.name.trim()
+        if (data.name !== name) {
+          changes?.push(`Whitespace was trimmed from "name"`)
+          data.name = name
+        }
+      }
+
+      if (data.name.startsWith('.') ||
+        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
+        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
+        data.name.toLowerCase() === 'node_modules' ||
+        data.name.toLowerCase() === 'favicon.ico') {
+        throw new Error('Invalid name: ' + JSON.stringify(data.name))
+      }
+    }
+  }
+
+  if (steps.includes('fixName')) {
+    // Check for conflicts with builtin modules
+    if (moduleBuiltin.builtinModules.includes(data.name)) {
+      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
+    }
+  }
+
+  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
+    // allow "loose" semver 1.0 versions in non-strict mode
+    // enforce strict semver 2.0 compliance in strict mode
+    const loose = !strict
+    if (!data.version) {
+      data.version = ''
+    } else {
+      if (!valid(data.version, loose)) {
+        throw new Error(`Invalid version: "${data.version}"`)
+      }
+      const version = clean(data.version, loose)
+      if (version !== data.version) {
+        changes?.push(`"version" was cleaned and set to "${version}"`)
+        data.version = version
+      }
+    }
+  }
+  // remove attributes that start with "_"
+  if (steps.includes('_attributes')) {
+    for (const key in data) {
+      if (key.startsWith('_')) {
+        changes?.push(`"${key}" was removed`)
+        delete pkg.content[key]
+      }
+    }
+  }
+
+  // build the "_id" attribute
+  if (steps.includes('_id')) {
+    if (data.name && data.version) {
+      changes?.push(`"_id" was set to ${pkgId}`)
+      data._id = pkgId
+    }
+  }
+
+  // fix bundledDependencies typo
+  // normalize bundleDependencies
+  if (steps.includes('bundledDependencies')) {
+    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
+      data.bundleDependencies = data.bundledDependencies
+    }
+    changes?.push(`Deleted incorrect "bundledDependencies"`)
+    delete data.bundledDependencies
+  }
+  // expand "bundleDependencies: true" or translate from object
+  if (steps.includes('bundleDependencies')) {
+    const bd = data.bundleDependencies
+    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
+      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
+      data.bundleDependencies = []
+    } else if (bd === true) {
+      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
+      data.bundleDependencies = Object.keys(data.dependencies || {})
+    } else if (bd && typeof bd === 'object') {
+      if (!Array.isArray(bd)) {
+        changes?.push(`"bundleDependencies" was changed from an object to an array`)
+        data.bundleDependencies = Object.keys(bd)
+      }
+    } else if ('bundleDependencies' in data) {
+      changes?.push(`"bundleDependencies" was removed`)
+      delete data.bundleDependencies
+    }
+  }
+
+  // it was once common practice to list deps both in optionalDependencies and
+  // in dependencies, to support npm versions that did not know about
+  // optionalDependencies.  This is no longer a relevant need, so duplicating
+  // the deps in two places is unnecessary and excessive.
+  if (steps.includes('optionalDedupe')) {
+    if (data.dependencies &&
+      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
+      for (const name in data.optionalDependencies) {
+        changes?.push(`optionalDependencies."${name}" was removed`)
+        delete data.dependencies[name]
+      }
+      if (!Object.keys(data.dependencies).length) {
+        changes?.push(`Empty "dependencies" was removed`)
+        delete data.dependencies
+      }
+    }
+  }
+
+  // add "install" attribute if any "*.gyp" files exist
+  if (steps.includes('gypfile')) {
+    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
+      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
+      if (files.length) {
+        scripts.install = 'node-gyp rebuild'
+        data.scripts = scripts
+        data.gypfile = true
+        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
+        changes?.push(`"gypfile" was set to "true"`)
+      }
+    }
+  }
+
+  // add "start" attribute if "server.js" exists
+  if (steps.includes('serverjs') && !scripts.start) {
+    try {
+      await fs.access(path.join(pkg.path, 'server.js'))
+      scripts.start = 'node server.js'
+      data.scripts = scripts
+      changes?.push('"scripts.start" was set to "node server.js"')
+    } catch {
+      // do nothing
+    }
+  }
+
+  // strip "node_modules/.bin" from scripts entries
+  // remove invalid scripts entries (non-strings)
+  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
+    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
+    if (typeof data.scripts === 'object') {
+      for (const name in data.scripts) {
+        if (typeof data.scripts[name] !== 'string') {
+          delete data.scripts[name]
+          changes?.push(`Invalid scripts."${name}" was removed`)
+        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
+          data.scripts[name] = data.scripts[name].replace(spre, '')
+          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
+        }
+      }
+    } else {
+      changes?.push(`Removed invalid "scripts"`)
+      delete data.scripts
+    }
+  }
+
+  if (steps.includes('funding')) {
+    if (data.funding && typeof data.funding === 'string') {
+      data.funding = { url: data.funding }
+      changes?.push(`"funding" was changed to an object with a url attribute`)
+    }
+  }
+
+  // populate "authors" attribute
+  if (steps.includes('authors') && !data.contributors) {
+    try {
+      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
+      const authors = authorData.split(/\r?\n/g)
+        .map(line => line.replace(/^\s*#.*$/, '').trim())
+        .filter(line => line)
+      data.contributors = authors
+      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
+    } catch {
+      // do nothing
+    }
+  }
+
+  // populate "readme" attribute
+  if (steps.includes('readme') && !data.readme) {
+    const mdre = /\.m?a?r?k?d?o?w?n?$/i
+    const files = await lazyLoadGlob()('{README,README.*}', {
+      cwd: pkg.path,
+      nocase: true,
+      mark: true,
+    })
+    let readmeFile
+    for (const file of files) {
+      // don't accept directories.
+      if (!file.endsWith(path.sep)) {
+        if (file.match(mdre)) {
+          readmeFile = file
+          break
+        }
+        if (file.endsWith('README')) {
+          readmeFile = file
+        }
+      }
+    }
+    if (readmeFile) {
+      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
+      data.readme = readmeData
+      data.readmeFilename = readmeFile
+      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
+      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
+    }
+    if (!data.readme) {
+      data.readme = 'ERROR: No README data found!'
+    }
+  }
+
+  // expand directories.man
+  if (steps.includes('mans')) {
+    if (data.directories?.man && !data.man) {
+      const manDir = secureAndUnixifyPath(data.directories.man)
+      const cwd = path.resolve(pkg.path, manDir)
+      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
+      data.man = files.map(man =>
+        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
+      )
+    }
+    normalizePackageMan(data, changes)
+  }
+
+  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
+    normalizePackageBin(data, changes)
+  }
+
+  // expand "directories.bin"
+  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
+    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
+    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
+    data.bin = bins.reduce((acc, binFile) => {
+      if (binFile && !binFile.startsWith('.')) {
+        const binName = path.basename(binFile)
+        acc[binName] = path.join(data.directories.bin, binFile)
+      }
+      return acc
+    }, {})
+    // *sigh*
+    normalizePackageBin(data, changes)
+  }
+
+  // populate "gitHead" attribute
+  if (steps.includes('gitHead') && !data.gitHead) {
+    const git = require('@npmcli/git')
+    const gitRoot = await git.find({ cwd: pkg.path, root })
+    let head
+    if (gitRoot) {
+      try {
+        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
+      } catch (err) {
+      // do nothing
+      }
+    }
+    let headData
+    if (head) {
+      if (head.startsWith('ref: ')) {
+        const headRef = head.replace(/^ref: /, '').trim()
+        const headFile = path.resolve(gitRoot, '.git', headRef)
+        try {
+          headData = await fs.readFile(headFile, 'utf8')
+          headData = headData.replace(/^ref: /, '').trim()
+        } catch (err) {
+          // do nothing
+        }
+        if (!headData) {
+          const packFile = path.resolve(gitRoot, '.git/packed-refs')
+          try {
+            let refs = await fs.readFile(packFile, 'utf8')
+            if (refs) {
+              refs = refs.split('\n')
+              for (let i = 0; i < refs.length; i++) {
+                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
+                if (match && match[2].trim() === headRef) {
+                  headData = match[1]
+                  break
+                }
+              }
+            }
+          } catch {
+            // do nothing
+          }
+        }
+      } else {
+        headData = head.trim()
+      }
+    }
+    if (headData) {
+      data.gitHead = headData
+    }
+  }
+
+  // populate "types" attribute
+  if (steps.includes('fillTypes')) {
+    const index = data.main || 'index.js'
+
+    if (typeof index !== 'string') {
+      throw new TypeError('The "main" attribute must be of type string.')
+    }
+
+    // TODO exports is much more complicated than this in verbose format
+    // We need to support for instance
+
+    // "exports": {
+    //   ".": [
+    //     {
+    //       "default": "./lib/npm.js"
+    //     },
+    //     "./lib/npm.js"
+    //   ],
+    //   "./package.json": "./package.json"
+    // },
+    // as well as conditional exports
+
+    // if (data.exports && typeof data.exports === 'string') {
+    //   index = data.exports
+    // }
+
+    // if (data.exports && data.exports['.']) {
+    //   index = data.exports['.']
+    //   if (typeof index !== 'string') {
+    //   }
+    // }
+    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
+    const dts = `./${extless}.d.ts`
+    const hasDTSFields = 'types' in data || 'typings' in data
+    if (!hasDTSFields) {
+      try {
+        await fs.access(path.join(pkg.path, dts))
+        data.types = dts.split(path.sep).join('/')
+      } catch {
+        // do nothing
+      }
+    }
+  }
+
+  // "normalizeData" from "read-package-json", which was just a call through to
+  // "normalize-package-data".  We only call the "fixer" functions because
+  // outside of that it was also clobbering _id (which we already conditionally
+  // do) and also adding the gypfile script (which we also already
+  // conditionally do)
+
+  // Some steps are isolated so we can do a limited subset of these in `fix`
+  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
+    if (data.repositories) {
+      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
+      data.repository = data.repositories[0]
+    }
+    if (data.repository) {
+      if (typeof data.repository === 'string') {
+        changes?.push('"repository" was changed from a string to an object')
+        data.repository = {
+          type: 'git',
+          url: data.repository,
+        }
+      }
+      if (data.repository.url) {
+        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
+        let r
+        if (hosted) {
+          if (hosted.getDefaultRepresentation() === 'shortcut') {
+            r = hosted.https()
+          } else {
+            r = hosted.toString()
+          }
+          if (r !== data.repository.url) {
+            changes?.push(`"repository.url" was normalized to "${r}"`)
+            data.repository.url = r
+          }
+        }
+      }
+    }
+  }
+
+  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
+    // peerDependencies?
+    // devDependencies is meaningless here, it's ignored on an installed package
+    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
+      if (data[type]) {
+        let secondWarning = true
+        if (typeof data[type] === 'string') {
+          changes?.push(`"${type}" was converted from a string into an object`)
+          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
+          secondWarning = false
+        }
+        if (Array.isArray(data[type])) {
+          if (secondWarning) {
+            changes?.push(`"${type}" was converted from an array into an object`)
+          }
+          const o = {}
+          for (const d of data[type]) {
+            if (typeof d === 'string') {
+              const dep = d.trim().split(/(:?[@\s><=])/)
+              const dn = dep.shift()
+              const dv = dep.join('').replace(/^@/, '').trim()
+              o[dn] = dv
+            }
+          }
+          data[type] = o
+        }
+      }
+    }
+    // normalize-package-data used to put optional dependencies BACK into
+    // dependencies here, we no longer do this
+
+    for (const deps of ['dependencies', 'devDependencies']) {
+      if (deps in data) {
+        if (!data[deps] || typeof data[deps] !== 'object') {
+          changes?.push(`Removed invalid "${deps}"`)
+          delete data[deps]
+        } else {
+          for (const d in data[deps]) {
+            const r = data[deps][d]
+            if (typeof r !== 'string') {
+              changes?.push(`Removed invalid "${deps}.${d}"`)
+              delete data[deps][d]
+            }
+            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
+            if (hosted && hosted !== data[deps][d]) {
+              changes?.push(`Normalized git reference to "${deps}.${d}"`)
+              data[deps][d] = hosted.toString()
+            }
+          }
+        }
+      }
+    }
+  }
+
+  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
+  if (steps.includes('normalizeData')) {
+    const { normalizeData } = require('./normalize-data.js')
+    normalizeData(data, changes)
+  }
+
+  // Warn if the bin references don't point to anything.  This might be better
+  // in normalize-package-data if it had access to the file path.
+  if (steps.includes('binRefs') && data.bin instanceof Object) {
+    for (const key in data.bin) {
+      try {
+        await fs.access(path.resolve(pkg.path, data.bin[key]))
+      } catch {
+        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
+        // XXX: should a future breaking change delete bin entries that cannot be accessed?
+      }
+    }
+  }
+}
+
+module.exports = normalize
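
For orientation, `normalize()` above is not called directly by consumers; the `PackageJson` class (lib/index.js) picks a steps list and passes itself in. A minimal sketch, assuming a hypothetical package path:

```js
const PackageJson = require('@npmcli/package-json')

async function demo () {
  // prepare() runs the read-package-json-compatible steps
  // (gypfile, serverjs, readme, gitHead, normalizeData, ...) for the package at this path
  const pkg = await PackageJson.prepare('/path/to/some-package')
  console.log(pkg.content._id) // set by the '_id' step when name and version are present
}
```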
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js
new file mode 100644
index 0000000000000..d6c86ce388e6c
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js
@@ -0,0 +1,39 @@
+// This is JUST the code needed to open a package.json file and parse it.
+// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
+
+const { readFile } = require('fs/promises')
+const parseJSON = require('json-parse-even-better-errors')
+
+async function read (filename) {
+  try {
+    const data = await readFile(filename, 'utf8')
+    return data
+  } catch (err) {
+    err.message = `Could not read package.json: ${err}`
+    throw err
+  }
+}
+
+function parse (data) {
+  try {
+    const content = parseJSON(data)
+    return content
+  } catch (err) {
+    err.message = `Invalid package.json: ${err}`
+    throw err
+  }
+}
+
+// This is what most external libs will use.
+// PackageJson will call read and parse separately
+async function readPackage (filename) {
+  const data = await read(filename)
+  const content = parse(data)
+  return content
+}
+
+module.exports = {
+  read,
+  parse,
+  readPackage,
+}
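
A small usage sketch for the helpers above; the file path is hypothetical, and the deep require assumes the CommonJS layout shipped here (the package declares no "exports" map restricting it):

```js
const { readPackage, read, parse } = require('@npmcli/package-json/lib/read-package.js')

async function demo () {
  // one-shot read + parse; errors are prefixed "Could not read package.json"
  // or "Invalid package.json" depending on which stage failed
  const manifest = await readPackage('/path/to/project/package.json')
  console.log(manifest.name, manifest.version)

  // or the two stages separately, as PackageJson#load does
  const raw = await read('/path/to/project/package.json')
  const parsed = parse(raw)
  return parsed
}
```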
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js
new file mode 100644
index 0000000000000..0bd0d5199da58
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js
@@ -0,0 +1,101 @@
+/**
+ * arbitrary sort order for package.json largely pulled from:
+ * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
+ *
+ * cross checked with:
+ * https://github.com/npm/types/blob/main/types/index.d.ts#L104
+ * https://docs.npmjs.com/cli/configuring-npm/package-json
+ */
+function packageSort (json) {
+  const {
+    name,
+    version,
+    private: isPrivate,
+    description,
+    keywords,
+    homepage,
+    bugs,
+    repository,
+    funding,
+    license,
+    author,
+    maintainers,
+    contributors,
+    type,
+    imports,
+    exports,
+    main,
+    browser,
+    types,
+    bin,
+    man,
+    directories,
+    files,
+    workspaces,
+    scripts,
+    config,
+    dependencies,
+    devDependencies,
+    peerDependencies,
+    peerDependenciesMeta,
+    optionalDependencies,
+    bundledDependencies,
+    bundleDependencies,
+    engines,
+    os,
+    cpu,
+    publishConfig,
+    devEngines,
+    licenses,
+    overrides,
+    ...rest
+  } = json
+
+  return {
+    ...(typeof name !== 'undefined' ? { name } : {}),
+    ...(typeof version !== 'undefined' ? { version } : {}),
+    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
+    ...(typeof description !== 'undefined' ? { description } : {}),
+    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
+    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
+    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
+    ...(typeof repository !== 'undefined' ? { repository } : {}),
+    ...(typeof funding !== 'undefined' ? { funding } : {}),
+    ...(typeof license !== 'undefined' ? { license } : {}),
+    ...(typeof author !== 'undefined' ? { author } : {}),
+    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
+    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
+    ...(typeof type !== 'undefined' ? { type } : {}),
+    ...(typeof imports !== 'undefined' ? { imports } : {}),
+    ...(typeof exports !== 'undefined' ? { exports } : {}),
+    ...(typeof main !== 'undefined' ? { main } : {}),
+    ...(typeof browser !== 'undefined' ? { browser } : {}),
+    ...(typeof types !== 'undefined' ? { types } : {}),
+    ...(typeof bin !== 'undefined' ? { bin } : {}),
+    ...(typeof man !== 'undefined' ? { man } : {}),
+    ...(typeof directories !== 'undefined' ? { directories } : {}),
+    ...(typeof files !== 'undefined' ? { files } : {}),
+    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
+    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
+    ...(typeof config !== 'undefined' ? { config } : {}),
+    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
+    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
+    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
+    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
+    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
+    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
+    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
+    ...(typeof engines !== 'undefined' ? { engines } : {}),
+    ...(typeof os !== 'undefined' ? { os } : {}),
+    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
+    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
+    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
+    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
+    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
+    ...rest,
+  }
+}
+
+module.exports = {
+  packageSort,
+}
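
A sketch of what `packageSort` does with an arbitrarily ordered manifest; keys not in the list above are appended unchanged at the end (the manifest values here are hypothetical):

```js
const { packageSort } = require('@npmcli/package-json/lib/sort.js')

const sorted = packageSort({
  scripts: { test: 'tap' },
  customField: true,
  version: '1.0.0',
  name: 'demo',
})
// -> { name: 'demo', version: '1.0.0', scripts: { test: 'tap' }, customField: true }
```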
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js
new file mode 100644
index 0000000000000..7259949ab661d
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js
@@ -0,0 +1,75 @@
+const depTypes = new Set([
+  'dependencies',
+  'optionalDependencies',
+  'devDependencies',
+  'peerDependencies',
+])
+
+// sort alphabetically all types of deps for a given package
+const orderDeps = (content) => {
+  for (const type of depTypes) {
+    if (content && content[type]) {
+      content[type] = Object.keys(content[type])
+        .sort((a, b) => a.localeCompare(b, 'en'))
+        .reduce((res, key) => {
+          res[key] = content[type][key]
+          return res
+        }, {})
+    }
+  }
+  return content
+}
+
+const updateDependencies = ({ content, originalContent }) => {
+  const pkg = orderDeps({
+    ...content,
+  })
+
+  // optionalDependencies don't need to be repeated in two places
+  if (pkg.dependencies) {
+    if (pkg.optionalDependencies) {
+      for (const name of Object.keys(pkg.optionalDependencies)) {
+        delete pkg.dependencies[name]
+      }
+    }
+  }
+
+  const result = { ...originalContent }
+
+  // loop through all types of dependencies and update package json pkg
+  for (const type of depTypes) {
+    if (pkg[type]) {
+      result[type] = pkg[type]
+    }
+
+    // prune empty type props from resulting object
+    const emptyDepType =
+      pkg[type]
+      && typeof pkg === 'object'
+      && Object.keys(pkg[type]).length === 0
+    if (emptyDepType) {
+      delete result[type]
+    }
+  }
+
+  // if original package.json had dep in peerDeps AND deps, preserve that.
+  const { dependencies: origProd, peerDependencies: origPeer } =
+    originalContent || {}
+  const { peerDependencies: newPeer } = result
+  if (origProd && origPeer && newPeer) {
+    // we have original prod/peer deps, and new peer deps
+    // copy over any that were in both in the original
+    for (const name of Object.keys(origPeer)) {
+      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
+        result.dependencies = result.dependencies || {}
+        result.dependencies[name] = newPeer[name]
+      }
+    }
+  }
+
+  return result
+}
+
+updateDependencies.knownKeys = depTypes
+
+module.exports = updateDependencies
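
A quick sketch of the behavior above: each dependency block is sorted alphabetically, and any name also listed in optionalDependencies is dropped from dependencies (package names and ranges here are hypothetical):

```js
const updateDependencies = require('@npmcli/package-json/lib/update-dependencies.js')

const result = updateDependencies({
  content: {
    dependencies: { zlib: '^1.0.0', abbrev: '^2.0.0', once: '^1.4.0' },
    optionalDependencies: { once: '^1.4.0' },
  },
  originalContent: {},
})
// result.dependencies         -> { abbrev: '^2.0.0', zlib: '^1.0.0' }
// result.optionalDependencies -> { once: '^1.4.0' }
```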
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js
new file mode 100644
index 0000000000000..30495e54cc3c7
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js
@@ -0,0 +1,29 @@
+const updateScripts = ({ content, originalContent = {} }) => {
+  const newScripts = content.scripts
+
+  if (!newScripts) {
+    return originalContent
+  }
+
+  // validate scripts content being appended
+  const hasInvalidScripts = () =>
+    Object.entries(newScripts)
+      .some(([key, value]) =>
+        typeof key !== 'string' || typeof value !== 'string')
+  if (hasInvalidScripts()) {
+    throw Object.assign(
+      new TypeError(
+        'package.json scripts should be a key-value pair of strings.'),
+      { code: 'ESCRIPTSINVALID' }
+    )
+  }
+
+  return {
+    ...originalContent,
+    scripts: {
+      ...newScripts,
+    },
+  }
+}
+
+module.exports = updateScripts
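
Note that `content.scripts` replaces `originalContent.scripts` wholesale rather than merging, and non-string values throw with code `ESCRIPTSINVALID`. A small sketch with hypothetical script names:

```js
const updateScripts = require('@npmcli/package-json/lib/update-scripts.js')

const next = updateScripts({
  content: { scripts: { test: 'tap', lint: 'eslint .' } },
  originalContent: { name: 'demo', scripts: { old: 'true' } },
})
// next -> { name: 'demo', scripts: { test: 'tap', lint: 'eslint .' } }
// the previous "old" script is gone because scripts are replaced, not merged
```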
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js
new file mode 100644
index 0000000000000..04bf63230636f
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js
@@ -0,0 +1,26 @@
+const updateWorkspaces = ({ content, originalContent = {} }) => {
+  const newWorkspaces = content.workspaces
+
+  if (!newWorkspaces) {
+    return originalContent
+  }
+
+  // validate workspaces content being appended
+  const hasInvalidWorkspaces = () =>
+    newWorkspaces.some(w => !(typeof w === 'string'))
+  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
+    throw Object.assign(
+      new TypeError('workspaces should be an array of strings.'),
+      { code: 'EWORKSPACESINVALID' }
+    )
+  }
+
+  return {
+    ...originalContent,
+    workspaces: [
+      ...newWorkspaces,
+    ],
+  }
+}
+
+module.exports = updateWorkspaces
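
Same shape as the scripts helper: a replacement rather than a merge, and an empty array or non-string entry throws `EWORKSPACESINVALID`. A sketch with hypothetical globs:

```js
const updateWorkspaces = require('@npmcli/package-json/lib/update-workspaces.js')

const next = updateWorkspaces({
  content: { workspaces: ['packages/*', 'apps/site'] },
  originalContent: { name: 'demo', workspaces: ['old/*'] },
})
// next.workspaces -> ['packages/*', 'apps/site']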
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/package.json b/node_modules/init-package-json/node_modules/@npmcli/package-json/package.json
new file mode 100644
index 0000000000000..263d67ff3bc5b
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/@npmcli/package-json/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "@npmcli/package-json",
+  "version": "6.2.0",
+  "description": "Programmatic API to update package.json",
+  "keywords": [
+    "npm",
+    "oss"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/package-json.git"
+  },
+  "license": "ISC",
+  "author": "GitHub Inc.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "snap": "tap",
+    "test": "tap",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "dependencies": {
+    "@npmcli/git": "^6.0.0",
+    "glob": "^10.2.2",
+    "hosted-git-info": "^8.0.0",
+    "json-parse-even-better-errors": "^4.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.5.3",
+    "validate-npm-package-license": "^3.0.4"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.1.0",
+    "@npmcli/template-oss": "4.23.6",
+    "read-package-json": "^7.0.0",
+    "read-package-json-fast": "^4.0.0",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.6",
+    "publish": "true"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/LICENSE b/node_modules/pacote/node_modules/@npmcli/package-json/LICENSE
new file mode 100644
index 0000000000000..6a1f3708f6d70
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/LICENSE
@@ -0,0 +1,18 @@
+ISC License
+
+Copyright GitHub Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js
new file mode 100644
index 0000000000000..7eff602d73a3f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js
@@ -0,0 +1,286 @@
+const { readFile, writeFile } = require('node:fs/promises')
+const { resolve } = require('node:path')
+const parseJSON = require('json-parse-even-better-errors')
+
+const updateDeps = require('./update-dependencies.js')
+const updateScripts = require('./update-scripts.js')
+const updateWorkspaces = require('./update-workspaces.js')
+const normalize = require('./normalize.js')
+const { read, parse } = require('./read-package.js')
+const { packageSort } = require('./sort.js')
+
+// a list of handy specialized helper functions that take
+// care of special cases that are handled by the npm cli
+const knownSteps = new Set([
+  updateDeps,
+  updateScripts,
+  updateWorkspaces,
+])
+
+// list of all keys that are handled by "knownSteps" helpers
+const knownKeys = new Set([
+  ...updateDeps.knownKeys,
+  'scripts',
+  'workspaces',
+])
+
+class PackageJson {
+  static normalizeSteps = Object.freeze([
+    '_id',
+    '_attributes',
+    'bundledDependencies',
+    'bundleDependencies',
+    'optionalDedupe',
+    'scripts',
+    'funding',
+    'bin',
+  ])
+
+  // npm pkg fix
+  static fixSteps = Object.freeze([
+    'binRefs',
+    'bundleDependencies',
+    'bundleDependenciesFalse',
+    'fixName',
+    'fixNameField',
+    'fixVersionField',
+    'fixRepositoryField',
+    'fixDependencies',
+    'devDependencies',
+    'scriptpath',
+  ])
+
+  static prepareSteps = Object.freeze([
+    '_id',
+    '_attributes',
+    'bundledDependencies',
+    'bundleDependencies',
+    'bundleDependenciesDeleteFalse',
+    'gypfile',
+    'serverjs',
+    'scriptpath',
+    'authors',
+    'readme',
+    'mans',
+    'binDir',
+    'gitHead',
+    'fillTypes',
+    'normalizeData',
+    'binRefs',
+  ])
+
+  // create a new empty package.json, so we can save at the given path even
+  // though we didn't start from a parsed file
+  static async create (path, opts = {}) {
+    const p = new PackageJson()
+    await p.create(path)
+    if (opts.data) {
+      return p.update(opts.data)
+    }
+    return p
+  }
+
+  // Loads a package.json at given path and JSON parses
+  static async load (path, opts = {}) {
+    const p = new PackageJson()
+    // Avoid try/catch if we aren't going to create
+    if (!opts.create) {
+      return p.load(path)
+    }
+
+    try {
+      return await p.load(path)
+    } catch (err) {
+      if (!err.message.startsWith('Could not read package.json')) {
+        throw err
+      }
+      return await p.create(path)
+    }
+  }
+
+  // npm pkg fix
+  static async fix (path, opts) {
+    const p = new PackageJson()
+    await p.load(path, true)
+    return p.fix(opts)
+  }
+
+  // read-package-json compatible behavior
+  static async prepare (path, opts) {
+    const p = new PackageJson()
+    await p.load(path, true)
+    return p.prepare(opts)
+  }
+
+  // read-package-json-fast compatible behavior
+  static async normalize (path, opts) {
+    const p = new PackageJson()
+    await p.load(path)
+    return p.normalize(opts)
+  }
+
+  #path
+  #manifest
+  #readFileContent = ''
+  #canSave = true
+
+  // Load content from given path
+  async load (path, parseIndex) {
+    this.#path = path
+    let parseErr
+    try {
+      this.#readFileContent = await read(this.filename)
+    } catch (err) {
+      if (!parseIndex) {
+        throw err
+      }
+      parseErr = err
+    }
+
+    if (parseErr) {
+      const indexFile = resolve(this.path, 'index.js')
+      let indexFileContent
+      try {
+        indexFileContent = await readFile(indexFile, 'utf8')
+      } catch (err) {
+        throw parseErr
+      }
+      try {
+        this.fromComment(indexFileContent)
+      } catch (err) {
+        throw parseErr
+      }
+      // This wasn't a package.json so prevent saving
+      this.#canSave = false
+      return this
+    }
+
+    return this.fromJSON(this.#readFileContent)
+  }
+
+  // Load data from a JSON string/buffer
+  fromJSON (data) {
+    this.#manifest = parse(data)
+    return this
+  }
+
+  fromContent (data) {
+    this.#manifest = data
+    this.#canSave = false
+    return this
+  }
+
+  // Load data from a comment
+  // /**package { "name": "foo", "version": "1.2.3", ... } **/
+  fromComment (data) {
+    data = data.split(/^\/\*\*package(?:\s|$)/m)
+
+    if (data.length < 2) {
+      throw new Error('File has no package in comments')
+    }
+    data = data[1]
+    data = data.split(/\*\*\/$/m)
+
+    if (data.length < 2) {
+      throw new Error('File has no package in comments')
+    }
+    data = data[0]
+    data = data.replace(/^\s*\*/mg, '')
+
+    this.#manifest = parseJSON(data)
+    return this
+  }
+
+  get content () {
+    return this.#manifest
+  }
+
+  get path () {
+    return this.#path
+  }
+
+  get filename () {
+    if (this.path) {
+      return resolve(this.path, 'package.json')
+    }
+    return undefined
+  }
+
+  create (path) {
+    this.#path = path
+    this.#manifest = {}
+    return this
+  }
+
+  // This should be the ONLY way to set content in the manifest
+  update (content) {
+    if (!this.content) {
+      throw new Error('Can not update without content.  Please `load` or `create`')
+    }
+
+    for (const step of knownSteps) {
+      this.#manifest = step({ content, originalContent: this.content })
+    }
+
+    // unknown properties will just be overwritten
+    for (const [key, value] of Object.entries(content)) {
+      if (!knownKeys.has(key)) {
+        this.content[key] = value
+      }
+    }
+
+    return this
+  }
+
+  async save ({ sort } = {}) {
+    if (!this.#canSave) {
+      throw new Error('No package.json to save to')
+    }
+    const {
+      [Symbol.for('indent')]: indent,
+      [Symbol.for('newline')]: newline,
+      ...rest
+    } = this.content
+
+    const format = indent === undefined ? '  ' : indent
+    const eol = newline === undefined ? '\n' : newline
+
+    const content = sort ? packageSort(rest) : rest
+
+    const fileContent = `${
+      JSON.stringify(content, null, format)
+    }\n`
+      .replace(/\n/g, eol)
+
+    if (fileContent.trim() !== this.#readFileContent.trim()) {
+      const written = await writeFile(this.filename, fileContent)
+      this.#readFileContent = fileContent
+      return written
+    }
+  }
+
+  async normalize (opts = {}) {
+    if (!opts.steps) {
+      opts.steps = this.constructor.normalizeSteps
+    }
+    await normalize(this, opts)
+    return this
+  }
+
+  async prepare (opts = {}) {
+    if (!opts.steps) {
+      opts.steps = this.constructor.prepareSteps
+    }
+    await normalize(this, opts)
+    return this
+  }
+
+  async fix (opts = {}) {
+    // This one is not overridable
+    opts.steps = this.constructor.fixSteps
+    await normalize(this, opts)
+    return this
+  }
+}
+
+module.exports = PackageJson
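
Typical end-to-end use of the class above, assuming a hypothetical project directory; `update()` is the only supported way to mutate content, and `save({ sort: true })` applies `packageSort` before writing:

```js
const PackageJson = require('@npmcli/package-json')

async function demo () {
  const pkg = await PackageJson.load('/path/to/project')
  // spread the existing scripts, since update() replaces the scripts block wholesale
  pkg.update({
    scripts: { ...pkg.content.scripts, test: 'tap' },
  })
  await pkg.save({ sort: true })
}
```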
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js
new file mode 100644
index 0000000000000..79b0bafbcd3a4
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js
@@ -0,0 +1,257 @@
+// Originally normalize-package-data
+
+const url = require('node:url')
+const hostedGitInfo = require('hosted-git-info')
+const validateLicense = require('validate-npm-package-license')
+
+const typos = {
+  dependancies: 'dependencies',
+  dependecies: 'dependencies',
+  depdenencies: 'dependencies',
+  devEependencies: 'devDependencies',
+  depends: 'dependencies',
+  'dev-dependencies': 'devDependencies',
+  devDependences: 'devDependencies',
+  devDepenencies: 'devDependencies',
+  devdependencies: 'devDependencies',
+  repostitory: 'repository',
+  repo: 'repository',
+  prefereGlobal: 'preferGlobal',
+  hompage: 'homepage',
+  hampage: 'homepage',
+  autohr: 'author',
+  autor: 'author',
+  contributers: 'contributors',
+  publicationConfig: 'publishConfig',
+  script: 'scripts',
+}
+
+const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
+
+// Extracts description from contents of a readme file in markdown format
+function extractDescription (description) {
+  // the first block of text before the first heading that isn't the first line heading
+  const lines = description.trim().split('\n')
+  let start = 0
+  // skip initial empty lines and lines that start with #
+  while (lines[start]?.trim().match(/^(#|$)/)) {
+    start++
+  }
+  let end = start + 1
+  // keep going till we get to the end or an empty line
+  while (end < lines.length && lines[end].trim()) {
+    end++
+  }
+  return lines.slice(start, end).join(' ').trim()
+}
+
+function stringifyPerson (person) {
+  if (typeof person !== 'string') {
+    const name = person.name || ''
+    const u = person.url || person.web
+    const wrappedUrl = u ? (' (' + u + ')') : ''
+    const e = person.email || person.mail
+    const wrappedEmail = e ? (' <' + e + '>') : ''
+    person = name + wrappedEmail + wrappedUrl
+  }
+  const matchedName = person.match(/^([^(<]+)/)
+  const matchedUrl = person.match(/\(([^()]+)\)/)
+  const matchedEmail = person.match(/<([^<>]+)>/)
+  const parsed = {}
+  if (matchedName?.[0].trim()) {
+    parsed.name = matchedName[0].trim()
+  }
+  if (matchedEmail) {
+    parsed.email = matchedEmail[1]
+  }
+  if (matchedUrl) {
+    parsed.url = matchedUrl[1]
+  }
+  return parsed
+}
+
+function normalizeData (data, changes) {
+  // fixDescriptionField
+  if (data.description && typeof data.description !== 'string') {
+    changes?.push(`'description' field should be a string`)
+    delete data.description
+  }
+  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
+    data.description = extractDescription(data.readme)
+  }
+  if (data.description === undefined) {
+    delete data.description
+  }
+  if (!data.description) {
+    changes?.push('No description')
+  }
+
+  // fixModulesField
+  if (data.modules) {
+    changes?.push(`modules field is deprecated`)
+    delete data.modules
+  }
+
+  // fixFilesField
+  const files = data.files
+  if (files && !Array.isArray(files)) {
+    changes?.push(`Invalid 'files' member`)
+    delete data.files
+  } else if (data.files) {
+    data.files = data.files.filter(function (file) {
+      if (!file || typeof file !== 'string') {
+        changes?.push(`Invalid filename in 'files' list: ${file}`)
+        return false
+      } else {
+        return true
+      }
+    })
+  }
+
+  // fixManField
+  if (data.man && typeof data.man === 'string') {
+    data.man = [data.man]
+  }
+
+  // fixBugsField
+  if (!data.bugs && data.repository?.url) {
+    const hosted = hostedGitInfo.fromUrl(data.repository.url)
+    if (hosted && hosted.bugs()) {
+      data.bugs = { url: hosted.bugs() }
+    }
+  } else if (data.bugs) {
+    if (typeof data.bugs === 'string') {
+      if (isEmail(data.bugs)) {
+        data.bugs = { email: data.bugs }
+        /* eslint-disable-next-line node/no-deprecated-api */
+      } else if (url.parse(data.bugs).protocol) {
+        data.bugs = { url: data.bugs }
+      } else {
+        changes?.push(`Bug string field must be url, email, or {email,url}`)
+      }
+    } else {
+      for (const k in data.bugs) {
+        if (['web', 'name'].includes(k)) {
+          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
+          data.bugs.url = data.bugs[k]
+          delete data.bugs[k]
+        }
+      }
+      const oldBugs = data.bugs
+      data.bugs = {}
+      if (oldBugs.url) {
+        /* eslint-disable-next-line node/no-deprecated-api */
+        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
+          data.bugs.url = oldBugs.url
+        } else {
+          changes?.push('bugs.url field must be a string url. Deleted.')
+        }
+      }
+      if (oldBugs.email) {
+        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
+          data.bugs.email = oldBugs.email
+        } else {
+          changes?.push('bugs.email field must be a string email. Deleted.')
+        }
+      }
+    }
+    if (!data.bugs.email && !data.bugs.url) {
+      delete data.bugs
+      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
+    }
+  }
+  // fixKeywordsField
+  if (typeof data.keywords === 'string') {
+    data.keywords = data.keywords.split(/,\s+/)
+  }
+  if (data.keywords && !Array.isArray(data.keywords)) {
+    delete data.keywords
+    changes?.push(`keywords should be an array of strings`)
+  } else if (data.keywords) {
+    data.keywords = data.keywords.filter(function (kw) {
+      if (typeof kw !== 'string' || !kw) {
+        changes?.push(`keywords should be an array of strings`)
+        return false
+      } else {
+        return true
+      }
+    })
+  }
+  // fixBundleDependenciesField
+  const bdd = 'bundledDependencies'
+  const bd = 'bundleDependencies'
+  if (data[bdd] && !data[bd]) {
+    data[bd] = data[bdd]
+    delete data[bdd]
+  }
+  if (data[bd] && !Array.isArray(data[bd])) {
+    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
+    delete data[bd]
+  } else if (data[bd]) {
+    data[bd] = data[bd].filter(function (filtered) {
+      if (!filtered || typeof filtered !== 'string') {
+        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
+        return false
+      } else {
+        if (!data.dependencies) {
+          data.dependencies = {}
+        }
+        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
+          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
+          data.dependencies[filtered] = '*'
+        }
+        return true
+      }
+    })
+  }
+  // fixHomepageField
+  if (!data.homepage && data.repository && data.repository.url) {
+    const hosted = hostedGitInfo.fromUrl(data.repository.url)
+    if (hosted) {
+      data.homepage = hosted.docs()
+    }
+  }
+  if (data.homepage) {
+    if (typeof data.homepage !== 'string') {
+      changes?.push('homepage field must be a string url. Deleted.')
+      delete data.homepage
+    } else {
+      /* eslint-disable-next-line node/no-deprecated-api */
+      if (!url.parse(data.homepage).protocol) {
+        data.homepage = 'http://' + data.homepage
+      }
+    }
+  }
+  // fixReadmeField
+  if (!data.readme) {
+    changes?.push('No README data')
+    data.readme = 'ERROR: No README data found!'
+  }
+  // fixLicenseField
+  const license = data.license || data.licence
+  if (!license) {
+    changes?.push('No license field.')
+  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
+    changes?.push('license should be a valid SPDX license expression')
+  } else if (!validateLicense(license).validForNewPackages) {
+    changes?.push('license should be a valid SPDX license expression')
+  }
+  // fixPeople
+  if (data.author) {
+    data.author = stringifyPerson(data.author)
+  }
+  ['maintainers', 'contributors'].forEach(function (set) {
+    if (!Array.isArray(data[set])) {
+      return
+    }
+    data[set] = data[set].map(stringifyPerson)
+  })
+  // fixTypos
+  for (const d in typos) {
+    if (Object.prototype.hasOwnProperty.call(data, d)) {
+      changes?.push(`${d} should probably be ${typos[d]}.`)
+    }
+  }
+}
+
+module.exports = { normalizeData }
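
A small sketch of `normalizeData` in isolation; the second argument collects human-readable notes when provided (field values here are hypothetical):

```js
const { normalizeData } = require('@npmcli/package-json/lib/normalize-data.js')

const data = {
  name: 'demo',
  version: '1.0.0',
  bugs: 'support@example.com',
  keywords: 'cli, tools',
}
const changes = []
normalizeData(data, changes)
// data.bugs     -> { email: 'support@example.com' }
// data.keywords -> ['cli', 'tools']
// data.readme   -> 'ERROR: No README data found!'
// changes       -> ['No description', 'No README data', 'No license field.']
```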
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js
new file mode 100644
index 0000000000000..845f6753a9a00
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js
@@ -0,0 +1,601 @@
+const valid = require('semver/functions/valid')
+const clean = require('semver/functions/clean')
+const fs = require('node:fs/promises')
+const path = require('node:path')
+const { log } = require('proc-log')
+const moduleBuiltin = require('node:module')
+
+/**
+ * @type {import('hosted-git-info')}
+ */
+let _hostedGitInfo
+function lazyHostedGitInfo () {
+  if (!_hostedGitInfo) {
+    _hostedGitInfo = require('hosted-git-info')
+  }
+  return _hostedGitInfo
+}
+
+/**
+ * @type {import('glob').glob}
+ */
+let _glob
+function lazyLoadGlob () {
+  if (!_glob) {
+    _glob = require('glob').glob
+  }
+  return _glob
+}
+
+// used to be npm-normalize-package-bin
+function normalizePackageBin (pkg, changes) {
+  if (pkg.bin) {
+    if (typeof pkg.bin === 'string' && pkg.name) {
+      changes?.push('"bin" was converted to an object')
+      pkg.bin = { [pkg.name]: pkg.bin }
+    } else if (Array.isArray(pkg.bin)) {
+      changes?.push('"bin" was converted to an object')
+      pkg.bin = pkg.bin.reduce((acc, k) => {
+        acc[path.basename(k)] = k
+        return acc
+      }, {})
+    }
+    if (typeof pkg.bin === 'object') {
+      for (const binKey in pkg.bin) {
+        if (typeof pkg.bin[binKey] !== 'string') {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+        const base = path.basename(secureAndUnixifyPath(binKey))
+        if (!base) {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+
+        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
+
+        if (!binTarget) {
+          delete pkg.bin[binKey]
+          changes?.push(`removed invalid "bin[${binKey}]"`)
+          continue
+        }
+
+        if (base !== binKey) {
+          delete pkg.bin[binKey]
+          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
+        }
+        if (binTarget !== pkg.bin[binKey]) {
+          changes?.push(`"bin[${base}]" script name was cleaned`)
+        }
+        pkg.bin[base] = binTarget
+      }
+
+      if (Object.keys(pkg.bin).length === 0) {
+        changes?.push('empty "bin" was removed')
+        delete pkg.bin
+      }
+
+      return pkg
+    }
+  }
+  delete pkg.bin
+}
+
+function normalizePackageMan (pkg, changes) {
+  if (pkg.man) {
+    const mans = []
+    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
+      if (typeof man !== 'string') {
+        changes?.push(`removed invalid "man [${man}]"`)
+      } else {
+        mans.push(secureAndUnixifyPath(man))
+      }
+    }
+
+    if (!mans.length) {
+      changes?.push('empty "man" was removed')
+    } else {
+      pkg.man = mans
+      return pkg
+    }
+  }
+  delete pkg.man
+}
+
+function isCorrectlyEncodedName (spec) {
+  return !spec.match(/[/@\s+%:]/) &&
+    spec === encodeURIComponent(spec)
+}
+
+function isValidScopedPackageName (spec) {
+  if (spec.charAt(0) !== '@') {
+    return false
+  }
+
+  const rest = spec.slice(1).split('/')
+  if (rest.length !== 2) {
+    return false
+  }
+
+  return rest[0] && rest[1] &&
+    rest[0] === encodeURIComponent(rest[0]) &&
+    rest[1] === encodeURIComponent(rest[1])
+}
+
+function unixifyPath (ref) {
+  return ref.replace(/\\|:/g, '/')
+}
+
+function secureAndUnixifyPath (ref) {
+  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
+  return secured.startsWith('./') ? '' : secured
+}
+
+// We don't want the `changes` array in here by default because this is a hot
+// path for parsing packuments during install.  So the calling method passes it
+// in if it wants to track changes.
+const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
+  if (!pkg.content) {
+    throw new Error('Can not normalize without content')
+  }
+  const data = pkg.content
+  const scripts = data.scripts || {}
+  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
+
+  // name and version are load bearing so we have to clean them up first
+  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
+    if (!data.name && !strict) {
+      changes?.push('Missing "name" field was set to an empty string')
+      data.name = ''
+    } else {
+      if (typeof data.name !== 'string') {
+        throw new Error('name field must be a string.')
+      }
+      if (!strict) {
+        const name = data.name.trim()
+        if (data.name !== name) {
+          changes?.push(`Whitespace was trimmed from "name"`)
+          data.name = name
+        }
+      }
+
+      if (data.name.startsWith('.') ||
+        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
+        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
+        data.name.toLowerCase() === 'node_modules' ||
+        data.name.toLowerCase() === 'favicon.ico') {
+        throw new Error('Invalid name: ' + JSON.stringify(data.name))
+      }
+    }
+  }
+
+  if (steps.includes('fixName')) {
+    // Check for conflicts with builtin modules
+    if (moduleBuiltin.builtinModules.includes(data.name)) {
+      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
+    }
+  }
+
+  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
+    // allow "loose" semver 1.0 versions in non-strict mode
+    // enforce strict semver 2.0 compliance in strict mode
+    const loose = !strict
+    if (!data.version) {
+      data.version = ''
+    } else {
+      if (!valid(data.version, loose)) {
+        throw new Error(`Invalid version: "${data.version}"`)
+      }
+      const version = clean(data.version, loose)
+      if (version !== data.version) {
+        changes?.push(`"version" was cleaned and set to "${version}"`)
+        data.version = version
+      }
+    }
+  }
+  // remove attributes that start with "_"
+  if (steps.includes('_attributes')) {
+    for (const key in data) {
+      if (key.startsWith('_')) {
+        changes?.push(`"${key}" was removed`)
+        delete pkg.content[key]
+      }
+    }
+  }
+
+  // build the "_id" attribute
+  if (steps.includes('_id')) {
+    if (data.name && data.version) {
+      changes?.push(`"_id" was set to ${pkgId}`)
+      data._id = pkgId
+    }
+  }
+
+  // fix bundledDependencies typo
+  // normalize bundleDependencies
+  if (steps.includes('bundledDependencies')) {
+    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
+      data.bundleDependencies = data.bundledDependencies
+    }
+    changes?.push(`Deleted incorrect "bundledDependencies"`)
+    delete data.bundledDependencies
+  }
+  // expand "bundleDependencies: true" or translate from an object
+  if (steps.includes('bundleDependencies')) {
+    const bd = data.bundleDependencies
+    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
+      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
+      data.bundleDependencies = []
+    } else if (bd === true) {
+      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
+      data.bundleDependencies = Object.keys(data.dependencies || {})
+    } else if (bd && typeof bd === 'object') {
+      if (!Array.isArray(bd)) {
+        changes?.push(`"bundleDependencies" was changed from an object to an array`)
+        data.bundleDependencies = Object.keys(bd)
+      }
+    } else if ('bundleDependencies' in data) {
+      changes?.push(`"bundleDependencies" was removed`)
+      delete data.bundleDependencies
+    }
+  }
+
+  // it was once common practice to list deps both in optionalDependencies and
+  // in dependencies, to support npm versions that did not know about
+  // optionalDependencies.  This is no longer a relevant need, so duplicating
+  // the deps in two places is unnecessary and excessive.
+  if (steps.includes('optionalDedupe')) {
+    if (data.dependencies &&
+      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
+      for (const name in data.optionalDependencies) {
+        changes?.push(`optionalDependencies."${name}" was removed`)
+        delete data.dependencies[name]
+      }
+      if (!Object.keys(data.dependencies).length) {
+        changes?.push(`Empty "optionalDependencies" was removed`)
+        delete data.dependencies
+      }
+    }
+  }
+
+  // add "install" attribute if any "*.gyp" files exist
+  if (steps.includes('gypfile')) {
+    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
+      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
+      if (files.length) {
+        scripts.install = 'node-gyp rebuild'
+        data.scripts = scripts
+        data.gypfile = true
+        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
+        changes?.push(`"gypfile" was set to "true"`)
+      }
+    }
+  }
+
+  // add "start" attribute if "server.js" exists
+  if (steps.includes('serverjs') && !scripts.start) {
+    try {
+      await fs.access(path.join(pkg.path, 'server.js'))
+      scripts.start = 'node server.js'
+      data.scripts = scripts
+      changes?.push('"scripts.start" was set to "node server.js"')
+    } catch {
+      // do nothing
+    }
+  }
+
+  // strip "node_modules/.bin" from scripts entries
+  // remove invalid scripts entries (non-strings)
+  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
+    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
+    if (typeof data.scripts === 'object') {
+      for (const name in data.scripts) {
+        if (typeof data.scripts[name] !== 'string') {
+          delete data.scripts[name]
+          changes?.push(`Invalid scripts."${name}" was removed`)
+        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
+          data.scripts[name] = data.scripts[name].replace(spre, '')
+          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
+        }
+      }
+    } else {
+      changes?.push(`Removed invalid "scripts"`)
+      delete data.scripts
+    }
+  }
+
+  if (steps.includes('funding')) {
+    if (data.funding && typeof data.funding === 'string') {
+      data.funding = { url: data.funding }
+      changes?.push(`"funding" was changed to an object with a url attribute`)
+    }
+  }
+
+  // populate "authors" attribute
+  if (steps.includes('authors') && !data.contributors) {
+    try {
+      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
+      const authors = authorData.split(/\r?\n/g)
+        .map(line => line.replace(/^\s*#.*$/, '').trim())
+        .filter(line => line)
+      data.contributors = authors
+      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
+    } catch {
+      // do nothing
+    }
+  }
+
+  // populate "readme" attribute
+  if (steps.includes('readme') && !data.readme) {
+    const mdre = /\.m?a?r?k?d?o?w?n?$/i
+    const files = await lazyLoadGlob()('{README,README.*}', {
+      cwd: pkg.path,
+      nocase: true,
+      mark: true,
+    })
+    let readmeFile
+    for (const file of files) {
+      // don't accept directories.
+      if (!file.endsWith(path.sep)) {
+        if (file.match(mdre)) {
+          readmeFile = file
+          break
+        }
+        if (file.endsWith('README')) {
+          readmeFile = file
+        }
+      }
+    }
+    if (readmeFile) {
+      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
+      data.readme = readmeData
+      data.readmeFilename = readmeFile
+      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
+      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
+    }
+    if (!data.readme) {
+      data.readme = 'ERROR: No README data found!'
+    }
+  }
+
+  // expand directories.man
+  if (steps.includes('mans')) {
+    if (data.directories?.man && !data.man) {
+      const manDir = secureAndUnixifyPath(data.directories.man)
+      const cwd = path.resolve(pkg.path, manDir)
+      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
+      data.man = files.map(man =>
+        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
+      )
+    }
+    normalizePackageMan(data, changes)
+  }
+
+  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
+    normalizePackageBin(data, changes)
+  }
+
+  // expand "directories.bin"
+  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
+    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
+    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
+    data.bin = bins.reduce((acc, binFile) => {
+      if (binFile && !binFile.startsWith('.')) {
+        const binName = path.basename(binFile)
+        acc[binName] = path.join(data.directories.bin, binFile)
+      }
+      return acc
+    }, {})
+    // *sigh*
+    normalizePackageBin(data, changes)
+  }
+
+  // populate "gitHead" attribute
+  if (steps.includes('gitHead') && !data.gitHead) {
+    const git = require('@npmcli/git')
+    const gitRoot = await git.find({ cwd: pkg.path, root })
+    let head
+    if (gitRoot) {
+      try {
+        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
+      } catch (err) {
+      // do nothing
+      }
+    }
+    let headData
+    if (head) {
+      if (head.startsWith('ref: ')) {
+        const headRef = head.replace(/^ref: /, '').trim()
+        const headFile = path.resolve(gitRoot, '.git', headRef)
+        try {
+          headData = await fs.readFile(headFile, 'utf8')
+          headData = headData.replace(/^ref: /, '').trim()
+        } catch (err) {
+          // do nothing
+        }
+        if (!headData) {
+          const packFile = path.resolve(gitRoot, '.git/packed-refs')
+          try {
+            let refs = await fs.readFile(packFile, 'utf8')
+            if (refs) {
+              refs = refs.split('\n')
+              for (let i = 0; i < refs.length; i++) {
+                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
+                if (match && match[2].trim() === headRef) {
+                  headData = match[1]
+                  break
+                }
+              }
+            }
+          } catch {
+            // do nothing
+          }
+        }
+      } else {
+        headData = head.trim()
+      }
+    }
+    if (headData) {
+      data.gitHead = headData
+    }
+  }
+
+  // populate "types" attribute
+  if (steps.includes('fillTypes')) {
+    const index = data.main || 'index.js'
+
+    if (typeof index !== 'string') {
+      throw new TypeError('The "main" attribute must be of type string.')
+    }
+
+    // TODO exports is much more complicated than this in verbose format
+    // We need to support for instance
+
+    // "exports": {
+    //   ".": [
+    //     {
+    //       "default": "./lib/npm.js"
+    //     },
+    //     "./lib/npm.js"
+    //   ],
+    //   "./package.json": "./package.json"
+    // },
+    // as well as conditional exports
+
+    // if (data.exports && typeof data.exports === 'string') {
+    //   index = data.exports
+    // }
+
+    // if (data.exports && data.exports['.']) {
+    //   index = data.exports['.']
+    //   if (typeof index !== 'string') {
+    //   }
+    // }
+    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
+    const dts = `./${extless}.d.ts`
+    const hasDTSFields = 'types' in data || 'typings' in data
+    if (!hasDTSFields) {
+      try {
+        await fs.access(path.join(pkg.path, dts))
+        data.types = dts.split(path.sep).join('/')
+      } catch {
+        // do nothing
+      }
+    }
+  }
+
+  // "normalizeData" from "read-package-json", which was just a call through to
+  // "normalize-package-data".  We only call the "fixer" functions because
+  // outside of that it was also clobbering _id (which we already conditionally
+  // do) and also adding the gypfile script (which we also already
+  // conditionally do)
+
+  // Some steps are isolated so we can do a limited subset of these in `fix`
+  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
+    if (data.repositories) {
+      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
+      data.repository = data.repositories[0]
+    }
+    if (data.repository) {
+      if (typeof data.repository === 'string') {
+        changes?.push('"repository" was changed from a string to an object')
+        data.repository = {
+          type: 'git',
+          url: data.repository,
+        }
+      }
+      if (data.repository.url) {
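+        // normalize the URL via hosted-git-info; shortcut specs (e.g. "org/repo") expand to their https form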
+        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
+        let r
+        if (hosted) {
+          if (hosted.getDefaultRepresentation() === 'shortcut') {
+            r = hosted.https()
+          } else {
+            r = hosted.toString()
+          }
+          if (r !== data.repository.url) {
+            changes?.push(`"repository.url" was normalized to "${r}"`)
+            data.repository.url = r
+          }
+        }
+      }
+    }
+  }
+
+  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
+    // peerDependencies?
+    // devDependencies is meaningless here, it's ignored on an installed package
+    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
+      if (data[type]) {
+        let secondWarning = true
+        if (typeof data[type] === 'string') {
+          changes?.push(`"${type}" was converted from a string into an object`)
+          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
+          secondWarning = false
+        }
+        if (Array.isArray(data[type])) {
+          if (secondWarning) {
+            changes?.push(`"${type}" was converted from an array into an object`)
+          }
+          const o = {}
+          for (const d of data[type]) {
+            if (typeof d === 'string') {
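+              // split a "name@range" style entry into its name and version range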
+              const dep = d.trim().split(/(:?[@\s><=])/)
+              const dn = dep.shift()
+              const dv = dep.join('').replace(/^@/, '').trim()
+              o[dn] = dv
+            }
+          }
+          data[type] = o
+        }
+      }
+    }
+    // normalize-package-data used to put optional dependencies BACK into
+    // dependencies here, we no longer do this
+
+    for (const deps of ['dependencies', 'devDependencies']) {
+      if (deps in data) {
+        if (!data[deps] || typeof data[deps] !== 'object') {
+          changes?.push(`Removed invalid "${deps}"`)
+          delete data[deps]
+        } else {
+          for (const d in data[deps]) {
+            const r = data[deps][d]
+            if (typeof r !== 'string') {
+              changes?.push(`Removed invalid "${deps}.${d}"`)
+              delete data[deps][d]
+            }
+            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
+            if (hosted && hosted !== data[deps][d]) {
+              changes?.push(`Normalized git reference to "${deps}.${d}"`)
+              data[deps][d] = hosted.toString()
+            }
+          }
+        }
+      }
+    }
+  }
+
+  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
+  if (steps.includes('normalizeData')) {
+    const { normalizeData } = require('./normalize-data.js')
+    normalizeData(data, changes)
+  }
+
+  // Warn if the bin references don't point to anything.  This might be better
+  // in normalize-package-data if it had access to the file path.
+  if (steps.includes('binRefs') && data.bin instanceof Object) {
+    for (const key in data.bin) {
+      try {
+        await fs.access(path.resolve(pkg.path, data.bin[key]))
+      } catch {
+        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
+        // XXX: should a future breaking change delete bin entries that cannot be accessed?
+      }
+    }
+  }
+}
+
+module.exports = normalize
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js
new file mode 100644
index 0000000000000..d6c86ce388e6c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js
@@ -0,0 +1,39 @@
+// This is JUST the code needed to open a package.json file and parse it.
+// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
+
+const { readFile } = require('fs/promises')
+const parseJSON = require('json-parse-even-better-errors')
+
+async function read (filename) {
+  try {
+    const data = await readFile(filename, 'utf8')
+    return data
+  } catch (err) {
+    err.message = `Could not read package.json: ${err}`
+    throw err
+  }
+}
+
+function parse (data) {
+  try {
+    const content = parseJSON(data)
+    return content
+  } catch (err) {
+    err.message = `Invalid package.json: ${err}`
+    throw err
+  }
+}
+
+// This is what most external libs will use.
+// PackageJson will call read and parse separately
+async function readPackage (filename) {
+  const data = await read(filename)
+  const content = parse(data)
+  return content
+}
+
+module.exports = {
+  read,
+  parse,
+  readPackage,
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js
new file mode 100644
index 0000000000000..0bd0d5199da58
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js
@@ -0,0 +1,101 @@
+/**
+ * arbitrary sort order for package.json largely pulled from:
+ * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
+ *
+ * cross checked with:
+ * https://github.com/npm/types/blob/main/types/index.d.ts#L104
+ * https://docs.npmjs.com/cli/configuring-npm/package-json
+ */
+function packageSort (json) {
+  const {
+    name,
+    version,
+    private: isPrivate,
+    description,
+    keywords,
+    homepage,
+    bugs,
+    repository,
+    funding,
+    license,
+    author,
+    maintainers,
+    contributors,
+    type,
+    imports,
+    exports,
+    main,
+    browser,
+    types,
+    bin,
+    man,
+    directories,
+    files,
+    workspaces,
+    scripts,
+    config,
+    dependencies,
+    devDependencies,
+    peerDependencies,
+    peerDependenciesMeta,
+    optionalDependencies,
+    bundledDependencies,
+    bundleDependencies,
+    engines,
+    os,
+    cpu,
+    publishConfig,
+    devEngines,
+    licenses,
+    overrides,
+    ...rest
+  } = json
+
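+  // re-add only the keys that were present, in canonical order; unrecognized keys follow via ...rest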
+  return {
+    ...(typeof name !== 'undefined' ? { name } : {}),
+    ...(typeof version !== 'undefined' ? { version } : {}),
+    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
+    ...(typeof description !== 'undefined' ? { description } : {}),
+    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
+    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
+    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
+    ...(typeof repository !== 'undefined' ? { repository } : {}),
+    ...(typeof funding !== 'undefined' ? { funding } : {}),
+    ...(typeof license !== 'undefined' ? { license } : {}),
+    ...(typeof author !== 'undefined' ? { author } : {}),
+    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
+    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
+    ...(typeof type !== 'undefined' ? { type } : {}),
+    ...(typeof imports !== 'undefined' ? { imports } : {}),
+    ...(typeof exports !== 'undefined' ? { exports } : {}),
+    ...(typeof main !== 'undefined' ? { main } : {}),
+    ...(typeof browser !== 'undefined' ? { browser } : {}),
+    ...(typeof types !== 'undefined' ? { types } : {}),
+    ...(typeof bin !== 'undefined' ? { bin } : {}),
+    ...(typeof man !== 'undefined' ? { man } : {}),
+    ...(typeof directories !== 'undefined' ? { directories } : {}),
+    ...(typeof files !== 'undefined' ? { files } : {}),
+    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
+    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
+    ...(typeof config !== 'undefined' ? { config } : {}),
+    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
+    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
+    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
+    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
+    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
+    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
+    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
+    ...(typeof engines !== 'undefined' ? { engines } : {}),
+    ...(typeof os !== 'undefined' ? { os } : {}),
+    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
+    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
+    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
+    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
+    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
+    ...rest,
+  }
+}
+
+module.exports = {
+  packageSort,
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js
new file mode 100644
index 0000000000000..7259949ab661d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js
@@ -0,0 +1,75 @@
+const depTypes = new Set([
+  'dependencies',
+  'optionalDependencies',
+  'devDependencies',
+  'peerDependencies',
+])
+
+// sort alphabetically all types of deps for a given package
+const orderDeps = (content) => {
+  for (const type of depTypes) {
+    if (content && content[type]) {
+      content[type] = Object.keys(content[type])
+        .sort((a, b) => a.localeCompare(b, 'en'))
+        .reduce((res, key) => {
+          res[key] = content[type][key]
+          return res
+        }, {})
+    }
+  }
+  return content
+}
+
+const updateDependencies = ({ content, originalContent }) => {
+  const pkg = orderDeps({
+    ...content,
+  })
+
+  // optionalDependencies don't need to be repeated in two places
+  if (pkg.dependencies) {
+    if (pkg.optionalDependencies) {
+      for (const name of Object.keys(pkg.optionalDependencies)) {
+        delete pkg.dependencies[name]
+      }
+    }
+  }
+
+  const result = { ...originalContent }
+
+  // loop through all types of dependencies and update package json pkg
+  for (const type of depTypes) {
+    if (pkg[type]) {
+      result[type] = pkg[type]
+    }
+
+    // prune empty type props from resulting object
+    const emptyDepType =
+      pkg[type]
+      && typeof pkg === 'object'
+      && Object.keys(pkg[type]).length === 0
+    if (emptyDepType) {
+      delete result[type]
+    }
+  }
+
+  // if original package.json had dep in peerDeps AND deps, preserve that.
+  const { dependencies: origProd, peerDependencies: origPeer } =
+    originalContent || {}
+  const { peerDependencies: newPeer } = result
+  if (origProd && origPeer && newPeer) {
+    // we have original prod/peer deps, and new peer deps
+    // copy over any that were in both in the original
+    for (const name of Object.keys(origPeer)) {
+      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
+        result.dependencies = result.dependencies || {}
+        result.dependencies[name] = newPeer[name]
+      }
+    }
+  }
+
+  return result
+}
+
+updateDependencies.knownKeys = depTypes
+
+module.exports = updateDependencies
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js
new file mode 100644
index 0000000000000..30495e54cc3c7
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js
@@ -0,0 +1,29 @@
+const updateScripts = ({ content, originalContent = {} }) => {
+  const newScripts = content.scripts
+
+  if (!newScripts) {
+    return originalContent
+  }
+
+  // validate scripts content being appended
+  const hasInvalidScripts = () =>
+    Object.entries(newScripts)
+      .some(([key, value]) =>
+        typeof key !== 'string' || typeof value !== 'string')
+  if (hasInvalidScripts()) {
+    throw Object.assign(
+      new TypeError(
+        'package.json scripts should be a key-value pair of strings.'),
+      { code: 'ESCRIPTSINVALID' }
+    )
+  }
+
+  return {
+    ...originalContent,
+    scripts: {
+      ...newScripts,
+    },
+  }
+}
+
+module.exports = updateScripts
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js
new file mode 100644
index 0000000000000..04bf63230636f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js
@@ -0,0 +1,26 @@
+const updateWorkspaces = ({ content, originalContent = {} }) => {
+  const newWorkspaces = content.workspaces
+
+  if (!newWorkspaces) {
+    return originalContent
+  }
+
+  // validate workspaces content being appended
+  const hasInvalidWorkspaces = () =>
+    newWorkspaces.some(w => !(typeof w === 'string'))
+  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
+    throw Object.assign(
+      new TypeError('workspaces should be an array of strings.'),
+      { code: 'EWORKSPACESINVALID' }
+    )
+  }
+
+  return {
+    ...originalContent,
+    workspaces: [
+      ...newWorkspaces,
+    ],
+  }
+}
+
+module.exports = updateWorkspaces
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/package.json b/node_modules/pacote/node_modules/@npmcli/package-json/package.json
new file mode 100644
index 0000000000000..263d67ff3bc5b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/package-json/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "@npmcli/package-json",
+  "version": "6.2.0",
+  "description": "Programmatic API to update package.json",
+  "keywords": [
+    "npm",
+    "oss"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/package-json.git"
+  },
+  "license": "ISC",
+  "author": "GitHub Inc.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "snap": "tap",
+    "test": "tap",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "dependencies": {
+    "@npmcli/git": "^6.0.0",
+    "glob": "^10.2.2",
+    "hosted-git-info": "^8.0.0",
+    "json-parse-even-better-errors": "^4.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.5.3",
+    "validate-npm-package-license": "^3.0.4"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.1.0",
+    "@npmcli/template-oss": "4.23.6",
+    "read-package-json": "^7.0.0",
+    "read-package-json-fast": "^4.0.0",
+    "tap": "^16.0.1"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.6",
+    "publish": "true"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index a4c6653add2d8..342e81eff6233 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -89,7 +89,7 @@
         "@npmcli/config": "^10.4.0",
         "@npmcli/fs": "^4.0.0",
         "@npmcli/map-workspaces": "^4.0.2",
-        "@npmcli/package-json": "^6.2.0",
+        "@npmcli/package-json": "^7.0.1",
         "@npmcli/promise-spawn": "^8.0.2",
         "@npmcli/redact": "^3.2.2",
         "@npmcli/run-script": "^9.1.0",
@@ -3003,7 +3003,7 @@
       "version": "4.0.1",
       "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
       "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
-      "dev": true,
+      "inBundle": true,
       "license": "MIT",
       "engines": {
         "node": "20 || >=22"
@@ -3013,7 +3013,7 @@
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
       "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
-      "dev": true,
+      "inBundle": true,
       "license": "MIT",
       "dependencies": {
         "@isaacs/balanced-match": "^4.0.1"
@@ -3472,6 +3472,25 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json": {
+      "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
+      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^6.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^8.0.0",
+        "json-parse-even-better-errors": "^4.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.5.3",
+        "validate-npm-package-license": "^3.0.4"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@npmcli/metavuln-calculator": {
       "version": "9.0.1",
       "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-9.0.1.tgz",
@@ -3517,22 +3536,170 @@
       }
     },
     "node_modules/@npmcli/package-json": {
-      "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+      "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-7.0.1.tgz",
+      "integrity": "sha512-956YUeI0YITbk2+KnirCkD19HLzES0habV+Els+dyZaVsaM6VGSiNwnRu6t3CZaqDLz4KXy2zx+0N/Zy6YjlAA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^6.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^8.0.0",
+        "@npmcli/git": "^7.0.0",
+        "glob": "^11.0.3",
+        "hosted-git-info": "^9.0.0",
         "json-parse-even-better-errors": "^4.0.0",
         "proc-log": "^5.0.0",
         "semver": "^7.5.3",
         "validate-npm-package-license": "^3.0.4"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/@npmcli/git": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.0.tgz",
+      "integrity": "sha512-vnz7BVGtOctJAIHouCJdvWBhsTVSICMeUgZo2c7XAi5d5Rrl80S1H7oPym7K03cRuinK5Q6s2dw36+PgXQTcMA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/promise-spawn": "^8.0.0",
+        "ini": "^5.0.0",
+        "lru-cache": "^11.2.1",
+        "npm-pick-manifest": "^11.0.1",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^5.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/glob": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
+      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "foreground-child": "^3.3.1",
+        "jackspeak": "^4.1.1",
+        "minimatch": "^10.0.3",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^2.0.0"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/hosted-git-info": {
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
+      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^11.1.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/jackspeak": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/brace-expansion": "^5.0.0"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/npm-package-arg": {
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
+      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^9.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/npm-pick-manifest": {
+      "version": "11.0.1",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.1.tgz",
+      "integrity": "sha512-HnU7FYSWbo7dTVHtK0G+BXbZ0aIfxz/aUCVLN0979Ec6rGUX5cJ6RbgVx5fqb5G31ufz+BVFA7y1SkRTPVNoVQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "npm-install-checks": "^7.1.0",
+        "npm-normalize-package-bin": "^4.0.0",
+        "npm-package-arg": "^13.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@npmcli/package-json/node_modules/path-scurry": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
+      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^11.0.0",
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
     "node_modules/@npmcli/promise-spawn": {
@@ -3588,6 +3755,25 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@npmcli/run-script/node_modules/@npmcli/package-json": {
+      "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
+      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^6.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^8.0.0",
+        "json-parse-even-better-errors": "^4.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.5.3",
+        "validate-npm-package-license": "^3.0.4"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@npmcli/smoke-tests": {
       "resolved": "smoke-tests",
       "link": true
@@ -3708,6 +3894,25 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/package-json": {
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
+      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^5.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^7.0.0",
+        "json-parse-even-better-errors": "^3.0.0",
+        "normalize-package-data": "^6.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.5.3"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
@@ -3813,22 +4018,179 @@
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
+      "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
+      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^5.0.0",
+        "@npmcli/git": "^6.0.0",
         "glob": "^10.2.2",
-        "hosted-git-info": "^7.0.0",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^6.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.5.3"
+        "hosted-git-info": "^8.0.0",
+        "json-parse-even-better-errors": "^4.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.5.3",
+        "validate-npm-package-license": "^3.0.4"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/@npmcli/git": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
+      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/promise-spawn": "^8.0.0",
+        "ini": "^5.0.0",
+        "lru-cache": "^10.0.1",
+        "npm-pick-manifest": "^10.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^5.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/@npmcli/promise-spawn": {
+      "version": "8.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz",
+      "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "which": "^5.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/hosted-git-info": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
+      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/ini": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz",
+      "integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/json-parse-even-better-errors": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz",
+      "integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-install-checks": {
+      "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
+      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
+      "dev": true,
+      "license": "BSD-2-Clause",
+      "dependencies": {
+        "semver": "^7.1.1"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-normalize-package-bin": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
+      "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-package-arg": {
+      "version": "12.0.2",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
+      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^8.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest": {
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
+      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "npm-install-checks": "^7.1.0",
+        "npm-normalize-package-bin": "^4.0.0",
+        "npm-package-arg": "^12.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/proc-log": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
+      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/validate-npm-package-name": {
+      "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-6.0.2.tgz",
+      "integrity": "sha512-IUoow1YUtvoBBC06dXs8bR8B9vuA3aJfmQNKMoaPG/OFsPmoQvw8xh+6Ye25Gx9DQhoEom3Pcu9MKHerm/NpUQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/which": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
+      "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^3.1.1"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/promise-spawn": {
@@ -3885,6 +4247,25 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json": {
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
+      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^5.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^7.0.0",
+        "json-parse-even-better-errors": "^3.0.0",
+        "normalize-package-data": "^6.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.5.3"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/bundle": {
       "version": "2.3.2",
       "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz",
@@ -4342,6 +4723,25 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/package-json": {
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
+      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^5.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^7.0.0",
+        "json-parse-even-better-errors": "^3.0.0",
+        "normalize-package-data": "^6.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.5.3"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz",
@@ -9282,6 +9682,25 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/init-package-json/node_modules/@npmcli/package-json": {
+      "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
+      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^6.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^8.0.0",
+        "json-parse-even-better-errors": "^4.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.5.3",
+        "validate-npm-package-license": "^3.0.4"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/internal-slot": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz",
@@ -12879,6 +13298,25 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/pacote/node_modules/@npmcli/package-json": {
+      "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
+      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/git": "^6.0.0",
+        "glob": "^10.2.2",
+        "hosted-git-info": "^8.0.0",
+        "json-parse-even-better-errors": "^4.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.5.3",
+        "validate-npm-package-license": "^3.0.4"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -18652,10 +19090,10 @@
         "@npmcli/fs": "^4.0.0",
         "@npmcli/installed-package-contents": "^3.0.0",
         "@npmcli/map-workspaces": "^4.0.1",
-        "@npmcli/metavuln-calculator": "^9.0.0",
+        "@npmcli/metavuln-calculator": "^9.0.1",
         "@npmcli/name-from-folder": "^3.0.0",
         "@npmcli/node-gyp": "^4.0.0",
-        "@npmcli/package-json": "^6.0.1",
+        "@npmcli/package-json": "^7.0.0",
         "@npmcli/query": "^4.0.0",
         "@npmcli/redact": "^3.0.0",
         "@npmcli/run-script": "^9.0.1",
@@ -18706,7 +19144,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/map-workspaces": "^4.0.1",
-        "@npmcli/package-json": "^6.0.1",
+        "@npmcli/package-json": "^7.0.0",
         "ci-info": "^4.0.0",
         "ini": "^5.0.0",
         "nopt": "^8.1.0",
@@ -18768,7 +19206,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/arborist": "^9.1.4",
-        "@npmcli/package-json": "^6.1.1",
+        "@npmcli/package-json": "^7.0.0",
         "@npmcli/run-script": "^9.0.1",
         "ci-info": "^4.0.0",
         "npm-package-arg": "^12.0.0",
@@ -18851,7 +19289,7 @@
       "version": "11.1.0",
       "license": "ISC",
       "dependencies": {
-        "@npmcli/package-json": "^6.2.0",
+        "@npmcli/package-json": "^7.0.0",
         "ci-info": "^4.0.0",
         "npm-package-arg": "^12.0.0",
         "npm-registry-fetch": "^18.0.1",
diff --git a/package.json b/package.json
index 76ebe1ab9c6c7..df85273c08fc7 100644
--- a/package.json
+++ b/package.json
@@ -56,7 +56,7 @@
     "@npmcli/config": "^10.4.0",
     "@npmcli/fs": "^4.0.0",
     "@npmcli/map-workspaces": "^4.0.2",
-    "@npmcli/package-json": "^6.2.0",
+    "@npmcli/package-json": "^7.0.1",
     "@npmcli/promise-spawn": "^8.0.2",
     "@npmcli/redact": "^3.2.2",
     "@npmcli/run-script": "^9.1.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 30f8a2b995cad..ae9900e83ee64 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -7,10 +7,10 @@
     "@npmcli/fs": "^4.0.0",
     "@npmcli/installed-package-contents": "^3.0.0",
     "@npmcli/map-workspaces": "^4.0.1",
-    "@npmcli/metavuln-calculator": "^9.0.0",
+    "@npmcli/metavuln-calculator": "^9.0.1",
     "@npmcli/name-from-folder": "^3.0.0",
     "@npmcli/node-gyp": "^4.0.0",
-    "@npmcli/package-json": "^6.0.1",
+    "@npmcli/package-json": "^7.0.0",
     "@npmcli/query": "^4.0.0",
     "@npmcli/redact": "^3.0.0",
     "@npmcli/run-script": "^9.0.1",
diff --git a/workspaces/config/package.json b/workspaces/config/package.json
index 5cb8925d4cf4b..daf535a2672a5 100644
--- a/workspaces/config/package.json
+++ b/workspaces/config/package.json
@@ -38,7 +38,7 @@
   },
   "dependencies": {
     "@npmcli/map-workspaces": "^4.0.1",
-    "@npmcli/package-json": "^6.0.1",
+    "@npmcli/package-json": "^7.0.0",
     "ci-info": "^4.0.0",
     "ini": "^5.0.0",
     "nopt": "^8.1.0",
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 827b1f38a73b0..b5b59c8248fc6 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -61,7 +61,7 @@
   },
   "dependencies": {
     "@npmcli/arborist": "^9.1.4",
-    "@npmcli/package-json": "^6.1.1",
+    "@npmcli/package-json": "^7.0.0",
     "@npmcli/run-script": "^9.0.1",
     "ci-info": "^4.0.0",
     "npm-package-arg": "^12.0.0",
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index b6774b39afc13..c51d4997cac14 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -38,7 +38,7 @@
   "bugs": "https://github.com/npm/cli/issues",
   "homepage": "https://npmjs.com/package/libnpmpublish",
   "dependencies": {
-    "@npmcli/package-json": "^6.2.0",
+    "@npmcli/package-json": "^7.0.0",
     "ci-info": "^4.0.0",
     "npm-package-arg": "^12.0.0",
     "npm-registry-fetch": "^18.0.1",

From 1b4433fdb85623e019a6194cb01ff85c7f64ccad Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:16:51 -0700
Subject: [PATCH 06/63] deps: @npmcli/map-workspaces@5.0.0

---
 DEPENDENCIES.md                               |    3 +-
 node_modules/.gitignore                       |    8 +-
 .../node_modules/@npmcli/package-json/LICENSE |   18 -
 .../@npmcli/package-json/lib/index.js         |  286 ---
 .../package-json/lib/normalize-data.js        |  257 ---
 .../@npmcli/package-json/lib/normalize.js     |  601 -----
 .../@npmcli/package-json/lib/read-package.js  |   39 -
 .../@npmcli/package-json/lib/sort.js          |  101 -
 .../package-json/lib/update-dependencies.js   |   75 -
 .../package-json/lib/update-scripts.js        |   29 -
 .../package-json/lib/update-workspaces.js     |   26 -
 .../@npmcli/package-json/package.json         |   61 -
 .../map-workspaces/node_modules/glob/LICENSE  |   15 +
 .../node_modules/glob/dist/commonjs/glob.js   |  247 ++
 .../glob/dist/commonjs/has-magic.js           |   27 +
 .../node_modules/glob/dist/commonjs/ignore.js |  119 +
 .../node_modules/glob/dist/commonjs/index.js  |   68 +
 .../glob/dist/commonjs/package.json           |    3 +
 .../glob/dist/commonjs/pattern.js             |  219 ++
 .../glob/dist/commonjs/processor.js           |  301 +++
 .../node_modules/glob/dist/commonjs/walker.js |  387 ++++
 .../node_modules/glob/dist/esm/bin.d.mts      |    3 +
 .../node_modules/glob/dist/esm/bin.mjs        |  276 +++
 .../node_modules/glob/dist/esm/glob.js        |  243 ++
 .../node_modules/glob/dist/esm/has-magic.js   |   23 +
 .../node_modules/glob/dist/esm/ignore.js      |  115 +
 .../node_modules/glob/dist/esm/index.js       |   55 +
 .../node_modules/glob/dist/esm/package.json   |    3 +
 .../node_modules/glob/dist/esm/pattern.js     |  215 ++
 .../node_modules/glob/dist/esm/processor.js   |  294 +++
 .../node_modules/glob/dist/esm/walker.js      |  381 ++++
 .../node_modules/glob/package.json            |   97 +
 .../node_modules/jackspeak/LICENSE.md         |   55 +
 .../jackspeak/dist/commonjs/index.js          |  947 ++++++++
 .../jackspeak/dist/commonjs/package.json      |    3 +
 .../node_modules/jackspeak/dist/esm/index.js  |  936 ++++++++
 .../jackspeak/dist/esm/package.json           |    3 +
 .../node_modules/jackspeak/package.json       |   94 +
 .../node_modules/lru-cache/LICENSE            |   15 +
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    3 +
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 +++++++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    3 +
 .../node_modules/lru-cache/package.json       |  113 +
 .../node_modules/minimatch/LICENSE            |   15 +
 .../dist/commonjs/assert-valid-pattern.js     |   14 +
 .../minimatch/dist/commonjs/ast.js            |  592 +++++
 .../dist/commonjs/brace-expressions.js        |  152 ++
 .../minimatch/dist/commonjs/escape.js         |   22 +
 .../minimatch/dist/commonjs/index.js          | 1014 +++++++++
 .../minimatch/dist/commonjs/package.json      |    3 +
 .../minimatch/dist/commonjs/unescape.js       |   24 +
 .../dist/esm/assert-valid-pattern.js          |   10 +
 .../node_modules/minimatch/dist/esm/ast.js    |  588 +++++
 .../minimatch/dist/esm/brace-expressions.js   |  148 ++
 .../node_modules/minimatch/dist/esm/escape.js |   18 +
 .../node_modules/minimatch/dist/esm/index.js  | 1001 ++++++++
 .../minimatch/dist/esm/package.json           |    3 +
 .../minimatch/dist/esm/unescape.js            |   20 +
 .../node_modules/minimatch/package.json       |   79 +
 .../node_modules/path-scurry/LICENSE.md       |   55 +
 .../path-scurry/dist/commonjs/index.js        | 2016 +++++++++++++++++
 .../path-scurry/dist/commonjs/package.json    |    3 +
 .../path-scurry/dist/esm/index.js             | 1981 ++++++++++++++++
 .../path-scurry/dist/esm/package.json         |    3 +
 .../node_modules/path-scurry/package.json     |   88 +
 .../@npmcli/map-workspaces/package.json       |   14 +-
 package-lock.json                             |  148 +-
 package.json                                  |    2 +-
 workspaces/arborist/package.json              |    2 +-
 workspaces/config/package.json                |    2 +-
 73 files changed, 16376 insertions(+), 1536 deletions(-)
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/glob.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/has-magic.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/ignore.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/pattern.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/processor.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/walker.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.d.mts
 create mode 100755 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/glob.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/has-magic.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/ignore.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/pattern.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/processor.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/walker.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/LICENSE.md
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/LICENSE
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/ast.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/brace-expressions.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/escape.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/unescape.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/ast.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/brace-expressions.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/escape.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/unescape.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/LICENSE.md
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/package.json
 create mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index 68de2df464a6c..c8c0852ff8fb7 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -305,6 +305,7 @@ graph LR;
   ip-address-->jsbn;
   ip-address-->sprintf-js;
   is-cidr-->cidr-regex;
+  isaacs-brace-expansion-->isaacs-balanced-match["@isaacs/balanced-match"];
   isaacs-cliui-->string-width-cjs;
   isaacs-cliui-->string-width;
   isaacs-cliui-->strip-ansi-cjs;
@@ -313,7 +314,6 @@ graph LR;
   isaacs-cliui-->wrap-ansi;
   isaacs-fs-minipass-->minipass;
   jackspeak-->isaacs-cliui["@isaacs/cliui"];
-  jackspeak-->pkgjs-parseargs["@pkgjs/parseargs"];
   libnpmaccess-->npm-package-arg;
   libnpmaccess-->npm-registry-fetch;
   libnpmaccess-->npmcli-eslint-config["@npmcli/eslint-config"];
@@ -416,6 +416,7 @@ graph LR;
   make-fetch-happen-->promise-retry;
   make-fetch-happen-->ssri;
   minimatch-->brace-expansion;
+  minimatch-->isaacs-brace-expansion["@isaacs/brace-expansion"];
   minipass-->yallist;
   minipass-collect-->minipass;
   minipass-fetch-->encoding;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 514ff1c417f92..dee02e20a8142 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -25,9 +25,11 @@
 !/@npmcli/map-workspaces
 !/@npmcli/map-workspaces/node_modules/
 /@npmcli/map-workspaces/node_modules/*
-!/@npmcli/map-workspaces/node_modules/@npmcli/
-/@npmcli/map-workspaces/node_modules/@npmcli/*
-!/@npmcli/map-workspaces/node_modules/@npmcli/package-json
+!/@npmcli/map-workspaces/node_modules/glob
+!/@npmcli/map-workspaces/node_modules/jackspeak
+!/@npmcli/map-workspaces/node_modules/lru-cache
+!/@npmcli/map-workspaces/node_modules/minimatch
+!/@npmcli/map-workspaces/node_modules/path-scurry
 !/@npmcli/metavuln-calculator
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE
deleted file mode 100644
index 6a1f3708f6d70..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-ISC License
-
-Copyright GitHub Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js
deleted file mode 100644
index 7eff602d73a3f..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/index.js
+++ /dev/null
@@ -1,286 +0,0 @@
-const { readFile, writeFile } = require('node:fs/promises')
-const { resolve } = require('node:path')
-const parseJSON = require('json-parse-even-better-errors')
-
-const updateDeps = require('./update-dependencies.js')
-const updateScripts = require('./update-scripts.js')
-const updateWorkspaces = require('./update-workspaces.js')
-const normalize = require('./normalize.js')
-const { read, parse } = require('./read-package.js')
-const { packageSort } = require('./sort.js')
-
-// a list of handy specialized helper functions that take
-// care of special cases that are handled by the npm cli
-const knownSteps = new Set([
-  updateDeps,
-  updateScripts,
-  updateWorkspaces,
-])
-
-// list of all keys that are handled by "knownSteps" helpers
-const knownKeys = new Set([
-  ...updateDeps.knownKeys,
-  'scripts',
-  'workspaces',
-])
-
-class PackageJson {
-  static normalizeSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'optionalDedupe',
-    'scripts',
-    'funding',
-    'bin',
-  ])
-
-  // npm pkg fix
-  static fixSteps = Object.freeze([
-    'binRefs',
-    'bundleDependencies',
-    'bundleDependenciesFalse',
-    'fixName',
-    'fixNameField',
-    'fixVersionField',
-    'fixRepositoryField',
-    'fixDependencies',
-    'devDependencies',
-    'scriptpath',
-  ])
-
-  static prepareSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'bundleDependenciesDeleteFalse',
-    'gypfile',
-    'serverjs',
-    'scriptpath',
-    'authors',
-    'readme',
-    'mans',
-    'binDir',
-    'gitHead',
-    'fillTypes',
-    'normalizeData',
-    'binRefs',
-  ])
-
-  // create a new empty package.json, so we can save at the given path even
-  // though we didn't start from a parsed file
-  static async create (path, opts = {}) {
-    const p = new PackageJson()
-    await p.create(path)
-    if (opts.data) {
-      return p.update(opts.data)
-    }
-    return p
-  }
-
-  // Loads a package.json at given path and JSON parses
-  static async load (path, opts = {}) {
-    const p = new PackageJson()
-    // Avoid try/catch if we aren't going to create
-    if (!opts.create) {
-      return p.load(path)
-    }
-
-    try {
-      return await p.load(path)
-    } catch (err) {
-      if (!err.message.startsWith('Could not read package.json')) {
-        throw err
-      }
-      return await p.create(path)
-    }
-  }
-
-  // npm pkg fix
-  static async fix (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.fix(opts)
-  }
-
-  // read-package-json compatible behavior
-  static async prepare (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.prepare(opts)
-  }
-
-  // read-package-json-fast compatible behavior
-  static async normalize (path, opts) {
-    const p = new PackageJson()
-    await p.load(path)
-    return p.normalize(opts)
-  }
-
-  #path
-  #manifest
-  #readFileContent = ''
-  #canSave = true
-
-  // Load content from given path
-  async load (path, parseIndex) {
-    this.#path = path
-    let parseErr
-    try {
-      this.#readFileContent = await read(this.filename)
-    } catch (err) {
-      if (!parseIndex) {
-        throw err
-      }
-      parseErr = err
-    }
-
-    if (parseErr) {
-      const indexFile = resolve(this.path, 'index.js')
-      let indexFileContent
-      try {
-        indexFileContent = await readFile(indexFile, 'utf8')
-      } catch (err) {
-        throw parseErr
-      }
-      try {
-        this.fromComment(indexFileContent)
-      } catch (err) {
-        throw parseErr
-      }
-      // This wasn't a package.json so prevent saving
-      this.#canSave = false
-      return this
-    }
-
-    return this.fromJSON(this.#readFileContent)
-  }
-
-  // Load data from a JSON string/buffer
-  fromJSON (data) {
-    this.#manifest = parse(data)
-    return this
-  }
-
-  fromContent (data) {
-    this.#manifest = data
-    this.#canSave = false
-    return this
-  }
-
-  // Load data from a comment
-  // /**package { "name": "foo", "version": "1.2.3", ... } **/
-  fromComment (data) {
-    data = data.split(/^\/\*\*package(?:\s|$)/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[1]
-    data = data.split(/\*\*\/$/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[0]
-    data = data.replace(/^\s*\*/mg, '')
-
-    this.#manifest = parseJSON(data)
-    return this
-  }
-
-  get content () {
-    return this.#manifest
-  }
-
-  get path () {
-    return this.#path
-  }
-
-  get filename () {
-    if (this.path) {
-      return resolve(this.path, 'package.json')
-    }
-    return undefined
-  }
-
-  create (path) {
-    this.#path = path
-    this.#manifest = {}
-    return this
-  }
-
-  // This should be the ONLY way to set content in the manifest
-  update (content) {
-    if (!this.content) {
-      throw new Error('Can not update without content.  Please `load` or `create`')
-    }
-
-    for (const step of knownSteps) {
-      this.#manifest = step({ content, originalContent: this.content })
-    }
-
-    // unknown properties will just be overwitten
-    for (const [key, value] of Object.entries(content)) {
-      if (!knownKeys.has(key)) {
-        this.content[key] = value
-      }
-    }
-
-    return this
-  }
-
-  async save ({ sort } = {}) {
-    if (!this.#canSave) {
-      throw new Error('No package.json to save to')
-    }
-    const {
-      [Symbol.for('indent')]: indent,
-      [Symbol.for('newline')]: newline,
-      ...rest
-    } = this.content
-
-    const format = indent === undefined ? '  ' : indent
-    const eol = newline === undefined ? '\n' : newline
-
-    const content = sort ? packageSort(rest) : rest
-
-    const fileContent = `${
-      JSON.stringify(content, null, format)
-    }\n`
-      .replace(/\n/g, eol)
-
-    if (fileContent.trim() !== this.#readFileContent.trim()) {
-      const written = await writeFile(this.filename, fileContent)
-      this.#readFileContent = fileContent
-      return written
-    }
-  }
-
-  async normalize (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.normalizeSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async prepare (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.prepareSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async fix (opts = {}) {
-    // This one is not overridable
-    opts.steps = this.constructor.fixSteps
-    await normalize(this, opts)
-    return this
-  }
-}
-
-module.exports = PackageJson
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js
deleted file mode 100644
index 79b0bafbcd3a4..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize-data.js
+++ /dev/null
@@ -1,257 +0,0 @@
-// Originally normalize-package-data
-
-const url = require('node:url')
-const hostedGitInfo = require('hosted-git-info')
-const validateLicense = require('validate-npm-package-license')
-
-const typos = {
-  dependancies: 'dependencies',
-  dependecies: 'dependencies',
-  depdenencies: 'dependencies',
-  devEependencies: 'devDependencies',
-  depends: 'dependencies',
-  'dev-dependencies': 'devDependencies',
-  devDependences: 'devDependencies',
-  devDepenencies: 'devDependencies',
-  devdependencies: 'devDependencies',
-  repostitory: 'repository',
-  repo: 'repository',
-  prefereGlobal: 'preferGlobal',
-  hompage: 'homepage',
-  hampage: 'homepage',
-  autohr: 'author',
-  autor: 'author',
-  contributers: 'contributors',
-  publicationConfig: 'publishConfig',
-  script: 'scripts',
-}
-
-const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (description) {
-  // the first block of text before the first heading that isn't the first line heading
-  const lines = description.trim().split('\n')
-  let start = 0
-  // skip initial empty lines and lines that start with #
-  while (lines[start]?.trim().match(/^(#|$)/)) {
-    start++
-  }
-  let end = start + 1
-  // keep going till we get to the end or an empty line
-  while (end < lines.length && lines[end].trim()) {
-    end++
-  }
-  return lines.slice(start, end).join(' ').trim()
-}
-
-function stringifyPerson (person) {
-  if (typeof person !== 'string') {
-    const name = person.name || ''
-    const u = person.url || person.web
-    const wrappedUrl = u ? (' (' + u + ')') : ''
-    const e = person.email || person.mail
-    const wrappedEmail = e ? (' <' + e + '>') : ''
-    person = name + wrappedEmail + wrappedUrl
-  }
-  const matchedName = person.match(/^([^(<]+)/)
-  const matchedUrl = person.match(/\(([^()]+)\)/)
-  const matchedEmail = person.match(/<([^<>]+)>/)
-  const parsed = {}
-  if (matchedName?.[0].trim()) {
-    parsed.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    parsed.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    parsed.url = matchedUrl[1]
-  }
-  return parsed
-}
-
-function normalizeData (data, changes) {
-  // fixDescriptionField
-  if (data.description && typeof data.description !== 'string') {
-    changes?.push(`'description' field should be a string`)
-    delete data.description
-  }
-  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
-    data.description = extractDescription(data.readme)
-  }
-  if (data.description === undefined) {
-    delete data.description
-  }
-  if (!data.description) {
-    changes?.push('No description')
-  }
-
-  // fixModulesField
-  if (data.modules) {
-    changes?.push(`modules field is deprecated`)
-    delete data.modules
-  }
-
-  // fixFilesField
-  const files = data.files
-  if (files && !Array.isArray(files)) {
-    changes?.push(`Invalid 'files' member`)
-    delete data.files
-  } else if (data.files) {
-    data.files = data.files.filter(function (file) {
-      if (!file || typeof file !== 'string') {
-        changes?.push(`Invalid filename in 'files' list: ${file}`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-
-  // fixManField
-  if (data.man && typeof data.man === 'string') {
-    data.man = [data.man]
-  }
-
-  // fixBugsField
-  if (!data.bugs && data.repository?.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted && hosted.bugs()) {
-      data.bugs = { url: hosted.bugs() }
-    }
-  } else if (data.bugs) {
-    if (typeof data.bugs === 'string') {
-      if (isEmail(data.bugs)) {
-        data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-      } else if (url.parse(data.bugs).protocol) {
-        data.bugs = { url: data.bugs }
-      } else {
-        changes?.push(`Bug string field must be url, email, or {email,url}`)
-      }
-    } else {
-      for (const k in data.bugs) {
-        if (['web', 'name'].includes(k)) {
-          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
-          data.bugs.url = data.bugs[k]
-          delete data.bugs[k]
-        }
-      }
-      const oldBugs = data.bugs
-      data.bugs = {}
-      if (oldBugs.url) {
-        /* eslint-disable-next-line node/no-deprecated-api */
-        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-          data.bugs.url = oldBugs.url
-        } else {
-          changes?.push('bugs.url field must be a string url. Deleted.')
-        }
-      }
-      if (oldBugs.email) {
-        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-          data.bugs.email = oldBugs.email
-        } else {
-          changes?.push('bugs.email field must be a string email. Deleted.')
-        }
-      }
-    }
-    if (!data.bugs.email && !data.bugs.url) {
-      delete data.bugs
-      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
-    }
-  }
-  // fixKeywordsField
-  if (typeof data.keywords === 'string') {
-    data.keywords = data.keywords.split(/,\s+/)
-  }
-  if (data.keywords && !Array.isArray(data.keywords)) {
-    delete data.keywords
-    changes?.push(`keywords should be an array of strings`)
-  } else if (data.keywords) {
-    data.keywords = data.keywords.filter(function (kw) {
-      if (typeof kw !== 'string' || !kw) {
-        changes?.push(`keywords should be an array of strings`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-  // fixBundleDependenciesField
-  const bdd = 'bundledDependencies'
-  const bd = 'bundleDependencies'
-  if (data[bdd] && !data[bd]) {
-    data[bd] = data[bdd]
-    delete data[bdd]
-  }
-  if (data[bd] && !Array.isArray(data[bd])) {
-    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
-    delete data[bd]
-  } else if (data[bd]) {
-    data[bd] = data[bd].filter(function (filtered) {
-      if (!filtered || typeof filtered !== 'string') {
-        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
-        return false
-      } else {
-        if (!data.dependencies) {
-          data.dependencies = {}
-        }
-        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
-          data.dependencies[filtered] = '*'
-        }
-        return true
-      }
-    })
-  }
-  // fixHomepageField
-  if (!data.homepage && data.repository && data.repository.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted) {
-      data.homepage = hosted.docs()
-    }
-  }
-  if (data.homepage) {
-    if (typeof data.homepage !== 'string') {
-      changes?.push('homepage field must be a string url. Deleted.')
-      delete data.homepage
-    } else {
-      /* eslint-disable-next-line node/no-deprecated-api */
-      if (!url.parse(data.homepage).protocol) {
-        data.homepage = 'http://' + data.homepage
-      }
-    }
-  }
-  // fixReadmeField
-  if (!data.readme) {
-    changes?.push('No README data')
-    data.readme = 'ERROR: No README data found!'
-  }
-  // fixLicenseField
-  const license = data.license || data.licence
-  if (!license) {
-    changes?.push('No license field.')
-  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
-    changes?.push('license should be a valid SPDX license expression')
-  } else if (!validateLicense(license).validForNewPackages) {
-    changes?.push('license should be a valid SPDX license expression')
-  }
-  // fixPeople
-  if (data.author) {
-    data.author = stringifyPerson(data.author)
-  }
-  ['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(stringifyPerson)
-  })
-  // fixTypos
-  for (const d in typos) {
-    if (Object.prototype.hasOwnProperty.call(data, d)) {
-      changes?.push(`${d} should probably be ${typos[d]}.`)
-    }
-  }
-}
-
-module.exports = { normalizeData }
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js
deleted file mode 100644
index 845f6753a9a00..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/normalize.js
+++ /dev/null
@@ -1,601 +0,0 @@
-const valid = require('semver/functions/valid')
-const clean = require('semver/functions/clean')
-const fs = require('node:fs/promises')
-const path = require('node:path')
-const { log } = require('proc-log')
-const moduleBuiltin = require('node:module')
-
-/**
- * @type {import('hosted-git-info')}
- */
-let _hostedGitInfo
-function lazyHostedGitInfo () {
-  if (!_hostedGitInfo) {
-    _hostedGitInfo = require('hosted-git-info')
-  }
-  return _hostedGitInfo
-}
-
-/**
- * @type {import('glob').glob}
- */
-let _glob
-function lazyLoadGlob () {
-  if (!_glob) {
-    _glob = require('glob').glob
-  }
-  return _glob
-}
-
-// used to be npm-normalize-package-bin
-function normalizePackageBin (pkg, changes) {
-  if (pkg.bin) {
-    if (typeof pkg.bin === 'string' && pkg.name) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = { [pkg.name]: pkg.bin }
-    } else if (Array.isArray(pkg.bin)) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = pkg.bin.reduce((acc, k) => {
-        acc[path.basename(k)] = k
-        return acc
-      }, {})
-    }
-    if (typeof pkg.bin === 'object') {
-      for (const binKey in pkg.bin) {
-        if (typeof pkg.bin[binKey] !== 'string') {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-        const base = path.basename(secureAndUnixifyPath(binKey))
-        if (!base) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
-
-        if (!binTarget) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        if (base !== binKey) {
-          delete pkg.bin[binKey]
-          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
-        }
-        if (binTarget !== pkg.bin[binKey]) {
-          changes?.push(`"bin[${base}]" script name was cleaned`)
-        }
-        pkg.bin[base] = binTarget
-      }
-
-      if (Object.keys(pkg.bin).length === 0) {
-        changes?.push('empty "bin" was removed')
-        delete pkg.bin
-      }
-
-      return pkg
-    }
-  }
-  delete pkg.bin
-}
-
-function normalizePackageMan (pkg, changes) {
-  if (pkg.man) {
-    const mans = []
-    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
-      if (typeof man !== 'string') {
-        changes?.push(`removed invalid "man [${man}]"`)
-      } else {
-        mans.push(secureAndUnixifyPath(man))
-      }
-    }
-
-    if (!mans.length) {
-      changes?.push('empty "man" was removed')
-    } else {
-      pkg.man = mans
-      return pkg
-    }
-  }
-  delete pkg.man
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  const rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function unixifyPath (ref) {
-  return ref.replace(/\\|:/g, '/')
-}
-
-function secureAndUnixifyPath (ref) {
-  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
-  return secured.startsWith('./') ? '' : secured
-}
-
-// We don't want the `changes` array in here by default because this is a hot
-// path for parsing packuments during install.  So the calling method passes it
-// in if it wants to track changes.
-const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
-  if (!pkg.content) {
-    throw new Error('Can not normalize without content')
-  }
-  const data = pkg.content
-  const scripts = data.scripts || {}
-  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
-
-  // name and version are load bearing so we have to clean them up first
-  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
-    if (!data.name && !strict) {
-      changes?.push('Missing "name" field was set to an empty string')
-      data.name = ''
-    } else {
-      if (typeof data.name !== 'string') {
-        throw new Error('name field must be a string.')
-      }
-      if (!strict) {
-        const name = data.name.trim()
-        if (data.name !== name) {
-          changes?.push(`Whitespace was trimmed from "name"`)
-          data.name = name
-        }
-      }
-
-      if (data.name.startsWith('.') ||
-        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
-        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
-        data.name.toLowerCase() === 'node_modules' ||
-        data.name.toLowerCase() === 'favicon.ico') {
-        throw new Error('Invalid name: ' + JSON.stringify(data.name))
-      }
-    }
-  }
-
-  if (steps.includes('fixName')) {
-    // Check for conflicts with builtin modules
-    if (moduleBuiltin.builtinModules.includes(data.name)) {
-      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
-    }
-  }
-
-  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    const loose = !strict
-    if (!data.version) {
-      data.version = ''
-    } else {
-      if (!valid(data.version, loose)) {
-        throw new Error(`Invalid version: "${data.version}"`)
-      }
-      const version = clean(data.version, loose)
-      if (version !== data.version) {
-        changes?.push(`"version" was cleaned and set to "${version}"`)
-        data.version = version
-      }
-    }
-  }
-  // remove attributes that start with "_"
-  if (steps.includes('_attributes')) {
-    for (const key in data) {
-      if (key.startsWith('_')) {
-        changes?.push(`"${key}" was removed`)
-        delete pkg.content[key]
-      }
-    }
-  }
-
-  // build the "_id" attribute
-  if (steps.includes('_id')) {
-    if (data.name && data.version) {
-      changes?.push(`"_id" was set to ${pkgId}`)
-      data._id = pkgId
-    }
-  }
-
-  // fix bundledDependencies typo
-  // normalize bundleDependencies
-  if (steps.includes('bundledDependencies')) {
-    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
-      data.bundleDependencies = data.bundledDependencies
-    }
-    changes?.push(`Deleted incorrect "bundledDependencies"`)
-    delete data.bundledDependencies
-  }
-  // expand "bundleDependencies: true or translate from object"
-  if (steps.includes('bundleDependencies')) {
-    const bd = data.bundleDependencies
-    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
-      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
-      data.bundleDependencies = []
-    } else if (bd === true) {
-      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
-      data.bundleDependencies = Object.keys(data.dependencies || {})
-    } else if (bd && typeof bd === 'object') {
-      if (!Array.isArray(bd)) {
-        changes?.push(`"bundleDependencies" was changed from an object to an array`)
-        data.bundleDependencies = Object.keys(bd)
-      }
-    } else if ('bundleDependencies' in data) {
-      changes?.push(`"bundleDependencies" was removed`)
-      delete data.bundleDependencies
-    }
-  }
-
-  // it was once common practice to list deps both in optionalDependencies and
-  // in dependencies, to support npm versions that did not know about
-  // optionalDependencies.  This is no longer a relevant need, so duplicating
-  // the deps in two places is unnecessary and excessive.
-  if (steps.includes('optionalDedupe')) {
-    if (data.dependencies &&
-      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
-      for (const name in data.optionalDependencies) {
-        changes?.push(`optionalDependencies."${name}" was removed`)
-        delete data.dependencies[name]
-      }
-      if (!Object.keys(data.dependencies).length) {
-        changes?.push(`Empty "optionalDependencies" was removed`)
-        delete data.dependencies
-      }
-    }
-  }
-
-  // add "install" attribute if any "*.gyp" files exist
-  if (steps.includes('gypfile')) {
-    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
-      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
-      if (files.length) {
-        scripts.install = 'node-gyp rebuild'
-        data.scripts = scripts
-        data.gypfile = true
-        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
-        changes?.push(`"gypfile" was set to "true"`)
-      }
-    }
-  }
-
-  // add "start" attribute if "server.js" exists
-  if (steps.includes('serverjs') && !scripts.start) {
-    try {
-      await fs.access(path.join(pkg.path, 'server.js'))
-      scripts.start = 'node server.js'
-      data.scripts = scripts
-      changes?.push('"scripts.start" was set to "node server.js"')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // strip "node_modules/.bin" from scripts entries
-  // remove invalid scripts entries (non-strings)
-  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
-    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
-    if (typeof data.scripts === 'object') {
-      for (const name in data.scripts) {
-        if (typeof data.scripts[name] !== 'string') {
-          delete data.scripts[name]
-          changes?.push(`Invalid scripts."${name}" was removed`)
-        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
-          data.scripts[name] = data.scripts[name].replace(spre, '')
-          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
-        }
-      }
-    } else {
-      changes?.push(`Removed invalid "scripts"`)
-      delete data.scripts
-    }
-  }
-
-  if (steps.includes('funding')) {
-    if (data.funding && typeof data.funding === 'string') {
-      data.funding = { url: data.funding }
-      changes?.push(`"funding" was changed to an object with a url attribute`)
-    }
-  }
-
-  // populate "authors" attribute
-  if (steps.includes('authors') && !data.contributors) {
-    try {
-      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
-      const authors = authorData.split(/\r?\n/g)
-        .map(line => line.replace(/^\s*#.*$/, '').trim())
-        .filter(line => line)
-      data.contributors = authors
-      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // populate "readme" attribute
-  if (steps.includes('readme') && !data.readme) {
-    const mdre = /\.m?a?r?k?d?o?w?n?$/i
-    const files = await lazyLoadGlob()('{README,README.*}', {
-      cwd: pkg.path,
-      nocase: true,
-      mark: true,
-    })
-    let readmeFile
-    for (const file of files) {
-      // don't accept directories.
-      if (!file.endsWith(path.sep)) {
-        if (file.match(mdre)) {
-          readmeFile = file
-          break
-        }
-        if (file.endsWith('README')) {
-          readmeFile = file
-        }
-      }
-    }
-    if (readmeFile) {
-      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
-      data.readme = readmeData
-      data.readmeFilename = readmeFile
-      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
-      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
-    }
-    if (!data.readme) {
-      data.readme = 'ERROR: No README data found!'
-    }
-  }
-
-  // expand directories.man
-  if (steps.includes('mans')) {
-    if (data.directories?.man && !data.man) {
-      const manDir = secureAndUnixifyPath(data.directories.man)
-      const cwd = path.resolve(pkg.path, manDir)
-      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
-      data.man = files.map(man =>
-        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
-      )
-    }
-    normalizePackageMan(data, changes)
-  }
-
-  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
-    normalizePackageBin(data, changes)
-  }
-
-  // expand "directories.bin"
-  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
-    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
-    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
-    data.bin = bins.reduce((acc, binFile) => {
-      if (binFile && !binFile.startsWith('.')) {
-        const binName = path.basename(binFile)
-        acc[binName] = path.join(data.directories.bin, binFile)
-      }
-      return acc
-    }, {})
-    // *sigh*
-    normalizePackageBin(data, changes)
-  }
-
-  // populate "gitHead" attribute
-  if (steps.includes('gitHead') && !data.gitHead) {
-    const git = require('@npmcli/git')
-    const gitRoot = await git.find({ cwd: pkg.path, root })
-    let head
-    if (gitRoot) {
-      try {
-        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
-      } catch (err) {
-      // do nothing
-      }
-    }
-    let headData
-    if (head) {
-      if (head.startsWith('ref: ')) {
-        const headRef = head.replace(/^ref: /, '').trim()
-        const headFile = path.resolve(gitRoot, '.git', headRef)
-        try {
-          headData = await fs.readFile(headFile, 'utf8')
-          headData = headData.replace(/^ref: /, '').trim()
-        } catch (err) {
-          // do nothing
-        }
-        if (!headData) {
-          const packFile = path.resolve(gitRoot, '.git/packed-refs')
-          try {
-            let refs = await fs.readFile(packFile, 'utf8')
-            if (refs) {
-              refs = refs.split('\n')
-              for (let i = 0; i < refs.length; i++) {
-                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
-                if (match && match[2].trim() === headRef) {
-                  headData = match[1]
-                  break
-                }
-              }
-            }
-          } catch {
-            // do nothing
-          }
-        }
-      } else {
-        headData = head.trim()
-      }
-    }
-    if (headData) {
-      data.gitHead = headData
-    }
-  }
-
-  // populate "types" attribute
-  if (steps.includes('fillTypes')) {
-    const index = data.main || 'index.js'
-
-    if (typeof index !== 'string') {
-      throw new TypeError('The "main" attribute must be of type string.')
-    }
-
-    // TODO exports is much more complicated than this in verbose format
-    // We need to support for instance
-
-    // "exports": {
-    //   ".": [
-    //     {
-    //       "default": "./lib/npm.js"
-    //     },
-    //     "./lib/npm.js"
-    //   ],
-    //   "./package.json": "./package.json"
-    // },
-    // as well as conditional exports
-
-    // if (data.exports && typeof data.exports === 'string') {
-    //   index = data.exports
-    // }
-
-    // if (data.exports && data.exports['.']) {
-    //   index = data.exports['.']
-    //   if (typeof index !== 'string') {
-    //   }
-    // }
-    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
-    const dts = `./${extless}.d.ts`
-    const hasDTSFields = 'types' in data || 'typings' in data
-    if (!hasDTSFields) {
-      try {
-        await fs.access(path.join(pkg.path, dts))
-        data.types = dts.split(path.sep).join('/')
-      } catch {
-        // do nothing
-      }
-    }
-  }
-
-  // "normalizeData" from "read-package-json", which was just a call through to
-  // "normalize-package-data".  We only call the "fixer" functions because
-  // outside of that it was also clobbering _id (which we already conditionally
-  // do) and also adding the gypfile script (which we also already
-  // conditionally do)
-
-  // Some steps are isolated so we can do a limited subset of these in `fix`
-  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
-    if (data.repositories) {
-      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
-      data.repository = data.repositories[0]
-    }
-    if (data.repository) {
-      if (typeof data.repository === 'string') {
-        changes?.push('"repository" was changed from a string to an object')
-        data.repository = {
-          type: 'git',
-          url: data.repository,
-        }
-      }
-      if (data.repository.url) {
-        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
-        let r
-        if (hosted) {
-          if (hosted.getDefaultRepresentation() === 'shortcut') {
-            r = hosted.https()
-          } else {
-            r = hosted.toString()
-          }
-          if (r !== data.repository.url) {
-            changes?.push(`"repository.url" was normalized to "${r}"`)
-            data.repository.url = r
-          }
-        }
-      }
-    }
-  }
-
-  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
-    // peerDependencies?
-    // devDependencies is meaningless here, it's ignored on an installed package
-    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
-      if (data[type]) {
-        let secondWarning = true
-        if (typeof data[type] === 'string') {
-          changes?.push(`"${type}" was converted from a string into an object`)
-          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
-          secondWarning = false
-        }
-        if (Array.isArray(data[type])) {
-          if (secondWarning) {
-            changes?.push(`"${type}" was converted from an array into an object`)
-          }
-          const o = {}
-          for (const d of data[type]) {
-            if (typeof d === 'string') {
-              const dep = d.trim().split(/(:?[@\s><=])/)
-              const dn = dep.shift()
-              const dv = dep.join('').replace(/^@/, '').trim()
-              o[dn] = dv
-            }
-          }
-          data[type] = o
-        }
-      }
-    }
-    // normalize-package-data used to put optional dependencies BACK into
-    // dependencies here, we no longer do this
-
-    for (const deps of ['dependencies', 'devDependencies']) {
-      if (deps in data) {
-        if (!data[deps] || typeof data[deps] !== 'object') {
-          changes?.push(`Removed invalid "${deps}"`)
-          delete data[deps]
-        } else {
-          for (const d in data[deps]) {
-            const r = data[deps][d]
-            if (typeof r !== 'string') {
-              changes?.push(`Removed invalid "${deps}.${d}"`)
-              delete data[deps][d]
-            }
-            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
-            if (hosted && hosted !== data[deps][d]) {
-              changes?.push(`Normalized git reference to "${deps}.${d}"`)
-              data[deps][d] = hosted.toString()
-            }
-          }
-        }
-      }
-    }
-  }
-
-  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
-  if (steps.includes('normalizeData')) {
-    const { normalizeData } = require('./normalize-data.js')
-    normalizeData(data, changes)
-  }
-
-  // Warn if the bin references don't point to anything.  This might be better
-  // in normalize-package-data if it had access to the file path.
-  if (steps.includes('binRefs') && data.bin instanceof Object) {
-    for (const key in data.bin) {
-      try {
-        await fs.access(path.resolve(pkg.path, data.bin[key]))
-      } catch {
-        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
-        // XXX: should a future breaking change delete bin entries that cannot be accessed?
-      }
-    }
-  }
-}
-
-module.exports = normalize
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js
deleted file mode 100644
index d6c86ce388e6c..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/read-package.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// This is JUST the code needed to open a package.json file and parse it.
-// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
-
-const { readFile } = require('fs/promises')
-const parseJSON = require('json-parse-even-better-errors')
-
-async function read (filename) {
-  try {
-    const data = await readFile(filename, 'utf8')
-    return data
-  } catch (err) {
-    err.message = `Could not read package.json: ${err}`
-    throw err
-  }
-}
-
-function parse (data) {
-  try {
-    const content = parseJSON(data)
-    return content
-  } catch (err) {
-    err.message = `Invalid package.json: ${err}`
-    throw err
-  }
-}
-
-// This is what most external libs will use.
-// PackageJson will call read and parse separately
-async function readPackage (filename) {
-  const data = await read(filename)
-  const content = parse(data)
-  return content
-}
-
-module.exports = {
-  read,
-  parse,
-  readPackage,
-}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js
deleted file mode 100644
index 0bd0d5199da58..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/sort.js
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * arbitrary sort order for package.json largely pulled from:
- * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
- *
- * cross checked with:
- * https://github.com/npm/types/blob/main/types/index.d.ts#L104
- * https://docs.npmjs.com/cli/configuring-npm/package-json
- */
-function packageSort (json) {
-  const {
-    name,
-    version,
-    private: isPrivate,
-    description,
-    keywords,
-    homepage,
-    bugs,
-    repository,
-    funding,
-    license,
-    author,
-    maintainers,
-    contributors,
-    type,
-    imports,
-    exports,
-    main,
-    browser,
-    types,
-    bin,
-    man,
-    directories,
-    files,
-    workspaces,
-    scripts,
-    config,
-    dependencies,
-    devDependencies,
-    peerDependencies,
-    peerDependenciesMeta,
-    optionalDependencies,
-    bundledDependencies,
-    bundleDependencies,
-    engines,
-    os,
-    cpu,
-    publishConfig,
-    devEngines,
-    licenses,
-    overrides,
-    ...rest
-  } = json
-
-  return {
-    ...(typeof name !== 'undefined' ? { name } : {}),
-    ...(typeof version !== 'undefined' ? { version } : {}),
-    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
-    ...(typeof description !== 'undefined' ? { description } : {}),
-    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
-    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
-    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
-    ...(typeof repository !== 'undefined' ? { repository } : {}),
-    ...(typeof funding !== 'undefined' ? { funding } : {}),
-    ...(typeof license !== 'undefined' ? { license } : {}),
-    ...(typeof author !== 'undefined' ? { author } : {}),
-    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
-    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
-    ...(typeof type !== 'undefined' ? { type } : {}),
-    ...(typeof imports !== 'undefined' ? { imports } : {}),
-    ...(typeof exports !== 'undefined' ? { exports } : {}),
-    ...(typeof main !== 'undefined' ? { main } : {}),
-    ...(typeof browser !== 'undefined' ? { browser } : {}),
-    ...(typeof types !== 'undefined' ? { types } : {}),
-    ...(typeof bin !== 'undefined' ? { bin } : {}),
-    ...(typeof man !== 'undefined' ? { man } : {}),
-    ...(typeof directories !== 'undefined' ? { directories } : {}),
-    ...(typeof files !== 'undefined' ? { files } : {}),
-    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
-    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
-    ...(typeof config !== 'undefined' ? { config } : {}),
-    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
-    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
-    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
-    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
-    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
-    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
-    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
-    ...(typeof engines !== 'undefined' ? { engines } : {}),
-    ...(typeof os !== 'undefined' ? { os } : {}),
-    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
-    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
-    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
-    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
-    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
-    ...rest,
-  }
-}
-
-module.exports = {
-  packageSort,
-}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js
deleted file mode 100644
index 7259949ab661d..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-dependencies.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const depTypes = new Set([
-  'dependencies',
-  'optionalDependencies',
-  'devDependencies',
-  'peerDependencies',
-])
-
-// sort alphabetically all types of deps for a given package
-const orderDeps = (content) => {
-  for (const type of depTypes) {
-    if (content && content[type]) {
-      content[type] = Object.keys(content[type])
-        .sort((a, b) => a.localeCompare(b, 'en'))
-        .reduce((res, key) => {
-          res[key] = content[type][key]
-          return res
-        }, {})
-    }
-  }
-  return content
-}
-
-const updateDependencies = ({ content, originalContent }) => {
-  const pkg = orderDeps({
-    ...content,
-  })
-
-  // optionalDependencies don't need to be repeated in two places
-  if (pkg.dependencies) {
-    if (pkg.optionalDependencies) {
-      for (const name of Object.keys(pkg.optionalDependencies)) {
-        delete pkg.dependencies[name]
-      }
-    }
-  }
-
-  const result = { ...originalContent }
-
-  // loop through all types of dependencies and update package json pkg
-  for (const type of depTypes) {
-    if (pkg[type]) {
-      result[type] = pkg[type]
-    }
-
-    // prune empty type props from resulting object
-    const emptyDepType =
-      pkg[type]
-      && typeof pkg === 'object'
-      && Object.keys(pkg[type]).length === 0
-    if (emptyDepType) {
-      delete result[type]
-    }
-  }
-
-  // if original package.json had dep in peerDeps AND deps, preserve that.
-  const { dependencies: origProd, peerDependencies: origPeer } =
-    originalContent || {}
-  const { peerDependencies: newPeer } = result
-  if (origProd && origPeer && newPeer) {
-    // we have original prod/peer deps, and new peer deps
-    // copy over any that were in both in the original
-    for (const name of Object.keys(origPeer)) {
-      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
-        result.dependencies = result.dependencies || {}
-        result.dependencies[name] = newPeer[name]
-      }
-    }
-  }
-
-  return result
-}
-
-updateDependencies.knownKeys = depTypes
-
-module.exports = updateDependencies
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js
deleted file mode 100644
index 30495e54cc3c7..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-scripts.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const updateScripts = ({ content, originalContent = {} }) => {
-  const newScripts = content.scripts
-
-  if (!newScripts) {
-    return originalContent
-  }
-
-  // validate scripts content being appended
-  const hasInvalidScripts = () =>
-    Object.entries(newScripts)
-      .some(([key, value]) =>
-        typeof key !== 'string' || typeof value !== 'string')
-  if (hasInvalidScripts()) {
-    throw Object.assign(
-      new TypeError(
-        'package.json scripts should be a key-value pair of strings.'),
-      { code: 'ESCRIPTSINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    scripts: {
-      ...newScripts,
-    },
-  }
-}
-
-module.exports = updateScripts
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js
deleted file mode 100644
index 04bf63230636f..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/lib/update-workspaces.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const updateWorkspaces = ({ content, originalContent = {} }) => {
-  const newWorkspaces = content.workspaces
-
-  if (!newWorkspaces) {
-    return originalContent
-  }
-
-  // validate workspaces content being appended
-  const hasInvalidWorkspaces = () =>
-    newWorkspaces.some(w => !(typeof w === 'string'))
-  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
-    throw Object.assign(
-      new TypeError('workspaces should be an array of strings.'),
-      { code: 'EWORKSPACESINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    workspaces: [
-      ...newWorkspaces,
-    ],
-  }
-}
-
-module.exports = updateWorkspaces
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json
deleted file mode 100644
index 263d67ff3bc5b..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "@npmcli/package-json",
-  "version": "6.2.0",
-  "description": "Programmatic API to update package.json",
-  "keywords": [
-    "npm",
-    "oss"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/package-json.git"
-  },
-  "license": "ISC",
-  "author": "GitHub Inc.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "snap": "tap",
-    "test": "tap",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "dependencies": {
-    "@npmcli/git": "^6.0.0",
-    "glob": "^10.2.2",
-    "hosted-git-info": "^8.0.0",
-    "json-parse-even-better-errors": "^4.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.5.3",
-    "validate-npm-package-license": "^3.0.4"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/template-oss": "4.23.6",
-    "read-package-json": "^7.0.0",
-    "read-package-json-fast": "^4.0.0",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.6",
-    "publish": "true"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE b/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE
new file mode 100644
index 0000000000000..ec7df93329abf
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/glob.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/glob.js
new file mode 100644
index 0000000000000..e1339bbbcf57f
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/glob.js
@@ -0,0 +1,247 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Glob = void 0;
+const minimatch_1 = require("minimatch");
+const node_url_1 = require("node:url");
+const path_scurry_1 = require("path-scurry");
+const pattern_js_1 = require("./pattern.js");
+const walker_js_1 = require("./walker.js");
+// if no process global, just call it linux.
+// so we default to case-sensitive, / separators
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * An object that can perform glob pattern traversals.
+ */
+class Glob {
+    absolute;
+    cwd;
+    root;
+    dot;
+    dotRelative;
+    follow;
+    ignore;
+    magicalBraces;
+    mark;
+    matchBase;
+    maxDepth;
+    nobrace;
+    nocase;
+    nodir;
+    noext;
+    noglobstar;
+    pattern;
+    platform;
+    realpath;
+    scurry;
+    stat;
+    signal;
+    windowsPathsNoEscape;
+    withFileTypes;
+    includeChildMatches;
+    /**
+     * The options provided to the constructor.
+     */
+    opts;
+    /**
+     * An array of parsed immutable {@link Pattern} objects.
+     */
+    patterns;
+    /**
+     * All options are stored as properties on the `Glob` object.
+     *
+     * See {@link GlobOptions} for full options descriptions.
+     *
+     * Note that a previous `Glob` object can be passed as the
+     * `GlobOptions` to another `Glob` instantiation to re-use settings
+     * and caches with a new pattern.
+     *
+     * Traversal functions can be called multiple times to run the walk
+     * again.
+     */
+    constructor(pattern, opts) {
+        /* c8 ignore start */
+        if (!opts)
+            throw new TypeError('glob options required');
+        /* c8 ignore stop */
+        this.withFileTypes = !!opts.withFileTypes;
+        this.signal = opts.signal;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.dotRelative = !!opts.dotRelative;
+        this.nodir = !!opts.nodir;
+        this.mark = !!opts.mark;
+        if (!opts.cwd) {
+            this.cwd = '';
+        }
+        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
+            opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd);
+        }
+        this.cwd = opts.cwd || '';
+        this.root = opts.root;
+        this.magicalBraces = !!opts.magicalBraces;
+        this.nobrace = !!opts.nobrace;
+        this.noext = !!opts.noext;
+        this.realpath = !!opts.realpath;
+        this.absolute = opts.absolute;
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        this.noglobstar = !!opts.noglobstar;
+        this.matchBase = !!opts.matchBase;
+        this.maxDepth =
+            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
+        this.stat = !!opts.stat;
+        this.ignore = opts.ignore;
+        if (this.withFileTypes && this.absolute !== undefined) {
+            throw new Error('cannot set absolute and withFileTypes:true');
+        }
+        if (typeof pattern === 'string') {
+            pattern = [pattern];
+        }
+        this.windowsPathsNoEscape =
+            !!opts.windowsPathsNoEscape ||
+                opts.allowWindowsEscape ===
+                    false;
+        if (this.windowsPathsNoEscape) {
+            pattern = pattern.map(p => p.replace(/\\/g, '/'));
+        }
+        if (this.matchBase) {
+            if (opts.noglobstar) {
+                throw new TypeError('base matching requires globstar');
+            }
+            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
+        }
+        this.pattern = pattern;
+        this.platform = opts.platform || defaultPlatform;
+        this.opts = { ...opts, platform: this.platform };
+        if (opts.scurry) {
+            this.scurry = opts.scurry;
+            if (opts.nocase !== undefined &&
+                opts.nocase !== opts.scurry.nocase) {
+                throw new Error('nocase option contradicts provided scurry option');
+            }
+        }
+        else {
+            const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32
+                : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin
+                    : opts.platform ? path_scurry_1.PathScurryPosix
+                        : path_scurry_1.PathScurry;
+            this.scurry = new Scurry(this.cwd, {
+                nocase: opts.nocase,
+                fs: opts.fs,
+            });
+        }
+        this.nocase = this.scurry.nocase;
+        // If you do nocase:true on a case-sensitive file system, then
+        // we need to use regexps instead of strings for non-magic
+        // path portions, because statting `aBc` won't return results
+        // for the file `AbC` for example.
+        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
+        const mmo = {
+            // default nocase based on platform
+            ...opts,
+            dot: this.dot,
+            matchBase: this.matchBase,
+            nobrace: this.nobrace,
+            nocase: this.nocase,
+            nocaseMagicOnly,
+            nocomment: true,
+            noext: this.noext,
+            nonegate: true,
+            optimizationLevel: 2,
+            platform: this.platform,
+            windowsPathsNoEscape: this.windowsPathsNoEscape,
+            debug: !!this.opts.debug,
+        };
+        const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
+        const [matchSet, globParts] = mms.reduce((set, m) => {
+            set[0].push(...m.set);
+            set[1].push(...m.globParts);
+            return set;
+        }, [[], []]);
+        this.patterns = matchSet.map((set, i) => {
+            const g = globParts[i];
+            /* c8 ignore start */
+            if (!g)
+                throw new Error('invalid pattern object');
+            /* c8 ignore stop */
+            return new pattern_js_1.Pattern(set, g, 0, this.platform);
+        });
+    }
+    async walk() {
+        // Walkers always return array of Path objects, so we just have to
+        // coerce them into the right shape.  It will have already called
+        // realpath() if the option was set to do so, so we know that's cached.
+        // start out knowing the cwd, at least
+        return [
+            ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walk()),
+        ];
+    }
+    walkSync() {
+        return [
+            ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walkSync(),
+        ];
+    }
+    stream() {
+        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).stream();
+    }
+    streamSync() {
+        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).streamSync();
+    }
+    /**
+     * Default sync iteration function. Returns a Generator that
+     * iterates over the results.
+     */
+    iterateSync() {
+        return this.streamSync()[Symbol.iterator]();
+    }
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    /**
+     * Default async iteration function. Returns an AsyncGenerator that
+     * iterates over the results.
+     */
+    iterate() {
+        return this.stream()[Symbol.asyncIterator]();
+    }
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+}
+exports.Glob = Glob;
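+// A minimal usage sketch (illustrative only; the pattern and options here are
+// hypothetical). One Glob instance supports one-shot walks, streams, and iteration:
+//   const { Glob } = require('./glob.js')
+//   const g = new Glob('src/**/*.js', { nodir: true })
+//   g.walkSync()                          // -> array of matching paths
+//   (inside an async function) for await (const file of g) console.log(file)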
+//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/has-magic.js
new file mode 100644
index 0000000000000..0918bd57e0f1c
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/has-magic.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.hasMagic = void 0;
+const minimatch_1 = require("minimatch");
+/**
+ * Return true if the patterns provided contain any magic glob characters,
+ * given the options provided.
+ *
+ * Brace expansion is not considered "magic" unless the `magicalBraces` option
+ * is set, as brace expansion just turns one string into an array of strings.
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
+ * `'xby'` both do not contain any magic glob characters, and it's treated the
+ * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
+ * is in the options, brace expansion _is_ treated as a pattern having magic.
+ */
+const hasMagic = (pattern, options = {}) => {
+    if (!Array.isArray(pattern)) {
+        pattern = [pattern];
+    }
+    for (const p of pattern) {
+        if (new minimatch_1.Minimatch(p, options).hasMagic())
+            return true;
+    }
+    return false;
+};
+exports.hasMagic = hasMagic;
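+// Illustrative sketch of the brace-expansion behavior described above
+// (example patterns are hypothetical):
+//   hasMagic('x{a,b}y')                            // false: 'xay'/'xby' have no magic
+//   hasMagic('x{a,b}y', { magicalBraces: true })   // true: braces count as magic
+//   hasMagic('x*y')                                // true: '*' is always magic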
+//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/ignore.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/ignore.js
new file mode 100644
index 0000000000000..5f1fde0680dea
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/ignore.js
@@ -0,0 +1,119 @@
+"use strict";
+// give it a pattern, and it'll be able to tell you if
+// a given path should be ignored.
+// Ignoring a path ignores its children if the pattern ends in /**
+// Ignores are always parsed in dot:true mode
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Ignore = void 0;
+const minimatch_1 = require("minimatch");
+const pattern_js_1 = require("./pattern.js");
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * Class used to process ignored patterns
+ */
+class Ignore {
+    relative;
+    relativeChildren;
+    absolute;
+    absoluteChildren;
+    platform;
+    mmopts;
+    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
+        this.relative = [];
+        this.absolute = [];
+        this.relativeChildren = [];
+        this.absoluteChildren = [];
+        this.platform = platform;
+        this.mmopts = {
+            dot: true,
+            nobrace,
+            nocase,
+            noext,
+            noglobstar,
+            optimizationLevel: 2,
+            platform,
+            nocomment: true,
+            nonegate: true,
+        };
+        for (const ign of ignored)
+            this.add(ign);
+    }
+    add(ign) {
+        // this is a little weird, but it gives us a clean set of optimized
+        // minimatch matchers, without getting tripped up if one of them
+        // ends in /** inside a brace section, and it's only inefficient at
+        // the start of the walk, not along it.
+        // It'd be nice if the Pattern class just had a .test() method, but
+        // handling globstars is a bit of a pita, and that code already lives
+        // in minimatch anyway.
+        // Another way would be if maybe Minimatch could take its set/globParts
+        // as an option, and then we could at least just use Pattern to test
+        // for absolute-ness.
+        // Yet another way, Minimatch could take an array of glob strings, and
+        // a cwd option, and do the right thing.
+        const mm = new minimatch_1.Minimatch(ign, this.mmopts);
+        for (let i = 0; i < mm.set.length; i++) {
+            const parsed = mm.set[i];
+            const globParts = mm.globParts[i];
+            /* c8 ignore start */
+            if (!parsed || !globParts) {
+                throw new Error('invalid pattern object');
+            }
+            // strip off leading ./ portions
+            // https://github.com/isaacs/node-glob/issues/570
+            while (parsed[0] === '.' && globParts[0] === '.') {
+                parsed.shift();
+                globParts.shift();
+            }
+            /* c8 ignore stop */
+            const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform);
+            const m = new minimatch_1.Minimatch(p.globString(), this.mmopts);
+            const children = globParts[globParts.length - 1] === '**';
+            const absolute = p.isAbsolute();
+            if (absolute)
+                this.absolute.push(m);
+            else
+                this.relative.push(m);
+            if (children) {
+                if (absolute)
+                    this.absoluteChildren.push(m);
+                else
+                    this.relativeChildren.push(m);
+            }
+        }
+    }
+    ignored(p) {
+        const fullpath = p.fullpath();
+        const fullpaths = `${fullpath}/`;
+        const relative = p.relative() || '.';
+        const relatives = `${relative}/`;
+        for (const m of this.relative) {
+            if (m.match(relative) || m.match(relatives))
+                return true;
+        }
+        for (const m of this.absolute) {
+            if (m.match(fullpath) || m.match(fullpaths))
+                return true;
+        }
+        return false;
+    }
+    childrenIgnored(p) {
+        const fullpath = p.fullpath() + '/';
+        const relative = (p.relative() || '.') + '/';
+        for (const m of this.relativeChildren) {
+            if (m.match(relative))
+                return true;
+        }
+        for (const m of this.absoluteChildren) {
+            if (m.match(fullpath))
+                return true;
+        }
+        return false;
+    }
+}
+exports.Ignore = Ignore;
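+// Illustrative sketch (hypothetical pattern; `p` is a path-scurry Path as used
+// by the walker, not a plain string):
+//   const ign = new Ignore(['node_modules/**'], { platform: 'linux' })
+//   ign.ignored(p)          // true for entries inside node_modules
+//   ign.childrenIgnored(p)  // true as well, since the pattern ends in '/**'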
+//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/index.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/index.js
new file mode 100644
index 0000000000000..151495d170efa
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/index.js
@@ -0,0 +1,68 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0;
+exports.globStreamSync = globStreamSync;
+exports.globStream = globStream;
+exports.globSync = globSync;
+exports.globIterateSync = globIterateSync;
+exports.globIterate = globIterate;
+const minimatch_1 = require("minimatch");
+const glob_js_1 = require("./glob.js");
+const has_magic_js_1 = require("./has-magic.js");
+var minimatch_2 = require("minimatch");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } });
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } });
+var glob_js_2 = require("./glob.js");
+Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } });
+var has_magic_js_2 = require("./has-magic.js");
+Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } });
+var ignore_js_1 = require("./ignore.js");
+Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } });
+function globStreamSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).streamSync();
+}
+function globStream(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).stream();
+}
+function globSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).walkSync();
+}
+async function glob_(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).walk();
+}
+function globIterateSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).iterateSync();
+}
+function globIterate(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).iterate();
+}
+// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
+exports.streamSync = globStreamSync;
+exports.stream = Object.assign(globStream, { sync: globStreamSync });
+exports.iterateSync = globIterateSync;
+exports.iterate = Object.assign(globIterate, {
+    sync: globIterateSync,
+});
+exports.sync = Object.assign(globSync, {
+    stream: globStreamSync,
+    iterate: globIterateSync,
+});
+exports.glob = Object.assign(glob_, {
+    glob: glob_,
+    globSync,
+    sync: exports.sync,
+    globStream,
+    stream: exports.stream,
+    globStreamSync,
+    streamSync: exports.streamSync,
+    globIterate,
+    iterate: exports.iterate,
+    globIterateSync,
+    iterateSync: exports.iterateSync,
+    Glob: glob_js_1.Glob,
+    hasMagic: has_magic_js_1.hasMagic,
+    escape: minimatch_1.escape,
+    unescape: minimatch_1.unescape,
+});
+exports.glob.glob = exports.glob;
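+// Sketch of the aliases wired up above (the pattern is hypothetical); all of
+// these resolve the same matches, differing only in sync/async/stream shape:
+//   glob('**/*.md')            // Promise<string[]>
+//   glob.sync('**/*.md')       // string[]
+//   glob.stream('**/*.md')     // Minipass stream of matches
+//   glob.iterate('**/*.md')    // async iterator over matches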
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/package.json b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/pattern.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/pattern.js
new file mode 100644
index 0000000000000..f0de35fb5bed9
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/pattern.js
@@ -0,0 +1,219 @@
+"use strict";
+// this is just a very light wrapper around 2 arrays with an offset index
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Pattern = void 0;
+const minimatch_1 = require("minimatch");
+const isPatternList = (pl) => pl.length >= 1;
+const isGlobList = (gl) => gl.length >= 1;
+/**
+ * An immutable-ish view on an array of glob parts and their parsed
+ * results
+ */
+class Pattern {
+    #patternList;
+    #globList;
+    #index;
+    length;
+    #platform;
+    #rest;
+    #globString;
+    #isDrive;
+    #isUNC;
+    #isAbsolute;
+    #followGlobstar = true;
+    constructor(patternList, globList, index, platform) {
+        if (!isPatternList(patternList)) {
+            throw new TypeError('empty pattern list');
+        }
+        if (!isGlobList(globList)) {
+            throw new TypeError('empty glob list');
+        }
+        if (globList.length !== patternList.length) {
+            throw new TypeError('mismatched pattern list and glob list lengths');
+        }
+        this.length = patternList.length;
+        if (index < 0 || index >= this.length) {
+            throw new TypeError('index out of range');
+        }
+        this.#patternList = patternList;
+        this.#globList = globList;
+        this.#index = index;
+        this.#platform = platform;
+        // normalize root entries of absolute patterns on initial creation.
+        if (this.#index === 0) {
+            // c: => ['c:/']
+            // C:/ => ['C:/']
+            // C:/x => ['C:/', 'x']
+            // //host/share => ['//host/share/']
+            // //host/share/ => ['//host/share/']
+            // //host/share/x => ['//host/share/', 'x']
+            // /etc => ['/', 'etc']
+            // / => ['/']
+            if (this.isUNC()) {
+                // '' / '' / 'host' / 'share'
+                const [p0, p1, p2, p3, ...prest] = this.#patternList;
+                const [g0, g1, g2, g3, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = [p0, p1, p2, p3, ''].join('/');
+                const g = [g0, g1, g2, g3, ''].join('/');
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+            else if (this.isDrive() || this.isAbsolute()) {
+                const [p1, ...prest] = this.#patternList;
+                const [g1, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = p1 + '/';
+                const g = g1 + '/';
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+        }
+    }
+    /**
+     * The first entry in the parsed list of patterns
+     */
+    pattern() {
+        return this.#patternList[this.#index];
+    }
+    /**
+     * true if pattern() returns a string
+     */
+    isString() {
+        return typeof this.#patternList[this.#index] === 'string';
+    }
+    /**
+     * true if pattern() returns GLOBSTAR
+     */
+    isGlobstar() {
+        return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
+    }
+    /**
+     * true if pattern() returns a regexp
+     */
+    isRegExp() {
+        return this.#patternList[this.#index] instanceof RegExp;
+    }
+    /**
+     * The /-joined set of glob parts that make up this pattern
+     */
+    globString() {
+        return (this.#globString =
+            this.#globString ||
+                (this.#index === 0 ?
+                    this.isAbsolute() ?
+                        this.#globList[0] + this.#globList.slice(1).join('/')
+                        : this.#globList.join('/')
+                    : this.#globList.slice(this.#index).join('/')));
+    }
+    /**
+     * true if there are more pattern parts after this one
+     */
+    hasMore() {
+        return this.length > this.#index + 1;
+    }
+    /**
+     * The rest of the pattern after this part, or null if this is the end
+     */
+    rest() {
+        if (this.#rest !== undefined)
+            return this.#rest;
+        if (!this.hasMore())
+            return (this.#rest = null);
+        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
+        this.#rest.#isAbsolute = this.#isAbsolute;
+        this.#rest.#isUNC = this.#isUNC;
+        this.#rest.#isDrive = this.#isDrive;
+        return this.#rest;
+    }
+    /**
+     * true if the pattern represents a //unc/path/ on windows
+     */
+    isUNC() {
+        const pl = this.#patternList;
+        return this.#isUNC !== undefined ?
+            this.#isUNC
+            : (this.#isUNC =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    pl[0] === '' &&
+                    pl[1] === '' &&
+                    typeof pl[2] === 'string' &&
+                    !!pl[2] &&
+                    typeof pl[3] === 'string' &&
+                    !!pl[3]);
+    }
+    // pattern like C:/...
+    // split = ['C:', ...]
+    // XXX: would be nice to handle patterns like `c:*` to test the cwd
+    // in c: for *, but I don't know of a way to even figure out what that
+    // cwd is without actually chdir'ing into it?
+    /**
+     * True if the pattern starts with a drive letter on Windows
+     */
+    isDrive() {
+        const pl = this.#patternList;
+        return this.#isDrive !== undefined ?
+            this.#isDrive
+            : (this.#isDrive =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    this.length > 1 &&
+                    typeof pl[0] === 'string' &&
+                    /^[a-z]:$/i.test(pl[0]));
+    }
+    // pattern = '/' or '/...' or '/x/...'
+    // split = ['', ''] or ['', ...] or ['', 'x', ...]
+    // Drive and UNC both considered absolute on windows
+    /**
+     * True if the pattern is rooted on an absolute path
+     */
+    isAbsolute() {
+        const pl = this.#patternList;
+        return this.#isAbsolute !== undefined ?
+            this.#isAbsolute
+            : (this.#isAbsolute =
+                (pl[0] === '' && pl.length > 1) ||
+                    this.isDrive() ||
+                    this.isUNC());
+    }
+    /**
+     * consume the root of the pattern, and return it
+     */
+    root() {
+        const p = this.#patternList[0];
+        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
+            p
+            : '';
+    }
+    /**
+     * Check to see if the current globstar pattern is allowed to follow
+     * a symbolic link.
+     */
+    checkFollowGlobstar() {
+        return !(this.#index === 0 ||
+            !this.isGlobstar() ||
+            !this.#followGlobstar);
+    }
+    /**
+     * Mark that the current globstar pattern is following a symbolic link
+     */
+    markFollowGlobstar() {
+        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
+            return false;
+        this.#followGlobstar = false;
+        return true;
+    }
+}
+exports.Pattern = Pattern;
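+// Illustrative sketch of how the walker consumes a Pattern (the glob string is
+// hypothetical; set/globParts come from a parsed Minimatch, as in glob.js):
+//   const mm = new minimatch_1.Minimatch('src/**/*.js')
+//   const pat = new Pattern(mm.set[0], mm.globParts[0], 0, 'linux')
+//   pat.pattern()              // 'src' -- a literal, non-magic portion
+//   pat.rest().isGlobstar()    // true -- the '**' portion comes next
+//   pat.globString()           // 'src/**/*.js'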
+//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/processor.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/processor.js
new file mode 100644
index 0000000000000..ee3bb4397e0b2
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/processor.js
@@ -0,0 +1,301 @@
+"use strict";
+// synchronous utility for filtering entries and calculating subwalks
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
+const minimatch_1 = require("minimatch");
+/**
+ * A cache of which patterns have been processed for a given Path
+ */
+class HasWalkedCache {
+    store;
+    constructor(store = new Map()) {
+        this.store = store;
+    }
+    copy() {
+        return new HasWalkedCache(new Map(this.store));
+    }
+    hasWalked(target, pattern) {
+        return this.store.get(target.fullpath())?.has(pattern.globString());
+    }
+    storeWalked(target, pattern) {
+        const fullpath = target.fullpath();
+        const cached = this.store.get(fullpath);
+        if (cached)
+            cached.add(pattern.globString());
+        else
+            this.store.set(fullpath, new Set([pattern.globString()]));
+    }
+}
+exports.HasWalkedCache = HasWalkedCache;
+/**
+ * A record of which paths have been matched in a given walk step,
+ * and whether they only are considered a match if they are a directory,
+ * and whether their absolute or relative path should be returned.
+ */
+class MatchRecord {
+    store = new Map();
+    add(target, absolute, ifDir) {
+        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
+        const current = this.store.get(target);
+        this.store.set(target, current === undefined ? n : n & current);
+    }
+    // match, absolute, ifdir
+    entries() {
+        return [...this.store.entries()].map(([path, n]) => [
+            path,
+            !!(n & 2),
+            !!(n & 1),
+        ]);
+    }
+}
+exports.MatchRecord = MatchRecord;
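+// Note on the bit-packing above: add() encodes [absolute, ifDir] as
+// (absolute ? 2 : 0) | (ifDir ? 1 : 0) and ANDs it with any prior value,
+// so a flag survives only if every recorded match for that path agrees.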
+/**
+ * A collection of patterns that must be processed in a subsequent step
+ * for a given path.
+ */
+class SubWalks {
+    store = new Map();
+    add(target, pattern) {
+        if (!target.canReaddir()) {
+            return;
+        }
+        const subs = this.store.get(target);
+        if (subs) {
+            if (!subs.find(p => p.globString() === pattern.globString())) {
+                subs.push(pattern);
+            }
+        }
+        else
+            this.store.set(target, [pattern]);
+    }
+    get(target) {
+        const subs = this.store.get(target);
+        /* c8 ignore start */
+        if (!subs) {
+            throw new Error('attempting to walk unknown path');
+        }
+        /* c8 ignore stop */
+        return subs;
+    }
+    entries() {
+        return this.keys().map(k => [k, this.store.get(k)]);
+    }
+    keys() {
+        return [...this.store.keys()].filter(t => t.canReaddir());
+    }
+}
+exports.SubWalks = SubWalks;
+/**
+ * The class that processes patterns for a given path.
+ *
+ * Handles child entry filtering, and determining whether a path's
+ * directory contents must be read.
+ */
+class Processor {
+    hasWalkedCache;
+    matches = new MatchRecord();
+    subwalks = new SubWalks();
+    patterns;
+    follow;
+    dot;
+    opts;
+    constructor(opts, hasWalkedCache) {
+        this.opts = opts;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.hasWalkedCache =
+            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
+    }
+    processPatterns(target, patterns) {
+        this.patterns = patterns;
+        const processingSet = patterns.map(p => [target, p]);
+        // map of paths to the magic-starting subwalks they need to walk
+        // first item in patterns is the filter
+        for (let [t, pattern] of processingSet) {
+            this.hasWalkedCache.storeWalked(t, pattern);
+            const root = pattern.root();
+            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
+            // start absolute patterns at root
+            if (root) {
+                t = t.resolve(root === '/' && this.opts.root !== undefined ?
+                    this.opts.root
+                    : root);
+                const rest = pattern.rest();
+                if (!rest) {
+                    this.matches.add(t, true, false);
+                    continue;
+                }
+                else {
+                    pattern = rest;
+                }
+            }
+            if (t.isENOENT())
+                continue;
+            let p;
+            let rest;
+            let changed = false;
+            while (typeof (p = pattern.pattern()) === 'string' &&
+                (rest = pattern.rest())) {
+                const c = t.resolve(p);
+                t = c;
+                pattern = rest;
+                changed = true;
+            }
+            p = pattern.pattern();
+            rest = pattern.rest();
+            if (changed) {
+                if (this.hasWalkedCache.hasWalked(t, pattern))
+                    continue;
+                this.hasWalkedCache.storeWalked(t, pattern);
+            }
+            // now we have either a final string for a known entry,
+            // more strings for an unknown entry,
+            // or a pattern starting with magic, mounted on t.
+            if (typeof p === 'string') {
+                // must not be final entry, otherwise we would have
+                // concatenated it earlier.
+                const ifDir = p === '..' || p === '' || p === '.';
+                this.matches.add(t.resolve(p), absolute, ifDir);
+                continue;
+            }
+            else if (p === minimatch_1.GLOBSTAR) {
+                // if no rest, match and subwalk pattern
+                // if rest, process rest and subwalk pattern
+                // if it's a symlink, but we didn't get here by way of a
+                // globstar match (meaning it's the first time THIS globstar
+                // has traversed a symlink), then we follow it. Otherwise, stop.
+                if (!t.isSymbolicLink() ||
+                    this.follow ||
+                    pattern.checkFollowGlobstar()) {
+                    this.subwalks.add(t, pattern);
+                }
+                const rp = rest?.pattern();
+                const rrest = rest?.rest();
+                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
+                    // only HAS to be a dir if it ends in **/ or **/.
+                    // but ending in ** will match files as well.
+                    this.matches.add(t, absolute, rp === '' || rp === '.');
+                }
+                else {
+                    if (rp === '..') {
+                        // this would mean you're matching **/.. at the fs root,
+                        // and no thanks, I'm not gonna test that specific case.
+                        /* c8 ignore start */
+                        const tp = t.parent || t;
+                        /* c8 ignore stop */
+                        if (!rrest)
+                            this.matches.add(tp, absolute, true);
+                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
+                            this.subwalks.add(tp, rrest);
+                        }
+                    }
+                }
+            }
+            else if (p instanceof RegExp) {
+                this.subwalks.add(t, pattern);
+            }
+        }
+        return this;
+    }
+    subwalkTargets() {
+        return this.subwalks.keys();
+    }
+    child() {
+        return new Processor(this.opts, this.hasWalkedCache);
+    }
+    // return a new Processor containing the subwalks for each
+    // child entry, and a set of matches, and
+    // a hasWalkedCache that's a copy of this one
+    // then we're going to call
+    filterEntries(parent, entries) {
+        const patterns = this.subwalks.get(parent);
+        // put matches and entry walks into the results processor
+        const results = this.child();
+        for (const e of entries) {
+            for (const pattern of patterns) {
+                const absolute = pattern.isAbsolute();
+                const p = pattern.pattern();
+                const rest = pattern.rest();
+                if (p === minimatch_1.GLOBSTAR) {
+                    results.testGlobstar(e, pattern, rest, absolute);
+                }
+                else if (p instanceof RegExp) {
+                    results.testRegExp(e, p, rest, absolute);
+                }
+                else {
+                    results.testString(e, p, rest, absolute);
+                }
+            }
+        }
+        return results;
+    }
+    testGlobstar(e, pattern, rest, absolute) {
+        if (this.dot || !e.name.startsWith('.')) {
+            if (!pattern.hasMore()) {
+                this.matches.add(e, absolute, false);
+            }
+            if (e.canReaddir()) {
+                // if we're in follow mode or it's not a symlink, just keep
+                // testing the same pattern. If there's more after the globstar,
+                // then this symlink consumes the globstar. If not, then we can
+                // follow at most ONE symlink along the way, so we mark it, which
+                // also checks to ensure that it wasn't already marked.
+                if (this.follow || !e.isSymbolicLink()) {
+                    this.subwalks.add(e, pattern);
+                }
+                else if (e.isSymbolicLink()) {
+                    if (rest && pattern.checkFollowGlobstar()) {
+                        this.subwalks.add(e, rest);
+                    }
+                    else if (pattern.markFollowGlobstar()) {
+                        this.subwalks.add(e, pattern);
+                    }
+                }
+            }
+        }
+        // if the NEXT thing matches this entry, then also add
+        // the rest.
+        if (rest) {
+            const rp = rest.pattern();
+            if (typeof rp === 'string' &&
+                // dots and empty were handled already
+                rp !== '..' &&
+                rp !== '' &&
+                rp !== '.') {
+                this.testString(e, rp, rest.rest(), absolute);
+            }
+            else if (rp === '..') {
+                /* c8 ignore start */
+                const ep = e.parent || e;
+                /* c8 ignore stop */
+                this.subwalks.add(ep, rest);
+            }
+            else if (rp instanceof RegExp) {
+                this.testRegExp(e, rp, rest.rest(), absolute);
+            }
+        }
+    }
+    testRegExp(e, p, rest, absolute) {
+        if (!p.test(e.name))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+    testString(e, p, rest, absolute) {
+        // should never happen?
+        if (!e.isNamed(p))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+}
+exports.Processor = Processor;
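+// Illustrative sketch of one walk step (names are assumptions: `patterns` is
+// the Pattern[] built in glob.js and `cwd` is a path-scurry Path):
+//   const proc = new Processor(opts).processPatterns(cwd, patterns)
+//   proc.matches.entries()            // [path, absolute, ifDir] tuples matched so far
+//   proc.subwalkTargets()             // directories whose entries still need reading
+//   proc.filterEntries(dir, entries)  // next-step Processor for a readdir result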
+//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/walker.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/walker.js
new file mode 100644
index 0000000000000..cb15946d9a852
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/walker.js
@@ -0,0 +1,387 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0;
+/**
+ * Single-use utility classes to provide functionality to the {@link Glob}
+ * methods.
+ *
+ * @module
+ */
+const minipass_1 = require("minipass");
+const ignore_js_1 = require("./ignore.js");
+const processor_js_1 = require("./processor.js");
+const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts)
+    : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts)
+        : ignore;
+/**
+ * basic walking utilities that all the glob walker types use
+ */
+class GlobUtil {
+    path;
+    patterns;
+    opts;
+    seen = new Set();
+    paused = false;
+    aborted = false;
+    #onResume = [];
+    #ignore;
+    #sep;
+    signal;
+    maxDepth;
+    includeChildMatches;
+    constructor(patterns, path, opts) {
+        this.patterns = patterns;
+        this.path = path;
+        this.opts = opts;
+        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        if (opts.ignore || !this.includeChildMatches) {
+            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
+            if (!this.includeChildMatches &&
+                typeof this.#ignore.add !== 'function') {
+                const m = 'cannot ignore child matches, ignore lacks add() method.';
+                throw new Error(m);
+            }
+        }
+        // ignore, always set with maxDepth, but it's optional on the
+        // GlobOptions type
+        /* c8 ignore start */
+        this.maxDepth = opts.maxDepth || Infinity;
+        /* c8 ignore stop */
+        if (opts.signal) {
+            this.signal = opts.signal;
+            this.signal.addEventListener('abort', () => {
+                this.#onResume.length = 0;
+            });
+        }
+    }
+    #ignored(path) {
+        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
+    }
+    #childrenIgnored(path) {
+        return !!this.#ignore?.childrenIgnored?.(path);
+    }
+    // backpressure mechanism
+    pause() {
+        this.paused = true;
+    }
+    resume() {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore stop */
+        this.paused = false;
+        let fn = undefined;
+        while (!this.paused && (fn = this.#onResume.shift())) {
+            fn();
+        }
+    }
+    onResume(fn) {
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore start */
+        if (!this.paused) {
+            fn();
+        }
+        else {
+            /* c8 ignore stop */
+            this.#onResume.push(fn);
+        }
+    }
+    // do the requisite realpath/stat checking, and return the path
+    // to add or undefined to filter it out.
+    async matchCheck(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || (await e.realpath());
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? await e.lstat() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = await s.realpath();
+            /* c8 ignore start */
+            if (target && (target.isUnknown() || this.opts.stat)) {
+                await target.lstat();
+            }
+            /* c8 ignore stop */
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchCheckTest(e, ifDir) {
+        return (e &&
+            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
+            (!ifDir || e.canReaddir()) &&
+            (!this.opts.nodir || !e.isDirectory()) &&
+            (!this.opts.nodir ||
+                !this.opts.follow ||
+                !e.isSymbolicLink() ||
+                !e.realpathCached()?.isDirectory()) &&
+            !this.#ignored(e)) ?
+            e
+            : undefined;
+    }
+    matchCheckSync(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || e.realpathSync();
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? e.lstatSync() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = s.realpathSync();
+            if (target && (target?.isUnknown() || this.opts.stat)) {
+                target.lstatSync();
+            }
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchFinish(e, absolute) {
+        if (this.#ignored(e))
+            return;
+        // we know we have an ignore if this is false, but TS doesn't
+        if (!this.includeChildMatches && this.#ignore?.add) {
+            const ign = `${e.relativePosix()}/**`;
+            this.#ignore.add(ign);
+        }
+        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
+        this.seen.add(e);
+        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
+        // ok, we have what we need!
+        if (this.opts.withFileTypes) {
+            this.matchEmit(e);
+        }
+        else if (abs) {
+            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
+            this.matchEmit(abs + mark);
+        }
+        else {
+            const rel = this.opts.posix ? e.relativePosix() : e.relative();
+            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
+                '.' + this.#sep
+                : '';
+            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
+        }
+    }
+    async match(e, absolute, ifDir) {
+        const p = await this.matchCheck(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    matchSync(e, absolute, ifDir) {
+        const p = this.matchCheckSync(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    walkCB(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb);
+    }
+    walkCB2(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const childrenCached = t.readdirCached();
+            if (t.calledReaddir())
+                this.walkCB3(t, childrenCached, processor, next);
+            else {
+                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
+            }
+        }
+        next();
+    }
+    walkCB3(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+    walkCBSync(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb);
+    }
+    walkCB2Sync(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const children = t.readdirSync();
+            this.walkCB3Sync(t, children, processor, next);
+        }
+        next();
+    }
+    walkCB3Sync(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2Sync(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+}
+exports.GlobUtil = GlobUtil;
+class GlobWalker extends GlobUtil {
+    matches = new Set();
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+    }
+    matchEmit(e) {
+        this.matches.add(e);
+    }
+    async walk() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            await this.path.lstat();
+        }
+        await new Promise((res, rej) => {
+            this.walkCB(this.path, this.patterns, () => {
+                if (this.signal?.aborted) {
+                    rej(this.signal.reason);
+                }
+                else {
+                    res(this.matches);
+                }
+            });
+        });
+        return this.matches;
+    }
+    walkSync() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        // nothing for the callback to do, because this never pauses
+        this.walkCBSync(this.path, this.patterns, () => {
+            if (this.signal?.aborted)
+                throw this.signal.reason;
+        });
+        return this.matches;
+    }
+}
+exports.GlobWalker = GlobWalker;
+class GlobStream extends GlobUtil {
+    results;
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+        this.results = new minipass_1.Minipass({
+            signal: this.signal,
+            objectMode: true,
+        });
+        this.results.on('drain', () => this.resume());
+        this.results.on('resume', () => this.resume());
+    }
+    matchEmit(e) {
+        this.results.write(e);
+        if (!this.results.flowing)
+            this.pause();
+    }
+    stream() {
+        const target = this.path;
+        if (target.isUnknown()) {
+            target.lstat().then(() => {
+                this.walkCB(target, this.patterns, () => this.results.end());
+            });
+        }
+        else {
+            this.walkCB(target, this.patterns, () => this.results.end());
+        }
+        return this.results;
+    }
+    streamSync() {
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        this.walkCBSync(this.path, this.patterns, () => this.results.end());
+        return this.results;
+    }
+}
+exports.GlobStream = GlobStream;
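+// Illustrative sketch (the arguments are the same ones Glob passes in glob.js):
+//   await new GlobWalker(patterns, scurry.cwd, opts).walk()   // -> Set of matches
+//   new GlobStream(patterns, scurry.cwd, opts).stream()       // -> Minipass stream
+// The stream variant pauses the walk whenever the consumer stops reading
+// (the pause/resume backpressure mechanism in GlobUtil above).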
+//# sourceMappingURL=walker.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.d.mts b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.d.mts
new file mode 100644
index 0000000000000..77298e4770817
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.d.mts
@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+export {};
+//# sourceMappingURL=bin.d.mts.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs
new file mode 100755
index 0000000000000..553bb79303d90
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs
@@ -0,0 +1,276 @@
+#!/usr/bin/env node
+import { foregroundChild } from 'foreground-child';
+import { existsSync } from 'fs';
+import { jack } from 'jackspeak';
+import { loadPackageJson } from 'package-json-from-dist';
+import { join } from 'path';
+import { globStream } from './index.js';
+const { version } = loadPackageJson(import.meta.url, '../package.json');
+const j = jack({
+    usage: 'glob [options] [<pattern> [<pattern> ...]]',
+})
+    .description(`
+    Glob v${version}
+
+    Expand the positional glob expression arguments into any matching file
+    system paths found.
+  `)
+    .opt({
+    cmd: {
+        short: 'c',
+        hint: 'command',
+        description: `Run the command provided, passing the glob expression
+                    matches as arguments.`,
+    },
+})
+    .opt({
+    default: {
+        short: 'p',
+        hint: 'pattern',
+        description: `If no positional arguments are provided, glob will use
+                    this pattern`,
+    },
+})
+    .flag({
+    all: {
+        short: 'A',
+        description: `By default, the glob cli command will not expand any
+                    arguments that are an exact match to a file on disk.
+
+                    This prevents double-expanding, in case the shell expands
+                    an argument whose filename is a glob expression.
+
+                    For example, if 'app/*.ts' would match 'app/[id].ts', then
+                    on Windows powershell or cmd.exe, 'glob app/*.ts' will
+                    expand to 'app/[id].ts', as expected. However, in posix
+                    shells such as bash or zsh, the shell will first expand
+                    'app/*.ts' to a list of filenames. Then glob will look
+                    for a file matching 'app/[id].ts' (ie, 'app/i.ts' or
+                    'app/d.ts'), which is unexpected.
+
+                    Setting '--all' prevents this behavior, causing glob
+                    to treat ALL patterns as glob expressions to be expanded,
+                    even if they are an exact match to a file on disk.
+
+                    When setting this option, be sure to enquote arguments
+                    so that the shell will not expand them prior to passing
+                    them to the glob command process.
+      `,
+    },
+    absolute: {
+        short: 'a',
+        description: 'Expand to absolute paths',
+    },
+    'dot-relative': {
+        short: 'd',
+        description: `Prepend './' on relative matches`,
+    },
+    mark: {
+        short: 'm',
+        description: `Append a / on any directories matched`,
+    },
+    posix: {
+        short: 'x',
+        description: `Always resolve to posix style paths, using '/' as the
+                    directory separator, even on Windows. Drive letter
+                    absolute matches on Windows will be expanded to their
+                    full resolved UNC paths, eg instead of 'C:\\foo\\bar',
+                    it will expand to '//?/C:/foo/bar'.
+      `,
+    },
+    follow: {
+        short: 'f',
+        description: `Follow symlinked directories when expanding '**'`,
+    },
+    realpath: {
+        short: 'R',
+        description: `Call 'fs.realpath' on all of the results. In the case
+                    of an entry that cannot be resolved, the entry is
+                    omitted. This incurs a slight performance penalty, of
+                    course, because of the added system calls.`,
+    },
+    stat: {
+        short: 's',
+        description: `Call 'fs.lstat' on all entries, whether required or not
+                    to determine if it's a valid match.`,
+    },
+    'match-base': {
+        short: 'b',
+        description: `Perform a basename-only match if the pattern does not
+                    contain any slash characters. That is, '*.js' would be
+                    treated as equivalent to '**/*.js', matching js files
+                    in all directories.
+      `,
+    },
+    dot: {
+        description: `Allow patterns to match files/directories that start
+                    with '.', even if the pattern does not start with '.'
+      `,
+    },
+    nobrace: {
+        description: 'Do not expand {...} patterns',
+    },
+    nocase: {
+        description: `Perform a case-insensitive match. This defaults to
+                    'true' on macOS and Windows platforms, and false on
+                    all others.
+
+                    Note: 'nocase' should only be explicitly set when it is
+                    known that the filesystem's case sensitivity differs
+                    from the platform default. If set 'true' on
+                    case-sensitive file systems, or 'false' on case-insensitive
+                    file systems, then the walk may return more or fewer
+                    results than expected.
+      `,
+    },
+    nodir: {
+        description: `Do not match directories, only files.
+
+                    Note: to *only* match directories, append a '/' at the
+                    end of the pattern.
+      `,
+    },
+    noext: {
+        description: `Do not expand extglob patterns, such as '+(a|b)'`,
+    },
+    noglobstar: {
+        description: `Do not expand '**' against multiple path portions.
+                    Ie, treat it as a normal '*' instead.`,
+    },
+    'windows-path-no-escape': {
+        description: `Use '\\' as a path separator *only*, and *never* as an
+                    escape character. If set, all '\\' characters are
+                    replaced with '/' in the pattern.`,
+    },
+})
+    .num({
+    'max-depth': {
+        short: 'D',
+        description: `Maximum depth to traverse from the current
+                    working directory`,
+    },
+})
+    .opt({
+    cwd: {
+        short: 'C',
+        description: 'Current working directory to execute/match in',
+        default: process.cwd(),
+    },
+    root: {
+        short: 'r',
+        description: `A string path resolved against the 'cwd', which is
+                    used as the starting point for absolute patterns that
+                    start with '/' (but not drive letters or UNC paths
+                    on Windows).
+
+                    Note that this *doesn't* necessarily limit the walk to
+                    the 'root' directory, and doesn't affect the cwd
+                    starting point for non-absolute patterns. A pattern
+                    containing '..' will still be able to traverse out of
+                    the root directory, if it is not an actual root directory
+                    on the filesystem, and any non-absolute patterns will
+                    still be matched in the 'cwd'.
+
+                    To start absolute and non-absolute patterns in the same
+                    path, you can use '--root=' to set it to the empty
+                    string. However, be aware that on Windows systems, a
+                    pattern like 'x:/*' or '//host/share/*' will *always*
+                    start in the 'x:/' or '//host/share/' directory,
+                    regardless of the --root setting.
+      `,
+    },
+    platform: {
+        description: `Defaults to the value of 'process.platform' if
+                    available, or 'linux' if not. Setting --platform=win32
+                    on non-Windows systems may cause strange behavior!`,
+        validOptions: [
+            'aix',
+            'android',
+            'darwin',
+            'freebsd',
+            'haiku',
+            'linux',
+            'openbsd',
+            'sunos',
+            'win32',
+            'cygwin',
+            'netbsd',
+        ],
+    },
+})
+    .optList({
+    ignore: {
+        short: 'i',
+        description: `Glob patterns to ignore`,
+    },
+})
+    .flag({
+    debug: {
+        short: 'v',
+        description: `Output a huge amount of noisy debug information about
+                    patterns as they are parsed and used to match files.`,
+    },
+    version: {
+        short: 'V',
+        description: `Output the version (${version})`,
+    },
+    help: {
+        short: 'h',
+        description: 'Show this usage information',
+    },
+});
+try {
+    const { positionals, values } = j.parse();
+    if (values.version) {
+        console.log(version);
+        process.exit(0);
+    }
+    if (values.help) {
+        console.log(j.usage());
+        process.exit(0);
+    }
+    if (positionals.length === 0 && !values.default)
+        throw 'No patterns provided';
+    if (positionals.length === 0 && values.default)
+        positionals.push(values.default);
+    const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p));
+    const matches = values.all ?
+        []
+        : positionals.filter(p => existsSync(p)).map(p => join(p));
+    const stream = globStream(patterns, {
+        absolute: values.absolute,
+        cwd: values.cwd,
+        dot: values.dot,
+        dotRelative: values['dot-relative'],
+        follow: values.follow,
+        ignore: values.ignore,
+        mark: values.mark,
+        matchBase: values['match-base'],
+        maxDepth: values['max-depth'],
+        nobrace: values.nobrace,
+        nocase: values.nocase,
+        nodir: values.nodir,
+        noext: values.noext,
+        noglobstar: values.noglobstar,
+        platform: values.platform,
+        realpath: values.realpath,
+        root: values.root,
+        stat: values.stat,
+        debug: values.debug,
+        posix: values.posix,
+    });
+    const cmd = values.cmd;
+    if (!cmd) {
+        matches.forEach(m => console.log(m));
+        stream.on('data', f => console.log(f));
+    }
+    else {
+        stream.on('data', f => matches.push(f));
+        stream.on('end', () => foregroundChild(cmd, matches, { shell: true }));
+    }
+}
+catch (e) {
+    console.error(j.usage());
+    console.error(e instanceof Error ? e.message : String(e));
+    process.exit(1);
+}
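+// Illustrative invocations of this CLI (patterns and command are hypothetical):
+//   glob './src/**/*.ts' --nodir       # print matching files, one per line
+//   glob -c 'wc -l' 'src/**/*.js'      # run `wc -l` with the matches as arguments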
+//# sourceMappingURL=bin.mjs.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/glob.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/glob.js
new file mode 100644
index 0000000000000..c9ff3b0036d94
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/glob.js
@@ -0,0 +1,243 @@
+import { Minimatch } from 'minimatch';
+import { fileURLToPath } from 'node:url';
+import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry';
+import { Pattern } from './pattern.js';
+import { GlobStream, GlobWalker } from './walker.js';
+// if no process global, just call it linux.
+// so we default to case-sensitive, / separators
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * An object that can perform glob pattern traversals.
+ */
+export class Glob {
+    absolute;
+    cwd;
+    root;
+    dot;
+    dotRelative;
+    follow;
+    ignore;
+    magicalBraces;
+    mark;
+    matchBase;
+    maxDepth;
+    nobrace;
+    nocase;
+    nodir;
+    noext;
+    noglobstar;
+    pattern;
+    platform;
+    realpath;
+    scurry;
+    stat;
+    signal;
+    windowsPathsNoEscape;
+    withFileTypes;
+    includeChildMatches;
+    /**
+     * The options provided to the constructor.
+     */
+    opts;
+    /**
+     * An array of parsed immutable {@link Pattern} objects.
+     */
+    patterns;
+    /**
+     * All options are stored as properties on the `Glob` object.
+     *
+     * See {@link GlobOptions} for full options descriptions.
+     *
+     * Note that a previous `Glob` object can be passed as the
+     * `GlobOptions` to another `Glob` instantiation to re-use settings
+     * and caches with a new pattern.
+     *
+     * Traversal functions can be called multiple times to run the walk
+     * again.
+     */
+    constructor(pattern, opts) {
+        /* c8 ignore start */
+        if (!opts)
+            throw new TypeError('glob options required');
+        /* c8 ignore stop */
+        this.withFileTypes = !!opts.withFileTypes;
+        this.signal = opts.signal;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.dotRelative = !!opts.dotRelative;
+        this.nodir = !!opts.nodir;
+        this.mark = !!opts.mark;
+        if (!opts.cwd) {
+            this.cwd = '';
+        }
+        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
+            opts.cwd = fileURLToPath(opts.cwd);
+        }
+        this.cwd = opts.cwd || '';
+        this.root = opts.root;
+        this.magicalBraces = !!opts.magicalBraces;
+        this.nobrace = !!opts.nobrace;
+        this.noext = !!opts.noext;
+        this.realpath = !!opts.realpath;
+        this.absolute = opts.absolute;
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        this.noglobstar = !!opts.noglobstar;
+        this.matchBase = !!opts.matchBase;
+        this.maxDepth =
+            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
+        this.stat = !!opts.stat;
+        this.ignore = opts.ignore;
+        if (this.withFileTypes && this.absolute !== undefined) {
+            throw new Error('cannot set absolute and withFileTypes:true');
+        }
+        if (typeof pattern === 'string') {
+            pattern = [pattern];
+        }
+        this.windowsPathsNoEscape =
+            !!opts.windowsPathsNoEscape ||
+                opts.allowWindowsEscape ===
+                    false;
+        if (this.windowsPathsNoEscape) {
+            pattern = pattern.map(p => p.replace(/\\/g, '/'));
+        }
+        if (this.matchBase) {
+            if (opts.noglobstar) {
+                throw new TypeError('base matching requires globstar');
+            }
+            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
+        }
+        this.pattern = pattern;
+        this.platform = opts.platform || defaultPlatform;
+        this.opts = { ...opts, platform: this.platform };
+        if (opts.scurry) {
+            this.scurry = opts.scurry;
+            if (opts.nocase !== undefined &&
+                opts.nocase !== opts.scurry.nocase) {
+                throw new Error('nocase option contradicts provided scurry option');
+            }
+        }
+        else {
+            const Scurry = opts.platform === 'win32' ? PathScurryWin32
+                : opts.platform === 'darwin' ? PathScurryDarwin
+                    : opts.platform ? PathScurryPosix
+                        : PathScurry;
+            this.scurry = new Scurry(this.cwd, {
+                nocase: opts.nocase,
+                fs: opts.fs,
+            });
+        }
+        this.nocase = this.scurry.nocase;
+        // If you do nocase:true on a case-sensitive file system, then
+        // we need to use regexps instead of strings for non-magic
+        // path portions, because statting `aBc` won't return results
+        // for the file `AbC` for example.
+        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
+        const mmo = {
+            // default nocase based on platform
+            ...opts,
+            dot: this.dot,
+            matchBase: this.matchBase,
+            nobrace: this.nobrace,
+            nocase: this.nocase,
+            nocaseMagicOnly,
+            nocomment: true,
+            noext: this.noext,
+            nonegate: true,
+            optimizationLevel: 2,
+            platform: this.platform,
+            windowsPathsNoEscape: this.windowsPathsNoEscape,
+            debug: !!this.opts.debug,
+        };
+        const mms = this.pattern.map(p => new Minimatch(p, mmo));
+        const [matchSet, globParts] = mms.reduce((set, m) => {
+            set[0].push(...m.set);
+            set[1].push(...m.globParts);
+            return set;
+        }, [[], []]);
+        this.patterns = matchSet.map((set, i) => {
+            const g = globParts[i];
+            /* c8 ignore start */
+            if (!g)
+                throw new Error('invalid pattern object');
+            /* c8 ignore stop */
+            return new Pattern(set, g, 0, this.platform);
+        });
+    }
+    async walk() {
+        // Walkers always return array of Path objects, so we just have to
+        // coerce them into the right shape.  It will have already called
+        // realpath() if the option was set to do so, so we know that's cached.
+        // start out knowing the cwd, at least
+        return [
+            ...(await new GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walk()),
+        ];
+    }
+    walkSync() {
+        return [
+            ...new GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walkSync(),
+        ];
+    }
+    stream() {
+        return new GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).stream();
+    }
+    streamSync() {
+        return new GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).streamSync();
+    }
+    /**
+     * Default sync iteration function. Returns a Generator that
+     * iterates over the results.
+     */
+    iterateSync() {
+        return this.streamSync()[Symbol.iterator]();
+    }
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    /**
+     * Default async iteration function. Returns an AsyncGenerator that
+     * iterates over the results.
+     */
+    iterate() {
+        return this.stream()[Symbol.asyncIterator]();
+    }
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+}
+//# sourceMappingURL=glob.js.map
\ No newline at end of file
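A minimal sketch of the settings/cache reuse described in the constructor docs above; the pattern strings and option values are illustrative only:

    import { Glob } from 'glob'

    // first walk resolves options and builds the PathScurry cache
    const jsGlob = new Glob('src/**/*.js', { nodir: true })
    const jsFiles = await jsGlob.walk()

    // passing the previous Glob as the options object reuses its settings
    // and scurry cache with a new pattern
    const tsGlob = new Glob('src/**/*.ts', jsGlob)
    const tsFiles = await tsGlob.walk()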
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/has-magic.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/has-magic.js
new file mode 100644
index 0000000000000..ba2321ab868d0
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/has-magic.js
@@ -0,0 +1,23 @@
+import { Minimatch } from 'minimatch';
+/**
+ * Return true if the patterns provided contain any magic glob characters,
+ * given the options provided.
+ *
+ * Brace expansion is not considered "magic" unless the `magicalBraces` option
+ * is set, as brace expansion just turns one string into an array of strings.
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
+ * `'xby'` both do not contain any magic glob characters, and it's treated the
+ * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
+ * is in the options, brace expansion _is_ treated as a pattern having magic.
+ */
+export const hasMagic = (pattern, options = {}) => {
+    if (!Array.isArray(pattern)) {
+        pattern = [pattern];
+    }
+    for (const p of pattern) {
+        if (new Minimatch(p, options).hasMagic())
+            return true;
+    }
+    return false;
+};
+//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
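A quick illustration of the magicalBraces behavior described in the doc comment above:

    import { hasMagic } from 'glob'

    hasMagic('x{a,b}y')                           // false: expands to plain strings
    hasMagic('x{a,b}y', { magicalBraces: true })  // true: brace expansion counts as magic
    hasMagic('src/**/*.js')                       // true: * and ** are magic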
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/ignore.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/ignore.js
new file mode 100644
index 0000000000000..539c4a4fdebc4
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/ignore.js
@@ -0,0 +1,115 @@
+// give it a pattern, and it'll be able to tell you if
+// a given path should be ignored.
+// Ignoring a path ignores its children if the pattern ends in /**
+// Ignores are always parsed in dot:true mode
+import { Minimatch } from 'minimatch';
+import { Pattern } from './pattern.js';
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * Class used to process ignored patterns
+ */
+export class Ignore {
+    relative;
+    relativeChildren;
+    absolute;
+    absoluteChildren;
+    platform;
+    mmopts;
+    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
+        this.relative = [];
+        this.absolute = [];
+        this.relativeChildren = [];
+        this.absoluteChildren = [];
+        this.platform = platform;
+        this.mmopts = {
+            dot: true,
+            nobrace,
+            nocase,
+            noext,
+            noglobstar,
+            optimizationLevel: 2,
+            platform,
+            nocomment: true,
+            nonegate: true,
+        };
+        for (const ign of ignored)
+            this.add(ign);
+    }
+    add(ign) {
+        // this is a little weird, but it gives us a clean set of optimized
+        // minimatch matchers, without getting tripped up if one of them
+        // ends in /** inside a brace section, and it's only inefficient at
+        // the start of the walk, not along it.
+        // It'd be nice if the Pattern class just had a .test() method, but
+        // handling globstars is a bit of a pita, and that code already lives
+        // in minimatch anyway.
+        // Another way would be if maybe Minimatch could take its set/globParts
+        // as an option, and then we could at least just use Pattern to test
+        // for absolute-ness.
+        // Yet another way, Minimatch could take an array of glob strings, and
+        // a cwd option, and do the right thing.
+        const mm = new Minimatch(ign, this.mmopts);
+        for (let i = 0; i < mm.set.length; i++) {
+            const parsed = mm.set[i];
+            const globParts = mm.globParts[i];
+            /* c8 ignore start */
+            if (!parsed || !globParts) {
+                throw new Error('invalid pattern object');
+            }
+            // strip off leading ./ portions
+            // https://github.com/isaacs/node-glob/issues/570
+            while (parsed[0] === '.' && globParts[0] === '.') {
+                parsed.shift();
+                globParts.shift();
+            }
+            /* c8 ignore stop */
+            const p = new Pattern(parsed, globParts, 0, this.platform);
+            const m = new Minimatch(p.globString(), this.mmopts);
+            const children = globParts[globParts.length - 1] === '**';
+            const absolute = p.isAbsolute();
+            if (absolute)
+                this.absolute.push(m);
+            else
+                this.relative.push(m);
+            if (children) {
+                if (absolute)
+                    this.absoluteChildren.push(m);
+                else
+                    this.relativeChildren.push(m);
+            }
+        }
+    }
+    ignored(p) {
+        const fullpath = p.fullpath();
+        const fullpaths = `${fullpath}/`;
+        const relative = p.relative() || '.';
+        const relatives = `${relative}/`;
+        for (const m of this.relative) {
+            if (m.match(relative) || m.match(relatives))
+                return true;
+        }
+        for (const m of this.absolute) {
+            if (m.match(fullpath) || m.match(fullpaths))
+                return true;
+        }
+        return false;
+    }
+    childrenIgnored(p) {
+        const fullpath = p.fullpath() + '/';
+        const relative = (p.relative() || '.') + '/';
+        for (const m of this.relativeChildren) {
+            if (m.match(relative))
+                return true;
+        }
+        for (const m of this.absoluteChildren) {
+            if (m.match(fullpath))
+                return true;
+        }
+        return false;
+    }
+}
+//# sourceMappingURL=ignore.js.map
\ No newline at end of file
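This class is normally consumed through the `ignore` option rather than directly; a small sketch of the children-ignoring behavior noted in the header comment (directory names are illustrative):

    import { globSync } from 'glob'

    // a pattern ending in /** ignores matched directories and skips
    // walking their children entirely
    globSync('**/*.js', { ignore: ['**/node_modules/**'] })

    // without the trailing /**, only the matched entries themselves are ignored
    globSync('**/*.js', { ignore: ['dist/*.js'] })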
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/index.js
new file mode 100644
index 0000000000000..e15c1f9c4cb03
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/index.js
@@ -0,0 +1,55 @@
+import { escape, unescape } from 'minimatch';
+import { Glob } from './glob.js';
+import { hasMagic } from './has-magic.js';
+export { escape, unescape } from 'minimatch';
+export { Glob } from './glob.js';
+export { hasMagic } from './has-magic.js';
+export { Ignore } from './ignore.js';
+export function globStreamSync(pattern, options = {}) {
+    return new Glob(pattern, options).streamSync();
+}
+export function globStream(pattern, options = {}) {
+    return new Glob(pattern, options).stream();
+}
+export function globSync(pattern, options = {}) {
+    return new Glob(pattern, options).walkSync();
+}
+async function glob_(pattern, options = {}) {
+    return new Glob(pattern, options).walk();
+}
+export function globIterateSync(pattern, options = {}) {
+    return new Glob(pattern, options).iterateSync();
+}
+export function globIterate(pattern, options = {}) {
+    return new Glob(pattern, options).iterate();
+}
+// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
+export const streamSync = globStreamSync;
+export const stream = Object.assign(globStream, { sync: globStreamSync });
+export const iterateSync = globIterateSync;
+export const iterate = Object.assign(globIterate, {
+    sync: globIterateSync,
+});
+export const sync = Object.assign(globSync, {
+    stream: globStreamSync,
+    iterate: globIterateSync,
+});
+export const glob = Object.assign(glob_, {
+    glob: glob_,
+    globSync,
+    sync,
+    globStream,
+    stream,
+    globStreamSync,
+    streamSync,
+    globIterate,
+    iterate,
+    globIterateSync,
+    iterateSync,
+    Glob,
+    hasMagic,
+    escape,
+    unescape,
+});
+glob.glob = glob;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
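A sketch of the aliased call styles wired up above; they all resolve to the same Glob machinery:

    import { glob, globSync, globStream } from 'glob'

    await glob('**/*.md')     // Promise<string[]>
    globSync('**/*.md')       // string[]
    glob.sync('**/*.md')      // same as globSync(...)
    glob.stream('**/*.md')    // same as globStream(...): a Minipass object stream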
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/package.json b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/pattern.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/pattern.js
new file mode 100644
index 0000000000000..b41defa10c6a3
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/pattern.js
@@ -0,0 +1,215 @@
+// this is just a very light wrapper around 2 arrays with an offset index
+import { GLOBSTAR } from 'minimatch';
+const isPatternList = (pl) => pl.length >= 1;
+const isGlobList = (gl) => gl.length >= 1;
+/**
+ * An immutable-ish view on an array of glob parts and their parsed
+ * results
+ */
+export class Pattern {
+    #patternList;
+    #globList;
+    #index;
+    length;
+    #platform;
+    #rest;
+    #globString;
+    #isDrive;
+    #isUNC;
+    #isAbsolute;
+    #followGlobstar = true;
+    constructor(patternList, globList, index, platform) {
+        if (!isPatternList(patternList)) {
+            throw new TypeError('empty pattern list');
+        }
+        if (!isGlobList(globList)) {
+            throw new TypeError('empty glob list');
+        }
+        if (globList.length !== patternList.length) {
+            throw new TypeError('mismatched pattern list and glob list lengths');
+        }
+        this.length = patternList.length;
+        if (index < 0 || index >= this.length) {
+            throw new TypeError('index out of range');
+        }
+        this.#patternList = patternList;
+        this.#globList = globList;
+        this.#index = index;
+        this.#platform = platform;
+        // normalize root entries of absolute patterns on initial creation.
+        if (this.#index === 0) {
+            // c: => ['c:/']
+            // C:/ => ['C:/']
+            // C:/x => ['C:/', 'x']
+            // //host/share => ['//host/share/']
+            // //host/share/ => ['//host/share/']
+            // //host/share/x => ['//host/share/', 'x']
+            // /etc => ['/', 'etc']
+            // / => ['/']
+            if (this.isUNC()) {
+                // '' / '' / 'host' / 'share'
+                const [p0, p1, p2, p3, ...prest] = this.#patternList;
+                const [g0, g1, g2, g3, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = [p0, p1, p2, p3, ''].join('/');
+                const g = [g0, g1, g2, g3, ''].join('/');
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+            else if (this.isDrive() || this.isAbsolute()) {
+                const [p1, ...prest] = this.#patternList;
+                const [g1, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = p1 + '/';
+                const g = g1 + '/';
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+        }
+    }
+    /**
+     * The first entry in the parsed list of patterns
+     */
+    pattern() {
+        return this.#patternList[this.#index];
+    }
+    /**
+     * true if pattern() returns a string
+     */
+    isString() {
+        return typeof this.#patternList[this.#index] === 'string';
+    }
+    /**
+     * true if pattern() returns GLOBSTAR
+     */
+    isGlobstar() {
+        return this.#patternList[this.#index] === GLOBSTAR;
+    }
+    /**
+     * true if pattern() returns a regexp
+     */
+    isRegExp() {
+        return this.#patternList[this.#index] instanceof RegExp;
+    }
+    /**
+     * The /-joined set of glob parts that make up this pattern
+     */
+    globString() {
+        return (this.#globString =
+            this.#globString ||
+                (this.#index === 0 ?
+                    this.isAbsolute() ?
+                        this.#globList[0] + this.#globList.slice(1).join('/')
+                        : this.#globList.join('/')
+                    : this.#globList.slice(this.#index).join('/')));
+    }
+    /**
+     * true if there are more pattern parts after this one
+     */
+    hasMore() {
+        return this.length > this.#index + 1;
+    }
+    /**
+     * The rest of the pattern after this part, or null if this is the end
+     */
+    rest() {
+        if (this.#rest !== undefined)
+            return this.#rest;
+        if (!this.hasMore())
+            return (this.#rest = null);
+        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
+        this.#rest.#isAbsolute = this.#isAbsolute;
+        this.#rest.#isUNC = this.#isUNC;
+        this.#rest.#isDrive = this.#isDrive;
+        return this.#rest;
+    }
+    /**
+     * true if the pattern represents a //unc/path/ on windows
+     */
+    isUNC() {
+        const pl = this.#patternList;
+        return this.#isUNC !== undefined ?
+            this.#isUNC
+            : (this.#isUNC =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    pl[0] === '' &&
+                    pl[1] === '' &&
+                    typeof pl[2] === 'string' &&
+                    !!pl[2] &&
+                    typeof pl[3] === 'string' &&
+                    !!pl[3]);
+    }
+    // pattern like C:/...
+    // split = ['C:', ...]
+    // XXX: would be nice to handle patterns like `c:*` to test the cwd
+    // in c: for *, but I don't know of a way to even figure out what that
+    // cwd is without actually chdir'ing into it?
+    /**
+     * True if the pattern starts with a drive letter on Windows
+     */
+    isDrive() {
+        const pl = this.#patternList;
+        return this.#isDrive !== undefined ?
+            this.#isDrive
+            : (this.#isDrive =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    this.length > 1 &&
+                    typeof pl[0] === 'string' &&
+                    /^[a-z]:$/i.test(pl[0]));
+    }
+    // pattern = '/' or '/...' or '/x/...'
+    // split = ['', ''] or ['', ...] or ['', 'x', ...]
+    // Drive and UNC both considered absolute on windows
+    /**
+     * True if the pattern is rooted on an absolute path
+     */
+    isAbsolute() {
+        const pl = this.#patternList;
+        return this.#isAbsolute !== undefined ?
+            this.#isAbsolute
+            : (this.#isAbsolute =
+                (pl[0] === '' && pl.length > 1) ||
+                    this.isDrive() ||
+                    this.isUNC());
+    }
+    /**
+     * consume the root of the pattern, and return it
+     */
+    root() {
+        const p = this.#patternList[0];
+        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
+            p
+            : '';
+    }
+    /**
+     * Check to see if the current globstar pattern is allowed to follow
+     * a symbolic link.
+     */
+    checkFollowGlobstar() {
+        return !(this.#index === 0 ||
+            !this.isGlobstar() ||
+            !this.#followGlobstar);
+    }
+    /**
+     * Mark that the current globstar pattern is following a symbolic link
+     */
+    markFollowGlobstar() {
+        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
+            return false;
+        this.#followGlobstar = false;
+        return true;
+    }
+}
+//# sourceMappingURL=pattern.js.map
\ No newline at end of file
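Pattern is not part of the package's public exports; Glob builds instances from minimatch's parsed output, roughly as in this sketch (the relative import is only reachable from inside this dist/esm tree, shown for illustration):

    import { Minimatch } from 'minimatch'
    import { Pattern } from './pattern.js'

    const mm = new Minimatch('src/**/*.js')
    const p = new Pattern(mm.set[0], mm.globParts[0], 0, process.platform)
    p.pattern()            // 'src' -- a literal string part
    p.hasMore()            // true
    p.rest().globString()  // '**/*.js'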
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/processor.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/processor.js
new file mode 100644
index 0000000000000..f874892ffed0c
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/processor.js
@@ -0,0 +1,294 @@
+// synchronous utility for filtering entries and calculating subwalks
+import { GLOBSTAR } from 'minimatch';
+/**
+ * A cache of which patterns have been processed for a given Path
+ */
+export class HasWalkedCache {
+    store;
+    constructor(store = new Map()) {
+        this.store = store;
+    }
+    copy() {
+        return new HasWalkedCache(new Map(this.store));
+    }
+    hasWalked(target, pattern) {
+        return this.store.get(target.fullpath())?.has(pattern.globString());
+    }
+    storeWalked(target, pattern) {
+        const fullpath = target.fullpath();
+        const cached = this.store.get(fullpath);
+        if (cached)
+            cached.add(pattern.globString());
+        else
+            this.store.set(fullpath, new Set([pattern.globString()]));
+    }
+}
+/**
+ * A record of which paths have been matched in a given walk step,
+ * and whether they only are considered a match if they are a directory,
+ * and whether their absolute or relative path should be returned.
+ */
+export class MatchRecord {
+    store = new Map();
+    add(target, absolute, ifDir) {
+        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
+        const current = this.store.get(target);
+        this.store.set(target, current === undefined ? n : n & current);
+    }
+    // match, absolute, ifdir
+    entries() {
+        return [...this.store.entries()].map(([path, n]) => [
+            path,
+            !!(n & 2),
+            !!(n & 1),
+        ]);
+    }
+}
+/**
+ * A collection of patterns that must be processed in a subsequent step
+ * for a given path.
+ */
+export class SubWalks {
+    store = new Map();
+    add(target, pattern) {
+        if (!target.canReaddir()) {
+            return;
+        }
+        const subs = this.store.get(target);
+        if (subs) {
+            if (!subs.find(p => p.globString() === pattern.globString())) {
+                subs.push(pattern);
+            }
+        }
+        else
+            this.store.set(target, [pattern]);
+    }
+    get(target) {
+        const subs = this.store.get(target);
+        /* c8 ignore start */
+        if (!subs) {
+            throw new Error('attempting to walk unknown path');
+        }
+        /* c8 ignore stop */
+        return subs;
+    }
+    entries() {
+        return this.keys().map(k => [k, this.store.get(k)]);
+    }
+    keys() {
+        return [...this.store.keys()].filter(t => t.canReaddir());
+    }
+}
+/**
+ * The class that processes patterns for a given path.
+ *
+ * Handles child entry filtering, and determining whether a path's
+ * directory contents must be read.
+ */
+export class Processor {
+    hasWalkedCache;
+    matches = new MatchRecord();
+    subwalks = new SubWalks();
+    patterns;
+    follow;
+    dot;
+    opts;
+    constructor(opts, hasWalkedCache) {
+        this.opts = opts;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.hasWalkedCache =
+            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
+    }
+    processPatterns(target, patterns) {
+        this.patterns = patterns;
+        const processingSet = patterns.map(p => [target, p]);
+        // map of paths to the magic-starting subwalks they need to walk
+        // first item in patterns is the filter
+        for (let [t, pattern] of processingSet) {
+            this.hasWalkedCache.storeWalked(t, pattern);
+            const root = pattern.root();
+            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
+            // start absolute patterns at root
+            if (root) {
+                t = t.resolve(root === '/' && this.opts.root !== undefined ?
+                    this.opts.root
+                    : root);
+                const rest = pattern.rest();
+                if (!rest) {
+                    this.matches.add(t, true, false);
+                    continue;
+                }
+                else {
+                    pattern = rest;
+                }
+            }
+            if (t.isENOENT())
+                continue;
+            let p;
+            let rest;
+            let changed = false;
+            while (typeof (p = pattern.pattern()) === 'string' &&
+                (rest = pattern.rest())) {
+                const c = t.resolve(p);
+                t = c;
+                pattern = rest;
+                changed = true;
+            }
+            p = pattern.pattern();
+            rest = pattern.rest();
+            if (changed) {
+                if (this.hasWalkedCache.hasWalked(t, pattern))
+                    continue;
+                this.hasWalkedCache.storeWalked(t, pattern);
+            }
+            // now we have either a final string for a known entry,
+            // more strings for an unknown entry,
+            // or a pattern starting with magic, mounted on t.
+            if (typeof p === 'string') {
+                // must not be final entry, otherwise we would have
+                // concatenated it earlier.
+                const ifDir = p === '..' || p === '' || p === '.';
+                this.matches.add(t.resolve(p), absolute, ifDir);
+                continue;
+            }
+            else if (p === GLOBSTAR) {
+                // if no rest, match and subwalk pattern
+                // if rest, process rest and subwalk pattern
+                // if it's a symlink, but we didn't get here by way of a
+                // globstar match (meaning it's the first time THIS globstar
+                // has traversed a symlink), then we follow it. Otherwise, stop.
+                if (!t.isSymbolicLink() ||
+                    this.follow ||
+                    pattern.checkFollowGlobstar()) {
+                    this.subwalks.add(t, pattern);
+                }
+                const rp = rest?.pattern();
+                const rrest = rest?.rest();
+                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
+                    // only HAS to be a dir if it ends in **/ or **/.
+                    // but ending in ** will match files as well.
+                    this.matches.add(t, absolute, rp === '' || rp === '.');
+                }
+                else {
+                    if (rp === '..') {
+                        // this would mean you're matching **/.. at the fs root,
+                        // and no thanks, I'm not gonna test that specific case.
+                        /* c8 ignore start */
+                        const tp = t.parent || t;
+                        /* c8 ignore stop */
+                        if (!rrest)
+                            this.matches.add(tp, absolute, true);
+                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
+                            this.subwalks.add(tp, rrest);
+                        }
+                    }
+                }
+            }
+            else if (p instanceof RegExp) {
+                this.subwalks.add(t, pattern);
+            }
+        }
+        return this;
+    }
+    subwalkTargets() {
+        return this.subwalks.keys();
+    }
+    child() {
+        return new Processor(this.opts, this.hasWalkedCache);
+    }
+    // return a new Processor containing the subwalks for each
+    // child entry, and a set of matches, and
+    // a hasWalkedCache that's a copy of this one
+    // then we're going to call
+    filterEntries(parent, entries) {
+        const patterns = this.subwalks.get(parent);
+        // put matches and entry walks into the results processor
+        const results = this.child();
+        for (const e of entries) {
+            for (const pattern of patterns) {
+                const absolute = pattern.isAbsolute();
+                const p = pattern.pattern();
+                const rest = pattern.rest();
+                if (p === GLOBSTAR) {
+                    results.testGlobstar(e, pattern, rest, absolute);
+                }
+                else if (p instanceof RegExp) {
+                    results.testRegExp(e, p, rest, absolute);
+                }
+                else {
+                    results.testString(e, p, rest, absolute);
+                }
+            }
+        }
+        return results;
+    }
+    testGlobstar(e, pattern, rest, absolute) {
+        if (this.dot || !e.name.startsWith('.')) {
+            if (!pattern.hasMore()) {
+                this.matches.add(e, absolute, false);
+            }
+            if (e.canReaddir()) {
+                // if we're in follow mode or it's not a symlink, just keep
+                // testing the same pattern. If there's more after the globstar,
+                // then this symlink consumes the globstar. If not, then we can
+                // follow at most ONE symlink along the way, so we mark it, which
+                // also checks to ensure that it wasn't already marked.
+                if (this.follow || !e.isSymbolicLink()) {
+                    this.subwalks.add(e, pattern);
+                }
+                else if (e.isSymbolicLink()) {
+                    if (rest && pattern.checkFollowGlobstar()) {
+                        this.subwalks.add(e, rest);
+                    }
+                    else if (pattern.markFollowGlobstar()) {
+                        this.subwalks.add(e, pattern);
+                    }
+                }
+            }
+        }
+        // if the NEXT thing matches this entry, then also add
+        // the rest.
+        if (rest) {
+            const rp = rest.pattern();
+            if (typeof rp === 'string' &&
+                // dots and empty were handled already
+                rp !== '..' &&
+                rp !== '' &&
+                rp !== '.') {
+                this.testString(e, rp, rest.rest(), absolute);
+            }
+            else if (rp === '..') {
+                /* c8 ignore start */
+                const ep = e.parent || e;
+                /* c8 ignore stop */
+                this.subwalks.add(ep, rest);
+            }
+            else if (rp instanceof RegExp) {
+                this.testRegExp(e, rp, rest.rest(), absolute);
+            }
+        }
+    }
+    testRegExp(e, p, rest, absolute) {
+        if (!p.test(e.name))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+    testString(e, p, rest, absolute) {
+        // should never happen?
+        if (!e.isNamed(p))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+}
+//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/walker.js b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/walker.js
new file mode 100644
index 0000000000000..3d68196c4f175
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/walker.js
@@ -0,0 +1,381 @@
+/**
+ * Single-use utility classes to provide functionality to the {@link Glob}
+ * methods.
+ *
+ * @module
+ */
+import { Minipass } from 'minipass';
+import { Ignore } from './ignore.js';
+import { Processor } from './processor.js';
+const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts)
+    : Array.isArray(ignore) ? new Ignore(ignore, opts)
+        : ignore;
+/**
+ * basic walking utilities that all the glob walker types use
+ */
+export class GlobUtil {
+    path;
+    patterns;
+    opts;
+    seen = new Set();
+    paused = false;
+    aborted = false;
+    #onResume = [];
+    #ignore;
+    #sep;
+    signal;
+    maxDepth;
+    includeChildMatches;
+    constructor(patterns, path, opts) {
+        this.patterns = patterns;
+        this.path = path;
+        this.opts = opts;
+        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        if (opts.ignore || !this.includeChildMatches) {
+            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
+            if (!this.includeChildMatches &&
+                typeof this.#ignore.add !== 'function') {
+                const m = 'cannot ignore child matches, ignore lacks add() method.';
+                throw new Error(m);
+            }
+        }
+        // ignore, always set with maxDepth, but it's optional on the
+        // GlobOptions type
+        /* c8 ignore start */
+        this.maxDepth = opts.maxDepth || Infinity;
+        /* c8 ignore stop */
+        if (opts.signal) {
+            this.signal = opts.signal;
+            this.signal.addEventListener('abort', () => {
+                this.#onResume.length = 0;
+            });
+        }
+    }
+    #ignored(path) {
+        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
+    }
+    #childrenIgnored(path) {
+        return !!this.#ignore?.childrenIgnored?.(path);
+    }
+    // backpressure mechanism
+    pause() {
+        this.paused = true;
+    }
+    resume() {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore stop */
+        this.paused = false;
+        let fn = undefined;
+        while (!this.paused && (fn = this.#onResume.shift())) {
+            fn();
+        }
+    }
+    onResume(fn) {
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore start */
+        if (!this.paused) {
+            fn();
+        }
+        else {
+            /* c8 ignore stop */
+            this.#onResume.push(fn);
+        }
+    }
+    // do the requisite realpath/stat checking, and return the path
+    // to add or undefined to filter it out.
+    async matchCheck(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || (await e.realpath());
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? await e.lstat() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = await s.realpath();
+            /* c8 ignore start */
+            if (target && (target.isUnknown() || this.opts.stat)) {
+                await target.lstat();
+            }
+            /* c8 ignore stop */
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchCheckTest(e, ifDir) {
+        return (e &&
+            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
+            (!ifDir || e.canReaddir()) &&
+            (!this.opts.nodir || !e.isDirectory()) &&
+            (!this.opts.nodir ||
+                !this.opts.follow ||
+                !e.isSymbolicLink() ||
+                !e.realpathCached()?.isDirectory()) &&
+            !this.#ignored(e)) ?
+            e
+            : undefined;
+    }
+    matchCheckSync(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || e.realpathSync();
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? e.lstatSync() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = s.realpathSync();
+            if (target && (target?.isUnknown() || this.opts.stat)) {
+                target.lstatSync();
+            }
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchFinish(e, absolute) {
+        if (this.#ignored(e))
+            return;
+        // we know we have an ignore if this is false, but TS doesn't
+        if (!this.includeChildMatches && this.#ignore?.add) {
+            const ign = `${e.relativePosix()}/**`;
+            this.#ignore.add(ign);
+        }
+        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
+        this.seen.add(e);
+        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
+        // ok, we have what we need!
+        if (this.opts.withFileTypes) {
+            this.matchEmit(e);
+        }
+        else if (abs) {
+            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
+            this.matchEmit(abs + mark);
+        }
+        else {
+            const rel = this.opts.posix ? e.relativePosix() : e.relative();
+            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
+                '.' + this.#sep
+                : '';
+            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
+        }
+    }
+    async match(e, absolute, ifDir) {
+        const p = await this.matchCheck(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    matchSync(e, absolute, ifDir) {
+        const p = this.matchCheckSync(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    walkCB(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2(target, patterns, new Processor(this.opts), cb);
+    }
+    walkCB2(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const childrenCached = t.readdirCached();
+            if (t.calledReaddir())
+                this.walkCB3(t, childrenCached, processor, next);
+            else {
+                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
+            }
+        }
+        next();
+    }
+    walkCB3(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+    walkCBSync(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2Sync(target, patterns, new Processor(this.opts), cb);
+    }
+    walkCB2Sync(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const children = t.readdirSync();
+            this.walkCB3Sync(t, children, processor, next);
+        }
+        next();
+    }
+    walkCB3Sync(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2Sync(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+}
+export class GlobWalker extends GlobUtil {
+    matches = new Set();
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+    }
+    matchEmit(e) {
+        this.matches.add(e);
+    }
+    async walk() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            await this.path.lstat();
+        }
+        await new Promise((res, rej) => {
+            this.walkCB(this.path, this.patterns, () => {
+                if (this.signal?.aborted) {
+                    rej(this.signal.reason);
+                }
+                else {
+                    res(this.matches);
+                }
+            });
+        });
+        return this.matches;
+    }
+    walkSync() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        // nothing for the callback to do, because this never pauses
+        this.walkCBSync(this.path, this.patterns, () => {
+            if (this.signal?.aborted)
+                throw this.signal.reason;
+        });
+        return this.matches;
+    }
+}
+export class GlobStream extends GlobUtil {
+    results;
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+        this.results = new Minipass({
+            signal: this.signal,
+            objectMode: true,
+        });
+        this.results.on('drain', () => this.resume());
+        this.results.on('resume', () => this.resume());
+    }
+    matchEmit(e) {
+        this.results.write(e);
+        if (!this.results.flowing)
+            this.pause();
+    }
+    stream() {
+        const target = this.path;
+        if (target.isUnknown()) {
+            target.lstat().then(() => {
+                this.walkCB(target, this.patterns, () => this.results.end());
+            });
+        }
+        else {
+            this.walkCB(target, this.patterns, () => this.results.end());
+        }
+        return this.results;
+    }
+    streamSync() {
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        this.walkCBSync(this.path, this.patterns, () => this.results.end());
+        return this.results;
+    }
+}
+//# sourceMappingURL=walker.js.map
\ No newline at end of file
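These walker classes back the public stream and iterate methods; a small consumption sketch (the stream is a Minipass in object mode, so both event and async-iteration styles work):

    import { globStream, globIterate } from 'glob'

    // event style: results are pushed, and the walker pauses when the
    // stream stops flowing (see matchEmit/pause above)
    globStream('**/*.js').on('data', path => console.log(path))

    // async iteration drives the same machinery
    for await (const path of globIterate('**/*.js')) {
      console.log(path)
    }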
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json b/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
new file mode 100644
index 0000000000000..7be2c53bd5c9f
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
@@ -0,0 +1,97 @@
+{
+  "author": "Isaac Z. Schlueter  (https://blog.izs.me/)",
+  "name": "glob",
+  "description": "the most correct and second fastest glob implementation in JavaScript",
+  "version": "11.0.3",
+  "type": "module",
+  "tshy": {
+    "main": true,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "bin": "./dist/esm/bin.mjs",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-glob.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "npm run benchclean; git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts",
+    "profclean": "rm -f v8.log profile.txt",
+    "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts",
+    "prebench": "npm run prepare",
+    "bench": "bash benchmark.sh",
+    "preprof": "npm run prepare",
+    "prof": "bash prof.sh",
+    "benchclean": "node benchclean.cjs"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "dependencies": {
+    "foreground-child": "^3.3.1",
+    "jackspeak": "^4.1.1",
+    "minimatch": "^10.0.3",
+    "minipass": "^7.1.2",
+    "package-json-from-dist": "^1.0.0",
+    "path-scurry": "^2.0.0"
+  },
+  "devDependencies": {
+    "@types/node": "^24.0.1",
+    "memfs": "^4.17.2",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.5.3",
+    "rimraf": "^6.0.1",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
+  },
+  "tap": {
+    "before": "test/00-setup.ts"
+  },
+  "license": "ISC",
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "module": "./dist/esm/index.js"
+}
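The conditional exports above are what tshy generates for a dual package; a consumer-side sketch of what each module system resolves to (two separate files, shown together for illustration):

    // in an ESM file: "import" resolves to ./dist/esm/index.js
    import { glob } from 'glob'

    // in a CommonJS file: "require" resolves to ./dist/commonjs/index.js
    const { glob } = require('glob')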
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/LICENSE.md b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/LICENSE.md
new file mode 100644
index 0000000000000..8cb5cc6e616c0
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/LICENSE.md
@@ -0,0 +1,55 @@
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules. The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice. If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+**_As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim._**
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js
new file mode 100644
index 0000000000000..543412746cc8f
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js
@@ -0,0 +1,947 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0;
+const node_util_1 = require("node:util");
+// it's a tiny API, just cast it inline, it's fine
+//@ts-ignore
+const cliui_1 = __importDefault(require("@isaacs/cliui"));
+const node_path_1 = require("node:path");
+const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+exports.isConfigType = isConfigType;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isValidOption = (v, vo) => !!vo &&
+    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based only
+ * on its `type` and `multiple` property
+ */
+const isConfigOptionOfType = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    (0, exports.isConfigType)(o.type) &&
+    o.type === type &&
+    !!o.multiple === multi;
+exports.isConfigOptionOfType = isConfigOptionOfType;
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based on
+ * it having all valid properties
+ */
+const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi));
+exports.isConfigOption = isConfigOption;
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+const width = Math.min(process?.stdout?.columns ?? 80, 80);
+// indentation spaces from heading level
+const indent = (n) => (n - 1) * 2;
+const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+    .join(' ')
+    .trim()
+    .toUpperCase()
+    .replace(/ /g, '_');
+const toEnvVal = (value, delim = '\n') => {
+    const str = typeof value === 'string' ? value
+        : typeof value === 'boolean' ?
+            value ? '1'
+                : '0'
+            : typeof value === 'number' ? String(value)
+                : Array.isArray(value) ?
+                    value.map((v) => toEnvVal(v)).join(delim)
+                    : /* c8 ignore start */ undefined;
+    if (typeof str !== 'string') {
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
+    }
+    /* c8 ignore stop */
+    return str;
+};
+const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
+    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
+        : []
+    : type === 'string' ? env
+        : type === 'boolean' ? env === '1'
+            : +env.trim());
+const undefOrType = (v, t) => v === undefined || typeof v === t;
+const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+// print the value type, for error message reporting
+const valueType = (v) => typeof v === 'string' ? 'string'
+    : typeof v === 'boolean' ? 'boolean'
+        : typeof v === 'number' ? 'number'
+            : Array.isArray(v) ?
+                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
+                : `${v.type}${v.multiple ? '[]' : ''}`;
+const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
+    types[0]
+    : `(${types.join('|')})`;
+const validateFieldMeta = (field, fieldMeta) => {
+    if (fieldMeta) {
+        if (field.type !== undefined && field.type !== fieldMeta.type) {
+            throw new TypeError(`invalid type`, {
+                cause: {
+                    found: field.type,
+                    wanted: [fieldMeta.type, undefined],
+                },
+            });
+        }
+        if (field.multiple !== undefined &&
+            !!field.multiple !== fieldMeta.multiple) {
+            throw new TypeError(`invalid multiple`, {
+                cause: {
+                    found: field.multiple,
+                    wanted: [fieldMeta.multiple, undefined],
+                },
+            });
+        }
+        return fieldMeta;
+    }
+    if (!(0, exports.isConfigType)(field.type)) {
+        throw new TypeError(`invalid type`, {
+            cause: {
+                found: field.type,
+                wanted: ['string', 'number', 'boolean'],
+            },
+        });
+    }
+    return {
+        type: field.type,
+        multiple: !!field.multiple,
+    };
+};
+const validateField = (o, type, multiple) => {
+    const validateValidOptions = (def, validOptions) => {
+        if (!undefOrTypeArray(validOptions, type)) {
+            throw new TypeError('invalid validOptions', {
+                cause: {
+                    found: validOptions,
+                    wanted: valueType({ type, multiple: true }),
+                },
+            });
+        }
+        if (def !== undefined && validOptions !== undefined) {
+            const valid = Array.isArray(def) ?
+                def.every(v => validOptions.includes(v))
+                : validOptions.includes(def);
+            if (!valid) {
+                throw new TypeError('invalid default value not in validOptions', {
+                    cause: {
+                        found: def,
+                        wanted: validOptions,
+                    },
+                });
+            }
+        }
+    };
+    if (o.default !== undefined &&
+        !isValidValue(o.default, type, multiple)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: o.default,
+                wanted: valueType({ type, multiple }),
+            },
+        });
+    }
+    if ((0, exports.isConfigOptionOfType)(o, 'number', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'number', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if ((0, exports.isConfigOptionOfType)(o, 'string', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'string', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'boolean', true)) {
+        if (o.hint !== undefined) {
+            throw new TypeError('cannot provide hint for flag');
+        }
+        if (o.validOptions !== undefined) {
+            throw new TypeError('cannot provide validOptions for flag');
+        }
+    }
+    return o;
+};
+const toParseArgsOptionsConfig = (options) => {
+    return Object.entries(options).reduce((acc, [longOption, o]) => {
+        const p = {
+            type: 'string',
+            multiple: !!o.multiple,
+            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
+        };
+        const setNoBool = () => {
+            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
+                acc[`no-${longOption}`] = {
+                    type: 'boolean',
+                    multiple: !!o.multiple,
+                };
+            }
+        };
+        const setDefault = (def, fn) => {
+            if (def !== undefined) {
+                p.default = fn(def);
+            }
+        };
+        if ((0, exports.isConfigOption)(o, 'number', false)) {
+            setDefault(o.default, String);
+        }
+        else if ((0, exports.isConfigOption)(o, 'number', true)) {
+            setDefault(o.default, d => d.map(v => String(v)));
+        }
+        else if ((0, exports.isConfigOption)(o, 'string', false) ||
+            (0, exports.isConfigOption)(o, 'string', true)) {
+            setDefault(o.default, v => v);
+        }
+        else if ((0, exports.isConfigOption)(o, 'boolean', false) ||
+            (0, exports.isConfigOption)(o, 'boolean', true)) {
+            p.type = 'boolean';
+            setDefault(o.default, v => v);
+            setNoBool();
+        }
+        acc[longOption] = p;
+        return acc;
+    }, {});
+};
+/**
+ * Class returned by the {@link jack} function and all configuration
+ * definition methods.  This is what gets chained together.
+ */
+class Jack {
+    #configSet;
+    #shorts;
+    #options;
+    #fields = [];
+    #env;
+    #envPrefix;
+    #allowPositionals;
+    #usage;
+    #usageMarkdown;
+    constructor(options = {}) {
+        this.#options = options;
+        this.#allowPositionals = options.allowPositionals !== false;
+        this.#env =
+            this.#options.env === undefined ? process.env : this.#options.env;
+        this.#envPrefix = options.envPrefix;
+        // We need to fib a little, because it's always the same object, but it
+        // starts out as having an empty config set.  Then each method that adds
+        // fields returns `this as Jack`
+        this.#configSet = Object.create(null);
+        this.#shorts = Object.create(null);
+    }
+    /**
+     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
+     * but also including `description` and `short` fields, if set.
+     */
+    get definitions() {
+        return this.#configSet;
+    }
+    /** map of `{ <short>: <long> }` strings for each short name defined */
+    get shorts() {
+        return this.#shorts;
+    }
+    /**
+     * options passed to the {@link Jack} constructor
+     */
+    get jackOptions() {
+        return this.#options;
+    }
+    /**
+     * the data used to generate {@link Jack#usage} and
+     * {@link Jack#usageMarkdown} content.
+     */
+    get usageFields() {
+        return this.#fields;
+    }
+    /**
+     * Set the default value (which will still be overridden by env or cli)
+     * as if from a parsed config file. The optional `source` param, if
+     * provided, will be included in error messages if a value is invalid or
+     * unknown.
+     */
+    setConfigValues(values, source = '') {
+        try {
+            this.validate(values);
+        }
+        catch (er) {
+            if (source && er instanceof Error) {
+                /* c8 ignore next */
+                const cause = typeof er.cause === 'object' ? er.cause : {};
+                er.cause = { ...cause, path: source };
+                Error.captureStackTrace(er, this.setConfigValues);
+            }
+            throw er;
+        }
+        for (const [field, value] of Object.entries(values)) {
+            const my = this.#configSet[field];
+            // already validated, just for TS's benefit
+            /* c8 ignore start */
+            if (!my) {
+                throw new Error('unexpected field in config set: ' + field, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        found: field,
+                    },
+                });
+            }
+            /* c8 ignore stop */
+            my.default = value;
+        }
+        return this;
+    }
+    /**
+     * Parse a string of arguments, and return the resulting
+     * `{ values, positionals }` object.
+     *
+     * If an {@link JackOptions#envPrefix} is set, then it will read default
+     * values from the environment, and write the resulting values back
+     * to the environment as well.
+     *
+     * Environment values always take precedence over any other value, except
+     * an explicit CLI setting.
+     */
+    parse(args = process.argv) {
+        this.loadEnvDefaults();
+        const p = this.parseRaw(args);
+        this.applyDefaults(p);
+        this.writeEnv(p);
+        return p;
+    }
+    loadEnvDefaults() {
+        if (this.#envPrefix) {
+            for (const [field, my] of Object.entries(this.#configSet)) {
+                const ek = toEnvKey(this.#envPrefix, field);
+                const env = this.#env[ek];
+                if (env !== undefined) {
+                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
+                }
+            }
+        }
+    }
+    applyDefaults(p) {
+        for (const [field, c] of Object.entries(this.#configSet)) {
+            if (c.default !== undefined && !(field in p.values)) {
+                //@ts-ignore
+                p.values[field] = c.default;
+            }
+        }
+    }
+    /**
+     * Only parse the command line arguments passed in.
+     * Does not strip off the `node script.js` bits, so it must be just the
+     * arguments you wish to have parsed.
+     * Does not read from or write to the environment, or set defaults.
+     */
+    parseRaw(args) {
+        if (args === process.argv) {
+            args = args.slice(process._eval !== undefined ? 1 : 2);
+        }
+        const result = (0, node_util_1.parseArgs)({
+            args,
+            options: toParseArgsOptionsConfig(this.#configSet),
+            // always strict, but using our own logic
+            strict: false,
+            allowPositionals: this.#allowPositionals,
+            tokens: true,
+        });
+        const p = {
+            values: {},
+            positionals: [],
+        };
+        for (const token of result.tokens) {
+            if (token.kind === 'positional') {
+                p.positionals.push(token.value);
+                if (this.#options.stopAtPositional ||
+                    this.#options.stopAtPositionalTest?.(token.value)) {
+                    p.positionals.push(...args.slice(token.index + 1));
+                    break;
+                }
+            }
+            else if (token.kind === 'option') {
+                let value = undefined;
+                if (token.name.startsWith('no-')) {
+                    const my = this.#configSet[token.name];
+                    const pname = token.name.substring('no-'.length);
+                    const pos = this.#configSet[pname];
+                    if (pos &&
+                        pos.type === 'boolean' &&
+                        (!my ||
+                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
+                        value = false;
+                        token.name = pname;
+                    }
+                }
+                const my = this.#configSet[token.name];
+                if (!my) {
+                    throw new Error(`Unknown option '${token.rawName}'. ` +
+                        `To specify a positional argument starting with a '-', ` +
+                        `place it at the end of the command after '--', as in ` +
+                        `'-- ${token.rawName}'`, {
+                        cause: {
+                            code: 'JACKSPEAK',
+                            found: token.rawName + (token.value ? `=${token.value}` : ''),
+                        },
+                    });
+                }
+                if (value === undefined) {
+                    if (token.value === undefined) {
+                        if (my.type !== 'boolean') {
+                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
+                                cause: {
+                                    code: 'JACKSPEAK',
+                                    name: token.rawName,
+                                    wanted: valueType(my),
+                                },
+                            });
+                        }
+                        value = true;
+                    }
+                    else {
+                        if (my.type === 'boolean') {
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
+                        }
+                        if (my.type === 'string') {
+                            value = token.value;
+                        }
+                        else {
+                            value = +token.value;
+                            if (value !== value) {
+                                throw new Error(`Invalid value '${token.value}' provided for ` +
+                                    `'${token.rawName}' option, expected number`, {
+                                    cause: {
+                                        code: 'JACKSPEAK',
+                                        name: token.rawName,
+                                        found: token.value,
+                                        wanted: 'number',
+                                    },
+                                });
+                            }
+                        }
+                    }
+                }
+                if (my.multiple) {
+                    const pv = p.values;
+                    const tn = pv[token.name] ?? [];
+                    pv[token.name] = tn;
+                    tn.push(value);
+                }
+                else {
+                    const pv = p.values;
+                    pv[token.name] = value;
+                }
+            }
+        }
+        for (const [field, value] of Object.entries(p.values)) {
+            const valid = this.#configSet[field]?.validate;
+            const validOptions = this.#configSet[field]?.validOptions;
+            const cause = validOptions && !isValidOption(value, validOptions) ?
+                { name: field, found: value, validOptions }
+                : valid && !valid(value) ? { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
+            }
+        }
+        return p;
+    }
+    /**
+     * do not set fields as 'no-foo' if 'foo' exists and both are bools
+     * just set foo.
+     */
+    #noNoFields(f, val, s = f) {
+        if (!f.startsWith('no-') || typeof val !== 'boolean')
+            return;
+        const yes = f.substring('no-'.length);
+        // recurse so we get the core config key we care about.
+        this.#noNoFields(yes, val, s);
+        if (this.#configSet[yes]?.type === 'boolean') {
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
+        }
+    }
+    /**
+     * Validate that any arbitrary object is a valid configuration `values`
+     * object.  Useful when loading config files or other sources.
+     */
+    validate(o) {
+        if (!o || typeof o !== 'object') {
+            throw new Error('Invalid config: not an object', {
+                cause: { code: 'JACKSPEAK', found: o },
+            });
+        }
+        const opts = o;
+        for (const field in o) {
+            const value = opts[field];
+            /* c8 ignore next - for TS */
+            if (value === undefined)
+                continue;
+            this.#noNoFields(field, value);
+            const config = this.#configSet[field];
+            if (!config) {
+                throw new Error(`Unknown config option: ${field}`, {
+                    cause: { code: 'JACKSPEAK', found: field },
+                });
+            }
+            if (!isValidValue(value, config.type, !!config.multiple)) {
+                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        name: field,
+                        found: value,
+                        wanted: valueType(config),
+                    },
+                });
+            }
+            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
+                { name: field, found: value, validOptions: config.validOptions }
+                : config.validate && !config.validate(value) ?
+                    { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid config value for ${field}: ${value}`, {
+                    cause: { ...cause, code: 'JACKSPEAK' },
+                });
+            }
+        }
+    }
+    writeEnv(p) {
+        if (!this.#env || !this.#envPrefix)
+            return;
+        for (const [field, value] of Object.entries(p.values)) {
+            const my = this.#configSet[field];
+            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
+        }
+    }
+    /**
+     * Add a heading to the usage output banner
+     */
+    heading(text, level, { pre = false } = {}) {
+        if (level === undefined) {
+            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
+        }
+        this.#fields.push({ type: 'heading', text, level, pre });
+        return this;
+    }
+    /**
+     * Add a long-form description to the usage output at this position.
+     */
+    description(text, { pre } = {}) {
+        this.#fields.push({ type: 'description', text, pre });
+        return this;
+    }
+    /**
+     * Add one or more number fields.
+     */
+    num(fields) {
+        return this.#addFieldsWith(fields, 'number', false);
+    }
+    /**
+     * Add one or more multiple number fields.
+     */
+    numList(fields) {
+        return this.#addFieldsWith(fields, 'number', true);
+    }
+    /**
+     * Add one or more string option fields.
+     */
+    opt(fields) {
+        return this.#addFieldsWith(fields, 'string', false);
+    }
+    /**
+     * Add one or more multiple string option fields.
+     */
+    optList(fields) {
+        return this.#addFieldsWith(fields, 'string', true);
+    }
+    /**
+     * Add one or more flag fields.
+     */
+    flag(fields) {
+        return this.#addFieldsWith(fields, 'boolean', false);
+    }
+    /**
+     * Add one or more multiple flag fields.
+     */
+    flagList(fields) {
+        return this.#addFieldsWith(fields, 'boolean', true);
+    }
+    /**
+     * Generic field definition method. Similar to flag/flagList/number/etc,
+     * but you must specify the `type` (and optionally `multiple` and `delim`)
+     * fields on each one, or Jack won't know how to define them.
+     */
+    addFields(fields) {
+        return this.#addFields(this, fields);
+    }
+    #addFieldsWith(fields, type, multiple) {
+        return this.#addFields(this, fields, {
+            type,
+            multiple,
+        });
+    }
+    #addFields(next, fields, opt) {
+        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
+            this.#validateName(name, field);
+            const { type, multiple } = validateFieldMeta(field, opt);
+            const value = { ...field, type, multiple };
+            validateField(value, type, multiple);
+            next.#fields.push({ type: 'config', name, value });
+            return [name, value];
+        })));
+        return next;
+    }
+    #validateName(name, field) {
+        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
+            throw new TypeError(`Invalid option name: ${name}, ` +
+                `must be '-' delimited ASCII alphanumeric`);
+        }
+        if (this.#configSet[name]) {
+            throw new TypeError(`Cannot redefine option ${field}`);
+        }
+        if (this.#shorts[name]) {
+            throw new TypeError(`Cannot redefine option ${name}, already ` +
+                `in use for ${this.#shorts[name]}`);
+        }
+        if (field.short) {
+            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    'must be 1 ASCII alphanumeric character');
+            }
+            if (this.#shorts[field.short]) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    `already in use for ${this.#shorts[field.short]}`);
+            }
+            this.#shorts[field.short] = name;
+            this.#shorts[name] = name;
+        }
+    }
+    /**
+     * Return the usage banner for the given configuration
+     */
+    usage() {
+        if (this.#usage)
+            return this.#usage;
+        let headingLevel = 1;
+        //@ts-ignore
+        const ui = (0, cliui_1.default)({ width });
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            ui.div({
+                padding: [0, 0, 0, 0],
+                text: normalize(first.text),
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
+        if (this.#options.usage) {
+            ui.div({
+                text: this.#options.usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        else {
+            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            ui.div({
+                text: usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: '' });
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            const print = normalize(maybeDesc.text, maybeDesc.pre);
+            start++;
+            ui.div({ padding: [0, 0, 0, 0], text: print });
+            ui.div({ padding: [0, 0, 0, 0], text: '' });
+        }
+        const { rows, maxWidth } = this.#usageRows(start);
+        // every heading/description after the first gets indented by 2
+        // extra spaces.
+        for (const row of rows) {
+            if (row.left) {
+                // If the row is too long, don't wrap it
+                // Bump the right-hand side down a line to make room
+                const configIndent = indent(Math.max(headingLevel, 2));
+                if (row.left.length > maxWidth - 3) {
+                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
+                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
+                }
+                else {
+                    ui.div({
+                        text: row.left,
+                        padding: [0, 1, 0, configIndent],
+                        width: maxWidth,
+                    }, { padding: [0, 0, 0, 0], text: row.text });
+                }
+                if (row.skipLine) {
+                    ui.div({ padding: [0, 0, 0, 0], text: '' });
+                }
+            }
+            else {
+                if (isHeading(row)) {
+                    const { level } = row;
+                    headingLevel = level;
+                    // only h1 and h2 have bottom padding
+                    // h3-h6 do not
+                    const b = level <= 2 ? 1 : 0;
+                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
+                }
+                else {
+                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
+                }
+            }
+        }
+        return (this.#usage = ui.toString());
+    }
+    /**
+     * Return the usage banner markdown for the given configuration
+     */
+    usageMarkdown() {
+        if (this.#usageMarkdown)
+            return this.#usageMarkdown;
+        const out = [];
+        let headingLevel = 1;
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            out.push(`# ${normalizeOneLine(first.text)}`);
+        }
+        out.push('Usage:');
+        if (this.#options.usage) {
+            out.push(normalizeMarkdown(this.#options.usage, true));
+        }
+        else {
+            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            out.push(normalizeMarkdown(usage, true));
+        }
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
+            start++;
+        }
+        const { rows } = this.#usageRows(start);
+        // heading level in markdown is number of # ahead of text
+        for (const row of rows) {
+            if (row.left) {
+                out.push('#'.repeat(headingLevel + 1) +
+                    ' ' +
+                    normalizeOneLine(row.left, true));
+                if (row.text)
+                    out.push(normalizeMarkdown(row.text));
+            }
+            else if (isHeading(row)) {
+                const { level } = row;
+                headingLevel = level;
+                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
+            }
+            else {
+                out.push(normalizeMarkdown(row.text, !!row.pre));
+            }
+        }
+        return (this.#usageMarkdown = out.join('\n\n') + '\n');
+    }
+    #usageRows(start) {
+        // turn each config type into a row, and figure out the width of the
+        // left hand indentation for the option descriptions.
+        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
+        let maxWidth = 8;
+        let prev = undefined;
+        const rows = [];
+        for (const field of this.#fields.slice(start)) {
+            if (field.type !== 'config') {
+                if (prev?.type === 'config')
+                    prev.skipLine = true;
+                prev = undefined;
+                field.text = normalize(field.text, !!field.pre);
+                rows.push(field);
+                continue;
+            }
+            const { value } = field;
+            const desc = value.description || '';
+            const mult = value.multiple ? 'Can be set multiple times' : '';
+            const opts = value.validOptions?.length ?
+                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
+                : '';
+            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
+            const extra = [opts, mult].join(dmDelim).trim();
+            const text = (normalize(desc) + dmDelim + extra).trim();
+            const hint = value.hint ||
+                (value.type === 'number' ? 'n'
+                    : value.type === 'string' ? field.name
+                        : undefined);
+            const short = !value.short ? ''
+                : value.type === 'boolean' ? `-${value.short} `
+                    : `-${value.short}<${hint}> `;
+            const left = value.type === 'boolean' ?
+                `${short}--${field.name}`
+                : `${short}--${field.name}=<${hint}>`;
+            const row = { text, left, type: 'config' };
+            if (text.length > width - maxMax) {
+                row.skipLine = true;
+            }
+            if (prev && left.length > maxMax)
+                prev.skipLine = true;
+            prev = row;
+            const len = left.length + 4;
+            if (len > maxWidth && len < maxMax) {
+                maxWidth = len;
+            }
+            rows.push(row);
+        }
+        return { rows, maxWidth };
+    }
+    /**
+     * Return the configuration options as a plain object
+     */
+    toJSON() {
+        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
+            field,
+            {
+                type: def.type,
+                ...(def.multiple ? { multiple: true } : {}),
+                ...(def.delim ? { delim: def.delim } : {}),
+                ...(def.short ? { short: def.short } : {}),
+                ...(def.description ?
+                    { description: normalize(def.description) }
+                    : {}),
+                ...(def.validate ? { validate: def.validate } : {}),
+                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
+                ...(def.default !== undefined ? { default: def.default } : {}),
+                ...(def.hint ? { hint: def.hint } : {}),
+            },
+        ]));
+    }
+    /**
+     * Custom printer for `util.inspect`
+     */
+    [node_util_1.inspect.custom](_, options) {
+        return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`;
+    }
+}
+exports.Jack = Jack;
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+const jack = (options = {}) => new Jack(options);
+exports.jack = jack;
+// Unwrap and un-indent, so we can wrap description
+// strings however makes them look nice in the code.
+const normalize = (s, pre = false) => {
+    if (pre)
+        // prepend a ZWSP to each line so cliui doesn't strip it.
+        return s
+            .split('\n')
+            .map(l => `\u200b${l}`)
+            .join('\n');
+    return s
+        .split(/^\s*```\s*$/gm)
+        .map((s, i) => {
+        if (i % 2 === 1) {
+            if (!s.trim()) {
+                return `\`\`\`\n\`\`\`\n`;
+            }
+            // outdent the ``` blocks, but preserve whitespace otherwise.
+            const split = s.split('\n');
+            // throw out the \n at the start and end
+            split.pop();
+            split.shift();
+            const si = split.reduce((shortest, l) => {
+                /* c8 ignore next */
+                const ind = l.match(/^\s*/)?.[0] ?? '';
+                if (ind.length)
+                    return Math.min(ind.length, shortest);
+                else
+                    return shortest;
+            }, Infinity);
+            /* c8 ignore next */
+            const i = isFinite(si) ? si : 0;
+            return ('\n```\n' +
+                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
+                '\n```\n');
+        }
+        return (s
+            // remove single line breaks, except for lists
+            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
+            // normalize mid-line whitespace
+            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
+            // two line breaks are enough
+            .replace(/\n{3,}/g, '\n\n')
+            // remove any spaces at the start of a line
+            .replace(/\n[ \t]+/g, '\n')
+            .trim());
+    })
+        .join('\n');
+};
+// normalize for markdown printing, remove leading spaces on lines
+const normalizeMarkdown = (s, pre = false) => {
+    const n = normalize(s, pre).replace(/\\/g, '\\\\');
+    return pre ?
+        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
+        : n.replace(/\n +/g, '\n').trim();
+};
+const normalizeOneLine = (s, pre = false) => {
+    const n = normalize(s, pre)
+        .replace(/[\s\u200b]+/g, ' ')
+        .trim();
+    return pre ? `\`${n}\`` : n;
+};
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js
new file mode 100644
index 0000000000000..b959f5126423c
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js
@@ -0,0 +1,936 @@
+import { inspect, parseArgs, } from 'node:util';
+// it's a tiny API, just cast it inline, it's fine
+//@ts-ignore
+import cliui from '@isaacs/cliui';
+import { basename } from 'node:path';
+export const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isValidOption = (v, vo) => !!vo &&
+    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based only
+ * on its `type` and `multiple` property
+ */
+export const isConfigOptionOfType = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    isConfigType(o.type) &&
+    o.type === type &&
+    !!o.multiple === multi;
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based on
+ * it having all valid properties
+ */
+export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi));
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+const width = Math.min(process?.stdout?.columns ?? 80, 80);
+// indentation spaces from heading level
+const indent = (n) => (n - 1) * 2;
+const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+    .join(' ')
+    .trim()
+    .toUpperCase()
+    .replace(/ /g, '_');
+const toEnvVal = (value, delim = '\n') => {
+    const str = typeof value === 'string' ? value
+        : typeof value === 'boolean' ?
+            value ? '1'
+                : '0'
+            : typeof value === 'number' ? String(value)
+                : Array.isArray(value) ?
+                    value.map((v) => toEnvVal(v)).join(delim)
+                    : /* c8 ignore start */ undefined;
+    if (typeof str !== 'string') {
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
+    }
+    /* c8 ignore stop */
+    return str;
+};
+const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
+    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
+        : []
+    : type === 'string' ? env
+        : type === 'boolean' ? env === '1'
+            : +env.trim());
+const undefOrType = (v, t) => v === undefined || typeof v === t;
+const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+// print the value type, for error message reporting
+const valueType = (v) => typeof v === 'string' ? 'string'
+    : typeof v === 'boolean' ? 'boolean'
+        : typeof v === 'number' ? 'number'
+            : Array.isArray(v) ?
+                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
+                : `${v.type}${v.multiple ? '[]' : ''}`;
+const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
+    types[0]
+    : `(${types.join('|')})`;
+const validateFieldMeta = (field, fieldMeta) => {
+    if (fieldMeta) {
+        if (field.type !== undefined && field.type !== fieldMeta.type) {
+            throw new TypeError(`invalid type`, {
+                cause: {
+                    found: field.type,
+                    wanted: [fieldMeta.type, undefined],
+                },
+            });
+        }
+        if (field.multiple !== undefined &&
+            !!field.multiple !== fieldMeta.multiple) {
+            throw new TypeError(`invalid multiple`, {
+                cause: {
+                    found: field.multiple,
+                    wanted: [fieldMeta.multiple, undefined],
+                },
+            });
+        }
+        return fieldMeta;
+    }
+    if (!isConfigType(field.type)) {
+        throw new TypeError(`invalid type`, {
+            cause: {
+                found: field.type,
+                wanted: ['string', 'number', 'boolean'],
+            },
+        });
+    }
+    return {
+        type: field.type,
+        multiple: !!field.multiple,
+    };
+};
+const validateField = (o, type, multiple) => {
+    const validateValidOptions = (def, validOptions) => {
+        if (!undefOrTypeArray(validOptions, type)) {
+            throw new TypeError('invalid validOptions', {
+                cause: {
+                    found: validOptions,
+                    wanted: valueType({ type, multiple: true }),
+                },
+            });
+        }
+        if (def !== undefined && validOptions !== undefined) {
+            const valid = Array.isArray(def) ?
+                def.every(v => validOptions.includes(v))
+                : validOptions.includes(def);
+            if (!valid) {
+                throw new TypeError('invalid default value not in validOptions', {
+                    cause: {
+                        found: def,
+                        wanted: validOptions,
+                    },
+                });
+            }
+        }
+    };
+    if (o.default !== undefined &&
+        !isValidValue(o.default, type, multiple)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: o.default,
+                wanted: valueType({ type, multiple }),
+            },
+        });
+    }
+    if (isConfigOptionOfType(o, 'number', false) ||
+        isConfigOptionOfType(o, 'number', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if (isConfigOptionOfType(o, 'string', false) ||
+        isConfigOptionOfType(o, 'string', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if (isConfigOptionOfType(o, 'boolean', false) ||
+        isConfigOptionOfType(o, 'boolean', true)) {
+        if (o.hint !== undefined) {
+            throw new TypeError('cannot provide hint for flag');
+        }
+        if (o.validOptions !== undefined) {
+            throw new TypeError('cannot provide validOptions for flag');
+        }
+    }
+    return o;
+};
+const toParseArgsOptionsConfig = (options) => {
+    return Object.entries(options).reduce((acc, [longOption, o]) => {
+        const p = {
+            type: 'string',
+            multiple: !!o.multiple,
+            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
+        };
+        const setNoBool = () => {
+            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
+                acc[`no-${longOption}`] = {
+                    type: 'boolean',
+                    multiple: !!o.multiple,
+                };
+            }
+        };
+        const setDefault = (def, fn) => {
+            if (def !== undefined) {
+                p.default = fn(def);
+            }
+        };
+        if (isConfigOption(o, 'number', false)) {
+            setDefault(o.default, String);
+        }
+        else if (isConfigOption(o, 'number', true)) {
+            setDefault(o.default, d => d.map(v => String(v)));
+        }
+        else if (isConfigOption(o, 'string', false) ||
+            isConfigOption(o, 'string', true)) {
+            setDefault(o.default, v => v);
+        }
+        else if (isConfigOption(o, 'boolean', false) ||
+            isConfigOption(o, 'boolean', true)) {
+            p.type = 'boolean';
+            setDefault(o.default, v => v);
+            setNoBool();
+        }
+        acc[longOption] = p;
+        return acc;
+    }, {});
+};
+/**
+ * Class returned by the {@link jack} function and all configuration
+ * definition methods.  This is what gets chained together.
+ */
+export class Jack {
+    #configSet;
+    #shorts;
+    #options;
+    #fields = [];
+    #env;
+    #envPrefix;
+    #allowPositionals;
+    #usage;
+    #usageMarkdown;
+    constructor(options = {}) {
+        this.#options = options;
+        this.#allowPositionals = options.allowPositionals !== false;
+        this.#env =
+            this.#options.env === undefined ? process.env : this.#options.env;
+        this.#envPrefix = options.envPrefix;
+        // We need to fib a little, because it's always the same object, but it
+        // starts out as having an empty config set.  Then each method that adds
+        // fields returns `this as Jack`
+        this.#configSet = Object.create(null);
+        this.#shorts = Object.create(null);
+    }
+    /**
+     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
+     * but also including `description` and `short` fields, if set.
+     */
+    get definitions() {
+        return this.#configSet;
+    }
+    /** map of `{ <short>: <long> }` strings for each short name defined */
+    get shorts() {
+        return this.#shorts;
+    }
+    /**
+     * options passed to the {@link Jack} constructor
+     */
+    get jackOptions() {
+        return this.#options;
+    }
+    /**
+     * the data used to generate {@link Jack#usage} and
+     * {@link Jack#usageMarkdown} content.
+     */
+    get usageFields() {
+        return this.#fields;
+    }
+    /**
+     * Set the default value (which will still be overridden by env or cli)
+     * as if from a parsed config file. The optional `source` param, if
+     * provided, will be included in error messages if a value is invalid or
+     * unknown.
+     */
+    setConfigValues(values, source = '') {
+        try {
+            this.validate(values);
+        }
+        catch (er) {
+            if (source && er instanceof Error) {
+                /* c8 ignore next */
+                const cause = typeof er.cause === 'object' ? er.cause : {};
+                er.cause = { ...cause, path: source };
+                Error.captureStackTrace(er, this.setConfigValues);
+            }
+            throw er;
+        }
+        for (const [field, value] of Object.entries(values)) {
+            const my = this.#configSet[field];
+            // already validated, just for TS's benefit
+            /* c8 ignore start */
+            if (!my) {
+                throw new Error('unexpected field in config set: ' + field, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        found: field,
+                    },
+                });
+            }
+            /* c8 ignore stop */
+            my.default = value;
+        }
+        return this;
+    }
+    /**
+     * Parse a string of arguments, and return the resulting
+     * `{ values, positionals }` object.
+     *
+     * If an {@link JackOptions#envPrefix} is set, then it will read default
+     * values from the environment, and write the resulting values back
+     * to the environment as well.
+     *
+     * Environment values always take precedence over any other value, except
+     * an explicit CLI setting.
+     */
+    parse(args = process.argv) {
+        this.loadEnvDefaults();
+        const p = this.parseRaw(args);
+        this.applyDefaults(p);
+        this.writeEnv(p);
+        return p;
+    }
+    loadEnvDefaults() {
+        if (this.#envPrefix) {
+            for (const [field, my] of Object.entries(this.#configSet)) {
+                const ek = toEnvKey(this.#envPrefix, field);
+                const env = this.#env[ek];
+                if (env !== undefined) {
+                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
+                }
+            }
+        }
+    }
+    applyDefaults(p) {
+        for (const [field, c] of Object.entries(this.#configSet)) {
+            if (c.default !== undefined && !(field in p.values)) {
+                //@ts-ignore
+                p.values[field] = c.default;
+            }
+        }
+    }
+    /**
+     * Only parse the command line arguments passed in.
+     * Does not strip off the `node script.js` bits, so it must be just the
+     * arguments you wish to have parsed.
+     * Does not read from or write to the environment, or set defaults.
+     */
+    parseRaw(args) {
+        if (args === process.argv) {
+            args = args.slice(process._eval !== undefined ? 1 : 2);
+        }
+        const result = parseArgs({
+            args,
+            options: toParseArgsOptionsConfig(this.#configSet),
+            // always strict, but using our own logic
+            strict: false,
+            allowPositionals: this.#allowPositionals,
+            tokens: true,
+        });
+        const p = {
+            values: {},
+            positionals: [],
+        };
+        for (const token of result.tokens) {
+            if (token.kind === 'positional') {
+                p.positionals.push(token.value);
+                if (this.#options.stopAtPositional ||
+                    this.#options.stopAtPositionalTest?.(token.value)) {
+                    p.positionals.push(...args.slice(token.index + 1));
+                    break;
+                }
+            }
+            else if (token.kind === 'option') {
+                let value = undefined;
+                if (token.name.startsWith('no-')) {
+                    const my = this.#configSet[token.name];
+                    const pname = token.name.substring('no-'.length);
+                    const pos = this.#configSet[pname];
+                    if (pos &&
+                        pos.type === 'boolean' &&
+                        (!my ||
+                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
+                        value = false;
+                        token.name = pname;
+                    }
+                }
+                const my = this.#configSet[token.name];
+                if (!my) {
+                    throw new Error(`Unknown option '${token.rawName}'. ` +
+                        `To specify a positional argument starting with a '-', ` +
+                        `place it at the end of the command after '--', as in ` +
+                        `'-- ${token.rawName}'`, {
+                        cause: {
+                            code: 'JACKSPEAK',
+                            found: token.rawName + (token.value ? `=${token.value}` : ''),
+                        },
+                    });
+                }
+                if (value === undefined) {
+                    if (token.value === undefined) {
+                        if (my.type !== 'boolean') {
+                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
+                                cause: {
+                                    code: 'JACKSPEAK',
+                                    name: token.rawName,
+                                    wanted: valueType(my),
+                                },
+                            });
+                        }
+                        value = true;
+                    }
+                    else {
+                        if (my.type === 'boolean') {
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
+                        }
+                        if (my.type === 'string') {
+                            value = token.value;
+                        }
+                        else {
+                            value = +token.value;
+                            if (value !== value) {
+                                throw new Error(`Invalid value '${token.value}' provided for ` +
+                                    `'${token.rawName}' option, expected number`, {
+                                    cause: {
+                                        code: 'JACKSPEAK',
+                                        name: token.rawName,
+                                        found: token.value,
+                                        wanted: 'number',
+                                    },
+                                });
+                            }
+                        }
+                    }
+                }
+                if (my.multiple) {
+                    const pv = p.values;
+                    const tn = pv[token.name] ?? [];
+                    pv[token.name] = tn;
+                    tn.push(value);
+                }
+                else {
+                    const pv = p.values;
+                    pv[token.name] = value;
+                }
+            }
+        }
+        for (const [field, value] of Object.entries(p.values)) {
+            const valid = this.#configSet[field]?.validate;
+            const validOptions = this.#configSet[field]?.validOptions;
+            const cause = validOptions && !isValidOption(value, validOptions) ?
+                { name: field, found: value, validOptions }
+                : valid && !valid(value) ? { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
+            }
+        }
+        return p;
+    }
+    /**
+     * do not set fields as 'no-foo' if 'foo' exists and both are bools
+     * just set foo.
+     */
+    #noNoFields(f, val, s = f) {
+        if (!f.startsWith('no-') || typeof val !== 'boolean')
+            return;
+        const yes = f.substring('no-'.length);
+        // recurse so we get the core config key we care about.
+        this.#noNoFields(yes, val, s);
+        if (this.#configSet[yes]?.type === 'boolean') {
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
+        }
+    }
+    /**
+     * Validate that any arbitrary object is a valid configuration `values`
+     * object.  Useful when loading config files or other sources.
+     */
+    validate(o) {
+        if (!o || typeof o !== 'object') {
+            throw new Error('Invalid config: not an object', {
+                cause: { code: 'JACKSPEAK', found: o },
+            });
+        }
+        const opts = o;
+        for (const field in o) {
+            const value = opts[field];
+            /* c8 ignore next - for TS */
+            if (value === undefined)
+                continue;
+            this.#noNoFields(field, value);
+            const config = this.#configSet[field];
+            if (!config) {
+                throw new Error(`Unknown config option: ${field}`, {
+                    cause: { code: 'JACKSPEAK', found: field },
+                });
+            }
+            if (!isValidValue(value, config.type, !!config.multiple)) {
+                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        name: field,
+                        found: value,
+                        wanted: valueType(config),
+                    },
+                });
+            }
+            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
+                { name: field, found: value, validOptions: config.validOptions }
+                : config.validate && !config.validate(value) ?
+                    { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid config value for ${field}: ${value}`, {
+                    cause: { ...cause, code: 'JACKSPEAK' },
+                });
+            }
+        }
+    }
+    writeEnv(p) {
+        if (!this.#env || !this.#envPrefix)
+            return;
+        for (const [field, value] of Object.entries(p.values)) {
+            const my = this.#configSet[field];
+            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
+        }
+    }
+    /**
+     * Add a heading to the usage output banner
+     */
+    heading(text, level, { pre = false } = {}) {
+        if (level === undefined) {
+            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
+        }
+        this.#fields.push({ type: 'heading', text, level, pre });
+        return this;
+    }
+    /**
+     * Add a long-form description to the usage output at this position.
+     */
+    description(text, { pre } = {}) {
+        this.#fields.push({ type: 'description', text, pre });
+        return this;
+    }
+    /**
+     * Add one or more number fields.
+     */
+    num(fields) {
+        return this.#addFieldsWith(fields, 'number', false);
+    }
+    /**
+     * Add one or more multiple number fields.
+     */
+    numList(fields) {
+        return this.#addFieldsWith(fields, 'number', true);
+    }
+    /**
+     * Add one or more string option fields.
+     */
+    opt(fields) {
+        return this.#addFieldsWith(fields, 'string', false);
+    }
+    /**
+     * Add one or more multiple string option fields.
+     */
+    optList(fields) {
+        return this.#addFieldsWith(fields, 'string', true);
+    }
+    /**
+     * Add one or more flag fields.
+     */
+    flag(fields) {
+        return this.#addFieldsWith(fields, 'boolean', false);
+    }
+    /**
+     * Add one or more multiple flag fields.
+     */
+    flagList(fields) {
+        return this.#addFieldsWith(fields, 'boolean', true);
+    }
+    /**
+     * Generic field definition method. Similar to flag/flagList/number/etc,
+     * but you must specify the `type` (and optionally `multiple` and `delim`)
+     * fields on each one, or Jack won't know how to define them.
+     */
+    addFields(fields) {
+        return this.#addFields(this, fields);
+    }
+    #addFieldsWith(fields, type, multiple) {
+        return this.#addFields(this, fields, {
+            type,
+            multiple,
+        });
+    }
+    #addFields(next, fields, opt) {
+        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
+            this.#validateName(name, field);
+            const { type, multiple } = validateFieldMeta(field, opt);
+            const value = { ...field, type, multiple };
+            validateField(value, type, multiple);
+            next.#fields.push({ type: 'config', name, value });
+            return [name, value];
+        })));
+        return next;
+    }
+    #validateName(name, field) {
+        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
+            throw new TypeError(`Invalid option name: ${name}, ` +
+                `must be '-' delimited ASCII alphanumeric`);
+        }
+        if (this.#configSet[name]) {
+            throw new TypeError(`Cannot redefine option ${name}`);
+        }
+        if (this.#shorts[name]) {
+            throw new TypeError(`Cannot redefine option ${name}, already ` +
+                `in use for ${this.#shorts[name]}`);
+        }
+        if (field.short) {
+            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    'must be 1 ASCII alphanumeric character');
+            }
+            if (this.#shorts[field.short]) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    `already in use for ${this.#shorts[field.short]}`);
+            }
+            this.#shorts[field.short] = name;
+            this.#shorts[name] = name;
+        }
+    }
+    /**
+     * Return the usage banner for the given configuration
+     */
+    usage() {
+        if (this.#usage)
+            return this.#usage;
+        let headingLevel = 1;
+        //@ts-ignore
+        const ui = cliui({ width });
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            ui.div({
+                padding: [0, 0, 0, 0],
+                text: normalize(first.text),
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
+        if (this.#options.usage) {
+            ui.div({
+                text: this.#options.usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        else {
+            const cmd = basename(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            ui.div({
+                text: usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: '' });
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            const print = normalize(maybeDesc.text, maybeDesc.pre);
+            start++;
+            ui.div({ padding: [0, 0, 0, 0], text: print });
+            ui.div({ padding: [0, 0, 0, 0], text: '' });
+        }
+        const { rows, maxWidth } = this.#usageRows(start);
+        // every heading/description after the first gets indented by 2
+        // extra spaces.
+        for (const row of rows) {
+            if (row.left) {
+                // If the row is too long, don't wrap it
+                // Bump the right-hand side down a line to make room
+                const configIndent = indent(Math.max(headingLevel, 2));
+                if (row.left.length > maxWidth - 3) {
+                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
+                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
+                }
+                else {
+                    ui.div({
+                        text: row.left,
+                        padding: [0, 1, 0, configIndent],
+                        width: maxWidth,
+                    }, { padding: [0, 0, 0, 0], text: row.text });
+                }
+                if (row.skipLine) {
+                    ui.div({ padding: [0, 0, 0, 0], text: '' });
+                }
+            }
+            else {
+                if (isHeading(row)) {
+                    const { level } = row;
+                    headingLevel = level;
+                    // only h1 and h2 have bottom padding
+                    // h3-h6 do not
+                    const b = level <= 2 ? 1 : 0;
+                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
+                }
+                else {
+                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
+                }
+            }
+        }
+        return (this.#usage = ui.toString());
+    }
+    /**
+     * Return the usage banner markdown for the given configuration
+     */
+    usageMarkdown() {
+        if (this.#usageMarkdown)
+            return this.#usageMarkdown;
+        const out = [];
+        let headingLevel = 1;
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            out.push(`# ${normalizeOneLine(first.text)}`);
+        }
+        out.push('Usage:');
+        if (this.#options.usage) {
+            out.push(normalizeMarkdown(this.#options.usage, true));
+        }
+        else {
+            const cmd = basename(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            out.push(normalizeMarkdown(usage, true));
+        }
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
+            start++;
+        }
+        const { rows } = this.#usageRows(start);
+        // heading level in markdown is number of # ahead of text
+        for (const row of rows) {
+            if (row.left) {
+                out.push('#'.repeat(headingLevel + 1) +
+                    ' ' +
+                    normalizeOneLine(row.left, true));
+                if (row.text)
+                    out.push(normalizeMarkdown(row.text));
+            }
+            else if (isHeading(row)) {
+                const { level } = row;
+                headingLevel = level;
+                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
+            }
+            else {
+                out.push(normalizeMarkdown(row.text, !!row.pre));
+            }
+        }
+        return (this.#usageMarkdown = out.join('\n\n') + '\n');
+    }
+    #usageRows(start) {
+        // turn each config type into a row, and figure out the width of the
+        // left hand indentation for the option descriptions.
+        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
+        let maxWidth = 8;
+        let prev = undefined;
+        const rows = [];
+        for (const field of this.#fields.slice(start)) {
+            if (field.type !== 'config') {
+                if (prev?.type === 'config')
+                    prev.skipLine = true;
+                prev = undefined;
+                field.text = normalize(field.text, !!field.pre);
+                rows.push(field);
+                continue;
+            }
+            const { value } = field;
+            const desc = value.description || '';
+            const mult = value.multiple ? 'Can be set multiple times' : '';
+            const opts = value.validOptions?.length ?
+                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
+                : '';
+            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
+            const extra = [opts, mult].join(dmDelim).trim();
+            const text = (normalize(desc) + dmDelim + extra).trim();
+            const hint = value.hint ||
+                (value.type === 'number' ? 'n'
+                    : value.type === 'string' ? field.name
+                        : undefined);
+            const short = !value.short ? ''
+                : value.type === 'boolean' ? `-${value.short} `
+                    : `-${value.short}<${hint}> `;
+            const left = value.type === 'boolean' ?
+                `${short}--${field.name}`
+                : `${short}--${field.name}=<${hint}>`;
+            const row = { text, left, type: 'config' };
+            if (text.length > width - maxMax) {
+                row.skipLine = true;
+            }
+            if (prev && left.length > maxMax)
+                prev.skipLine = true;
+            prev = row;
+            const len = left.length + 4;
+            if (len > maxWidth && len < maxMax) {
+                maxWidth = len;
+            }
+            rows.push(row);
+        }
+        return { rows, maxWidth };
+    }
+    /**
+     * Return the configuration options as a plain object
+     */
+    toJSON() {
+        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
+            field,
+            {
+                type: def.type,
+                ...(def.multiple ? { multiple: true } : {}),
+                ...(def.delim ? { delim: def.delim } : {}),
+                ...(def.short ? { short: def.short } : {}),
+                ...(def.description ?
+                    { description: normalize(def.description) }
+                    : {}),
+                ...(def.validate ? { validate: def.validate } : {}),
+                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
+                ...(def.default !== undefined ? { default: def.default } : {}),
+                ...(def.hint ? { hint: def.hint } : {}),
+            },
+        ]));
+    }
+    /**
+     * Custom printer for `util.inspect`
+     */
+    [inspect.custom](_, options) {
+        return `Jack ${inspect(this.toJSON(), options)}`;
+    }
+}
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+export const jack = (options = {}) => new Jack(options);
+// Unwrap and un-indent, so we can wrap description
+// strings however makes them look nice in the code.
+const normalize = (s, pre = false) => {
+    if (pre)
+        // prepend a ZWSP to each line so cliui doesn't strip it.
+        return s
+            .split('\n')
+            .map(l => `\u200b${l}`)
+            .join('\n');
+    return s
+        .split(/^\s*```\s*$/gm)
+        .map((s, i) => {
+        if (i % 2 === 1) {
+            if (!s.trim()) {
+                return `\`\`\`\n\`\`\`\n`;
+            }
+            // outdent the ``` blocks, but preserve whitespace otherwise.
+            const split = s.split('\n');
+            // throw out the \n at the start and end
+            split.pop();
+            split.shift();
+            const si = split.reduce((shortest, l) => {
+                /* c8 ignore next */
+                const ind = l.match(/^\s*/)?.[0] ?? '';
+                if (ind.length)
+                    return Math.min(ind.length, shortest);
+                else
+                    return shortest;
+            }, Infinity);
+            /* c8 ignore next */
+            const i = isFinite(si) ? si : 0;
+            return ('\n```\n' +
+                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
+                '\n```\n');
+        }
+        return (s
+            // remove single line breaks, except for lists
+            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
+            // normalize mid-line whitespace
+            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
+            // two line breaks are enough
+            .replace(/\n{3,}/g, '\n\n')
+            // remove any spaces at the start of a line
+            .replace(/\n[ \t]+/g, '\n')
+            .trim());
+    })
+        .join('\n');
+};
+// normalize for markdown printing, remove leading spaces on lines
+const normalizeMarkdown = (s, pre = false) => {
+    const n = normalize(s, pre).replace(/\\/g, '\\\\');
+    return pre ?
+        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
+        : n.replace(/\n +/g, '\n').trim();
+};
+const normalizeOneLine = (s, pre = false) => {
+    const n = normalize(s, pre)
+        .replace(/[\s\u200b]+/g, ' ')
+        .trim();
+    return pre ? `\`${n}\`` : n;
+};
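+// A minimal end-to-end sketch of the builder defined above (the option
+// names are illustrative, not part of this package):
+//
+//   import { jack } from 'jackspeak'
+//
+//   const j = jack({ usage: 'myapp [options]' })
+//     .heading('myapp')
+//     .description('An example CLI')
+//     .flag({ verbose: { short: 'v', description: 'print more output' } })
+//     .opt({ output: { hint: 'file', description: 'where to write results' } })
+//   console.log(j.usage())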
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/package.json b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json
new file mode 100644
index 0000000000000..aa85d230f6d24
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json
@@ -0,0 +1,94 @@
+{
+  "name": "jackspeak",
+  "version": "4.1.1",
+  "description": "A very strict and proper argument parser.",
+  "tshy": {
+    "main": true,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.js"
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build-examples": "for i in examples/*.js ; do node $i -h > ${i/.js/.txt}; done",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "license": "BlueOak-1.0.0",
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^22.6.0",
+    "prettier": "^3.3.3",
+    "tap": "^21.0.1",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.26.7"
+  },
+  "dependencies": {
+    "@isaacs/cliui": "^8.0.2"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/jackspeak.git"
+  },
+  "keywords": [
+    "argument",
+    "parser",
+    "args",
+    "option",
+    "flag",
+    "cli",
+    "command",
+    "line",
+    "parse",
+    "parsing"
+  ],
+  "author": "Isaac Z. Schlueter ",
+  "tap": {
+    "typecheck": true
+  },
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
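+    // A minimal construction sketch using the options validated above
+    // (the key and value are illustrative):
+    //
+    //   const cache = new LRUCache({ max: 500, ttl: 1000 * 60 })
+    //   cache.set('key', { some: 'value' })
+    //   cache.get('key')             // => { some: 'value' }
+    //   cache.getRemainingTTL('key') // => ms left before 'key' goes stale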
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to ttlResolution ms so we're not
+        // hitting that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
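+    // A dump()/load() round-trip sketch (`src` and `dst` are illustrative
+    // cache instances created with compatible options):
+    //
+    //   const entries = src.dump()
+    //   dst.load(entries) // dst now holds src's entries, TTL offsets preserved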
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
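+    // A per-call options sketch (values are illustrative; `size` only applies
+    // when maxSize or maxEntrySize is configured):
+    //
+    //   cache.set('k', bigValue, { ttl: 5000, size: 100, noDisposeOnSet: true })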
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas} is
+     * set to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically
+     * in the cache. The difference can be determined (if it matters) by using
+     * a `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
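+    // Usage sketch for has(), including the optional status object referred
+    // to in the docblock above:
+    //
+    //   const status = {}
+    //   cache.has('a', { status })
+    //   // status.has is now 'hit', 'stale', or 'miss'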
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
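+    // Usage sketch: peek() reads without promoting the entry, so it does not
+    // affect recency or eviction order:
+    //
+    //   cache.set('a', 1)
+    //   cache.peek('a')       // => 1, 'a' does not become most recently used
+    //   cache.peek('missing') // => undefined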
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
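+    // Note on the shape checked above: #backgroundFetch() decorates the
+    // in-flight promise (via Object.assign earlier) with __abortController,
+    // __staleWhileFetching, and __returned, which is what distinguishes a
+    // background fetch from an ordinary Promise stored as a value.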
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
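+    // Usage sketch for fetch(), assuming the cache was constructed with a
+    // fetchMethod; loadUser is a hypothetical async loader:
+    //
+    //   const cache = new LRUCache({
+    //     max: 100,
+    //     ttl: 60_000,
+    //     fetchMethod: async (key, staleValue, { signal }) => loadUser(key, { signal }),
+    //   })
+    //   await cache.fetch('user:1')                          // miss: calls fetchMethod
+    //   await cache.fetch('user:1')                          // hit: cached value
+    //   await cache.fetch('user:1', { forceRefresh: true })  // refetch even if fresh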
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
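+    // Usage sketch for memo(), assuming a memoMethod was supplied to the
+    // constructor; expensive() is a hypothetical synchronous function:
+    //
+    //   const cache = new LRUCache({
+    //     max: 1000,
+    //     memoMethod: (key, oldValue, { context }) => expensive(key, context),
+    //   })
+    //   cache.memo('k')                         // computes and caches
+    //   cache.memo('k')                         // returns the cached value
+    //   cache.memo('k', { forceRefresh: true }) // recomputes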
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet;
+                // it's not stale, so this isn't a stale-while-refetching case.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
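+    // Usage sketch for get(); the status fields match those set above:
+    //
+    //   const status = {}
+    //   const v = cache.get('a', { status, allowStale: true })
+    //   // status.get is 'hit', 'stale', or 'miss'; with allowStale a stale
+    //   // value may be returned, and status.returnedStale is set when it is.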
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
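+    // Usage sketch:
+    //
+    //   cache.set('a', 1)
+    //   cache.delete('a') // => true, dispose/disposeAfter run with reason 'delete'
+    //   cache.delete('a') // => false, already gone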
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
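+    // Usage sketch:
+    //
+    //   cache.set('a', 1)
+    //   cache.clear()   // every entry is disposed with reason 'delete'
+    //   cache.size      // => 0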
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
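+// Note: Stack above is a small free-list of reclaimed slot indexes, and
+// getUintArray() picks the narrowest typed-array type able to hold indexes up
+// to `max`, which keeps the next/prev linked-list storage compact.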
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
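+    // A minimal construction sketch; at least one of max, maxSize, or ttl is
+    // required, as enforced just above:
+    //
+    //   new LRUCache({ max: 500 })
+    //   new LRUCache({ maxSize: 5000, sizeCalculation: v => JSON.stringify(v).length })
+    //   new LRUCache({ ttl: 60_000, ttlAutopurge: true })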
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
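+    // Usage sketch (the per-entry computation is installed by
+    // #initializeTTLTracking() below once a ttl is configured):
+    //
+    //   const cache = new LRUCache({ max: 10, ttl: 1000 })
+    //   cache.set('a', 1)
+    //   cache.getRemainingTTL('a') // => roughly 1000, counting down
+    //   cache.getRemainingTTL('x') // => 0, not in the cache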
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to the ttlResolution window so we're
+        // not hitting that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
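+    // Usage sketch for size tracking, assuming maxSize is set so that
+    // #initializeSizeTracking() runs:
+    //
+    //   const cache = new LRUCache({ maxSize: 100, sizeCalculation: s => s.length })
+    //   cache.set('a', 'x'.repeat(60))
+    //   cache.set('b', 'y'.repeat(60)) // evicts 'a' to stay under maxSize
+    //   cache.calculatedSize           // => 60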
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if the cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of
+     * use. Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas}
+     * is set to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
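
For reference, a minimal usage sketch of the LRUCache API bundled above. This is illustrative only (not part of the patched files) and assumes the vendored copy resolves via `require('lru-cache')`; it uses only options and methods visible in the file: `max`, `ttl`, `fetchMethod`, `set`, `get`, `getRemainingTTL`, and `fetch`.

    // illustrative only — not part of this patch
    const { LRUCache } = require('lru-cache')

    // the constructor requires at least one of max, maxSize, or ttl
    const cache = new LRUCache({ max: 100, ttl: 60_000 })

    cache.set('pkg', { name: 'lru-cache' })  // add or replace an entry
    cache.get('pkg')                         // returns the value and updates recency
    cache.getRemainingTTL('pkg')             // ms of TTL left (Infinity if no TTL set)

    // async population: fetchMethod is called on a miss or a stale hit, and its
    // signal aborts if the entry is evicted or replaced in the meantime
    const fetching = new LRUCache({
      max: 50,
      fetchMethod: async (key, staleValue, { signal }) => `value for ${key}`,
    })
    fetching.fetch('a').then(v => console.log(v))  // 'value for a'
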
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/package.json b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter <i@izs.me>",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
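An illustrative note, not part of the patch: given the "exports" map above, the same LRUCache class is reachable three ways; the specifiers below are the standard ones for this package.
// import { LRUCache } from 'lru-cache'        // resolves to ./dist/esm/index.js
// import { LRUCache } from 'lru-cache/min'    // resolves to ./dist/esm/index.min.js
// const { LRUCache } = require('lru-cache')   // resolves to ./dist/commonjs/index.js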
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/LICENSE b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/LICENSE
new file mode 100644
index 0000000000000..1493534e60dce
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
new file mode 100644
index 0000000000000..5fc86bbd0116c
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertValidPattern = void 0;
+const MAX_PATTERN_LENGTH = 1024 * 64;
+const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+exports.assertValidPattern = assertValidPattern;
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
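A minimal usage sketch, illustrative only and not part of the patch; the relative require path is just an example.
const { assertValidPattern } = require('./assert-valid-pattern.js')
assertValidPattern('src/**/*.js')         // ok: any string up to 64 KiB
// assertValidPattern(42)                 // throws TypeError('invalid pattern')
// assertValidPattern('x'.repeat(65537))  // throws TypeError('pattern is too long')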
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/ast.js
new file mode 100644
index 0000000000000..7b2109625eaeb
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/ast.js
@@ -0,0 +1,592 @@
+"use strict";
+// parse a single path portion
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AST = void 0;
+const brace_expressions_js_1 = require("./brace-expressions.js");
+const unescape_js_1 = require("./unescape.js");
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everthing that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                (0, unescape_js_1.unescape)(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something,but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            (0, unescape_js_1.unescape)(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
+    }
+}
+exports.AST = AST;
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
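An illustrative sketch, not part of the patch, of the entry points defined above; the patterns are arbitrary examples and the require path is assumed.
const { AST } = require('./ast.js')
AST.fromGlob('*.js', {}).toMMPattern()       // => RegExp (pattern contains magic)
AST.fromGlob('plain.txt', {}).toMMPattern()  // => 'plain.txt' (no magic, plain string returned)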
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/brace-expressions.js
new file mode 100644
index 0000000000000..0e13eefc4cfee
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/brace-expressions.js
@@ -0,0 +1,152 @@
+"use strict";
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseClass = void 0;
+// { : [, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class just no good.
+const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+exports.parseClass = parseClass;
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
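A brief illustration, not part of the patch, of parseClass and its return shape [regexp source, uflag needed, chars consumed, has magic]; the inputs are arbitrary examples.
const { parseClass } = require('./brace-expressions.js')
parseClass('[a-c]x', 0)       // => ['[a-c]', false, 5, true]
parseClass('[[:alpha:]]', 0)  // => ['[\\p{L}\\p{Nl}]', true, 11, true]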
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/escape.js
new file mode 100644
index 0000000000000..02a4f8a8e0a58
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/escape.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.escape = void 0;
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+exports.escape = escape;
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
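An illustration, not part of the patch, of the two escaping modes described in the doc comment above; the input string is an arbitrary example.
const { escape } = require('./escape.js')
escape('a?b*[c]')                                  // => 'a\\?b\\*\\[c\\]'
escape('a?b*[c]', { windowsPathsNoEscape: true })  // => 'a[?]b[*][[]c[]]'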
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js
new file mode 100644
index 0000000000000..f58fb8616aa9a
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js
@@ -0,0 +1,1014 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
+const brace_expansion_1 = require("@isaacs/brace-expansion");
+const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
+const ast_js_1 = require("./ast.js");
+const escape_js_1 = require("./escape.js");
+const unescape_js_1 = require("./unescape.js");
+const minimatch = (p, pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+exports.minimatch = minimatch;
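A usage sketch of the matcher exported above; illustrative only, not part of the patch.
// minimatch('bar.foo', '*.foo')      // => true
// minimatch('bar.foo', '*.bar')      // => false
// minimatch('anything', '#comment')  // => false (comments match nothing)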
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+exports.minimatch.sep = exports.sep;
+exports.GLOBSTAR = Symbol('globstar **');
+exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
+exports.filter = filter;
+exports.minimatch.filter = exports.filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return exports.minimatch;
+    }
+    const orig = exports.minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: exports.GLOBSTAR,
+    });
+};
+exports.defaults = defaults;
+exports.minimatch.defaults = exports.defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+const braceExpand = (pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // Thanks to Yeting Li  for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return (0, brace_expansion_1.expand)(pattern);
+};
+exports.braceExpand = braceExpand;
+exports.minimatch.braceExpand = exports.braceExpand;
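An illustrative sketch, not part of the patch, of the expansions listed in the comment above braceExpand.
// braceExpand('a{b,c}d')   // => ['abd', 'acd']
// braceExpand('a{0..2}d')  // => ['a0d', 'a1d', 'a2d']
// braceExpand('a{2..}b')   // => ['a{2..}b'] (invalid sets are not expanded)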
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+exports.makeRe = makeRe;
+exports.minimatch.makeRe = exports.makeRe;
+const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+exports.match = match;
+exports.minimatch.match = exports.match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
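Illustrative only, not part of the patch: at the default optimizationLevel of 1, the levelOneOptimize pass below collapses adjacent ** parts and resolves .. against a preceding plain portion, for example:
// [['a', 'b', '..', 'c']] -> [['a', 'c']]
// [['**', '**', 'x']]     -> [['**', 'x']]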
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>// -> <pre>/
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../ -> <pre>/
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>// -> <pre>/
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../ -> <pre>/
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
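
A quick illustration of the leading-`!` handling above, sketched against the public `minimatch` function:

const { minimatch } = require('minimatch')

console.log(minimatch('foo.txt', '!*.js'))  // true  - foo.txt is not a .js file
console.log(minimatch('foo.js', '!*.js'))   // false - the negated pattern rejects .js
console.log(minimatch('foo.js', '!!*.js'))  // true  - double negation cancels out
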
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
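
A sketch of the `partial` flag described above `matchOne()`: glob-style directory walkers use it to ask whether a path prefix could still grow into a full match.

const { Minimatch } = require('minimatch')

const mm = new Minimatch('/*/b/*/d')
console.log(mm.match('/a/b', true))  // true  - ran out of path, not of pattern
console.log(mm.match('/a/c', true))  // false - 'c' can never satisfy the 'b' part
console.log(mm.match('/a/b'))        // false - a full match still needs every part
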
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
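
For reference, a small `makeRe()` sketch; the exact regexp source varies with options, so only its behaviour is asserted here:

const { Minimatch } = require('minimatch')

const re = new Minimatch('src/**/*.js').makeRe()
console.log(re instanceof RegExp)     // true
console.log(re.test('src/lib/a.js'))  // true
console.log(re.test('src/a.css'))     // false
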
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
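
A short example of the `matchBase` branch above: a pattern consisting of a single path part is matched against the basename only.

const { minimatch } = require('minimatch')

console.log(minimatch('src/deep/nested/foo.js', '*.js'))                      // false
console.log(minimatch('src/deep/nested/foo.js', '*.js', { matchBase: true })) // true
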
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/package.json b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
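
A usage sketch for `unescape()` as documented above, assuming the package's normal exports:

const { unescape } = require('minimatch')

console.log(unescape('\\*.js'))   // *.js - backslash escape removed
console.log(unescape('[*].js'))   // *.js - character-class escape removed
// with windowsPathsNoEscape, backslashes are path separators and stay put
console.log(unescape('\\*.js', { windowsPathsNoEscape: true }))  // \*.js
console.log(unescape('[*].js', { windowsPathsNoEscape: true }))  // *.js
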
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/ast.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 0000000000000..2d2bced6533de
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
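
A minimal sketch of how the AST above is consumed (`AST` is also re-exported from the package root): `toMMPattern()` yields the plain unescaped string when the glob has no magic, and a RegExp otherwise.

const { AST } = require('minimatch')

console.log(AST.fromGlob('hello.txt').toMMPattern())                     // 'hello.txt'
console.log(AST.fromGlob('+(a|b).txt').toMMPattern() instanceof RegExp)  // true
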
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true, true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c<more...>] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
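
These posix classes surface through ordinary bracket expressions; a quick sketch:

const { minimatch } = require('minimatch')

console.log(minimatch('a', '[[:alpha:]]'))  // true
console.log(minimatch('1', '[[:alpha:]]'))  // false
console.log(minimatch('1', '[[:digit:]]'))  // true
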
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/escape.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 0000000000000..790d6c02a2f22
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1001 @@
+import { expand } from '@isaacs/brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
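
The comment shortcut above, sketched alongside the `nocomment` option:

const { minimatch } = require('minimatch')

console.log(minimatch('foo.js', '*.js'))                               // true
console.log(minimatch('#no-match', '#no-match'))                       // false - '#' starts a comment
console.log(minimatch('#no-match', '#no-match', { nocomment: true }))  // true  - treated literally
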
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
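+// Usage sketch (hedged): defaults() returns a minimatch clone with options baked in.
+//   const insensitive = minimatch.defaults({ nocase: true })
+//   insensitive('README.TXT', '*.txt')   // => true (nocase applied automatically)
+//   minimatch('README.TXT', '*.txt')     // => false (the original export is unchanged)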
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
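+// Expansion sketch (hedged, mirroring the examples in the comment above):
+//   braceExpand('a{b,c}d')    // => ['abd', 'acd']
+//   braceExpand('a{0..2}d')   // => ['a0d', 'a1d', 'a2d']
+//   braceExpand('a{b}c')      // => ['a{b}c'] (invalid sets are not expanded)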
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
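+// Filtering sketch (hedged): match() keeps only the list entries the pattern accepts.
+//   minimatch.match(['a.js', 'b.ts', 'c.js'], '*.js')     // => ['a.js', 'c.js']
+//   minimatch.match(['a.js'], '*.md', { nonull: true })   // => ['*.md'] (pattern echoed back on no hits)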
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>// -> <pre>/
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>// -> <pre>/
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>// -> <pre>/
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
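+    // Globstar sketch (hedged, restating the walk-through in the comments above):
+    //   new Minimatch('a/**/b/**/c').match('a/b/x/y/z/c')   // => true
+    //   new Minimatch('a/**/b/**/c').match('a/b/c')         // => true (** may match zero segments)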
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
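+// Class usage sketch (hedged): a Minimatch instance parses the pattern once and can be reused.
+//   const mm = new Minimatch('src/**/*.js')
+//   mm.match('src/lib/index.js')   // => true
+//   mm.hasMagic()                  // => true
+//   mm.makeRe()                    // => anchored RegExp for the full pattern, or false if it cannot compile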
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/package.json b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/unescape.js b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..bfa2423f50b5e
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json
@@ -0,0 +1,79 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "10.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.2",
+    "@types/node": "^24.0.0",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.3.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js",
+  "dependencies": {
+    "@isaacs/brace-expansion": "^5.0.0"
+  }
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/LICENSE.md b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/LICENSE.md
new file mode 100644
index 0000000000000..c5402b9577a8c
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/LICENSE.md
@@ -0,0 +1,55 @@
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js
new file mode 100644
index 0000000000000..af3e7595f577f
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js
@@ -0,0 +1,2016 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
+const lru_cache_1 = require("lru-cache");
+const node_path_1 = require("node:path");
+const node_url_1 = require("node:url");
+const fs_1 = require("fs");
+const actualFS = __importStar(require("node:fs"));
+const realpathSync = fs_1.realpathSync.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+const promises_1 = require("node:fs/promises");
+const minipass_1 = require("minipass");
+const defaultFS = {
+    lstatSync: fs_1.lstatSync,
+    readdir: fs_1.readdir,
+    readdirSync: fs_1.readdirSync,
+    readlinkSync: fs_1.readlinkSync,
+    realpathSync,
+    promises: {
+        lstat: promises_1.lstat,
+        readdir: promises_1.readdir,
+        readlink: promises_1.readlink,
+        realpath: promises_1.realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+class ResolveCache extends lru_cache_1.LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+exports.ResolveCache = ResolveCache;
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+class ChildrenCache extends lru_cache_1.LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+exports.ChildrenCache = ChildrenCache;
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
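+    // Resolution sketch (hedged; `scurry` is a hypothetical PathScurry instance):
+    //   scurry.cwd.resolve('./src/index.ts')   // => the child Path for src/index.ts
+    //   scurry.cwd.resolve('..')               // => the parent Path (or the root itself at the root)
+    // No filesystem calls are made here; unknown children are created provisionally.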
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
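+    // Illustrative sketch (assumes a Windows drive path; values are hypothetical):
+    // a Path for C:\Users\me would report
+    //   fullpath()       // 'C:\Users\me'
+    //   fullpathPosix()  // '//?/C:/Users/me'
+    // On posix systems both methods return the same string.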
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
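+    // Layout note (illustrative): entries at indexes < children.provisional
+    // were confirmed by a real readdir(); entries at or past that index were
+    // created speculatively by child() and may not exist, which is why
+    // readdirCached() only returns the confirmed slice.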
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that readdirCached() is likely valid.

+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
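+    // Illustrative usage with hypothetical paths: for a symlink
+    // /tmp/link -> ../etc/hosts,
+    //   const target = await linkPath.readlink();  // Path for /etc/hosts
+    //   target?.fullpath();                        // '/etc/hosts'
+    // The link text is resolved against the parent directory's realpath().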
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
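+    // Promotion sketch: with children [a*, b*, c*] (provisional = 0, '*'
+    // marking speculative entries), a readdir dirent for 'b' promotes it to
+    // [b, a*, c*] with provisional = 1, keeping confirmed entries at the head.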
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+exports.PathBase = PathBase;
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return node_path_1.win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+exports.PathWin32 = PathWin32;
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+exports.PathPosix = PathPosix;
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults to true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = (0, node_url_1.fileURLToPath)(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
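+    // Illustrative usage (assumes a scurry whose cwd is /home/user):
+    //   ps.resolve('foo', 'bar')        // '/home/user/foo/bar'
+    //   ps.resolve('foo', '/tmp', 'x')  // '/tmp/x' - an absolute arg resets the base
+    // Repeated calls with the same arguments are served from the resolve cache.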
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
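+    // Illustrative usage (assumes a 'src' directory under the cwd):
+    //   const files = await ps.walk('src', { filter: e => e.isFile() });
+    //   const paths = await ps.walk({ withFileTypes: false });  // string paths
+    // `filter` controls which entries are collected; `walkFilter` controls
+    // which directories are descended into.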
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
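+    // Illustrative usage: a PathScurry instance is itself async-iterable,
+    //   for await (const entry of ps) {
+    //     if (entry.isFile()) console.log(entry.fullpath());
+    //   }
+    // which delegates to stream() for buffered directory reads.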
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
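+    // Illustrative effect (assumes the scurry cwd starts at /project):
+    //   ps.relative('/project/src/index.js')  // 'src/index.js'
+    //   ps.chdir('src');
+    //   ps.relative('/project/src/index.js')  // 'index.js'
+    // [setAsCwd] updates memoized relative paths for the new cwd.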
+}
+exports.PathScurryBase = PathScurryBase;
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case-insensitive matching, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get the separator as the root, and uncToDrive will return \
+        // In that case, mount \ on the root from the cwd.
+        return node_path_1.win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+exports.PathScurryWin32 = PathScurryWin32;
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+exports.PathScurryPosix = PathScurryPosix;
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+exports.PathScurryDarwin = PathScurryDarwin;
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js
new file mode 100644
index 0000000000000..42be74c37ad9d
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js
@@ -0,0 +1,1981 @@
+import { LRUCache } from 'lru-cache';
+import { posix, win32 } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
+import * as actualFS from 'node:fs';
+const realpathSync = rps.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
+import { Minipass } from 'minipass';
+const defaultFS = {
+    lstatSync,
+    readdir: readdirCB,
+    readdirSync,
+    readlinkSync,
+    realpathSync,
+    promises: {
+        lstat,
+        readdir,
+        readlink,
+        realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
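+// Worked example (illustrative): NFKD normalization makes visually identical
+// unicode spellings compare equal, e.g.
+//   normalize('caf\u00e9') === normalize('cafe\u0301')             // true
+//   normalizeNocase('CAF\u00c9') === normalizeNocase('cafe\u0301') // true
+// and both results are memoized in the Maps above, so repeat lookups are cheap.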
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+export class ResolveCache extends LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+export class ChildrenCache extends LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
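+// Illustrative sizing note (not part of the library): with the default maxSize
+// of 16 * 1024 and sizeCalculation of children.length + 1, a cached listing of
+// 99 children costs 100 units, so on the order of 16k child Path entries can be
+// retained across all cached listings before the least-recently-used are evicted.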
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+export class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * usingthe {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
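+    // Illustrative sketch (assumes a concrete PathScurry instance named scurry,
+    // built from the classes further down in this module):
+    //   const p = scurry.cwd.resolve('src/index.js') // string walk, no fs calls
+    //   p.isUnknown()                                 // true until lstat/readdir
+    // resolve() just chains child() calls, so each intermediate entry ('src',
+    // then 'index.js') starts life as a provisional child of its parent.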
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
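+    // Worked example (illustrative): on Windows, an entry at C:\proj\lib has
+    // fullpath() === 'C:\\proj\\lib' and fullpathPosix() === '//?/C:/proj/lib';
+    // on posix systems both methods return the same string.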
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
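+    // Illustrative example: on a case-insensitive scurry, entry.isNamed('README.md')
+    // and entry.isNamed('readme.md') agree, and both also match a decomposed
+    // unicode spelling of the name, which a direct entry.name comparison would miss.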
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
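+    // Illustrative trace (assumes a directory on disk containing 'a' and 'b',
+    // returned by readdir in that order):
+    //   parent.child('a')        // children: [a], provisional = 0 (a speculative)
+    //   await parent.readdir()   // defined further below
+    //     // 'a' is promoted in place            -> provisional = 1
+    //     // 'b' is new, unshifted to the head   -> children: [b, a], provisional = 2
+    // Anything still at an index >= provisional after a successful readdir is
+    // marked ENOENT by #readdirSuccess above.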
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+export class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have a case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+export class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+export class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = fileURLToPath(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
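+    // Worked example (illustrative, posix): the rightmost absolute segment wins,
+    // just like require('node:path').resolve:
+    //   scurry.resolve('ignored', '/tmp', 'b', 'c')  // '/tmp/b/c'
+    // The resulting string is memoized in the ResolveCache, keyed by '/tmp/b/c'.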
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
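+    // Usage sketch (illustrative; scurry is a concrete PathScurry instance):
+    //   const found = scurry.walkSync('.', {
+    //     withFileTypes: false,                        // emit fullpath strings
+    //     filter: p => p.isFile() || p.isDirectory(),  // what gets returned
+    //     walkFilter: p => !p.isNamed('node_modules'), // which dirs are entered
+    //   })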
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings. Uses
+ * {@link PathWin32} for Path objects.
+ */
+export class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get separator as the root, and driveFromUNC will return \
+        // In that case, mount \ on the root from the cwd.
+        return win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+export const PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
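+// Illustrative sketch (not part of the published module): walking the cwd with
+// the platform default class exported above. iterateSync() accepts an options
+// object as its first argument; `withFileTypes: false` yields path strings
+// rather than Path entries.
+//
+//   const pw = new PathScurry(process.cwd())
+//   for (const p of pw.iterateSync({ withFileTypes: false })) {
+//     console.log(p)
+//   }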
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/package.json b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json
new file mode 100644
index 0000000000000..c3cb39dced545
--- /dev/null
+++ b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json
@@ -0,0 +1,88 @@
+{
+  "name": "path-scurry",
+  "version": "2.0.0",
+  "description": "walk paths fast and efficiently",
+  "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
+  "main": "./dist/commonjs/index.js",
+  "type": "module",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "license": "BlueOak-1.0.0",
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
+    "bench": "bash ./scripts/bench.sh"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@nodelib/fs.walk": "^2.0.0",
+    "@types/node": "^20.14.10",
+    "mkdirp": "^3.0.0",
+    "prettier": "^3.3.2",
+    "rimraf": "^5.0.8",
+    "tap": "^20.0.3",
+    "ts-node": "^10.9.2",
+    "tshy": "^2.0.1",
+    "typedoc": "^0.26.3",
+    "typescript": "^5.5.3"
+  },
+  "tap": {
+    "typecheck": true
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/path-scurry"
+  },
+  "dependencies": {
+    "lru-cache": "^11.0.0",
+    "minipass": "^7.1.2"
+  },
+  "tshy": {
+    "selfLink": false,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "types": "./dist/commonjs/index.d.ts",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/@npmcli/map-workspaces/package.json b/node_modules/@npmcli/map-workspaces/package.json
index 78a515e027b01..fb77ea8615c1c 100644
--- a/node_modules/@npmcli/map-workspaces/package.json
+++ b/node_modules/@npmcli/map-workspaces/package.json
@@ -1,13 +1,13 @@
 {
   "name": "@npmcli/map-workspaces",
-  "version": "4.0.2",
+  "version": "5.0.0",
   "main": "lib/index.js",
   "files": [
     "bin/",
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "description": "Retrieves a name:pathname Map for a given workspaces config",
   "repository": {
@@ -44,18 +44,18 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.4",
+    "@npmcli/template-oss": "4.25.0",
     "tap": "^16.0.1"
   },
   "dependencies": {
     "@npmcli/name-from-folder": "^3.0.0",
-    "@npmcli/package-json": "^6.0.0",
-    "glob": "^10.2.2",
-    "minimatch": "^9.0.0"
+    "@npmcli/package-json": "^7.0.0",
+    "glob": "^11.0.3",
+    "minimatch": "^10.0.3"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.25.0",
     "publish": "true"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 342e81eff6233..08949f5429bec 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -88,7 +88,7 @@
         "@npmcli/arborist": "^9.1.4",
         "@npmcli/config": "^10.4.0",
         "@npmcli/fs": "^4.0.0",
-        "@npmcli/map-workspaces": "^4.0.2",
+        "@npmcli/map-workspaces": "^5.0.0",
         "@npmcli/package-json": "^7.0.1",
         "@npmcli/promise-spawn": "^8.0.2",
         "@npmcli/redact": "^3.2.2",
@@ -3457,38 +3457,102 @@
       }
     },
     "node_modules/@npmcli/map-workspaces": {
-      "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-4.0.2.tgz",
-      "integrity": "sha512-mnuMuibEbkaBTYj9HQ3dMe6L0ylYW+s/gfz7tBDMFY/la0w9Kf44P9aLn4/+/t3aTR3YUHKoT6XQL9rlicIe3Q==",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-5.0.0.tgz",
+      "integrity": "sha512-+YJN6+BIQEC5QL4EqffJ2I1S9ySspwn7GP7uQINtZhf3uy7P0KnnIg+Ab5WeSUTZYpg+jn3GSfMme2FutB7qEQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/name-from-folder": "^3.0.0",
-        "@npmcli/package-json": "^6.0.0",
-        "glob": "^10.2.2",
-        "minimatch": "^9.0.0"
+        "@npmcli/package-json": "^7.0.0",
+        "glob": "^11.0.3",
+        "minimatch": "^10.0.3"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/map-workspaces/node_modules/@npmcli/package-json": {
-      "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+    "node_modules/@npmcli/map-workspaces/node_modules/glob": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
+      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^6.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^8.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.5.3",
-        "validate-npm-package-license": "^3.0.4"
+        "foreground-child": "^3.3.1",
+        "jackspeak": "^4.1.1",
+        "minimatch": "^10.0.3",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^2.0.0"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/map-workspaces/node_modules/jackspeak": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/map-workspaces/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
+    "node_modules/@npmcli/map-workspaces/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/brace-expansion": "^5.0.0"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/map-workspaces/node_modules/path-scurry": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
+      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^11.0.0",
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
     "node_modules/@npmcli/metavuln-calculator": {
@@ -3894,6 +3958,22 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/map-workspaces": {
+      "version": "3.0.6",
+      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz",
+      "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/name-from-folder": "^2.0.0",
+        "glob": "^10.2.2",
+        "minimatch": "^9.0.0",
+        "read-package-json-fast": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
       "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
@@ -3965,19 +4045,29 @@
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces": {
-      "version": "3.0.6",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz",
-      "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==",
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-4.0.2.tgz",
+      "integrity": "sha512-mnuMuibEbkaBTYj9HQ3dMe6L0ylYW+s/gfz7tBDMFY/la0w9Kf44P9aLn4/+/t3aTR3YUHKoT6XQL9rlicIe3Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/name-from-folder": "^2.0.0",
+        "@npmcli/name-from-folder": "^3.0.0",
+        "@npmcli/package-json": "^6.0.0",
         "glob": "^10.2.2",
-        "minimatch": "^9.0.0",
-        "read-package-json-fast": "^3.0.0"
+        "minimatch": "^9.0.0"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/name-from-folder": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-3.0.0.tgz",
+      "integrity": "sha512-61cDL8LUc9y80fXn+lir+iVt8IS0xHqEKwPu/5jCjxQTVoSCmkXvw4vbMrzAMtmghz3/AkiBjhHkDKUH+kf7kA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator": {
@@ -19089,7 +19179,7 @@
         "@isaacs/string-locale-compare": "^1.1.0",
         "@npmcli/fs": "^4.0.0",
         "@npmcli/installed-package-contents": "^3.0.0",
-        "@npmcli/map-workspaces": "^4.0.1",
+        "@npmcli/map-workspaces": "^5.0.0",
         "@npmcli/metavuln-calculator": "^9.0.1",
         "@npmcli/name-from-folder": "^3.0.0",
         "@npmcli/node-gyp": "^4.0.0",
@@ -19143,7 +19233,7 @@
       "version": "10.4.0",
       "license": "ISC",
       "dependencies": {
-        "@npmcli/map-workspaces": "^4.0.1",
+        "@npmcli/map-workspaces": "^5.0.0",
         "@npmcli/package-json": "^7.0.0",
         "ci-info": "^4.0.0",
         "ini": "^5.0.0",
diff --git a/package.json b/package.json
index df85273c08fc7..1d0ae432724a8 100644
--- a/package.json
+++ b/package.json
@@ -55,7 +55,7 @@
     "@npmcli/arborist": "^9.1.4",
     "@npmcli/config": "^10.4.0",
     "@npmcli/fs": "^4.0.0",
-    "@npmcli/map-workspaces": "^4.0.2",
+    "@npmcli/map-workspaces": "^5.0.0",
     "@npmcli/package-json": "^7.0.1",
     "@npmcli/promise-spawn": "^8.0.2",
     "@npmcli/redact": "^3.2.2",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index ae9900e83ee64..53458f7469ca1 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -6,7 +6,7 @@
     "@isaacs/string-locale-compare": "^1.1.0",
     "@npmcli/fs": "^4.0.0",
     "@npmcli/installed-package-contents": "^3.0.0",
-    "@npmcli/map-workspaces": "^4.0.1",
+    "@npmcli/map-workspaces": "^5.0.0",
     "@npmcli/metavuln-calculator": "^9.0.1",
     "@npmcli/name-from-folder": "^3.0.0",
     "@npmcli/node-gyp": "^4.0.0",
diff --git a/workspaces/config/package.json b/workspaces/config/package.json
index daf535a2672a5..6db1b77174a9b 100644
--- a/workspaces/config/package.json
+++ b/workspaces/config/package.json
@@ -37,7 +37,7 @@
     "tap": "^16.3.8"
   },
   "dependencies": {
-    "@npmcli/map-workspaces": "^4.0.1",
+    "@npmcli/map-workspaces": "^5.0.0",
     "@npmcli/package-json": "^7.0.0",
     "ci-info": "^4.0.0",
     "ini": "^5.0.0",

From b6bb9aea4134c47f0593c111a734eda12ec3c20d Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:20:07 -0700
Subject: [PATCH 07/63] deps: pacote@21.0.3

---
 mock-registry/package.json                    |    2 +-
 node_modules/.gitignore                       |   41 +-
 .../node_modules}/ignore-walk/LICENSE         |    0
 .../node_modules}/ignore-walk/lib/index.js    |    0
 .../node_modules}/ignore-walk/package.json    |   10 +-
 .../node_modules/minimatch/LICENSE            |   15 +
 .../dist/commonjs/assert-valid-pattern.js     |   14 +
 .../minimatch/dist/commonjs/ast.js            |  592 +++++
 .../dist/commonjs/brace-expressions.js        |  152 ++
 .../minimatch/dist/commonjs/escape.js         |   22 +
 .../minimatch/dist/commonjs/index.js          | 1014 ++++++++
 .../minimatch/dist/commonjs/package.json      |    3 +
 .../minimatch/dist/commonjs/unescape.js       |   24 +
 .../dist/esm/assert-valid-pattern.js          |   10 +
 .../node_modules/minimatch/dist/esm/ast.js    |  588 +++++
 .../minimatch/dist/esm/brace-expressions.js   |  148 ++
 .../node_modules/minimatch/dist/esm/escape.js |   18 +
 .../node_modules/minimatch/dist/esm/index.js  | 1001 ++++++++
 .../minimatch/dist/esm/package.json           |    3 +
 .../minimatch/dist/esm/unescape.js            |   20 +
 .../node_modules/minimatch/package.json       |   79 +
 node_modules/npm-packlist/package.json        |   10 +-
 .../pacote/node_modules/@npmcli/git/LICENSE   |   15 +
 .../node_modules/@npmcli/git/lib/clone.js     |  172 ++
 .../node_modules/@npmcli/git/lib/errors.js    |   36 +
 .../node_modules/@npmcli/git/lib/find.js      |   15 +
 .../node_modules/@npmcli/git/lib/index.js     |    9 +
 .../node_modules/@npmcli/git/lib/is-clean.js  |    6 +
 .../pacote/node_modules/@npmcli/git/lib/is.js |    4 +
 .../@npmcli/git/lib/lines-to-revs.js          |  147 ++
 .../@npmcli/git/lib/make-error.js             |   33 +
 .../node_modules/@npmcli/git/lib/opts.js      |   57 +
 .../node_modules/@npmcli/git/lib/revs.js      |   22 +
 .../node_modules/@npmcli/git/lib/spawn.js     |   44 +
 .../node_modules/@npmcli/git/lib/utils.js     |    3 +
 .../node_modules/@npmcli/git/lib/which.js     |   18 +
 .../node_modules/@npmcli/git/package.json     |   58 +
 .../@npmcli/package-json/lib/index.js         |  286 ---
 .../package-json/lib/normalize-data.js        |  257 ---
 .../@npmcli/package-json/lib/normalize.js     |  601 -----
 .../@npmcli/package-json/lib/read-package.js  |   39 -
 .../@npmcli/package-json/lib/sort.js          |  101 -
 .../package-json/lib/update-dependencies.js   |   75 -
 .../package-json/lib/update-scripts.js        |   29 -
 .../package-json/lib/update-workspaces.js     |   26 -
 .../node_modules/@npmcli/run-script/LICENSE   |   15 +
 .../run-script/lib/is-server-package.js       |   11 +
 .../@npmcli/run-script/lib/make-spawn-args.js |   53 +
 .../run-script/lib/node-gyp-bin/node-gyp      |    2 +
 .../run-script/lib/node-gyp-bin/node-gyp.cmd  |    1 +
 .../@npmcli/run-script/lib/package-envs.js    |   29 +
 .../@npmcli/run-script/lib/run-script-pkg.js  |  114 +
 .../@npmcli/run-script/lib/run-script.js      |   15 +
 .../@npmcli/run-script/lib/set-path.js        |   45 +
 .../@npmcli/run-script/lib/signal-manager.js  |   50 +
 .../run-script/lib/validate-options.js        |   39 +
 .../@npmcli/run-script/package.json           |   54 +
 .../node_modules/@sigstore/bundle/LICENSE     |  202 ++
 .../@sigstore/bundle/dist/build.js            |  100 +
 .../@sigstore/bundle/dist/bundle.js           |   24 +
 .../@sigstore/bundle/dist/error.js            |   25 +
 .../@sigstore/bundle/dist/index.js            |   43 +
 .../@sigstore/bundle/dist/serialized.js       |   49 +
 .../@sigstore/bundle/dist/utility.js          |    2 +
 .../@sigstore/bundle/dist/validate.js         |  199 ++
 .../@sigstore/bundle/package.json             |   35 +
 .../node_modules/@sigstore/core/LICENSE       |  202 ++
 .../@sigstore/core/dist/asn1/error.js         |   24 +
 .../@sigstore/core/dist/asn1/index.js         |   20 +
 .../@sigstore/core/dist/asn1/length.js        |   62 +
 .../@sigstore/core/dist/asn1/obj.js           |  152 ++
 .../@sigstore/core/dist/asn1/parse.js         |  124 +
 .../@sigstore/core/dist/asn1/tag.js           |   86 +
 .../@sigstore/core/dist/crypto.js             |   60 +
 .../node_modules/@sigstore/core/dist/dsse.js  |   30 +
 .../@sigstore/core/dist/encoding.js           |   27 +
 .../node_modules/@sigstore/core/dist/index.js |   66 +
 .../node_modules/@sigstore/core/dist/json.js  |   60 +
 .../node_modules/@sigstore/core/dist/oid.js   |   14 +
 .../node_modules/@sigstore/core/dist/pem.js   |   43 +
 .../@sigstore/core/dist/rfc3161/error.js      |   21 +
 .../@sigstore/core/dist/rfc3161/index.js      |   20 +
 .../@sigstore/core/dist/rfc3161/timestamp.js  |  211 ++
 .../@sigstore/core/dist/rfc3161/tstinfo.js    |   71 +
 .../@sigstore/core/dist/stream.js             |  115 +
 .../@sigstore/core/dist/x509/cert.js          |  241 ++
 .../@sigstore/core/dist/x509/ext.js           |  145 ++
 .../@sigstore/core/dist/x509/index.js         |   23 +
 .../@sigstore/core/dist/x509/sct.js           |  151 ++
 .../node_modules/@sigstore/core/package.json  |   31 +
 .../@sigstore/protobuf-specs/LICENSE          |  202 ++
 .../dist/__generated__/envelope.js            |   59 +
 .../dist/__generated__/events.js              |  174 ++
 .../google/api/field_behavior.js              |  141 ++
 .../dist/__generated__/google/protobuf/any.js |   35 +
 .../google/protobuf/descriptor.js             | 2042 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   29 +
 .../dist/__generated__/rekor/v2/dsse.js       |   55 +
 .../dist/__generated__/rekor/v2/entry.js      |   81 +
 .../__generated__/rekor/v2/hashedrekord.js    |   56 +
 .../dist/__generated__/rekor/v2/verifier.js   |   74 +
 .../dist/__generated__/sigstore_bundle.js     |  103 +
 .../dist/__generated__/sigstore_common.js     |  596 +++++
 .../dist/__generated__/sigstore_rekor.js      |  137 ++
 .../dist/__generated__/sigstore_trustroot.js  |  284 +++
 .../__generated__/sigstore_verification.js    |  281 +++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 +
 .../@sigstore/protobuf-specs/package.json     |   35 +
 .../node_modules/@sigstore/sign/LICENSE       |  202 ++
 .../@sigstore/sign/dist/bundler/base.js       |   50 +
 .../@sigstore/sign/dist/bundler/bundle.js     |   81 +
 .../@sigstore/sign/dist/bundler/dsse.js       |   46 +
 .../@sigstore/sign/dist/bundler/index.js      |    7 +
 .../@sigstore/sign/dist/bundler/message.js    |   30 +
 .../node_modules/@sigstore/sign/dist/error.js |   39 +
 .../@sigstore/sign/dist/external/error.js     |   26 +
 .../@sigstore/sign/dist/external/fetch.js     |   98 +
 .../@sigstore/sign/dist/external/fulcio.js    |   41 +
 .../@sigstore/sign/dist/external/rekor.js     |   80 +
 .../@sigstore/sign/dist/external/tsa.js       |   38 +
 .../@sigstore/sign/dist/identity/ci.js        |   73 +
 .../@sigstore/sign/dist/identity/index.js     |   20 +
 .../@sigstore/sign/dist/identity/provider.js  |    2 +
 .../node_modules/@sigstore/sign/dist/index.js |   17 +
 .../@sigstore/sign/dist/signer/fulcio/ca.js   |   59 +
 .../sign/dist/signer/fulcio/ephemeral.js      |   45 +
 .../sign/dist/signer/fulcio/index.js          |   87 +
 .../@sigstore/sign/dist/signer/index.js       |   22 +
 .../@sigstore/sign/dist/signer/signer.js      |   17 +
 .../@sigstore/sign/dist/types/fetch.js        |    2 +
 .../@sigstore/sign/dist/util/index.js         |   59 +
 .../@sigstore/sign/dist/util/oidc.js          |   30 +
 .../@sigstore/sign/dist/util/ua.js            |   32 +
 .../@sigstore/sign/dist/witness/index.js      |   24 +
 .../sign/dist/witness/tlog/client.js          |   61 +
 .../@sigstore/sign/dist/witness/tlog/entry.js |  140 ++
 .../@sigstore/sign/dist/witness/tlog/index.js |   82 +
 .../@sigstore/sign/dist/witness/tsa/client.js |   46 +
 .../@sigstore/sign/dist/witness/tsa/index.js  |   44 +
 .../@sigstore/sign/dist/witness/witness.js    |    2 +
 .../node_modules/@sigstore/sign/package.json  |   46 +
 .../pacote/node_modules/@sigstore/tuf/LICENSE |  202 ++
 .../@sigstore/tuf/dist/appdata.js             |   43 +
 .../node_modules/@sigstore/tuf/dist/client.js |  113 +
 .../node_modules/@sigstore/tuf/dist/error.js  |   12 +
 .../node_modules/@sigstore/tuf/dist/index.js  |   56 +
 .../node_modules/@sigstore/tuf/dist/target.js |   79 +
 .../node_modules/@sigstore/tuf/package.json   |   41 +
 .../node_modules/@sigstore/tuf/seeds.json     |    1 +
 .../@sigstore/verify/dist/bundle/dsse.js      |   43 +
 .../@sigstore/verify/dist/bundle/index.js     |   57 +
 .../@sigstore/verify/dist/bundle/message.js   |   36 +
 .../@sigstore/verify/dist/error.js            |   32 +
 .../@sigstore/verify/dist/index.js            |   28 +
 .../@sigstore/verify/dist/key/certificate.js  |  212 ++
 .../@sigstore/verify/dist/key/index.js        |   67 +
 .../@sigstore/verify/dist/key/sct.js          |   78 +
 .../@sigstore/verify/dist/policy.js           |   24 +
 .../@sigstore/verify/dist/shared.types.js     |    2 +
 .../verify/dist/timestamp/checkpoint.js       |  157 ++
 .../@sigstore/verify/dist/timestamp/index.js  |   46 +
 .../@sigstore/verify/dist/timestamp/merkle.js |  104 +
 .../@sigstore/verify/dist/timestamp/set.js    |   60 +
 .../@sigstore/verify/dist/timestamp/tsa.js    |   63 +
 .../@sigstore/verify/dist/tlog/dsse.js        |   57 +
 .../verify/dist/tlog/hashedrekord.js          |   51 +
 .../@sigstore/verify/dist/tlog/index.js       |   47 +
 .../@sigstore/verify/dist/tlog/intoto.js      |   62 +
 .../@sigstore/verify/dist/trust/filter.js     |   23 +
 .../@sigstore/verify/dist/trust/index.js      |   86 +
 .../verify/dist/trust/trust.types.js          |    2 +
 .../@sigstore/verify/dist/verifier.js         |  143 ++
 .../@sigstore/verify/package.json             |   36 +
 .../pacote/node_modules/@tufjs/models/LICENSE |   21 +
 .../node_modules/@tufjs/models/dist/base.js   |   96 +
 .../@tufjs/models/dist/delegations.js         |  119 +
 .../node_modules/@tufjs/models/dist/error.js  |   27 +
 .../node_modules/@tufjs/models/dist/file.js   |  191 ++
 .../node_modules/@tufjs/models/dist/index.js  |   24 +
 .../node_modules/@tufjs/models/dist/key.js    |   90 +
 .../@tufjs/models/dist/metadata.js            |  165 ++
 .../node_modules/@tufjs/models/dist/role.js   |  310 +++
 .../node_modules/@tufjs/models/dist/root.js   |  119 +
 .../@tufjs/models/dist/signature.js           |   40 +
 .../@tufjs/models/dist/snapshot.js            |   72 +
 .../@tufjs/models/dist/targets.js             |   94 +
 .../@tufjs/models/dist/timestamp.js           |   59 +
 .../@tufjs/models/dist/utils/guard.js         |   32 +
 .../@tufjs/models/dist/utils/index.js         |   38 +
 .../@tufjs/models/dist/utils/key.js           |  142 ++
 .../@tufjs/models/dist/utils/oid.js           |   26 +
 .../@tufjs/models/dist/utils/types.js         |    2 +
 .../@tufjs/models/dist/utils/verify.js        |   13 +
 .../models/node_modules/minimatch/LICENSE     |   15 +
 .../dist/commonjs/assert-valid-pattern.js     |   14 +
 .../minimatch/dist/commonjs/ast.js            |  592 +++++
 .../dist/commonjs/brace-expressions.js        |  152 ++
 .../minimatch/dist/commonjs/escape.js         |   22 +
 .../minimatch/dist/commonjs/index.js          | 1017 ++++++++
 .../minimatch/dist/commonjs/package.json      |    3 +
 .../minimatch/dist/commonjs/unescape.js       |   24 +
 .../dist/esm/assert-valid-pattern.js          |   10 +
 .../node_modules/minimatch/dist/esm/ast.js    |  588 +++++
 .../minimatch/dist/esm/brace-expressions.js   |  148 ++
 .../node_modules/minimatch/dist/esm/escape.js |   18 +
 .../node_modules/minimatch/dist/esm/index.js  | 1001 ++++++++
 .../minimatch/dist/esm/package.json           |    3 +
 .../minimatch/dist/esm/unescape.js            |   20 +
 .../node_modules/minimatch/package.json       |   82 +
 .../node_modules/@tufjs/models/package.json   |   37 +
 .../pacote/node_modules/cacache/LICENSE.md    |   16 +
 .../node_modules/cacache/lib/content/path.js  |   29 +
 .../node_modules/cacache/lib/content/read.js  |  165 ++
 .../node_modules/cacache/lib/content/rm.js    |   18 +
 .../node_modules/cacache/lib/content/write.js |  206 ++
 .../node_modules/cacache/lib/entry-index.js   |  336 +++
 .../pacote/node_modules/cacache/lib/get.js    |  170 ++
 .../pacote/node_modules/cacache/lib/index.js  |   42 +
 .../node_modules/cacache/lib/memoization.js   |   72 +
 .../pacote/node_modules/cacache/lib/put.js    |   80 +
 .../pacote/node_modules/cacache/lib/rm.js     |   31 +
 .../node_modules/cacache/lib/util/glob.js     |    7 +
 .../cacache/lib/util/hash-to-segments.js      |    7 +
 .../node_modules/cacache/lib/util/tmp.js      |   26 +
 .../pacote/node_modules/cacache/lib/verify.js |  258 +++
 .../pacote/node_modules/cacache/package.json  |   82 +
 .../pacote/node_modules/chownr/LICENSE.md     |   63 +
 .../chownr/dist/commonjs/index.js             |   93 +
 .../chownr/dist/commonjs/package.json         |    3 +
 .../node_modules/chownr/dist/esm/index.js     |   85 +
 .../node_modules/chownr/dist/esm/package.json |    3 +
 .../pacote/node_modules/chownr/package.json   |   69 +
 node_modules/pacote/node_modules/glob/LICENSE |   15 +
 .../node_modules/glob/dist/commonjs/glob.js   |  247 ++
 .../glob/dist/commonjs/has-magic.js           |   27 +
 .../node_modules/glob/dist/commonjs/ignore.js |  119 +
 .../node_modules/glob/dist/commonjs/index.js  |   68 +
 .../glob/dist/commonjs/package.json           |    3 +
 .../glob/dist/commonjs/pattern.js             |  219 ++
 .../glob/dist/commonjs/processor.js           |  301 +++
 .../node_modules/glob/dist/commonjs/walker.js |  387 ++++
 .../node_modules/glob/dist/esm/bin.d.mts      |    3 +
 .../pacote/node_modules/glob/dist/esm/bin.mjs |  276 +++
 .../pacote/node_modules/glob/dist/esm/glob.js |  243 ++
 .../node_modules/glob/dist/esm/has-magic.js   |   23 +
 .../node_modules/glob/dist/esm/ignore.js      |  115 +
 .../node_modules/glob/dist/esm/index.js       |   55 +
 .../node_modules/glob/dist/esm/package.json   |    3 +
 .../node_modules/glob/dist/esm/pattern.js     |  215 ++
 .../node_modules/glob/dist/esm/processor.js   |  294 +++
 .../node_modules/glob/dist/esm/walker.js      |  381 +++
 .../pacote/node_modules/glob/package.json     |   97 +
 .../node_modules/hosted-git-info/LICENSE      |   13 +
 .../hosted-git-info/lib/from-url.js           |  122 +
 .../node_modules/hosted-git-info/lib/hosts.js |  231 ++
 .../node_modules/hosted-git-info/lib/index.js |  227 ++
 .../hosted-git-info/lib/parse-url.js          |   78 +
 .../node_modules/hosted-git-info/package.json |   61 +
 .../pacote/node_modules/jackspeak/LICENSE.md  |   55 +
 .../jackspeak/dist/commonjs/index.js          |  947 ++++++++
 .../jackspeak/dist/commonjs/package.json      |    3 +
 .../node_modules/jackspeak/dist/esm/index.js  |  936 ++++++++
 .../jackspeak/dist/esm/package.json           |    3 +
 .../node_modules/jackspeak/package.json       |   94 +
 .../pacote/node_modules/lru-cache/LICENSE     |   15 +
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    3 +
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 +++++++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    3 +
 .../node_modules/lru-cache/package.json       |  113 +
 .../node_modules/make-fetch-happen/LICENSE    |   16 +
 .../make-fetch-happen/lib/cache/entry.js      |  471 ++++
 .../make-fetch-happen/lib/cache/errors.js     |   11 +
 .../make-fetch-happen/lib/cache/index.js      |   49 +
 .../make-fetch-happen/lib/cache/key.js        |   17 +
 .../make-fetch-happen/lib/cache/policy.js     |  161 ++
 .../make-fetch-happen/lib/fetch.js            |  118 +
 .../make-fetch-happen/lib/index.js            |   41 +
 .../make-fetch-happen/lib/options.js          |   59 +
 .../make-fetch-happen/lib/pipeline.js         |   41 +
 .../make-fetch-happen/lib/remote.js           |  132 ++
 .../make-fetch-happen/package.json            |   74 +
 .../pacote/node_modules/minimatch/LICENSE     |   15 +
 .../dist/commonjs/assert-valid-pattern.js     |   14 +
 .../minimatch/dist/commonjs/ast.js            |  592 +++++
 .../dist/commonjs/brace-expressions.js        |  152 ++
 .../minimatch/dist/commonjs/escape.js         |   22 +
 .../minimatch/dist/commonjs/index.js          | 1014 ++++++++
 .../minimatch/dist/commonjs/package.json      |    3 +
 .../minimatch/dist/commonjs/unescape.js       |   24 +
 .../dist/esm/assert-valid-pattern.js          |   10 +
 .../node_modules/minimatch/dist/esm/ast.js    |  588 +++++
 .../minimatch/dist/esm/brace-expressions.js   |  148 ++
 .../node_modules/minimatch/dist/esm/escape.js |   18 +
 .../node_modules/minimatch/dist/esm/index.js  | 1001 ++++++++
 .../minimatch/dist/esm/package.json           |    3 +
 .../minimatch/dist/esm/unescape.js            |   20 +
 .../node_modules/minimatch/package.json       |   79 +
 .../pacote/node_modules/minizlib/LICENSE      |   26 +
 .../minizlib/dist/commonjs/constants.js       |  123 +
 .../minizlib/dist/commonjs/index.js           |  392 ++++
 .../minizlib/dist/commonjs/package.json       |    3 +
 .../minizlib/dist/esm/constants.js            |  117 +
 .../node_modules/minizlib/dist/esm/index.js   |  340 +++
 .../minizlib/dist/esm/package.json            |    3 +
 .../pacote/node_modules/minizlib/package.json |   80 +
 .../pacote/node_modules/mkdirp/LICENSE        |   21 +
 .../node_modules/mkdirp/dist/cjs/package.json |   91 +
 .../node_modules/mkdirp/dist/cjs/src/bin.js   |   80 +
 .../mkdirp/dist/cjs/src/find-made.js          |   35 +
 .../node_modules/mkdirp/dist/cjs/src/index.js |   53 +
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |   79 +
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |   50 +
 .../mkdirp/dist/cjs/src/opts-arg.js           |   38 +
 .../mkdirp/dist/cjs/src/path-arg.js           |   28 +
 .../mkdirp/dist/cjs/src/use-native.js         |   17 +
 .../node_modules/mkdirp/dist/mjs/find-made.js |   30 +
 .../node_modules/mkdirp/dist/mjs/index.js     |   43 +
 .../mkdirp/dist/mjs/mkdirp-manual.js          |   75 +
 .../mkdirp/dist/mjs/mkdirp-native.js          |   46 +
 .../node_modules/mkdirp/dist/mjs/opts-arg.js  |   34 +
 .../node_modules/mkdirp/dist/mjs/package.json |    3 +
 .../node_modules/mkdirp/dist/mjs/path-arg.js  |   24 +
 .../mkdirp/dist/mjs/use-native.js             |   14 +
 .../pacote/node_modules/mkdirp/package.json   |   91 +
 .../pacote/node_modules/negotiator/HISTORY.md |  114 +
 .../pacote/node_modules/negotiator/LICENSE    |   24 +
 .../pacote/node_modules/negotiator/index.js   |   83 +
 .../node_modules/negotiator/lib/charset.js    |  169 ++
 .../node_modules/negotiator/lib/encoding.js   |  205 ++
 .../node_modules/negotiator/lib/language.js   |  179 ++
 .../node_modules/negotiator/lib/mediaType.js  |  294 +++
 .../node_modules/negotiator/package.json      |   43 +
 .../node_modules/npm-package-arg/LICENSE      |   15 +
 .../node_modules/npm-package-arg/lib/npa.js   |  481 ++++
 .../package.json                              |   68 +-
 .../node_modules/npm-pick-manifest/LICENSE.md |   16 +
 .../npm-pick-manifest/lib/index.js            |  219 ++
 .../npm-pick-manifest/package.json            |   58 +
 .../LICENSE => npm-registry-fetch/LICENSE.md} |    4 +-
 .../npm-registry-fetch/lib/auth.js            |  181 ++
 .../npm-registry-fetch/lib/check-response.js  |  108 +
 .../npm-registry-fetch/lib/default-opts.js    |   19 +
 .../npm-registry-fetch/lib/errors.js          |   80 +
 .../npm-registry-fetch/lib/index.js           |  247 ++
 .../npm-registry-fetch/lib/json-stream.js     |  223 ++
 .../npm-registry-fetch/package.json           |   68 +
 .../node_modules/path-scurry/LICENSE.md       |   55 +
 .../path-scurry/dist/commonjs/index.js        | 2016 ++++++++++++++++
 .../path-scurry/dist/commonjs/package.json    |    3 +
 .../path-scurry/dist/esm/index.js             | 1981 ++++++++++++++++
 .../path-scurry/dist/esm/package.json         |    3 +
 .../node_modules/path-scurry/package.json     |   88 +
 .../pacote/node_modules/sigstore/LICENSE      |  202 ++
 .../node_modules/sigstore/dist/config.js      |  120 +
 .../node_modules/sigstore/dist/index.js       |   34 +
 .../node_modules/sigstore/dist/sigstore.js    |  112 +
 .../pacote/node_modules/sigstore/package.json |   47 +
 node_modules/pacote/node_modules/tar/LICENSE  |   15 +
 .../node_modules/tar/dist/commonjs/create.js  |   83 +
 .../tar/dist/commonjs/cwd-error.js            |   18 +
 .../node_modules/tar/dist/commonjs/extract.js |   78 +
 .../tar/dist/commonjs/get-write-flag.js       |   29 +
 .../node_modules/tar/dist/commonjs/header.js  |  306 +++
 .../node_modules/tar/dist/commonjs/index.js   |   54 +
 .../tar/dist/commonjs/large-numbers.js        |   99 +
 .../node_modules/tar/dist/commonjs/list.js    |  136 ++
 .../tar/dist/commonjs/make-command.js         |   61 +
 .../node_modules/tar/dist/commonjs/mkdir.js   |  209 ++
 .../tar/dist/commonjs/mode-fix.js             |   29 +
 .../tar/dist/commonjs/normalize-unicode.js    |   17 +
 .../dist/commonjs/normalize-windows-path.js   |   12 +
 .../node_modules/tar/dist/commonjs/options.js |   66 +
 .../node_modules/tar/dist/commonjs/pack.js    |  477 ++++
 .../tar/dist/commonjs/package.json            |    3 +
 .../node_modules/tar/dist/commonjs/parse.js   |  599 +++++
 .../tar/dist/commonjs/path-reservations.js    |  170 ++
 .../node_modules/tar/dist/commonjs/pax.js     |  158 ++
 .../tar/dist/commonjs/read-entry.js           |  140 ++
 .../node_modules/tar/dist/commonjs/replace.js |  231 ++
 .../tar/dist/commonjs/strip-absolute-path.js  |   29 +
 .../dist/commonjs/strip-trailing-slashes.js   |   18 +
 .../tar/dist/commonjs/symlink-error.js        |   19 +
 .../node_modules/tar/dist/commonjs/types.js   |   50 +
 .../node_modules/tar/dist/commonjs/unpack.js  |  919 ++++++++
 .../node_modules/tar/dist/commonjs/update.js  |   33 +
 .../tar/dist/commonjs/warn-method.js          |   31 +
 .../tar/dist/commonjs/winchars.js             |   14 +
 .../tar/dist/commonjs/write-entry.js          |  689 ++++++
 .../node_modules/tar/dist/esm/create.js       |   77 +
 .../node_modules/tar/dist/esm/cwd-error.js    |   14 +
 .../node_modules/tar/dist/esm/extract.js      |   49 +
 .../tar/dist/esm/get-write-flag.js            |   23 +
 .../node_modules/tar/dist/esm/header.js       |  279 +++
 .../pacote/node_modules/tar/dist/esm/index.js |   20 +
 .../tar/dist/esm/large-numbers.js             |   94 +
 .../pacote/node_modules/tar/dist/esm/list.js  |  106 +
 .../node_modules/tar/dist/esm/make-command.js |   57 +
 .../pacote/node_modules/tar/dist/esm/mkdir.js |  201 ++
 .../node_modules/tar/dist/esm/mode-fix.js     |   25 +
 .../tar/dist/esm/normalize-unicode.js         |   13 +
 .../tar/dist/esm/normalize-windows-path.js    |    9 +
 .../node_modules/tar/dist/esm/options.js      |   54 +
 .../pacote/node_modules/tar/dist/esm/pack.js  |  445 ++++
 .../node_modules/tar/dist/esm/package.json    |    3 +
 .../pacote/node_modules/tar/dist/esm/parse.js |  595 +++++
 .../tar/dist/esm/path-reservations.js         |  166 ++
 .../pacote/node_modules/tar/dist/esm/pax.js   |  154 ++
 .../node_modules/tar/dist/esm/read-entry.js   |  136 ++
 .../node_modules/tar/dist/esm/replace.js      |  225 ++
 .../tar/dist/esm/strip-absolute-path.js       |   25 +
 .../tar/dist/esm/strip-trailing-slashes.js    |   14 +
 .../tar/dist/esm/symlink-error.js             |   15 +
 .../pacote/node_modules/tar/dist/esm/types.js |   45 +
 .../node_modules/tar/dist/esm/unpack.js       |  888 +++++++
 .../node_modules/tar/dist/esm/update.js       |   30 +
 .../node_modules/tar/dist/esm/warn-method.js  |   27 +
 .../node_modules/tar/dist/esm/winchars.js     |    9 +
 .../node_modules/tar/dist/esm/write-entry.js  |  657 ++++++
 .../pacote/node_modules/tar/package.json      |  325 +++
 .../pacote/node_modules/tuf-js/LICENSE        |   21 +
 .../pacote/node_modules/tuf-js/dist/config.js |   15 +
 .../pacote/node_modules/tuf-js/dist/error.js  |   49 +
 .../node_modules/tuf-js/dist/fetcher.js       |   86 +
 .../pacote/node_modules/tuf-js/dist/index.js  |    9 +
 .../pacote/node_modules/tuf-js/dist/store.js  |  219 ++
 .../node_modules/tuf-js/dist/updater.js       |  368 +++
 .../node_modules/tuf-js/dist/utils/tmpfile.js |   25 +
 .../node_modules/tuf-js/dist/utils/url.js     |   13 +
 .../pacote/node_modules/tuf-js/package.json   |   43 +
 .../pacote/node_modules/yallist/LICENSE.md    |   63 +
 .../yallist/dist/commonjs/index.js            |  384 ++++
 .../yallist/dist/commonjs/package.json        |    3 +
 .../node_modules/yallist/dist/esm/index.js    |  379 +++
 .../yallist/dist/esm/package.json             |    3 +
 .../pacote/node_modules/yallist/package.json  |   68 +
 node_modules/pacote/package.json              |   26 +-
 package-lock.json                             |  535 ++++-
 package.json                                  |    2 +-
 workspaces/arborist/package.json              |    2 +-
 workspaces/libnpmdiff/package.json            |    2 +-
 workspaces/libnpmexec/package.json            |    2 +-
 workspaces/libnpmpack/package.json            |    2 +-
 446 files changed, 58262 insertions(+), 1519 deletions(-)
 rename node_modules/{ => npm-packlist/node_modules}/ignore-walk/LICENSE (100%)
 rename node_modules/{ => npm-packlist/node_modules}/ignore-walk/lib/index.js (100%)
 rename node_modules/{ => npm-packlist/node_modules}/ignore-walk/package.json (90%)
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/LICENSE
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/ast.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/brace-expressions.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/escape.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/index.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/package.json
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/unescape.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/ast.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/brace-expressions.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/escape.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/index.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/package.json
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/dist/esm/unescape.js
 create mode 100644 node_modules/npm-packlist/node_modules/minimatch/package.json
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/find.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/which.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/git/package.json
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
 create mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
 create mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
 create mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/package.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/package.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/json.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/oid.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/pem.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/stream.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/core/package.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/protobuf-specs/package.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/sign/package.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/package.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/tuf/seeds.json
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/error.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js
 create mode 100644 node_modules/pacote/node_modules/@sigstore/verify/package.json
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/base.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/error.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/file.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/key.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/role.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/root.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/signature.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/targets.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/LICENSE
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json
 create mode 100644 node_modules/pacote/node_modules/@tufjs/models/package.json
 create mode 100644 node_modules/pacote/node_modules/cacache/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/path.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/read.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/rm.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/content/write.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/entry-index.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/get.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/memoization.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/put.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/rm.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/glob.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/util/tmp.js
 create mode 100644 node_modules/pacote/node_modules/cacache/lib/verify.js
 create mode 100644 node_modules/pacote/node_modules/cacache/package.json
 create mode 100644 node_modules/pacote/node_modules/chownr/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/chownr/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/chownr/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/chownr/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/chownr/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/chownr/package.json
 create mode 100644 node_modules/pacote/node_modules/glob/LICENSE
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/glob.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/has-magic.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/ignore.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/pattern.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/processor.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/commonjs/walker.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/bin.d.mts
 create mode 100755 node_modules/pacote/node_modules/glob/dist/esm/bin.mjs
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/glob.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/has-magic.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/ignore.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/pattern.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/processor.js
 create mode 100644 node_modules/pacote/node_modules/glob/dist/esm/walker.js
 create mode 100644 node_modules/pacote/node_modules/glob/package.json
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/pacote/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/pacote/node_modules/jackspeak/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/jackspeak/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/jackspeak/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/jackspeak/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/jackspeak/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/jackspeak/package.json
 create mode 100644 node_modules/pacote/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js
 create mode 100644 node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js
 create mode 100644 node_modules/pacote/node_modules/lru-cache/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/lru-cache/package.json
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/LICENSE
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/options.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js
 create mode 100644 node_modules/pacote/node_modules/make-fetch-happen/package.json
 create mode 100644 node_modules/pacote/node_modules/minimatch/LICENSE
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/ast.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/escape.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js
 create mode 100644 node_modules/pacote/node_modules/minimatch/package.json
 create mode 100644 node_modules/pacote/node_modules/minizlib/LICENSE
 create mode 100644 node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js
 create mode 100644 node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/minizlib/dist/esm/constants.js
 create mode 100644 node_modules/pacote/node_modules/minizlib/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/minizlib/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/minizlib/package.json
 create mode 100644 node_modules/pacote/node_modules/mkdirp/LICENSE
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json
 create mode 100755 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js
 create mode 100644 node_modules/pacote/node_modules/mkdirp/package.json
 create mode 100644 node_modules/pacote/node_modules/negotiator/HISTORY.md
 create mode 100644 node_modules/pacote/node_modules/negotiator/LICENSE
 create mode 100644 node_modules/pacote/node_modules/negotiator/index.js
 create mode 100644 node_modules/pacote/node_modules/negotiator/lib/charset.js
 create mode 100644 node_modules/pacote/node_modules/negotiator/lib/encoding.js
 create mode 100644 node_modules/pacote/node_modules/negotiator/lib/language.js
 create mode 100644 node_modules/pacote/node_modules/negotiator/lib/mediaType.js
 create mode 100644 node_modules/pacote/node_modules/negotiator/package.json
 create mode 100644 node_modules/pacote/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
 rename node_modules/pacote/node_modules/{@npmcli/package-json => npm-package-arg}/package.json (54%)
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/package.json
 rename node_modules/pacote/node_modules/{@npmcli/package-json/LICENSE => npm-registry-fetch/LICENSE.md} (87%)
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js
 create mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/package.json
 create mode 100644 node_modules/pacote/node_modules/path-scurry/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/path-scurry/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/path-scurry/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/path-scurry/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/path-scurry/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/path-scurry/package.json
 create mode 100644 node_modules/pacote/node_modules/sigstore/LICENSE
 create mode 100644 node_modules/pacote/node_modules/sigstore/dist/config.js
 create mode 100644 node_modules/pacote/node_modules/sigstore/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/sigstore/dist/sigstore.js
 create mode 100644 node_modules/pacote/node_modules/sigstore/package.json
 create mode 100644 node_modules/pacote/node_modules/tar/LICENSE
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/create.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/extract.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/header.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/list.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/options.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/pack.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/parse.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/pax.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/replace.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/types.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/update.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/create.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/extract.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/header.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/list.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/make-command.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/mkdir.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/options.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/pack.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/parse.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/pax.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/read-entry.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/replace.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/types.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/unpack.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/update.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/warn-method.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/winchars.js
 create mode 100644 node_modules/pacote/node_modules/tar/dist/esm/write-entry.js
 create mode 100644 node_modules/pacote/node_modules/tar/package.json
 create mode 100644 node_modules/pacote/node_modules/tuf-js/LICENSE
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/config.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/error.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/fetcher.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/index.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/store.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/updater.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/dist/utils/url.js
 create mode 100644 node_modules/pacote/node_modules/tuf-js/package.json
 create mode 100644 node_modules/pacote/node_modules/yallist/LICENSE.md
 create mode 100644 node_modules/pacote/node_modules/yallist/dist/commonjs/index.js
 create mode 100644 node_modules/pacote/node_modules/yallist/dist/commonjs/package.json
 create mode 100644 node_modules/pacote/node_modules/yallist/dist/esm/index.js
 create mode 100644 node_modules/pacote/node_modules/yallist/dist/esm/package.json
 create mode 100644 node_modules/pacote/node_modules/yallist/package.json

diff --git a/mock-registry/package.json b/mock-registry/package.json
index 3f43061223f52..5e854daa47ff9 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -52,7 +52,7 @@
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.3",
     "npm-package-arg": "^12.0.0",
-    "pacote": "^21.0.0",
+    "pacote": "^21.0.2",
     "tap": "^16.3.8"
   }
 }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index dee02e20a8142..1477ba9c79d32 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -121,7 +121,6 @@
 !/http-proxy-agent
 !/https-proxy-agent
 !/iconv-lite
-!/ignore-walk
 !/imurmurhash
 !/ini
 !/init-package-json
@@ -189,6 +188,10 @@
 !/npm-normalize-package-bin
 !/npm-package-arg
 !/npm-packlist
+!/npm-packlist/node_modules/
+/npm-packlist/node_modules/*
+!/npm-packlist/node_modules/ignore-walk
+!/npm-packlist/node_modules/minimatch
 !/npm-pick-manifest
 !/npm-profile
 !/npm-registry-fetch
@@ -203,7 +206,41 @@
 /pacote/node_modules/*
 !/pacote/node_modules/@npmcli/
 /pacote/node_modules/@npmcli/*
-!/pacote/node_modules/@npmcli/package-json
+!/pacote/node_modules/@npmcli/git
+!/pacote/node_modules/@npmcli/run-script
+!/pacote/node_modules/@sigstore/
+/pacote/node_modules/@sigstore/*
+!/pacote/node_modules/@sigstore/bundle
+!/pacote/node_modules/@sigstore/core
+!/pacote/node_modules/@sigstore/protobuf-specs
+!/pacote/node_modules/@sigstore/sign
+!/pacote/node_modules/@sigstore/tuf
+!/pacote/node_modules/@sigstore/verify
+!/pacote/node_modules/@tufjs/
+/pacote/node_modules/@tufjs/*
+!/pacote/node_modules/@tufjs/models
+!/pacote/node_modules/@tufjs/models/node_modules/
+/pacote/node_modules/@tufjs/models/node_modules/*
+!/pacote/node_modules/@tufjs/models/node_modules/minimatch
+!/pacote/node_modules/cacache
+!/pacote/node_modules/chownr
+!/pacote/node_modules/glob
+!/pacote/node_modules/hosted-git-info
+!/pacote/node_modules/jackspeak
+!/pacote/node_modules/lru-cache
+!/pacote/node_modules/make-fetch-happen
+!/pacote/node_modules/minimatch
+!/pacote/node_modules/minizlib
+!/pacote/node_modules/mkdirp
+!/pacote/node_modules/negotiator
+!/pacote/node_modules/npm-package-arg
+!/pacote/node_modules/npm-pick-manifest
+!/pacote/node_modules/npm-registry-fetch
+!/pacote/node_modules/path-scurry
+!/pacote/node_modules/sigstore
+!/pacote/node_modules/tar
+!/pacote/node_modules/tuf-js
+!/pacote/node_modules/yallist
 !/parse-conflict-json
 !/path-key
 !/path-scurry
diff --git a/node_modules/ignore-walk/LICENSE b/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
similarity index 100%
rename from node_modules/ignore-walk/LICENSE
rename to node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
diff --git a/node_modules/ignore-walk/lib/index.js b/node_modules/npm-packlist/node_modules/ignore-walk/lib/index.js
similarity index 100%
rename from node_modules/ignore-walk/lib/index.js
rename to node_modules/npm-packlist/node_modules/ignore-walk/lib/index.js
diff --git a/node_modules/ignore-walk/package.json b/node_modules/npm-packlist/node_modules/ignore-walk/package.json
similarity index 90%
rename from node_modules/ignore-walk/package.json
rename to node_modules/npm-packlist/node_modules/ignore-walk/package.json
index 125fc071939db..ea640d5dbc1fa 100644
--- a/node_modules/ignore-walk/package.json
+++ b/node_modules/npm-packlist/node_modules/ignore-walk/package.json
@@ -1,11 +1,11 @@
 {
   "name": "ignore-walk",
-  "version": "7.0.0",
+  "version": "8.0.0",
   "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.",
   "main": "lib/index.js",
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.24.3",
     "mutate-fs": "^2.1.1",
     "tap": "^16.0.1"
   },
@@ -39,7 +39,7 @@
     "lib/"
   ],
   "dependencies": {
-    "minimatch": "^9.0.0"
+    "minimatch": "^10.0.3"
   },
   "tap": {
     "test-env": "LC_ALL=sk",
@@ -53,11 +53,11 @@
     ]
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.24.3",
     "content": "scripts/template-oss",
     "publish": "true"
   }
diff --git a/node_modules/npm-packlist/node_modules/minimatch/LICENSE b/node_modules/npm-packlist/node_modules/minimatch/LICENSE
new file mode 100644
index 0000000000000..1493534e60dce
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
new file mode 100644
index 0000000000000..5fc86bbd0116c
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertValidPattern = void 0;
+const MAX_PATTERN_LENGTH = 1024 * 64;
+const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+exports.assertValidPattern = assertValidPattern;
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/ast.js
new file mode 100644
index 0000000000000..7b2109625eaeb
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/ast.js
@@ -0,0 +1,592 @@
+"use strict";
+// parse a single path portion
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AST = void 0;
+const brace_expressions_js_1 = require("./brace-expressions.js");
+const unescape_js_1 = require("./unescape.js");
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of an extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                (0, unescape_js_1.unescape)(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            (0, unescape_js_1.unescape)(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
+    }
+}
+exports.AST = AST;
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/brace-expressions.js
new file mode 100644
index 0000000000000..0e13eefc4cfee
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/brace-expressions.js
@@ -0,0 +1,152 @@
+"use strict";
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseClass = void 0;
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class just no good.
+const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+exports.parseClass = parseClass;
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/escape.js
new file mode 100644
index 0000000000000..02a4f8a8e0a58
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/escape.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.escape = void 0;
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+exports.escape = escape;
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/index.js b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/index.js
new file mode 100644
index 0000000000000..f58fb8616aa9a
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/index.js
@@ -0,0 +1,1014 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
+const brace_expansion_1 = require("@isaacs/brace-expansion");
+const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
+const ast_js_1 = require("./ast.js");
+const escape_js_1 = require("./escape.js");
+const unescape_js_1 = require("./unescape.js");
+const minimatch = (p, pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+exports.minimatch = minimatch;
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+exports.minimatch.sep = exports.sep;
+exports.GLOBSTAR = Symbol('globstar **');
+exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
+exports.filter = filter;
+exports.minimatch.filter = exports.filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return exports.minimatch;
+    }
+    const orig = exports.minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: exports.GLOBSTAR,
+    });
+};
+exports.defaults = defaults;
+exports.minimatch.defaults = exports.defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+const braceExpand = (pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // Thanks to Yeting Li  for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return (0, brace_expansion_1.expand)(pattern);
+};
+exports.braceExpand = braceExpand;
+exports.minimatch.braceExpand = exports.braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+exports.makeRe = makeRe;
+exports.minimatch.makeRe = exports.makeRe;
+const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+exports.match = match;
+exports.minimatch.match = exports.match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // dont' need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/package.json b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/ast.js b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 0000000000000..2d2bced6533de
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everthing that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something,but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
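
A minimal sketch of how the AST class in the vendored ast.js above is typically consumed (illustrative only; AST is re-exported from the minimatch main entry, and toMMPattern() returns a plain string for non-magic patterns or an anchored RegExp otherwise):

    import { AST } from 'minimatch'
    // parse a glob into an AST, then compile it to a matcher
    const ast = AST.fromGlob('src/+(foo|bar)/*.js', {})
    const re = ast.toMMPattern()
    console.log(ast.hasMagic)          // true -- extglobs are always magic
    console.log(re instanceof RegExp)  // true, anchored with ^...$
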
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
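
For reference, a rough sketch of parseClass from the brace-expressions.js file above, as it is called from ast.js (the returned tuple is [regexp source, /u flag needed, characters consumed, has magic]):

    import { parseClass } from './brace-expressions.js'
    // the position argument points at the opening '[' of the class
    parseClass('[a-z]', 0)        // ['[a-z]', false, 5, true]
    parseClass('[[:digit:]]', 0)  // ['[\p{Nd}]', true, 11, true] -- needs the /u flag
    parseClass('[x]', 0)          // ['x', false, 3, false] -- single char, not magic
    parseClass('[abc', 0)         // ['', false, 0, false] -- unterminated, treated as literal
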
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/escape.js b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
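
A quick illustration of the two escaping modes implemented in escape.js above (default backslash escaping vs. windowsPathsNoEscape character-class wrapping):

    import { escape } from './escape.js'
    console.log(escape('what?.txt'))
    // what\?.txt   -- the magic char is backslash-escaped
    console.log(escape('what?.txt', { windowsPathsNoEscape: true }))
    // what[?].txt  -- wrapped in [] instead, since \ is a path separator here
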
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/index.js b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 0000000000000..790d6c02a2f22
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1001 @@
+import { expand } from '@isaacs/brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // dont' need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
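The ESM build above exposes the same matching API as the CommonJS entry point declared in the package.json further down; a minimal usage sketch (the paths and patterns are illustrative, not taken from this patch):

  const { minimatch, Minimatch } = require('minimatch')

  // One-shot match: a globstar spans zero or more path segments.
  minimatch('src/index.js', 'src/**/*.js')          // true
  // Dotfiles are not matched unless { dot: true } is passed.
  minimatch('.env', '*')                            // false
  minimatch('.env', '*', { dot: true })             // true

  // Reusable matcher; nocase folds case on both pattern and path.
  const mm = new Minimatch('lib/**/*.test.js', { nocase: true })
  mm.match('LIB/foo/bar.TEST.js')                   // true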
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/package.json b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/unescape.js b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/npm-packlist/node_modules/minimatch/package.json b/node_modules/npm-packlist/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..bfa2423f50b5e
--- /dev/null
+++ b/node_modules/npm-packlist/node_modules/minimatch/package.json
@@ -0,0 +1,79 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "10.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.2",
+    "@types/node": "^24.0.0",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.3.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js",
+  "dependencies": {
+    "@isaacs/brace-expansion": "^5.0.0"
+  }
+}
diff --git a/node_modules/npm-packlist/package.json b/node_modules/npm-packlist/package.json
index b25864612030f..66212c9ba4240 100644
--- a/node_modules/npm-packlist/package.json
+++ b/node_modules/npm-packlist/package.json
@@ -1,13 +1,13 @@
 {
   "name": "npm-packlist",
-  "version": "10.0.0",
+  "version": "10.0.1",
   "description": "Get a list of the files to add from a folder into an npm package",
   "directories": {
     "test": "test"
   },
   "main": "lib/index.js",
   "dependencies": {
-    "ignore-walk": "^7.0.0"
+    "ignore-walk": "^8.0.0"
   },
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -16,9 +16,9 @@
     "lib/"
   ],
   "devDependencies": {
-    "@npmcli/arborist": "^8.0.0",
+    "@npmcli/arborist": "^9.0.0",
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.23.4",
+    "@npmcli/template-oss": "4.25.0",
     "mutate-fs": "^2.1.1",
     "tap": "^16.0.1"
   },
@@ -55,7 +55,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.25.0",
     "publish": true
   }
 }
diff --git a/node_modules/pacote/node_modules/@npmcli/git/LICENSE b/node_modules/pacote/node_modules/@npmcli/git/LICENSE
new file mode 100644
index 0000000000000..8f90f96f4c6c5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
+OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
+DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
+SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
new file mode 100644
index 0000000000000..e25a4d1426821
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
@@ -0,0 +1,172 @@
+// The goal here is to minimize both git workload and
+// the number of refs we download over the network.
+//
+// Every method ends up with the checked out working dir
+// at the specified ref, and resolves with the git sha.
+
+// Only certain whitelisted hosts get shallow cloning.
+// Many hosts (including GHE) don't always support it.
+// A failed shallow fetch takes a LOT longer than a full
+// fetch in most cases, so we skip it entirely.
+// Set opts.gitShallow = true/false to force this behavior
+// one way or the other.
+const shallowHosts = new Set([
+  'github.com',
+  'gist.github.com',
+  'gitlab.com',
+  'bitbucket.com',
+  'bitbucket.org',
+])
+// we have to use url.parse until we add the same shim that hosted-git-info has
+// to handle scp:// urls
+const { parse } = require('url') // eslint-disable-line node/no-deprecated-api
+const path = require('path')
+
+const getRevs = require('./revs.js')
+const spawn = require('./spawn.js')
+const { isWindows } = require('./utils.js')
+
+const pickManifest = require('npm-pick-manifest')
+const fs = require('fs/promises')
+
+module.exports = (repo, ref = 'HEAD', target = null, opts = {}) =>
+  getRevs(repo, opts).then(revs => clone(
+    repo,
+    revs,
+    ref,
+    resolveRef(revs, ref, opts),
+    target || defaultTarget(repo, opts.cwd),
+    opts
+  ))
+
+const maybeShallow = (repo, opts) => {
+  if (opts.gitShallow === false || opts.gitShallow) {
+    return opts.gitShallow
+  }
+  return shallowHosts.has(parse(repo).host)
+}
+
+const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) =>
+  path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, '')))
+
+const clone = (repo, revs, ref, revDoc, target, opts) => {
+  if (!revDoc) {
+    return unresolved(repo, ref, target, opts)
+  }
+  if (revDoc.sha === revs.refs.HEAD.sha) {
+    return plain(repo, revDoc, target, opts)
+  }
+  if (revDoc.type === 'tag' || revDoc.type === 'branch') {
+    return branch(repo, revDoc, target, opts)
+  }
+  return other(repo, revDoc, target, opts)
+}
+
+const resolveRef = (revs, ref, opts) => {
+  const { spec = {} } = opts
+  ref = spec.gitCommittish || ref
+  /* istanbul ignore next - will fail anyway, can't pull */
+  if (!revs) {
+    return null
+  }
+  if (spec.gitRange) {
+    return pickManifest(revs, spec.gitRange, opts)
+  }
+  if (!ref) {
+    return revs.refs.HEAD
+  }
+  if (revs.refs[ref]) {
+    return revs.refs[ref]
+  }
+  if (revs.shas[ref]) {
+    return revs.refs[revs.shas[ref][0]]
+  }
+  return null
+}
+
+// pull request or some other kind of advertised ref
+const other = (repo, revDoc, target, opts) => {
+  const shallow = maybeShallow(repo, opts)
+
+  const fetchOrigin = ['fetch', 'origin', revDoc.rawRef]
+    .concat(shallow ? ['--depth=1'] : [])
+
+  const git = (args) => spawn(args, { ...opts, cwd: target })
+  return fs.mkdir(target, { recursive: true })
+    .then(() => git(['init']))
+    .then(() => isWindows(opts)
+      ? git(['config', '--local', '--add', 'core.longpaths', 'true'])
+      : null)
+    .then(() => git(['remote', 'add', 'origin', repo]))
+    .then(() => git(fetchOrigin))
+    .then(() => git(['checkout', revDoc.sha]))
+    .then(() => updateSubmodules(target, opts))
+    .then(() => revDoc.sha)
+}
+
+// tag or branches.  use -b
+const branch = (repo, revDoc, target, opts) => {
+  const args = [
+    'clone',
+    '-b',
+    revDoc.ref,
+    repo,
+    target,
+    '--recurse-submodules',
+  ]
+  if (maybeShallow(repo, opts)) {
+    args.push('--depth=1')
+  }
+  if (isWindows(opts)) {
+    args.push('--config', 'core.longpaths=true')
+  }
+  return spawn(args, opts).then(() => revDoc.sha)
+}
+
+// just the head.  clone it
+const plain = (repo, revDoc, target, opts) => {
+  const args = [
+    'clone',
+    repo,
+    target,
+    '--recurse-submodules',
+  ]
+  if (maybeShallow(repo, opts)) {
+    args.push('--depth=1')
+  }
+  if (isWindows(opts)) {
+    args.push('--config', 'core.longpaths=true')
+  }
+  return spawn(args, opts).then(() => revDoc.sha)
+}
+
+const updateSubmodules = async (target, opts) => {
+  const hasSubmodules = await fs.stat(`${target}/.gitmodules`)
+    .then(() => true)
+    .catch(() => false)
+  if (!hasSubmodules) {
+    return null
+  }
+  return spawn([
+    'submodule',
+    'update',
+    '-q',
+    '--init',
+    '--recursive',
+  ], { ...opts, cwd: target })
+}
+
+const unresolved = (repo, ref, target, opts) => {
+  // can't do this one shallowly, because the ref isn't advertised
+  // but we can avoid checking out the working dir twice, at least
+  const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
+  const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
+  const git = (args) => spawn(args, { ...opts, cwd: target })
+  return fs.mkdir(target, { recursive: true })
+    .then(() => git(cloneArgs.concat(lp)))
+    .then(() => git(['init']))
+    .then(() => git(['checkout', ref]))
+    .then(() => updateSubmodules(target, opts))
+    .then(() => git(['rev-parse', '--revs-only', 'HEAD']))
+    .then(({ stdout }) => stdout.trim())
+}
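For reference, the clone entry point above is re-exported from the package index added later in this patch; a hedged usage sketch (the repository URL, ref, and target directory are examples, not part of the patch):

  const { clone } = require('@npmcli/git')

  // Clone HEAD into ./git (the basename of the repo URL), resolving with the
  // checked-out sha.
  clone('https://github.com/npm/git.git')
    .then(sha => console.log('checked out', sha))

  // Pin a tag into an explicit target and force a full (non-shallow) fetch.
  clone('https://github.com/npm/git.git', 'v7.0.0', '/tmp/npm-git', { gitShallow: false })
    .then(sha => console.log('checked out', sha))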
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
new file mode 100644
index 0000000000000..3ceaa45811669
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
@@ -0,0 +1,36 @@
+
+const maxRetry = 3
+
+class GitError extends Error {
+  shouldRetry () {
+    return false
+  }
+}
+
+class GitConnectionError extends GitError {
+  constructor () {
+    super('A git connection error occurred')
+  }
+
+  shouldRetry (number) {
+    return number < maxRetry
+  }
+}
+
+class GitPathspecError extends GitError {
+  constructor () {
+    super('The git reference could not be found')
+  }
+}
+
+class GitUnknownError extends GitError {
+  constructor () {
+    super('An unknown git error occurred')
+  }
+}
+
+module.exports = {
+  GitConnectionError,
+  GitPathspecError,
+  GitUnknownError,
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js
new file mode 100644
index 0000000000000..34bd310b88e5d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js
@@ -0,0 +1,15 @@
+const is = require('./is.js')
+const { dirname } = require('path')
+
+module.exports = async ({ cwd = process.cwd(), root } = {}) => {
+  while (true) {
+    if (await is({ cwd })) {
+      return cwd
+    }
+    const next = dirname(cwd)
+    if (cwd === root || cwd === next) {
+      return null
+    }
+    cwd = next
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js
new file mode 100644
index 0000000000000..10a65f782e6da
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js
@@ -0,0 +1,9 @@
+module.exports = {
+  clone: require('./clone.js'),
+  revs: require('./revs.js'),
+  spawn: require('./spawn.js'),
+  is: require('./is.js'),
+  find: require('./find.js'),
+  isClean: require('./is-clean.js'),
+  errors: require('./errors.js'),
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
new file mode 100644
index 0000000000000..182373be94193
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
@@ -0,0 +1,6 @@
+const spawn = require('./spawn.js')
+
+module.exports = (opts = {}) =>
+  spawn(['status', '--porcelain=v1', '-uno'], opts)
+    .then(res => !res.stdout.trim().split(/\r?\n+/)
+      .map(l => l.trim()).filter(l => l).length)
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js
new file mode 100644
index 0000000000000..f5a0e8754f10d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js
@@ -0,0 +1,4 @@
+// not an airtight indicator, but a good gut-check to even bother trying
+const { stat } = require('fs/promises')
+module.exports = ({ cwd = process.cwd() } = {}) =>
+  stat(cwd + '/.git').then(() => true, () => false)
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
new file mode 100644
index 0000000000000..6bd7e7a4c1531
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
@@ -0,0 +1,147 @@
+// turn an array of lines from `git ls-remote` into a thing
+// vaguely resembling a packument, where docs are a resolved ref
+
+const semver = require('semver')
+
+module.exports = lines => finish(lines.reduce(linesToRevsReducer, {
+  versions: {},
+  'dist-tags': {},
+  refs: {},
+  shas: {},
+}))
+
+const finish = revs => distTags(shaList(peelTags(revs)))
+
+// We can check out shallow clones on specific SHAs if we have a ref
+const shaList = revs => {
+  Object.keys(revs.refs).forEach(ref => {
+    const doc = revs.refs[ref]
+    if (!revs.shas[doc.sha]) {
+      revs.shas[doc.sha] = [ref]
+    } else {
+      revs.shas[doc.sha].push(ref)
+    }
+  })
+  return revs
+}
+
+// Replace any tags with their ^{} counterparts, if those exist
+const peelTags = revs => {
+  Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => {
+    const peeled = revs.refs[ref]
+    const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
+    if (unpeeled) {
+      unpeeled.sha = peeled.sha
+      delete revs.refs[ref]
+    }
+  })
+  return revs
+}
+
+const distTags = revs => {
+  // not entirely sure what situations would result in an
+  // ichabod repo, but best to be careful in Sleepy Hollow anyway
+  const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
+  const versions = Object.keys(revs.versions)
+  versions.forEach(v => {
+    // simulate a dist-tags with latest pointing at the
+    // 'latest' branch if one exists and is a version,
+    // or HEAD if not.
+    const ver = revs.versions[v]
+    if (revs.refs.latest && ver.sha === revs.refs.latest.sha) {
+      revs['dist-tags'].latest = v
+    } else if (ver.sha === HEAD.sha) {
+      revs['dist-tags'].HEAD = v
+      if (!revs.refs.latest) {
+        revs['dist-tags'].latest = v
+      }
+    }
+  })
+  return revs
+}
+
+const refType = ref => {
+  if (ref.startsWith('refs/tags/')) {
+    return 'tag'
+  }
+  if (ref.startsWith('refs/heads/')) {
+    return 'branch'
+  }
+  if (ref.startsWith('refs/pull/')) {
+    return 'pull'
+  }
+  if (ref === 'HEAD') {
+    return 'head'
+  }
+  // Could be anything, ignore for now
+  /* istanbul ignore next */
+  return 'other'
+}
+
+// return the doc, or null if we should ignore it.
+const lineToRevDoc = line => {
+  const split = line.trim().split(/\s+/, 2)
+  if (split.length < 2) {
+    return null
+  }
+
+  const sha = split[0].trim()
+  const rawRef = split[1].trim()
+  const type = refType(rawRef)
+
+  if (type === 'tag') {
+    // refs/tags/foo^{} is the 'peeled tag', ie the commit
+    // that is tagged by refs/tags/foo; they resolve to the same
+    // content, just different objects in git's data structure.
+    // But, we care about the thing the tag POINTS to, not the tag
+    // object itself, so we only look at the peeled tag refs, and
+    // ignore the pointer.
+    // For now, though, we have to save both, because some tags
+    // don't have peels, if they were not annotated.
+    const ref = rawRef.slice('refs/tags/'.length)
+    return { sha, ref, rawRef, type }
+  }
+
+  if (type === 'branch') {
+    const ref = rawRef.slice('refs/heads/'.length)
+    return { sha, ref, rawRef, type }
+  }
+
+  if (type === 'pull') {
+    // NB: merged pull requests installable with #pull/123/merge
+    // for the merged pr, or #pull/123 for the PR head
+    const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '')
+    return { sha, ref, rawRef, type }
+  }
+
+  if (type === 'head') {
+    const ref = 'HEAD'
+    return { sha, ref, rawRef, type }
+  }
+
+  // at this point, all we can do is leave the ref un-munged
+  return { sha, ref: rawRef, rawRef, type }
+}
+
+const linesToRevsReducer = (revs, line) => {
+  const doc = lineToRevDoc(line)
+
+  if (!doc) {
+    return revs
+  }
+
+  revs.refs[doc.ref] = doc
+  revs.refs[doc.rawRef] = doc
+
+  if (doc.type === 'tag') {
+    // try to pull a semver value out of tags like `release-v1.2.3`
+    // which is a pretty common pattern.
+    const match = !doc.ref.endsWith('^{}') &&
+      doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
+    if (match && semver.valid(match[1], true)) {
+      revs.versions[semver.clean(match[1], true)] = doc
+    }
+  }
+
+  return revs
+}
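To make the reducer above concrete, here is a hedged sketch of the shape it produces for a small `git ls-remote` listing (the shas and refs are invented for illustration; lines-to-revs.js is an internal module, not exported from the package index):

  const linesToRevs = require('@npmcli/git/lib/lines-to-revs.js')

  const revs = linesToRevs([
    'ccc333\tHEAD',
    'ccc333\trefs/heads/main',
    'bbb222\trefs/tags/v1.2.3',
    'ccc333\trefs/tags/v1.2.3^{}',  // peeled tag: its sha wins for v1.2.3
  ])

  // revs.refs.main.sha       === 'ccc333'
  // revs.refs['v1.2.3'].sha  === 'ccc333'   (peeled; the ^{} entry is removed)
  // revs.versions['1.2.3']   is the v1.2.3 ref doc
  // revs['dist-tags']        is { HEAD: '1.2.3', latest: '1.2.3' }
  // revs.shas.ccc333         lists every ref that resolves to that sha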
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
new file mode 100644
index 0000000000000..7540ec7c8b9f7
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
@@ -0,0 +1,33 @@
+const {
+  GitConnectionError,
+  GitPathspecError,
+  GitUnknownError,
+} = require('./errors.js')
+
+const connectionErrorRe = new RegExp([
+  'remote error: Internal Server Error',
+  'The remote end hung up unexpectedly',
+  'Connection timed out',
+  'Operation timed out',
+  'Failed to connect to .* Timed out',
+  'Connection reset by peer',
+  'SSL_ERROR_SYSCALL',
+  'The requested URL returned error: 503',
+].join('|'))
+
+const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/
+
+function makeError (er) {
+  const message = er.stderr
+  let gitEr
+  if (connectionErrorRe.test(message)) {
+    gitEr = new GitConnectionError(message)
+  } else if (missingPathspecRe.test(message)) {
+    gitEr = new GitPathspecError(message)
+  } else {
+    gitEr = new GitUnknownError(message)
+  }
+  return Object.assign(gitEr, er)
+}
+
+module.exports = makeError
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
new file mode 100644
index 0000000000000..1e80e9efe4989
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
@@ -0,0 +1,57 @@
+const fs = require('node:fs')
+const os = require('node:os')
+const path = require('node:path')
+const ini = require('ini')
+
+const gitConfigPath = path.join(os.homedir(), '.gitconfig')
+
+let cachedConfig = null
+
+// Function to load and cache the git config
+const loadGitConfig = () => {
+  if (cachedConfig === null) {
+    try {
+      cachedConfig = {}
+      if (fs.existsSync(gitConfigPath)) {
+        const configContent = fs.readFileSync(gitConfigPath, 'utf-8')
+        cachedConfig = ini.parse(configContent)
+      }
+    } catch (error) {
+      cachedConfig = {}
+    }
+  }
+  return cachedConfig
+}
+
+const checkGitConfigs = () => {
+  const config = loadGitConfig()
+  return {
+    sshCommandSetInConfig: config?.core?.sshCommand !== undefined,
+    askPassSetInConfig: config?.core?.askpass !== undefined,
+  }
+}
+
+const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined
+const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined
+const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs()
+
+// Values we want to set if they're not already defined by the end user
+// This defaults to accepting new ssh host key fingerprints
+const finalGitEnv = {
+  ...(askPassSetInEnv || askPassSetInConfig ? {} : {
+    GIT_ASKPASS: 'echo',
+  }),
+  ...(sshCommandSetInEnv || sshCommandSetInConfig ? {} : {
+    GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
+  }),
+}
+
+module.exports = (opts = {}) => ({
+  stdioString: true,
+  ...opts,
+  shell: false,
+  env: opts.env || { ...finalGitEnv, ...process.env },
+})
+
+// Export the loadGitConfig function for testing
+module.exports.loadGitConfig = loadGitConfig
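A brief sketch of the effect of the defaults above (environment values are illustrative):

  const makeOpts = require('@npmcli/git/lib/opts.js')

  // With neither GIT_ASKPASS nor GIT_SSH_COMMAND set in the environment or in
  // ~/.gitconfig, the spawned git process gets non-interactive defaults:
  const { env } = makeOpts()
  // env.GIT_ASKPASS     === 'echo'
  // env.GIT_SSH_COMMAND === 'ssh -oStrictHostKeyChecking=accept-new'
  // Anything the user already set in process.env takes precedence.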
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
new file mode 100644
index 0000000000000..ebcc848fa3458
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
@@ -0,0 +1,22 @@
+const spawn = require('./spawn.js')
+const { LRUCache } = require('lru-cache')
+const linesToRevs = require('./lines-to-revs.js')
+
+const revsCache = new LRUCache({
+  max: 100,
+  ttl: 5 * 60 * 1000,
+})
+
+module.exports = async (repo, opts = {}) => {
+  if (!opts.noGitRevCache) {
+    const cached = revsCache.get(repo)
+    if (cached) {
+      return cached
+    }
+  }
+
+  const { stdout } = await spawn(['ls-remote', repo], opts)
+  const revs = linesToRevs(stdout.trim().split('\n'))
+  revsCache.set(repo, revs)
+  return revs
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
new file mode 100644
index 0000000000000..03c1cbde21547
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
@@ -0,0 +1,44 @@
+const spawn = require('@npmcli/promise-spawn')
+const promiseRetry = require('promise-retry')
+const { log } = require('proc-log')
+const makeError = require('./make-error.js')
+const makeOpts = require('./opts.js')
+
+module.exports = (gitArgs, opts = {}) => {
+  const whichGit = require('./which.js')
+  const gitPath = whichGit(opts)
+
+  if (gitPath instanceof Error) {
+    return Promise.reject(gitPath)
+  }
+
+  // undocumented option, mostly only here for tests
+  const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects'
+    ? gitArgs
+    : ['--no-replace-objects', ...gitArgs]
+
+  let retryOpts = opts.retry
+  if (retryOpts === null || retryOpts === undefined) {
+    retryOpts = {
+      retries: opts.fetchRetries || 2,
+      factor: opts.fetchRetryFactor || 10,
+      maxTimeout: opts.fetchRetryMaxtimeout || 60000,
+      minTimeout: opts.fetchRetryMintimeout || 1000,
+    }
+  }
+  return promiseRetry((retryFn, number) => {
+    if (number !== 1) {
+      log.silly('git', `Retrying git command: ${
+        args.join(' ')} attempt # ${number}`)
+    }
+
+    return spawn(gitPath, args, makeOpts(opts))
+      .catch(er => {
+        const gitError = makeError(er)
+        if (!gitError.shouldRetry(number)) {
+          throw gitError
+        }
+        retryFn(gitError)
+      })
+  }, retryOpts)
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
new file mode 100644
index 0000000000000..fcd9578a19597
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
@@ -0,0 +1,3 @@
+const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32'
+
+exports.isWindows = isWindows
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js
new file mode 100644
index 0000000000000..dc2a1ad212166
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js
@@ -0,0 +1,18 @@
+const which = require('which')
+
+let gitPath
+try {
+  gitPath = which.sync('git')
+} catch {
+  // ignore errors
+}
+
+module.exports = (opts = {}) => {
+  if (opts.git) {
+    return opts.git
+  }
+  if (!gitPath || opts.git === false) {
+    return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' })
+  }
+  return gitPath
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/package.json b/node_modules/pacote/node_modules/@npmcli/git/package.json
new file mode 100644
index 0000000000000..f4e844bccab0d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/git/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "@npmcli/git",
+  "version": "7.0.0",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "description": "a util for spawning git from npm CLI contexts",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/git.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "scripts": {
+    "lint": "npm run eslint",
+    "snap": "tap",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run eslint -- --fix",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "tap": {
+    "timeout": 600,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.24.1",
+    "npm-package-arg": "^13.0.0",
+    "slash": "^3.0.0",
+    "tap": "^16.0.1"
+  },
+  "dependencies": {
+    "@npmcli/promise-spawn": "^8.0.0",
+    "ini": "^5.0.0",
+    "lru-cache": "^11.2.1",
+    "npm-pick-manifest": "^11.0.1",
+    "proc-log": "^5.0.0",
+    "promise-retry": "^2.0.1",
+    "semver": "^7.3.5",
+    "which": "^5.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.24.1",
+    "publish": true
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js
deleted file mode 100644
index 7eff602d73a3f..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/index.js
+++ /dev/null
@@ -1,286 +0,0 @@
-const { readFile, writeFile } = require('node:fs/promises')
-const { resolve } = require('node:path')
-const parseJSON = require('json-parse-even-better-errors')
-
-const updateDeps = require('./update-dependencies.js')
-const updateScripts = require('./update-scripts.js')
-const updateWorkspaces = require('./update-workspaces.js')
-const normalize = require('./normalize.js')
-const { read, parse } = require('./read-package.js')
-const { packageSort } = require('./sort.js')
-
-// a list of handy specialized helper functions that take
-// care of special cases that are handled by the npm cli
-const knownSteps = new Set([
-  updateDeps,
-  updateScripts,
-  updateWorkspaces,
-])
-
-// list of all keys that are handled by "knownSteps" helpers
-const knownKeys = new Set([
-  ...updateDeps.knownKeys,
-  'scripts',
-  'workspaces',
-])
-
-class PackageJson {
-  static normalizeSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'optionalDedupe',
-    'scripts',
-    'funding',
-    'bin',
-  ])
-
-  // npm pkg fix
-  static fixSteps = Object.freeze([
-    'binRefs',
-    'bundleDependencies',
-    'bundleDependenciesFalse',
-    'fixName',
-    'fixNameField',
-    'fixVersionField',
-    'fixRepositoryField',
-    'fixDependencies',
-    'devDependencies',
-    'scriptpath',
-  ])
-
-  static prepareSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'bundleDependenciesDeleteFalse',
-    'gypfile',
-    'serverjs',
-    'scriptpath',
-    'authors',
-    'readme',
-    'mans',
-    'binDir',
-    'gitHead',
-    'fillTypes',
-    'normalizeData',
-    'binRefs',
-  ])
-
-  // create a new empty package.json, so we can save at the given path even
-  // though we didn't start from a parsed file
-  static async create (path, opts = {}) {
-    const p = new PackageJson()
-    await p.create(path)
-    if (opts.data) {
-      return p.update(opts.data)
-    }
-    return p
-  }
-
-  // Loads a package.json at given path and JSON parses
-  static async load (path, opts = {}) {
-    const p = new PackageJson()
-    // Avoid try/catch if we aren't going to create
-    if (!opts.create) {
-      return p.load(path)
-    }
-
-    try {
-      return await p.load(path)
-    } catch (err) {
-      if (!err.message.startsWith('Could not read package.json')) {
-        throw err
-      }
-      return await p.create(path)
-    }
-  }
-
-  // npm pkg fix
-  static async fix (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.fix(opts)
-  }
-
-  // read-package-json compatible behavior
-  static async prepare (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.prepare(opts)
-  }
-
-  // read-package-json-fast compatible behavior
-  static async normalize (path, opts) {
-    const p = new PackageJson()
-    await p.load(path)
-    return p.normalize(opts)
-  }
-
-  #path
-  #manifest
-  #readFileContent = ''
-  #canSave = true
-
-  // Load content from given path
-  async load (path, parseIndex) {
-    this.#path = path
-    let parseErr
-    try {
-      this.#readFileContent = await read(this.filename)
-    } catch (err) {
-      if (!parseIndex) {
-        throw err
-      }
-      parseErr = err
-    }
-
-    if (parseErr) {
-      const indexFile = resolve(this.path, 'index.js')
-      let indexFileContent
-      try {
-        indexFileContent = await readFile(indexFile, 'utf8')
-      } catch (err) {
-        throw parseErr
-      }
-      try {
-        this.fromComment(indexFileContent)
-      } catch (err) {
-        throw parseErr
-      }
-      // This wasn't a package.json so prevent saving
-      this.#canSave = false
-      return this
-    }
-
-    return this.fromJSON(this.#readFileContent)
-  }
-
-  // Load data from a JSON string/buffer
-  fromJSON (data) {
-    this.#manifest = parse(data)
-    return this
-  }
-
-  fromContent (data) {
-    this.#manifest = data
-    this.#canSave = false
-    return this
-  }
-
-  // Load data from a comment
-  // /**package { "name": "foo", "version": "1.2.3", ... } **/
-  fromComment (data) {
-    data = data.split(/^\/\*\*package(?:\s|$)/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[1]
-    data = data.split(/\*\*\/$/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[0]
-    data = data.replace(/^\s*\*/mg, '')
-
-    this.#manifest = parseJSON(data)
-    return this
-  }
-
-  get content () {
-    return this.#manifest
-  }
-
-  get path () {
-    return this.#path
-  }
-
-  get filename () {
-    if (this.path) {
-      return resolve(this.path, 'package.json')
-    }
-    return undefined
-  }
-
-  create (path) {
-    this.#path = path
-    this.#manifest = {}
-    return this
-  }
-
-  // This should be the ONLY way to set content in the manifest
-  update (content) {
-    if (!this.content) {
-      throw new Error('Can not update without content.  Please `load` or `create`')
-    }
-
-    for (const step of knownSteps) {
-      this.#manifest = step({ content, originalContent: this.content })
-    }
-
-    // unknown properties will just be overwitten
-    for (const [key, value] of Object.entries(content)) {
-      if (!knownKeys.has(key)) {
-        this.content[key] = value
-      }
-    }
-
-    return this
-  }
-
-  async save ({ sort } = {}) {
-    if (!this.#canSave) {
-      throw new Error('No package.json to save to')
-    }
-    const {
-      [Symbol.for('indent')]: indent,
-      [Symbol.for('newline')]: newline,
-      ...rest
-    } = this.content
-
-    const format = indent === undefined ? '  ' : indent
-    const eol = newline === undefined ? '\n' : newline
-
-    const content = sort ? packageSort(rest) : rest
-
-    const fileContent = `${
-      JSON.stringify(content, null, format)
-    }\n`
-      .replace(/\n/g, eol)
-
-    if (fileContent.trim() !== this.#readFileContent.trim()) {
-      const written = await writeFile(this.filename, fileContent)
-      this.#readFileContent = fileContent
-      return written
-    }
-  }
-
-  async normalize (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.normalizeSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async prepare (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.prepareSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async fix (opts = {}) {
-    // This one is not overridable
-    opts.steps = this.constructor.fixSteps
-    await normalize(this, opts)
-    return this
-  }
-}
-
-module.exports = PackageJson
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js
deleted file mode 100644
index 79b0bafbcd3a4..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize-data.js
+++ /dev/null
@@ -1,257 +0,0 @@
-// Originally normalize-package-data
-
-const url = require('node:url')
-const hostedGitInfo = require('hosted-git-info')
-const validateLicense = require('validate-npm-package-license')
-
-const typos = {
-  dependancies: 'dependencies',
-  dependecies: 'dependencies',
-  depdenencies: 'dependencies',
-  devEependencies: 'devDependencies',
-  depends: 'dependencies',
-  'dev-dependencies': 'devDependencies',
-  devDependences: 'devDependencies',
-  devDepenencies: 'devDependencies',
-  devdependencies: 'devDependencies',
-  repostitory: 'repository',
-  repo: 'repository',
-  prefereGlobal: 'preferGlobal',
-  hompage: 'homepage',
-  hampage: 'homepage',
-  autohr: 'author',
-  autor: 'author',
-  contributers: 'contributors',
-  publicationConfig: 'publishConfig',
-  script: 'scripts',
-}
-
-const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (description) {
-  // the first block of text before the first heading that isn't the first line heading
-  const lines = description.trim().split('\n')
-  let start = 0
-  // skip initial empty lines and lines that start with #
-  while (lines[start]?.trim().match(/^(#|$)/)) {
-    start++
-  }
-  let end = start + 1
-  // keep going till we get to the end or an empty line
-  while (end < lines.length && lines[end].trim()) {
-    end++
-  }
-  return lines.slice(start, end).join(' ').trim()
-}
-
-function stringifyPerson (person) {
-  if (typeof person !== 'string') {
-    const name = person.name || ''
-    const u = person.url || person.web
-    const wrappedUrl = u ? (' (' + u + ')') : ''
-    const e = person.email || person.mail
-    const wrappedEmail = e ? (' <' + e + '>') : ''
-    person = name + wrappedEmail + wrappedUrl
-  }
-  const matchedName = person.match(/^([^(<]+)/)
-  const matchedUrl = person.match(/\(([^()]+)\)/)
-  const matchedEmail = person.match(/<([^<>]+)>/)
-  const parsed = {}
-  if (matchedName?.[0].trim()) {
-    parsed.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    parsed.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    parsed.url = matchedUrl[1]
-  }
-  return parsed
-}
-
-function normalizeData (data, changes) {
-  // fixDescriptionField
-  if (data.description && typeof data.description !== 'string') {
-    changes?.push(`'description' field should be a string`)
-    delete data.description
-  }
-  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
-    data.description = extractDescription(data.readme)
-  }
-  if (data.description === undefined) {
-    delete data.description
-  }
-  if (!data.description) {
-    changes?.push('No description')
-  }
-
-  // fixModulesField
-  if (data.modules) {
-    changes?.push(`modules field is deprecated`)
-    delete data.modules
-  }
-
-  // fixFilesField
-  const files = data.files
-  if (files && !Array.isArray(files)) {
-    changes?.push(`Invalid 'files' member`)
-    delete data.files
-  } else if (data.files) {
-    data.files = data.files.filter(function (file) {
-      if (!file || typeof file !== 'string') {
-        changes?.push(`Invalid filename in 'files' list: ${file}`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-
-  // fixManField
-  if (data.man && typeof data.man === 'string') {
-    data.man = [data.man]
-  }
-
-  // fixBugsField
-  if (!data.bugs && data.repository?.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted && hosted.bugs()) {
-      data.bugs = { url: hosted.bugs() }
-    }
-  } else if (data.bugs) {
-    if (typeof data.bugs === 'string') {
-      if (isEmail(data.bugs)) {
-        data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-      } else if (url.parse(data.bugs).protocol) {
-        data.bugs = { url: data.bugs }
-      } else {
-        changes?.push(`Bug string field must be url, email, or {email,url}`)
-      }
-    } else {
-      for (const k in data.bugs) {
-        if (['web', 'name'].includes(k)) {
-          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
-          data.bugs.url = data.bugs[k]
-          delete data.bugs[k]
-        }
-      }
-      const oldBugs = data.bugs
-      data.bugs = {}
-      if (oldBugs.url) {
-        /* eslint-disable-next-line node/no-deprecated-api */
-        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-          data.bugs.url = oldBugs.url
-        } else {
-          changes?.push('bugs.url field must be a string url. Deleted.')
-        }
-      }
-      if (oldBugs.email) {
-        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-          data.bugs.email = oldBugs.email
-        } else {
-          changes?.push('bugs.email field must be a string email. Deleted.')
-        }
-      }
-    }
-    if (!data.bugs.email && !data.bugs.url) {
-      delete data.bugs
-      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
-    }
-  }
-  // fixKeywordsField
-  if (typeof data.keywords === 'string') {
-    data.keywords = data.keywords.split(/,\s+/)
-  }
-  if (data.keywords && !Array.isArray(data.keywords)) {
-    delete data.keywords
-    changes?.push(`keywords should be an array of strings`)
-  } else if (data.keywords) {
-    data.keywords = data.keywords.filter(function (kw) {
-      if (typeof kw !== 'string' || !kw) {
-        changes?.push(`keywords should be an array of strings`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-  // fixBundleDependenciesField
-  const bdd = 'bundledDependencies'
-  const bd = 'bundleDependencies'
-  if (data[bdd] && !data[bd]) {
-    data[bd] = data[bdd]
-    delete data[bdd]
-  }
-  if (data[bd] && !Array.isArray(data[bd])) {
-    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
-    delete data[bd]
-  } else if (data[bd]) {
-    data[bd] = data[bd].filter(function (filtered) {
-      if (!filtered || typeof filtered !== 'string') {
-        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
-        return false
-      } else {
-        if (!data.dependencies) {
-          data.dependencies = {}
-        }
-        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
-          data.dependencies[filtered] = '*'
-        }
-        return true
-      }
-    })
-  }
-  // fixHomepageField
-  if (!data.homepage && data.repository && data.repository.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted) {
-      data.homepage = hosted.docs()
-    }
-  }
-  if (data.homepage) {
-    if (typeof data.homepage !== 'string') {
-      changes?.push('homepage field must be a string url. Deleted.')
-      delete data.homepage
-    } else {
-      /* eslint-disable-next-line node/no-deprecated-api */
-      if (!url.parse(data.homepage).protocol) {
-        data.homepage = 'http://' + data.homepage
-      }
-    }
-  }
-  // fixReadmeField
-  if (!data.readme) {
-    changes?.push('No README data')
-    data.readme = 'ERROR: No README data found!'
-  }
-  // fixLicenseField
-  const license = data.license || data.licence
-  if (!license) {
-    changes?.push('No license field.')
-  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
-    changes?.push('license should be a valid SPDX license expression')
-  } else if (!validateLicense(license).validForNewPackages) {
-    changes?.push('license should be a valid SPDX license expression')
-  }
-  // fixPeople
-  if (data.author) {
-    data.author = stringifyPerson(data.author)
-  }
-  ['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(stringifyPerson)
-  })
-  // fixTypos
-  for (const d in typos) {
-    if (Object.prototype.hasOwnProperty.call(data, d)) {
-      changes?.push(`${d} should probably be ${typos[d]}.`)
-    }
-  }
-}
-
-module.exports = { normalizeData }
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js
deleted file mode 100644
index 845f6753a9a00..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/normalize.js
+++ /dev/null
@@ -1,601 +0,0 @@
-const valid = require('semver/functions/valid')
-const clean = require('semver/functions/clean')
-const fs = require('node:fs/promises')
-const path = require('node:path')
-const { log } = require('proc-log')
-const moduleBuiltin = require('node:module')
-
-/**
- * @type {import('hosted-git-info')}
- */
-let _hostedGitInfo
-function lazyHostedGitInfo () {
-  if (!_hostedGitInfo) {
-    _hostedGitInfo = require('hosted-git-info')
-  }
-  return _hostedGitInfo
-}
-
-/**
- * @type {import('glob').glob}
- */
-let _glob
-function lazyLoadGlob () {
-  if (!_glob) {
-    _glob = require('glob').glob
-  }
-  return _glob
-}
-
-// used to be npm-normalize-package-bin
-function normalizePackageBin (pkg, changes) {
-  if (pkg.bin) {
-    if (typeof pkg.bin === 'string' && pkg.name) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = { [pkg.name]: pkg.bin }
-    } else if (Array.isArray(pkg.bin)) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = pkg.bin.reduce((acc, k) => {
-        acc[path.basename(k)] = k
-        return acc
-      }, {})
-    }
-    if (typeof pkg.bin === 'object') {
-      for (const binKey in pkg.bin) {
-        if (typeof pkg.bin[binKey] !== 'string') {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-        const base = path.basename(secureAndUnixifyPath(binKey))
-        if (!base) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
-
-        if (!binTarget) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        if (base !== binKey) {
-          delete pkg.bin[binKey]
-          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
-        }
-        if (binTarget !== pkg.bin[binKey]) {
-          changes?.push(`"bin[${base}]" script name was cleaned`)
-        }
-        pkg.bin[base] = binTarget
-      }
-
-      if (Object.keys(pkg.bin).length === 0) {
-        changes?.push('empty "bin" was removed')
-        delete pkg.bin
-      }
-
-      return pkg
-    }
-  }
-  delete pkg.bin
-}
-
-function normalizePackageMan (pkg, changes) {
-  if (pkg.man) {
-    const mans = []
-    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
-      if (typeof man !== 'string') {
-        changes?.push(`removed invalid "man [${man}]"`)
-      } else {
-        mans.push(secureAndUnixifyPath(man))
-      }
-    }
-
-    if (!mans.length) {
-      changes?.push('empty "man" was removed')
-    } else {
-      pkg.man = mans
-      return pkg
-    }
-  }
-  delete pkg.man
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  const rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function unixifyPath (ref) {
-  return ref.replace(/\\|:/g, '/')
-}
-
-function secureAndUnixifyPath (ref) {
-  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
-  return secured.startsWith('./') ? '' : secured
-}
-
-// We don't want the `changes` array in here by default because this is a hot
-// path for parsing packuments during install.  So the calling method passes it
-// in if it wants to track changes.
-const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
-  if (!pkg.content) {
-    throw new Error('Can not normalize without content')
-  }
-  const data = pkg.content
-  const scripts = data.scripts || {}
-  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
-
-  // name and version are load bearing so we have to clean them up first
-  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
-    if (!data.name && !strict) {
-      changes?.push('Missing "name" field was set to an empty string')
-      data.name = ''
-    } else {
-      if (typeof data.name !== 'string') {
-        throw new Error('name field must be a string.')
-      }
-      if (!strict) {
-        const name = data.name.trim()
-        if (data.name !== name) {
-          changes?.push(`Whitespace was trimmed from "name"`)
-          data.name = name
-        }
-      }
-
-      if (data.name.startsWith('.') ||
-        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
-        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
-        data.name.toLowerCase() === 'node_modules' ||
-        data.name.toLowerCase() === 'favicon.ico') {
-        throw new Error('Invalid name: ' + JSON.stringify(data.name))
-      }
-    }
-  }
-
-  if (steps.includes('fixName')) {
-    // Check for conflicts with builtin modules
-    if (moduleBuiltin.builtinModules.includes(data.name)) {
-      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
-    }
-  }
-
-  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    const loose = !strict
-    if (!data.version) {
-      data.version = ''
-    } else {
-      if (!valid(data.version, loose)) {
-        throw new Error(`Invalid version: "${data.version}"`)
-      }
-      const version = clean(data.version, loose)
-      if (version !== data.version) {
-        changes?.push(`"version" was cleaned and set to "${version}"`)
-        data.version = version
-      }
-    }
-  }
-  // remove attributes that start with "_"
-  if (steps.includes('_attributes')) {
-    for (const key in data) {
-      if (key.startsWith('_')) {
-        changes?.push(`"${key}" was removed`)
-        delete pkg.content[key]
-      }
-    }
-  }
-
-  // build the "_id" attribute
-  if (steps.includes('_id')) {
-    if (data.name && data.version) {
-      changes?.push(`"_id" was set to ${pkgId}`)
-      data._id = pkgId
-    }
-  }
-
-  // fix bundledDependencies typo
-  // normalize bundleDependencies
-  if (steps.includes('bundledDependencies')) {
-    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
-      data.bundleDependencies = data.bundledDependencies
-    }
-    changes?.push(`Deleted incorrect "bundledDependencies"`)
-    delete data.bundledDependencies
-  }
-  // expand "bundleDependencies: true or translate from object"
-  if (steps.includes('bundleDependencies')) {
-    const bd = data.bundleDependencies
-    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
-      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
-      data.bundleDependencies = []
-    } else if (bd === true) {
-      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
-      data.bundleDependencies = Object.keys(data.dependencies || {})
-    } else if (bd && typeof bd === 'object') {
-      if (!Array.isArray(bd)) {
-        changes?.push(`"bundleDependencies" was changed from an object to an array`)
-        data.bundleDependencies = Object.keys(bd)
-      }
-    } else if ('bundleDependencies' in data) {
-      changes?.push(`"bundleDependencies" was removed`)
-      delete data.bundleDependencies
-    }
-  }
-
-  // it was once common practice to list deps both in optionalDependencies and
-  // in dependencies, to support npm versions that did not know about
-  // optionalDependencies.  This is no longer a relevant need, so duplicating
-  // the deps in two places is unnecessary and excessive.
-  if (steps.includes('optionalDedupe')) {
-    if (data.dependencies &&
-      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
-      for (const name in data.optionalDependencies) {
-        changes?.push(`optionalDependencies."${name}" was removed`)
-        delete data.dependencies[name]
-      }
-      if (!Object.keys(data.dependencies).length) {
-        changes?.push(`Empty "optionalDependencies" was removed`)
-        delete data.dependencies
-      }
-    }
-  }
-
-  // add "install" attribute if any "*.gyp" files exist
-  if (steps.includes('gypfile')) {
-    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
-      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
-      if (files.length) {
-        scripts.install = 'node-gyp rebuild'
-        data.scripts = scripts
-        data.gypfile = true
-        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
-        changes?.push(`"gypfile" was set to "true"`)
-      }
-    }
-  }
-
-  // add "start" attribute if "server.js" exists
-  if (steps.includes('serverjs') && !scripts.start) {
-    try {
-      await fs.access(path.join(pkg.path, 'server.js'))
-      scripts.start = 'node server.js'
-      data.scripts = scripts
-      changes?.push('"scripts.start" was set to "node server.js"')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // strip "node_modules/.bin" from scripts entries
-  // remove invalid scripts entries (non-strings)
-  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
-    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
-    if (typeof data.scripts === 'object') {
-      for (const name in data.scripts) {
-        if (typeof data.scripts[name] !== 'string') {
-          delete data.scripts[name]
-          changes?.push(`Invalid scripts."${name}" was removed`)
-        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
-          data.scripts[name] = data.scripts[name].replace(spre, '')
-          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
-        }
-      }
-    } else {
-      changes?.push(`Removed invalid "scripts"`)
-      delete data.scripts
-    }
-  }
-
-  if (steps.includes('funding')) {
-    if (data.funding && typeof data.funding === 'string') {
-      data.funding = { url: data.funding }
-      changes?.push(`"funding" was changed to an object with a url attribute`)
-    }
-  }
-
-  // populate "authors" attribute
-  if (steps.includes('authors') && !data.contributors) {
-    try {
-      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
-      const authors = authorData.split(/\r?\n/g)
-        .map(line => line.replace(/^\s*#.*$/, '').trim())
-        .filter(line => line)
-      data.contributors = authors
-      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // populate "readme" attribute
-  if (steps.includes('readme') && !data.readme) {
-    const mdre = /\.m?a?r?k?d?o?w?n?$/i
-    const files = await lazyLoadGlob()('{README,README.*}', {
-      cwd: pkg.path,
-      nocase: true,
-      mark: true,
-    })
-    let readmeFile
-    for (const file of files) {
-      // don't accept directories.
-      if (!file.endsWith(path.sep)) {
-        if (file.match(mdre)) {
-          readmeFile = file
-          break
-        }
-        if (file.endsWith('README')) {
-          readmeFile = file
-        }
-      }
-    }
-    if (readmeFile) {
-      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
-      data.readme = readmeData
-      data.readmeFilename = readmeFile
-      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
-      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
-    }
-    if (!data.readme) {
-      data.readme = 'ERROR: No README data found!'
-    }
-  }
-
-  // expand directories.man
-  if (steps.includes('mans')) {
-    if (data.directories?.man && !data.man) {
-      const manDir = secureAndUnixifyPath(data.directories.man)
-      const cwd = path.resolve(pkg.path, manDir)
-      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
-      data.man = files.map(man =>
-        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
-      )
-    }
-    normalizePackageMan(data, changes)
-  }
-
-  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
-    normalizePackageBin(data, changes)
-  }
-
-  // expand "directories.bin"
-  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
-    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
-    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
-    data.bin = bins.reduce((acc, binFile) => {
-      if (binFile && !binFile.startsWith('.')) {
-        const binName = path.basename(binFile)
-        acc[binName] = path.join(data.directories.bin, binFile)
-      }
-      return acc
-    }, {})
-    // *sigh*
-    normalizePackageBin(data, changes)
-  }
-
-  // populate "gitHead" attribute
-  if (steps.includes('gitHead') && !data.gitHead) {
-    const git = require('@npmcli/git')
-    const gitRoot = await git.find({ cwd: pkg.path, root })
-    let head
-    if (gitRoot) {
-      try {
-        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
-      } catch (err) {
-      // do nothing
-      }
-    }
-    let headData
-    if (head) {
-      if (head.startsWith('ref: ')) {
-        const headRef = head.replace(/^ref: /, '').trim()
-        const headFile = path.resolve(gitRoot, '.git', headRef)
-        try {
-          headData = await fs.readFile(headFile, 'utf8')
-          headData = headData.replace(/^ref: /, '').trim()
-        } catch (err) {
-          // do nothing
-        }
-        if (!headData) {
-          const packFile = path.resolve(gitRoot, '.git/packed-refs')
-          try {
-            let refs = await fs.readFile(packFile, 'utf8')
-            if (refs) {
-              refs = refs.split('\n')
-              for (let i = 0; i < refs.length; i++) {
-                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
-                if (match && match[2].trim() === headRef) {
-                  headData = match[1]
-                  break
-                }
-              }
-            }
-          } catch {
-            // do nothing
-          }
-        }
-      } else {
-        headData = head.trim()
-      }
-    }
-    if (headData) {
-      data.gitHead = headData
-    }
-  }
-
-  // populate "types" attribute
-  if (steps.includes('fillTypes')) {
-    const index = data.main || 'index.js'
-
-    if (typeof index !== 'string') {
-      throw new TypeError('The "main" attribute must be of type string.')
-    }
-
-    // TODO exports is much more complicated than this in verbose format
-    // We need to support for instance
-
-    // "exports": {
-    //   ".": [
-    //     {
-    //       "default": "./lib/npm.js"
-    //     },
-    //     "./lib/npm.js"
-    //   ],
-    //   "./package.json": "./package.json"
-    // },
-    // as well as conditional exports
-
-    // if (data.exports && typeof data.exports === 'string') {
-    //   index = data.exports
-    // }
-
-    // if (data.exports && data.exports['.']) {
-    //   index = data.exports['.']
-    //   if (typeof index !== 'string') {
-    //   }
-    // }
-    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
-    const dts = `./${extless}.d.ts`
-    const hasDTSFields = 'types' in data || 'typings' in data
-    if (!hasDTSFields) {
-      try {
-        await fs.access(path.join(pkg.path, dts))
-        data.types = dts.split(path.sep).join('/')
-      } catch {
-        // do nothing
-      }
-    }
-  }
-
-  // "normalizeData" from "read-package-json", which was just a call through to
-  // "normalize-package-data".  We only call the "fixer" functions because
-  // outside of that it was also clobbering _id (which we already conditionally
-  // do) and also adding the gypfile script (which we also already
-  // conditionally do)
-
-  // Some steps are isolated so we can do a limited subset of these in `fix`
-  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
-    if (data.repositories) {
-      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
-      data.repository = data.repositories[0]
-    }
-    if (data.repository) {
-      if (typeof data.repository === 'string') {
-        changes?.push('"repository" was changed from a string to an object')
-        data.repository = {
-          type: 'git',
-          url: data.repository,
-        }
-      }
-      if (data.repository.url) {
-        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
-        let r
-        if (hosted) {
-          if (hosted.getDefaultRepresentation() === 'shortcut') {
-            r = hosted.https()
-          } else {
-            r = hosted.toString()
-          }
-          if (r !== data.repository.url) {
-            changes?.push(`"repository.url" was normalized to "${r}"`)
-            data.repository.url = r
-          }
-        }
-      }
-    }
-  }
-
-  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
-    // peerDependencies?
-    // devDependencies is meaningless here, it's ignored on an installed package
-    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
-      if (data[type]) {
-        let secondWarning = true
-        if (typeof data[type] === 'string') {
-          changes?.push(`"${type}" was converted from a string into an object`)
-          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
-          secondWarning = false
-        }
-        if (Array.isArray(data[type])) {
-          if (secondWarning) {
-            changes?.push(`"${type}" was converted from an array into an object`)
-          }
-          const o = {}
-          for (const d of data[type]) {
-            if (typeof d === 'string') {
-              const dep = d.trim().split(/(:?[@\s><=])/)
-              const dn = dep.shift()
-              const dv = dep.join('').replace(/^@/, '').trim()
-              o[dn] = dv
-            }
-          }
-          data[type] = o
-        }
-      }
-    }
-    // normalize-package-data used to put optional dependencies BACK into
-    // dependencies here, we no longer do this
-
-    for (const deps of ['dependencies', 'devDependencies']) {
-      if (deps in data) {
-        if (!data[deps] || typeof data[deps] !== 'object') {
-          changes?.push(`Removed invalid "${deps}"`)
-          delete data[deps]
-        } else {
-          for (const d in data[deps]) {
-            const r = data[deps][d]
-            if (typeof r !== 'string') {
-              changes?.push(`Removed invalid "${deps}.${d}"`)
-              delete data[deps][d]
-            }
-            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
-            if (hosted && hosted !== data[deps][d]) {
-              changes?.push(`Normalized git reference to "${deps}.${d}"`)
-              data[deps][d] = hosted.toString()
-            }
-          }
-        }
-      }
-    }
-  }
-
-  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
-  if (steps.includes('normalizeData')) {
-    const { normalizeData } = require('./normalize-data.js')
-    normalizeData(data, changes)
-  }
-
-  // Warn if the bin references don't point to anything.  This might be better
-  // in normalize-package-data if it had access to the file path.
-  if (steps.includes('binRefs') && data.bin instanceof Object) {
-    for (const key in data.bin) {
-      try {
-        await fs.access(path.resolve(pkg.path, data.bin[key]))
-      } catch {
-        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
-        // XXX: should a future breaking change delete bin entries that cannot be accessed?
-      }
-    }
-  }
-}
-
-module.exports = normalize
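The `normalize.js` removed above only applies the fixes named in `steps`, and only records human-readable messages when a `changes` array is passed in. A minimal sketch of driving it directly (the require path is illustrative; the `pkg` argument must be shaped as `{ content, path }`, which is what the function reads):

```js
// Sketch only: exercises the normalize function from the hunk above.
// The require path and package contents are illustrative.
const normalize = require('@npmcli/package-json/lib/normalize.js')

const pkg = {
  path: '/tmp/example-pkg', // only consulted by fs-based steps (gypfile, readme, authors, ...)
  content: {
    name: 'example',
    version: '1.0.0',
    bundleDependencies: true,
    dependencies: { once: '^1.4.0' },
  },
}
const changes = [] // omit this on hot paths; messages are only collected when provided

normalize(pkg, {
  strict: false,
  steps: ['fixNameField', 'fixVersionField', 'bundleDependencies', '_id'],
  changes,
}).then(() => {
  console.log(pkg.content._id)                 // 'example@1.0.0'
  console.log(pkg.content.bundleDependencies)  // ['once'] - expanded from "true"
  console.log(changes)                         // list of what was rewritten
})
```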
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js
deleted file mode 100644
index d6c86ce388e6c..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/read-package.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// This is JUST the code needed to open a package.json file and parse it.
-// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
-
-const { readFile } = require('fs/promises')
-const parseJSON = require('json-parse-even-better-errors')
-
-async function read (filename) {
-  try {
-    const data = await readFile(filename, 'utf8')
-    return data
-  } catch (err) {
-    err.message = `Could not read package.json: ${err}`
-    throw err
-  }
-}
-
-function parse (data) {
-  try {
-    const content = parseJSON(data)
-    return content
-  } catch (err) {
-    err.message = `Invalid package.json: ${err}`
-    throw err
-  }
-}
-
-// This is what most external libs will use.
-// PackageJson will call read and parse separately
-async function readPackage (filename) {
-  const data = await read(filename)
-  const content = parse(data)
-  return content
-}
-
-module.exports = {
-  read,
-  parse,
-  readPackage,
-}
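`read-package.js` above is a thin read-then-parse helper. A sketch of the exported entry points, assuming the require path is adjusted to wherever the file actually resolves:

```js
// Sketch only: uses the read/parse/readPackage exports shown in the hunk above.
const { read, parse, readPackage } = require('@npmcli/package-json/lib/read-package.js')

async function main () {
  // one-shot convenience used by most external callers
  const content = await readPackage('./package.json')

  // or the two phases separately, as PackageJson itself does
  const raw = await read('./package.json')
  const parsed = parse(raw)

  console.log(content.name, parsed.version)
}

main().catch(err => {
  // errors are re-thrown with the "Could not read package.json:" /
  // "Invalid package.json:" prefixes set in the hunk above
  console.error(err.message)
})
```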
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js
deleted file mode 100644
index 0bd0d5199da58..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/sort.js
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * arbitrary sort order for package.json largely pulled from:
- * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
- *
- * cross checked with:
- * https://github.com/npm/types/blob/main/types/index.d.ts#L104
- * https://docs.npmjs.com/cli/configuring-npm/package-json
- */
-function packageSort (json) {
-  const {
-    name,
-    version,
-    private: isPrivate,
-    description,
-    keywords,
-    homepage,
-    bugs,
-    repository,
-    funding,
-    license,
-    author,
-    maintainers,
-    contributors,
-    type,
-    imports,
-    exports,
-    main,
-    browser,
-    types,
-    bin,
-    man,
-    directories,
-    files,
-    workspaces,
-    scripts,
-    config,
-    dependencies,
-    devDependencies,
-    peerDependencies,
-    peerDependenciesMeta,
-    optionalDependencies,
-    bundledDependencies,
-    bundleDependencies,
-    engines,
-    os,
-    cpu,
-    publishConfig,
-    devEngines,
-    licenses,
-    overrides,
-    ...rest
-  } = json
-
-  return {
-    ...(typeof name !== 'undefined' ? { name } : {}),
-    ...(typeof version !== 'undefined' ? { version } : {}),
-    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
-    ...(typeof description !== 'undefined' ? { description } : {}),
-    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
-    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
-    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
-    ...(typeof repository !== 'undefined' ? { repository } : {}),
-    ...(typeof funding !== 'undefined' ? { funding } : {}),
-    ...(typeof license !== 'undefined' ? { license } : {}),
-    ...(typeof author !== 'undefined' ? { author } : {}),
-    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
-    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
-    ...(typeof type !== 'undefined' ? { type } : {}),
-    ...(typeof imports !== 'undefined' ? { imports } : {}),
-    ...(typeof exports !== 'undefined' ? { exports } : {}),
-    ...(typeof main !== 'undefined' ? { main } : {}),
-    ...(typeof browser !== 'undefined' ? { browser } : {}),
-    ...(typeof types !== 'undefined' ? { types } : {}),
-    ...(typeof bin !== 'undefined' ? { bin } : {}),
-    ...(typeof man !== 'undefined' ? { man } : {}),
-    ...(typeof directories !== 'undefined' ? { directories } : {}),
-    ...(typeof files !== 'undefined' ? { files } : {}),
-    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
-    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
-    ...(typeof config !== 'undefined' ? { config } : {}),
-    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
-    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
-    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
-    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
-    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
-    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
-    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
-    ...(typeof engines !== 'undefined' ? { engines } : {}),
-    ...(typeof os !== 'undefined' ? { os } : {}),
-    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
-    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
-    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
-    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
-    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
-    ...rest,
-  }
-}
-
-module.exports = {
-  packageSort,
-}
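`packageSort` above only reorders keys; it never adds or drops fields, and unknown keys fall through to the end via `...rest`. A small sketch:

```js
// Sketch only: packageSort is the function in the hunk above; the require path is illustrative.
const { packageSort } = require('@npmcli/package-json/lib/sort.js')

const sorted = packageSort({
  scripts: { test: 'tap' },
  version: '1.0.0',
  myCustomField: true, // unknown keys are kept, after the known ones
  name: 'example',
})

console.log(Object.keys(sorted))
// -> [ 'name', 'version', 'scripts', 'myCustomField' ]
```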
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js
deleted file mode 100644
index 7259949ab661d..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-dependencies.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const depTypes = new Set([
-  'dependencies',
-  'optionalDependencies',
-  'devDependencies',
-  'peerDependencies',
-])
-
-// sort alphabetically all types of deps for a given package
-const orderDeps = (content) => {
-  for (const type of depTypes) {
-    if (content && content[type]) {
-      content[type] = Object.keys(content[type])
-        .sort((a, b) => a.localeCompare(b, 'en'))
-        .reduce((res, key) => {
-          res[key] = content[type][key]
-          return res
-        }, {})
-    }
-  }
-  return content
-}
-
-const updateDependencies = ({ content, originalContent }) => {
-  const pkg = orderDeps({
-    ...content,
-  })
-
-  // optionalDependencies don't need to be repeated in two places
-  if (pkg.dependencies) {
-    if (pkg.optionalDependencies) {
-      for (const name of Object.keys(pkg.optionalDependencies)) {
-        delete pkg.dependencies[name]
-      }
-    }
-  }
-
-  const result = { ...originalContent }
-
-  // loop through all types of dependencies and update package json pkg
-  for (const type of depTypes) {
-    if (pkg[type]) {
-      result[type] = pkg[type]
-    }
-
-    // prune empty type props from resulting object
-    const emptyDepType =
-      pkg[type]
-      && typeof pkg === 'object'
-      && Object.keys(pkg[type]).length === 0
-    if (emptyDepType) {
-      delete result[type]
-    }
-  }
-
-  // if original package.json had dep in peerDeps AND deps, preserve that.
-  const { dependencies: origProd, peerDependencies: origPeer } =
-    originalContent || {}
-  const { peerDependencies: newPeer } = result
-  if (origProd && origPeer && newPeer) {
-    // we have original prod/peer deps, and new peer deps
-    // copy over any that were in both in the original
-    for (const name of Object.keys(origPeer)) {
-      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
-        result.dependencies = result.dependencies || {}
-        result.dependencies[name] = newPeer[name]
-      }
-    }
-  }
-
-  return result
-}
-
-updateDependencies.knownKeys = depTypes
-
-module.exports = updateDependencies
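`updateDependencies` above sorts each dependency block alphabetically, drops production deps that are repeated in `optionalDependencies`, and restores a prod entry only when the original content listed it in both `dependencies` and `peerDependencies`. A sketch of the common case:

```js
// Sketch only: updateDependencies is the helper in the hunk above; the require path is illustrative.
const updateDependencies = require('@npmcli/package-json/lib/update-dependencies.js')

const result = updateDependencies({
  originalContent: { name: 'example' },
  content: {
    dependencies: { zlib: '^1.0.0', abbrev: '^2.0.0', 'left-pad': '^1.3.0' },
    optionalDependencies: { 'left-pad': '^1.3.0' },
  },
})

console.log(result.dependencies)
// -> { abbrev: '^2.0.0', zlib: '^1.0.0' }   (sorted; left-pad now lives only in optional)
console.log(result.optionalDependencies)
// -> { 'left-pad': '^1.3.0' }
```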
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js
deleted file mode 100644
index 30495e54cc3c7..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-scripts.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const updateScripts = ({ content, originalContent = {} }) => {
-  const newScripts = content.scripts
-
-  if (!newScripts) {
-    return originalContent
-  }
-
-  // validate scripts content being appended
-  const hasInvalidScripts = () =>
-    Object.entries(newScripts)
-      .some(([key, value]) =>
-        typeof key !== 'string' || typeof value !== 'string')
-  if (hasInvalidScripts()) {
-    throw Object.assign(
-      new TypeError(
-        'package.json scripts should be a key-value pair of strings.'),
-      { code: 'ESCRIPTSINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    scripts: {
-      ...newScripts,
-    },
-  }
-}
-
-module.exports = updateScripts
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js
deleted file mode 100644
index 04bf63230636f..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/package-json/lib/update-workspaces.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const updateWorkspaces = ({ content, originalContent = {} }) => {
-  const newWorkspaces = content.workspaces
-
-  if (!newWorkspaces) {
-    return originalContent
-  }
-
-  // validate workspaces content being appended
-  const hasInvalidWorkspaces = () =>
-    newWorkspaces.some(w => !(typeof w === 'string'))
-  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
-    throw Object.assign(
-      new TypeError('workspaces should be an array of strings.'),
-      { code: 'EWORKSPACESINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    workspaces: [
-      ...newWorkspaces,
-    ],
-  }
-}
-
-module.exports = updateWorkspaces
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE b/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
new file mode 100644
index 0000000000000..c36c40d4898d5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
@@ -0,0 +1,11 @@
+const { stat } = require('node:fs/promises')
+const { resolve } = require('node:path')
+
+module.exports = async path => {
+  try {
+    const st = await stat(resolve(path, 'server.js'))
+    return st.isFile()
+  } catch (er) {
+    return false
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
new file mode 100644
index 0000000000000..1c9f02c062f72
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
@@ -0,0 +1,53 @@
+/* eslint camelcase: "off" */
+const setPATH = require('./set-path.js')
+const { resolve } = require('path')
+
+let npm_config_node_gyp
+
+const makeSpawnArgs = options => {
+  const {
+    args,
+    binPaths,
+    cmd,
+    env,
+    event,
+    nodeGyp,
+    path,
+    scriptShell = true,
+    stdio,
+    stdioString,
+  } = options
+
+  if (nodeGyp) {
+    // npm already pulled this from env and passes it in to options
+    npm_config_node_gyp = nodeGyp
+  } else if (env.npm_config_node_gyp) {
+    // legacy mode for standalone user
+    npm_config_node_gyp = env.npm_config_node_gyp
+  } else {
+    // default
+    npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
+  }
+
+  const spawnEnv = setPATH(path, binPaths, {
+    // we need to at least save the PATH environment var
+    ...process.env,
+    ...env,
+    npm_package_json: resolve(path, 'package.json'),
+    npm_lifecycle_event: event,
+    npm_lifecycle_script: cmd,
+    npm_config_node_gyp,
+  })
+
+  const spawnOpts = {
+    env: spawnEnv,
+    stdioString,
+    stdio,
+    cwd: path,
+    shell: scriptShell,
+  }
+
+  return [cmd, args, spawnOpts]
+}
+
+module.exports = makeSpawnArgs
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
new file mode 100755
index 0000000000000..5bec64d961a3a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
@@ -0,0 +1,2 @@
+#!/usr/bin/env sh
+node "$npm_config_node_gyp" "$@"
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
new file mode 100755
index 0000000000000..4c6987ac9868b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
@@ -0,0 +1 @@
+@node "%npm_config_node_gyp%" %*
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
new file mode 100644
index 0000000000000..612f850fb076c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
@@ -0,0 +1,29 @@
+const packageEnvs = (vals, prefix, env = {}) => {
+  for (const [key, val] of Object.entries(vals)) {
+    if (val === undefined) {
+      continue
+    } else if (val === null || val === false) {
+      env[`${prefix}${key}`] = ''
+    } else if (Array.isArray(val)) {
+      val.forEach((item, index) => {
+        packageEnvs({ [`${key}_${index}`]: item }, `${prefix}`, env)
+      })
+    } else if (typeof val === 'object') {
+      packageEnvs(val, `${prefix}${key}_`, env)
+    } else {
+      env[`${prefix}${key}`] = String(val)
+    }
+  }
+  return env
+}
+
+// https://github.com/npm/rfcs/pull/183 defines which fields we put into the environment
+module.exports = pkg => {
+  return packageEnvs({
+    name: pkg.name,
+    version: pkg.version,
+    config: pkg.config,
+    engines: pkg.engines,
+    bin: pkg.bin,
+  }, 'npm_package_')
+}
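`package-envs.js` above flattens the selected package.json fields into `npm_package_*` environment variables, recursing into objects and arrays. A sketch of the flattening, calling the default export the same way `run-script-pkg.js` below does (the require path is illustrative):

```js
// Sketch only: packageEnvs is the default export in the hunk above.
const packageEnvs = require('@npmcli/run-script/lib/package-envs.js')

const env = packageEnvs({
  name: 'example',
  version: '1.0.0',
  config: { port: 8080, flags: ['--verbose', '--color'] },
  engines: { node: '>=20' },
})

console.log(env)
// {
//   npm_package_name: 'example',
//   npm_package_version: '1.0.0',
//   npm_package_config_port: '8080',
//   npm_package_config_flags_0: '--verbose',
//   npm_package_config_flags_1: '--color',
//   npm_package_engines_node: '>=20'
// }
```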
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
new file mode 100644
index 0000000000000..161caebb98d97
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
@@ -0,0 +1,114 @@
+const makeSpawnArgs = require('./make-spawn-args.js')
+const promiseSpawn = require('@npmcli/promise-spawn')
+const packageEnvs = require('./package-envs.js')
+const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp')
+const signalManager = require('./signal-manager.js')
+const isServerPackage = require('./is-server-package.js')
+
+const runScriptPkg = async options => {
+  const {
+    args = [],
+    binPaths = false,
+    env = {},
+    event,
+    nodeGyp,
+    path,
+    pkg,
+    scriptShell,
+    // how long to wait for a process.kill signal
+    // only exposed here so that we can make the test go a bit faster.
+    signalTimeout = 500,
+    stdio = 'pipe',
+    stdioString,
+  } = options
+
+  const { scripts = {}, gypfile } = pkg
+  let cmd = null
+  if (options.cmd) {
+    cmd = options.cmd
+  } else if (pkg.scripts && pkg.scripts[event]) {
+    cmd = pkg.scripts[event]
+  } else if (
+    // If there is no preinstall or install script, default to rebuilding node-gyp packages.
+    event === 'install' &&
+    !scripts.install &&
+    !scripts.preinstall &&
+    gypfile !== false &&
+    await isNodeGypPackage(path)
+  ) {
+    cmd = defaultGypInstallScript
+  } else if (event === 'start' && await isServerPackage(path)) {
+    cmd = 'node server.js'
+  }
+
+  if (!cmd) {
+    return { code: 0, signal: null }
+  }
+
+  let inputEnd = () => {}
+  if (stdio === 'inherit') {
+    let banner
+    if (pkg._id) {
+      banner = `\n> ${pkg._id} ${event}\n`
+    } else {
+      banner = `\n> ${event}\n`
+    }
+    banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}`
+    if (args.length) {
+      banner += ` ${args.join(' ')}`
+    }
+    banner += '\n'
+    const { output, input } = require('proc-log')
+    output.standard(banner)
+    inputEnd = input.start()
+  }
+
+  const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({
+    args,
+    binPaths,
+    cmd,
+    env: { ...env, ...packageEnvs(pkg) },
+    event,
+    nodeGyp,
+    path,
+    scriptShell,
+    stdio,
+    stdioString,
+  })
+
+  const p = promiseSpawn(spawnShell, spawnArgs, spawnOpts, {
+    event,
+    script: cmd,
+    pkgid: pkg._id,
+    path,
+  })
+
+  if (stdio === 'inherit') {
+    signalManager.add(p.process)
+  }
+
+  if (p.stdin) {
+    p.stdin.end()
+  }
+
+  return p.catch(er => {
+    const { signal } = er
+    // coverage disabled because win32 never emits signals
+    /* istanbul ignore next */
+    if (stdio === 'inherit' && signal) {
+      // by the time we reach here, the child has already exited. we send the
+      // signal back to ourselves again so that npm will exit with the same
+      // status as the child
+      process.kill(process.pid, signal)
+
+      // just in case we don't die, reject after 500ms
+      // this also keeps the node process open long enough to actually
+      // get the signal, rather than terminating gracefully.
+      return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout))
+    } else {
+      throw er
+    }
+  }).finally(inputEnd)
+}
+
+module.exports = runScriptPkg
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
new file mode 100644
index 0000000000000..b00304c8d6e7f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
@@ -0,0 +1,15 @@
+const PackageJson = require('@npmcli/package-json')
+const runScriptPkg = require('./run-script-pkg.js')
+const validateOptions = require('./validate-options.js')
+const isServerPackage = require('./is-server-package.js')
+
+const runScript = async options => {
+  validateOptions(options)
+  if (options.pkg) {
+    return runScriptPkg(options)
+  }
+  const { content: pkg } = await PackageJson.normalize(options.path)
+  return runScriptPkg({ ...options, pkg })
+}
+
+module.exports = Object.assign(runScript, { isServerPackage })
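`run-script.js` above is the public entry point: it validates the options, then either uses the `pkg` it was handed or normalizes the package.json found at `path` before delegating to `run-script-pkg.js`. A sketch of a call that satisfies `validate-options.js` below (the path is illustrative and must contain a package.json):

```js
// Sketch only: runScript is the package entry point shown above; event and path are illustrative.
const runScript = require('@npmcli/run-script')

async function main () {
  const { code, signal } = await runScript({
    event: 'test',                 // lifecycle event to run
    path: '/path/to/some/package', // directory containing the package.json
    stdio: 'pipe',                 // default; capture output instead of inheriting
    args: [],                      // extra args appended to the script
  })
  console.log({ code, signal })
}

main().catch(console.error)
```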
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
new file mode 100644
index 0000000000000..c59c270d9969a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
@@ -0,0 +1,45 @@
+const { resolve, dirname, delimiter } = require('path')
+// the path here is relative, even though it does not need to be
+// in order to make the posix tests pass in windows
+const nodeGypPath = resolve(__dirname, '../lib/node-gyp-bin')
+
+// Windows typically calls its PATH environ 'Path', but this is not
+// guaranteed, nor is it guaranteed to be the only one.  Merge them
+// all together in the order they appear in the object.
+const setPATH = (projectPath, binPaths, env) => {
+  const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p])
+    .map(p => env[p].split(delimiter))
+    .reduce((set, p) => set.concat(p.filter(concatted => !set.includes(concatted))), [])
+    .join(delimiter)
+
+  const pathArr = []
+  if (binPaths) {
+    pathArr.push(...binPaths)
+  }
+  // unshift the ./node_modules/.bin from every folder
+  // walk up until dirname() does nothing, at the root
+  // XXX we should specify a cwd that we don't go above
+  let p = projectPath
+  let pp
+  do {
+    pathArr.push(resolve(p, 'node_modules', '.bin'))
+    pp = p
+    p = dirname(p)
+  } while (p !== pp)
+  pathArr.push(nodeGypPath, PATH)
+
+  const pathVal = pathArr.join(delimiter)
+
+  // XXX include the node-gyp-bin path somehow?  Probably better for
+  // npm or arborist or whoever to just provide that by putting it in
+  // the PATH environ, since that's preserved anyway.
+  for (const key of Object.keys(env)) {
+    if (/^path$/i.test(key)) {
+      env[key] = pathVal
+    }
+  }
+
+  return env
+}
+
+module.exports = setPATH
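`set-path.js` above collapses every case variant of `PATH` in the incoming env into one value that front-loads any caller-supplied `binPaths`, then a `node_modules/.bin` directory for every folder walking up from the project path, then the bundled node-gyp shim, then the original `PATH`. A sketch of the resulting order on a POSIX system, with illustrative paths (note the caller-supplied entry is simply placed first; duplicates are not removed):

```js
// Sketch only: setPATH is the helper in the hunk above; paths are illustrative and POSIX-style.
const { delimiter } = require('path')
const setPATH = require('@npmcli/run-script/lib/set-path.js')

const env = setPATH(
  '/repo/packages/app',                 // project path
  ['/repo/node_modules/.bin'],          // binPaths supplied by the caller (e.g. arborist)
  { PATH: '/usr/local/bin:/usr/bin' }   // existing environment
)

console.log(env.PATH.split(delimiter))
// -> [ '/repo/node_modules/.bin',
//      '/repo/packages/app/node_modules/.bin',
//      '/repo/packages/node_modules/.bin',
//      '/repo/node_modules/.bin',
//      '/node_modules/.bin',
//      <bundled node-gyp-bin dir>,
//      '/usr/local/bin', '/usr/bin' ]
```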
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
new file mode 100644
index 0000000000000..a099a4af2b9be
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
@@ -0,0 +1,50 @@
+const runningProcs = new Set()
+let handlersInstalled = false
+
+const forwardedSignals = [
+  'SIGINT',
+  'SIGTERM',
+]
+
+// no-op, this is so receiving the signal doesn't cause us to exit immediately
+// instead, we exit after all children have exited when we re-send the signal
+// to ourselves. see the catch handler at the bottom of run-script-pkg.js
+const handleSignal = signal => {
+  for (const proc of runningProcs) {
+    proc.kill(signal)
+  }
+}
+
+const setupListeners = () => {
+  for (const signal of forwardedSignals) {
+    process.on(signal, handleSignal)
+  }
+  handlersInstalled = true
+}
+
+const cleanupListeners = () => {
+  if (runningProcs.size === 0) {
+    for (const signal of forwardedSignals) {
+      process.removeListener(signal, handleSignal)
+    }
+    handlersInstalled = false
+  }
+}
+
+const add = proc => {
+  runningProcs.add(proc)
+  if (!handlersInstalled) {
+    setupListeners()
+  }
+
+  proc.once('exit', () => {
+    runningProcs.delete(proc)
+    cleanupListeners()
+  })
+}
+
+module.exports = {
+  add,
+  handleSignal,
+  forwardedSignals,
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
new file mode 100644
index 0000000000000..8d855916ecd15
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
@@ -0,0 +1,39 @@
+const validateOptions = options => {
+  if (typeof options !== 'object' || !options) {
+    throw new TypeError('invalid options object provided to runScript')
+  }
+
+  const {
+    event,
+    path,
+    scriptShell,
+    env = {},
+    stdio = 'pipe',
+    args = [],
+    cmd,
+  } = options
+
+  if (!event || typeof event !== 'string') {
+    throw new TypeError('valid event not provided to runScript')
+  }
+  if (!path || typeof path !== 'string') {
+    throw new TypeError('valid path not provided to runScript')
+  }
+  if (scriptShell !== undefined && typeof scriptShell !== 'string') {
+    throw new TypeError('invalid scriptShell option provided to runScript')
+  }
+  if (typeof env !== 'object' || !env) {
+    throw new TypeError('invalid env option provided to runScript')
+  }
+  if (typeof stdio !== 'string' && !Array.isArray(stdio)) {
+    throw new TypeError('invalid stdio option provided to runScript')
+  }
+  if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) {
+    throw new TypeError('invalid args option provided to runScript')
+  }
+  if (cmd !== undefined && typeof cmd !== 'string') {
+    throw new TypeError('invalid cmd option provided to runScript')
+  }
+}
+
+module.exports = validateOptions
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/package.json b/node_modules/pacote/node_modules/@npmcli/run-script/package.json
new file mode 100644
index 0000000000000..2873f7cbf91c5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@npmcli/run-script/package.json
@@ -0,0 +1,54 @@
+{
+  "name": "@npmcli/run-script",
+  "version": "10.0.0",
+  "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "scripts": {
+    "test": "tap",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "spawk": "^1.8.1",
+    "tap": "^16.0.1"
+  },
+  "dependencies": {
+    "@npmcli/node-gyp": "^4.0.0",
+    "@npmcli/package-json": "^7.0.0",
+    "@npmcli/promise-spawn": "^8.0.0",
+    "node-gyp": "^11.0.0",
+    "proc-log": "^5.0.0",
+    "which": "^5.0.0"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "main": "lib/run-script.js",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/run-script.git"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE b/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js
new file mode 100644
index 0000000000000..ade736407554c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js
@@ -0,0 +1,100 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+exports.toDSSEBundle = toDSSEBundle;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(options) {
+    return {
+        mediaType: options.certificateChain
+            ? bundle_1.BUNDLE_V02_MEDIA_TYPE
+            : bundle_1.BUNDLE_V03_MEDIA_TYPE,
+        content: {
+            $case: 'messageSignature',
+            messageSignature: {
+                messageDigest: {
+                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
+                    digest: options.digest,
+                },
+                signature: options.signature,
+            },
+        },
+        verificationMaterial: toVerificationMaterial(options),
+    };
+}
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(options) {
+    return {
+        mediaType: options.certificateChain
+            ? bundle_1.BUNDLE_V02_MEDIA_TYPE
+            : bundle_1.BUNDLE_V03_MEDIA_TYPE,
+        content: {
+            $case: 'dsseEnvelope',
+            dsseEnvelope: toEnvelope(options),
+        },
+        verificationMaterial: toVerificationMaterial(options),
+    };
+}
+function toEnvelope(options) {
+    return {
+        payloadType: options.artifactType,
+        payload: options.artifact,
+        signatures: [toSignature(options)],
+    };
+}
+function toSignature(options) {
+    return {
+        keyid: options.keyHint || '',
+        sig: options.signature,
+    };
+}
+// Verification material
+function toVerificationMaterial(options) {
+    return {
+        content: toKeyContent(options),
+        tlogEntries: [],
+        timestampVerificationData: { rfc3161Timestamps: [] },
+    };
+}
+function toKeyContent(options) {
+    if (options.certificate) {
+        if (options.certificateChain) {
+            return {
+                $case: 'x509CertificateChain',
+                x509CertificateChain: {
+                    certificates: [{ rawBytes: options.certificate }],
+                },
+            };
+        }
+        else {
+            return {
+                $case: 'certificate',
+                certificate: { rawBytes: options.certificate },
+            };
+        }
+    }
+    else {
+        return {
+            $case: 'publicKey',
+            publicKey: {
+                hint: options.keyHint || '',
+            },
+        };
+    }
+}
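`build.js` above assembles a sigstore bundle from already-computed signing artifacts: the media type depends on whether a certificate chain is in play, and the verification material falls back to a public-key hint when no certificate is supplied. A sketch with made-up bytes (not real signing output), using the guard exported from `bundle.js` just below:

```js
// Sketch only: toDSSEBundle and isBundleWithPublicKey come from the hunks in this package;
// the artifact and signature bytes are fabricated for illustration.
const { toDSSEBundle, isBundleWithPublicKey } = require('@sigstore/bundle')

const bundle = toDSSEBundle({
  artifact: Buffer.from('{"foo":"bar"}'),
  artifactType: 'application/json',
  signature: Buffer.from('fake-signature'),
  keyHint: 'key-1',
})

console.log(bundle.mediaType)                            // v0.3 media type (no certificateChain given)
console.log(bundle.content.$case)                        // 'dsseEnvelope'
console.log(bundle.verificationMaterial.content.$case)   // 'publicKey'
console.log(isBundleWithPublicKey(bundle))               // true
```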
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js
new file mode 100644
index 0000000000000..eb67a0ddc17bb
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
+exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
+exports.isBundleWithPublicKey = isBundleWithPublicKey;
+exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
+exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
+exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
+exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
+exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3';
+exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json';
+// Type guards for bundle variants.
+function isBundleWithCertificateChain(b) {
+    return b.verificationMaterial.content.$case === 'x509CertificateChain';
+}
+function isBundleWithPublicKey(b) {
+    return b.verificationMaterial.content.$case === 'publicKey';
+}
+function isBundleWithMessageSignature(b) {
+    return b.content.$case === 'messageSignature';
+}
+function isBundleWithDsseEnvelope(b) {
+    return b.content.$case === 'dsseEnvelope';
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js
new file mode 100644
index 0000000000000..f84295323b812
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js
@@ -0,0 +1,25 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ValidationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ValidationError extends Error {
+    constructor(message, fields) {
+        super(message);
+        this.fields = fields;
+    }
+}
+exports.ValidationError = ValidationError;
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js
new file mode 100644
index 0000000000000..1b012acad4d85
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var build_1 = require("./build");
+Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
+Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } });
+Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } });
+Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
+Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
+Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
+Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
+var serialized_1 = require("./serialized");
+Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
+Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
+Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
+Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
+var validate_1 = require("./validate");
+Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
+Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
+Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
+Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } });
+Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js
new file mode 100644
index 0000000000000..be0d2a2d54d09
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js
@@ -0,0 +1,49 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const bundle_1 = require("./bundle");
+const validate_1 = require("./validate");
+const bundleFromJSON = (obj) => {
+    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
+    switch (bundle.mediaType) {
+        case bundle_1.BUNDLE_V01_MEDIA_TYPE:
+            (0, validate_1.assertBundleV01)(bundle);
+            break;
+        case bundle_1.BUNDLE_V02_MEDIA_TYPE:
+            (0, validate_1.assertBundleV02)(bundle);
+            break;
+        default:
+            (0, validate_1.assertBundleLatest)(bundle);
+            break;
+    }
+    return bundle;
+};
+exports.bundleFromJSON = bundleFromJSON;
+const bundleToJSON = (bundle) => {
+    return protobuf_specs_1.Bundle.toJSON(bundle);
+};
+exports.bundleToJSON = bundleToJSON;
+const envelopeFromJSON = (obj) => {
+    return protobuf_specs_1.Envelope.fromJSON(obj);
+};
+exports.envelopeFromJSON = envelopeFromJSON;
+const envelopeToJSON = (envelope) => {
+    return protobuf_specs_1.Envelope.toJSON(envelope);
+};
+exports.envelopeToJSON = envelopeToJSON;
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js
new file mode 100644
index 0000000000000..21b8b5ee293ba
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js
@@ -0,0 +1,199 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertBundle = assertBundle;
+exports.assertBundleV01 = assertBundleV01;
+exports.isBundleV01 = isBundleV01;
+exports.assertBundleV02 = assertBundleV02;
+exports.assertBundleLatest = assertBundleLatest;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("./error");
+// Performs basic validation of a Sigstore bundle to ensure that all required
+// fields are populated. This is not a complete validation of the bundle, but
+// rather a check that the bundle is in a valid state to be processed by the
+// rest of the code.
+function assertBundle(b) {
+    const invalidValues = validateBundleBase(b);
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid bundle', invalidValues);
+    }
+}
+// Asserts that the given bundle conforms to the v0.1 bundle format.
+function assertBundleV01(b) {
+    const invalidValues = [];
+    invalidValues.push(...validateBundleBase(b));
+    invalidValues.push(...validateInclusionPromise(b));
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
+    }
+}
+// Type guard to determine if Bundle is a v0.1 bundle.
+function isBundleV01(b) {
+    try {
+        assertBundleV01(b);
+        return true;
+    }
+    catch (e) {
+        return false;
+    }
+}
+// Asserts that the given bundle conforms to the v0.2 bundle format.
+function assertBundleV02(b) {
+    const invalidValues = [];
+    invalidValues.push(...validateBundleBase(b));
+    invalidValues.push(...validateInclusionProof(b));
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
+    }
+}
+// Asserts that the given bundle conforms to the newest (0.3) bundle format.
+function assertBundleLatest(b) {
+    const invalidValues = [];
+    invalidValues.push(...validateBundleBase(b));
+    invalidValues.push(...validateInclusionProof(b));
+    invalidValues.push(...validateNoCertificateChain(b));
+    if (invalidValues.length > 0) {
+        throw new error_1.ValidationError('invalid bundle', invalidValues);
+    }
+}
+function validateBundleBase(b) {
+    const invalidValues = [];
+    // Media type validation
+    if (b.mediaType === undefined ||
+        (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) &&
+            !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) {
+        invalidValues.push('mediaType');
+    }
+    // Content-related validation
+    if (b.content === undefined) {
+        invalidValues.push('content');
+    }
+    else {
+        switch (b.content.$case) {
+            case 'messageSignature':
+                if (b.content.messageSignature.messageDigest === undefined) {
+                    invalidValues.push('content.messageSignature.messageDigest');
+                }
+                else {
+                    if (b.content.messageSignature.messageDigest.digest.length === 0) {
+                        invalidValues.push('content.messageSignature.messageDigest.digest');
+                    }
+                }
+                if (b.content.messageSignature.signature.length === 0) {
+                    invalidValues.push('content.messageSignature.signature');
+                }
+                break;
+            case 'dsseEnvelope':
+                if (b.content.dsseEnvelope.payload.length === 0) {
+                    invalidValues.push('content.dsseEnvelope.payload');
+                }
+                if (b.content.dsseEnvelope.signatures.length !== 1) {
+                    invalidValues.push('content.dsseEnvelope.signatures');
+                }
+                else {
+                    if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
+                        invalidValues.push('content.dsseEnvelope.signatures[0].sig');
+                    }
+                }
+                break;
+        }
+    }
+    // Verification material-related validation
+    if (b.verificationMaterial === undefined) {
+        invalidValues.push('verificationMaterial');
+    }
+    else {
+        if (b.verificationMaterial.content === undefined) {
+            invalidValues.push('verificationMaterial.content');
+        }
+        else {
+            switch (b.verificationMaterial.content.$case) {
+                case 'x509CertificateChain':
+                    if (b.verificationMaterial.content.x509CertificateChain.certificates
+                        .length === 0) {
+                        invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
+                    }
+                    b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
+                        if (cert.rawBytes.length === 0) {
+                            invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
+                        }
+                    });
+                    break;
+                case 'certificate':
+                    if (b.verificationMaterial.content.certificate.rawBytes.length === 0) {
+                        invalidValues.push('verificationMaterial.content.certificate.rawBytes');
+                    }
+                    break;
+            }
+        }
+        if (b.verificationMaterial.tlogEntries === undefined) {
+            invalidValues.push('verificationMaterial.tlogEntries');
+        }
+        else {
+            if (b.verificationMaterial.tlogEntries.length > 0) {
+                b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+                    if (entry.logId === undefined) {
+                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
+                    }
+                    if (entry.kindVersion === undefined) {
+                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
+                    }
+                });
+            }
+        }
+    }
+    return invalidValues;
+}
+// Necessary for V01 bundles
+function validateInclusionPromise(b) {
+    const invalidValues = [];
+    if (b.verificationMaterial &&
+        b.verificationMaterial.tlogEntries?.length > 0) {
+        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+            if (entry.inclusionPromise === undefined) {
+                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
+            }
+        });
+    }
+    return invalidValues;
+}
+// Necessary for V02 and later bundles
+function validateInclusionProof(b) {
+    const invalidValues = [];
+    if (b.verificationMaterial &&
+        b.verificationMaterial.tlogEntries?.length > 0) {
+        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
+            if (entry.inclusionProof === undefined) {
+                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
+            }
+            else {
+                if (entry.inclusionProof.checkpoint === undefined) {
+                    invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
+                }
+            }
+        });
+    }
+    return invalidValues;
+}
+// Necessary for V03 and later bundles
+function validateNoCertificateChain(b) {
+    const invalidValues = [];
+    /* istanbul ignore next */
+    if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') {
+        invalidValues.push('verificationMaterial.content.$case');
+    }
+    return invalidValues;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/package.json b/node_modules/pacote/node_modules/@sigstore/bundle/package.json
new file mode 100644
index 0000000000000..03291b2159b79
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/bundle/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "@sigstore/bundle",
+  "version": "4.0.0",
+  "description": "Sigstore bundle type",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist",
+    "store"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "dependencies": {
+    "@sigstore/protobuf-specs": "^0.5.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/LICENSE b/node_modules/pacote/node_modules/@sigstore/core/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js
new file mode 100644
index 0000000000000..17d93b0f7e706
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1TypeError = exports.ASN1ParseError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class ASN1ParseError extends Error {
+}
+exports.ASN1ParseError = ASN1ParseError;
+class ASN1TypeError extends Error {
+}
+exports.ASN1TypeError = ASN1TypeError;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js
new file mode 100644
index 0000000000000..348b2ea4022e5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var obj_1 = require("./obj");
+Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js
new file mode 100644
index 0000000000000..cb7ebf09dbefa
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js
@@ -0,0 +1,62 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.decodeLength = decodeLength;
+exports.encodeLength = encodeLength;
+const error_1 = require("./error");
+// Decodes the length of a DER-encoded ASN.1 element from the supplied stream.
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes
+function decodeLength(stream) {
+    const buf = stream.getUint8();
+    // If the most significant bit is UNSET the length is just the value of the
+    // byte.
+    if ((buf & 0x80) === 0x00) {
+        return buf;
+    }
+    // Otherwise, the lower 7 bits of the first byte indicate the number of bytes
+    // that follow to encode the length.
+    const byteCount = buf & 0x7f;
+    // Ensure the encoded length can safely fit in a JS number.
+    if (byteCount > 6) {
+        throw new error_1.ASN1ParseError('length exceeds 6 byte limit');
+    }
+    // Iterate over the bytes that encode the length.
+    let len = 0;
+    for (let i = 0; i < byteCount; i++) {
+        len = len * 256 + stream.getUint8();
+    }
+    // This is a valid ASN.1 length encoding, but we don't support it.
+    if (len === 0) {
+        throw new error_1.ASN1ParseError('indefinite length encoding not supported');
+    }
+    return len;
+}
+// Translates the supplied value to a DER-encoded length.
+function encodeLength(len) {
+    if (len < 128) {
+        return Buffer.from([len]);
+    }
+    // Bitwise operations on large numbers are not supported in JS, so we need to
+    // use BigInts.
+    let val = BigInt(len);
+    const bytes = [];
+    while (val > 0n) {
+        bytes.unshift(Number(val & 255n));
+        val = val >> 8n;
+    }
+    return Buffer.from([0x80 | bytes.length, ...bytes]);
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js
new file mode 100644
index 0000000000000..5f9ac9cdbc493
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js
@@ -0,0 +1,152 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const stream_1 = require("../stream");
+const error_1 = require("./error");
+const length_1 = require("./length");
+const parse_1 = require("./parse");
+const tag_1 = require("./tag");
+class ASN1Obj {
+    constructor(tag, value, subs) {
+        this.tag = tag;
+        this.value = value;
+        this.subs = subs;
+    }
+    // Constructs an ASN.1 object from a Buffer of DER-encoded bytes.
+    static parseBuffer(buf) {
+        return parseStream(new stream_1.ByteStream(buf));
+    }
+    toDER() {
+        const valueStream = new stream_1.ByteStream();
+        if (this.subs.length > 0) {
+            for (const sub of this.subs) {
+                valueStream.appendView(sub.toDER());
+            }
+        }
+        else {
+            valueStream.appendView(this.value);
+        }
+        const value = valueStream.buffer;
+        // Concat tag/length/value
+        const obj = new stream_1.ByteStream();
+        obj.appendChar(this.tag.toDER());
+        obj.appendView((0, length_1.encodeLength)(value.length));
+        obj.appendView(value);
+        return obj.buffer;
+    }
+    /////////////////////////////////////////////////////////////////////////////
+    // Convenience methods for parsing ASN.1 primitives into JS types
+    // Returns the ASN.1 object's value as a boolean. Throws an error if the
+    // object is not a boolean.
+    toBoolean() {
+        if (!this.tag.isBoolean()) {
+            throw new error_1.ASN1TypeError('not a boolean');
+        }
+        return (0, parse_1.parseBoolean)(this.value);
+    }
+    // Returns the ASN.1 object's value as a BigInt. Throws an error if the
+    // object is not an integer.
+    toInteger() {
+        if (!this.tag.isInteger()) {
+            throw new error_1.ASN1TypeError('not an integer');
+        }
+        return (0, parse_1.parseInteger)(this.value);
+    }
+    // Returns the ASN.1 object's value as an OID string. Throws an error if the
+    // object is not an OID.
+    toOID() {
+        if (!this.tag.isOID()) {
+            throw new error_1.ASN1TypeError('not an OID');
+        }
+        return (0, parse_1.parseOID)(this.value);
+    }
+    // Returns the ASN.1 object's value as a Date. Throws an error if the object
+    // is not either a UTCTime or a GeneralizedTime.
+    toDate() {
+        switch (true) {
+            case this.tag.isUTCTime():
+                return (0, parse_1.parseTime)(this.value, true);
+            case this.tag.isGeneralizedTime():
+                return (0, parse_1.parseTime)(this.value, false);
+            default:
+                throw new error_1.ASN1TypeError('not a date');
+        }
+    }
+    // Returns the ASN.1 object's value as a number[] where each number is the
+    // value of a bit in the bit string. Throws an error if the object is not a
+    // bit string.
+    toBitString() {
+        if (!this.tag.isBitString()) {
+            throw new error_1.ASN1TypeError('not a bit string');
+        }
+        return (0, parse_1.parseBitString)(this.value);
+    }
+}
+exports.ASN1Obj = ASN1Obj;
+/////////////////////////////////////////////////////////////////////////////
+// Internal stream parsing functions
+function parseStream(stream) {
+    // Parse tag, length, and value from stream
+    const tag = new tag_1.ASN1Tag(stream.getUint8());
+    const len = (0, length_1.decodeLength)(stream);
+    const value = stream.slice(stream.position, len);
+    const start = stream.position;
+    let subs = [];
+    // If the object is constructed, parse its children. Sometimes, children
+    // are embedded in OCTETSTRING objects, so we need to check those
+    // for children as well.
+    if (tag.constructed) {
+        subs = collectSubs(stream, len);
+    }
+    else if (tag.isOctetString()) {
+        // Attempt to parse children of OCTETSTRING objects. If anything fails,
+        // assume the object is not constructed and treat as primitive.
+        try {
+            subs = collectSubs(stream, len);
+        }
+        catch (e) {
+            // Fail silently and treat as primitive
+        }
+    }
+    // If there are no children, move stream cursor to the end of the object
+    if (subs.length === 0) {
+        stream.seek(start + len);
+    }
+    return new ASN1Obj(tag, value, subs);
+}
+function collectSubs(stream, len) {
+    // Calculate end of object content
+    const end = stream.position + len;
+    // Make sure there are enough bytes left in the stream. This should never
+    // happen because it'll get caught when the stream is sliced in parseStream.
+    // Leaving as an extra check just in case.
+    /* istanbul ignore if */
+    if (end > stream.length) {
+        throw new error_1.ASN1ParseError('invalid length');
+    }
+    // Parse all children
+    const subs = [];
+    while (stream.position < end) {
+        subs.push(parseStream(stream));
+    }
+    // When we're done parsing children, we should be at the end of the object
+    if (stream.position !== end) {
+        throw new error_1.ASN1ParseError('invalid length');
+    }
+    return subs;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js
new file mode 100644
index 0000000000000..7fbb42632c60e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js
@@ -0,0 +1,124 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseInteger = parseInteger;
+exports.parseStringASCII = parseStringASCII;
+exports.parseTime = parseTime;
+exports.parseOID = parseOID;
+exports.parseBoolean = parseBoolean;
+exports.parseBitString = parseBitString;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
+const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
+// Parse a BigInt from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
+function parseInteger(buf) {
+    let pos = 0;
+    const end = buf.length;
+    let val = buf[pos];
+    const neg = val > 0x7f;
+    // Consume any padding bytes
+    const pad = neg ? 0xff : 0x00;
+    while (val == pad && ++pos < end) {
+        val = buf[pos];
+    }
+    // Calculate remaining bytes to read
+    const len = end - pos;
+    if (len === 0)
+        return BigInt(neg ? -1 : 0);
+    // Handle two's complement for negative numbers
+    val = neg ? val - 256 : val;
+    // Parse remaining bytes
+    let n = BigInt(val);
+    for (let i = pos + 1; i < end; ++i) {
+        n = n * BigInt(256) + BigInt(buf[i]);
+    }
+    return n;
+}
+// Parse an ASCII string from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
+function parseStringASCII(buf) {
+    return buf.toString('ascii');
+}
+// Parse a Date from the DER-encoded buffer
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
+function parseTime(buf, shortYear) {
+    const timeStr = parseStringASCII(buf);
+    // Parse the time string into matches - captured groups start at index 1
+    const m = shortYear
+        ? RE_TIME_SHORT_YEAR.exec(timeStr)
+        : RE_TIME_LONG_YEAR.exec(timeStr);
+    if (!m) {
+        throw new Error('invalid time');
+    }
+    // Translate dates with a 2-digit year to 4 digits per the spec
+    if (shortYear) {
+        let year = Number(m[1]);
+        year += year >= 50 ? 1900 : 2000;
+        m[1] = year.toString();
+    }
+    // Translate to ISO8601 format and parse
+    return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`);
+}
+// Parse an OID from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
+function parseOID(buf) {
+    let pos = 0;
+    const end = buf.length;
+    // Consume first byte which encodes the first two OID components
+    let n = buf[pos++];
+    const first = Math.floor(n / 40);
+    const second = n % 40;
+    let oid = `${first}.${second}`;
+    // Consume remaining bytes
+    let val = 0;
+    for (; pos < end; ++pos) {
+        n = buf[pos];
+        val = (val << 7) + (n & 0x7f);
+        // If the left-most bit is NOT set, then this is the last byte in the
+        // sequence and we can add the value to the OID and reset the accumulator
+        if ((n & 0x80) === 0) {
+            oid += `.${val}`;
+            val = 0;
+        }
+    }
+    return oid;
+}
+// Parse a boolean from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
+function parseBoolean(buf) {
+    return buf[0] !== 0;
+}
+// Parse a bit string from the DER-encoded buffer
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
+function parseBitString(buf) {
+    // First byte tells us how many unused bits are in the last byte
+    const unused = buf[0];
+    const start = 1;
+    const end = buf.length;
+    const bits = [];
+    for (let i = start; i < end; ++i) {
+        const byte = buf[i];
+        // The skip value is only used for the last byte
+        const skip = i === end - 1 ? unused : 0;
+        // Iterate over each bit in the byte (most significant first)
+        for (let j = 7; j >= skip; --j) {
+            // Read the bit and add it to the bit string
+            bits.push((byte >> j) & 0x01);
+        }
+    }
+    return bits;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js
new file mode 100644
index 0000000000000..84dd938d049aa
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js
@@ -0,0 +1,86 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ASN1Tag = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("./error");
+const UNIVERSAL_TAG = {
+    BOOLEAN: 0x01,
+    INTEGER: 0x02,
+    BIT_STRING: 0x03,
+    OCTET_STRING: 0x04,
+    OBJECT_IDENTIFIER: 0x06,
+    SEQUENCE: 0x10,
+    SET: 0x11,
+    PRINTABLE_STRING: 0x13,
+    UTC_TIME: 0x17,
+    GENERALIZED_TIME: 0x18,
+};
+const TAG_CLASS = {
+    UNIVERSAL: 0x00,
+    APPLICATION: 0x01,
+    CONTEXT_SPECIFIC: 0x02,
+    PRIVATE: 0x03,
+};
+// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes
+class ASN1Tag {
+    constructor(enc) {
+        // Bits 0 through 4 are the tag number
+        this.number = enc & 0x1f;
+        // Bit 5 is the constructed bit
+        this.constructed = (enc & 0x20) === 0x20;
+        // Bits 6 & 7 are the class
+        this.class = enc >> 6;
+        if (this.number === 0x1f) {
+            throw new error_1.ASN1ParseError('long form tags not supported');
+        }
+        if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) {
+            throw new error_1.ASN1ParseError('unsupported tag 0x00');
+        }
+    }
+    isUniversal() {
+        return this.class === TAG_CLASS.UNIVERSAL;
+    }
+    isContextSpecific(num) {
+        const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC;
+        return num !== undefined ? res && this.number === num : res;
+    }
+    isBoolean() {
+        return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN;
+    }
+    isInteger() {
+        return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER;
+    }
+    isBitString() {
+        return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING;
+    }
+    isOctetString() {
+        return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING;
+    }
+    isOID() {
+        return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER);
+    }
+    isUTCTime() {
+        return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME;
+    }
+    isGeneralizedTime() {
+        return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME;
+    }
+    toDER() {
+        return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6);
+    }
+}
+exports.ASN1Tag = ASN1Tag;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js b/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js
new file mode 100644
index 0000000000000..296b5ba43e86a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js
@@ -0,0 +1,60 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createPublicKey = createPublicKey;
+exports.digest = digest;
+exports.verify = verify;
+exports.bufferEqual = bufferEqual;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+function createPublicKey(key, type = 'spki') {
+    if (typeof key === 'string') {
+        return crypto_1.default.createPublicKey(key);
+    }
+    else {
+        return crypto_1.default.createPublicKey({ key, format: 'der', type: type });
+    }
+}
+function digest(algorithm, ...data) {
+    const hash = crypto_1.default.createHash(algorithm);
+    for (const d of data) {
+        hash.update(d);
+    }
+    return hash.digest();
+}
+function verify(data, key, signature, algorithm) {
+    // The try/catch is to work around an issue in Node 14.x where verify throws
+    // an error in some scenarios if the signature is invalid.
+    try {
+        return crypto_1.default.verify(algorithm, data, key, signature);
+    }
+    catch (e) {
+        /* istanbul ignore next */
+        return false;
+    }
+}
+function bufferEqual(a, b) {
+    try {
+        return crypto_1.default.timingSafeEqual(a, b);
+    }
+    catch {
+        /* istanbul ignore next */
+        return false;
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js
new file mode 100644
index 0000000000000..ca7b63630e2ba
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.preAuthEncoding = preAuthEncoding;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PAE_PREFIX = 'DSSEv1';
+// DSSE Pre-Authentication Encoding
+function preAuthEncoding(payloadType, payload) {
+    const prefix = [
+        PAE_PREFIX,
+        payloadType.length,
+        payloadType,
+        payload.length,
+        '',
+    ].join(' ');
+    return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js b/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js
new file mode 100644
index 0000000000000..7113af66db4c2
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.base64Encode = base64Encode;
+exports.base64Decode = base64Decode;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const BASE64_ENCODING = 'base64';
+const UTF8_ENCODING = 'utf-8';
+function base64Encode(str) {
+    return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
+}
+function base64Decode(str) {
+    return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/index.js
new file mode 100644
index 0000000000000..49859d84db756
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/index.js
@@ -0,0 +1,66 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var asn1_1 = require("./asn1");
+Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } });
+exports.crypto = __importStar(require("./crypto"));
+exports.dsse = __importStar(require("./dsse"));
+exports.encoding = __importStar(require("./encoding"));
+exports.json = __importStar(require("./json"));
+exports.pem = __importStar(require("./pem"));
+var rfc3161_1 = require("./rfc3161");
+Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } });
+var stream_1 = require("./stream");
+Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } });
+var x509_1 = require("./x509");
+Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } });
+Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } });
+Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/json.js b/node_modules/pacote/node_modules/@sigstore/core/dist/json.js
new file mode 100644
index 0000000000000..7808d033b98cc
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/json.js
@@ -0,0 +1,60 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = canonicalize;
+// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function canonicalize(object) {
+    let buffer = '';
+    if (object === null || typeof object !== 'object' || object.toJSON != null) {
+        // Primitives or toJSONable objects
+        buffer += JSON.stringify(object);
+    }
+    else if (Array.isArray(object)) {
+        // Array - maintain element order
+        buffer += '[';
+        let first = true;
+        object.forEach((element) => {
+            if (!first) {
+                buffer += ',';
+            }
+            first = false;
+            // recursive call
+            buffer += canonicalize(element);
+        });
+        buffer += ']';
+    }
+    else {
+        // Object - Sort properties before serializing
+        buffer += '{';
+        let first = true;
+        Object.keys(object)
+            .sort()
+            .forEach((property) => {
+            if (!first) {
+                buffer += ',';
+            }
+            first = false;
+            buffer += JSON.stringify(property);
+            buffer += ':';
+            // recursive call
+            buffer += canonicalize(object[property]);
+        });
+        buffer += '}';
+    }
+    return buffer;
+}
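canonicalize sorts object keys and preserves array order, so structurally equal JSON values serialize to the same string. A small sketch of the expected output (same resolution assumption as above):

    const { json } = require('@sigstore/core')
    json.canonicalize({ b: 1, a: { y: [2, 1], x: 'z' } })
    // => '{"a":{"x":"z","y":[2,1]},"b":1}'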
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js b/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js
new file mode 100644
index 0000000000000..ac7a643067ad0
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0;
+exports.ECDSA_SIGNATURE_ALGOS = {
+    '1.2.840.10045.4.3.1': 'sha224',
+    '1.2.840.10045.4.3.2': 'sha256',
+    '1.2.840.10045.4.3.3': 'sha384',
+    '1.2.840.10045.4.3.4': 'sha512',
+};
+exports.SHA2_HASH_ALGOS = {
+    '2.16.840.1.101.3.4.2.1': 'sha256',
+    '2.16.840.1.101.3.4.2.2': 'sha384',
+    '2.16.840.1.101.3.4.2.3': 'sha512',
+};
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js b/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js
new file mode 100644
index 0000000000000..f1241d28d586e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toDER = toDER;
+exports.fromDER = fromDER;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const PEM_HEADER = /-----BEGIN (.*)-----/;
+const PEM_FOOTER = /-----END (.*)-----/;
+function toDER(certificate) {
+    let der = '';
+    certificate.split('\n').forEach((line) => {
+        if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) {
+            return;
+        }
+        der += line;
+    });
+    return Buffer.from(der, 'base64');
+}
+// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM
+// encoding dictates that each certificate should have a trailing newline after
+// the footer.
+function fromDER(certificate, type = 'CERTIFICATE') {
+    // Base64-encode the certificate.
+    const der = certificate.toString('base64');
+    // Split the certificate into lines of 64 characters.
+    const lines = der.match(/.{1,64}/g) || '';
+    return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`]
+        .join('\n')
+        .concat('\n');
+}
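toDER strips the PEM armor and base64-decodes the body; fromDER re-wraps a DER buffer at 64 characters per line with a trailing newline. A round-trip sketch, where `certificatePem` is a placeholder for any PEM-encoded certificate:

    const { pem } = require('@sigstore/core')
    const der = pem.toDER(certificatePem)   // Buffer with the raw DER bytes
    const rewrapped = pem.fromDER(der)      // '-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----\n'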
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js
new file mode 100644
index 0000000000000..b9b549b0bb323
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js
@@ -0,0 +1,21 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RFC3161TimestampVerificationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class RFC3161TimestampVerificationError extends Error {
+}
+exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js
new file mode 100644
index 0000000000000..b77ecf1c7d50c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js
@@ -0,0 +1,20 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RFC3161Timestamp = void 0;
+var timestamp_1 = require("./timestamp");
+Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
new file mode 100644
index 0000000000000..982fb5e6126e8
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
@@ -0,0 +1,211 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RFC3161Timestamp = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const asn1_1 = require("../asn1");
+const crypto = __importStar(require("../crypto"));
+const oid_1 = require("../oid");
+const error_1 = require("./error");
+const tstinfo_1 = require("./tstinfo");
+const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2';
+const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4';
+const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4';
+class RFC3161Timestamp {
+    constructor(asn1) {
+        this.root = asn1;
+    }
+    static parse(der) {
+        const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
+        return new RFC3161Timestamp(asn1);
+    }
+    get status() {
+        return this.pkiStatusInfoObj.subs[0].toInteger();
+    }
+    get contentType() {
+        return this.contentTypeObj.toOID();
+    }
+    get eContentType() {
+        return this.eContentTypeObj.toOID();
+    }
+    get signingTime() {
+        return this.tstInfo.genTime;
+    }
+    get signerIssuer() {
+        return this.signerSidObj.subs[0].value;
+    }
+    get signerSerialNumber() {
+        return this.signerSidObj.subs[1].value;
+    }
+    get signerDigestAlgorithm() {
+        const oid = this.signerDigestAlgorithmObj.subs[0].toOID();
+        return oid_1.SHA2_HASH_ALGOS[oid];
+    }
+    get signatureAlgorithm() {
+        const oid = this.signatureAlgorithmObj.subs[0].toOID();
+        return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
+    }
+    get signatureValue() {
+        return this.signatureValueObj.value;
+    }
+    get tstInfo() {
+        // Need to unpack tstInfo from an OCTET STRING
+        return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]);
+    }
+    verify(data, publicKey) {
+        if (!this.timeStampTokenObj) {
+            throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing');
+        }
+        // Check for expected ContentInfo content type
+        if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) {
+            throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`);
+        }
+        // Check for expected encapsulated content type
+        if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) {
+            throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`);
+        }
+        // Check that the tstInfo references the correct artifact
+        this.tstInfo.verify(data);
+        // Check that the signed message digest matches the tstInfo
+        this.verifyMessageDigest();
+        // Check that the signature is valid for the signed attributes
+        this.verifySignature(publicKey);
+    }
+    verifyMessageDigest() {
+        // Check that the tstInfo matches the signed data
+        const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw);
+        const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value;
+        if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) {
+            throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo');
+        }
+    }
+    verifySignature(key) {
+        // Encode the signed attributes for verification
+        const signedAttrs = this.signedAttrsObj.toDER();
+        signedAttrs[0] = 0x31; // Change context-specific tag to SET
+        // Check that the signature is valid for the signed attributes
+        const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm);
+        if (!verified) {
+            throw new error_1.RFC3161TimestampVerificationError('signature verification failed');
+        }
+    }
+    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
+    get pkiStatusInfoObj() {
+        // pkiStatusInfo is the first element of the timestamp response sequence
+        return this.root.subs[0];
+    }
+    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
+    get timeStampTokenObj() {
+        // timeStampToken is the first element of the timestamp response sequence
+        return this.root.subs[1];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-3
+    get contentTypeObj() {
+        return this.timeStampTokenObj.subs[0];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5652#section-3
+    get signedDataObj() {
+        const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
+        return obj.subs[0];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
+    get encapContentInfoObj() {
+        return this.signedDataObj.subs[2];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
+    get signerInfosObj() {
+        // SignerInfos is the last element of the signed data sequence
+        const sd = this.signedDataObj;
+        return sd.subs[sd.subs.length - 1];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5652#section-5.1
+    get signerInfoObj() {
+        // Only supporting one signer
+        return this.signerInfosObj.subs[0];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
+    get eContentTypeObj() {
+        return this.encapContentInfoObj.subs[0];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
+    get eContentObj() {
+        return this.encapContentInfoObj.subs[1];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+    get signedAttrsObj() {
+        const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
+        return signedAttrs;
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+    get messageDigestAttributeObj() {
+        const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() &&
+            sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY);
+        return messageDigest;
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+    get signerSidObj() {
+        return this.signerInfoObj.subs[1];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+    get signerDigestAlgorithmObj() {
+        // digestAlgorithm is the 3rd element of the signerInfoObj object
+        return this.signerInfoObj.subs[2];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+    get signatureAlgorithmObj() {
+        // signatureAlgorithm is the 5th element of the signerInfoObj object
+        return this.signerInfoObj.subs[4];
+    }
+    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
+    get signatureValueObj() {
+        // Signature is the 6th element of the signerInfoObj object
+        return this.signerInfoObj.subs[5];
+    }
+}
+exports.RFC3161Timestamp = RFC3161Timestamp;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
new file mode 100644
index 0000000000000..d5001c42c108f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
@@ -0,0 +1,71 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSTInfo = void 0;
+const crypto = __importStar(require("../crypto"));
+const oid_1 = require("../oid");
+const error_1 = require("./error");
+class TSTInfo {
+    constructor(asn1) {
+        this.root = asn1;
+    }
+    get version() {
+        return this.root.subs[0].toInteger();
+    }
+    get genTime() {
+        return this.root.subs[4].toDate();
+    }
+    get messageImprintHashAlgorithm() {
+        const oid = this.messageImprintObj.subs[0].subs[0].toOID();
+        return oid_1.SHA2_HASH_ALGOS[oid];
+    }
+    get messageImprintHashedMessage() {
+        return this.messageImprintObj.subs[1].value;
+    }
+    get raw() {
+        return this.root.toDER();
+    }
+    verify(data) {
+        const digest = crypto.digest(this.messageImprintHashAlgorithm, data);
+        if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) {
+            throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact');
+        }
+    }
+    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
+    get messageImprintObj() {
+        return this.root.subs[2];
+    }
+}
+exports.TSTInfo = TSTInfo;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js b/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js
new file mode 100644
index 0000000000000..0a24f8582eb23
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js
@@ -0,0 +1,115 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ByteStream = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class StreamError extends Error {
+}
+class ByteStream {
+    constructor(buffer) {
+        this.start = 0;
+        if (buffer) {
+            this.buf = buffer;
+            this.view = Buffer.from(buffer);
+        }
+        else {
+            this.buf = new ArrayBuffer(0);
+            this.view = Buffer.from(this.buf);
+        }
+    }
+    get buffer() {
+        return this.view.subarray(0, this.start);
+    }
+    get length() {
+        return this.view.byteLength;
+    }
+    get position() {
+        return this.start;
+    }
+    seek(position) {
+        this.start = position;
+    }
+    // Returns a Buffer containing the specified number of bytes starting at the
+    // given start position.
+    slice(start, len) {
+        const end = start + len;
+        if (end > this.length) {
+            throw new StreamError('request past end of buffer');
+        }
+        return this.view.subarray(start, end);
+    }
+    appendChar(char) {
+        this.ensureCapacity(1);
+        this.view[this.start] = char;
+        this.start += 1;
+    }
+    appendUint16(num) {
+        this.ensureCapacity(2);
+        const value = new Uint16Array([num]);
+        const view = new Uint8Array(value.buffer);
+        this.view[this.start] = view[1];
+        this.view[this.start + 1] = view[0];
+        this.start += 2;
+    }
+    appendUint24(num) {
+        this.ensureCapacity(3);
+        const value = new Uint32Array([num]);
+        const view = new Uint8Array(value.buffer);
+        this.view[this.start] = view[2];
+        this.view[this.start + 1] = view[1];
+        this.view[this.start + 2] = view[0];
+        this.start += 3;
+    }
+    appendView(view) {
+        this.ensureCapacity(view.length);
+        this.view.set(view, this.start);
+        this.start += view.length;
+    }
+    getBlock(size) {
+        if (size <= 0) {
+            return Buffer.alloc(0);
+        }
+        if (this.start + size > this.view.length) {
+            throw new Error('request past end of buffer');
+        }
+        const result = this.view.subarray(this.start, this.start + size);
+        this.start += size;
+        return result;
+    }
+    getUint8() {
+        return this.getBlock(1)[0];
+    }
+    getUint16() {
+        const block = this.getBlock(2);
+        return (block[0] << 8) | block[1];
+    }
+    ensureCapacity(size) {
+        if (this.start + size > this.view.byteLength) {
+            const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0);
+            this.realloc(this.view.byteLength + blockSize);
+        }
+    }
+    realloc(size) {
+        const newArray = new ArrayBuffer(size);
+        const newView = Buffer.from(newArray);
+        // Copy the old buffer into the new one
+        newView.set(this.view);
+        this.buf = newArray;
+        this.view = newView;
+    }
+}
+exports.ByteStream = ByteStream;
+ByteStream.BLOCK_SIZE = 1024;
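ByteStream keeps a single cursor for both writes and reads and grows its backing buffer in 1024-byte blocks, so reading back freshly written data requires a seek(0) first. A short sketch:

    const { ByteStream } = require('@sigstore/core')
    const stream = new ByteStream()
    stream.appendChar(0x01)       // one byte
    stream.appendUint16(0x0203)   // written big-endian
    stream.seek(0)
    stream.getUint8()             // 0x01
    stream.getUint16()            // 0x0203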
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js
new file mode 100644
index 0000000000000..83aee7d1215a4
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js
@@ -0,0 +1,241 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const asn1_1 = require("../asn1");
+const crypto = __importStar(require("../crypto"));
+const oid_1 = require("../oid");
+const pem = __importStar(require("../pem"));
+const ext_1 = require("./ext");
+const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14';
+const EXTENSION_OID_KEY_USAGE = '2.5.29.15';
+const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17';
+const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19';
+const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35';
+exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2';
+class X509Certificate {
+    constructor(asn1) {
+        this.root = asn1;
+    }
+    static parse(cert) {
+        const der = typeof cert === 'string' ? pem.toDER(cert) : cert;
+        const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
+        return new X509Certificate(asn1);
+    }
+    get tbsCertificate() {
+        return this.tbsCertificateObj;
+    }
+    get version() {
+        // version number is the first element of the version context specific tag
+        const ver = this.versionObj.subs[0].toInteger();
+        return `v${(ver + BigInt(1)).toString()}`;
+    }
+    get serialNumber() {
+        return this.serialNumberObj.value;
+    }
+    get notBefore() {
+        // notBefore is the first element of the validity sequence
+        return this.validityObj.subs[0].toDate();
+    }
+    get notAfter() {
+        // notAfter is the second element of the validity sequence
+        return this.validityObj.subs[1].toDate();
+    }
+    get issuer() {
+        return this.issuerObj.value;
+    }
+    get subject() {
+        return this.subjectObj.value;
+    }
+    get publicKey() {
+        return this.subjectPublicKeyInfoObj.toDER();
+    }
+    get signatureAlgorithm() {
+        const oid = this.signatureAlgorithmObj.subs[0].toOID();
+        return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
+    }
+    get signatureValue() {
+        // Signature value is a bit string, so we need to skip the first byte
+        return this.signatureValueObj.value.subarray(1);
+    }
+    get subjectAltName() {
+        const ext = this.extSubjectAltName;
+        return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name;
+    }
+    get extensions() {
+        // The extension list is the first (and only) element of the extensions
+        // context specific tag
+        /* istanbul ignore next */
+        const extSeq = this.extensionsObj?.subs[0];
+        /* istanbul ignore next */
+        return extSeq?.subs || [];
+    }
+    get extKeyUsage() {
+        const ext = this.findExtension(EXTENSION_OID_KEY_USAGE);
+        return ext ? new ext_1.X509KeyUsageExtension(ext) : undefined;
+    }
+    get extBasicConstraints() {
+        const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS);
+        return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined;
+    }
+    get extSubjectAltName() {
+        const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME);
+        return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined;
+    }
+    get extAuthorityKeyID() {
+        const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID);
+        return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined;
+    }
+    get extSubjectKeyID() {
+        const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID);
+        return ext
+            ? new ext_1.X509SubjectKeyIDExtension(ext)
+            : /* istanbul ignore next */ undefined;
+    }
+    get extSCT() {
+        const ext = this.findExtension(exports.EXTENSION_OID_SCT);
+        return ext ? new ext_1.X509SCTExtension(ext) : undefined;
+    }
+    get isCA() {
+        const ca = this.extBasicConstraints?.isCA || false;
+        // If the KeyUsage extension is present, keyCertSign must be set
+        /* istanbul ignore else */
+        if (this.extKeyUsage) {
+            return ca && this.extKeyUsage.keyCertSign;
+        }
+        // TODO: test coverage for this case
+        /* istanbul ignore next */
+        return ca;
+    }
+    extension(oid) {
+        const ext = this.findExtension(oid);
+        return ext ? new ext_1.X509Extension(ext) : undefined;
+    }
+    verify(issuerCertificate) {
+        // Use the issuer's public key if provided, otherwise use the subject's
+        const publicKey = issuerCertificate?.publicKey || this.publicKey;
+        const key = crypto.createPublicKey(publicKey);
+        return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm);
+    }
+    validForDate(date) {
+        return this.notBefore <= date && date <= this.notAfter;
+    }
+    equals(other) {
+        return this.root.toDER().equals(other.root.toDER());
+    }
+    // Creates a copy of the certificate with a new buffer
+    clone() {
+        const der = this.root.toDER();
+        const clone = Buffer.alloc(der.length);
+        der.copy(clone);
+        return X509Certificate.parse(clone);
+    }
+    findExtension(oid) {
+        // Find the extension with the given OID. The OID will always be the first
+        // element of the extension sequence
+        return this.extensions.find((ext) => ext.subs[0].toOID() === oid);
+    }
+    /////////////////////////////////////////////////////////////////////////////
+    // The following properties use the documented x509 structure to locate the
+    // desired ASN.1 object
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1
+    get tbsCertificateObj() {
+        // tbsCertificate is the first element of the certificate sequence
+        return this.root.subs[0];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2
+    get signatureAlgorithmObj() {
+        // signatureAlgorithm is the second element of the certificate sequence
+        return this.root.subs[1];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3
+    get signatureValueObj() {
+        // signatureValue is the third element of the certificate sequence
+        return this.root.subs[2];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1
+    get versionObj() {
+        // version is the first element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[0];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2
+    get serialNumberObj() {
+        // serialNumber is the second element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[1];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4
+    get issuerObj() {
+        // issuer is the fourth element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[3];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5
+    get validityObj() {
+    // validity is the fifth element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[4];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6
+    get subjectObj() {
+        // subject is the sixth element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[5];
+    }
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7
+    get subjectPublicKeyInfoObj() {
+        // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence
+        return this.tbsCertificateObj.subs[6];
+    }
+    // Extensions can't be located by index because their position varies. Instead,
+    // we need to find the extensions context specific tag
+    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9
+    get extensionsObj() {
+        return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03));
+    }
+}
+exports.X509Certificate = X509Certificate;
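The X509Certificate getters walk the parsed ASN.1 tree on demand. A usage sketch, where `certificatePem` is again a placeholder for a real certificate:

    const { X509Certificate } = require('@sigstore/core')
    const cert = X509Certificate.parse(certificatePem)  // accepts a PEM string or DER Buffer
    cert.validForDate(new Date())                       // true if now falls within notBefore..notAfter
    cert.subjectAltName                                 // URI or rfc822Name from the SAN extension, if any
    cert.verify()                                       // self-signature check when no issuer cert is given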
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js
new file mode 100644
index 0000000000000..1d481261b0aa6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js
@@ -0,0 +1,145 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0;
+const stream_1 = require("../stream");
+const sct_1 = require("./sct");
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.1
+class X509Extension {
+    constructor(asn1) {
+        this.root = asn1;
+    }
+    get oid() {
+        return this.root.subs[0].toOID();
+    }
+    get critical() {
+        // The critical field is optional and will be the second element of the
+        // extension sequence if present. Default to false if not present.
+        return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false;
+    }
+    get value() {
+        return this.extnValueObj.value;
+    }
+    get valueObj() {
+        return this.extnValueObj;
+    }
+    get extnValueObj() {
+        // The extnValue field will be the last element of the extension sequence
+        return this.root.subs[this.root.subs.length - 1];
+    }
+}
+exports.X509Extension = X509Extension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9
+class X509BasicConstraintsExtension extends X509Extension {
+    get isCA() {
+        return this.sequence.subs[0]?.toBoolean() ?? false;
+    }
+    get pathLenConstraint() {
+        return this.sequence.subs.length > 1
+            ? this.sequence.subs[1].toInteger()
+            : undefined;
+    }
+    // The extnValue field contains a single sequence wrapping the isCA and
+    // pathLenConstraint.
+    get sequence() {
+        return this.extnValueObj.subs[0];
+    }
+}
+exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3
+class X509KeyUsageExtension extends X509Extension {
+    get digitalSignature() {
+        return this.bitString[0] === 1;
+    }
+    get keyCertSign() {
+        return this.bitString[5] === 1;
+    }
+    get crlSign() {
+        return this.bitString[6] === 1;
+    }
+    // The extnValue field contains a single bit string which is a bit mask
+    // indicating which key usages are enabled.
+    get bitString() {
+        return this.extnValueObj.subs[0].toBitString();
+    }
+}
+exports.X509KeyUsageExtension = X509KeyUsageExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6
+class X509SubjectAlternativeNameExtension extends X509Extension {
+    get rfc822Name() {
+        return this.findGeneralName(0x01)?.value.toString('ascii');
+    }
+    get uri() {
+        return this.findGeneralName(0x06)?.value.toString('ascii');
+    }
+    // Retrieve the value of an otherName with the given OID.
+    otherName(oid) {
+        const otherName = this.findGeneralName(0x00);
+        if (otherName === undefined) {
+            return undefined;
+        }
+        // The otherName is a sequence containing an OID and a value.
+        // Need to check that the OID matches the one we're looking for.
+        const otherNameOID = otherName.subs[0].toOID();
+        if (otherNameOID !== oid) {
+            return undefined;
+        }
+        // The otherNameValue is a sequence containing the actual value.
+        const otherNameValue = otherName.subs[1];
+        return otherNameValue.subs[0].value.toString('ascii');
+    }
+    findGeneralName(tag) {
+        return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag));
+    }
+    // The extnValue field contains a sequence of GeneralNames.
+    get generalNames() {
+        return this.extnValueObj.subs[0].subs;
+    }
+}
+exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1
+class X509AuthorityKeyIDExtension extends X509Extension {
+    get keyIdentifier() {
+        return this.findSequenceMember(0x00)?.value;
+    }
+    findSequenceMember(tag) {
+        return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag));
+    }
+    // The extnValue field contains a single sequence wrapping the keyIdentifier
+    get sequence() {
+        return this.extnValueObj.subs[0];
+    }
+}
+exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension;
+// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2
+class X509SubjectKeyIDExtension extends X509Extension {
+    get keyIdentifier() {
+        return this.extnValueObj.subs[0].value;
+    }
+}
+exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension;
+// https://www.rfc-editor.org/rfc/rfc6962#section-3.3
+class X509SCTExtension extends X509Extension {
+    constructor(asn1) {
+        super(asn1);
+    }
+    get signedCertificateTimestamps() {
+        const buf = this.extnValueObj.subs[0].value;
+        const stream = new stream_1.ByteStream(buf);
+        // The overall list length is encoded in the first two bytes -- note this
+        // is the length of the list in bytes, NOT the number of SCTs in the list
+        const end = stream.getUint16() + 2;
+        const sctList = [];
+        while (stream.position < end) {
+            // Read the length of the next SCT
+            const sctLength = stream.getUint16();
+            // Slice out the bytes for the next SCT and parse it
+            const sct = stream.getBlock(sctLength);
+            sctList.push(sct_1.SignedCertificateTimestamp.parse(sct));
+        }
+        if (stream.position !== end) {
+            throw new Error('SCT list length does not match actual length');
+        }
+        return sctList;
+    }
+}
+exports.X509SCTExtension = X509SCTExtension;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js
new file mode 100644
index 0000000000000..cdd77e58f37d5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js
@@ -0,0 +1,23 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
+var cert_1 = require("./cert");
+Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } });
+Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } });
+var ext_1 = require("./ext");
+Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js
new file mode 100644
index 0000000000000..55885e3b30742
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js
@@ -0,0 +1,151 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SignedCertificateTimestamp = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto = __importStar(require("../crypto"));
+const stream_1 = require("../stream");
+class SignedCertificateTimestamp {
+    constructor(options) {
+        this.version = options.version;
+        this.logID = options.logID;
+        this.timestamp = options.timestamp;
+        this.extensions = options.extensions;
+        this.hashAlgorithm = options.hashAlgorithm;
+        this.signatureAlgorithm = options.signatureAlgorithm;
+        this.signature = options.signature;
+    }
+    get datetime() {
+        return new Date(Number(this.timestamp.readBigInt64BE()));
+    }
+    // Returns the hash algorithm used to generate the SCT's signature.
+    // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
+    get algorithm() {
+        switch (this.hashAlgorithm) {
+            /* istanbul ignore next */
+            case 0:
+                return 'none';
+            /* istanbul ignore next */
+            case 1:
+                return 'md5';
+            /* istanbul ignore next */
+            case 2:
+                return 'sha1';
+            /* istanbul ignore next */
+            case 3:
+                return 'sha224';
+            case 4:
+                return 'sha256';
+            /* istanbul ignore next */
+            case 5:
+                return 'sha384';
+            /* istanbul ignore next */
+            case 6:
+                return 'sha512';
+            /* istanbul ignore next */
+            default:
+                return 'unknown';
+        }
+    }
+    verify(preCert, key) {
+        // Assemble the digitally-signed struct (the data over which the signature
+        // was generated).
+        // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+        const stream = new stream_1.ByteStream();
+        stream.appendChar(this.version);
+        stream.appendChar(0x00); // SignatureType = certificate_timestamp(0)
+        stream.appendView(this.timestamp);
+        stream.appendUint16(0x01); // LogEntryType = precert_entry(1)
+        stream.appendView(preCert);
+        stream.appendUint16(this.extensions.byteLength);
+        /* istanbul ignore next - extensions are very uncommon */
+        if (this.extensions.byteLength > 0) {
+            stream.appendView(this.extensions);
+        }
+        return crypto.verify(stream.buffer, key, this.signature, this.algorithm);
+    }
+    // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using
+    // TLS encoding which means the fields and lengths of most fields are
+    // specified as part of the SCT and TLS specs.
+    // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
+    // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
+    static parse(buf) {
+        const stream = new stream_1.ByteStream(buf);
+        // Version - enum { v1(0), (255) }
+        const version = stream.getUint8();
+        // Log ID  - struct { opaque key_id[32]; }
+        const logID = stream.getBlock(32);
+        // Timestamp - uint64
+        const timestamp = stream.getBlock(8);
+        // Extensions - opaque extensions<0..2^16-1>;
+        const extensionLength = stream.getUint16();
+        const extensions = stream.getBlock(extensionLength);
+        // Hash algo - enum { sha256(4), . . . (255) }
+        const hashAlgorithm = stream.getUint8();
+        // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) }
+        const signatureAlgorithm = stream.getUint8();
+        // Signature  - opaque signature<0..2^16-1>;
+        const sigLength = stream.getUint16();
+        const signature = stream.getBlock(sigLength);
+        // Check that we read the entire buffer
+        if (stream.position !== buf.length) {
+            throw new Error('SCT buffer length mismatch');
+        }
+        return new SignedCertificateTimestamp({
+            version,
+            logID,
+            timestamp,
+            extensions,
+            hashAlgorithm,
+            signatureAlgorithm,
+            signature,
+        });
+    }
+}
+exports.SignedCertificateTimestamp = SignedCertificateTimestamp;
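SignedCertificateTimestamp.parse consumes the TLS-style layout exactly (version, 32-byte log ID, 8-byte timestamp, length-prefixed extensions, algorithm bytes, length-prefixed signature) and rejects trailing bytes. A sketch that assembles a minimal, unsigned SCT with ByteStream and parses it back (the subpath require is an assumption that works because the package ships its whole dist directory with no exports map):

    const { ByteStream } = require('@sigstore/core')
    const { SignedCertificateTimestamp } = require('@sigstore/core/dist/x509/sct')

    const stream = new ByteStream()
    stream.appendChar(0x00)               // version v1(0)
    stream.appendView(Buffer.alloc(32))   // log ID
    stream.appendView(Buffer.alloc(8))    // timestamp (epoch 0)
    stream.appendUint16(0)                // no extensions
    stream.appendChar(0x04)               // hash algorithm sha256(4)
    stream.appendChar(0x03)               // signature algorithm ecdsa(3)
    stream.appendUint16(0)                // empty signature
    const sct = SignedCertificateTimestamp.parse(stream.buffer)
    sct.algorithm                         // 'sha256'
    sct.datetime                          // 1970-01-01T00:00:00.000Z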
diff --git a/node_modules/pacote/node_modules/@sigstore/core/package.json b/node_modules/pacote/node_modules/@sigstore/core/package.json
new file mode 100644
index 0000000000000..7d2f8d5de3f7a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/core/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "@sigstore/core",
+  "version": "3.0.0",
+  "description": "Base library for Sigstore",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..5c4f37bfaf3fb
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,59 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: envelope.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payload.length !== 0) {
+            obj.payload = base64FromBytes(message.payload);
+        }
+        if (message.payloadType !== "") {
+            obj.payloadType = message.payloadType;
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.sig.length !== 0) {
+            obj.sig = base64FromBytes(message.sig);
+        }
+        if (message.keyid !== "") {
+            obj.keyid = message.keyid;
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..6138fef5672fc
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,174 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: events.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? globalThis.String(object.id) : "",
+            source: isSet(object.source) ? globalThis.String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: globalThis.String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id !== "") {
+            obj.id = message.id;
+        }
+        if (message.source !== "") {
+            obj.source = message.source;
+        }
+        if (message.specVersion !== "") {
+            obj.specVersion = message.specVersion;
+        }
+        if (message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.attributes) {
+            const entries = Object.entries(message.attributes);
+            if (entries.length > 0) {
+                obj.attributes = {};
+                entries.forEach(([k, v]) => {
+                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+                });
+            }
+        }
+        if (message.data?.$case === "binaryData") {
+            obj.binaryData = base64FromBytes(message.data.binaryData);
+        }
+        else if (message.data?.$case === "textData") {
+            obj.textData = message.data.textData;
+        }
+        else if (message.data?.$case === "protoData") {
+            obj.protoData = any_1.Any.toJSON(message.data.protoData);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? globalThis.String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.key !== "") {
+            obj.key = message.key;
+        }
+        if (message.value !== undefined) {
+            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.attr?.$case === "ceBoolean") {
+            obj.ceBoolean = message.attr.ceBoolean;
+        }
+        else if (message.attr?.$case === "ceInteger") {
+            obj.ceInteger = Math.round(message.attr.ceInteger);
+        }
+        else if (message.attr?.$case === "ceString") {
+            obj.ceString = message.attr.ceString;
+        }
+        else if (message.attr?.$case === "ceBytes") {
+            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
+        }
+        else if (message.attr?.$case === "ceUri") {
+            obj.ceUri = message.attr.ceUri;
+        }
+        else if (message.attr?.$case === "ceUriRef") {
+            obj.ceUriRef = message.attr.ceUriRef;
+        }
+        else if (message.attr?.$case === "ceTimestamp") {
+            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
+        }
+        return obj;
+    },
+};
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return {
+            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events?.length) {
+            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..b4d9ccc781c2f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,141 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/api/field_behavior.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FieldBehavior = void 0;
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+/* eslint-disable */
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary  order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+    /**
+     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
+     * This indicates that if the user provides the empty value in a request,
+     * a non-empty value will be returned. The user will not be aware of what
+     * non-empty value to expect.
+     */
+    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
+    /**
+     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
+     * google.api.resource) is used in the resource name to uniquely identify the
+     * resource. For AIP-compliant APIs, this should only be applied to the
+     * `name` field on the resource.
+     *
+     * This behavior should not be applied to references to other resources within
+     * the message.
+     *
+     * The identifier field of resources often have different field behavior
+     * depending on the request it is embedded in (e.g. for Create methods name
+     * is optional and unused, while for Update methods it is required). Instead
+     * of method-specific annotations, only `IDENTIFIER` is required.
+     */
+    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
+})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        case 7:
+        case "NON_EMPTY_DEFAULT":
+            return FieldBehavior.NON_EMPTY_DEFAULT;
+        case 8:
+        case "IDENTIFIER":
+            return FieldBehavior.IDENTIFIER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        case FieldBehavior.NON_EMPTY_DEFAULT:
+            return "NON_EMPTY_DEFAULT";
+        case FieldBehavior.IDENTIFIER:
+            return "IDENTIFIER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..f0c8aab773e4c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,35 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/any.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.typeUrl !== "") {
+            obj.typeUrl = message.typeUrl;
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d6f8ddddf799d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,2042 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/descriptor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
+exports.GeneratedCodeInfo_Annotation = void 0;
+exports.editionFromJSON = editionFromJSON;
+exports.editionToJSON = editionToJSON;
+exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
+exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
+exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
+exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
+exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
+exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
+exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
+exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
+exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
+exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
+exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
+exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
+exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
+exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
+exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
+exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
+exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
+exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
+exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
+exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
+/* eslint-disable */
+/** The full set of known editions. */
+var Edition;
+(function (Edition) {
+    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
+    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
+    /**
+     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
+     * was first introduced.  This is effectively an "infinite past".
+     */
+    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
+    /**
+     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
+     * distinct editions.  These can't be used to specify the edition of proto
+     * files, but feature definitions must supply proto2/proto3 defaults for
+     * backwards compatibility.
+     */
+    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
+    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
+    /**
+     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
+     * should not be depended on, but they will always be time-ordered for easy
+     * comparison.
+     */
+    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
+    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
+    /**
+     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
+     * used or relied on outside of tests.
+     */
+    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
+    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
+    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
+    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
+    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
+    /**
+     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
+     * ever be used by plugins that can expect to never require any changes to
+     * support a new edition.
+     */
+    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
+})(Edition || (exports.Edition = Edition = {}));
+function editionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "EDITION_UNKNOWN":
+            return Edition.EDITION_UNKNOWN;
+        case 900:
+        case "EDITION_LEGACY":
+            return Edition.EDITION_LEGACY;
+        case 998:
+        case "EDITION_PROTO2":
+            return Edition.EDITION_PROTO2;
+        case 999:
+        case "EDITION_PROTO3":
+            return Edition.EDITION_PROTO3;
+        case 1000:
+        case "EDITION_2023":
+            return Edition.EDITION_2023;
+        case 1001:
+        case "EDITION_2024":
+            return Edition.EDITION_2024;
+        case 1:
+        case "EDITION_1_TEST_ONLY":
+            return Edition.EDITION_1_TEST_ONLY;
+        case 2:
+        case "EDITION_2_TEST_ONLY":
+            return Edition.EDITION_2_TEST_ONLY;
+        case 99997:
+        case "EDITION_99997_TEST_ONLY":
+            return Edition.EDITION_99997_TEST_ONLY;
+        case 99998:
+        case "EDITION_99998_TEST_ONLY":
+            return Edition.EDITION_99998_TEST_ONLY;
+        case 99999:
+        case "EDITION_99999_TEST_ONLY":
+            return Edition.EDITION_99999_TEST_ONLY;
+        case 2147483647:
+        case "EDITION_MAX":
+            return Edition.EDITION_MAX;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+function editionToJSON(object) {
+    switch (object) {
+        case Edition.EDITION_UNKNOWN:
+            return "EDITION_UNKNOWN";
+        case Edition.EDITION_LEGACY:
+            return "EDITION_LEGACY";
+        case Edition.EDITION_PROTO2:
+            return "EDITION_PROTO2";
+        case Edition.EDITION_PROTO3:
+            return "EDITION_PROTO3";
+        case Edition.EDITION_2023:
+            return "EDITION_2023";
+        case Edition.EDITION_2024:
+            return "EDITION_2024";
+        case Edition.EDITION_1_TEST_ONLY:
+            return "EDITION_1_TEST_ONLY";
+        case Edition.EDITION_2_TEST_ONLY:
+            return "EDITION_2_TEST_ONLY";
+        case Edition.EDITION_99997_TEST_ONLY:
+            return "EDITION_99997_TEST_ONLY";
+        case Edition.EDITION_99998_TEST_ONLY:
+            return "EDITION_99998_TEST_ONLY";
+        case Edition.EDITION_99999_TEST_ONLY:
+            return "EDITION_99999_TEST_ONLY";
+        case Edition.EDITION_MAX:
+            return "EDITION_MAX";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+/** The verification state of the extension range. */
+var ExtensionRangeOptions_VerificationState;
+(function (ExtensionRangeOptions_VerificationState) {
+    /** DECLARATION - All the extensions of the range must be declared. */
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
+})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
+function extensionRangeOptions_VerificationStateFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "DECLARATION":
+            return ExtensionRangeOptions_VerificationState.DECLARATION;
+        case 1:
+        case "UNVERIFIED":
+            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+function extensionRangeOptions_VerificationStateToJSON(object) {
+    switch (object) {
+        case ExtensionRangeOptions_VerificationState.DECLARATION:
+            return "DECLARATION";
+        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
+            return "UNVERIFIED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported after google.protobuf. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.  In Editions, the group wire format
+     * can be enabled via the `message_encoding` feature.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+    /**
+     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
+     * it's explicitly prohibited.  In Editions, the `field_presence` feature
+     * can be used to get this behavior.
+     */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    /**
+     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
+     * "bytes". It indicates that in C++, the data should be stored in a Cord
+     * instead of a string.  For very large strings, this may reduce memory
+     * fragmentation. It may also allow better performance when parsing from a
+     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+     * alias the original buffer.
+     */
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
+var FieldOptions_OptionRetention;
+(function (FieldOptions_OptionRetention) {
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
+})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
+function fieldOptions_OptionRetentionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "RETENTION_UNKNOWN":
+            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
+        case 1:
+        case "RETENTION_RUNTIME":
+            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
+        case 2:
+        case "RETENTION_SOURCE":
+            return FieldOptions_OptionRetention.RETENTION_SOURCE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+function fieldOptions_OptionRetentionToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
+            return "RETENTION_UNKNOWN";
+        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
+            return "RETENTION_RUNTIME";
+        case FieldOptions_OptionRetention.RETENTION_SOURCE:
+            return "RETENTION_SOURCE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+/**
+ * This indicates the types of entities that the field may apply to when used
+ * as an option. If it is unset, then the field may be freely used as an
+ * option on any kind of entity.
+ */
+var FieldOptions_OptionTargetType;
+(function (FieldOptions_OptionTargetType) {
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
+})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
+function fieldOptions_OptionTargetTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "TARGET_TYPE_UNKNOWN":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
+        case 1:
+        case "TARGET_TYPE_FILE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
+        case 2:
+        case "TARGET_TYPE_EXTENSION_RANGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
+        case 3:
+        case "TARGET_TYPE_MESSAGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
+        case 4:
+        case "TARGET_TYPE_FIELD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
+        case 5:
+        case "TARGET_TYPE_ONEOF":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
+        case 6:
+        case "TARGET_TYPE_ENUM":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
+        case 7:
+        case "TARGET_TYPE_ENUM_ENTRY":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
+        case 8:
+        case "TARGET_TYPE_SERVICE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
+        case 9:
+        case "TARGET_TYPE_METHOD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+function fieldOptions_OptionTargetTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
+            return "TARGET_TYPE_UNKNOWN";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
+            return "TARGET_TYPE_FILE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
+            return "TARGET_TYPE_EXTENSION_RANGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
+            return "TARGET_TYPE_MESSAGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
+            return "TARGET_TYPE_FIELD";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
+            return "TARGET_TYPE_ONEOF";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
+            return "TARGET_TYPE_ENUM";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
+            return "TARGET_TYPE_ENUM_ENTRY";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
+            return "TARGET_TYPE_SERVICE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
+            return "TARGET_TYPE_METHOD";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+var FeatureSet_FieldPresence;
+(function (FeatureSet_FieldPresence) {
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
+})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
+function featureSet_FieldPresenceFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_PRESENCE_UNKNOWN":
+            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
+        case 1:
+        case "EXPLICIT":
+            return FeatureSet_FieldPresence.EXPLICIT;
+        case 2:
+        case "IMPLICIT":
+            return FeatureSet_FieldPresence.IMPLICIT;
+        case 3:
+        case "LEGACY_REQUIRED":
+            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+function featureSet_FieldPresenceToJSON(object) {
+    switch (object) {
+        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
+            return "FIELD_PRESENCE_UNKNOWN";
+        case FeatureSet_FieldPresence.EXPLICIT:
+            return "EXPLICIT";
+        case FeatureSet_FieldPresence.IMPLICIT:
+            return "IMPLICIT";
+        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
+            return "LEGACY_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+var FeatureSet_EnumType;
+(function (FeatureSet_EnumType) {
+    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
+    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
+    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
+})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
+function featureSet_EnumTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENUM_TYPE_UNKNOWN":
+            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
+        case 1:
+        case "OPEN":
+            return FeatureSet_EnumType.OPEN;
+        case 2:
+        case "CLOSED":
+            return FeatureSet_EnumType.CLOSED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+function featureSet_EnumTypeToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
+            return "ENUM_TYPE_UNKNOWN";
+        case FeatureSet_EnumType.OPEN:
+            return "OPEN";
+        case FeatureSet_EnumType.CLOSED:
+            return "CLOSED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+var FeatureSet_RepeatedFieldEncoding;
+(function (FeatureSet_RepeatedFieldEncoding) {
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
+})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
+function featureSet_RepeatedFieldEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "REPEATED_FIELD_ENCODING_UNKNOWN":
+            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
+        case 1:
+        case "PACKED":
+            return FeatureSet_RepeatedFieldEncoding.PACKED;
+        case 2:
+        case "EXPANDED":
+            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+function featureSet_RepeatedFieldEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
+            return "REPEATED_FIELD_ENCODING_UNKNOWN";
+        case FeatureSet_RepeatedFieldEncoding.PACKED:
+            return "PACKED";
+        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
+            return "EXPANDED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+var FeatureSet_Utf8Validation;
+(function (FeatureSet_Utf8Validation) {
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
+})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
+function featureSet_Utf8ValidationFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "UTF8_VALIDATION_UNKNOWN":
+            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
+        case 2:
+        case "VERIFY":
+            return FeatureSet_Utf8Validation.VERIFY;
+        case 3:
+        case "NONE":
+            return FeatureSet_Utf8Validation.NONE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+function featureSet_Utf8ValidationToJSON(object) {
+    switch (object) {
+        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
+            return "UTF8_VALIDATION_UNKNOWN";
+        case FeatureSet_Utf8Validation.VERIFY:
+            return "VERIFY";
+        case FeatureSet_Utf8Validation.NONE:
+            return "NONE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+var FeatureSet_MessageEncoding;
+(function (FeatureSet_MessageEncoding) {
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
+})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
+function featureSet_MessageEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "MESSAGE_ENCODING_UNKNOWN":
+            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
+        case 1:
+        case "LENGTH_PREFIXED":
+            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
+        case 2:
+        case "DELIMITED":
+            return FeatureSet_MessageEncoding.DELIMITED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+function featureSet_MessageEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
+            return "MESSAGE_ENCODING_UNKNOWN";
+        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
+            return "LENGTH_PREFIXED";
+        case FeatureSet_MessageEncoding.DELIMITED:
+            return "DELIMITED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+var FeatureSet_JsonFormat;
+(function (FeatureSet_JsonFormat) {
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
+})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
+function featureSet_JsonFormatFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JSON_FORMAT_UNKNOWN":
+            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
+        case 1:
+        case "ALLOW":
+            return FeatureSet_JsonFormat.ALLOW;
+        case 2:
+        case "LEGACY_BEST_EFFORT":
+            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+function featureSet_JsonFormatToJSON(object) {
+    switch (object) {
+        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
+            return "JSON_FORMAT_UNKNOWN";
+        case FeatureSet_JsonFormat.ALLOW:
+            return "ALLOW";
+        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
+            return "LEGACY_BEST_EFFORT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+var FeatureSet_EnforceNamingStyle;
+(function (FeatureSet_EnforceNamingStyle) {
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
+})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
+function featureSet_EnforceNamingStyleFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENFORCE_NAMING_STYLE_UNKNOWN":
+            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
+        case 1:
+        case "STYLE2024":
+            return FeatureSet_EnforceNamingStyle.STYLE2024;
+        case 2:
+        case "STYLE_LEGACY":
+            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+function featureSet_EnforceNamingStyleToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
+            return "ENFORCE_NAMING_STYLE_UNKNOWN";
+        case FeatureSet_EnforceNamingStyle.STYLE2024:
+            return "STYLE2024";
+        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
+            return "STYLE_LEGACY";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+/**
+ * Represents the identified object's effect on the element in the original
+ * .proto file.
+ */
+var GeneratedCodeInfo_Annotation_Semantic;
+(function (GeneratedCodeInfo_Annotation_Semantic) {
+    /** NONE - There is no effect or the effect is indescribable. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
+    /** SET - The element is set or otherwise mutated. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
+    /** ALIAS - An alias to the element is returned. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
+})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
+function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "NONE":
+            return GeneratedCodeInfo_Annotation_Semantic.NONE;
+        case 1:
+        case "SET":
+            return GeneratedCodeInfo_Annotation_Semantic.SET;
+        case 2:
+        case "ALIAS":
+            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+function generatedCodeInfo_Annotation_SemanticToJSON(object) {
+    switch (object) {
+        case GeneratedCodeInfo_Annotation_Semantic.NONE:
+            return "NONE";
+        case GeneratedCodeInfo_Annotation_Semantic.SET:
+            return "SET";
+        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
+            return "ALIAS";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return {
+            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file?.length) {
+            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            package: isSet(object.package) ? globalThis.String(object.package) : "",
+            dependency: globalThis.Array.isArray(object?.dependency)
+                ? object.dependency.map((e) => globalThis.String(e))
+                : [],
+            publicDependency: globalThis.Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => globalThis.Number(e))
+                : [],
+            weakDependency: globalThis.Array.isArray(object?.weakDependency)
+                ? object.weakDependency.map((e) => globalThis.Number(e))
+                : [],
+            messageType: globalThis.Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            service: globalThis.Array.isArray(object?.service)
+                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.package !== undefined && message.package !== "") {
+            obj.package = message.package;
+        }
+        if (message.dependency?.length) {
+            obj.dependency = message.dependency;
+        }
+        if (message.publicDependency?.length) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        if (message.weakDependency?.length) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        if (message.messageType?.length) {
+            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.service?.length) {
+            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FileOptions.toJSON(message.options);
+        }
+        if (message.sourceCodeInfo !== undefined) {
+            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
+        }
+        if (message.syntax !== undefined && message.syntax !== "") {
+            obj.syntax = message.syntax;
+        }
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            field: globalThis.Array.isArray(object?.field)
+                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: globalThis.Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            extensionRange: globalThis.Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.field?.length) {
+            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.nestedType?.length) {
+            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.extensionRange?.length) {
+            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
+        }
+        if (message.oneofDecl?.length) {
+            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MessageOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+            declaration: globalThis.Array.isArray(object?.declaration)
+                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            verification: isSet(object.verification)
+                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
+                : 1,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        if (message.declaration?.length) {
+            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.verification !== undefined && message.verification !== 1) {
+            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions_Declaration = {
+    fromJSON(object) {
+        return {
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
+            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.fullName !== undefined && message.fullName !== "") {
+            obj.fullName = message.fullName;
+        }
+        if (message.type !== undefined && message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.reserved !== undefined && message.reserved !== false) {
+            obj.reserved = message.reserved;
+        }
+        if (message.repeated !== undefined && message.repeated !== false) {
+            obj.repeated = message.repeated;
+        }
+        return obj;
+    },
+};
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.label !== undefined && message.label !== 1) {
+            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
+        }
+        if (message.type !== undefined && message.type !== 1) {
+            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
+        }
+        if (message.typeName !== undefined && message.typeName !== "") {
+            obj.typeName = message.typeName;
+        }
+        if (message.extendee !== undefined && message.extendee !== "") {
+            obj.extendee = message.extendee;
+        }
+        if (message.defaultValue !== undefined && message.defaultValue !== "") {
+            obj.defaultValue = message.defaultValue;
+        }
+        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
+            obj.oneofIndex = Math.round(message.oneofIndex);
+        }
+        if (message.jsonName !== undefined && message.jsonName !== "") {
+            obj.jsonName = message.jsonName;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FieldOptions.toJSON(message.options);
+        }
+        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
+            obj.proto3Optional = message.proto3Optional;
+        }
+        return obj;
+    },
+};
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.OneofOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            value: globalThis.Array.isArray(object?.value)
+                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.value?.length) {
+            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumValueOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            method: globalThis.Array.isArray(object?.method)
+                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.method?.length) {
+            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ServiceOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.inputType !== undefined && message.inputType !== "") {
+            obj.inputType = message.inputType;
+        }
+        if (message.outputType !== undefined && message.outputType !== "") {
+            obj.outputType = message.outputType;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MethodOptions.toJSON(message.options);
+        }
+        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
+            obj.clientStreaming = message.clientStreaming;
+        }
+        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
+            obj.serverStreaming = message.serverStreaming;
+        }
+        return obj;
+    },
+};
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.javaPackage !== undefined && message.javaPackage !== "") {
+            obj.javaPackage = message.javaPackage;
+        }
+        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
+            obj.javaOuterClassname = message.javaOuterClassname;
+        }
+        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
+            obj.javaMultipleFiles = message.javaMultipleFiles;
+        }
+        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
+            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
+        }
+        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
+            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
+        }
+        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
+            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
+        }
+        if (message.goPackage !== undefined && message.goPackage !== "") {
+            obj.goPackage = message.goPackage;
+        }
+        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
+            obj.ccGenericServices = message.ccGenericServices;
+        }
+        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
+            obj.javaGenericServices = message.javaGenericServices;
+        }
+        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
+            obj.pyGenericServices = message.pyGenericServices;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
+            obj.ccEnableArenas = message.ccEnableArenas;
+        }
+        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
+            obj.objcClassPrefix = message.objcClassPrefix;
+        }
+        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
+            obj.csharpNamespace = message.csharpNamespace;
+        }
+        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
+            obj.swiftPrefix = message.swiftPrefix;
+        }
+        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
+            obj.phpClassPrefix = message.phpClassPrefix;
+        }
+        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
+            obj.phpNamespace = message.phpNamespace;
+        }
+        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
+            obj.phpMetadataNamespace = message.phpMetadataNamespace;
+        }
+        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
+            obj.rubyPackage = message.rubyPackage;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat)
+                ? globalThis.Boolean(object.messageSetWireFormat)
+                : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
+            obj.messageSetWireFormat = message.messageSetWireFormat;
+        }
+        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
+            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.mapEntry !== undefined && message.mapEntry !== false) {
+            obj.mapEntry = message.mapEntry;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
+            targets: globalThis.Array.isArray(object?.targets)
+                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
+                : [],
+            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
+                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.ctype !== undefined && message.ctype !== 0) {
+            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
+        }
+        if (message.packed !== undefined && message.packed !== false) {
+            obj.packed = message.packed;
+        }
+        if (message.jstype !== undefined && message.jstype !== 0) {
+            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
+        }
+        if (message.lazy !== undefined && message.lazy !== false) {
+            obj.lazy = message.lazy;
+        }
+        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
+            obj.unverifiedLazy = message.unverifiedLazy;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.weak !== undefined && message.weak !== false) {
+            obj.weak = message.weak;
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.retention !== undefined && message.retention !== 0) {
+            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
+        }
+        if (message.targets?.length) {
+            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
+        }
+        if (message.editionDefaults?.length) {
+            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_EditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            value: isSet(object.value) ? globalThis.String(object.value) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.value !== undefined && message.value !== "") {
+            obj.value = message.value;
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_FeatureSupport = {
+    fromJSON(object) {
+        return {
+            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
+            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
+            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
+            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
+            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
+        }
+        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
+            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
+        }
+        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
+            obj.deprecationWarning = message.deprecationWarning;
+        }
+        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
+            obj.editionRemoved = editionToJSON(message.editionRemoved);
+        }
+        return obj;
+    },
+};
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.allowAlias !== undefined && message.allowAlias !== false) {
+            obj.allowAlias = message.allowAlias;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
+            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: globalThis.Array.isArray(object?.name)
+                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
+                : [],
+            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name?.length) {
+            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
+        }
+        if (message.identifierValue !== undefined && message.identifierValue !== "") {
+            obj.identifierValue = message.identifierValue;
+        }
+        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
+            obj.positiveIntValue = message.positiveIntValue;
+        }
+        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
+            obj.negativeIntValue = message.negativeIntValue;
+        }
+        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
+            obj.doubleValue = message.doubleValue;
+        }
+        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
+            obj.stringValue = base64FromBytes(message.stringValue);
+        }
+        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
+            obj.aggregateValue = message.aggregateValue;
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.namePart !== "") {
+            obj.namePart = message.namePart;
+        }
+        if (message.isExtension !== false) {
+            obj.isExtension = message.isExtension;
+        }
+        return obj;
+    },
+};
+exports.FeatureSet = {
+    fromJSON(object) {
+        return {
+            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
+            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
+            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
+                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
+                : 0,
+            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
+            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
+            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
+            enforceNamingStyle: isSet(object.enforceNamingStyle)
+                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
+                : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
+            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
+        }
+        if (message.enumType !== undefined && message.enumType !== 0) {
+            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
+        }
+        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
+            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
+        }
+        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
+            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
+        }
+        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
+            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
+        }
+        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
+            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
+        }
+        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
+            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults = {
+    fromJSON(object) {
+        return {
+            defaults: globalThis.Array.isArray(object?.defaults)
+                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
+                : [],
+            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
+            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.defaults?.length) {
+            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
+        }
+        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
+            obj.minimumEdition = editionToJSON(message.minimumEdition);
+        }
+        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
+            obj.maximumEdition = editionToJSON(message.maximumEdition);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults_FeatureSetEditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            overridableFeatures: isSet(object.overridableFeatures)
+                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
+                : undefined,
+            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.overridableFeatures !== undefined) {
+            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
+        }
+        if (message.fixedFeatures !== undefined) {
+            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: globalThis.Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location?.length) {
+            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
+            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.span?.length) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        if (message.leadingComments !== undefined && message.leadingComments !== "") {
+            obj.leadingComments = message.leadingComments;
+        }
+        if (message.trailingComments !== undefined && message.trailingComments !== "") {
+            obj.trailingComments = message.trailingComments;
+        }
+        if (message.leadingDetachedComments?.length) {
+            obj.leadingDetachedComments = message.leadingDetachedComments;
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: globalThis.Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation?.length) {
+            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.sourceFile !== undefined && message.sourceFile !== "") {
+            obj.sourceFile = message.sourceFile;
+        }
+        if (message.begin !== undefined && message.begin !== 0) {
+            obj.begin = Math.round(message.begin);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.semantic !== undefined && message.semantic !== 0) {
+            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..9d24cbba10de9
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,29 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/timestamp.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.seconds !== "0") {
+            obj.seconds = message.seconds;
+        }
+        if (message.nanos !== 0) {
+            obj.nanos = Math.round(message.nanos);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
new file mode 100644
index 0000000000000..abc766bed3b88
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
@@ -0,0 +1,55 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/dsse.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
+/* eslint-disable */
+const envelope_1 = require("../../envelope");
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.DSSERequestV002 = {
+    fromJSON(object) {
+        return {
+            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
+            verifiers: globalThis.Array.isArray(object?.verifiers)
+                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== undefined) {
+            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
+        }
+        if (message.verifiers?.length) {
+            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.DSSELogEntryV002 = {
+    fromJSON(object) {
+        return {
+            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payloadHash !== undefined) {
+            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
new file mode 100644
index 0000000000000..c5eccb10e0a68
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
@@ -0,0 +1,81 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/entry.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
+/* eslint-disable */
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+exports.Entry = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
+            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.apiVersion !== "") {
+            obj.apiVersion = message.apiVersion;
+        }
+        if (message.spec !== undefined) {
+            obj.spec = exports.Spec.toJSON(message.spec);
+        }
+        return obj;
+    },
+};
+exports.Spec = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordV002)
+                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
+                : isSet(object.dsseV002)
+                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordV002") {
+            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
+        }
+        else if (message.spec?.$case === "dsseV002") {
+            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
+        }
+        return obj;
+    },
+};
+exports.CreateEntryRequest = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordRequestV002)
+                ? {
+                    $case: "hashedRekordRequestV002",
+                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
+                }
+                : isSet(object.dsseRequestV002)
+                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordRequestV002") {
+            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
+        }
+        else if (message.spec?.$case === "dsseRequestV002") {
+            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
new file mode 100644
index 0000000000000..d3fd1af2483d1
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
@@ -0,0 +1,56 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/hashedrekord.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.HashedRekordRequestV002 = {
+    fromJSON(object) {
+        return {
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+exports.HashedRekordLogEntryV002 = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data !== undefined) {
+            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
new file mode 100644
index 0000000000000..c437d5053a3cb
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
@@ -0,0 +1,74 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/verifier.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Verifier = exports.PublicKey = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+exports.PublicKey = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.Verifier = {
+    fromJSON(object) {
+        return {
+            verifier: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
+                : isSet(object.x509Certificate)
+                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
+                    : undefined,
+            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.verifier?.$case === "publicKey") {
+            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
+        }
+        else if (message.verifier?.$case === "x509Certificate") {
+            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
+            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content.length !== 0) {
+            obj.content = base64FromBytes(message.content);
+        }
+        if (message.verifier !== undefined) {
+            obj.verifier = exports.Verifier.toJSON(message.verifier);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..aed636f00e7cf
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,103 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_bundle.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps?.length) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : isSet(object.certificate)
+                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
+                        : undefined,
+            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content?.$case === "publicKey") {
+            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
+        }
+        else if (message.content?.$case === "x509CertificateChain") {
+            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
+        }
+        else if (message.content?.$case === "certificate") {
+            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
+        }
+        if (message.tlogEntries?.length) {
+            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
+        }
+        if (message.timestampVerificationData !== undefined) {
+            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
+        }
+        return obj;
+    },
+};
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.verificationMaterial !== undefined) {
+            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
+        }
+        if (message.content?.$case === "messageSignature") {
+            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
+        }
+        else if (message.content?.$case === "dsseEnvelope") {
+            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..b900516ed3b55
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,596 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_common.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
+    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
+    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
+    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
+})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        case 2:
+        case "SHA2_384":
+            return HashAlgorithm.SHA2_384;
+        case 3:
+        case "SHA2_512":
+            return HashAlgorithm.SHA2_512;
+        case 4:
+        case "SHA3_256":
+            return HashAlgorithm.SHA3_256;
+        case 5:
+        case "SHA3_384":
+            return HashAlgorithm.SHA3_384;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        case HashAlgorithm.SHA2_384:
+            return "SHA2_384";
+        case HashAlgorithm.SHA2_512:
+            return "SHA2_512";
+        case HashAlgorithm.SHA3_256:
+            return "SHA3_256";
+        case HashAlgorithm.SHA3_384:
+            return "SHA3_384";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ *
+ * PublicKeyDetails captures the public key/hash algorithm combinations
+ * recommended in the Sigstore ecosystem.
+ *
+ * This is modelled as a linear set as we want to provide a small number of
+ * opinionated options instead of allowing every possible permutation.
+ *
+ * Any changes to this enum MUST be reflected in the algorithm registry.
+ *
+ * See: 
+ *
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /**
+     * PKCS1_RSA_PKCS1V5 - RSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /**
+     * PKCS1_RSA_PSS - See RFC8017
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
+    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
+    /**
+     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+    /**
+     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
+     * were/are being used by most Sigstore clients implementations.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
+    /**
+     * LMS_SHA256 - LMS and LM-OTS
+     *
+     * These algorithms are deprecated and should not be used.
+     * Keys and signatures MAY be used by private Sigstore
+     * deployments, but will not be supported by the public
+     * good instance.
+     *
+     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
+     * Using them correctly requires discretion and careful consideration
+     * to ensure that individual secret keys are not used more than once.
+     * In addition, LM-OTS is a single-use scheme, meaning that it
+     * MUST NOT be used for more than one signature per LM-OTS key.
+     * If you cannot maintain these invariants, you MUST NOT use these
+     * schemes.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
+    /**
+     * ML_DSA_65 - ML-DSA
+     *
+     * These ML_DSA_65 and ML_DSA_87 algorithms are the pure variants that
+     * take data to sign rather than the prehash variants (HashML-DSA), which
+     * take digests.  While considered quantum-resistant, their usage
+     * involves tradeoffs in that signatures and keys are much larger, and
+     * this makes deployments more costly.
+     *
+     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
+     * In the future they MAY be used by private Sigstore deployments, but
+     * they are not yet fully functional.  This warning will be removed when
+     * these algorithms are widely supported by Sigstore clients and servers,
+     * but care should still be taken for production environments.
+     */
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
+})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 9:
+        case "PKIX_RSA_PKCS1V15_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
+        case 10:
+        case "PKIX_RSA_PKCS1V15_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
+        case 11:
+        case "PKIX_RSA_PKCS1V15_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
+        case 16:
+        case "PKIX_RSA_PSS_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
+        case 17:
+        case "PKIX_RSA_PSS_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
+        case 18:
+        case "PKIX_RSA_PSS_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 12:
+        case "PKIX_ECDSA_P384_SHA_384":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
+        case 13:
+        case "PKIX_ECDSA_P521_SHA_512":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        case 8:
+        case "PKIX_ED25519_PH":
+            return PublicKeyDetails.PKIX_ED25519_PH;
+        case 19:
+        case "PKIX_ECDSA_P384_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
+        case 20:
+        case "PKIX_ECDSA_P521_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
+        case 14:
+        case "LMS_SHA256":
+            return PublicKeyDetails.LMS_SHA256;
+        case 15:
+        case "LMOTS_SHA256":
+            return PublicKeyDetails.LMOTS_SHA256;
+        case 21:
+        case "ML_DSA_65":
+            return PublicKeyDetails.ML_DSA_65;
+        case 22:
+        case "ML_DSA_87":
+            return PublicKeyDetails.ML_DSA_87;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
+            return "PKIX_RSA_PKCS1V15_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
+            return "PKIX_RSA_PKCS1V15_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
+            return "PKIX_RSA_PKCS1V15_4096_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
+            return "PKIX_RSA_PSS_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
+            return "PKIX_RSA_PSS_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
+            return "PKIX_RSA_PSS_4096_SHA256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
+            return "PKIX_ECDSA_P384_SHA_384";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
+            return "PKIX_ECDSA_P521_SHA_512";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        case PublicKeyDetails.PKIX_ED25519_PH:
+            return "PKIX_ED25519_PH";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
+            return "PKIX_ECDSA_P384_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
+            return "PKIX_ECDSA_P521_SHA_256";
+        case PublicKeyDetails.LMS_SHA256:
+            return "LMS_SHA256";
+        case PublicKeyDetails.LMOTS_SHA256:
+            return "LMOTS_SHA256";
+        case PublicKeyDetails.ML_DSA_65:
+            return "ML_DSA_65";
+        case PublicKeyDetails.ML_DSA_87:
+            return "ML_DSA_87";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.algorithm !== 0) {
+            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
+        }
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        return obj;
+    },
+};
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageDigest !== undefined) {
+            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
+        }
+        if (message.signature.length !== 0) {
+            obj.signature = base64FromBytes(message.signature);
+        }
+        return obj;
+    },
+};
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.keyId.length !== 0) {
+            obj.keyId = base64FromBytes(message.keyId);
+        }
+        return obj;
+    },
+};
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedTimestamp.length !== 0) {
+            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
+        }
+        return obj;
+    },
+};
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes !== undefined) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = exports.TimeRange.toJSON(message.validFor);
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.hint !== "") {
+            obj.hint = message.hint;
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id?.length) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.oid !== undefined) {
+            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
+            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.organization !== "") {
+            obj.organization = message.organization;
+        }
+        if (message.commonName !== "") {
+            obj.commonName = message.commonName;
+        }
+        return obj;
+    },
+};
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: globalThis.String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.type !== 0) {
+            obj.type = subjectAlternativeNameTypeToJSON(message.type);
+        }
+        if (message.identity?.$case === "regexp") {
+            obj.regexp = message.identity.regexp;
+        }
+        else if (message.identity?.$case === "value") {
+            obj.value = message.identity.value;
+        }
+        return obj;
+    },
+};
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: globalThis.Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates?.length) {
+            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined) {
+            obj.start = message.start.toISOString();
+        }
+        if (message.end !== undefined) {
+            obj.end = message.end.toISOString();
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..fd8ea8384664d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,137 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_rekor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            version: isSet(object.version) ? globalThis.String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.version !== "") {
+            obj.version = message.version;
+        }
+        return obj;
+    },
+};
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== "") {
+            obj.envelope = message.envelope;
+        }
+        return obj;
+    },
+};
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
+            hashes: globalThis.Array.isArray(object?.hashes)
+                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
+                : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.rootHash.length !== 0) {
+            obj.rootHash = base64FromBytes(message.rootHash);
+        }
+        if (message.treeSize !== "0") {
+            obj.treeSize = message.treeSize;
+        }
+        if (message.hashes?.length) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
+        }
+        if (message.checkpoint !== undefined) {
+            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
+        }
+        return obj;
+    },
+};
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedEntryTimestamp.length !== 0) {
+            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
+        }
+        return obj;
+    },
+};
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.kindVersion !== undefined) {
+            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
+        }
+        if (message.integratedTime !== "0") {
+            obj.integratedTime = message.integratedTime;
+        }
+        if (message.inclusionPromise !== undefined) {
+            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
+        }
+        if (message.inclusionProof !== undefined) {
+            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
+        }
+        if (message.canonicalizedBody.length !== 0) {
+            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
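
A minimal usage sketch for the generated codecs above (`InclusionProof`, `InclusionPromise`, `TransparencyLogEntry`), assuming the vendored copy resolves by its package name `@sigstore/protobuf-specs`; the field names and defaults come straight from the `fromJSON`/`toJSON` pairs shown here:

```js
const { TransparencyLogEntry } = require('@sigstore/protobuf-specs');

// fromJSON fills unset fields with their defaults ("0", empty Buffer, undefined)
const entry = TransparencyLogEntry.fromJSON({
  logIndex: '42',
  integratedTime: '1700000000',
  canonicalizedBody: Buffer.from('{"kind":"hashedrekord"}').toString('base64'),
});

// toJSON only emits fields that differ from those defaults
console.log(TransparencyLogEntry.toJSON(entry));
// { logIndex: '42', integratedTime: '1700000000', canonicalizedBody: '<base64>' }
```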
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..1b5492fb1a77e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,284 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_trustroot.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
+exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
+exports.serviceSelectorToJSON = serviceSelectorToJSON;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+/**
+ * ServiceSelector specifies how a client SHOULD select a set of
+ * Services to connect to. A client SHOULD throw an error if
+ * the value is SERVICE_SELECTOR_UNDEFINED.
+ */
+var ServiceSelector;
+(function (ServiceSelector) {
+    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
+    /**
+     * ALL - Clients SHOULD select all Services based on supported API version
+     * and validity window.
+     */
+    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
+    /**
+     * ANY - Clients SHOULD select one Service based on supported API version
+     * and validity window. It is up to the client implementation to
+     * decide how to select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
+    /**
+     * EXACT - Clients SHOULD select a specific number of Services based on
+     * supported API version and validity window, using the provided
+     * `count`. It is up to the client implementation to decide how to
+     * select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
+})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
+function serviceSelectorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SERVICE_SELECTOR_UNDEFINED":
+            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
+        case 1:
+        case "ALL":
+            return ServiceSelector.ALL;
+        case 2:
+        case "ANY":
+            return ServiceSelector.ANY;
+        case 3:
+        case "EXACT":
+            return ServiceSelector.EXACT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+function serviceSelectorToJSON(object) {
+    switch (object) {
+        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
+            return "SERVICE_SELECTOR_UNDEFINED";
+        case ServiceSelector.ALL:
+            return "ALL";
+        case ServiceSelector.ANY:
+            return "ANY";
+        case ServiceSelector.EXACT:
+            return "EXACT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.baseUrl !== "") {
+            obj.baseUrl = message.baseUrl;
+        }
+        if (message.hashAlgorithm !== 0) {
+            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
+        }
+        if (message.publicKey !== undefined) {
+            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.checkpointKeyId !== undefined) {
+            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.subject !== undefined) {
+            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
+        }
+        if (message.uri !== "") {
+            obj.uri = message.uri;
+        }
+        if (message.certChain !== undefined) {
+            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            tlogs: globalThis.Array.isArray(object?.tlogs)
+                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: globalThis.Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.tlogs?.length) {
+            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.certificateAuthorities?.length) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        if (message.ctlogs?.length) {
+            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.timestampAuthorities?.length) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SigningConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
+                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
+                : [],
+            rekorTlogConfig: isSet(object.rekorTlogConfig)
+                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
+                : undefined,
+            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.caUrls?.length) {
+            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.oidcUrls?.length) {
+            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogUrls?.length) {
+            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogConfig !== undefined) {
+            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
+        }
+        if (message.tsaUrls?.length) {
+            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.tsaConfig !== undefined) {
+            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
+        }
+        return obj;
+    },
+};
+exports.Service = {
+    fromJSON(object) {
+        return {
+            url: isSet(object.url) ? globalThis.String(object.url) : "",
+            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.url !== "") {
+            obj.url = message.url;
+        }
+        if (message.majorApiVersion !== 0) {
+            obj.majorApiVersion = Math.round(message.majorApiVersion);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.ServiceConfiguration = {
+    fromJSON(object) {
+        return {
+            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
+            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.selector !== 0) {
+            obj.selector = serviceSelectorToJSON(message.selector);
+        }
+        if (message.count !== 0) {
+            obj.count = Math.round(message.count);
+        }
+        return obj;
+    },
+};
+exports.ClientTrustConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
+            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.trustedRoot !== undefined) {
+            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
+        }
+        if (message.signingConfig !== undefined) {
+            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
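
A small sketch of the enum helpers and `ServiceConfiguration` codec defined above, assuming the same `@sigstore/protobuf-specs` resolution; the helpers accept either numeric or string encodings and throw on anything else:

```js
const {
  ServiceSelector,
  serviceSelectorFromJSON,
  ServiceConfiguration,
} = require('@sigstore/protobuf-specs');

console.log(serviceSelectorFromJSON('ANY') === ServiceSelector.ANY); // true
console.log(serviceSelectorFromJSON(3) === ServiceSelector.EXACT);   // true

// ServiceConfiguration round-trips the selector back to its string form
const cfg = ServiceConfiguration.fromJSON({ selector: 'EXACT', count: 2 });
console.log(ServiceConfiguration.toJSON(cfg)); // { selector: 'EXACT', count: 2 }
```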
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..876fe9cc1db1d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,281 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_verification.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: globalThis.Array.isArray(object?.oids)
+                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.issuer !== "") {
+            obj.issuer = message.issuer;
+        }
+        if (message.san !== undefined) {
+            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
+        }
+        if (message.oids?.length) {
+            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: globalThis.Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities?.length) {
+            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: globalThis.Array.isArray(object?.publicKeys)
+                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys?.length) {
+            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+            integratedTsOptions: isSet(object.integratedTsOptions)
+                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
+                : undefined,
+            observerOptions: isSet(object.observerOptions)
+                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signers?.$case === "certificateIdentities") {
+            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
+        }
+        else if (message.signers?.$case === "publicKeys") {
+            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
+        }
+        if (message.tlogOptions !== undefined) {
+            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
+        }
+        if (message.ctlogOptions !== undefined) {
+            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
+        }
+        if (message.tsaOptions !== undefined) {
+            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
+        }
+        if (message.integratedTsOptions !== undefined) {
+            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
+        }
+        if (message.observerOptions !== undefined) {
+            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? globalThis.Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.performOnlineVerification !== false) {
+            obj.performOnlineVerification = message.performOnlineVerification;
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : isSet(object.artifactDigest)
+                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data?.$case === "artifactUri") {
+            obj.artifactUri = message.data.artifactUri;
+        }
+        else if (message.data?.$case === "artifact") {
+            obj.artifact = base64FromBytes(message.data.artifact);
+        }
+        else if (message.data?.$case === "artifactDigest") {
+            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
+        }
+        return obj;
+    },
+};
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.artifactTrustRoot !== undefined) {
+            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
+        }
+        if (message.artifactVerificationOptions !== undefined) {
+            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
+        }
+        if (message.bundle !== undefined) {
+            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
+        }
+        if (message.artifact !== undefined) {
+            obj.artifact = exports.Artifact.toJSON(message.artifact);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
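
A sketch of how the `signers` oneof in `ArtifactVerificationOptions` behaves, based on the `fromJSON`/`toJSON` code above (the issuer value is just an illustrative placeholder):

```js
const { ArtifactVerificationOptions } = require('@sigstore/protobuf-specs');

// Which JSON key is present decides the $case of the decoded oneof
const opts = ArtifactVerificationOptions.fromJSON({
  certificateIdentities: { identities: [{ issuer: 'https://issuer.example.com' }] },
  tlogOptions: { threshold: 1, performOnlineVerification: false, disable: false },
});

console.log(opts.signers.$case);                                   // 'certificateIdentities'
console.log(ArtifactVerificationOptions.toJSON(opts).tlogOptions); // { threshold: 1 } (false booleans are omitted)
```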
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
new file mode 100644
index 0000000000000..10745efc39a1f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
@@ -0,0 +1,35 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2025 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
+__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
+__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
+__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..f87b2540fbf98
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.5.0",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "exports": {
+    ".": "./dist/index.js",
+    "./rekor/v2": "./dist/rekor/v2/index.js"
+  },
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node18": "^18.2.4",
+    "@types/node": "^18.14.0",
+    "typescript": "^5.7.2"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  }
+}
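
The `exports` map above exposes two entry points; a quick sketch, assuming the vendored copy is resolvable by name:

```js
const specs = require('@sigstore/protobuf-specs');            // -> dist/index.js
const rekorV2 = require('@sigstore/protobuf-specs/rekor/v2'); // -> dist/rekor/v2/index.js

console.log(typeof specs.TrustedRoot.fromJSON); // 'function'
console.log(Object.keys(rekorV2).length > 0);   // true
```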
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/LICENSE b/node_modules/pacote/node_modules/@sigstore/sign/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js
new file mode 100644
index 0000000000000..61d5eba4568a3
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BaseBundleBuilder = void 0;
+// BaseBundleBuilder is a base class for BundleBuilder implementations. It
+// provides the basic workflow for signing and witnessing an artifact.
+// Subclasses must implement the `package` method to assemble a valid bundle
+// with the generated signature and verification material.
+class BaseBundleBuilder {
+    constructor(options) {
+        this.signer = options.signer;
+        this.witnesses = options.witnesses;
+    }
+    // Executes the signing/witnessing process for the given artifact.
+    async create(artifact) {
+        const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob));
+        const bundle = await this.package(artifact, signature);
+        // Invoke all of the witnesses in parallel
+        const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key))));
+        // Collect the verification material from all of the witnesses
+        const tlogEntryList = [];
+        const timestampList = [];
+        verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => {
+            tlogEntryList.push(...(tlogEntries ?? []));
+            timestampList.push(...(rfc3161Timestamps ?? []));
+        });
+        // Merge the collected verification material into the bundle
+        bundle.verificationMaterial.tlogEntries = tlogEntryList;
+        bundle.verificationMaterial.timestampVerificationData = {
+            rfc3161Timestamps: timestampList,
+        };
+        return bundle;
+    }
+    // Override this function to apply any pre-signing transformations to the
+    // artifact. The returned buffer will be signed by the signer. The default
+    // implementation simply returns the artifact data.
+    async prepare(artifact) {
+        return artifact.data;
+    }
+}
+exports.BaseBundleBuilder = BaseBundleBuilder;
+// Extracts the public key from a KeyMaterial. Returns either the public key
+// or the certificate, depending on the type of key material.
+function publicKey(key) {
+    switch (key.$case) {
+        case 'publicKey':
+            return key.publicKey;
+        case 'x509Certificate':
+            return key.certificate;
+    }
+}
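
A hypothetical sketch of what a concrete builder adds on top of `BaseBundleBuilder`: only the `package()` step, which turns the artifact and signature into a bundle. The deep require path and the bundle shape here are assumptions for illustration; the real builders are the DSSE and message-signature classes added below.

```js
const { BaseBundleBuilder } = require('@sigstore/sign/dist/bundler/base');

class MinimalBundleBuilder extends BaseBundleBuilder {
  async package(artifact, signature) {
    // create() will attach witness output (tlogEntries, rfc3161Timestamps)
    // to verificationMaterial after this returns
    return {
      content: {
        $case: 'messageSignature',
        messageSignature: { signature: signature.signature },
      },
      verificationMaterial: {},
    };
  }
}

module.exports = { MinimalBundleBuilder };
```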
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js
new file mode 100644
index 0000000000000..34b1d12f2b44c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js
@@ -0,0 +1,81 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toMessageSignatureBundle = toMessageSignatureBundle;
+exports.toDSSEBundle = toDSSEBundle;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const sigstore = __importStar(require("@sigstore/bundle"));
+const util_1 = require("../util");
+// Helper functions for assembling the parts of a Sigstore bundle
+// Message signature bundle - $case: 'messageSignature'
+function toMessageSignatureBundle(artifact, signature) {
+    const digest = util_1.crypto.digest('sha256', artifact.data);
+    return sigstore.toMessageSignatureBundle({
+        digest,
+        signature: signature.signature,
+        certificate: signature.key.$case === 'x509Certificate'
+            ? util_1.pem.toDER(signature.key.certificate)
+            : undefined,
+        keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+        certificateChain: true,
+    });
+}
+// DSSE envelope bundle - $case: 'dsseEnvelope'
+function toDSSEBundle(artifact, signature, certificateChain) {
+    return sigstore.toDSSEBundle({
+        artifact: artifact.data,
+        artifactType: artifact.type,
+        signature: signature.signature,
+        certificate: signature.key.$case === 'x509Certificate'
+            ? util_1.pem.toDER(signature.key.certificate)
+            : undefined,
+        keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
+        certificateChain,
+    });
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js
new file mode 100644
index 0000000000000..86046ba8f3013
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSEBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../util");
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for DSSE wrapped attestations
+class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
+    constructor(options) {
+        super(options);
+        this.certificateChain = options.certificateChain ?? false;
+    }
+    // DSSE requires the artifact to be pre-encoded with the payload type
+    // before the signature is generated.
+    async prepare(artifact) {
+        const a = artifactDefaults(artifact);
+        return util_1.dsse.preAuthEncoding(a.type, a.data);
+    }
+    // Packages the artifact and signature into a DSSE bundle
+    async package(artifact, signature) {
+        return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain);
+    }
+}
+exports.DSSEBundleBuilder = DSSEBundleBuilder;
+// Defaults the artifact type to an empty string if not provided
+function artifactDefaults(artifact) {
+    return {
+        ...artifact,
+        type: artifact.type ?? '',
+    };
+}
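
The `prepare()` override above signs the DSSE pre-authentication encoding rather than the raw payload. A sketch of what that encoding looks like, following the DSSE v1 spec (the actual helper lives in `../util` and is not shown in this hunk):

```js
// PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
function preAuthEncoding(payloadType, payload) {
  const prefix = Buffer.from(
    `DSSEv1 ${Buffer.byteLength(payloadType)} ${payloadType} ${payload.length} `,
    'ascii'
  );
  return Buffer.concat([prefix, payload]);
}

console.log(preAuthEncoding('application/vnd.in-toto+json', Buffer.from('{}')).toString());
// DSSEv1 28 application/vnd.in-toto+json 2 {}
```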
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js
new file mode 100644
index 0000000000000..d67c8c324a4f0
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var dsse_1 = require("./dsse");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } });
+var message_1 = require("./message");
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js
new file mode 100644
index 0000000000000..e3991f42bab93
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureBundleBuilder = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const base_1 = require("./base");
+const bundle_1 = require("./bundle");
+// BundleBuilder implementation for raw message signatures
+class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder {
+    constructor(options) {
+        super(options);
+    }
+    async package(artifact, signature) {
+        return (0, bundle_1.toMessageSignatureBundle)(artifact, signature);
+    }
+}
+exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js
new file mode 100644
index 0000000000000..d28f1913cc77e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js
@@ -0,0 +1,39 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.InternalError = void 0;
+exports.internalError = internalError;
+const error_1 = require("./external/error");
+class InternalError extends Error {
+    constructor({ code, message, cause, }) {
+        super(message);
+        this.name = this.constructor.name;
+        this.cause = cause;
+        this.code = code;
+    }
+}
+exports.InternalError = InternalError;
+function internalError(err, code, message) {
+    if (err instanceof error_1.HTTPError) {
+        message += ` - ${err.message}`;
+    }
+    throw new InternalError({
+        code: code,
+        message: message,
+        cause: err,
+    });
+}
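
A minimal sketch of the error helper above: `internalError()` appends HTTP details to the message and rethrows as `InternalError`, preserving the original error as `cause` (the require paths and the error code string are illustrative):

```js
const { internalError, InternalError } = require('@sigstore/sign/dist/error');
const { HTTPError } = require('@sigstore/sign/dist/external/error');

try {
  internalError(
    new HTTPError({ status: 503, message: 'unavailable' }),
    'EXAMPLE_ERROR_CODE',
    'error creating entry'
  );
} catch (e) {
  console.log(e instanceof InternalError); // true
  console.log(e.code);                     // 'EXAMPLE_ERROR_CODE'
  console.log(e.message);                  // 'error creating entry - (503) unavailable'
}
```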
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js
new file mode 100644
index 0000000000000..a6a65adebb176
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js
@@ -0,0 +1,26 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HTTPError = void 0;
+class HTTPError extends Error {
+    constructor({ status, message, location, }) {
+        super(`(${status}) ${message}`);
+        this.statusCode = status;
+        this.location = location;
+    }
+}
+exports.HTTPError = HTTPError;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js
new file mode 100644
index 0000000000000..116090f3c641e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js
@@ -0,0 +1,98 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.fetchWithRetry = fetchWithRetry;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const http2_1 = require("http2");
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const proc_log_1 = require("proc-log");
+const promise_retry_1 = __importDefault(require("promise-retry"));
+const util_1 = require("../util");
+const error_1 = require("./error");
+const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants;
+async function fetchWithRetry(url, options) {
+    return (0, promise_retry_1.default)(async (retry, attemptNum) => {
+        const method = options.method || 'POST';
+        const headers = {
+            [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(),
+            ...options.headers,
+        };
+        const response = await (0, make_fetch_happen_1.default)(url, {
+            method,
+            headers,
+            body: options.body,
+            timeout: options.timeout,
+            retry: false, // We're handling retries ourselves
+        }).catch((reason) => {
+            proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`);
+            return retry(reason);
+        });
+        if (response.ok) {
+            return response;
+        }
+        else {
+            const error = await errorFromResponse(response);
+            proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`);
+            if (retryable(response.status)) {
+                return retry(error);
+            }
+            else {
+                throw error;
+            }
+        }
+    }, retryOpts(options.retry));
+}
+// Translate a Response into an HTTPError instance. This will attempt to parse
+// the response body for a message, but will default to the statusText if none
+// is found.
+const errorFromResponse = async (response) => {
+    let message = response.statusText;
+    const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined;
+    const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE);
+    // If response type is JSON, try to parse the body for a message
+    if (contentType?.includes('application/json')) {
+        try {
+            const body = await response.json();
+            message = body.message || message;
+        }
+        catch (e) {
+            // ignore
+        }
+    }
+    return new error_1.HTTPError({
+        status: response.status,
+        message: message,
+        location: location,
+    });
+};
+// Determine if a status code is retryable. This includes 5xx errors, 408, and
+// 429.
+const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR;
+// Normalize the retry options to the format expected by promise-retry
+const retryOpts = (retry) => {
+    if (typeof retry === 'boolean') {
+        return { retries: retry ? 1 : 0 };
+    }
+    else if (typeof retry === 'number') {
+        return { retries: retry };
+    }
+    else {
+        return { retries: 0, ...retry };
+    }
+};
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js
new file mode 100644
index 0000000000000..de6a1ad9f9e79
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js
@@ -0,0 +1,41 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Fulcio = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+/**
+ * Fulcio API client.
+ */
+class Fulcio {
+    constructor(options) {
+        this.options = options;
+    }
+    async createSigningCertificate(request) {
+        const { baseURL, retry, timeout } = this.options;
+        const url = `${baseURL}/api/v2/signingCert`;
+        const response = await (0, fetch_1.fetchWithRetry)(url, {
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify(request),
+            timeout,
+            retry,
+        });
+        return response.json();
+    }
+}
+exports.Fulcio = Fulcio;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js
new file mode 100644
index 0000000000000..bb59a126e032f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js
@@ -0,0 +1,80 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Rekor = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+/**
+ * Rekor API client.
+ */
+class Rekor {
+    constructor(options) {
+        this.options = options;
+    }
+    /**
+     * Create a new entry in the Rekor log.
+     * @param proposedEntry {ProposedEntry} Data to create a new entry
+     * @returns {Promise<Entry>} The created entry
+     */
+    async createEntry(proposedEntry) {
+        const { baseURL, timeout, retry } = this.options;
+        const url = `${baseURL}/api/v1/log/entries`;
+        const response = await (0, fetch_1.fetchWithRetry)(url, {
+            headers: {
+                'Content-Type': 'application/json',
+                Accept: 'application/json',
+            },
+            body: JSON.stringify(proposedEntry),
+            timeout,
+            retry,
+        });
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+    /**
+     * Get an entry from the Rekor log.
+     * @param uuid {string} The UUID of the entry to retrieve
+     * @returns {Promise<Entry>} The retrieved entry
+     */
+    async getEntry(uuid) {
+        const { baseURL, timeout, retry } = this.options;
+        const url = `${baseURL}/api/v1/log/entries/${uuid}`;
+        const response = await (0, fetch_1.fetchWithRetry)(url, {
+            method: 'GET',
+            headers: {
+                Accept: 'application/json',
+            },
+            timeout,
+            retry,
+        });
+        const data = await response.json();
+        return entryFromResponse(data);
+    }
+}
+exports.Rekor = Rekor;
+// Unpack the response from the Rekor API into a more convenient format.
+function entryFromResponse(data) {
+    const entries = Object.entries(data);
+    if (entries.length != 1) {
+        throw new Error('Received multiple entries in Rekor response');
+    }
+    // Grab UUID and entry data from the response
+    const [uuid, entry] = entries[0];
+    return {
+        ...entry,
+        uuid,
+    };
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js
new file mode 100644
index 0000000000000..a948ba9cca2c7
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimestampAuthority = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fetch_1 = require("./fetch");
+class TimestampAuthority {
+    constructor(options) {
+        this.options = options;
+    }
+    async createTimestamp(request) {
+        const { baseURL, timeout, retry } = this.options;
+        const url = `${baseURL}/api/v1/timestamp`;
+        const response = await (0, fetch_1.fetchWithRetry)(url, {
+            headers: {
+                'Content-Type': 'application/json',
+            },
+            body: JSON.stringify(request),
+            timeout,
+            retry,
+        });
+        return response.buffer();
+    }
+}
+exports.TimestampAuthority = TimestampAuthority;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js
new file mode 100644
index 0000000000000..d79133952b605
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js
@@ -0,0 +1,73 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+// Collection of all the CI-specific providers we have implemented
+const providers = [getGHAToken, getEnv];
+/**
+ * CIContextProvider is a composite identity provider which will iterate
+ * over all of the CI-specific providers and return the token from the first
+ * one that resolves.
+ */
+class CIContextProvider {
+    /* istanbul ignore next */
+    constructor(audience = 'sigstore') {
+        this.audience = audience;
+    }
+    // Invoke all registered ProviderFuncs and return the value of whichever one
+    // resolves first.
+    async getToken() {
+        return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available'));
+    }
+}
+exports.CIContextProvider = CIContextProvider;
+/**
+ * getGHAToken can retrieve an OIDC token when running in a GitHub Actions
+ * workflow
+ */
+async function getGHAToken(audience) {
+    // Check to see if we're running in GitHub Actions
+    if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL ||
+        !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) {
+        return Promise.reject('no token available');
+    }
+    // Construct URL to request token w/ appropriate audience
+    const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL);
+    url.searchParams.append('audience', audience);
+    const response = await (0, make_fetch_happen_1.default)(url.href, {
+        retry: 2,
+        headers: {
+            Accept: 'application/json',
+            Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
+        },
+    });
+    return response.json().then((data) => data.value);
+}
+/**
+ * getEnv can retrieve an OIDC token from an environment variable.
+ * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar
+ */
+async function getEnv() {
+    if (!process.env.SIGSTORE_ID_TOKEN) {
+        return Promise.reject('no token available');
+    }
+    return process.env.SIGSTORE_ID_TOKEN;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js
new file mode 100644
index 0000000000000..1c1223b443fab
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js
@@ -0,0 +1,20 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CIContextProvider = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var ci_1 = require("./ci");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js
new file mode 100644
index 0000000000000..383b76083361b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
+var bundler_1 = require("./bundler");
+Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } });
+Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
+var identity_1 = require("./identity");
+Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } });
+var signer_1 = require("./signer");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } });
+var witness_1 = require("./witness");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } });
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
new file mode 100644
index 0000000000000..f01703cfab564
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
@@ -0,0 +1,59 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const fulcio_1 = require("../../external/fulcio");
+class CAClient {
+    constructor(options) {
+        this.fulcio = new fulcio_1.Fulcio({
+            baseURL: options.fulcioBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async createSigningCertificate(identityToken, publicKey, challenge) {
+        const request = toCertificateRequest(identityToken, publicKey, challenge);
+        try {
+            const resp = await this.fulcio.createSigningCertificate(request);
+            // Account for the fact that the response may contain either a
+            // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
+            const cert = resp.signedCertificateEmbeddedSct
+                ? resp.signedCertificateEmbeddedSct
+                : resp.signedCertificateDetachedSct;
+            return cert.chain.certificates;
+        }
+        catch (err) {
+            (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate');
+        }
+    }
+}
+exports.CAClient = CAClient;
+function toCertificateRequest(identityToken, publicKey, challenge) {
+    return {
+        credentials: {
+            oidcIdentityToken: identityToken,
+        },
+        publicKeyRequest: {
+            publicKey: {
+                algorithm: 'ECDSA',
+                content: publicKey,
+            },
+            proofOfPossession: challenge.toString('base64'),
+        },
+    };
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
new file mode 100644
index 0000000000000..481aa5c3579a2
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
@@ -0,0 +1,45 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.EphemeralSigner = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const crypto_1 = __importDefault(require("crypto"));
+const EC_KEYPAIR_TYPE = 'ec';
+const P256_CURVE = 'P-256';
+// Signer implementation which uses an ephemeral keypair to sign artifacts.
+// The private key lives only in memory and is tied to the lifetime of the
+// EphemeralSigner instance.
+class EphemeralSigner {
+    constructor() {
+        this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
+            namedCurve: P256_CURVE,
+        });
+    }
+    async sign(data) {
+        const signature = crypto_1.default.sign(null, data, this.keypair.privateKey);
+        const publicKey = this.keypair.publicKey
+            .export({ format: 'pem', type: 'spki' })
+            .toString('ascii');
+        return {
+            signature: signature,
+            key: { $case: 'publicKey', publicKey },
+        };
+    }
+}
+exports.EphemeralSigner = EphemeralSigner;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
new file mode 100644
index 0000000000000..89a432548d2b4
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
@@ -0,0 +1,87 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const util_1 = require("../../util");
+const ca_1 = require("./ca");
+const ephemeral_1 = require("./ephemeral");
+exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
+// Signer implementation which can be used to decorate another signer
+// with a Fulcio-issued signing certificate for the signer's public key.
+// Must be instantiated with an identity provider which can provide a JWT
+// which represents the identity to be bound to the signing certificate.
+class FulcioSigner {
+    constructor(options) {
+        this.ca = new ca_1.CAClient({
+            ...options,
+            fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL,
+        });
+        this.identityProvider = options.identityProvider;
+        this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner();
+    }
+    async sign(data) {
+        // Retrieve identity token from the supplied identity provider
+        const identityToken = await this.getIdentityToken();
+        // Extract challenge claim from OIDC token
+        let subject;
+        try {
+            subject = util_1.oidc.extractJWTSubject(identityToken);
+        }
+        catch (err) {
+            throw new error_1.InternalError({
+                code: 'IDENTITY_TOKEN_PARSE_ERROR',
+                message: `invalid identity token: ${identityToken}`,
+                cause: err,
+            });
+        }
+        // Construct challenge value by signing the subject claim
+        const challenge = await this.keyHolder.sign(Buffer.from(subject));
+        if (challenge.key.$case !== 'publicKey') {
+            throw new error_1.InternalError({
+                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
+                message: 'unexpected format for signing key',
+            });
+        }
+        // Create signing certificate
+        const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature);
+        // Generate artifact signature
+        const signature = await this.keyHolder.sign(data);
+        // Specifically returning only the first certificate in the chain
+        // as the key.
+        return {
+            signature: signature.signature,
+            key: {
+                $case: 'x509Certificate',
+                certificate: certificates[0],
+            },
+        };
+    }
+    async getIdentityToken() {
+        try {
+            return await this.identityProvider.getToken();
+        }
+        catch (err) {
+            throw new error_1.InternalError({
+                code: 'IDENTITY_TOKEN_READ_ERROR',
+                message: 'error retrieving identity token',
+                cause: err,
+            });
+        }
+    }
+}
+exports.FulcioSigner = FulcioSigner;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js
new file mode 100644
index 0000000000000..e2087767b81c1
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js
@@ -0,0 +1,22 @@
+"use strict";
+/* istanbul ignore file */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var fulcio_1 = require("./fulcio");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js
new file mode 100644
index 0000000000000..b92c54183375d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js
@@ -0,0 +1,17 @@
+"use strict";
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js
new file mode 100644
index 0000000000000..436630cfbbf19
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js
@@ -0,0 +1,59 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var core_1 = require("@sigstore/core");
+Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } });
+Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } });
+Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } });
+Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } });
+Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } });
+exports.oidc = __importStar(require("./oidc"));
+exports.ua = __importStar(require("./ua"));
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js
new file mode 100644
index 0000000000000..37c5b168ee12e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js
@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extractJWTSubject = extractJWTSubject;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+function extractJWTSubject(jwt) {
+    const parts = jwt.split('.', 3);
+    const payload = JSON.parse(core_1.encoding.base64Decode(parts[1]));
+    switch (payload.iss) {
+        case 'https://accounts.google.com':
+        case 'https://oauth2.sigstore.dev/auth':
+            return payload.email;
+        default:
+            return payload.sub;
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js
new file mode 100644
index 0000000000000..b15ff2070fb9f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js
@@ -0,0 +1,32 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getUserAgent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+// Format User-Agent: <product> / <product-version> (<comment>)
+// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
+const getUserAgent = () => {
+    const packageVersion = require('../../package.json').version;
+    const nodeVersion = process.version;
+    const platformName = os_1.default.platform();
+    const archName = os_1.default.arch();
+    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
+};
+exports.getUserAgent = getUserAgent;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js
new file mode 100644
index 0000000000000..72677c399caa7
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js
@@ -0,0 +1,24 @@
+"use strict";
+/* istanbul ignore file */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var tlog_1 = require("./tlog");
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
+var tsa_1 = require("./tsa");
+Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js
new file mode 100644
index 0000000000000..22c895f2ca7ed
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TLogClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const error_2 = require("../../external/error");
+const rekor_1 = require("../../external/rekor");
+class TLogClient {
+    constructor(options) {
+        this.fetchOnConflict = options.fetchOnConflict ?? false;
+        this.rekor = new rekor_1.Rekor({
+            baseURL: options.rekorBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async createEntry(proposedEntry) {
+        let entry;
+        try {
+            entry = await this.rekor.createEntry(proposedEntry);
+        }
+        catch (err) {
+            // If the entry already exists, fetch it (if enabled)
+            if (entryExistsError(err) && this.fetchOnConflict) {
+                // Grab the UUID of the existing entry from the location header
+                /* istanbul ignore next */
+                const uuid = err.location.split('/').pop() || '';
+                try {
+                    entry = await this.rekor.getEntry(uuid);
+                }
+                catch (err) {
+                    (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry');
+                }
+            }
+            else {
+                (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry');
+            }
+        }
+        return entry;
+    }
+}
+exports.TLogClient = TLogClient;
+function entryExistsError(value) {
+    return (value instanceof error_2.HTTPError &&
+        value.statusCode === 409 &&
+        value.location !== undefined);
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
new file mode 100644
index 0000000000000..bb1c68e914b90
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
@@ -0,0 +1,140 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toProposedEntry = toProposedEntry;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
+const util_1 = require("../../util");
+const SHA256_ALGORITHM = 'sha256';
+function toProposedEntry(content, publicKey, 
+// TODO: Remove this parameter once we have completely switched to 'dsse' entries
+entryType = 'dsse') {
+    switch (content.$case) {
+        case 'dsseEnvelope':
+            // TODO: Remove this conditional once we have completely ditched "intoto" entries
+            if (entryType === 'intoto') {
+                return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
+            }
+            return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
+        case 'messageSignature':
+            return toProposedHashedRekordEntry(content.messageSignature, publicKey);
+    }
+}
+// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
+// and signature
+function toProposedHashedRekordEntry(messageSignature, publicKey) {
+    const hexDigest = messageSignature.messageDigest.digest.toString('hex');
+    const b64Signature = messageSignature.signature.toString('base64');
+    const b64Key = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'hashedrekord',
+        spec: {
+            data: {
+                hash: {
+                    algorithm: SHA256_ALGORITHM,
+                    value: hexDigest,
+                },
+            },
+            signature: {
+                content: b64Signature,
+                publicKey: {
+                    content: b64Key,
+                },
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
+// and signature
+function toProposedDSSEEntry(envelope, publicKey) {
+    const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    return {
+        apiVersion: '0.0.1',
+        kind: 'dsse',
+        spec: {
+            proposedContent: {
+                envelope: envelopeJSON,
+                verifiers: [encodedKey],
+            },
+        },
+    };
+}
+// Returns a properly formatted Rekor "intoto" entry for the given DSSE
+// envelope and signature
+function toProposedIntotoEntry(envelope, publicKey) {
+    // Calculate the value for the payloadHash field in the Rekor entry
+    const payloadHash = util_1.crypto
+        .digest(SHA256_ALGORITHM, envelope.payload)
+        .toString('hex');
+    // Calculate the value for the hash field in the Rekor entry
+    const envelopeHash = calculateDSSEHash(envelope, publicKey);
+    // Collect values for re-creating the DSSE envelope.
+    // Double-encode payload and signature because that's what Rekor expects
+    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
+    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
+    const keyid = envelope.signatures[0].keyid;
+    const encodedKey = util_1.encoding.base64Encode(publicKey);
+    // Create the envelope portion of the entry. Note the inclusion of the
+    // publicKey in the signature struct is not a standard part of a DSSE
+    // envelope, but is required by Rekor.
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: payload,
+        signatures: [{ sig, publicKey: encodedKey }],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether. We
+    // need to do the same here so that we can properly recreate the entry for
+    // verification.
+    if (keyid.length > 0) {
+        dsse.signatures[0].keyid = keyid;
+    }
+    return {
+        apiVersion: '0.0.2',
+        kind: 'intoto',
+        spec: {
+            content: {
+                envelope: dsse,
+                hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash },
+                payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash },
+            },
+        },
+    };
+}
+// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
+// There is no standard way to do this, so the scheme we're using is as
+// follows:
+//  * payload is base64 encoded
+//  * signature is base64 encoded (only the first signature is used)
+//  * keyid is included ONLY if it is NOT an empty string
+//  * The resulting JSON is canonicalized and hashed to a hex string
+function calculateDSSEHash(envelope, publicKey) {
+    const dsse = {
+        payloadType: envelope.payloadType,
+        payload: envelope.payload.toString('base64'),
+        signatures: [
+            { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
+        ],
+    };
+    // If the keyid is an empty string, Rekor seems to remove it altogether.
+    if (envelope.signatures[0].keyid.length > 0) {
+        dsse.signatures[0].keyid = envelope.signatures[0].keyid;
+    }
+    return util_1.crypto
+        .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse))
+        .toString('hex');
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js
new file mode 100644
index 0000000000000..6197b09d4cdd9
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js
@@ -0,0 +1,82 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("../../util");
+const client_1 = require("./client");
+const entry_1 = require("./entry");
+exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
+class RekorWitness {
+    constructor(options) {
+        this.entryType = options.entryType;
+        this.tlog = new client_1.TLogClient({
+            ...options,
+            rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL,
+        });
+    }
+    async testify(content, publicKey) {
+        const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType);
+        const entry = await this.tlog.createEntry(proposedEntry);
+        return toTransparencyLogEntry(entry);
+    }
+}
+exports.RekorWitness = RekorWitness;
+function toTransparencyLogEntry(entry) {
+    const logID = Buffer.from(entry.logID, 'hex');
+    // Parse entry body so we can extract the kind and version.
+    const bodyJSON = util_1.encoding.base64Decode(entry.body);
+    const entryBody = JSON.parse(bodyJSON);
+    const promise = entry?.verification?.signedEntryTimestamp
+        ? inclusionPromise(entry.verification.signedEntryTimestamp)
+        : undefined;
+    const proof = entry?.verification?.inclusionProof
+        ? inclusionProof(entry.verification.inclusionProof)
+        : undefined;
+    const tlogEntry = {
+        logIndex: entry.logIndex.toString(),
+        logId: {
+            keyId: logID,
+        },
+        integratedTime: entry.integratedTime.toString(),
+        kindVersion: {
+            kind: entryBody.kind,
+            version: entryBody.apiVersion,
+        },
+        inclusionPromise: promise,
+        inclusionProof: proof,
+        canonicalizedBody: Buffer.from(entry.body, 'base64'),
+    };
+    return {
+        tlogEntries: [tlogEntry],
+    };
+}
+function inclusionPromise(promise) {
+    return {
+        signedEntryTimestamp: Buffer.from(promise, 'base64'),
+    };
+}
+function inclusionProof(proof) {
+    return {
+        logIndex: proof.logIndex.toString(),
+        treeSize: proof.treeSize.toString(),
+        rootHash: Buffer.from(proof.rootHash, 'hex'),
+        hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
+        checkpoint: {
+            envelope: proof.checkpoint,
+        },
+    };
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js
new file mode 100644
index 0000000000000..754de3748dbb3
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../../error");
+const tsa_1 = require("../../external/tsa");
+const util_1 = require("../../util");
+const SHA256_ALGORITHM = 'sha256';
+class TSAClient {
+    constructor(options) {
+        this.tsa = new tsa_1.TimestampAuthority({
+            baseURL: options.tsaBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async createTimestamp(signature) {
+        const request = {
+            artifactHash: util_1.crypto
+                .digest(SHA256_ALGORITHM, signature)
+                .toString('base64'),
+            hashAlgorithm: SHA256_ALGORITHM,
+        };
+        try {
+            return await this.tsa.createTimestamp(request);
+        }
+        catch (err) {
+            (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp');
+        }
+    }
+}
+exports.TSAClient = TSAClient;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js
new file mode 100644
index 0000000000000..d4f5c7c859d10
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js
@@ -0,0 +1,44 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TSAWitness = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const client_1 = require("./client");
+class TSAWitness {
+    constructor(options) {
+        this.tsa = new client_1.TSAClient({
+            tsaBaseURL: options.tsaBaseURL,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async testify(content) {
+        const signature = extractSignature(content);
+        const timestamp = await this.tsa.createTimestamp(signature);
+        return {
+            rfc3161Timestamps: [{ signedTimestamp: timestamp }],
+        };
+    }
+}
+exports.TSAWitness = TSAWitness;
+function extractSignature(content) {
+    switch (content.$case) {
+        case 'dsseEnvelope':
+            return content.dsseEnvelope.signatures[0].sig;
+        case 'messageSignature':
+            return content.messageSignature.signature;
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/package.json b/node_modules/pacote/node_modules/@sigstore/sign/package.json
new file mode 100644
index 0000000000000..4059997ced341
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/sign/package.json
@@ -0,0 +1,46 @@
+{
+  "name": "@sigstore/sign",
+  "version": "4.0.0",
+  "description": "Sigstore signing library",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "devDependencies": {
+    "@sigstore/jest": "^0.0.0",
+    "@sigstore/mock": "^0.11.0",
+    "@sigstore/rekor-types": "^4.0.0",
+    "@types/make-fetch-happen": "^10.0.4",
+    "@types/promise-retry": "^1.1.6"
+  },
+  "dependencies": {
+    "@sigstore/bundle": "^4.0.0",
+    "@sigstore/core": "^3.0.0",
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "make-fetch-happen": "^15.0.0",
+    "proc-log": "^5.0.0",
+    "promise-retry": "^2.0.1"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE b/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js
new file mode 100644
index 0000000000000..06a8143e70da2
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js
@@ -0,0 +1,43 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.appDataPath = appDataPath;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+function appDataPath(name) {
+    const homedir = os_1.default.homedir();
+    switch (process.platform) {
+        /* istanbul ignore next */
+        case 'darwin': {
+            const appSupport = path_1.default.join(homedir, 'Library', 'Application Support');
+            return path_1.default.join(appSupport, name);
+        }
+        /* istanbul ignore next */
+        case 'win32': {
+            const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local');
+            return path_1.default.join(localAppData, name, 'Data');
+        }
+        /* istanbul ignore next */
+        default: {
+            const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share');
+            return path_1.default.join(localData, name);
+        }
+    }
+}
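
For reference, a minimal sketch of what the platform switch above resolves to. appDataPath lives in an internal dist module, so requiring it by path as shown here is illustrative only, not a supported entry point:

    // Hypothetical direct use of the internal helper added above.
    const { appDataPath } = require('@sigstore/tuf/dist/appdata')

    // 'sigstore-js' is the default cache directory name used by dist/index.js below.
    const cacheDir = appDataPath('sigstore-js')
    // darwin  -> ~/Library/Application Support/sigstore-js
    // win32   -> %LOCALAPPDATA%\sigstore-js\Data
    // default -> $XDG_DATA_HOME/sigstore-js, or ~/.local/share/sigstore-js
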
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js
new file mode 100644
index 0000000000000..2931a0a6b3ab5
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js
@@ -0,0 +1,113 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFClient = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const path_1 = __importDefault(require("path"));
+const tuf_js_1 = require("tuf-js");
+const _1 = require(".");
+const target_1 = require("./target");
+const TARGETS_DIR_NAME = 'targets';
+class TUFClient {
+    constructor(options) {
+        const url = new URL(options.mirrorURL);
+        const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, ''));
+        const cachePath = path_1.default.join(options.cachePath, repoName);
+        initTufCache(cachePath);
+        seedCache({
+            cachePath,
+            mirrorURL: options.mirrorURL,
+            tufRootPath: options.rootPath,
+            forceInit: options.forceInit,
+        });
+        this.updater = initClient({
+            mirrorURL: options.mirrorURL,
+            cachePath,
+            forceCache: options.forceCache,
+            retry: options.retry,
+            timeout: options.timeout,
+        });
+    }
+    async refresh() {
+        return this.updater.refresh();
+    }
+    getTarget(targetName) {
+        return (0, target_1.readTarget)(this.updater, targetName);
+    }
+}
+exports.TUFClient = TUFClient;
+// Initializes the TUF cache directory structure including the initial
+// root.json file. If the cache directory does not exist, it will be
+// created. If the targets directory does not exist, it will be created.
+// If the root.json file does not exist, it will be copied from the
+// rootPath argument.
+function initTufCache(cachePath) {
+    const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME);
+    if (!fs_1.default.existsSync(cachePath)) {
+        fs_1.default.mkdirSync(cachePath, { recursive: true });
+    }
+    /* istanbul ignore else */
+    if (!fs_1.default.existsSync(targetsPath)) {
+        fs_1.default.mkdirSync(targetsPath);
+    }
+}
+// Populates the TUF cache with the initial root.json file. If the root.json
+// file does not exist (or we're forcing re-initialization), copy it from either
+// the rootPath argument or from one of the repo seeds.
+function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) {
+    const cachedRootPath = path_1.default.join(cachePath, 'root.json');
+    // If the root.json file does not exist (or we're forcing re-initialization),
+    // populate it either from the supplied rootPath or from one of the repo seeds.
+    /* istanbul ignore else */
+    if (!fs_1.default.existsSync(cachedRootPath) || forceInit) {
+        if (tufRootPath) {
+            fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
+        }
+        else {
+            const seeds = require('../seeds.json');
+            const repoSeed = seeds[mirrorURL];
+            if (!repoSeed) {
+                throw new _1.TUFError({
+                    code: 'TUF_INIT_CACHE_ERROR',
+                    message: `No root.json found for mirror: ${mirrorURL}`,
+                });
+            }
+            fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64'));
+            // Copy any seed targets into the cache
+            Object.entries(repoSeed.targets).forEach(([targetName, target]) => {
+                fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64'));
+            });
+        }
+    }
+}
+function initClient(options) {
+    const config = {
+        fetchTimeout: options.timeout,
+        fetchRetry: options.retry,
+    };
+    return new tuf_js_1.Updater({
+        metadataBaseUrl: options.mirrorURL,
+        targetBaseUrl: `${options.mirrorURL}/targets`,
+        metadataDir: options.cachePath,
+        targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME),
+        forceCache: options.forceCache,
+        config,
+    });
+}
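
A rough sketch of how the constructor above wires the cache together; the option names match those read in the constructor, the concrete values are illustrative, and constructing the client directly like this bypasses the package's public entry points:

    const { TUFClient } = require('@sigstore/tuf/dist/client')

    const client = new TUFClient({
      mirrorURL: 'https://tuf-repo-cdn.sigstore.dev',
      cachePath: '/tmp/sigstore-cache',   // hypothetical cache root
      rootPath: undefined,                // fall back to the bundled seeds.json
      forceInit: false,
      forceCache: false,
      retry: { retries: 2 },
      timeout: 5000,
    })
    // Resulting layout (repoName = encodeURIComponent(host + trimmed pathname)):
    //   /tmp/sigstore-cache/tuf-repo-cdn.sigstore.dev/root.json
    //   /tmp/sigstore-cache/tuf-repo-cdn.sigstore.dev/targets/
    // client.refresh() then returns a promise that updates the cached TUF metadata.
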
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js
new file mode 100644
index 0000000000000..e13971b289ff2
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js
@@ -0,0 +1,12 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFError = void 0;
+class TUFError extends Error {
+    constructor({ code, message, cause, }) {
+        super(message);
+        this.code = code;
+        this.cause = cause;
+        this.name = this.constructor.name;
+    }
+}
+exports.TUFError = TUFError;
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js
new file mode 100644
index 0000000000000..2af5de93ec5d2
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js
@@ -0,0 +1,56 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0;
+exports.getTrustedRoot = getTrustedRoot;
+exports.initTUF = initTUF;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const appdata_1 = require("./appdata");
+const client_1 = require("./client");
+exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev';
+const DEFAULT_CACHE_DIR = 'sigstore-js';
+const DEFAULT_RETRY = { retries: 2 };
+const DEFAULT_TIMEOUT = 5000;
+const TRUSTED_ROOT_TARGET = 'trusted_root.json';
+async function getTrustedRoot(
+/* istanbul ignore next */
+options = {}) {
+    const client = createClient(options);
+    const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET);
+    return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot));
+}
+async function initTUF(
+/* istanbul ignore next */
+options = {}) {
+    const client = createClient(options);
+    return client.refresh().then(() => client);
+}
+// Create a TUF client with default options
+function createClient(options) {
+    /* istanbul ignore next */
+    return new client_1.TUFClient({
+        cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR),
+        rootPath: options.rootPath,
+        mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL,
+        retry: options.retry ?? DEFAULT_RETRY,
+        timeout: options.timeout ?? DEFAULT_TIMEOUT,
+        forceCache: options.forceCache ?? false,
+        forceInit: options.forceInit ?? options.force ?? false,
+    });
+}
+var error_1 = require("./error");
+Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } });
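
The exports above (getTrustedRoot, initTUF, DEFAULT_MIRROR_URL, and the re-exported TUFError) are the package's public surface; a minimal usage sketch, assuming an async context:

    const { getTrustedRoot, initTUF, DEFAULT_MIRROR_URL } = require('@sigstore/tuf')

    // Fetch and parse the Sigstore trusted_root.json target, using the
    // default mirror and cache location when no options are supplied.
    const trustedRoot = await getTrustedRoot()

    // Or keep a refreshed client around for further getTarget() calls.
    const client = await initTUF({ mirrorURL: DEFAULT_MIRROR_URL, forceCache: true })
    const keys = await client.getTarget('registry.npmjs.org%2Fkeys.json')
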
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js b/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js
new file mode 100644
index 0000000000000..5c6675bdfbf5f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js
@@ -0,0 +1,79 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.readTarget = readTarget;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const fs_1 = __importDefault(require("fs"));
+const error_1 = require("./error");
+// Downloads and returns the specified target from the provided TUF Updater.
+async function readTarget(tuf, targetPath) {
+    const path = await getTargetPath(tuf, targetPath);
+    return new Promise((resolve, reject) => {
+        fs_1.default.readFile(path, 'utf-8', (err, data) => {
+            if (err) {
+                reject(new error_1.TUFError({
+                    code: 'TUF_READ_TARGET_ERROR',
+                    message: `error reading target ${path}`,
+                    cause: err,
+                }));
+            }
+            else {
+                resolve(data);
+            }
+        });
+    });
+}
+// Returns the local path to the specified target. If the target is not yet
+// cached locally, the provided TUF Updater will be used to download and
+// cache the target.
+async function getTargetPath(tuf, target) {
+    let targetInfo;
+    try {
+        targetInfo = await tuf.getTargetInfo(target);
+    }
+    catch (err) {
+        throw new error_1.TUFError({
+            code: 'TUF_REFRESH_METADATA_ERROR',
+            message: 'error refreshing TUF metadata',
+            cause: err,
+        });
+    }
+    if (!targetInfo) {
+        throw new error_1.TUFError({
+            code: 'TUF_FIND_TARGET_ERROR',
+            message: `target ${target} not found`,
+        });
+    }
+    let path = await tuf.findCachedTarget(targetInfo);
+    // An empty path here means the target has not been cached locally, or is
+    // out of date. In either case, we need to download it.
+    if (!path) {
+        try {
+            path = await tuf.downloadTarget(targetInfo);
+        }
+        catch (err) {
+            throw new error_1.TUFError({
+                code: 'TUF_DOWNLOAD_TARGET_ERROR',
+                message: `error downloading target ${path}`,
+                cause: err,
+            });
+        }
+    }
+    return path;
+}
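
Each failure path above is wrapped in a TUFError with a distinct code, so callers can branch on the code rather than the message; a hedged sketch, again assuming an async context:

    const { getTrustedRoot, TUFError } = require('@sigstore/tuf')

    try {
      await getTrustedRoot()
    } catch (err) {
      if (err instanceof TUFError) {
        switch (err.code) {
          case 'TUF_REFRESH_METADATA_ERROR': // metadata refresh failed
          case 'TUF_FIND_TARGET_ERROR':      // target missing from metadata
          case 'TUF_DOWNLOAD_TARGET_ERROR':  // download failed
          case 'TUF_READ_TARGET_ERROR':      // cached target unreadable
            console.error(err.code, err.message, err.cause)
        }
      }
      throw err
    }
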
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/package.json b/node_modules/pacote/node_modules/@sigstore/tuf/package.json
new file mode 100644
index 0000000000000..42dad938c2808
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/package.json
@@ -0,0 +1,41 @@
+{
+  "name": "@sigstore/tuf",
+  "version": "4.0.0",
+  "description": "Client for the Sigstore TUF repository",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist",
+    "seeds.json"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "devDependencies": {
+    "@sigstore/jest": "^0.0.0",
+    "@tufjs/repo-mock": "^3.0.1",
+    "@types/make-fetch-happen": "^10.0.4"
+  },
+  "dependencies": {
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "tuf-js": "^4.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json b/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json
new file mode 100644
index 0000000000000..6d48f33afe700
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json
@@ -0,0 +1 @@
+{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"ewogInNpZ25hdHVyZXMiOiBbCiAgewogICAia2V5aWQiOiAiNmYyNjAwODlkNTkyM2RhZjIwMTY2Y2E2NTdjNTQzYWY2MTgzNDZhYjk3MTg4NGE5OTk2MmIwMTk4OGJiZTBjMyIsCiAgICJzaWciOiAiIgogIH0sCiAgewogICAia2V5aWQiOiAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICJzaWciOiAiMzA0NTAyMjEwMGJiZGRkNDY0ZjgwNjZjZWI4OGJhNzg3Mzc1YzEyY2Q2MzMwNjgwZTA4YzI5MTA3MDNlNjUzOGM3MWNjNzlhZDIwMjIwNTE5MGIwNmU0NTM3ZmU5NjFiM2VmODFmZTY4ZWRjZDAwODljMTlmOTE5YWZlZDQyM2I5YWFmZDcwMDY0MTE1MyIKICB9LAogIHsKICAgImtleWlkIjogIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAic2lnIjogIjMwNDQwMjIwNjkzMDZjZDUyNTdmNzMyYTc0MGMxYWZlNjBhOGU0MzNjNWRlNThlYWZlYWRiZTk5YzMzNmM5YzcxZDE5OGNmODAyMjAwZDc3Mzk1M2FlN2RiYzQ4ZDNlNWJhZDlhNmY2NGJhZmZmMTk2YjdlMmFkNGE1MmExOTUxOTM2N2Q0N2RjMDQyIgogIH0sCiAgewogICAia2V5aWQiOiAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICJzaWciOiAiMzA0NDAyMjA0ZDIxYTJlYzgwZGY2NmU2MWY2ZmUyOTEyOTUxZGM0N2RmODM2MDM2ZjhjMGFiMTA4MTZkMzc1ZTcxZGJmNzllMDIyMDU0N2FkY2UxYWZkZjA0ZTY3OTRlZmEyMDNkZDUyNjRjNmY3ZTBlZjc4ZTU3ZmU5MzRiMGQyNmNiOTk0ZWVjNzYiCiAgfSwKICB7CiAgICJrZXlpZCI6ICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIiwKICAgInNpZyI6ICIzMDQ1MDIyMDYwODI2NDk2NTU3MTQ0ZWIxNjQ5ODkzZWQ1ZjZmNGVhNTQ1MzZmZWIwY2E4MmY4Yjg5YWU2NDFiZTM5NzQzZTUwMjIxMDBhZDcxMThiNWU5ZDQ4MzczMjYyMDZlNDEyZmM2ZGEyOTk5OTI1ZDExMDMyOGE3YzE2NmIwNmM2MjQzMzZjOTNmIgogIH0sCiAgewogICAia2V5aWQiOiAiMTgzZTY0ZjM3NjcwZGMxM2NhMGQyODk5NWEzMDUzZjM3NDA5NTRkZGNlNDQzMjFhNDFlNDY1MzRjZjQ0ZTYzMiIsCiAgICJzaWciOiAiMzA0NjAyMjEwMGQ4MTc5NDM5YzJlNzNlYjBjMTczM2FiZWU3ZmFmODMyZGNhZWE3MjYzZWRjYjQ5MTk4OTFjM2EyNDdmMDU5MjMwMjIxMDBlMWE0MzdlMDc5N2U4MDNmOWI3MmRjOWQyZDkyMTU1YjBhMjI3MGMyNGVmZGQ1ZjRiM2E1ZDhmMGIwZjQzMWE3IgogIH0KIF0sCiAic2lnbmVkIjogewogICJfdHlwZSI6ICJyb290IiwKICAiY29uc2lzdGVudF9zbmFwc2hvdCI6IHRydWUsCiAgImV4cGlyZXMiOiAiMjAyNi0wMS0yMlQxMzowNTo1OVoiLAogICJrZXlzIjogewogICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVdSaUdyNStqKzNKNVNzSCtadHI1bkUySDJ3TzdcbkJWK25PM3M5M2dMY2ExOHFUT3pIWTFvV3lBR0R5a01Tc0dUVUJTdDlEK0FuMEtmS3NEMm1mU000MlE9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1vbmxpbmUtdXJpIjogImdjcGttczpwcm9qZWN0cy9zaWdzdG9yZS1yb290LXNpZ25pbmcvbG9jYXRpb25zL2dsb2JhbC9rZXlSaW5ncy9yb290L2NyeXB0b0tleXMvdGltZXN0YW1wL2NyeXB0b0tleVZlcnNpb25zLzEiCiAgIH0sCiAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIjogewogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVNeHBQT0pDSVo1b3RHNDEwNmZHSnNlRVFpM1Y5XG5wa01ZUTR1eVY5VGoxTTdXSFhJeUxHK2prZnZ1RzBnbFExSlpiUlpaQlYzZ0FSNHNvamRHSElTZW93PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGxhbmNlIgogICB9LAogICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1
CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRXpCelZPbUhDUG9qTVZMU0kzNjRXaWlWOE5QckRcbjZJZ1J4Vmxpc2t6L3YreTNKRVI1bWNWR2NPTmxpRGNXTUM1SjJsZkhtalBOUGhiNEg3eG04THpmU0E9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAc2FudGlhZ290b3JyZXMiCiAgIH0sCiAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaW5pa1NzQVFtWWtOZUg1ZVlxL0NuSXpMYWFjT1xueGxTYWF3UURPd3FLeS90Q3F4cTV4eFBTSmMyMUs0V0loczlHeU9rS2Z6dWVZM0dJTHpjTUpaNGNXdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBib2JjYWxsYXdheSIKICAgfSwKICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUwZ2hyaDkyTHcxWXIzaWRHVjVXcUN0TURCOEN4XG4rRDhoZEM0dzJaTE5JcGxWUm9WR0xza1lhM2doZU15T2ppSjhrUGkxNWFRMi8vN1Arb2o3VXZKUEd3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGpvc2h1YWdsIgogICB9LAogICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRUVYc3ozU1pYRmI4ak1WNDJqNnBKbHlqYmpSOEtcbk4zQndvY2V4cTZMTUliNXFzV0tPUXZMTjE2TlVlZkxjNEhzd09vdW1Sc1ZWYWFqU3BRUzZmb2JrUnc9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAbW5tNjc4IgogICB9CiAgfSwKICAicm9sZXMiOiB7CiAgICJyb290IjogewogICAgImtleWlkcyI6IFsKICAgICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICAgIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIiwKICAgICAiYTY4N2U1YmY0ZmFiODJiMGVlNThkNDZlMDVjOTUzNTE0NWEyYzlhZmI0NThmNDNkNDJiNDVjYTBmZGNlMmE3MCIsCiAgICAgIjE4M2U2NGYzNzY3MGRjMTNjYTBkMjg5OTVhMzA1M2YzNzQwOTU0ZGRjZTQ0MzIxYTQxZTQ2NTM0Y2Y0NGU2MzIiCiAgICBdLAogICAgInRocmVzaG9sZCI6IDMKICAgfSwKICAgInNuYXBzaG90IjogewogICAgImtleWlkcyI6IFsKICAgICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMSwKICAgICJ4LXR1Zi1vbi1jaS1leHBpcnktcGVyaW9kIjogMzY1MCwKICAgICJ4LXR1Zi1vbi1jaS1zaWduaW5nLXBlcmlvZCI6IDM2NQogICB9LAogICAidGFyZ2V0cyI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgImU3MWE1NGQ1NDM4MzViYTg2YWRhZDk0NjAzNzljNzY0MWZiODcyNmQxNjRlYTc2NjgwMWExYzUyMmFiYTdlYTIiLAogICAgICIyMmY0Y2FlYzZkOGU2Zjk1NTVhZjY2YjNkNGMzY2IwNmEzYmIyM2ZkYzdlMzljOTE2YzYxZjQ2MmU2ZjUyYjA2IiwKICAgICAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiLA
ogICAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAzCiAgIH0sCiAgICJ0aW1lc3RhbXAiOiB7CiAgICAia2V5aWRzIjogWwogICAgICIwYzg3NDMyYzNiZjA5ZmQ5OTE4OWZkYzMyZmE1ZWFlZGY0ZTRhNWZhYzdiYWI3M2ZhMDRhMmUwZmM2NGFmNmY1IgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAxLAogICAgIngtdHVmLW9uLWNpLWV4cGlyeS1wZXJpb2QiOiA3LAogICAgIngtdHVmLW9uLWNpLXNpZ25pbmctcGVyaW9kIjogNgogICB9CiAgfSwKICAic3BlY192ZXJzaW9uIjogIjEuMCIsCiAgInZlcnNpb24iOiAxMywKICAieC10dWYtb24tY2ktZXhwaXJ5LXBlcmlvZCI6IDE5NywKICAieC10dWYtb24tY2ktc2lnbmluZy1wZXJpb2QiOiA0NgogfQp9","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore-tsa-selfsigned"
      },
      "uri": "https://timestamp.sigstore.dev/api/v1/timestamp",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICEDCCAZagAwIBAgIUOhNULwyQYe68wUMvy4qOiyojiwwwCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMC4xFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEVMBMGA1UEAxMMc2lnc3RvcmUtdHNhMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE4ra2Z8hKNig2T9kFjCAToGG30jky+WQv3BzL+mKvh1SKNR/UwuwsfNCg4sryoYAd8E6isovVA3M4aoNdm9QDi50Z8nTEyvqgfDPtTIwXItfiW/AFf1V7uwkbkAoj0xxco2owaDAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFIn9eUOHz9BlRsMCRscsc1t9tOsDMB8GA1UdIwQYMBaAFJjsAe9/u1H/1JUeb4qImFMHic6/MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMIMAoGCCqGSM49BAMDA2gAMGUCMDtpsV/6KaO0qyF/UMsX2aSUXKQFdoGTptQGc0ftq1csulHPGG6dsmyMNd3JB+G3EQIxAOajvBcjpJmKb4Nv+2Taoj8Uc5+b6ih6FXCCKraSqupe07zqswMcXJTe1cExvHvvlw=="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUV7f0GLDOoEzIh8LXSW80OJiUp14wCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMDkxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEgMB4GA1UEAxMXc2lnc3RvcmUtdHNhLXNlbGZzaWduZWQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQUQNtfRT/ou3YATa6wB/kKTe70cfJwyRIBovMnt8RcJph/COE82uyS6FmppLLL1VBPGcPfpQPYJNXzWwi8icwhKQ6W/Qe2h3oebBb2FHpwNJDqo+TMaC/tdfkv/ElJB72jRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSY7AHvf7tR/9SVHm+KiJhTB4nOvzAKBggqhkjOPQQDAwNpADBmAjEAwGEGrfGZR1cen1R8/DTVMI943LssZmJRtDp/i7SfGHmGRP6gRbuj9vOK3b67Z0QQAjEAuT2H673LQEaHTcyQSZrkp4mX7WwkmF+sVbkYY5mXN+RMH13KUEHHOqASaemYWK/E"
          }
        ]
      },
      "validFor": {
        "start": "2025-07-04T00:00:00Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiLAogICAgICAgICAgICAgICAgICAgICJlbmQiOiAiMjAyNS0wMS0yOVQwMDowMDowMC4wMDBaIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJrZXlJZCI6ICJTSEEyNTY6amwzYndzd3U4MFBqam9rQ2doMG8ydzVjMlU0TGhRQUU1N2dqOWN6MWt6QSIsCiAgICAgICAgICAgICJrZXlVc2FnZSI6ICJucG06YXR0ZXN0YXRpb25zIiwKICAgICAgICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUxT2xiM3pNQUZGeFhLSGlJa1FPNWNKM1lobDVpNlVQcCtJaHV0ZUJKYnVIY0E1VW9nS28wRVd0bFd3VzZLU2FLb1RORVlMN0psQ1FpVm5raEJrdFVnZz09IiwKICAgICAgICAgICAgICAgICJrZXlEZXRhaWxzIjogIlBLSVhfRUNEU0FfUDI1Nl9TSEFfMjU2IiwKICAgICAgICAgICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICAgICAgICAgICAic3RhcnQiOiAiMjAyMi0xMi0wMVQwMDowMDowMC4wMDBaIiwKICAgICAgICAgICAgICAgICAgICAiZW5kIjogIjIwMjUtMDEtMjlUMDA6MDA6MDAuMDAwWiIKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OkRoUTh3UjVBUEJ2RkhMRi8rVGMrQVl2UE9kVHBjSURxT2h4c0JIUndDN1UiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpEaFE4d1I1QVBCdkZITEYvK1RjK0FZdlBPZFRwY0lEcU9oeHNCSFJ3QzdVIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js
new file mode 100644
index 0000000000000..1033fc422aba0
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSESignatureContent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+class DSSESignatureContent {
+    constructor(env) {
+        this.env = env;
+    }
+    compareDigest(digest) {
+        return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload));
+    }
+    compareSignature(signature) {
+        return core_1.crypto.bufferEqual(signature, this.signature);
+    }
+    verifySignature(key) {
+        return core_1.crypto.verify(this.preAuthEncoding, key, this.signature);
+    }
+    get signature() {
+        return this.env.signatures.length > 0
+            ? this.env.signatures[0].sig
+            : Buffer.from('');
+    }
+    // DSSE Pre-Authentication Encoding
+    get preAuthEncoding() {
+        return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload);
+    }
+}
+exports.DSSESignatureContent = DSSESignatureContent;
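
verifySignature() above checks the signature over the DSSE pre-authentication encoding rather than the raw payload. The encoding is computed by @sigstore/core, but it is simple enough to sketch; this inline version is illustrative, not the actual core helper:

    // DSSE PAE: "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
    function preAuthEncoding (payloadType, payload) {
      const prefix = `DSSEv1 ${Buffer.byteLength(payloadType)} ${payloadType} ${payload.length} `
      return Buffer.concat([Buffer.from(prefix), payload])
    }
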
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js
new file mode 100644
index 0000000000000..4287d8032b75f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js
@@ -0,0 +1,57 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toSignedEntity = toSignedEntity;
+exports.signatureContent = signatureContent;
+const core_1 = require("@sigstore/core");
+const dsse_1 = require("./dsse");
+const message_1 = require("./message");
+function toSignedEntity(bundle, artifact) {
+    const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial;
+    const timestamps = [];
+    for (const entry of tlogEntries) {
+        timestamps.push({
+            $case: 'transparency-log',
+            tlogEntry: entry,
+        });
+    }
+    for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) {
+        timestamps.push({
+            $case: 'timestamp-authority',
+            timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp),
+        });
+    }
+    return {
+        signature: signatureContent(bundle, artifact),
+        key: key(bundle),
+        tlogEntries,
+        timestamps,
+    };
+}
+function signatureContent(bundle, artifact) {
+    switch (bundle.content.$case) {
+        case 'dsseEnvelope':
+            return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope);
+        case 'messageSignature':
+            return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact);
+    }
+}
+function key(bundle) {
+    switch (bundle.verificationMaterial.content.$case) {
+        case 'publicKey':
+            return {
+                $case: 'public-key',
+                hint: bundle.verificationMaterial.content.publicKey.hint,
+            };
+        case 'x509CertificateChain':
+            return {
+                $case: 'certificate',
+                certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain
+                    .certificates[0].rawBytes),
+            };
+        case 'certificate':
+            return {
+                $case: 'certificate',
+                certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes),
+            };
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js
new file mode 100644
index 0000000000000..836148c68a8b6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js
@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MessageSignatureContent = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+class MessageSignatureContent {
+    constructor(messageSignature, artifact) {
+        this.signature = messageSignature.signature;
+        this.messageDigest = messageSignature.messageDigest.digest;
+        this.artifact = artifact;
+    }
+    compareSignature(signature) {
+        return core_1.crypto.bufferEqual(signature, this.signature);
+    }
+    compareDigest(digest) {
+        return core_1.crypto.bufferEqual(digest, this.messageDigest);
+    }
+    verifySignature(key) {
+        return core_1.crypto.verify(this.artifact, key, this.signature);
+    }
+}
+exports.MessageSignatureContent = MessageSignatureContent;
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js
new file mode 100644
index 0000000000000..6cb1cd4121343
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js
@@ -0,0 +1,32 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PolicyError = exports.VerificationError = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+class BaseError extends Error {
+    constructor({ code, message, cause, }) {
+        super(message);
+        this.code = code;
+        this.cause = cause;
+        this.name = this.constructor.name;
+    }
+}
+class VerificationError extends BaseError {
+}
+exports.VerificationError = VerificationError;
+class PolicyError extends BaseError {
+}
+exports.PolicyError = PolicyError;
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js
new file mode 100644
index 0000000000000..3222876fcd68b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0;
+/* istanbul ignore file */
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var bundle_1 = require("./bundle");
+Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
+Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
+var trust_1 = require("./trust");
+Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } });
+var verifier_1 = require("./verifier");
+Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } });
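
Putting the exports above together, the verification flow driven by the sigstore front-end package looks roughly like this; the bundle, artifact, and policy are caller-supplied, and the policy shape is simplified here:

    const tuf = require('@sigstore/tuf')
    const { toSignedEntity, toTrustMaterial, Verifier } = require('@sigstore/verify')

    // (inside an async context)
    const trustedRoot = await tuf.getTrustedRoot()
    const trustMaterial = toTrustMaterial(trustedRoot)

    const verifier = new Verifier(trustMaterial)
    const entity = toSignedEntity(bundle, artifact) // bundle/artifact from the caller
    verifier.verify(entity, policy)                 // throws VerificationError/PolicyError on failure
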
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js
new file mode 100644
index 0000000000000..35ad947f0bafc
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js
@@ -0,0 +1,212 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CertificateChainVerifier = void 0;
+exports.verifyCertificateChain = verifyCertificateChain;
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+function verifyCertificateChain(timestamp, leaf, certificateAuthorities) {
+    // Filter list of trusted CAs to those which are valid for the given
+    // timestamp
+    const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, timestamp);
+    /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
+    let error;
+    for (const ca of cas) {
+        try {
+            const verifier = new CertificateChainVerifier({
+                trustedCerts: ca.certChain,
+                untrustedCert: leaf,
+                timestamp,
+            });
+            return verifier.verify();
+        }
+        catch (err) {
+            error = err;
+        }
+    }
+    // If we failed to verify the certificate chain for all of the trusted
+    // CAs, throw the last error we encountered.
+    throw new error_1.VerificationError({
+        code: 'CERTIFICATE_ERROR',
+        message: 'Failed to verify certificate chain',
+        cause: error,
+    });
+}
+class CertificateChainVerifier {
+    constructor(opts) {
+        this.untrustedCert = opts.untrustedCert;
+        this.trustedCerts = opts.trustedCerts;
+        this.localCerts = dedupeCertificates([
+            ...opts.trustedCerts,
+            opts.untrustedCert,
+        ]);
+        this.timestamp = opts.timestamp;
+    }
+    verify() {
+        // Construct certificate path from leaf to root
+        const certificatePath = this.sort();
+        // Perform validation checks on each certificate in the path
+        this.checkPath(certificatePath);
+        const validForDate = certificatePath.every((cert) => cert.validForDate(this.timestamp));
+        if (!validForDate) {
+            throw new error_1.VerificationError({
+                code: 'CERTIFICATE_ERROR',
+                message: 'certificate is not valid or expired at the specified date',
+            });
+        }
+        // Return verified certificate path
+        return certificatePath;
+    }
+    sort() {
+        const leafCert = this.untrustedCert;
+        // Construct all possible paths from the leaf
+        let paths = this.buildPaths(leafCert);
+        // Filter for paths which contain a trusted certificate
+        paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert)));
+        if (paths.length === 0) {
+            throw new error_1.VerificationError({
+                code: 'CERTIFICATE_ERROR',
+                message: 'no trusted certificate path found',
+            });
+        }
+        // Find the shortest of possible paths
+        /* istanbul ignore next */
+        const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr);
+        // Construct chain from shortest path
+        // Removes the last certificate in the path, which will be a second copy
+        // of the root certificate given that the root is self-signed.
+        return [leafCert, ...path].slice(0, -1);
+    }
+    // Recursively build all possible paths from the leaf to the root
+    buildPaths(certificate) {
+        const paths = [];
+        const issuers = this.findIssuer(certificate);
+        if (issuers.length === 0) {
+            throw new error_1.VerificationError({
+                code: 'CERTIFICATE_ERROR',
+                message: 'no valid certificate path found',
+            });
+        }
+        for (let i = 0; i < issuers.length; i++) {
+            const issuer = issuers[i];
+            // Base case - issuer is self
+            if (issuer.equals(certificate)) {
+                paths.push([certificate]);
+                continue;
+            }
+            // Recursively build path for the issuer
+            const subPaths = this.buildPaths(issuer);
+            // Construct paths by appending the issuer to each subpath
+            for (let j = 0; j < subPaths.length; j++) {
+                paths.push([issuer, ...subPaths[j]]);
+            }
+        }
+        return paths;
+    }
+    // Return all possible issuers for the given certificate
+    findIssuer(certificate) {
+        let issuers = [];
+        let keyIdentifier;
+        // Exit early if the certificate is self-signed
+        if (certificate.subject.equals(certificate.issuer)) {
+            if (certificate.verify()) {
+                return [certificate];
+            }
+        }
+        // If the certificate has an authority key identifier, use that
+        // to find the issuer
+        if (certificate.extAuthorityKeyID) {
+            keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier;
+            // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber
+            // though Fulcio doesn't appear to use these
+        }
+        // Find possible issuers by comparing the authorityKeyID/subjectKeyID
+        // or issuer/subject. Potential issuers are added to the result array.
+        this.localCerts.forEach((possibleIssuer) => {
+            if (keyIdentifier) {
+                /* istanbul ignore else */
+                if (possibleIssuer.extSubjectKeyID) {
+                    if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) {
+                        issuers.push(possibleIssuer);
+                    }
+                    return;
+                }
+            }
+            // Fallback to comparing certificate issuer and subject if
+            // subjectKey/authorityKey extensions are not present
+            if (possibleIssuer.subject.equals(certificate.issuer)) {
+                issuers.push(possibleIssuer);
+            }
+        });
+        // Remove any issuers which fail to verify the certificate
+        issuers = issuers.filter((issuer) => {
+            try {
+                return certificate.verify(issuer);
+            }
+            catch (ex) {
+                /* istanbul ignore next - should never error */
+                return false;
+            }
+        });
+        return issuers;
+    }
+    checkPath(path) {
+        /* istanbul ignore if */
+        if (path.length < 1) {
+            throw new error_1.VerificationError({
+                code: 'CERTIFICATE_ERROR',
+                message: 'certificate chain must contain at least one certificate',
+            });
+        }
+        // Ensure that all certificates beyond the leaf are CAs
+        const validCAs = path.slice(1).every((cert) => cert.isCA);
+        if (!validCAs) {
+            throw new error_1.VerificationError({
+                code: 'CERTIFICATE_ERROR',
+                message: 'intermediate certificate is not a CA',
+            });
+        }
+        // Certificate's issuer must match the subject of the next certificate
+        // in the chain
+        for (let i = path.length - 2; i >= 0; i--) {
+            /* istanbul ignore if */
+            if (!path[i].issuer.equals(path[i + 1].subject)) {
+                throw new error_1.VerificationError({
+                    code: 'CERTIFICATE_ERROR',
+                    message: 'incorrect certificate name chaining',
+                });
+            }
+        }
+        // Check pathlength constraints
+        for (let i = 0; i < path.length; i++) {
+            const cert = path[i];
+            // If the certificate is a CA, check the path length
+            if (cert.extBasicConstraints?.isCA) {
+                const pathLength = cert.extBasicConstraints.pathLenConstraint;
+                // The path length, if set, indicates how many intermediate
+                // certificates (NOT including the leaf) are allowed to follow. The
+                // pathLength constraint of any intermediate CA certificate MUST be
+                // greater than or equal to its own depth in the chain (with an
+                // adjustment for the leaf certificate)
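+                // e.g. the CA at path[2] must have a pathLenConstraint of at
+                // least 1, since one intermediate (path[1]) sits between it
+                // and the leaf certificate at path[0].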
+                if (pathLength !== undefined && pathLength < i - 1) {
+                    throw new error_1.VerificationError({
+                        code: 'CERTIFICATE_ERROR',
+                        message: 'path length constraint exceeded',
+                    });
+                }
+            }
+        }
+    }
+}
+exports.CertificateChainVerifier = CertificateChainVerifier;
+// Remove duplicate certificates from the array
+function dedupeCertificates(certs) {
+    for (let i = 0; i < certs.length; i++) {
+        for (let j = i + 1; j < certs.length; j++) {
+            if (certs[i].equals(certs[j])) {
+                certs.splice(j, 1);
+                j--;
+            }
+        }
+    }
+    return certs;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js
new file mode 100644
index 0000000000000..c966ccb1e925e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js
@@ -0,0 +1,67 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyPublicKey = verifyPublicKey;
+exports.verifyCertificate = verifyCertificate;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const certificate_1 = require("./certificate");
+const sct_1 = require("./sct");
+const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1';
+const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8';
+function verifyPublicKey(hint, timestamps, trustMaterial) {
+    const key = trustMaterial.publicKey(hint);
+    timestamps.forEach((timestamp) => {
+        if (!key.validFor(timestamp)) {
+            throw new error_1.VerificationError({
+                code: 'PUBLIC_KEY_ERROR',
+                message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`,
+            });
+        }
+    });
+    return { key: key.publicKey };
+}
+function verifyCertificate(leaf, timestamps, trustMaterial) {
+    // Check that leaf certificate chains to a trusted CA
+    let path = [];
+    timestamps.forEach((timestamp) => {
+        path = (0, certificate_1.verifyCertificateChain)(timestamp, leaf, trustMaterial.certificateAuthorities);
+    });
+    return {
+        scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs),
+        signer: getSigner(path[0]),
+    };
+}
+function getSigner(cert) {
+    let issuer;
+    const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2);
+    /* istanbul ignore next */
+    if (issuerExtension) {
+        issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii');
+    }
+    else {
+        issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii');
+    }
+    const identity = {
+        extensions: { issuer },
+        subjectAlternativeName: cert.subjectAltName,
+    };
+    return {
+        key: core_1.crypto.createPublicKey(cert.publicKey),
+        identity,
+    };
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js
new file mode 100644
index 0000000000000..8eca48738096e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js
@@ -0,0 +1,78 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySCTs = verifySCTs;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+function verifySCTs(cert, issuer, ctlogs) {
+    let extSCT;
+    // Verifying the SCT requires that we remove the SCT extension and
+    // re-encode the TBS structure to DER -- this value is part of the data
+    // over which the signature is calculated. Since this is a destructive action
+    // we create a copy of the certificate so we can remove the SCT extension
+    // without affecting the original certificate.
+    const clone = cert.clone();
+    // Intentionally not using the findExtension method here because we want to
+    // remove the SCT extension from the certificate before calculating the
+    // PreCert structure
+    for (let i = 0; i < clone.extensions.length; i++) {
+        const ext = clone.extensions[i];
+        if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) {
+            extSCT = new core_1.X509SCTExtension(ext);
+            // Remove the extension from the certificate
+            clone.extensions.splice(i, 1);
+            break;
+        }
+    }
+    // No SCT extension found to verify
+    if (!extSCT) {
+        return [];
+    }
+    // Found an SCT extension but it has no SCTs
+    /* istanbul ignore if -- too difficult to fabricate test case for this */
+    if (extSCT.signedCertificateTimestamps.length === 0) {
+        return [];
+    }
+    // Construct the PreCert structure
+    // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
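+    // The serialized PreCert has the layout:
+    //   issuer_key_hash (32 bytes) || tbs_length (3 bytes) || tbs_certificate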
+    const preCert = new core_1.ByteStream();
+    // Calculate hash of the issuer's public key
+    const issuerId = core_1.crypto.digest('sha256', issuer.publicKey);
+    preCert.appendView(issuerId);
+    // Re-encodes the certificate to DER after removing the SCT extension
+    const tbs = clone.tbsCertificate.toDER();
+    preCert.appendUint24(tbs.length);
+    preCert.appendView(tbs);
+    // Calculate and return the verification results for each SCT
+    return extSCT.signedCertificateTimestamps.map((sct) => {
+        // Find the ctlog instance that corresponds to the SCT's logID
+        const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, {
+            logID: sct.logID,
+            targetDate: sct.datetime,
+        });
+        // See if the SCT is valid for any of the CT logs
+        const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey));
+        if (!verified) {
+            throw new error_1.VerificationError({
+                code: 'CERTIFICATE_ERROR',
+                message: 'SCT verification failed',
+            });
+        }
+        return sct.logID;
+    });
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js
new file mode 100644
index 0000000000000..f5960cf047b84
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySubjectAlternativeName = verifySubjectAlternativeName;
+exports.verifyExtensions = verifyExtensions;
+const error_1 = require("./error");
+function verifySubjectAlternativeName(policyIdentity, signerIdentity) {
+    if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) {
+        throw new error_1.PolicyError({
+            code: 'UNTRUSTED_SIGNER_ERROR',
+            message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`,
+        });
+    }
+}
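+// Checks that every extension required by the policy is present on the signer's
+// identity with an identical value, e.g. a policy of
+// { issuer: 'https://accounts.example.com' } (illustrative value) only matches
+// a signer whose certificate carries that same issuer extension.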
+function verifyExtensions(policyExtensions, signerExtensions = {}) {
+    let key;
+    for (key in policyExtensions) {
+        if (signerExtensions[key] !== policyExtensions[key]) {
+            throw new error_1.PolicyError({
+                code: 'UNTRUSTED_SIGNER_ERROR',
+                message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`,
+            });
+        }
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
new file mode 100644
index 0000000000000..46619b675f886
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
@@ -0,0 +1,157 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyCheckpoint = verifyCheckpoint;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+// Separator between the note and the signatures in a checkpoint
+const CHECKPOINT_SEPARATOR = '\n\n';
+// Checkpoint signatures are of the following form:
+// "–  \n"
+// where:
+// - the prefix is an emdash (U+2014).
+// -  gives a human-readable representation of the signing ID.
+// -  is the first 4 bytes of the SHA256 hash of the
+//   associated public key followed by the signature bytes.
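+// For example, a signature line (with hypothetical values) looks like:
+//   "\u2014 rekor.sigstore.dev wNI9ajBEAiB...base64 key hint + signature...\n"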
+const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
+// Verifies the checkpoint value in the given tlog entry. There are two steps
+// to the verification:
+// 1. Verify that all signatures in the checkpoint can be verified against a
+//    trusted public key
+// 2. Verify that the root hash in the checkpoint matches the root hash in the
+//    inclusion proof
+// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
+function verifyCheckpoint(entry, tlogs) {
+    // Filter tlog instances to just those which were valid at the time of the
+    // entry
+    const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
+        targetDate: new Date(Number(entry.integratedTime) * 1000),
+    });
+    const inclusionProof = entry.inclusionProof;
+    const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope);
+    const checkpoint = LogCheckpoint.fromString(signedNote.note);
+    // Verify that the signatures in the checkpoint are all valid
+    if (!verifySignedNote(signedNote, validTLogs)) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_INCLUSION_PROOF_ERROR',
+            message: 'invalid checkpoint signature',
+        });
+    }
+    // Verify that the root hash from the checkpoint matches the root hash in the
+    // inclusion proof
+    if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_INCLUSION_PROOF_ERROR',
+            message: 'root hash mismatch',
+        });
+    }
+}
+// Verifies the signatures in the SignedNote. For each signature, the
+// corresponding transparency log is looked up by the key hint and the
+// signature is verified against the public key in the transparency log.
+// Returns true only if every signature can be verified; returns false otherwise.
+function verifySignedNote(signedNote, tlogs) {
+    const data = Buffer.from(signedNote.note, 'utf-8');
+    return signedNote.signatures.every((signature) => {
+        // Find the transparency log instance with the matching key hint
+        const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint));
+        if (!tlog) {
+            return false;
+        }
+        return core_1.crypto.verify(data, tlog.publicKey, signature.signature);
+    });
+}
+// SignedNote represents a signed note from a transparency log checkpoint. Consists
+// of a body (or note) and one or more signatures calculated over the body. See
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
+class SignedNote {
+    constructor(note, signatures) {
+        this.note = note;
+        this.signatures = signatures;
+    }
+    // Deserialize a SignedNote from a string
+    static fromString(envelope) {
+        if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
+            throw new error_1.VerificationError({
+                code: 'TLOG_INCLUSION_PROOF_ERROR',
+                message: 'missing checkpoint separator',
+            });
+        }
+        // Split the note into the header and the data portions at the separator
+        const split = envelope.indexOf(CHECKPOINT_SEPARATOR);
+        const header = envelope.slice(0, split + 1);
+        const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length);
+        // Find all the signature lines in the data portion
+        const matches = data.matchAll(SIGNATURE_REGEX);
+        // Parse each of the matched signature lines into the name and signature.
+        // The first four bytes of the signature are the key hint (should match the
+        // first four bytes of the log ID), and the rest is the signature itself.
+        const signatures = Array.from(matches, (match) => {
+            const [, name, signature] = match;
+            const sigBytes = Buffer.from(signature, 'base64');
+            if (sigBytes.length < 5) {
+                throw new error_1.VerificationError({
+                    code: 'TLOG_INCLUSION_PROOF_ERROR',
+                    message: 'malformed checkpoint signature',
+                });
+            }
+            return {
+                name,
+                keyHint: sigBytes.subarray(0, 4),
+                signature: sigBytes.subarray(4),
+            };
+        });
+        if (signatures.length === 0) {
+            throw new error_1.VerificationError({
+                code: 'TLOG_INCLUSION_PROOF_ERROR',
+                message: 'no signatures found in checkpoint',
+            });
+        }
+        return new SignedNote(header, signatures);
+    }
+}
+// LogCheckpoint represents a transparency log checkpoint. Consists of the
+// following:
+//  - origin: the name of the transparency log
+//  - logSize: the size of the log at the time of the checkpoint
+//  - logHash: the root hash of the log at the time of the checkpoint
+//  - rest: any additional lines in the checkpoint body (extension lines)
+// See:
+// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
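+// A checkpoint body therefore looks like (values are illustrative):
+//   rekor.sigstore.dev - 1193050959916656506
+//   13836392
+//   c2FtcGxlIHJvb3QgaGFzaA==
+// with any lines after the root hash captured in `rest`.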
+class LogCheckpoint {
+    constructor(origin, logSize, logHash, rest) {
+        this.origin = origin;
+        this.logSize = logSize;
+        this.logHash = logHash;
+        this.rest = rest;
+    }
+    static fromString(note) {
+        const lines = note.trimEnd().split('\n');
+        if (lines.length < 3) {
+            throw new error_1.VerificationError({
+                code: 'TLOG_INCLUSION_PROOF_ERROR',
+                message: 'too few lines in checkpoint header',
+            });
+        }
+        const origin = lines[0];
+        const logSize = BigInt(lines[1]);
+        const rootHash = Buffer.from(lines[2], 'base64');
+        const rest = lines.slice(3);
+        return new LogCheckpoint(origin, logSize, rootHash, rest);
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js
new file mode 100644
index 0000000000000..56e948de19338
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js
@@ -0,0 +1,46 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTSATimestamp = verifyTSATimestamp;
+exports.verifyTLogTimestamp = verifyTLogTimestamp;
+const error_1 = require("../error");
+const checkpoint_1 = require("./checkpoint");
+const merkle_1 = require("./merkle");
+const set_1 = require("./set");
+const tsa_1 = require("./tsa");
+function verifyTSATimestamp(timestamp, data, timestampAuthorities) {
+    (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities);
+    return {
+        type: 'timestamp-authority',
+        logID: timestamp.signerSerialNumber,
+        timestamp: timestamp.signingTime,
+    };
+}
+function verifyTLogTimestamp(entry, tlogAuthorities) {
+    let inclusionVerified = false;
+    if (isTLogEntryWithInclusionPromise(entry)) {
+        (0, set_1.verifyTLogSET)(entry, tlogAuthorities);
+        inclusionVerified = true;
+    }
+    if (isTLogEntryWithInclusionProof(entry)) {
+        (0, merkle_1.verifyMerkleInclusion)(entry);
+        (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities);
+        inclusionVerified = true;
+    }
+    if (!inclusionVerified) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_MISSING_INCLUSION_ERROR',
+            message: 'inclusion could not be verified',
+        });
+    }
+    return {
+        type: 'transparency-log',
+        logID: entry.logId.keyId,
+        timestamp: new Date(Number(entry.integratedTime) * 1000),
+    };
+}
+function isTLogEntryWithInclusionPromise(entry) {
+    return entry.inclusionPromise !== undefined;
+}
+function isTLogEntryWithInclusionProof(entry) {
+    return entry.inclusionProof !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js
new file mode 100644
index 0000000000000..f57cae42002bd
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js
@@ -0,0 +1,104 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyMerkleInclusion = verifyMerkleInclusion;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
+const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
+function verifyMerkleInclusion(entry) {
+    const inclusionProof = entry.inclusionProof;
+    const logIndex = BigInt(inclusionProof.logIndex);
+    const treeSize = BigInt(inclusionProof.treeSize);
+    if (logIndex < 0n || logIndex >= treeSize) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_INCLUSION_PROOF_ERROR',
+            message: `invalid index: ${logIndex}`,
+        });
+    }
+    // Figure out which subset of hashes corresponds to the inner and border
+    // nodes
+    const { inner, border } = decompInclProof(logIndex, treeSize);
+    if (inclusionProof.hashes.length !== inner + border) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_INCLUSION_PROOF_ERROR',
+            message: 'invalid hash count',
+        });
+    }
+    const innerHashes = inclusionProof.hashes.slice(0, inner);
+    const borderHashes = inclusionProof.hashes.slice(inner);
+    // The entry's hash is the leaf hash
+    const leafHash = hashLeaf(entry.canonicalizedBody);
+    // Chain the hashes belonging to the inner and border portions
+    const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes);
+    // Calculated hash should match the root hash in the inclusion proof
+    if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_INCLUSION_PROOF_ERROR',
+            message: 'calculated root hash does not match inclusion proof',
+        });
+    }
+}
+// Breaks down inclusion proof for a leaf at the specified index in a tree of
+// the specified size. The split point is where paths to the index leaf and
+// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof
+// parts.
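+// e.g. for a leaf at index 6n in a tree of size 8n this yields
+// { inner: 1, border: 2 } (3 proof hashes in total).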
+function decompInclProof(index, size) {
+    const inner = innerProofSize(index, size);
+    const border = onesCount(index >> BigInt(inner));
+    return { inner, border };
+}
+// Computes a subtree hash for a node on or below the tree's right border.
+// Assumes the provided proof hashes are ordered from lower to higher levels
+// and seed is the initial hash of the node specified by the index.
+function chainInner(seed, hashes, index) {
+    return hashes.reduce((acc, h, i) => {
+        if ((index >> BigInt(i)) & BigInt(1)) {
+            return hashChildren(h, acc);
+        }
+        else {
+            return hashChildren(acc, h);
+        }
+    }, seed);
+}
+// Computes a subtree hash for nodes along the tree's right border.
+function chainBorderRight(seed, hashes) {
+    return hashes.reduce((acc, h) => hashChildren(h, acc), seed);
+}
+function innerProofSize(index, size) {
+    return bitLength(index ^ (size - BigInt(1)));
+}
+// Counts the number of ones in the binary representation of the given number.
+// https://en.wikipedia.org/wiki/Hamming_weight
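+// e.g. onesCount(11n) === 3 (0b1011 has three set bits).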
+function onesCount(num) {
+    return num.toString(2).split('1').length - 1;
+}
+// Returns the number of bits necessary to represent an integer in binary.
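+// e.g. bitLength(5n) === 3 (0b101).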
+function bitLength(n) {
+    if (n === 0n) {
+        return 0;
+    }
+    return n.toString(2).length;
+}
+// Hashing logic according to RFC6962.
+// https://datatracker.ietf.org/doc/html/rfc6962#section-2
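+// Leaf hash = SHA-256(0x00 || leaf); interior node hash = SHA-256(0x01 || left || right).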
+function hashChildren(left, right) {
+    return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right);
+}
+function hashLeaf(leaf) {
+    return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf);
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js
new file mode 100644
index 0000000000000..5d3f47bb88746
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js
@@ -0,0 +1,60 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogSET = verifyTLogSET;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const trust_1 = require("../trust");
+// Verifies the SET for the given entry against the list of trusted
+// transparency logs. Throws a VerificationError if the SET cannot be verified
+// against any of the trusted logs.
+function verifyTLogSET(entry, tlogs) {
+    // Filter the list of tlog instances to only those which might be able to
+    // verify the SET
+    const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
+        logID: entry.logId.keyId,
+        targetDate: new Date(Number(entry.integratedTime) * 1000),
+    });
+    // Check to see if we can verify the SET against any of the valid tlogs
+    const verified = validTLogs.some((tlog) => {
+        // Re-create the original Rekor verification payload
+        const payload = toVerificationPayload(entry);
+        // Canonicalize the payload and turn into a buffer for verification
+        const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8');
+        // Extract the SET from the tlog entry
+        const signature = entry.inclusionPromise.signedEntryTimestamp;
+        return core_1.crypto.verify(data, tlog.publicKey, signature);
+    });
+    if (!verified) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_INCLUSION_PROMISE_ERROR',
+            message: 'inclusion promise could not be verified',
+        });
+    }
+}
+// Returns a properly formatted "VerificationPayload" for one of the
+// transparency log entries in the given bundle which can be used for SET
+// verification.
+function toVerificationPayload(entry) {
+    const { integratedTime, logIndex, logId, canonicalizedBody } = entry;
+    return {
+        body: canonicalizedBody.toString('base64'),
+        integratedTime: Number(integratedTime),
+        logIndex: Number(logIndex),
+        logID: logId.keyId.toString('hex'),
+    };
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js
new file mode 100644
index 0000000000000..0da4a3de8247f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js
@@ -0,0 +1,63 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp;
+const core_1 = require("@sigstore/core");
+const error_1 = require("../error");
+const certificate_1 = require("../key/certificate");
+const trust_1 = require("../trust");
+function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) {
+    const signingTime = timestamp.signingTime;
+    // Filter for CAs which were valid at the time of signing
+    timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, signingTime);
+    // Filter for CAs which match serial and issuer embedded in the timestamp
+    timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, {
+        serialNumber: timestamp.signerSerialNumber,
+        issuer: timestamp.signerIssuer,
+    });
+    // Check that we can verify the timestamp with AT LEAST ONE of the remaining
+    // CAs
+    const verified = timestampAuthorities.some((ca) => {
+        try {
+            verifyTimestampForCA(timestamp, data, ca);
+            return true;
+        }
+        catch (e) {
+            return false;
+        }
+    });
+    if (!verified) {
+        throw new error_1.VerificationError({
+            code: 'TIMESTAMP_ERROR',
+            message: 'timestamp could not be verified',
+        });
+    }
+}
+function verifyTimestampForCA(timestamp, data, ca) {
+    const [leaf, ...cas] = ca.certChain;
+    const signingKey = core_1.crypto.createPublicKey(leaf.publicKey);
+    const signingTime = timestamp.signingTime;
+    // Verify the certificate chain for the provided CA
+    try {
+        new certificate_1.CertificateChainVerifier({
+            untrustedCert: leaf,
+            trustedCerts: cas,
+            timestamp: signingTime,
+        }).verify();
+    }
+    catch (e) {
+        throw new error_1.VerificationError({
+            code: 'TIMESTAMP_ERROR',
+            message: 'invalid certificate chain',
+        });
+    }
+    // Check that the signing certificate's key can be used to verify the
+    // timestamp signature.
+    timestamp.verify(data, signingKey);
+}
+// Filters the list of CAs to those which have a leaf signing certificate which
+// matches the given serial number and issuer.
+function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) {
+    return timestampAuthorities.filter((ca) => ca.certChain.length > 0 &&
+        core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) &&
+        core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer));
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js
new file mode 100644
index 0000000000000..d71ed8c6e7ad9
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js
@@ -0,0 +1,57 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyDSSETLogBody = verifyDSSETLogBody;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+// Compare the given dsse tlog entry to the given bundle
+function verifyDSSETLogBody(tlogEntry, content) {
+    switch (tlogEntry.apiVersion) {
+        case '0.0.1':
+            return verifyDSSE001TLogBody(tlogEntry, content);
+        default:
+            throw new error_1.VerificationError({
+                code: 'TLOG_BODY_ERROR',
+                message: `unsupported dsse version: ${tlogEntry.apiVersion}`,
+            });
+    }
+}
+// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope.
+function verifyDSSE001TLogBody(tlogEntry, content) {
+    // Ensure the bundle's DSSE only contains a single signature
+    if (tlogEntry.spec.signatures?.length !== 1) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'signature count mismatch',
+        });
+    }
+    const tlogSig = tlogEntry.spec.signatures[0].signature;
+    // Ensure that the signature in the bundle's DSSE matches the tlog entry
+    if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'tlog entry signature mismatch',
+        });
+    }
+    // Ensure the digest of the bundle's DSSE payload matches the digest in the
+    // tlog entry
+    const tlogHash = tlogEntry.spec.payloadHash?.value || '';
+    if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'DSSE payload hash mismatch',
+        });
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
new file mode 100644
index 0000000000000..c4aa345b57ba7
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
@@ -0,0 +1,51 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+// Compare the given hashedrekord tlog entry to the given bundle
+function verifyHashedRekordTLogBody(tlogEntry, content) {
+    switch (tlogEntry.apiVersion) {
+        case '0.0.1':
+            return verifyHashedrekord001TLogBody(tlogEntry, content);
+        default:
+            throw new error_1.VerificationError({
+                code: 'TLOG_BODY_ERROR',
+                message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`,
+            });
+    }
+}
+// Compare the given hashedrekord v0.0.1 tlog entry to the given message
+// signature
+function verifyHashedrekord001TLogBody(tlogEntry, content) {
+    // Ensure that the bundle's message signature matches the tlog entry
+    const tlogSig = tlogEntry.spec.signature.content || '';
+    if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'signature mismatch',
+        });
+    }
+    // Ensure that the bundle's message digest matches the tlog entry
+    const tlogDigest = tlogEntry.spec.data.hash?.value || '';
+    if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'digest mismatch',
+        });
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js
new file mode 100644
index 0000000000000..da235360c594a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js
@@ -0,0 +1,47 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyTLogBody = verifyTLogBody;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+const intoto_1 = require("./intoto");
+// Verifies that the given tlog entry matches the supplied signature content.
+function verifyTLogBody(entry, sigContent) {
+    const { kind, version } = entry.kindVersion;
+    const body = JSON.parse(entry.canonicalizedBody.toString('utf8'));
+    if (kind !== body.kind || version !== body.apiVersion) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`,
+        });
+    }
+    switch (body.kind) {
+        case 'dsse':
+            return (0, dsse_1.verifyDSSETLogBody)(body, sigContent);
+        case 'intoto':
+            return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent);
+        case 'hashedrekord':
+            return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent);
+        /* istanbul ignore next */
+        default:
+            throw new error_1.VerificationError({
+                code: 'TLOG_BODY_ERROR',
+                message: `unsupported kind: ${kind}`,
+            });
+    }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js
new file mode 100644
index 0000000000000..9096ae9418cc3
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js
@@ -0,0 +1,62 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifyIntotoTLogBody = verifyIntotoTLogBody;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const error_1 = require("../error");
+// Compare the given intoto tlog entry to the given bundle
+function verifyIntotoTLogBody(tlogEntry, content) {
+    switch (tlogEntry.apiVersion) {
+        case '0.0.2':
+            return verifyIntoto002TLogBody(tlogEntry, content);
+        default:
+            throw new error_1.VerificationError({
+                code: 'TLOG_BODY_ERROR',
+                message: `unsupported intoto version: ${tlogEntry.apiVersion}`,
+            });
+    }
+}
+// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope.
+function verifyIntoto002TLogBody(tlogEntry, content) {
+    // Ensure the bundle's DSSE contains a single signature
+    if (tlogEntry.spec.content.envelope.signatures?.length !== 1) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'signature count mismatch',
+        });
+    }
+    // Signature is double-base64-encoded in the tlog entry
+    const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig);
+    // Ensure that the signature in the bundle's DSSE matches the tlog entry
+    if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'tlog entry signature mismatch',
+        });
+    }
+    // Ensure the digest of the bundle's DSSE payload matches the digest in the
+    // tlog entry
+    const tlogHash = tlogEntry.spec.content.payloadHash?.value || '';
+    if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) {
+        throw new error_1.VerificationError({
+            code: 'TLOG_BODY_ERROR',
+            message: 'DSSE payload hash mismatch',
+        });
+    }
+}
+function base64Decode(str) {
+    return Buffer.from(str, 'base64').toString('utf-8');
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js
new file mode 100644
index 0000000000000..98bd25cd70e59
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js
@@ -0,0 +1,23 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.filterCertAuthorities = filterCertAuthorities;
+exports.filterTLogAuthorities = filterTLogAuthorities;
+function filterCertAuthorities(certAuthorities, timestamp) {
+    return certAuthorities.filter((ca) => {
+        return ca.validFor.start <= timestamp && ca.validFor.end >= timestamp;
+    });
+}
+// Filter the list of tlog instances to only those which match the given log
+// ID and have public keys which are valid for the given integrated time.
+function filterTLogAuthorities(tlogAuthorities, criteria) {
+    return tlogAuthorities.filter((tlog) => {
+        // If we're filtering by log ID and the log IDs don't match, we can't use
+        // this tlog
+        if (criteria.logID && !tlog.logID.equals(criteria.logID)) {
+            return false;
+        }
+        // Check that the integrated time is within the validFor range
+        return (tlog.validFor.start <= criteria.targetDate &&
+            criteria.targetDate <= tlog.validFor.end);
+    });
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js
new file mode 100644
index 0000000000000..bfab2eb4f9975
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js
@@ -0,0 +1,86 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
+exports.toTrustMaterial = toTrustMaterial;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const protobuf_specs_1 = require("@sigstore/protobuf-specs");
+const error_1 = require("../error");
+const BEGINNING_OF_TIME = new Date(0);
+const END_OF_TIME = new Date(8640000000000000);
+var filter_1 = require("./filter");
+Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } });
+Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } });
+function toTrustMaterial(root, keys) {
+    const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys);
+    return {
+        certificateAuthorities: root.certificateAuthorities.map(createCertAuthority),
+        timestampAuthorities: root.timestampAuthorities.map(createCertAuthority),
+        tlogs: root.tlogs.map(createTLogAuthority),
+        ctlogs: root.ctlogs.map(createTLogAuthority),
+        publicKey: keyFinder,
+    };
+}
+function createTLogAuthority(tlogInstance) {
+    const keyDetails = tlogInstance.publicKey.keyDetails;
+    const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 ||
+        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 ||
+        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 ||
+        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 ||
+        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256
+        ? 'pkcs1'
+        : 'spki';
+    return {
+        logID: tlogInstance.logId.keyId,
+        publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType),
+        validFor: {
+            start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME,
+            end: tlogInstance.publicKey.validFor?.end || END_OF_TIME,
+        },
+    };
+}
+function createCertAuthority(ca) {
+    /* istanbul ignore next */
+    return {
+        certChain: ca.certChain.certificates.map((cert) => {
+            return core_1.X509Certificate.parse(cert.rawBytes);
+        }),
+        validFor: {
+            start: ca.validFor?.start || BEGINNING_OF_TIME,
+            end: ca.validFor?.end || END_OF_TIME,
+        },
+    };
+}
+function keyLocator(keys) {
+    return (hint) => {
+        const key = (keys || {})[hint];
+        if (!key) {
+            throw new error_1.VerificationError({
+                code: 'PUBLIC_KEY_ERROR',
+                message: `key not found: ${hint}`,
+            });
+        }
+        return {
+            publicKey: core_1.crypto.createPublicKey(key.rawBytes),
+            validFor: (date) => {
+                /* istanbul ignore next */
+                return ((key.validFor?.start || BEGINNING_OF_TIME) <= date &&
+                    (key.validFor?.end || END_OF_TIME) >= date);
+            },
+        };
+    };
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js
new file mode 100644
index 0000000000000..6a9d11a3b6f8f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js
@@ -0,0 +1,143 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Verifier = void 0;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const util_1 = require("util");
+const error_1 = require("./error");
+const key_1 = require("./key");
+const policy_1 = require("./policy");
+const timestamp_1 = require("./timestamp");
+const tlog_1 = require("./tlog");
+class Verifier {
+    constructor(trustMaterial, options = {}) {
+        this.trustMaterial = trustMaterial;
+        this.options = {
+            ctlogThreshold: options.ctlogThreshold ?? 1,
+            tlogThreshold: options.tlogThreshold ?? 1,
+            tsaThreshold: options.tsaThreshold ?? 0,
+        };
+    }
+    verify(entity, policy) {
+        const timestamps = this.verifyTimestamps(entity);
+        const signer = this.verifySigningKey(entity, timestamps);
+        this.verifyTLogs(entity);
+        this.verifySignature(entity, signer);
+        if (policy) {
+            this.verifyPolicy(policy, signer.identity || {});
+        }
+        return signer;
+    }
+    // Checks that all of the timestamps in the entity are valid and returns them
+    verifyTimestamps(entity) {
+        let tlogCount = 0;
+        let tsaCount = 0;
+        const timestamps = entity.timestamps.map((timestamp) => {
+            switch (timestamp.$case) {
+                case 'timestamp-authority':
+                    tsaCount++;
+                    return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities);
+                case 'transparency-log':
+                    tlogCount++;
+                    return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs);
+            }
+        });
+        // Check for duplicate timestamps
+        if (containsDupes(timestamps)) {
+            throw new error_1.VerificationError({
+                code: 'TIMESTAMP_ERROR',
+                message: 'duplicate timestamp',
+            });
+        }
+        if (tlogCount < this.options.tlogThreshold) {
+            throw new error_1.VerificationError({
+                code: 'TIMESTAMP_ERROR',
+                message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`,
+            });
+        }
+        if (tsaCount < this.options.tsaThreshold) {
+            throw new error_1.VerificationError({
+                code: 'TIMESTAMP_ERROR',
+                message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`,
+            });
+        }
+        return timestamps.map((t) => t.timestamp);
+    }
+    // Checks that the signing key is valid for all of the supplied timestamps
+    // and returns the signer.
+    verifySigningKey({ key }, timestamps) {
+        switch (key.$case) {
+            case 'public-key': {
+                return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial);
+            }
+            case 'certificate': {
+                const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial);
+                /* istanbul ignore next - no fixture */
+                if (containsDupes(result.scts)) {
+                    throw new error_1.VerificationError({
+                        code: 'CERTIFICATE_ERROR',
+                        message: 'duplicate SCT',
+                    });
+                }
+                if (result.scts.length < this.options.ctlogThreshold) {
+                    throw new error_1.VerificationError({
+                        code: 'CERTIFICATE_ERROR',
+                        message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`,
+                    });
+                }
+                return result.signer;
+            }
+        }
+    }
+    // Checks that the tlog entries are valid for the supplied content
+    verifyTLogs({ signature: content, tlogEntries }) {
+        tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content));
+    }
+    // Checks that the signature is valid for the supplied content
+    verifySignature(entity, signer) {
+        if (!entity.signature.verifySignature(signer.key)) {
+            throw new error_1.VerificationError({
+                code: 'SIGNATURE_ERROR',
+                message: 'signature verification failed',
+            });
+        }
+    }
+    verifyPolicy(policy, identity) {
+        // Check the subject alternative name of the signer matches the policy
+        /* istanbul ignore else */
+        if (policy.subjectAlternativeName) {
+            (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName);
+        }
+        // Check that the extensions of the signer match the policy
+        /* istanbul ignore else */
+        if (policy.extensions) {
+            (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions);
+        }
+    }
+}
+exports.Verifier = Verifier;
+// Checks for duplicate items in the array. Objects are compared using
+// deep equality.
+function containsDupes(arr) {
+    for (let i = 0; i < arr.length; i++) {
+        for (let j = i + 1; j < arr.length; j++) {
+            if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) {
+                return true;
+            }
+        }
+    }
+    return false;
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/package.json b/node_modules/pacote/node_modules/@sigstore/verify/package.json
new file mode 100644
index 0000000000000..eaf12376c9025
--- /dev/null
+++ b/node_modules/pacote/node_modules/@sigstore/verify/package.json
@@ -0,0 +1,36 @@
+{
+  "name": "@sigstore/verify",
+  "version": "3.0.0",
+  "description": "Verification of Sigstore signatures",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "dependencies": {
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "@sigstore/bundle": "^4.0.0",
+    "@sigstore/core": "^3.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/LICENSE b/node_modules/pacote/node_modules/@tufjs/models/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/base.js b/node_modules/pacote/node_modules/@tufjs/models/dist/base.js
new file mode 100644
index 0000000000000..14f0024f8091a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/base.js
@@ -0,0 +1,96 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signed = exports.MetadataKind = void 0;
+exports.isMetadataKind = isMetadataKind;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+const SPECIFICATION_VERSION = ['1', '0', '31'];
+var MetadataKind;
+(function (MetadataKind) {
+    MetadataKind["Root"] = "root";
+    MetadataKind["Timestamp"] = "timestamp";
+    MetadataKind["Snapshot"] = "snapshot";
+    MetadataKind["Targets"] = "targets";
+})(MetadataKind || (exports.MetadataKind = MetadataKind = {}));
+function isMetadataKind(value) {
+    return (typeof value === 'string' &&
+        Object.values(MetadataKind).includes(value));
+}
+/***
+ * A base class for the signed part of TUF metadata.
+ *
+ * Objects with base class Signed are usually included in a ``Metadata`` object
+ * on the signed attribute. This class provides attributes and methods that
+ * are common for all TUF metadata types (roles).
+ */
+class Signed {
+    specVersion;
+    expires;
+    version;
+    unrecognizedFields;
+    constructor(options) {
+        this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.');
+        const specList = this.specVersion.split('.');
+        if (!(specList.length === 2 || specList.length === 3) ||
+            !specList.every((item) => isNumeric(item))) {
+            throw new error_1.ValueError('Failed to parse specVersion');
+        }
+        // major version must match
+        if (specList[0] != SPECIFICATION_VERSION[0]) {
+            throw new error_1.ValueError('Unsupported specVersion');
+        }
+        this.expires = options.expires;
+        this.version = options.version;
+        this.unrecognizedFields = options.unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof Signed)) {
+            return false;
+        }
+        return (this.specVersion === other.specVersion &&
+            this.expires === other.expires &&
+            this.version === other.version &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    isExpired(referenceTime) {
+        if (!referenceTime) {
+            referenceTime = new Date();
+        }
+        return referenceTime >= new Date(this.expires);
+    }
+    static commonFieldsFromJSON(data) {
+        const { spec_version, expires, version, ...rest } = data;
+        if (!utils_1.guard.isDefined(spec_version)) {
+            throw new error_1.ValueError('spec_version is not defined');
+        }
+        else if (typeof spec_version !== 'string') {
+            throw new TypeError('spec_version must be a string');
+        }
+        if (!utils_1.guard.isDefined(expires)) {
+            throw new error_1.ValueError('expires is not defined');
+        }
+        else if (!(typeof expires === 'string')) {
+            throw new TypeError('expires must be a string');
+        }
+        if (!utils_1.guard.isDefined(version)) {
+            throw new error_1.ValueError('version is not defined');
+        }
+        else if (!(typeof version === 'number')) {
+            throw new TypeError('version must be a number');
+        }
+        return {
+            specVersion: spec_version,
+            expires,
+            version,
+            unrecognizedFields: rest,
+        };
+    }
+}
+exports.Signed = Signed;
+function isNumeric(str) {
+    return !isNaN(Number(str));
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js b/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js
new file mode 100644
index 0000000000000..9ad8bf05f1c6b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js
@@ -0,0 +1,119 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Delegations = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const key_1 = require("./key");
+const role_1 = require("./role");
+const utils_1 = require("./utils");
+/**
+ * A container object storing information about all delegations.
+ *
+ * Targets roles that are trusted to provide signed metadata files
+ * describing targets with designated pathnames and/or further delegations.
+ */
+class Delegations {
+    keys;
+    roles;
+    unrecognizedFields;
+    succinctRoles;
+    constructor(options) {
+        this.keys = options.keys;
+        this.unrecognizedFields = options.unrecognizedFields || {};
+        if (options.roles) {
+            if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) {
+                throw new error_1.ValueError('Delegated role name conflicts with top-level role name');
+            }
+        }
+        this.succinctRoles = options.succinctRoles;
+        this.roles = options.roles;
+    }
+    equals(other) {
+        if (!(other instanceof Delegations)) {
+            return false;
+        }
+        return (util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+            util_1.default.isDeepStrictEqual(this.roles, other.roles) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) &&
+            util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles));
+    }
+    *rolesForTarget(targetPath) {
+        if (this.roles) {
+            for (const role of Object.values(this.roles)) {
+                if (role.isDelegatedPath(targetPath)) {
+                    yield { role: role.name, terminating: role.terminating };
+                }
+            }
+        }
+        else if (this.succinctRoles) {
+            yield {
+                role: this.succinctRoles.getRoleForTarget(targetPath),
+                terminating: true,
+            };
+        }
+    }
+    toJSON() {
+        const json = {
+            keys: keysToJSON(this.keys),
+            ...this.unrecognizedFields,
+        };
+        if (this.roles) {
+            json.roles = rolesToJSON(this.roles);
+        }
+        else if (this.succinctRoles) {
+            json.succinct_roles = this.succinctRoles.toJSON();
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { keys, roles, succinct_roles, ...unrecognizedFields } = data;
+        let succinctRoles;
+        if (utils_1.guard.isObject(succinct_roles)) {
+            succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles);
+        }
+        return new Delegations({
+            keys: keysFromJSON(keys),
+            roles: rolesFromJSON(roles),
+            unrecognizedFields,
+            succinctRoles,
+        });
+    }
+}
+exports.Delegations = Delegations;
+function keysToJSON(keys) {
+    return Object.entries(keys).reduce((acc, [keyId, key]) => ({
+        ...acc,
+        [keyId]: key.toJSON(),
+    }), {});
+}
+function rolesToJSON(roles) {
+    return Object.values(roles).map((role) => role.toJSON());
+}
+function keysFromJSON(data) {
+    if (!utils_1.guard.isObjectRecord(data)) {
+        throw new TypeError('keys is malformed');
+    }
+    return Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+        ...acc,
+        [keyID]: key_1.Key.fromJSON(keyID, keyData),
+    }), {});
+}
+function rolesFromJSON(data) {
+    let roleMap;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectArray(data)) {
+            throw new TypeError('roles is malformed');
+        }
+        roleMap = data.reduce((acc, role) => {
+            const delegatedRole = role_1.DelegatedRole.fromJSON(role);
+            return {
+                ...acc,
+                [delegatedRole.name]: delegatedRole,
+            };
+        }, {});
+    }
+    return roleMap;
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/error.js b/node_modules/pacote/node_modules/@tufjs/models/dist/error.js
new file mode 100644
index 0000000000000..ba80698747ba0
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/error.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0;
+// An error about insufficient values
+class ValueError extends Error {
+}
+exports.ValueError = ValueError;
+// An error with a repository's state, such as a missing file.
+// It covers all exceptions that come from the repository side when
+// looking from the perspective of users of metadata API or ngclient.
+class RepositoryError extends Error {
+}
+exports.RepositoryError = RepositoryError;
+// An error about metadata object with insufficient threshold of signatures.
+class UnsignedMetadataError extends RepositoryError {
+}
+exports.UnsignedMetadataError = UnsignedMetadataError;
+// An error while checking the length and hash values of an object.
+class LengthOrHashMismatchError extends RepositoryError {
+}
+exports.LengthOrHashMismatchError = LengthOrHashMismatchError;
+class CryptoError extends Error {
+}
+exports.CryptoError = CryptoError;
+class UnsupportedAlgorithmError extends CryptoError {
+}
+exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError;
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/file.js b/node_modules/pacote/node_modules/@tufjs/models/dist/file.js
new file mode 100644
index 0000000000000..c8cdcb1c40271
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/file.js
@@ -0,0 +1,191 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TargetFile = exports.MetaFile = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+// A container with information about a particular metadata file.
+//
+// This class is used for Timestamp and Snapshot metadata.
+class MetaFile {
+    version;
+    length;
+    hashes;
+    unrecognizedFields;
+    constructor(opts) {
+        if (opts.version <= 0) {
+            throw new error_1.ValueError('Metafile version must be at least 1');
+        }
+        if (opts.length !== undefined) {
+            validateLength(opts.length);
+        }
+        this.version = opts.version;
+        this.length = opts.length;
+        this.hashes = opts.hashes;
+        this.unrecognizedFields = opts.unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof MetaFile)) {
+            return false;
+        }
+        return (this.version === other.version &&
+            this.length === other.length &&
+            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    verify(data) {
+        // Verifies that the given data matches the expected length.
+        if (this.length !== undefined) {
+            if (data.length !== this.length) {
+                throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`);
+            }
+        }
+        // Verifies that the given data matches the supplied hashes.
+        if (this.hashes) {
+            Object.entries(this.hashes).forEach(([key, value]) => {
+                let hash;
+                try {
+                    hash = crypto_1.default.createHash(key);
+                }
+                catch (e) {
+                    throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+                }
+                const observedHash = hash.update(data).digest('hex');
+                if (observedHash !== value) {
+                    throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`);
+                }
+            });
+        }
+    }
+    toJSON() {
+        const json = {
+            version: this.version,
+            ...this.unrecognizedFields,
+        };
+        if (this.length !== undefined) {
+            json.length = this.length;
+        }
+        if (this.hashes) {
+            json.hashes = this.hashes;
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { version, length, hashes, ...rest } = data;
+        if (typeof version !== 'number') {
+            throw new TypeError('version must be a number');
+        }
+        if (utils_1.guard.isDefined(length) && typeof length !== 'number') {
+            throw new TypeError('length must be a number');
+        }
+        if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) {
+            throw new TypeError('hashes must be string keys and values');
+        }
+        return new MetaFile({
+            version,
+            length,
+            hashes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.MetaFile = MetaFile;
+// Container for info about a particular target file.
+//
+// This class is used for Target metadata.
+class TargetFile {
+    length;
+    path;
+    hashes;
+    unrecognizedFields;
+    constructor(opts) {
+        validateLength(opts.length);
+        this.length = opts.length;
+        this.path = opts.path;
+        this.hashes = opts.hashes;
+        this.unrecognizedFields = opts.unrecognizedFields || {};
+    }
+    get custom() {
+        const custom = this.unrecognizedFields['custom'];
+        if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) {
+            return {};
+        }
+        return custom;
+    }
+    equals(other) {
+        if (!(other instanceof TargetFile)) {
+            return false;
+        }
+        return (this.length === other.length &&
+            this.path === other.path &&
+            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    async verify(stream) {
+        let observedLength = 0;
+        // Create a digest for each hash algorithm
+        const digests = Object.keys(this.hashes).reduce((acc, key) => {
+            try {
+                acc[key] = crypto_1.default.createHash(key);
+            }
+            catch (e) {
+                throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
+            }
+            return acc;
+        }, {});
+        // Read stream chunk by chunk
+        for await (const chunk of stream) {
+            // Keep running tally of stream length
+            observedLength += chunk.length;
+            // Append chunk to each digest
+            Object.values(digests).forEach((digest) => {
+                digest.update(chunk);
+            });
+        }
+        // Verify length matches expected value
+        if (observedLength !== this.length) {
+            throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`);
+        }
+        // Verify each digest matches expected value
+        Object.entries(digests).forEach(([key, value]) => {
+            const expected = this.hashes[key];
+            const actual = value.digest('hex');
+            if (actual !== expected) {
+                throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`);
+            }
+        });
+    }
+    toJSON() {
+        return {
+            length: this.length,
+            hashes: this.hashes,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(path, data) {
+        const { length, hashes, ...rest } = data;
+        if (typeof length !== 'number') {
+            throw new TypeError('length must be a number');
+        }
+        if (!utils_1.guard.isStringRecord(hashes)) {
+            throw new TypeError('hashes must have string keys and values');
+        }
+        return new TargetFile({
+            length,
+            path,
+            hashes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.TargetFile = TargetFile;
+// Check that the supplied length is valid
+function validateLength(length) {
+    if (length < 0) {
+        throw new error_1.ValueError('Length must be at least 0');
+    }
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/index.js b/node_modules/pacote/node_modules/@tufjs/models/dist/index.js
new file mode 100644
index 0000000000000..a4dc783659f04
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/index.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0;
+var base_1 = require("./base");
+Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } });
+var error_1 = require("./error");
+Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } });
+var file_1 = require("./file");
+Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } });
+Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } });
+var key_1 = require("./key");
+Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } });
+var metadata_1 = require("./metadata");
+Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } });
+var root_1 = require("./root");
+Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } });
+var signature_1 = require("./signature");
+Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } });
+var snapshot_1 = require("./snapshot");
+Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } });
+var targets_1 = require("./targets");
+Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } });
+var timestamp_1 = require("./timestamp");
+Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/key.js b/node_modules/pacote/node_modules/@tufjs/models/dist/key.js
new file mode 100644
index 0000000000000..10bf2f4b66fc0
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/key.js
@@ -0,0 +1,90 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Key = void 0;
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+const key_1 = require("./utils/key");
+// A container class representing the public portion of a Key.
+class Key {
+    keyID;
+    keyType;
+    scheme;
+    keyVal;
+    unrecognizedFields;
+    constructor(options) {
+        const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options;
+        this.keyID = keyID;
+        this.keyType = keyType;
+        this.scheme = scheme;
+        this.keyVal = keyVal;
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    // Verifies that the metadata.signatures contains a signature made with
+    // this key and is correctly signed.
+    verifySignature(metadata) {
+        const signature = metadata.signatures[this.keyID];
+        if (!signature)
+            throw new error_1.UnsignedMetadataError('no signature for key found in metadata');
+        if (!this.keyVal.public)
+            throw new error_1.UnsignedMetadataError('no public key found');
+        const publicKey = (0, key_1.getPublicKey)({
+            keyType: this.keyType,
+            scheme: this.scheme,
+            keyVal: this.keyVal.public,
+        });
+        const signedData = metadata.signed.toJSON();
+        try {
+            if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) {
+                throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+            }
+        }
+        catch (error) {
+            if (error instanceof error_1.UnsignedMetadataError) {
+                throw error;
+            }
+            throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Key)) {
+            return false;
+        }
+        return (this.keyID === other.keyID &&
+            this.keyType === other.keyType &&
+            this.scheme === other.scheme &&
+            util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        return {
+            keytype: this.keyType,
+            scheme: this.scheme,
+            keyval: this.keyVal,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(keyID, data) {
+        const { keytype, scheme, keyval, ...rest } = data;
+        if (typeof keytype !== 'string') {
+            throw new TypeError('keytype must be a string');
+        }
+        if (typeof scheme !== 'string') {
+            throw new TypeError('scheme must be a string');
+        }
+        if (!utils_1.guard.isStringRecord(keyval)) {
+            throw new TypeError('keyval must be a string record');
+        }
+        return new Key({
+            keyID,
+            keyType: keytype,
+            scheme,
+            keyVal: keyval,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Key = Key;
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js b/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js
new file mode 100644
index 0000000000000..1ae4b6829c0c7
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js
@@ -0,0 +1,165 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Metadata = void 0;
+const canonical_json_1 = require("@tufjs/canonical-json");
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const error_1 = require("./error");
+const root_1 = require("./root");
+const signature_1 = require("./signature");
+const snapshot_1 = require("./snapshot");
+const targets_1 = require("./targets");
+const timestamp_1 = require("./timestamp");
+const utils_1 = require("./utils");
+/***
+ * A container for signed TUF metadata.
+ *
+ * Provides methods to convert to and from json, read and write to and
+ * from JSON and to create and verify metadata signatures.
+ *
+ * ``Metadata[T]`` is a generic container type where T can be any one type of
+ * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
+ * is to allow static type checking of the signed attribute in code using
+ * Metadata::
+ *
+ * root_md = Metadata[Root].fromJSON("root.json")
+ * # root_md type is now Metadata[Root]. This means signed and its
+ * # attributes like consistent_snapshot are now statically typed and the
+ * # types can be verified by static type checkers and shown by IDEs
+ *
+ * Using a type constraint is not required but not doing so means T is not a
+ * specific type so static typing cannot happen. Note that the type constraint
+ * ``[Root]`` is not validated at runtime (as pure annotations are not available
+ * then).
+ *
+ * Apart from ``expires`` all of the arguments to the inner constructors have
+ * reasonable default values for new metadata.
+ */
+class Metadata {
+    signed;
+    signatures;
+    unrecognizedFields;
+    constructor(signed, signatures, unrecognizedFields) {
+        this.signed = signed;
+        this.signatures = signatures || {};
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    sign(signer, append = true) {
+        const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON()));
+        const signature = signer(bytes);
+        if (!append) {
+            this.signatures = {};
+        }
+        this.signatures[signature.keyID] = signature;
+    }
+    verifyDelegate(delegatedRole, delegatedMetadata) {
+        let role;
+        let keys = {};
+        switch (this.signed.type) {
+            case base_1.MetadataKind.Root:
+                keys = this.signed.keys;
+                role = this.signed.roles[delegatedRole];
+                break;
+            case base_1.MetadataKind.Targets:
+                if (!this.signed.delegations) {
+                    throw new error_1.ValueError(`No delegations found for ${delegatedRole}`);
+                }
+                keys = this.signed.delegations.keys;
+                if (this.signed.delegations.roles) {
+                    role = this.signed.delegations.roles[delegatedRole];
+                }
+                else if (this.signed.delegations.succinctRoles) {
+                    if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) {
+                        role = this.signed.delegations.succinctRoles;
+                    }
+                }
+                break;
+            default:
+                throw new TypeError('invalid metadata type');
+        }
+        if (!role) {
+            throw new error_1.ValueError(`no delegation found for ${delegatedRole}`);
+        }
+        const signingKeys = new Set();
+        role.keyIDs.forEach((keyID) => {
+            const key = keys[keyID];
+            // If we don't have the key, continue checking other keys
+            if (!key) {
+                return;
+            }
+            try {
+                key.verifySignature(delegatedMetadata);
+                signingKeys.add(key.keyID);
+            }
+            catch (error) {
+                // continue
+            }
+        });
+        if (signingKeys.size < role.threshold) {
+            throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`);
+        }
+    }
+    equals(other) {
+        if (!(other instanceof Metadata)) {
+            return false;
+        }
+        return (
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+        this.signed.equals(other.signed) &&
+            util_1.default.isDeepStrictEqual(this.signatures, other.signatures) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        const signatures = Object.values(this.signatures).map((signature) => {
+            return signature.toJSON();
+        });
+        return {
+            signatures,
+            signed: this.signed.toJSON(),
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(type, data) {
+        const { signed, signatures, ...rest } = data;
+        if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) {
+            throw new TypeError('signed is not defined');
+        }
+        if (type !== signed._type) {
+            throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
+        }
+        if (!utils_1.guard.isObjectArray(signatures)) {
+            throw new TypeError('signatures is not an array');
+        }
+        let signedObj;
+        switch (type) {
+            case base_1.MetadataKind.Root:
+                signedObj = root_1.Root.fromJSON(signed);
+                break;
+            case base_1.MetadataKind.Timestamp:
+                signedObj = timestamp_1.Timestamp.fromJSON(signed);
+                break;
+            case base_1.MetadataKind.Snapshot:
+                signedObj = snapshot_1.Snapshot.fromJSON(signed);
+                break;
+            case base_1.MetadataKind.Targets:
+                signedObj = targets_1.Targets.fromJSON(signed);
+                break;
+            default:
+                throw new TypeError('invalid metadata type');
+        }
+        const sigMap = {};
+        // Ensure that each signature is unique
+        signatures.forEach((sigData) => {
+            const sig = signature_1.Signature.fromJSON(sigData);
+            if (sigMap[sig.keyID]) {
+                throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`);
+            }
+            sigMap[sig.keyID] = sig;
+        });
+        return new Metadata(signedObj, sigMap, rest);
+    }
+}
+exports.Metadata = Metadata;
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/role.js b/node_modules/pacote/node_modules/@tufjs/models/dist/role.js
new file mode 100644
index 0000000000000..6c049e17c8dab
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/role.js
@@ -0,0 +1,310 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0;
+const crypto_1 = __importDefault(require("crypto"));
+const minimatch_1 = require("minimatch");
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const utils_1 = require("./utils");
+exports.TOP_LEVEL_ROLE_NAMES = [
+    'root',
+    'targets',
+    'snapshot',
+    'timestamp',
+];
+/**
+ * Container that defines which keys are required to sign roles metadata.
+ *
+ * Role defines how many keys are required to successfully sign the roles
+ * metadata, and which keys are accepted.
+ */
+class Role {
+    keyIDs;
+    threshold;
+    unrecognizedFields;
+    constructor(options) {
+        const { keyIDs, threshold, unrecognizedFields } = options;
+        if (hasDuplicates(keyIDs)) {
+            throw new error_1.ValueError('duplicate key IDs found');
+        }
+        if (threshold < 1) {
+            throw new error_1.ValueError('threshold must be at least 1');
+        }
+        this.keyIDs = keyIDs;
+        this.threshold = threshold;
+        this.unrecognizedFields = unrecognizedFields || {};
+    }
+    equals(other) {
+        if (!(other instanceof Role)) {
+            return false;
+        }
+        return (this.threshold === other.threshold &&
+            util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) &&
+            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
+    }
+    toJSON() {
+        return {
+            keyids: this.keyIDs,
+            threshold: this.threshold,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, ...rest } = data;
+        if (!utils_1.guard.isStringArray(keyids)) {
+            throw new TypeError('keyids must be an array');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        return new Role({
+            keyIDs: keyids,
+            threshold,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Role = Role;
+function hasDuplicates(array) {
+    return new Set(array).size !== array.length;
+}
+/**
+ * A container with information about a delegated role.
+ *
+ * A delegation can happen in two ways:
+ *   - ``paths`` is set: delegates targets matching any path pattern in ``paths``
+ *   - ``pathHashPrefixes`` is set: delegates targets whose target path hash
+ *      starts with any of the prefixes in ``pathHashPrefixes``
+ *
+ *   ``paths`` and ``pathHashPrefixes`` are mutually exclusive: both cannot be
+ *   set, at least one of them must be set.
+ */
+class DelegatedRole extends Role {
+    name;
+    terminating;
+    paths;
+    pathHashPrefixes;
+    constructor(opts) {
+        super(opts);
+        const { name, terminating, paths, pathHashPrefixes } = opts;
+        this.name = name;
+        this.terminating = terminating;
+        if (opts.paths && opts.pathHashPrefixes) {
+            throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive');
+        }
+        this.paths = paths;
+        this.pathHashPrefixes = pathHashPrefixes;
+    }
+    equals(other) {
+        if (!(other instanceof DelegatedRole)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.name === other.name &&
+            this.terminating === other.terminating &&
+            util_1.default.isDeepStrictEqual(this.paths, other.paths) &&
+            util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes));
+    }
+    isDelegatedPath(targetFilepath) {
+        if (this.paths) {
+            return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern));
+        }
+        if (this.pathHashPrefixes) {
+            const hasher = crypto_1.default.createHash('sha256');
+            const pathHash = hasher.update(targetFilepath).digest('hex');
+            return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix));
+        }
+        return false;
+    }
+    toJSON() {
+        const json = {
+            ...super.toJSON(),
+            name: this.name,
+            terminating: this.terminating,
+        };
+        if (this.paths) {
+            json.paths = this.paths;
+        }
+        if (this.pathHashPrefixes) {
+            json.path_hash_prefixes = this.pathHashPrefixes;
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data;
+        if (!utils_1.guard.isStringArray(keyids)) {
+            throw new TypeError('keyids must be an array of strings');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        if (typeof name !== 'string') {
+            throw new TypeError('name must be a string');
+        }
+        if (typeof terminating !== 'boolean') {
+            throw new TypeError('terminating must be a boolean');
+        }
+        if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) {
+            throw new TypeError('paths must be an array of strings');
+        }
+        if (utils_1.guard.isDefined(path_hash_prefixes) &&
+            !utils_1.guard.isStringArray(path_hash_prefixes)) {
+            throw new TypeError('path_hash_prefixes must be an array of strings');
+        }
+        return new DelegatedRole({
+            keyIDs: keyids,
+            threshold,
+            name,
+            terminating,
+            paths,
+            pathHashPrefixes: path_hash_prefixes,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.DelegatedRole = DelegatedRole;
+// JS version of Ruby's Array#zip
+const zip = (a, b) => a.map((k, i) => [k, b[i]]);
+function isTargetInPathPattern(target, pattern) {
+    const targetParts = target.split('/');
+    const patternParts = pattern.split('/');
+    if (patternParts.length != targetParts.length) {
+        return false;
+    }
+    return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart));
+}
+/**
+ * Succinctly defines a hash bin delegation graph.
+ *
+ * A ``SuccinctRoles`` object describes a delegation graph that covers all
+ * targets, distributing them uniformly over the delegated roles (i.e. bins)
+ * in the graph.
+ *
+ * The total number of bins is 2 to the power of the passed ``bit_length``.
+ *
+ * Bin names are the concatenation of the passed ``name_prefix`` and a
+ * zero-padded hex representation of the bin index separated by a hyphen.
+ *
+ * The passed ``keyids`` and ``threshold`` are used for each bin, and each bin
+ * is 'terminating'.
+ *
+ * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
+ */
+class SuccinctRoles extends Role {
+    bitLength;
+    namePrefix;
+    numberOfBins;
+    suffixLen;
+    constructor(opts) {
+        super(opts);
+        const { bitLength, namePrefix } = opts;
+        if (bitLength <= 0 || bitLength > 32) {
+            throw new error_1.ValueError('bitLength must be between 1 and 32');
+        }
+        this.bitLength = bitLength;
+        this.namePrefix = namePrefix;
+        // Calculate the suffix_len value based on the total number of bins in
+        // hex. If bit_length = 10 then number_of_bins = 1024 or bin names will
+        // have a suffix between "000" and "3ff" in hex and suffix_len will be 3
+        // meaning the third bin will have a suffix of "003".
+        this.numberOfBins = Math.pow(2, bitLength);
+        // suffix_len is calculated based on "number_of_bins - 1" as the name
+        // of the last bin contains the number "number_of_bins -1" as a suffix.
+        this.suffixLen = (this.numberOfBins - 1).toString(16).length;
+    }
+    equals(other) {
+        if (!(other instanceof SuccinctRoles)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.bitLength === other.bitLength &&
+            this.namePrefix === other.namePrefix);
+    }
+    /***
+     * Calculates the name of the delegated role responsible for 'target_filepath'.
+     *
+     * The target at path 'target_filepath' is assigned to a bin by casting
+     * the left-most 'bit_length' of bits of the file path hash digest to
+     * int, using it as bin index between 0 and '2**bit_length - 1'.
+     *
+     * Args:
+     *  target_filepath: URL path to a target file, relative to a base
+     *  targets URL.
+     */
+    getRoleForTarget(targetFilepath) {
+        const hasher = crypto_1.default.createHash('sha256');
+        const hasherBuffer = hasher.update(targetFilepath).digest();
+        // can't ever need more than 4 bytes (32 bits).
+        const hashBytes = hasherBuffer.subarray(0, 4);
+        // Right shift hash bytes, so that we only have the leftmost
+        // bit_length bits that we care about.
+        const shiftValue = 32 - this.bitLength;
+        const binNumber = hashBytes.readUInt32BE() >>> shiftValue;
+        // Add zero padding if necessary and cast to hex the suffix.
+        const suffix = binNumber.toString(16).padStart(this.suffixLen, '0');
+        return `${this.namePrefix}-${suffix}`;
+    }
+    *getRoles() {
+        for (let i = 0; i < this.numberOfBins; i++) {
+            const suffix = i.toString(16).padStart(this.suffixLen, '0');
+            yield `${this.namePrefix}-${suffix}`;
+        }
+    }
+    /***
+     * Determines whether the given ``role_name`` is in one of
+     * the delegated roles that ``SuccinctRoles`` represents.
+     *
+     * Args:
+     *  role_name: The name of the role to check against.
+     */
+    isDelegatedRole(roleName) {
+        const desiredPrefix = this.namePrefix + '-';
+        if (!roleName.startsWith(desiredPrefix)) {
+            return false;
+        }
+        const suffix = roleName.slice(desiredPrefix.length, roleName.length);
+        if (suffix.length != this.suffixLen) {
+            return false;
+        }
+        // make sure the suffix is a hex string
+        if (!suffix.match(/^[0-9a-fA-F]+$/)) {
+            return false;
+        }
+        const num = parseInt(suffix, 16);
+        return 0 <= num && num < this.numberOfBins;
+    }
+    toJSON() {
+        const json = {
+            ...super.toJSON(),
+            bit_length: this.bitLength,
+            name_prefix: this.namePrefix,
+        };
+        return json;
+    }
+    static fromJSON(data) {
+        const { keyids, threshold, bit_length, name_prefix, ...rest } = data;
+        if (!utils_1.guard.isStringArray(keyids)) {
+            throw new TypeError('keyids must be an array of strings');
+        }
+        if (typeof threshold !== 'number') {
+            throw new TypeError('threshold must be a number');
+        }
+        if (typeof bit_length !== 'number') {
+            throw new TypeError('bit_length must be a number');
+        }
+        if (typeof name_prefix !== 'string') {
+            throw new TypeError('name_prefix must be a string');
+        }
+        return new SuccinctRoles({
+            keyIDs: keyids,
+            threshold,
+            bitLength: bit_length,
+            namePrefix: name_prefix,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.SuccinctRoles = SuccinctRoles;
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/root.js b/node_modules/pacote/node_modules/@tufjs/models/dist/root.js
new file mode 100644
index 0000000000000..76d4e4039980e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/root.js
@@ -0,0 +1,119 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Root = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const error_1 = require("./error");
+const key_1 = require("./key");
+const role_1 = require("./role");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of root metadata.
+ *
+ * The top-level role and metadata file signed by the root keys.
+ * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
+ */
+class Root extends base_1.Signed {
+    type = base_1.MetadataKind.Root;
+    keys;
+    roles;
+    consistentSnapshot;
+    constructor(options) {
+        super(options);
+        this.keys = options.keys || {};
+        this.consistentSnapshot = options.consistentSnapshot ?? true;
+        if (!options.roles) {
+            this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({
+                ...acc,
+                [role]: new role_1.Role({ keyIDs: [], threshold: 1 }),
+            }), {});
+        }
+        else {
+            const roleNames = new Set(Object.keys(options.roles));
+            if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) {
+                throw new error_1.ValueError('missing top-level role');
+            }
+            this.roles = options.roles;
+        }
+    }
+    addKey(key, role) {
+        if (!this.roles[role]) {
+            throw new error_1.ValueError(`role ${role} does not exist`);
+        }
+        if (!this.roles[role].keyIDs.includes(key.keyID)) {
+            this.roles[role].keyIDs.push(key.keyID);
+        }
+        this.keys[key.keyID] = key;
+    }
+    equals(other) {
+        if (!(other instanceof Root)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            this.consistentSnapshot === other.consistentSnapshot &&
+            util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
+            util_1.default.isDeepStrictEqual(this.roles, other.roles));
+    }
+    toJSON() {
+        return {
+            _type: this.type,
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            keys: keysToJSON(this.keys),
+            roles: rolesToJSON(this.roles),
+            consistent_snapshot: this.consistentSnapshot,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields;
+        if (typeof consistent_snapshot !== 'boolean') {
+            throw new TypeError('consistent_snapshot must be a boolean');
+        }
+        return new Root({
+            ...commonFields,
+            keys: keysFromJSON(keys),
+            roles: rolesFromJSON(roles),
+            consistentSnapshot: consistent_snapshot,
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Root = Root;
+function keysToJSON(keys) {
+    return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {});
+}
+function rolesToJSON(roles) {
+    return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {});
+}
+function keysFromJSON(data) {
+    let keys;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('keys must be an object');
+        }
+        keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({
+            ...acc,
+            [keyID]: key_1.Key.fromJSON(keyID, keyData),
+        }), {});
+    }
+    return keys;
+}
+function rolesFromJSON(data) {
+    let roles;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('roles must be an object');
+        }
+        roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({
+            ...acc,
+            [roleName]: role_1.Role.fromJSON(roleData),
+        }), {});
+    }
+    return roles;
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js b/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js
new file mode 100644
index 0000000000000..43c0bfe58c483
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js
@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = void 0;
+/**
+ * A container class containing information about a signature.
+ *
+ * Contains a signature and the keyid uniquely identifying the key used
+ * to generate the signature.
+ *
+ * Provides a `fromJSON` method to create a Signature from a JSON object.
+ */
+class Signature {
+    keyID;
+    sig;
+    constructor(options) {
+        const { keyID, sig } = options;
+        this.keyID = keyID;
+        this.sig = sig;
+    }
+    toJSON() {
+        return {
+            keyid: this.keyID,
+            sig: this.sig,
+        };
+    }
+    static fromJSON(data) {
+        const { keyid, sig } = data;
+        if (typeof keyid !== 'string') {
+            throw new TypeError('keyid must be a string');
+        }
+        if (typeof sig !== 'string') {
+            throw new TypeError('sig must be a string');
+        }
+        return new Signature({
+            keyID: keyid,
+            sig: sig,
+        });
+    }
+}
+exports.Signature = Signature;
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js
new file mode 100644
index 0000000000000..bc9983c12e669
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js
@@ -0,0 +1,72 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Snapshot = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of snapshot metadata.
+ *
+ * Snapshot contains information about all target Metadata files.
+ * A top-level role that specifies the latest versions of all targets metadata files,
+ * and hence the latest versions of all targets (including any dependencies between them) on the repository.
+ */
+class Snapshot extends base_1.Signed {
+    type = base_1.MetadataKind.Snapshot;
+    meta;
+    constructor(opts) {
+        super(opts);
+        this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) };
+    }
+    equals(other) {
+        if (!(other instanceof Snapshot)) {
+            return false;
+        }
+        return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta);
+    }
+    toJSON() {
+        return {
+            _type: this.type,
+            meta: metaToJSON(this.meta),
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { meta, ...rest } = unrecognizedFields;
+        return new Snapshot({
+            ...commonFields,
+            meta: metaFromJSON(meta),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Snapshot = Snapshot;
+function metaToJSON(meta) {
+    return Object.entries(meta).reduce((acc, [path, metadata]) => ({
+        ...acc,
+        [path]: metadata.toJSON(),
+    }), {});
+}
+function metaFromJSON(data) {
+    let meta;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('meta field is malformed');
+        }
+        else {
+            meta = Object.entries(data).reduce((acc, [path, metadata]) => ({
+                ...acc,
+                [path]: file_1.MetaFile.fromJSON(metadata),
+            }), {});
+        }
+    }
+    return meta;
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js b/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js
new file mode 100644
index 0000000000000..e509722f94758
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js
@@ -0,0 +1,94 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Targets = void 0;
+const util_1 = __importDefault(require("util"));
+const base_1 = require("./base");
+const delegations_1 = require("./delegations");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+// Container for the signed part of targets metadata.
+//
+// Targets contains verifying information about target files and also delegates
+// responsibility to other Targets roles.
+class Targets extends base_1.Signed {
+    type = base_1.MetadataKind.Targets;
+    targets;
+    delegations;
+    constructor(options) {
+        super(options);
+        this.targets = options.targets || {};
+        this.delegations = options.delegations;
+    }
+    addTarget(target) {
+        this.targets[target.path] = target;
+    }
+    equals(other) {
+        if (!(other instanceof Targets)) {
+            return false;
+        }
+        return (super.equals(other) &&
+            util_1.default.isDeepStrictEqual(this.targets, other.targets) &&
+            util_1.default.isDeepStrictEqual(this.delegations, other.delegations));
+    }
+    toJSON() {
+        const json = {
+            _type: this.type,
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            targets: targetsToJSON(this.targets),
+            ...this.unrecognizedFields,
+        };
+        if (this.delegations) {
+            json.delegations = this.delegations.toJSON();
+        }
+        return json;
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { targets, delegations, ...rest } = unrecognizedFields;
+        return new Targets({
+            ...commonFields,
+            targets: targetsFromJSON(targets),
+            delegations: delegationsFromJSON(delegations),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Targets = Targets;
+function targetsToJSON(targets) {
+    return Object.entries(targets).reduce((acc, [path, target]) => ({
+        ...acc,
+        [path]: target.toJSON(),
+    }), {});
+}
+function targetsFromJSON(data) {
+    let targets;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObjectRecord(data)) {
+            throw new TypeError('targets must be an object');
+        }
+        else {
+            targets = Object.entries(data).reduce((acc, [path, target]) => ({
+                ...acc,
+                [path]: file_1.TargetFile.fromJSON(path, target),
+            }), {});
+        }
+    }
+    return targets;
+}
+function delegationsFromJSON(data) {
+    let delegations;
+    if (utils_1.guard.isDefined(data)) {
+        if (!utils_1.guard.isObject(data)) {
+            throw new TypeError('delegations must be an object');
+        }
+        else {
+            delegations = delegations_1.Delegations.fromJSON(data);
+        }
+    }
+    return delegations;
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js
new file mode 100644
index 0000000000000..d454b308f27e1
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js
@@ -0,0 +1,59 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+const base_1 = require("./base");
+const file_1 = require("./file");
+const utils_1 = require("./utils");
+/**
+ * A container for the signed part of timestamp metadata.
+ *
+ * A top-level role that specifies the latest version of the snapshot role metadata file,
+ * and hence the latest versions of all metadata and targets on the repository.
+ */
+class Timestamp extends base_1.Signed {
+    type = base_1.MetadataKind.Timestamp;
+    snapshotMeta;
+    constructor(options) {
+        super(options);
+        this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 });
+    }
+    equals(other) {
+        if (!(other instanceof Timestamp)) {
+            return false;
+        }
+        return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta);
+    }
+    toJSON() {
+        return {
+            _type: this.type,
+            spec_version: this.specVersion,
+            version: this.version,
+            expires: this.expires,
+            meta: { 'snapshot.json': this.snapshotMeta.toJSON() },
+            ...this.unrecognizedFields,
+        };
+    }
+    static fromJSON(data) {
+        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
+        const { meta, ...rest } = unrecognizedFields;
+        return new Timestamp({
+            ...commonFields,
+            snapshotMeta: snapshotMetaFromJSON(meta),
+            unrecognizedFields: rest,
+        });
+    }
+}
+exports.Timestamp = Timestamp;
+function snapshotMetaFromJSON(data) {
+    let snapshotMeta;
+    if (utils_1.guard.isDefined(data)) {
+        const snapshotData = data['snapshot.json'];
+        if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) {
+            throw new TypeError('missing snapshot.json in meta');
+        }
+        else {
+            snapshotMeta = file_1.MetaFile.fromJSON(snapshotData);
+        }
+    }
+    return snapshotMeta;
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js
new file mode 100644
index 0000000000000..911e8475986bb
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js
@@ -0,0 +1,32 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.isDefined = isDefined;
+exports.isObject = isObject;
+exports.isStringArray = isStringArray;
+exports.isObjectArray = isObjectArray;
+exports.isStringRecord = isStringRecord;
+exports.isObjectRecord = isObjectRecord;
+function isDefined(val) {
+    return val !== undefined;
+}
+function isObject(value) {
+    return typeof value === 'object' && value !== null;
+}
+function isStringArray(value) {
+    return Array.isArray(value) && value.every((v) => typeof v === 'string');
+}
+function isObjectArray(value) {
+    return Array.isArray(value) && value.every(isObject);
+}
+function isStringRecord(value) {
+    return (typeof value === 'object' &&
+        value !== null &&
+        Object.keys(value).every((k) => typeof k === 'string') &&
+        Object.values(value).every((v) => typeof v === 'string'));
+}
+function isObjectRecord(value) {
+    return (typeof value === 'object' &&
+        value !== null &&
+        Object.keys(value).every((k) => typeof k === 'string') &&
+        Object.values(value).every((v) => typeof v === 'object' && v !== null));
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js
new file mode 100644
index 0000000000000..395cccc36cf92
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js
@@ -0,0 +1,38 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.crypto = exports.guard = void 0;
+exports.guard = __importStar(require("./guard"));
+exports.crypto = __importStar(require("./verify"));
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js
new file mode 100644
index 0000000000000..3c3ec07f1425a
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js
@@ -0,0 +1,142 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getPublicKey = getPublicKey;
+const crypto_1 = __importDefault(require("crypto"));
+const error_1 = require("../error");
+const oid_1 = require("./oid");
+const ASN1_TAG_SEQUENCE = 0x30;
+const ANS1_TAG_BIT_STRING = 0x03;
+const NULL_BYTE = 0x00;
+const OID_EDDSA = '1.3.101.112';
+const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1';
+const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7';
+const PEM_HEADER = '-----BEGIN PUBLIC KEY-----';
+function getPublicKey(keyInfo) {
+    switch (keyInfo.keyType) {
+        case 'rsa':
+            return getRSAPublicKey(keyInfo);
+        case 'ed25519':
+            return getED25519PublicKey(keyInfo);
+        case 'ecdsa':
+        case 'ecdsa-sha2-nistp256':
+        case 'ecdsa-sha2-nistp384':
+            return getECDCSAPublicKey(keyInfo);
+        default:
+            throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
+    }
+}
+function getRSAPublicKey(keyInfo) {
+    // Only support PEM-encoded RSA keys
+    if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        throw new error_1.CryptoError('Invalid key format');
+    }
+    const key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    switch (keyInfo.scheme) {
+        case 'rsassa-pss-sha256':
+            return {
+                key: key,
+                padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING,
+            };
+        default:
+            throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`);
+    }
+}
+function getED25519PublicKey(keyInfo) {
+    let key;
+    // If key is already PEM-encoded we can just parse it
+    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    }
+    else {
+        // If key is not PEM-encoded it had better be hex
+        if (!isHex(keyInfo.keyVal)) {
+            throw new error_1.CryptoError('Invalid key format');
+        }
+        key = crypto_1.default.createPublicKey({
+            key: ed25519.hexToDER(keyInfo.keyVal),
+            format: 'der',
+            type: 'spki',
+        });
+    }
+    return { key };
+}
+function getECDCSAPublicKey(keyInfo) {
+    let key;
+    // If key is already PEM-encoded we can just parse it
+    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
+        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
+    }
+    else {
+        // If key is not PEM-encoded it had better be hex
+        if (!isHex(keyInfo.keyVal)) {
+            throw new error_1.CryptoError('Invalid key format');
+        }
+        key = crypto_1.default.createPublicKey({
+            key: ecdsa.hexToDER(keyInfo.keyVal),
+            format: 'der',
+            type: 'spki',
+        });
+    }
+    return { key };
+}
+const ed25519 = {
+    // Translates a hex key into a crypto KeyObject
+    // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/
+    hexToDER: (hex) => {
+        const key = Buffer.from(hex, 'hex');
+        const oid = (0, oid_1.encodeOIDString)(OID_EDDSA);
+        // Create a byte sequence containing the OID and key
+        const elements = Buffer.concat([
+            Buffer.concat([
+                Buffer.from([ASN1_TAG_SEQUENCE]),
+                Buffer.from([oid.length]),
+                oid,
+            ]),
+            Buffer.concat([
+                Buffer.from([ANS1_TAG_BIT_STRING]),
+                Buffer.from([key.length + 1]),
+                Buffer.from([NULL_BYTE]),
+                key,
+            ]),
+        ]);
+        // Wrap up by creating a sequence of elements
+        const der = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([elements.length]),
+            elements,
+        ]);
+        return der;
+    },
+};
+const ecdsa = {
+    hexToDER: (hex) => {
+        const key = Buffer.from(hex, 'hex');
+        const bitString = Buffer.concat([
+            Buffer.from([ANS1_TAG_BIT_STRING]),
+            Buffer.from([key.length + 1]),
+            Buffer.from([NULL_BYTE]),
+            key,
+        ]);
+        const oids = Buffer.concat([
+            (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY),
+            (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1),
+        ]);
+        const oidSequence = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([oids.length]),
+            oids,
+        ]);
+        // Wrap up by creating a sequence of elements
+        const der = Buffer.concat([
+            Buffer.from([ASN1_TAG_SEQUENCE]),
+            Buffer.from([oidSequence.length + bitString.length]),
+            oidSequence,
+            bitString,
+        ]);
+        return der;
+    },
+};
+const isHex = (key) => /^[0-9a-fA-F]+$/.test(key);
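As a worked example of the DER construction above: for a 32-byte ed25519 key the concatenation works out to the standard SPKI prefix 302a300506032b6570032100 followed by the raw key bytes, which crypto.createPublicKey accepts as { format: 'der', type: 'spki' }. A sketch with a dummy key (the deep require path is illustrative):

// Sketch only: dummy key material, not a real trusted key.
const { getPublicKey } = require('@tufjs/models/dist/utils/key')

const dummyHex = '11'.repeat(32) // 32 bytes of 0x11, hex-encoded
const { key } = getPublicKey({ keyType: 'ed25519', scheme: 'ed25519', keyVal: dummyHex })
// internally this wraps the bytes as 302a300506032b6570032100 + dummyHex
console.log(key.asymmetricKeyType) // 'ed25519'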
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js
new file mode 100644
index 0000000000000..00b29c3030d1e
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js
@@ -0,0 +1,26 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.encodeOIDString = encodeOIDString;
+const ANS1_TAG_OID = 0x06;
+function encodeOIDString(oid) {
+    const parts = oid.split('.');
+    // The first two subidentifiers are encoded into the first byte
+    const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10);
+    const rest = [];
+    parts.slice(2).forEach((part) => {
+        const bytes = encodeVariableLengthInteger(parseInt(part, 10));
+        rest.push(...bytes);
+    });
+    const der = Buffer.from([first, ...rest]);
+    return Buffer.from([ANS1_TAG_OID, der.length, ...der]);
+}
+function encodeVariableLengthInteger(value) {
+    const bytes = [];
+    let mask = 0x00;
+    while (value > 0) {
+        bytes.unshift((value & 0x7f) | mask);
+        value >>= 7;
+        mask = 0x80;
+    }
+    return bytes;
+}
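Two worked examples of the encoder above: every subidentifier of the ed25519 OID fits in one base-128 byte, while 840 and 10045 in the EC OID need the 0x80 continuation form (the deep require path is illustrative):

const { encodeOIDString } = require('@tufjs/models/dist/utils/oid')

console.log(encodeOIDString('1.3.101.112'))
// <Buffer 06 03 2b 65 70>             (1*40+3 = 0x2b, then 0x65, 0x70)
console.log(encodeOIDString('1.2.840.10045.2.1'))
// <Buffer 06 07 2a 86 48 ce 3d 02 01> (840 -> 86 48, 10045 -> ce 3d)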
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js
new file mode 100644
index 0000000000000..8232b6f6a97ab
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js
@@ -0,0 +1,13 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verifySignature = void 0;
+const canonical_json_1 = require("@tufjs/canonical-json");
+const crypto_1 = __importDefault(require("crypto"));
+const verifySignature = (metaDataSignedData, key, signature) => {
+    const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData));
+    return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex'));
+};
+exports.verifySignature = verifySignature;
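A self-contained sketch of the round trip: sign the canonical form of a signed object, then check it with verifySignature (assumes @tufjs/canonical-json is available; the deep require path is illustrative):

const crypto = require('crypto')
const { canonicalize } = require('@tufjs/canonical-json')
const { verifySignature } = require('@tufjs/models/dist/utils/verify')

const { publicKey, privateKey } = crypto.generateKeyPairSync('ed25519')
const signed = { _type: 'timestamp', version: 1 }
// ed25519 takes no digest argument, so crypto.sign(null, ...) is used here
const sig = crypto.sign(null, Buffer.from(canonicalize(signed)), privateKey).toString('hex')
console.log(verifySignature(signed, publicKey, sig)) // true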
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/LICENSE b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/LICENSE
new file mode 100644
index 0000000000000..1493534e60dce
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
new file mode 100644
index 0000000000000..5fc86bbd0116c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertValidPattern = void 0;
+const MAX_PATTERN_LENGTH = 1024 * 64;
+const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+exports.assertValidPattern = assertValidPattern;
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js
new file mode 100644
index 0000000000000..7b2109625eaeb
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js
@@ -0,0 +1,592 @@
+"use strict";
+// parse a single path portion
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AST = void 0;
+const brace_expressions_js_1 = require("./brace-expressions.js");
+const unescape_js_1 = require("./unescape.js");
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everthing that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                (0, unescape_js_1.unescape)(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something,but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            (0, unescape_js_1.unescape)(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
+    }
+}
+exports.AST = AST;
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
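A short sketch of the entry points minimatch itself uses: AST.fromGlob parses a pattern and toMMPattern compiles it (AST is re-exported from minimatch's main entry):

const { AST } = require('minimatch')

const re = AST.fromGlob('*.js').toMMPattern()
// with default options this is roughly /^(?!\.)[^/]*?\.js$/
console.log(re.test('index.js'), re.test('.hidden.js')) // true false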
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js
new file mode 100644
index 0000000000000..0e13eefc4cfee
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js
@@ -0,0 +1,152 @@
+"use strict";
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseClass = void 0;
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class just no good.
+const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c<more...>] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+exports.parseClass = parseClass;
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
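Worked examples of the tuple parseClass returns, [regexp source, /u flag needed, characters consumed, has magic]; parseClass is an internal module, so the deep require path is illustrative only:

const { parseClass } = require('minimatch/dist/commonjs/brace-expressions.js')

console.log(parseClass('[a-c]x', 0))
// [ '[a-c]', false, 5, true ]       -- consumed the 5 chars of '[a-c]'
console.log(parseClass('[[:digit:]]', 0))
// [ '[\\p{Nd}]', true, 11, true ]   -- posix class maps to a unicode property, /u required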
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js
new file mode 100644
index 0000000000000..02a4f8a8e0a58
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.escape = void 0;
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+exports.escape = escape;
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
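The two escaping modes in one example (escape is re-exported from minimatch's main entry):

const { escape } = require('minimatch')

console.log(escape('a*b?(c)'))
// a\*b\?\(c\)     -- default: backslash-escape the magic characters
console.log(escape('a*b?(c)', { windowsPathsNoEscape: true }))
// a[*]b[?][(]c[)] -- wrap in character classes instead of using backslashes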
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
new file mode 100644
index 0000000000000..64a0f1f833222
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
@@ -0,0 +1,1017 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
+const brace_expansion_1 = __importDefault(require("brace-expansion"));
+const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
+const ast_js_1 = require("./ast.js");
+const escape_js_1 = require("./escape.js");
+const unescape_js_1 = require("./unescape.js");
+const minimatch = (p, pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+exports.minimatch = minimatch;
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+exports.minimatch.sep = exports.sep;
+exports.GLOBSTAR = Symbol('globstar **');
+exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
+exports.filter = filter;
+exports.minimatch.filter = exports.filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return exports.minimatch;
+    }
+    const orig = exports.minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: exports.GLOBSTAR,
+    });
+};
+exports.defaults = defaults;
+exports.minimatch.defaults = exports.defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+const braceExpand = (pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return (0, brace_expansion_1.default)(pattern);
+};
+exports.braceExpand = braceExpand;
+exports.minimatch.braceExpand = exports.braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+exports.makeRe = makeRe;
+exports.minimatch.makeRe = exports.makeRe;
+const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+exports.match = match;
+exports.minimatch.match = exports.match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjacent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjacent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
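+    // Illustrative example (not in the upstream comments): the parts of
+    // 'a/**/../b/c/d' are split into two patterns, 'a/../b/c/d' (which the
+    // '..' resolution below then reduces to 'b/c/d') and 'a/**/b/c/d'.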
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
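+    // Illustrative example (not in the upstream comments): given the two
+    // patterns ['a', '*', 'z'] and ['a', 'b', 'z'], partsMatch() reports a
+    // match, so the second phase keeps only ['a', '*', 'z'].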
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.<ext>  Add a fast check method for those.
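+        // Illustrative examples (not in the upstream comments): '*' gets the
+        // starRE fast test, '*.js' the starDotExtRE test, and '*.*' the
+        // starDotStarRE test; the AST-compiled pattern is still built either way.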
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
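+        // Illustrative example (not in the upstream comments): on Windows with
+        // default options, slashSplit('//host/share/x') returns
+        // ['', '', 'host', 'share', 'x'], keeping the UNC double-slash as two
+        // empty leading segments.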
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
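+// Illustrative usage (not in the upstream source): with default options,
+// unescape('[*]') === '*' and unescape('\\*') === '*'; with
+// windowsPathsNoEscape, only the square-brace form is unescaped.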
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 0000000000000..2d2bced6533de
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated] }
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
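+// Illustrative example (not in the upstream comments):
+//   parseClass('[a-c]', 0) => ['[a-c]', false, 5, true]
+// i.e. the regexp source, no /u flag needed, 5 characters consumed, and magic.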
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+    // can be either c-d or c-] or c<more...>] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 0000000000000..84b577b0472cb
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1001 @@
+import expand from 'brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // dont' need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..01fc48ecfd6a9
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json
@@ -0,0 +1,82 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "9.0.5",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^18.7.2",
+    "ts-node": "^10.9.1",
+    "tshy": "^1.12.0",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/package.json b/node_modules/pacote/node_modules/@tufjs/models/package.json
new file mode 100644
index 0000000000000..dfd60d248118c
--- /dev/null
+++ b/node_modules/pacote/node_modules/@tufjs/models/package.json
@@ -0,0 +1,37 @@
+{
+  "name": "@tufjs/models",
+  "version": "4.0.0",
+  "description": "TUF metadata models",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build": "tsc --build tsconfig.build.json",
+    "clean": "rm -rf dist && rm tsconfig.build.tsbuildinfo",
+    "test": "jest"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/theupdateframework/tuf-js.git"
+  },
+  "keywords": [
+    "tuf",
+    "security",
+    "update"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/theupdateframework/tuf-js/issues"
+  },
+  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
+  "dependencies": {
+    "@tufjs/canonical-json": "2.0.0",
+    "minimatch": "^9.0.5"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/pacote/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/pacote/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/pacote/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..5f6192c3cec56
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,165 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = size ? { size } : await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+
+  if (stat.size !== data.length) {
+    throw sizeError(stat.size, data.length)
+  }
+
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // get size
+      const stat = size ? { size } : await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // a ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non error if it is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/pacote/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..e7187abca8788
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,206 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in process so we don't duplicate
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+      this.handleContentP.catch(error => this.emit('error', error))
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..0e09b10818d09
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,336 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+const lsStreamConcurrency = 5
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use @npmcli/move-file directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated such that the newest
+  // entries come first in order to make looping through them easier
+  // the true passed to formatEntry tells it to keep null
+  // integrity values, if they made it this far it's because
+  // validateEntry returned true, and as such we should return it
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still preserving the string length of the JSON in
+    // question. So, we just slap the length in there and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { default: pMap } = await import('p-map')
+    const buckets = await readdirOrEmpty(indexDir)
+    await pMap(buckets, async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await pMap(subbuckets, async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await pMap(subbucketEntries, async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here prevents duplicate keys from showing up
+            // twice, I guess?
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        },
+        { concurrency: lsStreamConcurrency })
+      },
+      { concurrency: lsStreamConcurrency })
+    },
+    { concurrency: lsStreamConcurrency })
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // eslint-ignore-next-line no-empty-block
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsey
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/pacote/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/pacote/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/pacote/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..2ecc60912e456
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+
+const MEMOIZED = new LRUCache({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/pacote/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only, not a passthrough
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/pacote/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/pacote/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..dcff3aa73f317
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/lib/verify.js
@@ -0,0 +1,258 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime () {
+  return { startTime: new Date() }
+}
+
+async function markEndTime () {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const { default: pMap } = await import('p-map')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const { default: pMap } = await import('p-map')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/pacote/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..6eec0a8375e5c
--- /dev/null
+++ b/node_modules/pacote/node_modules/cacache/package.json
@@ -0,0 +1,82 @@
+{
+  "name": "cacache",
+  "version": "20.0.1",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "npm run eslint",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run eslint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^4.0.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^11.0.3",
+    "lru-cache": "^11.1.0",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^2.0.1",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^7.0.2",
+    "ssri": "^12.0.0",
+    "unique-filename": "^4.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.25.0",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/pacote/node_modules/chownr/LICENSE.md b/node_modules/pacote/node_modules/chownr/LICENSE.md
new file mode 100644
index 0000000000000..881248b6d7f0c
--- /dev/null
+++ b/node_modules/pacote/node_modules/chownr/LICENSE.md
@@ -0,0 +1,63 @@
+All packages under `src/` are licensed according to the terms in
+their respective `LICENSE` or `LICENSE.md` files.
+
+The remainder of this project is licensed under the Blue Oak
+Model License, as follows:
+
+-----
+
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/pacote/node_modules/chownr/dist/commonjs/index.js b/node_modules/pacote/node_modules/chownr/dist/commonjs/index.js
new file mode 100644
index 0000000000000..6a7b68d5eac26
--- /dev/null
+++ b/node_modules/pacote/node_modules/chownr/dist/commonjs/index.js
@@ -0,0 +1,93 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.chownrSync = exports.chownr = void 0;
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const lchownSync = (path, uid, gid) => {
+    try {
+        return node_fs_1.default.lchownSync(path, uid, gid);
+    }
+    catch (er) {
+        if (er?.code !== 'ENOENT')
+            throw er;
+    }
+};
+const chown = (cpath, uid, gid, cb) => {
+    node_fs_1.default.lchown(cpath, uid, gid, er => {
+        // Skip ENOENT error
+        cb(er && er?.code !== 'ENOENT' ? er : null);
+    });
+};
+const chownrKid = (p, child, uid, gid, cb) => {
+    if (child.isDirectory()) {
+        (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => {
+            if (er)
+                return cb(er);
+            const cpath = node_path_1.default.resolve(p, child.name);
+            chown(cpath, uid, gid, cb);
+        });
+    }
+    else {
+        const cpath = node_path_1.default.resolve(p, child.name);
+        chown(cpath, uid, gid, cb);
+    }
+};
+const chownr = (p, uid, gid, cb) => {
+    node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => {
+        // any error other than ENOTDIR or ENOTSUP means it's not readable,
+        // or doesn't exist.  give up.
+        if (er) {
+            if (er.code === 'ENOENT')
+                return cb();
+            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
+                return cb(er);
+        }
+        if (er || !children.length)
+            return chown(p, uid, gid, cb);
+        let len = children.length;
+        let errState = null;
+        const then = (er) => {
+            /* c8 ignore start */
+            if (errState)
+                return;
+            /* c8 ignore stop */
+            if (er)
+                return cb((errState = er));
+            if (--len === 0)
+                return chown(p, uid, gid, cb);
+        };
+        for (const child of children) {
+            chownrKid(p, child, uid, gid, then);
+        }
+    });
+};
+exports.chownr = chownr;
+const chownrKidSync = (p, child, uid, gid) => {
+    if (child.isDirectory())
+        (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid);
+    lchownSync(node_path_1.default.resolve(p, child.name), uid, gid);
+};
+const chownrSync = (p, uid, gid) => {
+    let children;
+    try {
+        children = node_fs_1.default.readdirSync(p, { withFileTypes: true });
+    }
+    catch (er) {
+        const e = er;
+        if (e?.code === 'ENOENT')
+            return;
+        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
+            return lchownSync(p, uid, gid);
+        else
+            throw e;
+    }
+    for (const child of children) {
+        chownrKidSync(p, child, uid, gid);
+    }
+    return lchownSync(p, uid, gid);
+};
+exports.chownrSync = chownrSync;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/chownr/dist/commonjs/package.json b/node_modules/pacote/node_modules/chownr/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/chownr/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/chownr/dist/esm/index.js b/node_modules/pacote/node_modules/chownr/dist/esm/index.js
new file mode 100644
index 0000000000000..5c2815297a67c
--- /dev/null
+++ b/node_modules/pacote/node_modules/chownr/dist/esm/index.js
@@ -0,0 +1,85 @@
+import fs from 'node:fs';
+import path from 'node:path';
+const lchownSync = (path, uid, gid) => {
+    try {
+        return fs.lchownSync(path, uid, gid);
+    }
+    catch (er) {
+        if (er?.code !== 'ENOENT')
+            throw er;
+    }
+};
+const chown = (cpath, uid, gid, cb) => {
+    fs.lchown(cpath, uid, gid, er => {
+        // Skip ENOENT error
+        cb(er && er?.code !== 'ENOENT' ? er : null);
+    });
+};
+const chownrKid = (p, child, uid, gid, cb) => {
+    if (child.isDirectory()) {
+        chownr(path.resolve(p, child.name), uid, gid, (er) => {
+            if (er)
+                return cb(er);
+            const cpath = path.resolve(p, child.name);
+            chown(cpath, uid, gid, cb);
+        });
+    }
+    else {
+        const cpath = path.resolve(p, child.name);
+        chown(cpath, uid, gid, cb);
+    }
+};
+export const chownr = (p, uid, gid, cb) => {
+    fs.readdir(p, { withFileTypes: true }, (er, children) => {
+        // any error other than ENOTDIR or ENOTSUP means it's not readable,
+        // or doesn't exist.  give up.
+        if (er) {
+            if (er.code === 'ENOENT')
+                return cb();
+            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
+                return cb(er);
+        }
+        if (er || !children.length)
+            return chown(p, uid, gid, cb);
+        let len = children.length;
+        let errState = null;
+        const then = (er) => {
+            /* c8 ignore start */
+            if (errState)
+                return;
+            /* c8 ignore stop */
+            if (er)
+                return cb((errState = er));
+            if (--len === 0)
+                return chown(p, uid, gid, cb);
+        };
+        for (const child of children) {
+            chownrKid(p, child, uid, gid, then);
+        }
+    });
+};
+const chownrKidSync = (p, child, uid, gid) => {
+    if (child.isDirectory())
+        chownrSync(path.resolve(p, child.name), uid, gid);
+    lchownSync(path.resolve(p, child.name), uid, gid);
+};
+export const chownrSync = (p, uid, gid) => {
+    let children;
+    try {
+        children = fs.readdirSync(p, { withFileTypes: true });
+    }
+    catch (er) {
+        const e = er;
+        if (e?.code === 'ENOENT')
+            return;
+        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
+            return lchownSync(p, uid, gid);
+        else
+            throw e;
+    }
+    for (const child of children) {
+        chownrKidSync(p, child, uid, gid);
+    }
+    return lchownSync(p, uid, gid);
+};
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/chownr/dist/esm/package.json b/node_modules/pacote/node_modules/chownr/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/chownr/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/chownr/package.json b/node_modules/pacote/node_modules/chownr/package.json
new file mode 100644
index 0000000000000..09aa6b2e2e576
--- /dev/null
+++ b/node_modules/pacote/node_modules/chownr/package.json
@@ -0,0 +1,69 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "name": "chownr",
+  "description": "like `chown -R`",
+  "version": "3.0.0",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/chownr.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "devDependencies": {
+    "@types/node": "^20.12.5",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.12"
+  },
+  "scripts": {
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "license": "BlueOak-1.0.0",
+  "engines": {
+    "node": ">=18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/node_modules/pacote/node_modules/glob/LICENSE b/node_modules/pacote/node_modules/glob/LICENSE
new file mode 100644
index 0000000000000..ec7df93329abf
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/glob.js b/node_modules/pacote/node_modules/glob/dist/commonjs/glob.js
new file mode 100644
index 0000000000000..e1339bbbcf57f
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/glob.js
@@ -0,0 +1,247 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Glob = void 0;
+const minimatch_1 = require("minimatch");
+const node_url_1 = require("node:url");
+const path_scurry_1 = require("path-scurry");
+const pattern_js_1 = require("./pattern.js");
+const walker_js_1 = require("./walker.js");
+// if no process global, just call it linux.
+// so we default to case-sensitive, / separators
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * An object that can perform glob pattern traversals.
+ */
+class Glob {
+    absolute;
+    cwd;
+    root;
+    dot;
+    dotRelative;
+    follow;
+    ignore;
+    magicalBraces;
+    mark;
+    matchBase;
+    maxDepth;
+    nobrace;
+    nocase;
+    nodir;
+    noext;
+    noglobstar;
+    pattern;
+    platform;
+    realpath;
+    scurry;
+    stat;
+    signal;
+    windowsPathsNoEscape;
+    withFileTypes;
+    includeChildMatches;
+    /**
+     * The options provided to the constructor.
+     */
+    opts;
+    /**
+     * An array of parsed immutable {@link Pattern} objects.
+     */
+    patterns;
+    /**
+     * All options are stored as properties on the `Glob` object.
+     *
+     * See {@link GlobOptions} for full options descriptions.
+     *
+     * Note that a previous `Glob` object can be passed as the
+     * `GlobOptions` to another `Glob` instantiation to re-use settings
+     * and caches with a new pattern.
+     *
+     * Traversal functions can be called multiple times to run the walk
+     * again.
+     */
+    constructor(pattern, opts) {
+        /* c8 ignore start */
+        if (!opts)
+            throw new TypeError('glob options required');
+        /* c8 ignore stop */
+        this.withFileTypes = !!opts.withFileTypes;
+        this.signal = opts.signal;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.dotRelative = !!opts.dotRelative;
+        this.nodir = !!opts.nodir;
+        this.mark = !!opts.mark;
+        if (!opts.cwd) {
+            this.cwd = '';
+        }
+        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
+            opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd);
+        }
+        this.cwd = opts.cwd || '';
+        this.root = opts.root;
+        this.magicalBraces = !!opts.magicalBraces;
+        this.nobrace = !!opts.nobrace;
+        this.noext = !!opts.noext;
+        this.realpath = !!opts.realpath;
+        this.absolute = opts.absolute;
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        this.noglobstar = !!opts.noglobstar;
+        this.matchBase = !!opts.matchBase;
+        this.maxDepth =
+            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
+        this.stat = !!opts.stat;
+        this.ignore = opts.ignore;
+        if (this.withFileTypes && this.absolute !== undefined) {
+            throw new Error('cannot set absolute and withFileTypes:true');
+        }
+        if (typeof pattern === 'string') {
+            pattern = [pattern];
+        }
+        this.windowsPathsNoEscape =
+            !!opts.windowsPathsNoEscape ||
+                opts.allowWindowsEscape ===
+                    false;
+        if (this.windowsPathsNoEscape) {
+            pattern = pattern.map(p => p.replace(/\\/g, '/'));
+        }
+        if (this.matchBase) {
+            if (opts.noglobstar) {
+                throw new TypeError('base matching requires globstar');
+            }
+            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
+        }
+        this.pattern = pattern;
+        this.platform = opts.platform || defaultPlatform;
+        this.opts = { ...opts, platform: this.platform };
+        if (opts.scurry) {
+            this.scurry = opts.scurry;
+            if (opts.nocase !== undefined &&
+                opts.nocase !== opts.scurry.nocase) {
+                throw new Error('nocase option contradicts provided scurry option');
+            }
+        }
+        else {
+            const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32
+                : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin
+                    : opts.platform ? path_scurry_1.PathScurryPosix
+                        : path_scurry_1.PathScurry;
+            this.scurry = new Scurry(this.cwd, {
+                nocase: opts.nocase,
+                fs: opts.fs,
+            });
+        }
+        this.nocase = this.scurry.nocase;
+        // If you do nocase:true on a case-sensitive file system, then
+        // we need to use regexps instead of strings for non-magic
+        // path portions, because statting `aBc` won't return results
+        // for the file `AbC` for example.
+        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
+        const mmo = {
+            // default nocase based on platform
+            ...opts,
+            dot: this.dot,
+            matchBase: this.matchBase,
+            nobrace: this.nobrace,
+            nocase: this.nocase,
+            nocaseMagicOnly,
+            nocomment: true,
+            noext: this.noext,
+            nonegate: true,
+            optimizationLevel: 2,
+            platform: this.platform,
+            windowsPathsNoEscape: this.windowsPathsNoEscape,
+            debug: !!this.opts.debug,
+        };
+        const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
+        const [matchSet, globParts] = mms.reduce((set, m) => {
+            set[0].push(...m.set);
+            set[1].push(...m.globParts);
+            return set;
+        }, [[], []]);
+        this.patterns = matchSet.map((set, i) => {
+            const g = globParts[i];
+            /* c8 ignore start */
+            if (!g)
+                throw new Error('invalid pattern object');
+            /* c8 ignore stop */
+            return new pattern_js_1.Pattern(set, g, 0, this.platform);
+        });
+    }
+    async walk() {
+        // Walkers always return array of Path objects, so we just have to
+        // coerce them into the right shape.  It will have already called
+        // realpath() if the option was set to do so, so we know that's cached.
+        // start out knowing the cwd, at least
+        return [
+            ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walk()),
+        ];
+    }
+    walkSync() {
+        return [
+            ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walkSync(),
+        ];
+    }
+    stream() {
+        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).stream();
+    }
+    streamSync() {
+        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).streamSync();
+    }
+    /**
+     * Default sync iteration function. Returns a Generator that
+     * iterates over the results.
+     */
+    iterateSync() {
+        return this.streamSync()[Symbol.iterator]();
+    }
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    /**
+     * Default async iteration function. Returns an AsyncGenerator that
+     * iterates over the results.
+     */
+    iterate() {
+        return this.stream()[Symbol.asyncIterator]();
+    }
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+}
+exports.Glob = Glob;
+//# sourceMappingURL=glob.js.map
\ No newline at end of file
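The `Glob` class added above is the vendored CommonJS build shipped with pacote's copy of glob; its constructor parses every pattern through minimatch once, and `walk()`, `stream()`, and the iterators all reuse those parsed patterns. A minimal usage sketch of the public API this file backs, for orientation only (illustrative, not part of the patch):

    // Illustrative sketch only, assuming the published `glob` package API.
    const { Glob } = require('glob')

    const g = new Glob('**/*.js', { cwd: '/some/project', nodir: true })
    g.walk().then(files => console.log(files.length, 'matches'))   // Promise<string[]>
    // the same instance is also sync- and async-iterable:
    // for (const f of g) { ... }   or   for await (const f of g) { ... }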
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/pacote/node_modules/glob/dist/commonjs/has-magic.js
new file mode 100644
index 0000000000000..0918bd57e0f1c
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/has-magic.js
@@ -0,0 +1,27 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.hasMagic = void 0;
+const minimatch_1 = require("minimatch");
+/**
+ * Return true if the patterns provided contain any magic glob characters,
+ * given the options provided.
+ *
+ * Brace expansion is not considered "magic" unless the `magicalBraces` option
+ * is set, as brace expansion just turns one string into an array of strings.
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
+ * `'xby'` both do not contain any magic glob characters, and it's treated the
+ * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
+ * is in the options, brace expansion _is_ treated as a pattern having magic.
+ */
+const hasMagic = (pattern, options = {}) => {
+    if (!Array.isArray(pattern)) {
+        pattern = [pattern];
+    }
+    for (const p of pattern) {
+        if (new minimatch_1.Minimatch(p, options).hasMagic())
+            return true;
+    }
+    return false;
+};
+exports.hasMagic = hasMagic;
+//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
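The brace-expansion caveat in the `hasMagic` doc comment above is easiest to see with a couple of concrete calls (illustrative sketch, assuming the published glob exports):

    // Illustrative only; not part of this patch.
    const { hasMagic } = require('glob')

    hasMagic('x{a,b}y')                            // false: just 'xay' and 'xby', no magic chars
    hasMagic('x{a,b}y', { magicalBraces: true })   // true: braces themselves now count as magic
    hasMagic('src/**/*.js')                        // true: '*' and '**' are magic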
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/ignore.js b/node_modules/pacote/node_modules/glob/dist/commonjs/ignore.js
new file mode 100644
index 0000000000000..5f1fde0680dea
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/ignore.js
@@ -0,0 +1,119 @@
+"use strict";
+// give it a pattern, and it'll be able to tell you if
+// a given path should be ignored.
+// Ignoring a path ignores its children if the pattern ends in /**
+// Ignores are always parsed in dot:true mode
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Ignore = void 0;
+const minimatch_1 = require("minimatch");
+const pattern_js_1 = require("./pattern.js");
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * Class used to process ignored patterns
+ */
+class Ignore {
+    relative;
+    relativeChildren;
+    absolute;
+    absoluteChildren;
+    platform;
+    mmopts;
+    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
+        this.relative = [];
+        this.absolute = [];
+        this.relativeChildren = [];
+        this.absoluteChildren = [];
+        this.platform = platform;
+        this.mmopts = {
+            dot: true,
+            nobrace,
+            nocase,
+            noext,
+            noglobstar,
+            optimizationLevel: 2,
+            platform,
+            nocomment: true,
+            nonegate: true,
+        };
+        for (const ign of ignored)
+            this.add(ign);
+    }
+    add(ign) {
+        // this is a little weird, but it gives us a clean set of optimized
+        // minimatch matchers, without getting tripped up if one of them
+        // ends in /** inside a brace section, and it's only inefficient at
+        // the start of the walk, not along it.
+        // It'd be nice if the Pattern class just had a .test() method, but
+        // handling globstars is a bit of a pita, and that code already lives
+        // in minimatch anyway.
+        // Another way would be if maybe Minimatch could take its set/globParts
+        // as an option, and then we could at least just use Pattern to test
+        // for absolute-ness.
+        // Yet another way, Minimatch could take an array of glob strings, and
+        // a cwd option, and do the right thing.
+        const mm = new minimatch_1.Minimatch(ign, this.mmopts);
+        for (let i = 0; i < mm.set.length; i++) {
+            const parsed = mm.set[i];
+            const globParts = mm.globParts[i];
+            /* c8 ignore start */
+            if (!parsed || !globParts) {
+                throw new Error('invalid pattern object');
+            }
+            // strip off leading ./ portions
+            // https://github.com/isaacs/node-glob/issues/570
+            while (parsed[0] === '.' && globParts[0] === '.') {
+                parsed.shift();
+                globParts.shift();
+            }
+            /* c8 ignore stop */
+            const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform);
+            const m = new minimatch_1.Minimatch(p.globString(), this.mmopts);
+            const children = globParts[globParts.length - 1] === '**';
+            const absolute = p.isAbsolute();
+            if (absolute)
+                this.absolute.push(m);
+            else
+                this.relative.push(m);
+            if (children) {
+                if (absolute)
+                    this.absoluteChildren.push(m);
+                else
+                    this.relativeChildren.push(m);
+            }
+        }
+    }
+    ignored(p) {
+        const fullpath = p.fullpath();
+        const fullpaths = `${fullpath}/`;
+        const relative = p.relative() || '.';
+        const relatives = `${relative}/`;
+        for (const m of this.relative) {
+            if (m.match(relative) || m.match(relatives))
+                return true;
+        }
+        for (const m of this.absolute) {
+            if (m.match(fullpath) || m.match(fullpaths))
+                return true;
+        }
+        return false;
+    }
+    childrenIgnored(p) {
+        const fullpath = p.fullpath() + '/';
+        const relative = (p.relative() || '.') + '/';
+        for (const m of this.relativeChildren) {
+            if (m.match(relative))
+                return true;
+        }
+        for (const m of this.absoluteChildren) {
+            if (m.match(fullpath))
+                return true;
+        }
+        return false;
+    }
+}
+exports.Ignore = Ignore;
+//# sourceMappingURL=ignore.js.map
\ No newline at end of file
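The `Ignore` class above is normally driven through glob's `ignore` option rather than constructed directly; note that a pattern ending in `/**` also ignores everything beneath the matched directory (the `childrenIgnored` path). A sketch under that assumption (illustrative only):

    // Illustrative only; exercises Ignore indirectly via the public API.
    const { globSync } = require('glob')

    const files = globSync('**/*.js', {
        // 'node_modules/**' ends in /**, so node_modules and all of its
        // children are skipped without ever being walked.
        ignore: ['node_modules/**', '**/*.min.js'],
    })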
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/index.js b/node_modules/pacote/node_modules/glob/dist/commonjs/index.js
new file mode 100644
index 0000000000000..151495d170efa
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/index.js
@@ -0,0 +1,68 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0;
+exports.globStreamSync = globStreamSync;
+exports.globStream = globStream;
+exports.globSync = globSync;
+exports.globIterateSync = globIterateSync;
+exports.globIterate = globIterate;
+const minimatch_1 = require("minimatch");
+const glob_js_1 = require("./glob.js");
+const has_magic_js_1 = require("./has-magic.js");
+var minimatch_2 = require("minimatch");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } });
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } });
+var glob_js_2 = require("./glob.js");
+Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } });
+var has_magic_js_2 = require("./has-magic.js");
+Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } });
+var ignore_js_1 = require("./ignore.js");
+Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } });
+function globStreamSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).streamSync();
+}
+function globStream(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).stream();
+}
+function globSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).walkSync();
+}
+async function glob_(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).walk();
+}
+function globIterateSync(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).iterateSync();
+}
+function globIterate(pattern, options = {}) {
+    return new glob_js_1.Glob(pattern, options).iterate();
+}
+// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
+exports.streamSync = globStreamSync;
+exports.stream = Object.assign(globStream, { sync: globStreamSync });
+exports.iterateSync = globIterateSync;
+exports.iterate = Object.assign(globIterate, {
+    sync: globIterateSync,
+});
+exports.sync = Object.assign(globSync, {
+    stream: globStreamSync,
+    iterate: globIterateSync,
+});
+exports.glob = Object.assign(glob_, {
+    glob: glob_,
+    globSync,
+    sync: exports.sync,
+    globStream,
+    stream: exports.stream,
+    globStreamSync,
+    streamSync: exports.streamSync,
+    globIterate,
+    iterate: exports.iterate,
+    globIterateSync,
+    iterateSync: exports.iterateSync,
+    Glob: glob_js_1.Glob,
+    hasMagic: has_magic_js_1.hasMagic,
+    escape: minimatch_1.escape,
+    unescape: minimatch_1.unescape,
+});
+exports.glob.glob = exports.glob;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
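index.js mainly re-exports and assembles the aliased callable, so `glob`, `glob.sync`, `glob.stream`, and `glob.iterate` all point at the same `Glob`-backed helpers. A short sketch of those aliases (illustrative only, not part of the patch):

    // Illustrative only; these are the aliases wired up above.
    const { glob } = require('glob')

    glob('**/*.md').then(files => console.log(files))   // async form, Promise<string[]>
    const same = glob.sync('**/*.md')                    // synchronous array
    const stream = glob.stream('**/*.md')                // Minipass stream of path strings
    stream.on('data', f => console.log('match:', f))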
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/package.json b/node_modules/pacote/node_modules/glob/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/pattern.js b/node_modules/pacote/node_modules/glob/dist/commonjs/pattern.js
new file mode 100644
index 0000000000000..f0de35fb5bed9
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/pattern.js
@@ -0,0 +1,219 @@
+"use strict";
+// this is just a very light wrapper around 2 arrays with an offset index
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Pattern = void 0;
+const minimatch_1 = require("minimatch");
+const isPatternList = (pl) => pl.length >= 1;
+const isGlobList = (gl) => gl.length >= 1;
+/**
+ * An immutable-ish view on an array of glob parts and their parsed
+ * results
+ */
+class Pattern {
+    #patternList;
+    #globList;
+    #index;
+    length;
+    #platform;
+    #rest;
+    #globString;
+    #isDrive;
+    #isUNC;
+    #isAbsolute;
+    #followGlobstar = true;
+    constructor(patternList, globList, index, platform) {
+        if (!isPatternList(patternList)) {
+            throw new TypeError('empty pattern list');
+        }
+        if (!isGlobList(globList)) {
+            throw new TypeError('empty glob list');
+        }
+        if (globList.length !== patternList.length) {
+            throw new TypeError('mismatched pattern list and glob list lengths');
+        }
+        this.length = patternList.length;
+        if (index < 0 || index >= this.length) {
+            throw new TypeError('index out of range');
+        }
+        this.#patternList = patternList;
+        this.#globList = globList;
+        this.#index = index;
+        this.#platform = platform;
+        // normalize root entries of absolute patterns on initial creation.
+        if (this.#index === 0) {
+            // c: => ['c:/']
+            // C:/ => ['C:/']
+            // C:/x => ['C:/', 'x']
+            // //host/share => ['//host/share/']
+            // //host/share/ => ['//host/share/']
+            // //host/share/x => ['//host/share/', 'x']
+            // /etc => ['/', 'etc']
+            // / => ['/']
+            if (this.isUNC()) {
+                // '' / '' / 'host' / 'share'
+                const [p0, p1, p2, p3, ...prest] = this.#patternList;
+                const [g0, g1, g2, g3, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = [p0, p1, p2, p3, ''].join('/');
+                const g = [g0, g1, g2, g3, ''].join('/');
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+            else if (this.isDrive() || this.isAbsolute()) {
+                const [p1, ...prest] = this.#patternList;
+                const [g1, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = p1 + '/';
+                const g = g1 + '/';
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+        }
+    }
+    /**
+     * The first entry in the parsed list of patterns
+     */
+    pattern() {
+        return this.#patternList[this.#index];
+    }
+    /**
+     * true if pattern() returns a string
+     */
+    isString() {
+        return typeof this.#patternList[this.#index] === 'string';
+    }
+    /**
+     * true if pattern() returns GLOBSTAR
+     */
+    isGlobstar() {
+        return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
+    }
+    /**
+     * true if pattern() returns a regexp
+     */
+    isRegExp() {
+        return this.#patternList[this.#index] instanceof RegExp;
+    }
+    /**
+     * The /-joined set of glob parts that make up this pattern
+     */
+    globString() {
+        return (this.#globString =
+            this.#globString ||
+                (this.#index === 0 ?
+                    this.isAbsolute() ?
+                        this.#globList[0] + this.#globList.slice(1).join('/')
+                        : this.#globList.join('/')
+                    : this.#globList.slice(this.#index).join('/')));
+    }
+    /**
+     * true if there are more pattern parts after this one
+     */
+    hasMore() {
+        return this.length > this.#index + 1;
+    }
+    /**
+     * The rest of the pattern after this part, or null if this is the end
+     */
+    rest() {
+        if (this.#rest !== undefined)
+            return this.#rest;
+        if (!this.hasMore())
+            return (this.#rest = null);
+        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
+        this.#rest.#isAbsolute = this.#isAbsolute;
+        this.#rest.#isUNC = this.#isUNC;
+        this.#rest.#isDrive = this.#isDrive;
+        return this.#rest;
+    }
+    /**
+     * true if the pattern represents a //unc/path/ on windows
+     */
+    isUNC() {
+        const pl = this.#patternList;
+        return this.#isUNC !== undefined ?
+            this.#isUNC
+            : (this.#isUNC =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    pl[0] === '' &&
+                    pl[1] === '' &&
+                    typeof pl[2] === 'string' &&
+                    !!pl[2] &&
+                    typeof pl[3] === 'string' &&
+                    !!pl[3]);
+    }
+    // pattern like C:/...
+    // split = ['C:', ...]
+    // XXX: would be nice to handle patterns like `c:*` to test the cwd
+    // in c: for *, but I don't know of a way to even figure out what that
+    // cwd is without actually chdir'ing into it?
+    /**
+     * True if the pattern starts with a drive letter on Windows
+     */
+    isDrive() {
+        const pl = this.#patternList;
+        return this.#isDrive !== undefined ?
+            this.#isDrive
+            : (this.#isDrive =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    this.length > 1 &&
+                    typeof pl[0] === 'string' &&
+                    /^[a-z]:$/i.test(pl[0]));
+    }
+    // pattern = '/' or '/...' or '/x/...'
+    // split = ['', ''] or ['', ...] or ['', 'x', ...]
+    // Drive and UNC both considered absolute on windows
+    /**
+     * True if the pattern is rooted on an absolute path
+     */
+    isAbsolute() {
+        const pl = this.#patternList;
+        return this.#isAbsolute !== undefined ?
+            this.#isAbsolute
+            : (this.#isAbsolute =
+                (pl[0] === '' && pl.length > 1) ||
+                    this.isDrive() ||
+                    this.isUNC());
+    }
+    /**
+     * consume the root of the pattern, and return it
+     */
+    root() {
+        const p = this.#patternList[0];
+        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
+            p
+            : '';
+    }
+    /**
+     * Check to see if the current globstar pattern is allowed to follow
+     * a symbolic link.
+     */
+    checkFollowGlobstar() {
+        return !(this.#index === 0 ||
+            !this.isGlobstar() ||
+            !this.#followGlobstar);
+    }
+    /**
+     * Mark that the current globstar pattern is following a symbolic link
+     */
+    markFollowGlobstar() {
+        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
+            return false;
+        this.#followGlobstar = false;
+        return true;
+    }
+}
+exports.Pattern = Pattern;
+//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/processor.js b/node_modules/pacote/node_modules/glob/dist/commonjs/processor.js
new file mode 100644
index 0000000000000..ee3bb4397e0b2
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/processor.js
@@ -0,0 +1,301 @@
+"use strict";
+// synchronous utility for filtering entries and calculating subwalks
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
+const minimatch_1 = require("minimatch");
+/**
+ * A cache of which patterns have been processed for a given Path
+ */
+class HasWalkedCache {
+    store;
+    constructor(store = new Map()) {
+        this.store = store;
+    }
+    copy() {
+        return new HasWalkedCache(new Map(this.store));
+    }
+    hasWalked(target, pattern) {
+        return this.store.get(target.fullpath())?.has(pattern.globString());
+    }
+    storeWalked(target, pattern) {
+        const fullpath = target.fullpath();
+        const cached = this.store.get(fullpath);
+        if (cached)
+            cached.add(pattern.globString());
+        else
+            this.store.set(fullpath, new Set([pattern.globString()]));
+    }
+}
+exports.HasWalkedCache = HasWalkedCache;
+/**
+ * A record of which paths have been matched in a given walk step,
+ * and whether they only are considered a match if they are a directory,
+ * and whether their absolute or relative path should be returned.
+ */
+class MatchRecord {
+    store = new Map();
+    add(target, absolute, ifDir) {
+        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
+        const current = this.store.get(target);
+        this.store.set(target, current === undefined ? n : n & current);
+    }
+    // match, absolute, ifdir
+    entries() {
+        return [...this.store.entries()].map(([path, n]) => [
+            path,
+            !!(n & 2),
+            !!(n & 1),
+        ]);
+    }
+}
+exports.MatchRecord = MatchRecord;
+/**
+ * A collection of patterns that must be processed in a subsequent step
+ * for a given path.
+ */
+class SubWalks {
+    store = new Map();
+    add(target, pattern) {
+        if (!target.canReaddir()) {
+            return;
+        }
+        const subs = this.store.get(target);
+        if (subs) {
+            if (!subs.find(p => p.globString() === pattern.globString())) {
+                subs.push(pattern);
+            }
+        }
+        else
+            this.store.set(target, [pattern]);
+    }
+    get(target) {
+        const subs = this.store.get(target);
+        /* c8 ignore start */
+        if (!subs) {
+            throw new Error('attempting to walk unknown path');
+        }
+        /* c8 ignore stop */
+        return subs;
+    }
+    entries() {
+        return this.keys().map(k => [k, this.store.get(k)]);
+    }
+    keys() {
+        return [...this.store.keys()].filter(t => t.canReaddir());
+    }
+}
+exports.SubWalks = SubWalks;
+/**
+ * The class that processes patterns for a given path.
+ *
+ * Handles child entry filtering, and determining whether a path's
+ * directory contents must be read.
+ */
+class Processor {
+    hasWalkedCache;
+    matches = new MatchRecord();
+    subwalks = new SubWalks();
+    patterns;
+    follow;
+    dot;
+    opts;
+    constructor(opts, hasWalkedCache) {
+        this.opts = opts;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.hasWalkedCache =
+            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
+    }
+    processPatterns(target, patterns) {
+        this.patterns = patterns;
+        const processingSet = patterns.map(p => [target, p]);
+        // map of paths to the magic-starting subwalks they need to walk
+        // first item in patterns is the filter
+        for (let [t, pattern] of processingSet) {
+            this.hasWalkedCache.storeWalked(t, pattern);
+            const root = pattern.root();
+            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
+            // start absolute patterns at root
+            if (root) {
+                t = t.resolve(root === '/' && this.opts.root !== undefined ?
+                    this.opts.root
+                    : root);
+                const rest = pattern.rest();
+                if (!rest) {
+                    this.matches.add(t, true, false);
+                    continue;
+                }
+                else {
+                    pattern = rest;
+                }
+            }
+            if (t.isENOENT())
+                continue;
+            let p;
+            let rest;
+            let changed = false;
+            while (typeof (p = pattern.pattern()) === 'string' &&
+                (rest = pattern.rest())) {
+                const c = t.resolve(p);
+                t = c;
+                pattern = rest;
+                changed = true;
+            }
+            p = pattern.pattern();
+            rest = pattern.rest();
+            if (changed) {
+                if (this.hasWalkedCache.hasWalked(t, pattern))
+                    continue;
+                this.hasWalkedCache.storeWalked(t, pattern);
+            }
+            // now we have either a final string for a known entry,
+            // more strings for an unknown entry,
+            // or a pattern starting with magic, mounted on t.
+            if (typeof p === 'string') {
+                // must not be final entry, otherwise we would have
+                // concatenated it earlier.
+                const ifDir = p === '..' || p === '' || p === '.';
+                this.matches.add(t.resolve(p), absolute, ifDir);
+                continue;
+            }
+            else if (p === minimatch_1.GLOBSTAR) {
+                // if no rest, match and subwalk pattern
+                // if rest, process rest and subwalk pattern
+                // if it's a symlink, but we didn't get here by way of a
+                // globstar match (meaning it's the first time THIS globstar
+                // has traversed a symlink), then we follow it. Otherwise, stop.
+                if (!t.isSymbolicLink() ||
+                    this.follow ||
+                    pattern.checkFollowGlobstar()) {
+                    this.subwalks.add(t, pattern);
+                }
+                const rp = rest?.pattern();
+                const rrest = rest?.rest();
+                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
+                    // only HAS to be a dir if it ends in **/ or **/.
+                    // but ending in ** will match files as well.
+                    this.matches.add(t, absolute, rp === '' || rp === '.');
+                }
+                else {
+                    if (rp === '..') {
+                        // this would mean you're matching **/.. at the fs root,
+                        // and no thanks, I'm not gonna test that specific case.
+                        /* c8 ignore start */
+                        const tp = t.parent || t;
+                        /* c8 ignore stop */
+                        if (!rrest)
+                            this.matches.add(tp, absolute, true);
+                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
+                            this.subwalks.add(tp, rrest);
+                        }
+                    }
+                }
+            }
+            else if (p instanceof RegExp) {
+                this.subwalks.add(t, pattern);
+            }
+        }
+        return this;
+    }
+    subwalkTargets() {
+        return this.subwalks.keys();
+    }
+    child() {
+        return new Processor(this.opts, this.hasWalkedCache);
+    }
+    // return a new Processor containing the subwalks for each
+    // child entry, and a set of matches, and
+    // a hasWalkedCache that's a copy of this one
+    // then we're going to call
+    filterEntries(parent, entries) {
+        const patterns = this.subwalks.get(parent);
+        // put matches and entry walks into the results processor
+        const results = this.child();
+        for (const e of entries) {
+            for (const pattern of patterns) {
+                const absolute = pattern.isAbsolute();
+                const p = pattern.pattern();
+                const rest = pattern.rest();
+                if (p === minimatch_1.GLOBSTAR) {
+                    results.testGlobstar(e, pattern, rest, absolute);
+                }
+                else if (p instanceof RegExp) {
+                    results.testRegExp(e, p, rest, absolute);
+                }
+                else {
+                    results.testString(e, p, rest, absolute);
+                }
+            }
+        }
+        return results;
+    }
+    testGlobstar(e, pattern, rest, absolute) {
+        if (this.dot || !e.name.startsWith('.')) {
+            if (!pattern.hasMore()) {
+                this.matches.add(e, absolute, false);
+            }
+            if (e.canReaddir()) {
+                // if we're in follow mode or it's not a symlink, just keep
+                // testing the same pattern. If there's more after the globstar,
+                // then this symlink consumes the globstar. If not, then we can
+                // follow at most ONE symlink along the way, so we mark it, which
+                // also checks to ensure that it wasn't already marked.
+                if (this.follow || !e.isSymbolicLink()) {
+                    this.subwalks.add(e, pattern);
+                }
+                else if (e.isSymbolicLink()) {
+                    if (rest && pattern.checkFollowGlobstar()) {
+                        this.subwalks.add(e, rest);
+                    }
+                    else if (pattern.markFollowGlobstar()) {
+                        this.subwalks.add(e, pattern);
+                    }
+                }
+            }
+        }
+        // if the NEXT thing matches this entry, then also add
+        // the rest.
+        if (rest) {
+            const rp = rest.pattern();
+            if (typeof rp === 'string' &&
+                // dots and empty were handled already
+                rp !== '..' &&
+                rp !== '' &&
+                rp !== '.') {
+                this.testString(e, rp, rest.rest(), absolute);
+            }
+            else if (rp === '..') {
+                /* c8 ignore start */
+                const ep = e.parent || e;
+                /* c8 ignore stop */
+                this.subwalks.add(ep, rest);
+            }
+            else if (rp instanceof RegExp) {
+                this.testRegExp(e, rp, rest.rest(), absolute);
+            }
+        }
+    }
+    testRegExp(e, p, rest, absolute) {
+        if (!p.test(e.name))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+    testString(e, p, rest, absolute) {
+        // should never happen?
+        if (!e.isNamed(p))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+}
+exports.Processor = Processor;
+//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/walker.js b/node_modules/pacote/node_modules/glob/dist/commonjs/walker.js
new file mode 100644
index 0000000000000..cb15946d9a852
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/commonjs/walker.js
@@ -0,0 +1,387 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0;
+/**
+ * Single-use utility classes to provide functionality to the {@link Glob}
+ * methods.
+ *
+ * @module
+ */
+const minipass_1 = require("minipass");
+const ignore_js_1 = require("./ignore.js");
+const processor_js_1 = require("./processor.js");
+const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts)
+    : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts)
+        : ignore;
+/**
+ * basic walking utilities that all the glob walker types use
+ */
+class GlobUtil {
+    path;
+    patterns;
+    opts;
+    seen = new Set();
+    paused = false;
+    aborted = false;
+    #onResume = [];
+    #ignore;
+    #sep;
+    signal;
+    maxDepth;
+    includeChildMatches;
+    constructor(patterns, path, opts) {
+        this.patterns = patterns;
+        this.path = path;
+        this.opts = opts;
+        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        if (opts.ignore || !this.includeChildMatches) {
+            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
+            if (!this.includeChildMatches &&
+                typeof this.#ignore.add !== 'function') {
+                const m = 'cannot ignore child matches, ignore lacks add() method.';
+                throw new Error(m);
+            }
+        }
+        // ignore, always set with maxDepth, but it's optional on the
+        // GlobOptions type
+        /* c8 ignore start */
+        this.maxDepth = opts.maxDepth || Infinity;
+        /* c8 ignore stop */
+        if (opts.signal) {
+            this.signal = opts.signal;
+            this.signal.addEventListener('abort', () => {
+                this.#onResume.length = 0;
+            });
+        }
+    }
+    #ignored(path) {
+        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
+    }
+    #childrenIgnored(path) {
+        return !!this.#ignore?.childrenIgnored?.(path);
+    }
+    // backpressure mechanism
+    pause() {
+        this.paused = true;
+    }
+    resume() {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore stop */
+        this.paused = false;
+        let fn = undefined;
+        while (!this.paused && (fn = this.#onResume.shift())) {
+            fn();
+        }
+    }
+    onResume(fn) {
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore start */
+        if (!this.paused) {
+            fn();
+        }
+        else {
+            /* c8 ignore stop */
+            this.#onResume.push(fn);
+        }
+    }
+    // do the requisite realpath/stat checking, and return the path
+    // to add or undefined to filter it out.
+    async matchCheck(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || (await e.realpath());
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? await e.lstat() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = await s.realpath();
+            /* c8 ignore start */
+            if (target && (target.isUnknown() || this.opts.stat)) {
+                await target.lstat();
+            }
+            /* c8 ignore stop */
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchCheckTest(e, ifDir) {
+        return (e &&
+            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
+            (!ifDir || e.canReaddir()) &&
+            (!this.opts.nodir || !e.isDirectory()) &&
+            (!this.opts.nodir ||
+                !this.opts.follow ||
+                !e.isSymbolicLink() ||
+                !e.realpathCached()?.isDirectory()) &&
+            !this.#ignored(e)) ?
+            e
+            : undefined;
+    }
+    matchCheckSync(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || e.realpathSync();
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? e.lstatSync() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = s.realpathSync();
+            if (target && (target?.isUnknown() || this.opts.stat)) {
+                target.lstatSync();
+            }
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchFinish(e, absolute) {
+        if (this.#ignored(e))
+            return;
+        // we know we have an ignore if this is false, but TS doesn't
+        if (!this.includeChildMatches && this.#ignore?.add) {
+            const ign = `${e.relativePosix()}/**`;
+            this.#ignore.add(ign);
+        }
+        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
+        this.seen.add(e);
+        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
+        // ok, we have what we need!
+        if (this.opts.withFileTypes) {
+            this.matchEmit(e);
+        }
+        else if (abs) {
+            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
+            this.matchEmit(abs + mark);
+        }
+        else {
+            const rel = this.opts.posix ? e.relativePosix() : e.relative();
+            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
+                '.' + this.#sep
+                : '';
+            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
+        }
+    }
+    async match(e, absolute, ifDir) {
+        const p = await this.matchCheck(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    matchSync(e, absolute, ifDir) {
+        const p = this.matchCheckSync(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    walkCB(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb);
+    }
+    walkCB2(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const childrenCached = t.readdirCached();
+            if (t.calledReaddir())
+                this.walkCB3(t, childrenCached, processor, next);
+            else {
+                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
+            }
+        }
+        next();
+    }
+    walkCB3(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+    walkCBSync(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb);
+    }
+    walkCB2Sync(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const children = t.readdirSync();
+            this.walkCB3Sync(t, children, processor, next);
+        }
+        next();
+    }
+    walkCB3Sync(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2Sync(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+}
+exports.GlobUtil = GlobUtil;
+class GlobWalker extends GlobUtil {
+    matches = new Set();
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+    }
+    matchEmit(e) {
+        this.matches.add(e);
+    }
+    async walk() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            await this.path.lstat();
+        }
+        await new Promise((res, rej) => {
+            this.walkCB(this.path, this.patterns, () => {
+                if (this.signal?.aborted) {
+                    rej(this.signal.reason);
+                }
+                else {
+                    res(this.matches);
+                }
+            });
+        });
+        return this.matches;
+    }
+    walkSync() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        // nothing for the callback to do, because this never pauses
+        this.walkCBSync(this.path, this.patterns, () => {
+            if (this.signal?.aborted)
+                throw this.signal.reason;
+        });
+        return this.matches;
+    }
+}
+exports.GlobWalker = GlobWalker;
+class GlobStream extends GlobUtil {
+    results;
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+        this.results = new minipass_1.Minipass({
+            signal: this.signal,
+            objectMode: true,
+        });
+        this.results.on('drain', () => this.resume());
+        this.results.on('resume', () => this.resume());
+    }
+    matchEmit(e) {
+        this.results.write(e);
+        if (!this.results.flowing)
+            this.pause();
+    }
+    stream() {
+        const target = this.path;
+        if (target.isUnknown()) {
+            target.lstat().then(() => {
+                this.walkCB(target, this.patterns, () => this.results.end());
+            });
+        }
+        else {
+            this.walkCB(target, this.patterns, () => this.results.end());
+        }
+        return this.results;
+    }
+    streamSync() {
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        this.walkCBSync(this.path, this.patterns, () => this.results.end());
+        return this.results;
+    }
+}
+exports.GlobStream = GlobStream;
+//# sourceMappingURL=walker.js.map
\ No newline at end of file
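`GlobWalker` collects results into a Set while `GlobStream` writes them to a Minipass stream and uses `pause()`/`resume()` for backpressure; both are normally reached through the `Glob` methods rather than instantiated directly. A sketch of the streaming path via the public entry point (illustrative only):

    // Illustrative only; drives GlobStream through the public globStream().
    const { globStream } = require('glob')

    const stream = globStream('**/*.ts', { cwd: process.cwd() })
    stream.on('data', path => console.log('match:', path))
    stream.on('end', () => console.log('walk finished'))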
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/bin.d.mts b/node_modules/pacote/node_modules/glob/dist/esm/bin.d.mts
new file mode 100644
index 0000000000000..77298e4770817
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/bin.d.mts
@@ -0,0 +1,3 @@
+#!/usr/bin/env node
+export {};
+//# sourceMappingURL=bin.d.mts.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/bin.mjs b/node_modules/pacote/node_modules/glob/dist/esm/bin.mjs
new file mode 100755
index 0000000000000..553bb79303d90
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/bin.mjs
@@ -0,0 +1,276 @@
+#!/usr/bin/env node
+import { foregroundChild } from 'foreground-child';
+import { existsSync } from 'fs';
+import { jack } from 'jackspeak';
+import { loadPackageJson } from 'package-json-from-dist';
+import { join } from 'path';
+import { globStream } from './index.js';
+const { version } = loadPackageJson(import.meta.url, '../package.json');
+const j = jack({
+    usage: 'glob [options] [<pattern> [<pattern> ...]]',
+})
+    .description(`
+    Glob v${version}
+
+    Expand the positional glob expression arguments into any matching file
+    system paths found.
+  `)
+    .opt({
+    cmd: {
+        short: 'c',
+        hint: 'command',
+        description: `Run the command provided, passing the glob expression
+                    matches as arguments.`,
+    },
+})
+    .opt({
+    default: {
+        short: 'p',
+        hint: 'pattern',
+        description: `If no positional arguments are provided, glob will use
+                    this pattern`,
+    },
+})
+    .flag({
+    all: {
+        short: 'A',
+        description: `By default, the glob cli command will not expand any
+                    arguments that are an exact match to a file on disk.
+
+                    This prevents double-expanding, in case the shell expands
+                    an argument whose filename is a glob expression.
+
+                    For example, if 'app/*.ts' would match 'app/[id].ts', then
+                    on Windows powershell or cmd.exe, 'glob app/*.ts' will
+                    expand to 'app/[id].ts', as expected. However, in posix
+                    shells such as bash or zsh, the shell will first expand
+                    'app/*.ts' to a list of filenames. Then glob will look
+                    for a file matching 'app/[id].ts' (ie, 'app/i.ts' or
+                    'app/d.ts'), which is unexpected.
+
+                    Setting '--all' prevents this behavior, causing glob
+                    to treat ALL patterns as glob expressions to be expanded,
+                    even if they are an exact match to a file on disk.
+
+                    When setting this option, be sure to enquote arguments
+                    so that the shell will not expand them prior to passing
+                    them to the glob command process.
+      `,
+    },
+    absolute: {
+        short: 'a',
+        description: 'Expand to absolute paths',
+    },
+    'dot-relative': {
+        short: 'd',
+        description: `Prepend './' on relative matches`,
+    },
+    mark: {
+        short: 'm',
+        description: `Append a / on any directories matched`,
+    },
+    posix: {
+        short: 'x',
+        description: `Always resolve to posix style paths, using '/' as the
+                    directory separator, even on Windows. Drive letter
+                    absolute matches on Windows will be expanded to their
+                    full resolved UNC paths, eg instead of 'C:\\foo\\bar',
+                    it will expand to '//?/C:/foo/bar'.
+      `,
+    },
+    follow: {
+        short: 'f',
+        description: `Follow symlinked directories when expanding '**'`,
+    },
+    realpath: {
+        short: 'R',
+        description: `Call 'fs.realpath' on all of the results. In the case
+                    of an entry that cannot be resolved, the entry is
+                    omitted. This incurs a slight performance penalty, of
+                    course, because of the added system calls.`,
+    },
+    stat: {
+        short: 's',
+        description: `Call 'fs.lstat' on all entries, whether required or not
+                    to determine if it's a valid match.`,
+    },
+    'match-base': {
+        short: 'b',
+        description: `Perform a basename-only match if the pattern does not
+                    contain any slash characters. That is, '*.js' would be
+                    treated as equivalent to '**/*.js', matching js files
+                    in all directories.
+      `,
+    },
+    dot: {
+        description: `Allow patterns to match files/directories that start
+                    with '.', even if the pattern does not start with '.'
+      `,
+    },
+    nobrace: {
+        description: 'Do not expand {...} patterns',
+    },
+    nocase: {
+        description: `Perform a case-insensitive match. This defaults to
+                    'true' on macOS and Windows platforms, and false on
+                    all others.
+
+                    Note: 'nocase' should only be explicitly set when it is
+                    known that the filesystem's case sensitivity differs
+                    from the platform default. If set 'true' on
+                    case-insensitive file systems, then the walk may return
+                    more or less results than expected.
+      `,
+    },
+    nodir: {
+        description: `Do not match directories, only files.
+
+                    Note: to *only* match directories, append a '/' at the
+                    end of the pattern.
+      `,
+    },
+    noext: {
+        description: `Do not expand extglob patterns, such as '+(a|b)'`,
+    },
+    noglobstar: {
+        description: `Do not expand '**' against multiple path portions.
+                    Ie, treat it as a normal '*' instead.`,
+    },
+    'windows-path-no-escape': {
+        description: `Use '\\' as a path separator *only*, and *never* as an
+                    escape character. If set, all '\\' characters are
+                    replaced with '/' in the pattern.`,
+    },
+})
+    .num({
+    'max-depth': {
+        short: 'D',
+        description: `Maximum depth to traverse from the current
+                    working directory`,
+    },
+})
+    .opt({
+    cwd: {
+        short: 'C',
+        description: 'Current working directory to execute/match in',
+        default: process.cwd(),
+    },
+    root: {
+        short: 'r',
+        description: `A string path resolved against the 'cwd', which is
+                    used as the starting point for absolute patterns that
+                    start with '/' (but not drive letters or UNC paths
+                    on Windows).
+
+                    Note that this *doesn't* necessarily limit the walk to
+                    the 'root' directory, and doesn't affect the cwd
+                    starting point for non-absolute patterns. A pattern
+                    containing '..' will still be able to traverse out of
+                    the root directory, if it is not an actual root directory
+                    on the filesystem, and any non-absolute patterns will
+                    still be matched in the 'cwd'.
+
+                    To start absolute and non-absolute patterns in the same
+                    path, you can use '--root=' to set it to the empty
+                    string. However, be aware that on Windows systems, a
+                    pattern like 'x:/*' or '//host/share/*' will *always*
+                    start in the 'x:/' or '//host/share/' directory,
+                    regardless of the --root setting.
+      `,
+    },
+    platform: {
+        description: `Defaults to the value of 'process.platform' if
+                    available, or 'linux' if not. Setting --platform=win32
+                    on non-Windows systems may cause strange behavior!`,
+        validOptions: [
+            'aix',
+            'android',
+            'darwin',
+            'freebsd',
+            'haiku',
+            'linux',
+            'openbsd',
+            'sunos',
+            'win32',
+            'cygwin',
+            'netbsd',
+        ],
+    },
+})
+    .optList({
+    ignore: {
+        short: 'i',
+        description: `Glob patterns to ignore`,
+    },
+})
+    .flag({
+    debug: {
+        short: 'v',
+        description: `Output a huge amount of noisy debug information about
+                    patterns as they are parsed and used to match files.`,
+    },
+    version: {
+        short: 'V',
+        description: `Output the version (${version})`,
+    },
+    help: {
+        short: 'h',
+        description: 'Show this usage information',
+    },
+});
+try {
+    const { positionals, values } = j.parse();
+    if (values.version) {
+        console.log(version);
+        process.exit(0);
+    }
+    if (values.help) {
+        console.log(j.usage());
+        process.exit(0);
+    }
+    if (positionals.length === 0 && !values.default)
+        throw 'No patterns provided';
+    if (positionals.length === 0 && values.default)
+        positionals.push(values.default);
+    const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p));
+    const matches = values.all ?
+        []
+        : positionals.filter(p => existsSync(p)).map(p => join(p));
+    const stream = globStream(patterns, {
+        absolute: values.absolute,
+        cwd: values.cwd,
+        dot: values.dot,
+        dotRelative: values['dot-relative'],
+        follow: values.follow,
+        ignore: values.ignore,
+        mark: values.mark,
+        matchBase: values['match-base'],
+        maxDepth: values['max-depth'],
+        nobrace: values.nobrace,
+        nocase: values.nocase,
+        nodir: values.nodir,
+        noext: values.noext,
+        noglobstar: values.noglobstar,
+        platform: values.platform,
+        realpath: values.realpath,
+        root: values.root,
+        stat: values.stat,
+        debug: values.debug,
+        posix: values.posix,
+    });
+    const cmd = values.cmd;
+    if (!cmd) {
+        matches.forEach(m => console.log(m));
+        stream.on('data', f => console.log(f));
+    }
+    else {
+        stream.on('data', f => matches.push(f));
+        stream.on('end', () => foregroundChild(cmd, matches, { shell: true }));
+    }
+}
+catch (e) {
+    console.error(j.usage());
+    console.error(e instanceof Error ? e.message : String(e));
+    process.exit(1);
+}
+//# sourceMappingURL=bin.mjs.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/glob.js b/node_modules/pacote/node_modules/glob/dist/esm/glob.js
new file mode 100644
index 0000000000000..c9ff3b0036d94
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/glob.js
@@ -0,0 +1,243 @@
+import { Minimatch } from 'minimatch';
+import { fileURLToPath } from 'node:url';
+import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry';
+import { Pattern } from './pattern.js';
+import { GlobStream, GlobWalker } from './walker.js';
+// if no process global, just call it linux.
+// so we default to case-sensitive, / separators
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * An object that can perform glob pattern traversals.
+ */
+export class Glob {
+    absolute;
+    cwd;
+    root;
+    dot;
+    dotRelative;
+    follow;
+    ignore;
+    magicalBraces;
+    mark;
+    matchBase;
+    maxDepth;
+    nobrace;
+    nocase;
+    nodir;
+    noext;
+    noglobstar;
+    pattern;
+    platform;
+    realpath;
+    scurry;
+    stat;
+    signal;
+    windowsPathsNoEscape;
+    withFileTypes;
+    includeChildMatches;
+    /**
+     * The options provided to the constructor.
+     */
+    opts;
+    /**
+     * An array of parsed immutable {@link Pattern} objects.
+     */
+    patterns;
+    /**
+     * All options are stored as properties on the `Glob` object.
+     *
+     * See {@link GlobOptions} for full options descriptions.
+     *
+     * Note that a previous `Glob` object can be passed as the
+     * `GlobOptions` to another `Glob` instantiation to re-use settings
+     * and caches with a new pattern.
+     *
+     * Traversal functions can be called multiple times to run the walk
+     * again.
+     */
+    constructor(pattern, opts) {
+        /* c8 ignore start */
+        if (!opts)
+            throw new TypeError('glob options required');
+        /* c8 ignore stop */
+        this.withFileTypes = !!opts.withFileTypes;
+        this.signal = opts.signal;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.dotRelative = !!opts.dotRelative;
+        this.nodir = !!opts.nodir;
+        this.mark = !!opts.mark;
+        if (!opts.cwd) {
+            this.cwd = '';
+        }
+        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
+            opts.cwd = fileURLToPath(opts.cwd);
+        }
+        this.cwd = opts.cwd || '';
+        this.root = opts.root;
+        this.magicalBraces = !!opts.magicalBraces;
+        this.nobrace = !!opts.nobrace;
+        this.noext = !!opts.noext;
+        this.realpath = !!opts.realpath;
+        this.absolute = opts.absolute;
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        this.noglobstar = !!opts.noglobstar;
+        this.matchBase = !!opts.matchBase;
+        this.maxDepth =
+            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
+        this.stat = !!opts.stat;
+        this.ignore = opts.ignore;
+        if (this.withFileTypes && this.absolute !== undefined) {
+            throw new Error('cannot set absolute and withFileTypes:true');
+        }
+        if (typeof pattern === 'string') {
+            pattern = [pattern];
+        }
+        this.windowsPathsNoEscape =
+            !!opts.windowsPathsNoEscape ||
+                opts.allowWindowsEscape ===
+                    false;
+        if (this.windowsPathsNoEscape) {
+            pattern = pattern.map(p => p.replace(/\\/g, '/'));
+        }
+        if (this.matchBase) {
+            if (opts.noglobstar) {
+                throw new TypeError('base matching requires globstar');
+            }
+            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
+        }
+        this.pattern = pattern;
+        this.platform = opts.platform || defaultPlatform;
+        this.opts = { ...opts, platform: this.platform };
+        if (opts.scurry) {
+            this.scurry = opts.scurry;
+            if (opts.nocase !== undefined &&
+                opts.nocase !== opts.scurry.nocase) {
+                throw new Error('nocase option contradicts provided scurry option');
+            }
+        }
+        else {
+            const Scurry = opts.platform === 'win32' ? PathScurryWin32
+                : opts.platform === 'darwin' ? PathScurryDarwin
+                    : opts.platform ? PathScurryPosix
+                        : PathScurry;
+            this.scurry = new Scurry(this.cwd, {
+                nocase: opts.nocase,
+                fs: opts.fs,
+            });
+        }
+        this.nocase = this.scurry.nocase;
+        // If you do nocase:true on a case-sensitive file system, then
+        // we need to use regexps instead of strings for non-magic
+        // path portions, because statting `aBc` won't return results
+        // for the file `AbC` for example.
+        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
+        const mmo = {
+            // default nocase based on platform
+            ...opts,
+            dot: this.dot,
+            matchBase: this.matchBase,
+            nobrace: this.nobrace,
+            nocase: this.nocase,
+            nocaseMagicOnly,
+            nocomment: true,
+            noext: this.noext,
+            nonegate: true,
+            optimizationLevel: 2,
+            platform: this.platform,
+            windowsPathsNoEscape: this.windowsPathsNoEscape,
+            debug: !!this.opts.debug,
+        };
+        const mms = this.pattern.map(p => new Minimatch(p, mmo));
+        const [matchSet, globParts] = mms.reduce((set, m) => {
+            set[0].push(...m.set);
+            set[1].push(...m.globParts);
+            return set;
+        }, [[], []]);
+        this.patterns = matchSet.map((set, i) => {
+            const g = globParts[i];
+            /* c8 ignore start */
+            if (!g)
+                throw new Error('invalid pattern object');
+            /* c8 ignore stop */
+            return new Pattern(set, g, 0, this.platform);
+        });
+    }
+    async walk() {
+        // Walkers always return array of Path objects, so we just have to
+        // coerce them into the right shape.  It will have already called
+        // realpath() if the option was set to do so, so we know that's cached.
+        // start out knowing the cwd, at least
+        return [
+            ...(await new GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walk()),
+        ];
+    }
+    walkSync() {
+        return [
+            ...new GlobWalker(this.patterns, this.scurry.cwd, {
+                ...this.opts,
+                maxDepth: this.maxDepth !== Infinity ?
+                    this.maxDepth + this.scurry.cwd.depth()
+                    : Infinity,
+                platform: this.platform,
+                nocase: this.nocase,
+                includeChildMatches: this.includeChildMatches,
+            }).walkSync(),
+        ];
+    }
+    stream() {
+        return new GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).stream();
+    }
+    streamSync() {
+        return new GlobStream(this.patterns, this.scurry.cwd, {
+            ...this.opts,
+            maxDepth: this.maxDepth !== Infinity ?
+                this.maxDepth + this.scurry.cwd.depth()
+                : Infinity,
+            platform: this.platform,
+            nocase: this.nocase,
+            includeChildMatches: this.includeChildMatches,
+        }).streamSync();
+    }
+    /**
+     * Default sync iteration function. Returns a Generator that
+     * iterates over the results.
+     */
+    iterateSync() {
+        return this.streamSync()[Symbol.iterator]();
+    }
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    /**
+     * Default async iteration function. Returns an AsyncGenerator that
+     * iterates over the results.
+     */
+    iterate() {
+        return this.stream()[Symbol.asyncIterator]();
+    }
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+}
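+// Illustrative only (not part of the upstream source): as the constructor docs
+// above note, a previous Glob instance may be passed as the options object of a
+// new one, re-using its settings and its PathScurry cache with a new pattern:
+//   const first = new Glob('**/*.js', {})
+//   const second = new Glob('**/*.ts', first) // same cwd, scurry and platform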
+//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/has-magic.js b/node_modules/pacote/node_modules/glob/dist/esm/has-magic.js
new file mode 100644
index 0000000000000..ba2321ab868d0
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/has-magic.js
@@ -0,0 +1,23 @@
+import { Minimatch } from 'minimatch';
+/**
+ * Return true if the patterns provided contain any magic glob characters,
+ * given the options provided.
+ *
+ * Brace expansion is not considered "magic" unless the `magicalBraces` option
+ * is set, as brace expansion just turns one string into an array of strings.
+ * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
+ * `'xby'` both do not contain any magic glob characters, and it's treated the
+ * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
+ * is in the options, brace expansion _is_ treated as a pattern having magic.
+ */
+export const hasMagic = (pattern, options = {}) => {
+    if (!Array.isArray(pattern)) {
+        pattern = [pattern];
+    }
+    for (const p of pattern) {
+        if (new Minimatch(p, options).hasMagic())
+            return true;
+    }
+    return false;
+};
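+// Illustrative only, restating the doc comment above as calls:
+//   hasMagic('x{a,b}y')                          // => false (brace expansion alone)
+//   hasMagic('x{a,b}y', { magicalBraces: true }) // => true
+//   hasMagic('x*y')                              // => true (contains a glob star)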
+//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/ignore.js b/node_modules/pacote/node_modules/glob/dist/esm/ignore.js
new file mode 100644
index 0000000000000..539c4a4fdebc4
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/ignore.js
@@ -0,0 +1,115 @@
+// give it a pattern, and it'll be able to tell you if
+// a given path should be ignored.
+// Ignoring a path ignores its children if the pattern ends in /**
+// Ignores are always parsed in dot:true mode
+import { Minimatch } from 'minimatch';
+import { Pattern } from './pattern.js';
+const defaultPlatform = (typeof process === 'object' &&
+    process &&
+    typeof process.platform === 'string') ?
+    process.platform
+    : 'linux';
+/**
+ * Class used to process ignored patterns
+ */
+export class Ignore {
+    relative;
+    relativeChildren;
+    absolute;
+    absoluteChildren;
+    platform;
+    mmopts;
+    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
+        this.relative = [];
+        this.absolute = [];
+        this.relativeChildren = [];
+        this.absoluteChildren = [];
+        this.platform = platform;
+        this.mmopts = {
+            dot: true,
+            nobrace,
+            nocase,
+            noext,
+            noglobstar,
+            optimizationLevel: 2,
+            platform,
+            nocomment: true,
+            nonegate: true,
+        };
+        for (const ign of ignored)
+            this.add(ign);
+    }
+    add(ign) {
+        // this is a little weird, but it gives us a clean set of optimized
+        // minimatch matchers, without getting tripped up if one of them
+        // ends in /** inside a brace section, and it's only inefficient at
+        // the start of the walk, not along it.
+        // It'd be nice if the Pattern class just had a .test() method, but
+        // handling globstars is a bit of a pita, and that code already lives
+        // in minimatch anyway.
+        // Another way would be if maybe Minimatch could take its set/globParts
+        // as an option, and then we could at least just use Pattern to test
+        // for absolute-ness.
+        // Yet another way, Minimatch could take an array of glob strings, and
+        // a cwd option, and do the right thing.
+        const mm = new Minimatch(ign, this.mmopts);
+        for (let i = 0; i < mm.set.length; i++) {
+            const parsed = mm.set[i];
+            const globParts = mm.globParts[i];
+            /* c8 ignore start */
+            if (!parsed || !globParts) {
+                throw new Error('invalid pattern object');
+            }
+            // strip off leading ./ portions
+            // https://github.com/isaacs/node-glob/issues/570
+            while (parsed[0] === '.' && globParts[0] === '.') {
+                parsed.shift();
+                globParts.shift();
+            }
+            /* c8 ignore stop */
+            const p = new Pattern(parsed, globParts, 0, this.platform);
+            const m = new Minimatch(p.globString(), this.mmopts);
+            const children = globParts[globParts.length - 1] === '**';
+            const absolute = p.isAbsolute();
+            if (absolute)
+                this.absolute.push(m);
+            else
+                this.relative.push(m);
+            if (children) {
+                if (absolute)
+                    this.absoluteChildren.push(m);
+                else
+                    this.relativeChildren.push(m);
+            }
+        }
+    }
+    ignored(p) {
+        const fullpath = p.fullpath();
+        const fullpaths = `${fullpath}/`;
+        const relative = p.relative() || '.';
+        const relatives = `${relative}/`;
+        for (const m of this.relative) {
+            if (m.match(relative) || m.match(relatives))
+                return true;
+        }
+        for (const m of this.absolute) {
+            if (m.match(fullpath) || m.match(fullpaths))
+                return true;
+        }
+        return false;
+    }
+    childrenIgnored(p) {
+        const fullpath = p.fullpath() + '/';
+        const relative = (p.relative() || '.') + '/';
+        for (const m of this.relativeChildren) {
+            if (m.match(relative))
+                return true;
+        }
+        for (const m of this.absoluteChildren) {
+            if (m.match(fullpath))
+                return true;
+        }
+        return false;
+    }
+}
+//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/index.js b/node_modules/pacote/node_modules/glob/dist/esm/index.js
new file mode 100644
index 0000000000000..e15c1f9c4cb03
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/index.js
@@ -0,0 +1,55 @@
+import { escape, unescape } from 'minimatch';
+import { Glob } from './glob.js';
+import { hasMagic } from './has-magic.js';
+export { escape, unescape } from 'minimatch';
+export { Glob } from './glob.js';
+export { hasMagic } from './has-magic.js';
+export { Ignore } from './ignore.js';
+export function globStreamSync(pattern, options = {}) {
+    return new Glob(pattern, options).streamSync();
+}
+export function globStream(pattern, options = {}) {
+    return new Glob(pattern, options).stream();
+}
+export function globSync(pattern, options = {}) {
+    return new Glob(pattern, options).walkSync();
+}
+async function glob_(pattern, options = {}) {
+    return new Glob(pattern, options).walk();
+}
+export function globIterateSync(pattern, options = {}) {
+    return new Glob(pattern, options).iterateSync();
+}
+export function globIterate(pattern, options = {}) {
+    return new Glob(pattern, options).iterate();
+}
+// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
+export const streamSync = globStreamSync;
+export const stream = Object.assign(globStream, { sync: globStreamSync });
+export const iterateSync = globIterateSync;
+export const iterate = Object.assign(globIterate, {
+    sync: globIterateSync,
+});
+export const sync = Object.assign(globSync, {
+    stream: globStreamSync,
+    iterate: globIterateSync,
+});
+export const glob = Object.assign(glob_, {
+    glob: glob_,
+    globSync,
+    sync,
+    globStream,
+    stream,
+    globStreamSync,
+    streamSync,
+    globIterate,
+    iterate,
+    globIterateSync,
+    iterateSync,
+    Glob,
+    hasMagic,
+    escape,
+    unescape,
+});
+glob.glob = glob;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/package.json b/node_modules/pacote/node_modules/glob/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/pattern.js b/node_modules/pacote/node_modules/glob/dist/esm/pattern.js
new file mode 100644
index 0000000000000..b41defa10c6a3
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/pattern.js
@@ -0,0 +1,215 @@
+// this is just a very light wrapper around 2 arrays with an offset index
+import { GLOBSTAR } from 'minimatch';
+const isPatternList = (pl) => pl.length >= 1;
+const isGlobList = (gl) => gl.length >= 1;
+/**
+ * An immutable-ish view on an array of glob parts and their parsed
+ * results
+ */
+export class Pattern {
+    #patternList;
+    #globList;
+    #index;
+    length;
+    #platform;
+    #rest;
+    #globString;
+    #isDrive;
+    #isUNC;
+    #isAbsolute;
+    #followGlobstar = true;
+    constructor(patternList, globList, index, platform) {
+        if (!isPatternList(patternList)) {
+            throw new TypeError('empty pattern list');
+        }
+        if (!isGlobList(globList)) {
+            throw new TypeError('empty glob list');
+        }
+        if (globList.length !== patternList.length) {
+            throw new TypeError('mismatched pattern list and glob list lengths');
+        }
+        this.length = patternList.length;
+        if (index < 0 || index >= this.length) {
+            throw new TypeError('index out of range');
+        }
+        this.#patternList = patternList;
+        this.#globList = globList;
+        this.#index = index;
+        this.#platform = platform;
+        // normalize root entries of absolute patterns on initial creation.
+        if (this.#index === 0) {
+            // c: => ['c:/']
+            // C:/ => ['C:/']
+            // C:/x => ['C:/', 'x']
+            // //host/share => ['//host/share/']
+            // //host/share/ => ['//host/share/']
+            // //host/share/x => ['//host/share/', 'x']
+            // /etc => ['/', 'etc']
+            // / => ['/']
+            if (this.isUNC()) {
+                // '' / '' / 'host' / 'share'
+                const [p0, p1, p2, p3, ...prest] = this.#patternList;
+                const [g0, g1, g2, g3, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = [p0, p1, p2, p3, ''].join('/');
+                const g = [g0, g1, g2, g3, ''].join('/');
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+            else if (this.isDrive() || this.isAbsolute()) {
+                const [p1, ...prest] = this.#patternList;
+                const [g1, ...grest] = this.#globList;
+                if (prest[0] === '') {
+                    // ends in /
+                    prest.shift();
+                    grest.shift();
+                }
+                const p = p1 + '/';
+                const g = g1 + '/';
+                this.#patternList = [p, ...prest];
+                this.#globList = [g, ...grest];
+                this.length = this.#patternList.length;
+            }
+        }
+    }
+    /**
+     * The first entry in the parsed list of patterns
+     */
+    pattern() {
+        return this.#patternList[this.#index];
+    }
+    /**
+     * true if pattern() returns a string
+     */
+    isString() {
+        return typeof this.#patternList[this.#index] === 'string';
+    }
+    /**
+     * true if pattern() returns GLOBSTAR
+     */
+    isGlobstar() {
+        return this.#patternList[this.#index] === GLOBSTAR;
+    }
+    /**
+     * true if pattern() returns a regexp
+     */
+    isRegExp() {
+        return this.#patternList[this.#index] instanceof RegExp;
+    }
+    /**
+     * The /-joined set of glob parts that make up this pattern
+     */
+    globString() {
+        return (this.#globString =
+            this.#globString ||
+                (this.#index === 0 ?
+                    this.isAbsolute() ?
+                        this.#globList[0] + this.#globList.slice(1).join('/')
+                        : this.#globList.join('/')
+                    : this.#globList.slice(this.#index).join('/')));
+    }
+    /**
+     * true if there are more pattern parts after this one
+     */
+    hasMore() {
+        return this.length > this.#index + 1;
+    }
+    /**
+     * The rest of the pattern after this part, or null if this is the end
+     */
+    rest() {
+        if (this.#rest !== undefined)
+            return this.#rest;
+        if (!this.hasMore())
+            return (this.#rest = null);
+        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
+        this.#rest.#isAbsolute = this.#isAbsolute;
+        this.#rest.#isUNC = this.#isUNC;
+        this.#rest.#isDrive = this.#isDrive;
+        return this.#rest;
+    }
+    /**
+     * true if the pattern represents a //unc/path/ on windows
+     */
+    isUNC() {
+        const pl = this.#patternList;
+        return this.#isUNC !== undefined ?
+            this.#isUNC
+            : (this.#isUNC =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    pl[0] === '' &&
+                    pl[1] === '' &&
+                    typeof pl[2] === 'string' &&
+                    !!pl[2] &&
+                    typeof pl[3] === 'string' &&
+                    !!pl[3]);
+    }
+    // pattern like C:/...
+    // split = ['C:', ...]
+    // XXX: would be nice to handle patterns like `c:*` to test the cwd
+    // in c: for *, but I don't know of a way to even figure out what that
+    // cwd is without actually chdir'ing into it?
+    /**
+     * True if the pattern starts with a drive letter on Windows
+     */
+    isDrive() {
+        const pl = this.#patternList;
+        return this.#isDrive !== undefined ?
+            this.#isDrive
+            : (this.#isDrive =
+                this.#platform === 'win32' &&
+                    this.#index === 0 &&
+                    this.length > 1 &&
+                    typeof pl[0] === 'string' &&
+                    /^[a-z]:$/i.test(pl[0]));
+    }
+    // pattern = '/' or '/...' or '/x/...'
+    // split = ['', ''] or ['', ...] or ['', 'x', ...]
+    // Drive and UNC both considered absolute on windows
+    /**
+     * True if the pattern is rooted on an absolute path
+     */
+    isAbsolute() {
+        const pl = this.#patternList;
+        return this.#isAbsolute !== undefined ?
+            this.#isAbsolute
+            : (this.#isAbsolute =
+                (pl[0] === '' && pl.length > 1) ||
+                    this.isDrive() ||
+                    this.isUNC());
+    }
+    /**
+     * consume the root of the pattern, and return it
+     */
+    root() {
+        const p = this.#patternList[0];
+        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
+            p
+            : '';
+    }
+    /**
+     * Check to see if the current globstar pattern is allowed to follow
+     * a symbolic link.
+     */
+    checkFollowGlobstar() {
+        return !(this.#index === 0 ||
+            !this.isGlobstar() ||
+            !this.#followGlobstar);
+    }
+    /**
+     * Mark that the current globstar pattern is following a symbolic link
+     */
+    markFollowGlobstar() {
+        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
+            return false;
+        this.#followGlobstar = false;
+        return true;
+    }
+}
+//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/processor.js b/node_modules/pacote/node_modules/glob/dist/esm/processor.js
new file mode 100644
index 0000000000000..f874892ffed0c
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/processor.js
@@ -0,0 +1,294 @@
+// synchronous utility for filtering entries and calculating subwalks
+import { GLOBSTAR } from 'minimatch';
+/**
+ * A cache of which patterns have been processed for a given Path
+ */
+export class HasWalkedCache {
+    store;
+    constructor(store = new Map()) {
+        this.store = store;
+    }
+    copy() {
+        return new HasWalkedCache(new Map(this.store));
+    }
+    hasWalked(target, pattern) {
+        return this.store.get(target.fullpath())?.has(pattern.globString());
+    }
+    storeWalked(target, pattern) {
+        const fullpath = target.fullpath();
+        const cached = this.store.get(fullpath);
+        if (cached)
+            cached.add(pattern.globString());
+        else
+            this.store.set(fullpath, new Set([pattern.globString()]));
+    }
+}
+/**
+ * A record of which paths have been matched in a given walk step,
+ * and whether they only are considered a match if they are a directory,
+ * and whether their absolute or relative path should be returned.
+ */
+export class MatchRecord {
+    store = new Map();
+    add(target, absolute, ifDir) {
+        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
+        const current = this.store.get(target);
+        this.store.set(target, current === undefined ? n : n & current);
+    }
+    // match, absolute, ifdir
+    entries() {
+        return [...this.store.entries()].map(([path, n]) => [
+            path,
+            !!(n & 2),
+            !!(n & 1),
+        ]);
+    }
+}
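+// Illustrative only: both flags are packed into one small integer per path,
+// the 2 bit meaning "absolute" and the 1 bit meaning "ifDir". Repeated add()
+// calls AND the bits together, so a flag survives only if every recorded
+// match agreed on it:
+//   add(p, true, false)  // stores 2
+//   add(p, false, false) // 2 & 0 => 0, p is now a relative, non-ifDir match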
+/**
+ * A collection of patterns that must be processed in a subsequent step
+ * for a given path.
+ */
+export class SubWalks {
+    store = new Map();
+    add(target, pattern) {
+        if (!target.canReaddir()) {
+            return;
+        }
+        const subs = this.store.get(target);
+        if (subs) {
+            if (!subs.find(p => p.globString() === pattern.globString())) {
+                subs.push(pattern);
+            }
+        }
+        else
+            this.store.set(target, [pattern]);
+    }
+    get(target) {
+        const subs = this.store.get(target);
+        /* c8 ignore start */
+        if (!subs) {
+            throw new Error('attempting to walk unknown path');
+        }
+        /* c8 ignore stop */
+        return subs;
+    }
+    entries() {
+        return this.keys().map(k => [k, this.store.get(k)]);
+    }
+    keys() {
+        return [...this.store.keys()].filter(t => t.canReaddir());
+    }
+}
+/**
+ * The class that processes patterns for a given path.
+ *
+ * Handles child entry filtering, and determining whether a path's
+ * directory contents must be read.
+ */
+export class Processor {
+    hasWalkedCache;
+    matches = new MatchRecord();
+    subwalks = new SubWalks();
+    patterns;
+    follow;
+    dot;
+    opts;
+    constructor(opts, hasWalkedCache) {
+        this.opts = opts;
+        this.follow = !!opts.follow;
+        this.dot = !!opts.dot;
+        this.hasWalkedCache =
+            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
+    }
+    processPatterns(target, patterns) {
+        this.patterns = patterns;
+        const processingSet = patterns.map(p => [target, p]);
+        // map of paths to the magic-starting subwalks they need to walk
+        // first item in patterns is the filter
+        for (let [t, pattern] of processingSet) {
+            this.hasWalkedCache.storeWalked(t, pattern);
+            const root = pattern.root();
+            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
+            // start absolute patterns at root
+            if (root) {
+                t = t.resolve(root === '/' && this.opts.root !== undefined ?
+                    this.opts.root
+                    : root);
+                const rest = pattern.rest();
+                if (!rest) {
+                    this.matches.add(t, true, false);
+                    continue;
+                }
+                else {
+                    pattern = rest;
+                }
+            }
+            if (t.isENOENT())
+                continue;
+            let p;
+            let rest;
+            let changed = false;
+            while (typeof (p = pattern.pattern()) === 'string' &&
+                (rest = pattern.rest())) {
+                const c = t.resolve(p);
+                t = c;
+                pattern = rest;
+                changed = true;
+            }
+            p = pattern.pattern();
+            rest = pattern.rest();
+            if (changed) {
+                if (this.hasWalkedCache.hasWalked(t, pattern))
+                    continue;
+                this.hasWalkedCache.storeWalked(t, pattern);
+            }
+            // now we have either a final string for a known entry,
+            // more strings for an unknown entry,
+            // or a pattern starting with magic, mounted on t.
+            if (typeof p === 'string') {
+                // must not be final entry, otherwise we would have
+                // concatenated it earlier.
+                const ifDir = p === '..' || p === '' || p === '.';
+                this.matches.add(t.resolve(p), absolute, ifDir);
+                continue;
+            }
+            else if (p === GLOBSTAR) {
+                // if no rest, match and subwalk pattern
+                // if rest, process rest and subwalk pattern
+                // if it's a symlink, but we didn't get here by way of a
+                // globstar match (meaning it's the first time THIS globstar
+                // has traversed a symlink), then we follow it. Otherwise, stop.
+                if (!t.isSymbolicLink() ||
+                    this.follow ||
+                    pattern.checkFollowGlobstar()) {
+                    this.subwalks.add(t, pattern);
+                }
+                const rp = rest?.pattern();
+                const rrest = rest?.rest();
+                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
+                    // only HAS to be a dir if it ends in **/ or **/.
+                    // but ending in ** will match files as well.
+                    this.matches.add(t, absolute, rp === '' || rp === '.');
+                }
+                else {
+                    if (rp === '..') {
+                        // this would mean you're matching **/.. at the fs root,
+                        // and no thanks, I'm not gonna test that specific case.
+                        /* c8 ignore start */
+                        const tp = t.parent || t;
+                        /* c8 ignore stop */
+                        if (!rrest)
+                            this.matches.add(tp, absolute, true);
+                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
+                            this.subwalks.add(tp, rrest);
+                        }
+                    }
+                }
+            }
+            else if (p instanceof RegExp) {
+                this.subwalks.add(t, pattern);
+            }
+        }
+        return this;
+    }
+    subwalkTargets() {
+        return this.subwalks.keys();
+    }
+    child() {
+        return new Processor(this.opts, this.hasWalkedCache);
+    }
+    // return a new Processor containing the subwalks for each
+    // child entry, and a set of matches, and
+    // a hasWalkedCache that's a copy of this one
+    // then we're going to call
+    filterEntries(parent, entries) {
+        const patterns = this.subwalks.get(parent);
+        // put matches and entry walks into the results processor
+        const results = this.child();
+        for (const e of entries) {
+            for (const pattern of patterns) {
+                const absolute = pattern.isAbsolute();
+                const p = pattern.pattern();
+                const rest = pattern.rest();
+                if (p === GLOBSTAR) {
+                    results.testGlobstar(e, pattern, rest, absolute);
+                }
+                else if (p instanceof RegExp) {
+                    results.testRegExp(e, p, rest, absolute);
+                }
+                else {
+                    results.testString(e, p, rest, absolute);
+                }
+            }
+        }
+        return results;
+    }
+    testGlobstar(e, pattern, rest, absolute) {
+        if (this.dot || !e.name.startsWith('.')) {
+            if (!pattern.hasMore()) {
+                this.matches.add(e, absolute, false);
+            }
+            if (e.canReaddir()) {
+                // if we're in follow mode or it's not a symlink, just keep
+                // testing the same pattern. If there's more after the globstar,
+                // then this symlink consumes the globstar. If not, then we can
+                // follow at most ONE symlink along the way, so we mark it, which
+                // also checks to ensure that it wasn't already marked.
+                if (this.follow || !e.isSymbolicLink()) {
+                    this.subwalks.add(e, pattern);
+                }
+                else if (e.isSymbolicLink()) {
+                    if (rest && pattern.checkFollowGlobstar()) {
+                        this.subwalks.add(e, rest);
+                    }
+                    else if (pattern.markFollowGlobstar()) {
+                        this.subwalks.add(e, pattern);
+                    }
+                }
+            }
+        }
+        // if the NEXT thing matches this entry, then also add
+        // the rest.
+        if (rest) {
+            const rp = rest.pattern();
+            if (typeof rp === 'string' &&
+                // dots and empty were handled already
+                rp !== '..' &&
+                rp !== '' &&
+                rp !== '.') {
+                this.testString(e, rp, rest.rest(), absolute);
+            }
+            else if (rp === '..') {
+                /* c8 ignore start */
+                const ep = e.parent || e;
+                /* c8 ignore stop */
+                this.subwalks.add(ep, rest);
+            }
+            else if (rp instanceof RegExp) {
+                this.testRegExp(e, rp, rest.rest(), absolute);
+            }
+        }
+    }
+    testRegExp(e, p, rest, absolute) {
+        if (!p.test(e.name))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+    testString(e, p, rest, absolute) {
+        // should never happen?
+        if (!e.isNamed(p))
+            return;
+        if (!rest) {
+            this.matches.add(e, absolute, false);
+        }
+        else {
+            this.subwalks.add(e, rest);
+        }
+    }
+}
+//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/walker.js b/node_modules/pacote/node_modules/glob/dist/esm/walker.js
new file mode 100644
index 0000000000000..3d68196c4f175
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/dist/esm/walker.js
@@ -0,0 +1,381 @@
+/**
+ * Single-use utility classes to provide functionality to the {@link Glob}
+ * methods.
+ *
+ * @module
+ */
+import { Minipass } from 'minipass';
+import { Ignore } from './ignore.js';
+import { Processor } from './processor.js';
+const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts)
+    : Array.isArray(ignore) ? new Ignore(ignore, opts)
+        : ignore;
+/**
+ * basic walking utilities that all the glob walker types use
+ */
+export class GlobUtil {
+    path;
+    patterns;
+    opts;
+    seen = new Set();
+    paused = false;
+    aborted = false;
+    #onResume = [];
+    #ignore;
+    #sep;
+    signal;
+    maxDepth;
+    includeChildMatches;
+    constructor(patterns, path, opts) {
+        this.patterns = patterns;
+        this.path = path;
+        this.opts = opts;
+        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
+        this.includeChildMatches = opts.includeChildMatches !== false;
+        if (opts.ignore || !this.includeChildMatches) {
+            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
+            if (!this.includeChildMatches &&
+                typeof this.#ignore.add !== 'function') {
+                const m = 'cannot ignore child matches, ignore lacks add() method.';
+                throw new Error(m);
+            }
+        }
+        // ignore, always set with maxDepth, but it's optional on the
+        // GlobOptions type
+        /* c8 ignore start */
+        this.maxDepth = opts.maxDepth || Infinity;
+        /* c8 ignore stop */
+        if (opts.signal) {
+            this.signal = opts.signal;
+            this.signal.addEventListener('abort', () => {
+                this.#onResume.length = 0;
+            });
+        }
+    }
+    #ignored(path) {
+        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
+    }
+    #childrenIgnored(path) {
+        return !!this.#ignore?.childrenIgnored?.(path);
+    }
+    // backpressure mechanism
+    pause() {
+        this.paused = true;
+    }
+    resume() {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore stop */
+        this.paused = false;
+        let fn = undefined;
+        while (!this.paused && (fn = this.#onResume.shift())) {
+            fn();
+        }
+    }
+    onResume(fn) {
+        if (this.signal?.aborted)
+            return;
+        /* c8 ignore start */
+        if (!this.paused) {
+            fn();
+        }
+        else {
+            /* c8 ignore stop */
+            this.#onResume.push(fn);
+        }
+    }
+    // do the requisite realpath/stat checking, and return the path
+    // to add or undefined to filter it out.
+    async matchCheck(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || (await e.realpath());
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? await e.lstat() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = await s.realpath();
+            /* c8 ignore start */
+            if (target && (target.isUnknown() || this.opts.stat)) {
+                await target.lstat();
+            }
+            /* c8 ignore stop */
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchCheckTest(e, ifDir) {
+        return (e &&
+            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
+            (!ifDir || e.canReaddir()) &&
+            (!this.opts.nodir || !e.isDirectory()) &&
+            (!this.opts.nodir ||
+                !this.opts.follow ||
+                !e.isSymbolicLink() ||
+                !e.realpathCached()?.isDirectory()) &&
+            !this.#ignored(e)) ?
+            e
+            : undefined;
+    }
+    matchCheckSync(e, ifDir) {
+        if (ifDir && this.opts.nodir)
+            return undefined;
+        let rpc;
+        if (this.opts.realpath) {
+            rpc = e.realpathCached() || e.realpathSync();
+            if (!rpc)
+                return undefined;
+            e = rpc;
+        }
+        const needStat = e.isUnknown() || this.opts.stat;
+        const s = needStat ? e.lstatSync() : e;
+        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
+            const target = s.realpathSync();
+            if (target && (target?.isUnknown() || this.opts.stat)) {
+                target.lstatSync();
+            }
+        }
+        return this.matchCheckTest(s, ifDir);
+    }
+    matchFinish(e, absolute) {
+        if (this.#ignored(e))
+            return;
+        // we know we have an ignore if this is false, but TS doesn't
+        if (!this.includeChildMatches && this.#ignore?.add) {
+            const ign = `${e.relativePosix()}/**`;
+            this.#ignore.add(ign);
+        }
+        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
+        this.seen.add(e);
+        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
+        // ok, we have what we need!
+        if (this.opts.withFileTypes) {
+            this.matchEmit(e);
+        }
+        else if (abs) {
+            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
+            this.matchEmit(abs + mark);
+        }
+        else {
+            const rel = this.opts.posix ? e.relativePosix() : e.relative();
+            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
+                '.' + this.#sep
+                : '';
+            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
+        }
+    }
+    async match(e, absolute, ifDir) {
+        const p = await this.matchCheck(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    matchSync(e, absolute, ifDir) {
+        const p = this.matchCheckSync(e, ifDir);
+        if (p)
+            this.matchFinish(p, absolute);
+    }
+    walkCB(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2(target, patterns, new Processor(this.opts), cb);
+    }
+    walkCB2(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const childrenCached = t.readdirCached();
+            if (t.calledReaddir())
+                this.walkCB3(t, childrenCached, processor, next);
+            else {
+                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
+            }
+        }
+        next();
+    }
+    walkCB3(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            tasks++;
+            this.match(m, absolute, ifDir).then(() => next());
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+    walkCBSync(target, patterns, cb) {
+        /* c8 ignore start */
+        if (this.signal?.aborted)
+            cb();
+        /* c8 ignore stop */
+        this.walkCB2Sync(target, patterns, new Processor(this.opts), cb);
+    }
+    walkCB2Sync(target, patterns, processor, cb) {
+        if (this.#childrenIgnored(target))
+            return cb();
+        if (this.signal?.aborted)
+            cb();
+        if (this.paused) {
+            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
+            return;
+        }
+        processor.processPatterns(target, patterns);
+        // done processing.  all of the above is sync, can be abstracted out.
+        // subwalks is a map of paths to the entry filters they need
+        // matches is a map of paths to [absolute, ifDir] tuples.
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const t of processor.subwalkTargets()) {
+            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
+                continue;
+            }
+            tasks++;
+            const children = t.readdirSync();
+            this.walkCB3Sync(t, children, processor, next);
+        }
+        next();
+    }
+    walkCB3Sync(target, entries, processor, cb) {
+        processor = processor.filterEntries(target, entries);
+        let tasks = 1;
+        const next = () => {
+            if (--tasks === 0)
+                cb();
+        };
+        for (const [m, absolute, ifDir] of processor.matches.entries()) {
+            if (this.#ignored(m))
+                continue;
+            this.matchSync(m, absolute, ifDir);
+        }
+        for (const [target, patterns] of processor.subwalks.entries()) {
+            tasks++;
+            this.walkCB2Sync(target, patterns, processor.child(), next);
+        }
+        next();
+    }
+}
+export class GlobWalker extends GlobUtil {
+    matches = new Set();
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+    }
+    matchEmit(e) {
+        this.matches.add(e);
+    }
+    async walk() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            await this.path.lstat();
+        }
+        await new Promise((res, rej) => {
+            this.walkCB(this.path, this.patterns, () => {
+                if (this.signal?.aborted) {
+                    rej(this.signal.reason);
+                }
+                else {
+                    res(this.matches);
+                }
+            });
+        });
+        return this.matches;
+    }
+    walkSync() {
+        if (this.signal?.aborted)
+            throw this.signal.reason;
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        // nothing for the callback to do, because this never pauses
+        this.walkCBSync(this.path, this.patterns, () => {
+            if (this.signal?.aborted)
+                throw this.signal.reason;
+        });
+        return this.matches;
+    }
+}
+export class GlobStream extends GlobUtil {
+    results;
+    constructor(patterns, path, opts) {
+        super(patterns, path, opts);
+        this.results = new Minipass({
+            signal: this.signal,
+            objectMode: true,
+        });
+        this.results.on('drain', () => this.resume());
+        this.results.on('resume', () => this.resume());
+    }
+    matchEmit(e) {
+        this.results.write(e);
+        if (!this.results.flowing)
+            this.pause();
+    }
+    stream() {
+        const target = this.path;
+        if (target.isUnknown()) {
+            target.lstat().then(() => {
+                this.walkCB(target, this.patterns, () => this.results.end());
+            });
+        }
+        else {
+            this.walkCB(target, this.patterns, () => this.results.end());
+        }
+        return this.results;
+    }
+    streamSync() {
+        if (this.path.isUnknown()) {
+            this.path.lstatSync();
+        }
+        this.walkCBSync(this.path, this.patterns, () => this.results.end());
+        return this.results;
+    }
+}
+//# sourceMappingURL=walker.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/glob/package.json b/node_modules/pacote/node_modules/glob/package.json
new file mode 100644
index 0000000000000..7be2c53bd5c9f
--- /dev/null
+++ b/node_modules/pacote/node_modules/glob/package.json
@@ -0,0 +1,97 @@
+{
+  "author": "Isaac Z. Schlueter  (https://blog.izs.me/)",
+  "name": "glob",
+  "description": "the most correct and second fastest glob implementation in JavaScript",
+  "version": "11.0.3",
+  "type": "module",
+  "tshy": {
+    "main": true,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "bin": "./dist/esm/bin.mjs",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-glob.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "npm run benchclean; git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts",
+    "profclean": "rm -f v8.log profile.txt",
+    "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts",
+    "prebench": "npm run prepare",
+    "bench": "bash benchmark.sh",
+    "preprof": "npm run prepare",
+    "prof": "bash prof.sh",
+    "benchclean": "node benchclean.cjs"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "dependencies": {
+    "foreground-child": "^3.3.1",
+    "jackspeak": "^4.1.1",
+    "minimatch": "^10.0.3",
+    "minipass": "^7.1.2",
+    "package-json-from-dist": "^1.0.0",
+    "path-scurry": "^2.0.0"
+  },
+  "devDependencies": {
+    "@types/node": "^24.0.1",
+    "memfs": "^4.17.2",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.5.3",
+    "rimraf": "^6.0.1",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
+  },
+  "tap": {
+    "before": "test/00-setup.ts"
+  },
+  "license": "ISC",
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/pacote/node_modules/hosted-git-info/LICENSE b/node_modules/pacote/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js b/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
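+// Illustrative only, applying the rules listed above:
+//   isGitHubShorthand('npm/cli')             // => true
+//   isGitHubShorthand('npm/cli#v10.0.0')     // => true (committish after #)
+//   isGitHubShorthand('./npm/cli')           // => false (starts with .)
+//   isGitHubShorthand('@scope/pkg')          // => false (leading @)
+//   isGitHubShorthand('git@github.com:a/b')  // => false (: before any #)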
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
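Editor's note between the vendored files: the parser above returns the constructor arguments for a GitHost instance, which the package's index.js (added later in this patch) exposes via `fromUrl`. A minimal sketch of how the shorthand and full-URL branches surface through that public API follows; the example URLs are made up, the `require('hosted-git-info')` name assumes a normal install, and the inline results reflect a reading of the code rather than captured output.

    // illustrative usage of the public API backed by the parsing above
    const hostedGitInfo = require('hosted-git-info')

    // 'user/repo' satisfies isGitHubShorthand(), so it is treated as 'github:user/repo'
    const short = hostedGitInfo.fromUrl('user/repo')
    console.log(short.type)      // 'github'
    console.log(short.default)   // 'shortcut'

    // a full URL goes through parseUrl() and the matching host's extract() instead
    const full = hostedGitInfo.fromUrl('https://github.com/user/repo.git#v1.0.0')
    console.log(full.project)    // 'repo'
    console.log(full.committish) // 'v1.0.0'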
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js b/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..2a88e95927772
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,231 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase()
+  .replace(/^\W+/g, '') // strip leading non-characters
+  .replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
+  .replace(/\W+/g, '-') // replace remaining non-characters with '-'
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: () => null,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
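Editor's note: each host entry above is shallow-merged over `defaults`, so a host only overrides the templates that differ. A small sketch, assuming the file is required directly and using invented `user`/`project` values:

    const hosts = require('./hosts.js')

    // github inherits sshurltemplate (and most other templates) from `defaults`
    const gh = hosts.github
    console.log(typeof gh.sshurltemplate) // 'function'

    // templates are plain functions over named fields
    console.log(gh.tarballtemplate({
      domain: gh.domain,
      user: 'someuser',     // illustrative value
      project: 'somerepo',  // illustrative value
      committish: 'v1.2.3',
    }))
    // -> https://codeload.github.com/someuser/somerepo/tar.gz/v1.2.3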
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js b/node_modules/pacote/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..2a7100dcee6e7
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,227 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRUCache({ max: 1000 })
+
+function unknownHostedUrl (url) {
+  try {
+    const {
+      protocol,
+      hostname,
+      pathname,
+    } = new URL(url)
+
+    if (!hostname) {
+      return null
+    }
+
+    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
+    const path = pathname.replace(/\.git$/, '')
+    return `${proto}//${hostname}${path}`
+  } catch {
+    return null
+  }
+}
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static fromManifest (manifest, opts = {}) {
+    if (!manifest || typeof manifest !== 'object') {
+      return
+    }
+
+    const r = manifest.repository
+    // TODO: look into also checking the `bugs`/`homepage` URLs
+
+    const rurl = r && (
+      typeof r === 'string'
+        ? r
+        : typeof r === 'object' && typeof r.url === 'string'
+          ? r.url
+          : null
+    )
+
+    if (!rurl) {
+      throw new Error('no repository')
+    }
+
+    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
+    if (info) {
+      return info
+    }
+    const unk = unknownHostedUrl(rurl)
+    return GitHost.fromUrl(unk, opts) || unk
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
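Editor's note: a hedged usage sketch of the static entry points defined above; the URLs are examples and the inline results are expected from reading the code, not verified output.

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('git@github.com:npm/cli.git#latest')
    if (info) {
      console.log(info.https({ noCommittish: true })) // git+https://github.com/npm/cli.git
      console.log(info.shortcut())                    // github:npm/cli#latest
    }

    // fromManifest falls back to a plain URL string for hosts it does not recognize
    const repo = GitHost.fromManifest({
      repository: { type: 'git', url: 'git+https://example.com/some/repo.git' },
    })
    console.log(repo) // 'https://example.com/some/repo' (a string, not a GitHost)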
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore @ that come after the first hash since that denotes the start
+  // of a committish which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
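Editor's note, as a concrete illustration of the corrections above (the input string is made up, and requiring the file directly is only for the sketch): an scp-style remote is not a valid WHATWG URL, so `correctUrl()` rewrites the last pre-hash `:` to `/` and prepends `git+ssh://` before retrying `new URL()`.

    const parseUrl = require('./parse-url.js')

    // 'git@github.com:user/repo.git#semver:^1.0.0' throws in new URL() as written
    const parsed = parseUrl('git@github.com:user/repo.git#semver:^1.0.0')
    console.log(parsed.protocol) // 'git+ssh:'
    console.log(parsed.pathname) // '/user/repo.git'
    console.log(parsed.hash)     // '#semver:^1.0.0'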
diff --git a/node_modules/pacote/node_modules/hosted-git-info/package.json b/node_modules/pacote/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..5883a7d308d79
--- /dev/null
+++ b/node_modules/pacote/node_modules/hosted-git-info/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "hosted-git-info",
+  "version": "9.0.0",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run eslint -- --fix",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "dependencies": {
+    "lru-cache": "^11.1.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/pacote/node_modules/jackspeak/LICENSE.md b/node_modules/pacote/node_modules/jackspeak/LICENSE.md
new file mode 100644
index 0000000000000..8cb5cc6e616c0
--- /dev/null
+++ b/node_modules/pacote/node_modules/jackspeak/LICENSE.md
@@ -0,0 +1,55 @@
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules. The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice. If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+**_As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim._**
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/pacote/node_modules/jackspeak/dist/commonjs/index.js
new file mode 100644
index 0000000000000..543412746cc8f
--- /dev/null
+++ b/node_modules/pacote/node_modules/jackspeak/dist/commonjs/index.js
@@ -0,0 +1,947 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0;
+const node_util_1 = require("node:util");
+// it's a tiny API, just cast it inline, it's fine
+//@ts-ignore
+const cliui_1 = __importDefault(require("@isaacs/cliui"));
+const node_path_1 = require("node:path");
+const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+exports.isConfigType = isConfigType;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isValidOption = (v, vo) => !!vo &&
+    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based only
+ * on its `type` and `multiple` property
+ */
+const isConfigOptionOfType = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    (0, exports.isConfigType)(o.type) &&
+    o.type === type &&
+    !!o.multiple === multi;
+exports.isConfigOptionOfType = isConfigOptionOfType;
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based on
+ * it having all valid properties
+ */
+const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi));
+exports.isConfigOption = isConfigOption;
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+const width = Math.min(process?.stdout?.columns ?? 80, 80);
+// indentation spaces from heading level
+const indent = (n) => (n - 1) * 2;
+const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+    .join(' ')
+    .trim()
+    .toUpperCase()
+    .replace(/ /g, '_');
+const toEnvVal = (value, delim = '\n') => {
+    const str = typeof value === 'string' ? value
+        : typeof value === 'boolean' ?
+            value ? '1'
+                : '0'
+            : typeof value === 'number' ? String(value)
+                : Array.isArray(value) ?
+                    value.map((v) => toEnvVal(v)).join(delim)
+                    : /* c8 ignore start */ undefined;
+    if (typeof str !== 'string') {
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
+    }
+    /* c8 ignore stop */
+    return str;
+};
+const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
+    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
+        : []
+    : type === 'string' ? env
+        : type === 'boolean' ? env === '1'
+            : +env.trim());
+const undefOrType = (v, t) => v === undefined || typeof v === t;
+const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+// print the value type, for error message reporting
+const valueType = (v) => typeof v === 'string' ? 'string'
+    : typeof v === 'boolean' ? 'boolean'
+        : typeof v === 'number' ? 'number'
+            : Array.isArray(v) ?
+                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
+                : `${v.type}${v.multiple ? '[]' : ''}`;
+const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
+    types[0]
+    : `(${types.join('|')})`;
+const validateFieldMeta = (field, fieldMeta) => {
+    if (fieldMeta) {
+        if (field.type !== undefined && field.type !== fieldMeta.type) {
+            throw new TypeError(`invalid type`, {
+                cause: {
+                    found: field.type,
+                    wanted: [fieldMeta.type, undefined],
+                },
+            });
+        }
+        if (field.multiple !== undefined &&
+            !!field.multiple !== fieldMeta.multiple) {
+            throw new TypeError(`invalid multiple`, {
+                cause: {
+                    found: field.multiple,
+                    wanted: [fieldMeta.multiple, undefined],
+                },
+            });
+        }
+        return fieldMeta;
+    }
+    if (!(0, exports.isConfigType)(field.type)) {
+        throw new TypeError(`invalid type`, {
+            cause: {
+                found: field.type,
+                wanted: ['string', 'number', 'boolean'],
+            },
+        });
+    }
+    return {
+        type: field.type,
+        multiple: !!field.multiple,
+    };
+};
+const validateField = (o, type, multiple) => {
+    const validateValidOptions = (def, validOptions) => {
+        if (!undefOrTypeArray(validOptions, type)) {
+            throw new TypeError('invalid validOptions', {
+                cause: {
+                    found: validOptions,
+                    wanted: valueType({ type, multiple: true }),
+                },
+            });
+        }
+        if (def !== undefined && validOptions !== undefined) {
+            const valid = Array.isArray(def) ?
+                def.every(v => validOptions.includes(v))
+                : validOptions.includes(def);
+            if (!valid) {
+                throw new TypeError('invalid default value not in validOptions', {
+                    cause: {
+                        found: def,
+                        wanted: validOptions,
+                    },
+                });
+            }
+        }
+    };
+    if (o.default !== undefined &&
+        !isValidValue(o.default, type, multiple)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: o.default,
+                wanted: valueType({ type, multiple }),
+            },
+        });
+    }
+    if ((0, exports.isConfigOptionOfType)(o, 'number', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'number', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if ((0, exports.isConfigOptionOfType)(o, 'string', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'string', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'boolean', true)) {
+        if (o.hint !== undefined) {
+            throw new TypeError('cannot provide hint for flag');
+        }
+        if (o.validOptions !== undefined) {
+            throw new TypeError('cannot provide validOptions for flag');
+        }
+    }
+    return o;
+};
+const toParseArgsOptionsConfig = (options) => {
+    return Object.entries(options).reduce((acc, [longOption, o]) => {
+        const p = {
+            type: 'string',
+            multiple: !!o.multiple,
+            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
+        };
+        const setNoBool = () => {
+            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
+                acc[`no-${longOption}`] = {
+                    type: 'boolean',
+                    multiple: !!o.multiple,
+                };
+            }
+        };
+        const setDefault = (def, fn) => {
+            if (def !== undefined) {
+                p.default = fn(def);
+            }
+        };
+        if ((0, exports.isConfigOption)(o, 'number', false)) {
+            setDefault(o.default, String);
+        }
+        else if ((0, exports.isConfigOption)(o, 'number', true)) {
+            setDefault(o.default, d => d.map(v => String(v)));
+        }
+        else if ((0, exports.isConfigOption)(o, 'string', false) ||
+            (0, exports.isConfigOption)(o, 'string', true)) {
+            setDefault(o.default, v => v);
+        }
+        else if ((0, exports.isConfigOption)(o, 'boolean', false) ||
+            (0, exports.isConfigOption)(o, 'boolean', true)) {
+            p.type = 'boolean';
+            setDefault(o.default, v => v);
+            setNoBool();
+        }
+        acc[longOption] = p;
+        return acc;
+    }, {});
+};
+/**
+ * Class returned by the {@link jack} function and all configuration
+ * definition methods.  This is what gets chained together.
+ */
+class Jack {
+    #configSet;
+    #shorts;
+    #options;
+    #fields = [];
+    #env;
+    #envPrefix;
+    #allowPositionals;
+    #usage;
+    #usageMarkdown;
+    constructor(options = {}) {
+        this.#options = options;
+        this.#allowPositionals = options.allowPositionals !== false;
+        this.#env =
+            this.#options.env === undefined ? process.env : this.#options.env;
+        this.#envPrefix = options.envPrefix;
+        // We need to fib a little, because it's always the same object, but it
+        // starts out as having an empty config set.  Then each method that adds
+        // fields returns `this as Jack`
+        this.#configSet = Object.create(null);
+        this.#shorts = Object.create(null);
+    }
+    /**
+     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
+     * but also including `description` and `short` fields, if set.
+     */
+    get definitions() {
+        return this.#configSet;
+    }
+    /** map of `{ <short>: <long> }` strings for each short name defined */
+    get shorts() {
+        return this.#shorts;
+    }
+    /**
+     * options passed to the {@link Jack} constructor
+     */
+    get jackOptions() {
+        return this.#options;
+    }
+    /**
+     * the data used to generate {@link Jack#usage} and
+     * {@link Jack#usageMarkdown} content.
+     */
+    get usageFields() {
+        return this.#fields;
+    }
+    /**
+     * Set the default value (which will still be overridden by env or cli)
+     * as if from a parsed config file. The optional `source` param, if
+     * provided, will be included in error messages if a value is invalid or
+     * unknown.
+     */
+    setConfigValues(values, source = '') {
+        try {
+            this.validate(values);
+        }
+        catch (er) {
+            if (source && er instanceof Error) {
+                /* c8 ignore next */
+                const cause = typeof er.cause === 'object' ? er.cause : {};
+                er.cause = { ...cause, path: source };
+                Error.captureStackTrace(er, this.setConfigValues);
+            }
+            throw er;
+        }
+        for (const [field, value] of Object.entries(values)) {
+            const my = this.#configSet[field];
+            // already validated, just for TS's benefit
+            /* c8 ignore start */
+            if (!my) {
+                throw new Error('unexpected field in config set: ' + field, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        found: field,
+                    },
+                });
+            }
+            /* c8 ignore stop */
+            my.default = value;
+        }
+        return this;
+    }
+    /**
+     * Parse a string of arguments, and return the resulting
+     * `{ values, positionals }` object.
+     *
+     * If an {@link JackOptions#envPrefix} is set, then it will read default
+     * values from the environment, and write the resulting values back
+     * to the environment as well.
+     *
+     * Environment values always take precedence over any other value, except
+     * an explicit CLI setting.
+     */
+    parse(args = process.argv) {
+        this.loadEnvDefaults();
+        const p = this.parseRaw(args);
+        this.applyDefaults(p);
+        this.writeEnv(p);
+        return p;
+    }
+    loadEnvDefaults() {
+        if (this.#envPrefix) {
+            for (const [field, my] of Object.entries(this.#configSet)) {
+                const ek = toEnvKey(this.#envPrefix, field);
+                const env = this.#env[ek];
+                if (env !== undefined) {
+                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
+                }
+            }
+        }
+    }
+    applyDefaults(p) {
+        for (const [field, c] of Object.entries(this.#configSet)) {
+            if (c.default !== undefined && !(field in p.values)) {
+                //@ts-ignore
+                p.values[field] = c.default;
+            }
+        }
+    }
+    /**
+     * Only parse the command line arguments passed in.
+     * Does not strip off the `node script.js` bits, so it must be just the
+     * arguments you wish to have parsed.
+     * Does not read from or write to the environment, or set defaults.
+     */
+    parseRaw(args) {
+        if (args === process.argv) {
+            args = args.slice(process._eval !== undefined ? 1 : 2);
+        }
+        const result = (0, node_util_1.parseArgs)({
+            args,
+            options: toParseArgsOptionsConfig(this.#configSet),
+            // always strict, but using our own logic
+            strict: false,
+            allowPositionals: this.#allowPositionals,
+            tokens: true,
+        });
+        const p = {
+            values: {},
+            positionals: [],
+        };
+        for (const token of result.tokens) {
+            if (token.kind === 'positional') {
+                p.positionals.push(token.value);
+                if (this.#options.stopAtPositional ||
+                    this.#options.stopAtPositionalTest?.(token.value)) {
+                    p.positionals.push(...args.slice(token.index + 1));
+                    break;
+                }
+            }
+            else if (token.kind === 'option') {
+                let value = undefined;
+                if (token.name.startsWith('no-')) {
+                    const my = this.#configSet[token.name];
+                    const pname = token.name.substring('no-'.length);
+                    const pos = this.#configSet[pname];
+                    if (pos &&
+                        pos.type === 'boolean' &&
+                        (!my ||
+                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
+                        value = false;
+                        token.name = pname;
+                    }
+                }
+                const my = this.#configSet[token.name];
+                if (!my) {
+                    throw new Error(`Unknown option '${token.rawName}'. ` +
+                        `To specify a positional argument starting with a '-', ` +
+                        `place it at the end of the command after '--', as in ` +
+                        `'-- ${token.rawName}'`, {
+                        cause: {
+                            code: 'JACKSPEAK',
+                            found: token.rawName + (token.value ? `=${token.value}` : ''),
+                        },
+                    });
+                }
+                if (value === undefined) {
+                    if (token.value === undefined) {
+                        if (my.type !== 'boolean') {
+                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
+                                cause: {
+                                    code: 'JACKSPEAK',
+                                    name: token.rawName,
+                                    wanted: valueType(my),
+                                },
+                            });
+                        }
+                        value = true;
+                    }
+                    else {
+                        if (my.type === 'boolean') {
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
+                        }
+                        if (my.type === 'string') {
+                            value = token.value;
+                        }
+                        else {
+                            value = +token.value;
+                            if (value !== value) {
+                                throw new Error(`Invalid value '${token.value}' provided for ` +
+                                    `'${token.rawName}' option, expected number`, {
+                                    cause: {
+                                        code: 'JACKSPEAK',
+                                        name: token.rawName,
+                                        found: token.value,
+                                        wanted: 'number',
+                                    },
+                                });
+                            }
+                        }
+                    }
+                }
+                if (my.multiple) {
+                    const pv = p.values;
+                    const tn = pv[token.name] ?? [];
+                    pv[token.name] = tn;
+                    tn.push(value);
+                }
+                else {
+                    const pv = p.values;
+                    pv[token.name] = value;
+                }
+            }
+        }
+        for (const [field, value] of Object.entries(p.values)) {
+            const valid = this.#configSet[field]?.validate;
+            const validOptions = this.#configSet[field]?.validOptions;
+            const cause = validOptions && !isValidOption(value, validOptions) ?
+                { name: field, found: value, validOptions }
+                : valid && !valid(value) ? { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
+            }
+        }
+        return p;
+    }
+    /**
+     * do not set fields as 'no-foo' if 'foo' exists and both are bools
+     * just set foo.
+     */
+    #noNoFields(f, val, s = f) {
+        if (!f.startsWith('no-') || typeof val !== 'boolean')
+            return;
+        const yes = f.substring('no-'.length);
+        // recurse so we get the core config key we care about.
+        this.#noNoFields(yes, val, s);
+        if (this.#configSet[yes]?.type === 'boolean') {
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
+        }
+    }
+    /**
+     * Validate that any arbitrary object is a valid configuration `values`
+     * object.  Useful when loading config files or other sources.
+     */
+    validate(o) {
+        if (!o || typeof o !== 'object') {
+            throw new Error('Invalid config: not an object', {
+                cause: { code: 'JACKSPEAK', found: o },
+            });
+        }
+        const opts = o;
+        for (const field in o) {
+            const value = opts[field];
+            /* c8 ignore next - for TS */
+            if (value === undefined)
+                continue;
+            this.#noNoFields(field, value);
+            const config = this.#configSet[field];
+            if (!config) {
+                throw new Error(`Unknown config option: ${field}`, {
+                    cause: { code: 'JACKSPEAK', found: field },
+                });
+            }
+            if (!isValidValue(value, config.type, !!config.multiple)) {
+                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        name: field,
+                        found: value,
+                        wanted: valueType(config),
+                    },
+                });
+            }
+            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
+                { name: field, found: value, validOptions: config.validOptions }
+                : config.validate && !config.validate(value) ?
+                    { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid config value for ${field}: ${value}`, {
+                    cause: { ...cause, code: 'JACKSPEAK' },
+                });
+            }
+        }
+    }
+    writeEnv(p) {
+        if (!this.#env || !this.#envPrefix)
+            return;
+        for (const [field, value] of Object.entries(p.values)) {
+            const my = this.#configSet[field];
+            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
+        }
+    }
+    /**
+     * Add a heading to the usage output banner
+     */
+    heading(text, level, { pre = false } = {}) {
+        if (level === undefined) {
+            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
+        }
+        this.#fields.push({ type: 'heading', text, level, pre });
+        return this;
+    }
+    /**
+     * Add a long-form description to the usage output at this position.
+     */
+    description(text, { pre } = {}) {
+        this.#fields.push({ type: 'description', text, pre });
+        return this;
+    }
+    /**
+     * Add one or more number fields.
+     */
+    num(fields) {
+        return this.#addFieldsWith(fields, 'number', false);
+    }
+    /**
+     * Add one or more multiple number fields.
+     */
+    numList(fields) {
+        return this.#addFieldsWith(fields, 'number', true);
+    }
+    /**
+     * Add one or more string option fields.
+     */
+    opt(fields) {
+        return this.#addFieldsWith(fields, 'string', false);
+    }
+    /**
+     * Add one or more multiple string option fields.
+     */
+    optList(fields) {
+        return this.#addFieldsWith(fields, 'string', true);
+    }
+    /**
+     * Add one or more flag fields.
+     */
+    flag(fields) {
+        return this.#addFieldsWith(fields, 'boolean', false);
+    }
+    /**
+     * Add one or more multiple flag fields.
+     */
+    flagList(fields) {
+        return this.#addFieldsWith(fields, 'boolean', true);
+    }
+    /**
+     * Generic field definition method. Similar to flag/flagList/number/etc,
+     * but you must specify the `type` (and optionally `multiple` and `delim`)
+     * fields on each one, or Jack won't know how to define them.
+     */
+    addFields(fields) {
+        return this.#addFields(this, fields);
+    }
+    #addFieldsWith(fields, type, multiple) {
+        return this.#addFields(this, fields, {
+            type,
+            multiple,
+        });
+    }
+    #addFields(next, fields, opt) {
+        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
+            this.#validateName(name, field);
+            const { type, multiple } = validateFieldMeta(field, opt);
+            const value = { ...field, type, multiple };
+            validateField(value, type, multiple);
+            next.#fields.push({ type: 'config', name, value });
+            return [name, value];
+        })));
+        return next;
+    }
+    #validateName(name, field) {
+        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
+            throw new TypeError(`Invalid option name: ${name}, ` +
+                `must be '-' delimited ASCII alphanumeric`);
+        }
+        if (this.#configSet[name]) {
+            throw new TypeError(`Cannot redefine option ${field}`);
+        }
+        if (this.#shorts[name]) {
+            throw new TypeError(`Cannot redefine option ${name}, already ` +
+                `in use for ${this.#shorts[name]}`);
+        }
+        if (field.short) {
+            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    'must be 1 ASCII alphanumeric character');
+            }
+            if (this.#shorts[field.short]) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    `already in use for ${this.#shorts[field.short]}`);
+            }
+            this.#shorts[field.short] = name;
+            this.#shorts[name] = name;
+        }
+    }
+    /**
+     * Return the usage banner for the given configuration
+     */
+    usage() {
+        if (this.#usage)
+            return this.#usage;
+        let headingLevel = 1;
+        //@ts-ignore
+        const ui = (0, cliui_1.default)({ width });
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            ui.div({
+                padding: [0, 0, 0, 0],
+                text: normalize(first.text),
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
+        if (this.#options.usage) {
+            ui.div({
+                text: this.#options.usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        else {
+            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            ui.div({
+                text: usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: '' });
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            const print = normalize(maybeDesc.text, maybeDesc.pre);
+            start++;
+            ui.div({ padding: [0, 0, 0, 0], text: print });
+            ui.div({ padding: [0, 0, 0, 0], text: '' });
+        }
+        const { rows, maxWidth } = this.#usageRows(start);
+        // every heading/description after the first gets indented by 2
+        // extra spaces.
+        for (const row of rows) {
+            if (row.left) {
+                // If the row is too long, don't wrap it
+                // Bump the right-hand side down a line to make room
+                const configIndent = indent(Math.max(headingLevel, 2));
+                if (row.left.length > maxWidth - 3) {
+                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
+                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
+                }
+                else {
+                    ui.div({
+                        text: row.left,
+                        padding: [0, 1, 0, configIndent],
+                        width: maxWidth,
+                    }, { padding: [0, 0, 0, 0], text: row.text });
+                }
+                if (row.skipLine) {
+                    ui.div({ padding: [0, 0, 0, 0], text: '' });
+                }
+            }
+            else {
+                if (isHeading(row)) {
+                    const { level } = row;
+                    headingLevel = level;
+                    // only h1 and h2 have bottom padding
+                    // h3-h6 do not
+                    const b = level <= 2 ? 1 : 0;
+                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
+                }
+                else {
+                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
+                }
+            }
+        }
+        return (this.#usage = ui.toString());
+    }
+    /**
+     * Return the usage banner markdown for the given configuration
+     */
+    usageMarkdown() {
+        if (this.#usageMarkdown)
+            return this.#usageMarkdown;
+        const out = [];
+        let headingLevel = 1;
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            out.push(`# ${normalizeOneLine(first.text)}`);
+        }
+        out.push('Usage:');
+        if (this.#options.usage) {
+            out.push(normalizeMarkdown(this.#options.usage, true));
+        }
+        else {
+            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            out.push(normalizeMarkdown(usage, true));
+        }
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
+            start++;
+        }
+        const { rows } = this.#usageRows(start);
+        // heading level in markdown is number of # ahead of text
+        for (const row of rows) {
+            if (row.left) {
+                out.push('#'.repeat(headingLevel + 1) +
+                    ' ' +
+                    normalizeOneLine(row.left, true));
+                if (row.text)
+                    out.push(normalizeMarkdown(row.text));
+            }
+            else if (isHeading(row)) {
+                const { level } = row;
+                headingLevel = level;
+                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
+            }
+            else {
+                out.push(normalizeMarkdown(row.text, !!row.pre));
+            }
+        }
+        return (this.#usageMarkdown = out.join('\n\n') + '\n');
+    }
+    #usageRows(start) {
+        // turn each config type into a row, and figure out the width of the
+        // left hand indentation for the option descriptions.
+        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
+        let maxWidth = 8;
+        let prev = undefined;
+        const rows = [];
+        for (const field of this.#fields.slice(start)) {
+            if (field.type !== 'config') {
+                if (prev?.type === 'config')
+                    prev.skipLine = true;
+                prev = undefined;
+                field.text = normalize(field.text, !!field.pre);
+                rows.push(field);
+                continue;
+            }
+            const { value } = field;
+            const desc = value.description || '';
+            const mult = value.multiple ? 'Can be set multiple times' : '';
+            const opts = value.validOptions?.length ?
+                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
+                : '';
+            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
+            const extra = [opts, mult].join(dmDelim).trim();
+            const text = (normalize(desc) + dmDelim + extra).trim();
+            const hint = value.hint ||
+                (value.type === 'number' ? 'n'
+                    : value.type === 'string' ? field.name
+                        : undefined);
+            const short = !value.short ? ''
+                : value.type === 'boolean' ? `-${value.short} `
+                    : `-${value.short}<${hint}> `;
+            const left = value.type === 'boolean' ?
+                `${short}--${field.name}`
+                : `${short}--${field.name}=<${hint}>`;
+            const row = { text, left, type: 'config' };
+            if (text.length > width - maxMax) {
+                row.skipLine = true;
+            }
+            if (prev && left.length > maxMax)
+                prev.skipLine = true;
+            prev = row;
+            const len = left.length + 4;
+            if (len > maxWidth && len < maxMax) {
+                maxWidth = len;
+            }
+            rows.push(row);
+        }
+        return { rows, maxWidth };
+    }
+    /**
+     * Return the configuration options as a plain object
+     */
+    toJSON() {
+        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
+            field,
+            {
+                type: def.type,
+                ...(def.multiple ? { multiple: true } : {}),
+                ...(def.delim ? { delim: def.delim } : {}),
+                ...(def.short ? { short: def.short } : {}),
+                ...(def.description ?
+                    { description: normalize(def.description) }
+                    : {}),
+                ...(def.validate ? { validate: def.validate } : {}),
+                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
+                ...(def.default !== undefined ? { default: def.default } : {}),
+                ...(def.hint ? { hint: def.hint } : {}),
+            },
+        ]));
+    }
+    /**
+     * Custom printer for `util.inspect`
+     */
+    [node_util_1.inspect.custom](_, options) {
+        return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`;
+    }
+}
+exports.Jack = Jack;
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+const jack = (options = {}) => new Jack(options);
+exports.jack = jack;
+// Unwrap and un-indent, so we can wrap description
+// strings however makes them look nice in the code.
+const normalize = (s, pre = false) => {
+    if (pre)
+        // prepend a ZWSP to each line so cliui doesn't strip it.
+        return s
+            .split('\n')
+            .map(l => `\u200b${l}`)
+            .join('\n');
+    return s
+        .split(/^\s*```\s*$/gm)
+        .map((s, i) => {
+        if (i % 2 === 1) {
+            if (!s.trim()) {
+                return `\`\`\`\n\`\`\`\n`;
+            }
+            // outdent the ``` blocks, but preserve whitespace otherwise.
+            const split = s.split('\n');
+            // throw out the \n at the start and end
+            split.pop();
+            split.shift();
+            const si = split.reduce((shortest, l) => {
+                /* c8 ignore next */
+                const ind = l.match(/^\s*/)?.[0] ?? '';
+                if (ind.length)
+                    return Math.min(ind.length, shortest);
+                else
+                    return shortest;
+            }, Infinity);
+            /* c8 ignore next */
+            const i = isFinite(si) ? si : 0;
+            return ('\n```\n' +
+                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
+                '\n```\n');
+        }
+        return (s
+            // remove single line breaks, except for lists
+            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
+            // normalize mid-line whitespace
+            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
+            // two line breaks are enough
+            .replace(/\n{3,}/g, '\n\n')
+            // remove any spaces at the start of a line
+            .replace(/\n[ \t]+/g, '\n')
+            .trim());
+    })
+        .join('\n');
+};
+// normalize for markdown printing, remove leading spaces on lines
+const normalizeMarkdown = (s, pre = false) => {
+    const n = normalize(s, pre).replace(/\\/g, '\\\\');
+    return pre ?
+        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
+        : n.replace(/\n +/g, '\n').trim();
+};
+const normalizeOneLine = (s, pre = false) => {
+    const n = normalize(s, pre)
+        .replace(/[\s\u200b]+/g, ' ')
+        .trim();
+    return pre ? `\`${n}\`` : n;
+};
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/pacote/node_modules/jackspeak/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/jackspeak/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/esm/index.js b/node_modules/pacote/node_modules/jackspeak/dist/esm/index.js
new file mode 100644
index 0000000000000..b959f5126423c
--- /dev/null
+++ b/node_modules/pacote/node_modules/jackspeak/dist/esm/index.js
@@ -0,0 +1,936 @@
+import { inspect, parseArgs, } from 'node:util';
+// it's a tiny API, just cast it inline, it's fine
+//@ts-ignore
+import cliui from '@isaacs/cliui';
+import { basename } from 'node:path';
+export const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isValidOption = (v, vo) => !!vo &&
+    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based only
+ * on its `type` and `multiple` property
+ */
+export const isConfigOptionOfType = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    isConfigType(o.type) &&
+    o.type === type &&
+    !!o.multiple === multi;
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based on
+ * it having all valid properties
+ */
+export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi));
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+const width = Math.min(process?.stdout?.columns ?? 80, 80);
+// indentation spaces from heading level
+const indent = (n) => (n - 1) * 2;
+const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+    .join(' ')
+    .trim()
+    .toUpperCase()
+    .replace(/ /g, '_');
+const toEnvVal = (value, delim = '\n') => {
+    const str = typeof value === 'string' ? value
+        : typeof value === 'boolean' ?
+            value ? '1'
+                : '0'
+            : typeof value === 'number' ? String(value)
+                : Array.isArray(value) ?
+                    value.map((v) => toEnvVal(v)).join(delim)
+                    : /* c8 ignore start */ undefined;
+    if (typeof str !== 'string') {
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
+    }
+    /* c8 ignore stop */
+    return str;
+};
+const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
+    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
+        : []
+    : type === 'string' ? env
+        : type === 'boolean' ? env === '1'
+            : +env.trim());
+const undefOrType = (v, t) => v === undefined || typeof v === t;
+const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+// print the value type, for error message reporting
+const valueType = (v) => typeof v === 'string' ? 'string'
+    : typeof v === 'boolean' ? 'boolean'
+        : typeof v === 'number' ? 'number'
+            : Array.isArray(v) ?
+                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
+                : `${v.type}${v.multiple ? '[]' : ''}`;
+const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
+    types[0]
+    : `(${types.join('|')})`;
+const validateFieldMeta = (field, fieldMeta) => {
+    if (fieldMeta) {
+        if (field.type !== undefined && field.type !== fieldMeta.type) {
+            throw new TypeError(`invalid type`, {
+                cause: {
+                    found: field.type,
+                    wanted: [fieldMeta.type, undefined],
+                },
+            });
+        }
+        if (field.multiple !== undefined &&
+            !!field.multiple !== fieldMeta.multiple) {
+            throw new TypeError(`invalid multiple`, {
+                cause: {
+                    found: field.multiple,
+                    wanted: [fieldMeta.multiple, undefined],
+                },
+            });
+        }
+        return fieldMeta;
+    }
+    if (!isConfigType(field.type)) {
+        throw new TypeError(`invalid type`, {
+            cause: {
+                found: field.type,
+                wanted: ['string', 'number', 'boolean'],
+            },
+        });
+    }
+    return {
+        type: field.type,
+        multiple: !!field.multiple,
+    };
+};
+const validateField = (o, type, multiple) => {
+    const validateValidOptions = (def, validOptions) => {
+        if (!undefOrTypeArray(validOptions, type)) {
+            throw new TypeError('invalid validOptions', {
+                cause: {
+                    found: validOptions,
+                    wanted: valueType({ type, multiple: true }),
+                },
+            });
+        }
+        if (def !== undefined && validOptions !== undefined) {
+            const valid = Array.isArray(def) ?
+                def.every(v => validOptions.includes(v))
+                : validOptions.includes(def);
+            if (!valid) {
+                throw new TypeError('invalid default value not in validOptions', {
+                    cause: {
+                        found: def,
+                        wanted: validOptions,
+                    },
+                });
+            }
+        }
+    };
+    if (o.default !== undefined &&
+        !isValidValue(o.default, type, multiple)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: o.default,
+                wanted: valueType({ type, multiple }),
+            },
+        });
+    }
+    if (isConfigOptionOfType(o, 'number', false) ||
+        isConfigOptionOfType(o, 'number', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if (isConfigOptionOfType(o, 'string', false) ||
+        isConfigOptionOfType(o, 'string', true)) {
+        validateValidOptions(o.default, o.validOptions);
+    }
+    else if (isConfigOptionOfType(o, 'boolean', false) ||
+        isConfigOptionOfType(o, 'boolean', true)) {
+        if (o.hint !== undefined) {
+            throw new TypeError('cannot provide hint for flag');
+        }
+        if (o.validOptions !== undefined) {
+            throw new TypeError('cannot provide validOptions for flag');
+        }
+    }
+    return o;
+};
+const toParseArgsOptionsConfig = (options) => {
+    return Object.entries(options).reduce((acc, [longOption, o]) => {
+        const p = {
+            type: 'string',
+            multiple: !!o.multiple,
+            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
+        };
+        const setNoBool = () => {
+            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
+                acc[`no-${longOption}`] = {
+                    type: 'boolean',
+                    multiple: !!o.multiple,
+                };
+            }
+        };
+        const setDefault = (def, fn) => {
+            if (def !== undefined) {
+                p.default = fn(def);
+            }
+        };
+        if (isConfigOption(o, 'number', false)) {
+            setDefault(o.default, String);
+        }
+        else if (isConfigOption(o, 'number', true)) {
+            setDefault(o.default, d => d.map(v => String(v)));
+        }
+        else if (isConfigOption(o, 'string', false) ||
+            isConfigOption(o, 'string', true)) {
+            setDefault(o.default, v => v);
+        }
+        else if (isConfigOption(o, 'boolean', false) ||
+            isConfigOption(o, 'boolean', true)) {
+            p.type = 'boolean';
+            setDefault(o.default, v => v);
+            setNoBool();
+        }
+        acc[longOption] = p;
+        return acc;
+    }, {});
+};
+/**
+ * Class returned by the {@link jack} function and all configuration
+ * definition methods.  This is what gets chained together.
+ */
+export class Jack {
+    #configSet;
+    #shorts;
+    #options;
+    #fields = [];
+    #env;
+    #envPrefix;
+    #allowPositionals;
+    #usage;
+    #usageMarkdown;
+    constructor(options = {}) {
+        this.#options = options;
+        this.#allowPositionals = options.allowPositionals !== false;
+        this.#env =
+            this.#options.env === undefined ? process.env : this.#options.env;
+        this.#envPrefix = options.envPrefix;
+        // We need to fib a little, because it's always the same object, but it
+        // starts out as having an empty config set.  Then each method that adds
+        // fields returns `this as Jack`
+        this.#configSet = Object.create(null);
+        this.#shorts = Object.create(null);
+    }
+    /**
+     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
+     * but also including `description` and `short` fields, if set.
+     */
+    get definitions() {
+        return this.#configSet;
+    }
+    /** map of `{ <short>: <name> }` strings for each short name defined */
+    get shorts() {
+        return this.#shorts;
+    }
+    /**
+     * options passed to the {@link Jack} constructor
+     */
+    get jackOptions() {
+        return this.#options;
+    }
+    /**
+     * the data used to generate {@link Jack#usage} and
+     * {@link Jack#usageMarkdown} content.
+     */
+    get usageFields() {
+        return this.#fields;
+    }
+    /**
+     * Set the default value (which will still be overridden by env or cli)
+     * as if from a parsed config file. The optional `source` param, if
+     * provided, will be included in error messages if a value is invalid or
+     * unknown.
+     */
+    setConfigValues(values, source = '') {
+        try {
+            this.validate(values);
+        }
+        catch (er) {
+            if (source && er instanceof Error) {
+                /* c8 ignore next */
+                const cause = typeof er.cause === 'object' ? er.cause : {};
+                er.cause = { ...cause, path: source };
+                Error.captureStackTrace(er, this.setConfigValues);
+            }
+            throw er;
+        }
+        for (const [field, value] of Object.entries(values)) {
+            const my = this.#configSet[field];
+            // already validated, just for TS's benefit
+            /* c8 ignore start */
+            if (!my) {
+                throw new Error('unexpected field in config set: ' + field, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        found: field,
+                    },
+                });
+            }
+            /* c8 ignore stop */
+            my.default = value;
+        }
+        return this;
+    }
+    /**
+     * Parse a string of arguments, and return the resulting
+     * `{ values, positionals }` object.
+     *
+     * If an {@link JackOptions#envPrefix} is set, then it will read default
+     * values from the environment, and write the resulting values back
+     * to the environment as well.
+     *
+     * Environment values always take precedence over any other value, except
+     * an explicit CLI setting.
+     */
+    parse(args = process.argv) {
+        this.loadEnvDefaults();
+        const p = this.parseRaw(args);
+        this.applyDefaults(p);
+        this.writeEnv(p);
+        return p;
+    }
+    loadEnvDefaults() {
+        if (this.#envPrefix) {
+            for (const [field, my] of Object.entries(this.#configSet)) {
+                const ek = toEnvKey(this.#envPrefix, field);
+                const env = this.#env[ek];
+                if (env !== undefined) {
+                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
+                }
+            }
+        }
+    }
+    applyDefaults(p) {
+        for (const [field, c] of Object.entries(this.#configSet)) {
+            if (c.default !== undefined && !(field in p.values)) {
+                //@ts-ignore
+                p.values[field] = c.default;
+            }
+        }
+    }
+    /**
+     * Only parse the command line arguments passed in.
+     * Does not strip off the `node script.js` bits, so it must be just the
+     * arguments you wish to have parsed.
+     * Does not read from or write to the environment, or set defaults.
+     */
+    parseRaw(args) {
+        if (args === process.argv) {
+            args = args.slice(process._eval !== undefined ? 1 : 2);
+        }
+        const result = parseArgs({
+            args,
+            options: toParseArgsOptionsConfig(this.#configSet),
+            // always strict, but using our own logic
+            strict: false,
+            allowPositionals: this.#allowPositionals,
+            tokens: true,
+        });
+        const p = {
+            values: {},
+            positionals: [],
+        };
+        for (const token of result.tokens) {
+            if (token.kind === 'positional') {
+                p.positionals.push(token.value);
+                if (this.#options.stopAtPositional ||
+                    this.#options.stopAtPositionalTest?.(token.value)) {
+                    p.positionals.push(...args.slice(token.index + 1));
+                    break;
+                }
+            }
+            else if (token.kind === 'option') {
+                let value = undefined;
+                if (token.name.startsWith('no-')) {
+                    const my = this.#configSet[token.name];
+                    const pname = token.name.substring('no-'.length);
+                    const pos = this.#configSet[pname];
+                    if (pos &&
+                        pos.type === 'boolean' &&
+                        (!my ||
+                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
+                        value = false;
+                        token.name = pname;
+                    }
+                }
+                const my = this.#configSet[token.name];
+                if (!my) {
+                    throw new Error(`Unknown option '${token.rawName}'. ` +
+                        `To specify a positional argument starting with a '-', ` +
+                        `place it at the end of the command after '--', as in ` +
+                        `'-- ${token.rawName}'`, {
+                        cause: {
+                            code: 'JACKSPEAK',
+                            found: token.rawName + (token.value ? `=${token.value}` : ''),
+                        },
+                    });
+                }
+                if (value === undefined) {
+                    if (token.value === undefined) {
+                        if (my.type !== 'boolean') {
+                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
+                                cause: {
+                                    code: 'JACKSPEAK',
+                                    name: token.rawName,
+                                    wanted: valueType(my),
+                                },
+                            });
+                        }
+                        value = true;
+                    }
+                    else {
+                        if (my.type === 'boolean') {
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
+                        }
+                        if (my.type === 'string') {
+                            value = token.value;
+                        }
+                        else {
+                            value = +token.value;
+                            if (value !== value) {
+                                throw new Error(`Invalid value '${token.value}' provided for ` +
+                                    `'${token.rawName}' option, expected number`, {
+                                    cause: {
+                                        code: 'JACKSPEAK',
+                                        name: token.rawName,
+                                        found: token.value,
+                                        wanted: 'number',
+                                    },
+                                });
+                            }
+                        }
+                    }
+                }
+                if (my.multiple) {
+                    const pv = p.values;
+                    const tn = pv[token.name] ?? [];
+                    pv[token.name] = tn;
+                    tn.push(value);
+                }
+                else {
+                    const pv = p.values;
+                    pv[token.name] = value;
+                }
+            }
+        }
+        for (const [field, value] of Object.entries(p.values)) {
+            const valid = this.#configSet[field]?.validate;
+            const validOptions = this.#configSet[field]?.validOptions;
+            const cause = validOptions && !isValidOption(value, validOptions) ?
+                { name: field, found: value, validOptions }
+                : valid && !valid(value) ? { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
+            }
+        }
+        return p;
+    }
+    /**
+     * do not set fields as 'no-foo' if 'foo' exists and both are bools
+     * just set foo.
+     */
+    #noNoFields(f, val, s = f) {
+        if (!f.startsWith('no-') || typeof val !== 'boolean')
+            return;
+        const yes = f.substring('no-'.length);
+        // recurse so we get the core config key we care about.
+        this.#noNoFields(yes, val, s);
+        if (this.#configSet[yes]?.type === 'boolean') {
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
+        }
+    }
+    /**
+     * Validate that any arbitrary object is a valid configuration `values`
+     * object.  Useful when loading config files or other sources.
+     */
+    validate(o) {
+        if (!o || typeof o !== 'object') {
+            throw new Error('Invalid config: not an object', {
+                cause: { code: 'JACKSPEAK', found: o },
+            });
+        }
+        const opts = o;
+        for (const field in o) {
+            const value = opts[field];
+            /* c8 ignore next - for TS */
+            if (value === undefined)
+                continue;
+            this.#noNoFields(field, value);
+            const config = this.#configSet[field];
+            if (!config) {
+                throw new Error(`Unknown config option: ${field}`, {
+                    cause: { code: 'JACKSPEAK', found: field },
+                });
+            }
+            if (!isValidValue(value, config.type, !!config.multiple)) {
+                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
+                    cause: {
+                        code: 'JACKSPEAK',
+                        name: field,
+                        found: value,
+                        wanted: valueType(config),
+                    },
+                });
+            }
+            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
+                { name: field, found: value, validOptions: config.validOptions }
+                : config.validate && !config.validate(value) ?
+                    { name: field, found: value }
+                    : undefined;
+            if (cause) {
+                throw new Error(`Invalid config value for ${field}: ${value}`, {
+                    cause: { ...cause, code: 'JACKSPEAK' },
+                });
+            }
+        }
+    }
+    writeEnv(p) {
+        if (!this.#env || !this.#envPrefix)
+            return;
+        for (const [field, value] of Object.entries(p.values)) {
+            const my = this.#configSet[field];
+            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
+        }
+    }
+    /**
+     * Add a heading to the usage output banner
+     */
+    heading(text, level, { pre = false } = {}) {
+        if (level === undefined) {
+            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
+        }
+        this.#fields.push({ type: 'heading', text, level, pre });
+        return this;
+    }
+    /**
+     * Add a long-form description to the usage output at this position.
+     */
+    description(text, { pre } = {}) {
+        this.#fields.push({ type: 'description', text, pre });
+        return this;
+    }
+    /**
+     * Add one or more number fields.
+     */
+    num(fields) {
+        return this.#addFieldsWith(fields, 'number', false);
+    }
+    /**
+     * Add one or more multiple number fields.
+     */
+    numList(fields) {
+        return this.#addFieldsWith(fields, 'number', true);
+    }
+    /**
+     * Add one or more string option fields.
+     */
+    opt(fields) {
+        return this.#addFieldsWith(fields, 'string', false);
+    }
+    /**
+     * Add one or more multiple string option fields.
+     */
+    optList(fields) {
+        return this.#addFieldsWith(fields, 'string', true);
+    }
+    /**
+     * Add one or more flag fields.
+     */
+    flag(fields) {
+        return this.#addFieldsWith(fields, 'boolean', false);
+    }
+    /**
+     * Add one or more multiple flag fields.
+     */
+    flagList(fields) {
+        return this.#addFieldsWith(fields, 'boolean', true);
+    }
+    /**
+     * Generic field definition method. Similar to flag/flagList/number/etc,
+     * but you must specify the `type` (and optionally `multiple` and `delim`)
+     * fields on each one, or Jack won't know how to define them.
+     */
+    addFields(fields) {
+        return this.#addFields(this, fields);
+    }
+    #addFieldsWith(fields, type, multiple) {
+        return this.#addFields(this, fields, {
+            type,
+            multiple,
+        });
+    }
+    #addFields(next, fields, opt) {
+        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
+            this.#validateName(name, field);
+            const { type, multiple } = validateFieldMeta(field, opt);
+            const value = { ...field, type, multiple };
+            validateField(value, type, multiple);
+            next.#fields.push({ type: 'config', name, value });
+            return [name, value];
+        })));
+        return next;
+    }
+    #validateName(name, field) {
+        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
+            throw new TypeError(`Invalid option name: ${name}, ` +
+                `must be '-' delimited ASCII alphanumeric`);
+        }
+        if (this.#configSet[name]) {
+            throw new TypeError(`Cannot redefine option ${field}`);
+        }
+        if (this.#shorts[name]) {
+            throw new TypeError(`Cannot redefine option ${name}, already ` +
+                `in use for ${this.#shorts[name]}`);
+        }
+        if (field.short) {
+            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    'must be 1 ASCII alphanumeric character');
+            }
+            if (this.#shorts[field.short]) {
+                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
+                    `already in use for ${this.#shorts[field.short]}`);
+            }
+            this.#shorts[field.short] = name;
+            this.#shorts[name] = name;
+        }
+    }
+    /**
+     * Return the usage banner for the given configuration
+     */
+    usage() {
+        if (this.#usage)
+            return this.#usage;
+        let headingLevel = 1;
+        //@ts-ignore
+        const ui = cliui({ width });
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            ui.div({
+                padding: [0, 0, 0, 0],
+                text: normalize(first.text),
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
+        if (this.#options.usage) {
+            ui.div({
+                text: this.#options.usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        else {
+            const cmd = basename(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            ui.div({
+                text: usage,
+                padding: [0, 0, 0, 2],
+            });
+        }
+        ui.div({ padding: [0, 0, 0, 0], text: '' });
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            const print = normalize(maybeDesc.text, maybeDesc.pre);
+            start++;
+            ui.div({ padding: [0, 0, 0, 0], text: print });
+            ui.div({ padding: [0, 0, 0, 0], text: '' });
+        }
+        const { rows, maxWidth } = this.#usageRows(start);
+        // every heading/description after the first gets indented by 2
+        // extra spaces.
+        for (const row of rows) {
+            if (row.left) {
+                // If the row is too long, don't wrap it
+                // Bump the right-hand side down a line to make room
+                const configIndent = indent(Math.max(headingLevel, 2));
+                if (row.left.length > maxWidth - 3) {
+                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
+                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
+                }
+                else {
+                    ui.div({
+                        text: row.left,
+                        padding: [0, 1, 0, configIndent],
+                        width: maxWidth,
+                    }, { padding: [0, 0, 0, 0], text: row.text });
+                }
+                if (row.skipLine) {
+                    ui.div({ padding: [0, 0, 0, 0], text: '' });
+                }
+            }
+            else {
+                if (isHeading(row)) {
+                    const { level } = row;
+                    headingLevel = level;
+                    // only h1 and h2 have bottom padding
+                    // h3-h6 do not
+                    const b = level <= 2 ? 1 : 0;
+                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
+                }
+                else {
+                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
+                }
+            }
+        }
+        return (this.#usage = ui.toString());
+    }
+    /**
+     * Return the usage banner markdown for the given configuration
+     */
+    usageMarkdown() {
+        if (this.#usageMarkdown)
+            return this.#usageMarkdown;
+        const out = [];
+        let headingLevel = 1;
+        const first = this.#fields[0];
+        let start = first?.type === 'heading' ? 1 : 0;
+        if (first?.type === 'heading') {
+            out.push(`# ${normalizeOneLine(first.text)}`);
+        }
+        out.push('Usage:');
+        if (this.#options.usage) {
+            out.push(normalizeMarkdown(this.#options.usage, true));
+        }
+        else {
+            const cmd = basename(String(process.argv[1]));
+            const shortFlags = [];
+            const shorts = [];
+            const flags = [];
+            const opts = [];
+            for (const [field, config] of Object.entries(this.#configSet)) {
+                if (config.short) {
+                    if (config.type === 'boolean')
+                        shortFlags.push(config.short);
+                    else
+                        shorts.push([config.short, config.hint || field]);
+                }
+                else {
+                    if (config.type === 'boolean')
+                        flags.push(field);
+                    else
+                        opts.push([field, config.hint || field]);
+                }
+            }
+            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
+            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const lf = flags.map(k => ` --${k}`).join('');
+            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
+            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
+            out.push(normalizeMarkdown(usage, true));
+        }
+        const maybeDesc = this.#fields[start];
+        if (maybeDesc && isDescription(maybeDesc)) {
+            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
+            start++;
+        }
+        const { rows } = this.#usageRows(start);
+        // heading level in markdown is number of # ahead of text
+        for (const row of rows) {
+            if (row.left) {
+                out.push('#'.repeat(headingLevel + 1) +
+                    ' ' +
+                    normalizeOneLine(row.left, true));
+                if (row.text)
+                    out.push(normalizeMarkdown(row.text));
+            }
+            else if (isHeading(row)) {
+                const { level } = row;
+                headingLevel = level;
+                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
+            }
+            else {
+                out.push(normalizeMarkdown(row.text, !!row.pre));
+            }
+        }
+        return (this.#usageMarkdown = out.join('\n\n') + '\n');
+    }
+    #usageRows(start) {
+        // turn each config type into a row, and figure out the width of the
+        // left hand indentation for the option descriptions.
+        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
+        let maxWidth = 8;
+        let prev = undefined;
+        const rows = [];
+        for (const field of this.#fields.slice(start)) {
+            if (field.type !== 'config') {
+                if (prev?.type === 'config')
+                    prev.skipLine = true;
+                prev = undefined;
+                field.text = normalize(field.text, !!field.pre);
+                rows.push(field);
+                continue;
+            }
+            const { value } = field;
+            const desc = value.description || '';
+            const mult = value.multiple ? 'Can be set multiple times' : '';
+            const opts = value.validOptions?.length ?
+                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
+                : '';
+            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
+            const extra = [opts, mult].join(dmDelim).trim();
+            const text = (normalize(desc) + dmDelim + extra).trim();
+            const hint = value.hint ||
+                (value.type === 'number' ? 'n'
+                    : value.type === 'string' ? field.name
+                        : undefined);
+            const short = !value.short ? ''
+                : value.type === 'boolean' ? `-${value.short} `
+                    : `-${value.short}<${hint}> `;
+            const left = value.type === 'boolean' ?
+                `${short}--${field.name}`
+                : `${short}--${field.name}=<${hint}>`;
+            const row = { text, left, type: 'config' };
+            if (text.length > width - maxMax) {
+                row.skipLine = true;
+            }
+            if (prev && left.length > maxMax)
+                prev.skipLine = true;
+            prev = row;
+            const len = left.length + 4;
+            if (len > maxWidth && len < maxMax) {
+                maxWidth = len;
+            }
+            rows.push(row);
+        }
+        return { rows, maxWidth };
+    }
+    /**
+     * Return the configuration options as a plain object
+     */
+    toJSON() {
+        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
+            field,
+            {
+                type: def.type,
+                ...(def.multiple ? { multiple: true } : {}),
+                ...(def.delim ? { delim: def.delim } : {}),
+                ...(def.short ? { short: def.short } : {}),
+                ...(def.description ?
+                    { description: normalize(def.description) }
+                    : {}),
+                ...(def.validate ? { validate: def.validate } : {}),
+                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
+                ...(def.default !== undefined ? { default: def.default } : {}),
+                ...(def.hint ? { hint: def.hint } : {}),
+            },
+        ]));
+    }
+    /**
+     * Custom printer for `util.inspect`
+     */
+    [inspect.custom](_, options) {
+        return `Jack ${inspect(this.toJSON(), options)}`;
+    }
+}
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+export const jack = (options = {}) => new Jack(options);
+// Unwrap and un-indent, so we can wrap description
+// strings however makes them look nice in the code.
+const normalize = (s, pre = false) => {
+    if (pre)
+        // prepend a ZWSP to each line so cliui doesn't strip it.
+        return s
+            .split('\n')
+            .map(l => `\u200b${l}`)
+            .join('\n');
+    return s
+        .split(/^\s*```\s*$/gm)
+        .map((s, i) => {
+        if (i % 2 === 1) {
+            if (!s.trim()) {
+                return `\`\`\`\n\`\`\`\n`;
+            }
+            // outdent the ``` blocks, but preserve whitespace otherwise.
+            const split = s.split('\n');
+            // throw out the \n at the start and end
+            split.pop();
+            split.shift();
+            const si = split.reduce((shortest, l) => {
+                /* c8 ignore next */
+                const ind = l.match(/^\s*/)?.[0] ?? '';
+                if (ind.length)
+                    return Math.min(ind.length, shortest);
+                else
+                    return shortest;
+            }, Infinity);
+            /* c8 ignore next */
+            const i = isFinite(si) ? si : 0;
+            return ('\n```\n' +
+                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
+                '\n```\n');
+        }
+        return (s
+            // remove single line breaks, except for lists
+            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
+            // normalize mid-line whitespace
+            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
+            // two line breaks are enough
+            .replace(/\n{3,}/g, '\n\n')
+            // remove any spaces at the start of a line
+            .replace(/\n[ \t]+/g, '\n')
+            .trim());
+    })
+        .join('\n');
+};
+// normalize for markdown printing, remove leading spaces on lines
+const normalizeMarkdown = (s, pre = false) => {
+    const n = normalize(s, pre).replace(/\\/g, '\\\\');
+    return pre ?
+        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
+        : n.replace(/\n +/g, '\n').trim();
+};
+const normalizeOneLine = (s, pre = false) => {
+    const n = normalize(s, pre)
+        .replace(/[\s\u200b]+/g, ' ')
+        .trim();
+    return pre ? `\`${n}\`` : n;
+};
+//# sourceMappingURL=index.js.map
\ No newline at end of file
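The jackspeak module vendored above exposes a small chaining API (jack(), heading(), flag(), num(), opt(), parse(), usage()). A minimal usage sketch follows, drawn only from the methods and field shapes defined in the file above; the option names 'verbose', 'count', and 'output' and the 'MYAPP' env prefix are illustrative, not part of the package:

    import { jack } from 'jackspeak'

    const j = jack({ envPrefix: 'MYAPP' })
      .heading('myapp - example CLI')
      .flag({ verbose: { short: 'v', description: 'print more output' } })
      .num({ count: { short: 'c', default: 1 } })
      .opt({ output: { hint: 'file', description: 'where to write results' } })

    // parse() loads env defaults, parses argv, applies defaults, and writes
    // the resulting values back to the environment under the MYAPP_ prefix.
    const { values, positionals } = j.parse(process.argv)
    if (values.verbose) console.error(j.usage())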
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/esm/package.json b/node_modules/pacote/node_modules/jackspeak/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/jackspeak/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/jackspeak/package.json b/node_modules/pacote/node_modules/jackspeak/package.json
new file mode 100644
index 0000000000000..aa85d230f6d24
--- /dev/null
+++ b/node_modules/pacote/node_modules/jackspeak/package.json
@@ -0,0 +1,94 @@
+{
+  "name": "jackspeak",
+  "version": "4.1.1",
+  "description": "A very strict and proper argument parser.",
+  "tshy": {
+    "main": true,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.js"
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "build-examples": "for i in examples/*.js ; do node $i -h > ${i/.js/.txt}; done",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "license": "BlueOak-1.0.0",
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/node": "^22.6.0",
+    "prettier": "^3.3.3",
+    "tap": "^21.0.1",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.26.7"
+  },
+  "dependencies": {
+    "@isaacs/cliui": "^8.0.2"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/jackspeak.git"
+  },
+  "keywords": [
+    "argument",
+    "parser",
+    "args",
+    "option",
+    "flag",
+    "cli",
+    "command",
+    "line",
+    "parse",
+    "parsing"
+  ],
+  "author": "Isaac Z. Schlueter ",
+  "tap": {
+    "typecheck": true
+  },
+  "module": "./dist/esm/index.js"
+}
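The package.json above declares conditional exports for the dual ESM/CommonJS build. A sketch of how the two entry points resolve for a hypothetical consumer module:

    // ESM consumers hit the "import" condition -> ./dist/esm/index.js
    import { jack, Jack } from 'jackspeak'

    // CommonJS consumers hit the "require" condition -> ./dist/commonjs/index.js
    // const { jack, Jack } = require('jackspeak')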
diff --git a/node_modules/pacote/node_modules/lru-cache/LICENSE b/node_modules/pacote/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of
+     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/esm/index.js b/node_modules/pacote/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
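+// Note on the index storage above (illustrative annotation, not upstream
+// code): getUintArray() picks the smallest typed array able to hold indexes
+// up to `max`, so the next/prev links and the free-index Stack stay compact.
+// The numbers below are just examples:
+//   getUintArray(200)        -> Uint8Array   (max <= 2^8)
+//   getUintArray(50_000)     -> Uint16Array  (max <= 2^16)
+//   getUintArray(1_000_000_000) -> Uint32Array (max <= 2^32)
+// Larger (but still safe-integer) sizes fall back to ZeroArray, and a
+// non-positive-integer max yields null, so Stack.create() returns a plain [].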
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
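+    // Illustrative constructor usage (hypothetical values, not upstream
+    // code): at least one of `max`, `maxSize`, or `ttl` must be provided,
+    // per the check above.
+    //
+    //   const cache = new LRUCache({
+    //     max: 500,                        // entry-count bound
+    //     ttl: 1000 * 60 * 5,              // 5 minutes, in ms
+    //     maxSize: 5000,                   // total calculated size bound
+    //     sizeCalculation: (value, key) => 1,
+    //   })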
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
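+    // TTL sketch (illustrative only): per-entry TTL tracking is lazy; the
+    // stub fields above are replaced by #initializeTTLTracking() once any
+    // entry is set with a nonzero ttl.
+    //
+    //   cache.set('k', 'v', { ttl: 5_000 })
+    //   cache.getRemainingTTL('k')     // ~5000, counting down
+    //   cache.getRemainingTTL('nope')  // 0 (key not in cache)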
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
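+    // Size-tracking sketch (illustrative only): with `maxSize` or
+    // `maxEntrySize` configured, every set() must be able to compute a
+    // positive-integer size, either via `sizeCalculation` or an explicit
+    // `size` option; without either, #requireSize throws as above.
+    //
+    //   const c = new LRUCache({ maxSize: 100, sizeCalculation: s => s.length })
+    //   c.set('a', 'hello')            // size 5 via sizeCalculation
+    //   c.set('b', 'hi', { size: 2 })  // explicit size skips sizeCalculation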
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
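+    // Iteration sketch (illustrative): iterating the cache itself walks
+    // entries() from most recently used to least recently used.
+    //
+    //   for (const [key, value] of cache) {
+    //     // most recently used entries come first
+    //   }
+    //   const keysNewestFirst = [...cache.keys()]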
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
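+    // dump()/load() round-trip sketch (illustrative, assuming the cached
+    // values are JSON-serializable and `freshCache` is another LRUCache
+    // with compatible options): dump() produces [key, entry] tuples with
+    // portable Date.now()-relative `start` fields, which load() converts
+    // back when rehydrating.
+    //
+    //   const snapshot = JSON.stringify(cache.dump())
+    //   // ...later, possibly in another process...
+    //   freshCache.load(JSON.parse(snapshot))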
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
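+    // set() option sketch (illustrative, hypothetical keys and values):
+    // per-call options override the constructor defaults for this single
+    // operation.
+    //
+    //   cache.set('session:42', data, { ttl: 30_000 })   // 30s TTL
+    //   const status = {}
+    //   cache.set('big', payload, { size: 1024, status })
+    //   // status.set is one of 'add' | 'replace' | 'update' | 'miss'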
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     *
+     * Will return `false` if the item is stale, even though it is technically
+     * in the cache. The difference can be determined (if it matters) by
+     * providing a `status` object and inspecting its `has` field.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set to `true` in either
+     * the options or the constructor.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
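+    // fetch() sketch (illustrative; `fetchUser` is a hypothetical async
+    // function): requires a `fetchMethod` in the constructor. Concurrent
+    // fetch() calls for the same key share one in-flight promise, and a
+    // stale value can be served while revalidating when `allowStale` is set.
+    //
+    //   const users = new LRUCache({
+    //     max: 100,
+    //     ttl: 60_000,
+    //     allowStale: true,
+    //     fetchMethod: async (key, staleValue, { signal }) =>
+    //       fetchUser(key, { signal }),
+    //   })
+    //   const user = await users.fetch('user:123')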
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
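+    // memo() sketch (illustrative; `expensiveCompute` is hypothetical):
+    // whatever memoMethod returns is stored via set() and returned as-is,
+    // with none of fetch()'s background-promise machinery.
+    //
+    //   const c = new LRUCache({
+    //     max: 50,
+    //     memoMethod: (key, oldValue, { context }) =>
+    //       expensiveCompute(key, context),
+    //   })
+    //   const v = c.memo('answer', { context: { depth: 2 } })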
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
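
For orientation, a minimal sketch of how the fetch()/memo()/get() paths in the build above are typically driven. The cache limits, TTL, URL, and both callbacks are invented for illustration and assume Node 18+ (global fetch); none of this is part of the vendored file.

    // Illustrative only: exercises fetch(), forceFetch(), memo(), and get()
    // from the lru-cache build above. Keys, TTL, and URLs are made up.
    const { LRUCache } = require('lru-cache')

    const computeExpensively = (key, previous) => `memo:${key}:${previous ?? 'none'}`

    const cache = new LRUCache({
      max: 100,
      ttl: 60_000,        // entries go stale after one minute
      allowStale: true,   // fetch()/get() may hand back a stale value while refreshing
      fetchMethod: async (key, staleValue, { signal }) => {
        // called on a miss, a stale hit, or a forceRefresh
        const res = await fetch(`https://example.test/${key}`, { signal })
        return res.json()
      },
      memoMethod: computeExpensively, // required before memo() may be called
    })

    async function demo () {
      const fresh = await cache.fetch('pkg')      // miss -> runs fetchMethod
      const same = await cache.forceFetch('pkg')  // throws if fetchMethod resolved undefined
      const memoized = cache.memo('other')        // synchronous, uses memoMethod
      const hit = cache.get('pkg', { updateAgeOnGet: true })
      return { fresh, same, memoized, hit }
    }
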
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/esm/package.json b/node_modules/pacote/node_modules/lru-cache/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/lru-cache/package.json b/node_modules/pacote/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/pacote/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
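
The manifest above ships dual builds behind the "exports" map, plus a "./min" subpath; a quick sketch of how each resolves for a consumer (paths taken from the manifest, package name assumed to resolve to this vendored copy):

    // ESM consumers get ./dist/esm, CJS consumers get ./dist/commonjs
    import { LRUCache } from 'lru-cache'                // -> ./dist/esm/index.js
    import { LRUCache as MinLRU } from 'lru-cache/min'  // -> ./dist/esm/index.min.js
    // const { LRUCache } = require('lru-cache')        // -> ./dist/commonjs/index.js
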
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/LICENSE b/node_modules/pacote/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 0000000000000..1808eb2844231
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright 2017-2022 (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js
new file mode 100644
index 0000000000000..bfcfacbcc95e1
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -0,0 +1,471 @@
+const { Request, Response } = require('minipass-fetch')
+const { Minipass } = require('minipass')
+const MinipassFlush = require('minipass-flush')
+const cacache = require('cacache')
+const url = require('url')
+
+const CachingMinipassPipeline = require('../pipeline.js')
+const CachePolicy = require('./policy.js')
+const cacheKey = require('./key.js')
+const remote = require('../remote.js')
+
+const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
+
+// allow list for request headers that will be written to the cache index
+// note: we will also store any request headers
+// that are named in a response's vary header
+const KEEP_REQUEST_HEADERS = [
+  'accept-charset',
+  'accept-encoding',
+  'accept-language',
+  'accept',
+  'cache-control',
+]
+
+// allow list for response headers that will be written to the cache index
+// note: we must not store the real response's age header, or when we load
+// a cache policy based on the metadata it will think the cached response
+// is always stale
+const KEEP_RESPONSE_HEADERS = [
+  'cache-control',
+  'content-encoding',
+  'content-language',
+  'content-type',
+  'date',
+  'etag',
+  'expires',
+  'last-modified',
+  'link',
+  'location',
+  'pragma',
+  'vary',
+]
+
+// return an object containing all metadata to be written to the index
+const getMetadata = (request, response, options) => {
+  const metadata = {
+    time: Date.now(),
+    url: request.url,
+    reqHeaders: {},
+    resHeaders: {},
+
+    // options on which we must match the request and vary the response
+    options: {
+      compress: options.compress != null ? options.compress : request.compress,
+    },
+  }
+
+  // only save the status if it's not a 200 or 304
+  if (response.status !== 200 && response.status !== 304) {
+    metadata.status = response.status
+  }
+
+  for (const name of KEEP_REQUEST_HEADERS) {
+    if (request.headers.has(name)) {
+      metadata.reqHeaders[name] = request.headers.get(name)
+    }
+  }
+
+  // if the request's host header differs from the host in the url
+  // we need to keep it, otherwise it's just noise and we ignore it
+  const host = request.headers.get('host')
+  const parsedUrl = new url.URL(request.url)
+  if (host && parsedUrl.host !== host) {
+    metadata.reqHeaders.host = host
+  }
+
+  // if the response has a vary header, make sure
+  // we store the relevant request headers too
+  if (response.headers.has('vary')) {
+    const vary = response.headers.get('vary')
+    // a vary of "*" means every header causes a different response.
+    // in that scenario, we do not include any additional headers
+    // as the freshness check will always fail anyway and we don't
+    // want to bloat the cache indexes
+    if (vary !== '*') {
+      // copy any other request headers that will vary the response
+      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
+      for (const name of varyHeaders) {
+        if (request.headers.has(name)) {
+          metadata.reqHeaders[name] = request.headers.get(name)
+        }
+      }
+    }
+  }
+
+  for (const name of KEEP_RESPONSE_HEADERS) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  for (const name of options.cacheAdditionalHeaders) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  return metadata
+}
+
+// symbols used to hide objects that may be lazily evaluated in a getter
+const _request = Symbol('request')
+const _response = Symbol('response')
+const _policy = Symbol('policy')
+
+class CacheEntry {
+  constructor ({ entry, request, response, options }) {
+    if (entry) {
+      this.key = entry.key
+      this.entry = entry
+      // previous versions of this module didn't write an explicit timestamp in
+      // the metadata, so fall back to the entry's timestamp. we can't use the
+      // entry timestamp to determine staleness because cacache will update it
+      // when it verifies its data
+      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
+    } else {
+      this.key = cacheKey(request)
+    }
+
+    this.options = options
+
+    // these properties are behind getters that lazily evaluate
+    this[_request] = request
+    this[_response] = response
+    this[_policy] = null
+  }
+
+  // returns a CacheEntry instance that satisfies the given request
+  // or undefined if no existing entry satisfies
+  static async find (request, options) {
+    try {
+      // compacts the index and returns an array of unique entries
+      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
+        const entryA = new CacheEntry({ entry: A, options })
+        const entryB = new CacheEntry({ entry: B, options })
+        return entryA.policy.satisfies(entryB.request)
+      }, {
+        validateEntry: (entry) => {
+          // clean out entries with a buggy content-encoding value
+          if (entry.metadata &&
+              entry.metadata.resHeaders &&
+              entry.metadata.resHeaders['content-encoding'] === null) {
+            return false
+          }
+
+          // if an integrity is null, it needs to have a status specified
+          if (entry.integrity === null) {
+            return !!(entry.metadata && entry.metadata.status)
+          }
+
+          return true
+        },
+      })
+    } catch (err) {
+      // if the compact request fails, ignore the error and return
+      return
+    }
+
+    // a cache mode of 'reload' means to behave as though we have no cache
+    // on the way to the network. return undefined to allow cacheFetch to
+    // create a brand new request no matter what.
+    if (options.cache === 'reload') {
+      return
+    }
+
+    // find the specific entry that satisfies the request
+    let match
+    for (const entry of matches) {
+      const _entry = new CacheEntry({
+        entry,
+        options,
+      })
+
+      if (_entry.policy.satisfies(request)) {
+        match = _entry
+        break
+      }
+    }
+
+    return match
+  }
+
+  // if the user made a PUT/POST/PATCH then we invalidate our
+  // cache for the same url by deleting the index entirely
+  static async invalidate (request, options) {
+    const key = cacheKey(request)
+    try {
+      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
+    } catch (err) {
+      // ignore errors
+    }
+  }
+
+  get request () {
+    if (!this[_request]) {
+      this[_request] = new Request(this.entry.metadata.url, {
+        method: 'GET',
+        headers: this.entry.metadata.reqHeaders,
+        ...this.entry.metadata.options,
+      })
+    }
+
+    return this[_request]
+  }
+
+  get response () {
+    if (!this[_response]) {
+      this[_response] = new Response(null, {
+        url: this.entry.metadata.url,
+        counter: this.options.counter,
+        status: this.entry.metadata.status || 200,
+        headers: {
+          ...this.entry.metadata.resHeaders,
+          'content-length': this.entry.size,
+        },
+      })
+    }
+
+    return this[_response]
+  }
+
+  get policy () {
+    if (!this[_policy]) {
+      this[_policy] = new CachePolicy({
+        entry: this.entry,
+        request: this.request,
+        response: this.response,
+        options: this.options,
+      })
+    }
+
+    return this[_policy]
+  }
+
+  // wraps the response in a pipeline that stores the data
+  // in the cache while the user consumes it
+  async store (status) {
+    // if we got a status other than 200, 301, or 308,
+    // or the CachePolicy forbid storage, append the
+    // cache status header and return it untouched
+    if (
+      this.request.method !== 'GET' ||
+      ![200, 301, 308].includes(this.response.status) ||
+      !this.policy.storable()
+    ) {
+      this.response.headers.set('x-local-cache-status', 'skip')
+      return this.response
+    }
+
+    const size = this.response.headers.get('content-length')
+    const cacheOpts = {
+      algorithms: this.options.algorithms,
+      metadata: getMetadata(this.request, this.response, this.options),
+      size,
+      integrity: this.options.integrity,
+      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
+    }
+
+    let body = null
+    // we only set a body if the status is a 200, redirects are
+    // stored as metadata only
+    if (this.response.status === 200) {
+      let cacheWriteResolve, cacheWriteReject
+      const cacheWritePromise = new Promise((resolve, reject) => {
+        cacheWriteResolve = resolve
+        cacheWriteReject = reject
+      }).catch((err) => {
+        body.emit('error', err)
+      })
+
+      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
+        flush () {
+          return cacheWritePromise
+        },
+      }))
+      // this is always true since if we aren't reusing the one from the remote fetch, we
+      // are using the one from cacache
+      body.hasIntegrityEmitter = true
+
+      const onResume = () => {
+        const tee = new Minipass()
+        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
+        // re-emit the integrity and size events on our new response body so they can be reused
+        cacheStream.on('integrity', i => body.emit('integrity', i))
+        cacheStream.on('size', s => body.emit('size', s))
+        // stick a flag on here so downstream users will know if they can expect integrity events
+        tee.pipe(cacheStream)
+        // TODO if the cache write fails, log a warning but return the response anyway
+        // eslint-disable-next-line promise/catch-or-return
+        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+        body.unshift(tee)
+        body.unshift(this.response.body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+    } else {
+      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+    }
+
+    // note: we do not set the x-local-cache-hash header because we do not know
+    // the hash value until after the write to the cache completes, which doesn't
+    // happen until after the response has been sent and it's too late to write
+    // the header anyway
+    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    this.response.headers.set('x-local-cache-mode', 'stream')
+    this.response.headers.set('x-local-cache-status', status)
+    this.response.headers.set('x-local-cache-time', new Date().toISOString())
+    const newResponse = new Response(body, {
+      url: this.response.url,
+      status: this.response.status,
+      headers: this.response.headers,
+      counter: this.options.counter,
+    })
+    return newResponse
+  }
+
+  // use the cached data to create a response and return it
+  async respond (method, options, status) {
+    let response
+    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
+      // if the request is a HEAD, or the response is a redirect,
+      // then the metadata in the entry already includes everything
+      // we need to build a response
+      response = this.response
+    } else {
+      // we're responding with a full cached response, so create a body
+      // that reads from cacache and attach it to a new Response
+      const body = new Minipass()
+      const headers = { ...this.policy.responseHeaders() }
+
+      const onResume = () => {
+        const cacheStream = cacache.get.stream.byDigest(
+          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+        )
+        cacheStream.on('error', async (err) => {
+          cacheStream.pause()
+          if (err.code === 'EINTEGRITY') {
+            await cacache.rm.content(
+              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+            )
+          }
+          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
+            await CacheEntry.invalidate(this.request, this.options)
+          }
+          body.emit('error', err)
+          cacheStream.resume()
+        })
+        // emit the integrity and size events based on our metadata so we're consistent
+        body.emit('integrity', this.entry.integrity)
+        body.emit('size', Number(headers['content-length']))
+        cacheStream.pipe(body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+      response = new Response(body, {
+        url: this.entry.metadata.url,
+        counter: options.counter,
+        status: 200,
+        headers,
+      })
+    }
+
+    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
+    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    response.headers.set('x-local-cache-mode', 'stream')
+    response.headers.set('x-local-cache-status', status)
+    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
+    return response
+  }
+
+  // use the provided request along with this cache entry to
+  // revalidate the stored response. returns a response, either
+  // from the cache or from the update
+  async revalidate (request, options) {
+    const revalidateRequest = new Request(request, {
+      headers: this.policy.revalidationHeaders(request),
+    })
+
+    try {
+      // NOTE: be sure to remove the headers property from the
+      // user supplied options, since we have already defined
+      // them on the new request object. if they're still in the
+      // options then those will overwrite the ones from the policy
+      var response = await remote(revalidateRequest, {
+        ...options,
+        headers: undefined,
+      })
+    } catch (err) {
+      // if the network fetch fails, return the stale
+      // cached response unless it has a cache-control
+      // of 'must-revalidate'
+      if (!this.policy.mustRevalidate) {
+        return this.respond(request.method, options, 'stale')
+      }
+
+      throw err
+    }
+
+    if (this.policy.revalidated(revalidateRequest, response)) {
+      // we got a 304, write a new index to the cache and respond from cache
+      const metadata = getMetadata(request, response, options)
+      // 304 responses do not include headers that are specific to the response data
+      // since they do not include a body, so we copy values for headers that were
+      // in the old cache entry to the new one, if the new metadata does not already
+      // include that header
+      for (const name of KEEP_RESPONSE_HEADERS) {
+        if (
+          !hasOwnProperty(metadata.resHeaders, name) &&
+          hasOwnProperty(this.entry.metadata.resHeaders, name)
+        ) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+      }
+
+      for (const name of options.cacheAdditionalHeaders) {
+        const inMeta = hasOwnProperty(metadata.resHeaders, name)
+        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
+        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
+
+        // if the header is in the existing entry, but it is not in the metadata
+        // then we need to write it to the metadata as this will refresh the on-disk cache
+        if (!inMeta && inEntry) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+        // if the header is in the metadata, but not in the policy, then we need to set
+        // it in the policy so that it's included in the immediate response. future
+        // responses will load a new cache entry, so we don't need to change that
+        if (!inPolicy && inMeta) {
+          this.policy.response.headers[name] = metadata.resHeaders[name]
+        }
+      }
+
+      try {
+        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
+          size: this.entry.size,
+          metadata,
+        })
+      } catch (err) {
+        // if updating the cache index fails, we ignore it and
+        // respond anyway
+      }
+      return this.respond(request.method, options, 'revalidated')
+    }
+
+    // if we got a modified response, create a new entry based on it
+    const newEntry = new CacheEntry({
+      request,
+      response,
+      options,
+    })
+
+    // respond with the new entry while writing it to the cache
+    return newEntry.store('updated')
+  }
+}
+
+module.exports = CacheEntry
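
To make the cache index format concrete, a hedged sketch of the object getMetadata() above would produce for a GET whose response carries a `vary: accept-encoding` header. All URLs, header values, and the timestamp are invented.

    // Hypothetical getMetadata(request, response, options) result, for illustration only.
    const exampleMetadata = {
      time: 1758662522000,                       // Date.now() when the entry was written
      url: 'https://registry.example.test/abbrev',
      reqHeaders: {
        accept: 'application/vnd.npm.install-v1+json',  // in KEEP_REQUEST_HEADERS
        'accept-encoding': 'gzip',                      // in KEEP_REQUEST_HEADERS, also named by vary below
      },
      resHeaders: {
        'content-type': 'application/vnd.npm.install-v1+json',
        etag: '"0123abcd"',
        vary: 'accept-encoding',
        date: 'Tue, 23 Sep 2025 21:02:02 GMT',
      },
      options: { compress: true },               // matched on future lookups
      // no `status` field: it is only written for non-200/304 responses
    }
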
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js
new file mode 100644
index 0000000000000..67a66573bebe6
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -0,0 +1,11 @@
+class NotCachedError extends Error {
+  constructor (url) {
+    /* eslint-disable-next-line max-len */
+    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
+    this.code = 'ENOTCACHED'
+  }
+}
+
+module.exports = {
+  NotCachedError,
+}
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js
new file mode 100644
index 0000000000000..0de49d23fb933
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js
@@ -0,0 +1,49 @@
+const { NotCachedError } = require('./errors.js')
+const CacheEntry = require('./entry.js')
+const remote = require('../remote.js')
+
+// do whatever is necessary to get a Response and return it
+const cacheFetch = async (request, options) => {
+  // try to find a cached entry that satisfies this request
+  const entry = await CacheEntry.find(request, options)
+  if (!entry) {
+    // no cached result, if the cache mode is 'only-if-cached' that's a failure
+    if (options.cache === 'only-if-cached') {
+      throw new NotCachedError(request.url)
+    }
+
+    // otherwise, we make a request, store it and return it
+    const response = await remote(request, options)
+    const newEntry = new CacheEntry({ request, response, options })
+    return newEntry.store('miss')
+  }
+
+  // we have a cached response that satisfies this request, however if the cache
+  // mode is 'no-cache' then we send the revalidation request no matter what
+  if (options.cache === 'no-cache') {
+    return entry.revalidate(request, options)
+  }
+
+  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
+  // 'only-if-cached' we can respond with the cached entry. set the status
+  // based on the result of needsRevalidation and respond
+  const _needsRevalidation = entry.policy.needsRevalidation(request)
+  if (options.cache === 'force-cache' ||
+      options.cache === 'only-if-cached' ||
+      !_needsRevalidation) {
+    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+  }
+
+  // if we got here, the cache entry is stale so revalidate it
+  return entry.revalidate(request, options)
+}
+
+cacheFetch.invalidate = async (request, options) => {
+  if (!options.cachePath) {
+    return
+  }
+
+  return CacheEntry.invalidate(request, options)
+}
+
+module.exports = cacheFetch
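
For quick reference, a comment-only summary (our wording) of how the cache modes consumed above map onto these branches:

    // 'default'        -> serve from cache while fresh, revalidate once stale
    // 'no-cache'       -> always revalidate against the network, even when fresh
    // 'force-cache'    -> serve whatever is cached, stale or not, without revalidating
    // 'only-if-cached' -> like force-cache, but a miss throws NotCachedError (ENOTCACHED)
    // 'reload'         -> ignore cached entries on read (find() returns undefined) and store the fresh response
    // 'no-store'       -> rejected earlier by CachePolicy.storable(), so this function is never reached
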
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js
new file mode 100644
index 0000000000000..f7684d562b7fa
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js
@@ -0,0 +1,17 @@
+const { URL, format } = require('url')
+
+// options passed to url.format() when generating a key
+const formatOptions = {
+  auth: false,
+  fragment: false,
+  search: true,
+  unicode: false,
+}
+
+// returns a string to be used as the cache key for the Request
+const cacheKey = (request) => {
+  const parsed = new URL(request.url)
+  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
+}
+
+module.exports = cacheKey
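
A quick illustration of the key format produced above; the URL is invented. Anything Request-like works here since only `request.url` is read, and the format options drop auth and fragment while keeping the query string.

    const cacheKey = require('./key.js')

    const key = cacheKey({ url: 'https://user:pass@registry.example.test/abbrev?write=true#readme' })
    // -> 'make-fetch-happen:request-cache:https://registry.example.test/abbrev?write=true'
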
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js
new file mode 100644
index 0000000000000..ada3c8600dae9
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -0,0 +1,161 @@
+const CacheSemantics = require('http-cache-semantics')
+const Negotiator = require('negotiator')
+const ssri = require('ssri')
+
+// options passed to http-cache-semantics constructor
+const policyOptions = {
+  shared: false,
+  ignoreCargoCult: true,
+}
+
+// a fake empty response, used when only testing the
+// request for storability
+const emptyResponse = { status: 200, headers: {} }
+
+// returns a plain object representation of the Request
+const requestObject = (request) => {
+  const _obj = {
+    method: request.method,
+    url: request.url,
+    headers: {},
+    compress: request.compress,
+  }
+
+  request.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+// returns a plain object representation of the Response
+const responseObject = (response) => {
+  const _obj = {
+    status: response.status,
+    headers: {},
+  }
+
+  response.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+class CachePolicy {
+  constructor ({ entry, request, response, options }) {
+    this.entry = entry
+    this.request = requestObject(request)
+    this.response = responseObject(response)
+    this.options = options
+    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
+
+    if (this.entry) {
+      // if we have an entry, copy the timestamp to the _responseTime
+      // this is necessary because the CacheSemantics constructor forces
+      // the value to Date.now() which means a policy created from a
+      // cache entry is likely to always identify itself as stale
+      this.policy._responseTime = this.entry.metadata.time
+    }
+  }
+
+  // static method to quickly determine if a request alone is storable
+  static storable (request, options) {
+    // no cachePath means no caching
+    if (!options.cachePath) {
+      return false
+    }
+
+    // user explicitly asked not to cache
+    if (options.cache === 'no-store') {
+      return false
+    }
+
+    // we only cache GET and HEAD requests
+    if (!['GET', 'HEAD'].includes(request.method)) {
+      return false
+    }
+
+    // otherwise, let http-cache-semantics make the decision
+    // based on the request's headers
+    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
+    return policy.storable()
+  }
+
+  // returns true if the policy satisfies the request
+  satisfies (request) {
+    const _req = requestObject(request)
+    if (this.request.headers.host !== _req.headers.host) {
+      return false
+    }
+
+    if (this.request.compress !== _req.compress) {
+      return false
+    }
+
+    const negotiatorA = new Negotiator(this.request)
+    const negotiatorB = new Negotiator(_req)
+
+    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
+      return false
+    }
+
+    if (this.options.integrity) {
+      return ssri.parse(this.options.integrity).match(this.entry.integrity)
+    }
+
+    return true
+  }
+
+  // returns true if the request and response allow caching
+  storable () {
+    return this.policy.storable()
+  }
+
+  // NOTE: this is a hack to avoid parsing the cache-control
+  // header ourselves, it returns true if the response's
+  // cache-control contains must-revalidate
+  get mustRevalidate () {
+    return !!this.policy._rescc['must-revalidate']
+  }
+
+  // returns true if the cached response requires revalidation
+  // for the given request
+  needsRevalidation (request) {
+    const _req = requestObject(request)
+    // force method to GET because we only cache GETs
+    // but can serve a HEAD from a cached GET
+    _req.method = 'GET'
+    return !this.policy.satisfiesWithoutRevalidation(_req)
+  }
+
+  responseHeaders () {
+    return this.policy.responseHeaders()
+  }
+
+  // returns a new object containing the appropriate headers
+  // to send a revalidation request
+  revalidationHeaders (request) {
+    const _req = requestObject(request)
+    return this.policy.revalidationHeaders(_req)
+  }
+
+  // returns true if the request/response was revalidated
+  // successfully. returns false if a new response was received
+  revalidated (request, response) {
+    const _req = requestObject(request)
+    const _res = responseObject(response)
+    const policy = this.policy.revalidatedPolicy(_req, _res)
+    return !policy.modified
+  }
+}
+
+module.exports = CachePolicy
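
A hedged sketch of how the static storable() gate above behaves for a few request shapes; the URLs and cache directory are invented.

    const { Request } = require('minipass-fetch')
    const CachePolicy = require('./policy.js')

    const opts = { cachePath: '/tmp/example-cache', cache: 'default' }

    // plain GET with a cachePath: the decision is delegated to http-cache-semantics
    CachePolicy.storable(new Request('https://registry.example.test/abbrev'), opts)       // likely true

    // only GET and HEAD are ever cached
    CachePolicy.storable(
      new Request('https://registry.example.test/-/v1/login', { method: 'POST' }), opts)  // false

    // no cachePath, or an explicit 'no-store', short-circuits to false
    CachePolicy.storable(new Request('https://registry.example.test/abbrev'), { cache: 'no-store' })  // false
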
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js
new file mode 100644
index 0000000000000..233ba67e16550
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js
@@ -0,0 +1,118 @@
+'use strict'
+
+const { FetchError, Request, isRedirect } = require('minipass-fetch')
+const url = require('url')
+
+const CachePolicy = require('./cache/policy.js')
+const cache = require('./cache/index.js')
+const remote = require('./remote.js')
+
+// given a Request, a Response and user options
+// return true if the response is a redirect that
+// can be followed. we throw errors that will result
+// in the fetch being rejected if the redirect is
+// possible but invalid for some reason
+const canFollowRedirect = (request, response, options) => {
+  if (!isRedirect(response.status)) {
+    return false
+  }
+
+  if (options.redirect === 'manual') {
+    return false
+  }
+
+  if (options.redirect === 'error') {
+    throw new FetchError(`redirect mode is set to error: ${request.url}`,
+      'no-redirect', { code: 'ENOREDIRECT' })
+  }
+
+  if (!response.headers.has('location')) {
+    throw new FetchError(`redirect location header missing for: ${request.url}`,
+      'no-location', { code: 'EINVALIDREDIRECT' })
+  }
+
+  if (request.counter >= request.follow) {
+    throw new FetchError(`maximum redirect reached at: ${request.url}`,
+      'max-redirect', { code: 'EMAXREDIRECT' })
+  }
+
+  return true
+}
+
+// given a Request, a Response, and the user's options return an object
+// with a new Request and a new options object that will be used for
+// following the redirect
+const getRedirect = (request, response, options) => {
+  const _opts = { ...options }
+  const location = response.headers.get('location')
+  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
+  // Comment below is used under the following license:
+  /**
+   * @license
+   * Copyright (c) 2010-2012 Mikeal Rogers
+   * Licensed under the Apache License, Version 2.0 (the "License");
+   * you may not use this file except in compliance with the License.
+   * You may obtain a copy of the License at
+   * http://www.apache.org/licenses/LICENSE-2.0
+   * Unless required by applicable law or agreed to in writing,
+   * software distributed under the License is distributed on an "AS
+   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+   * express or implied. See the License for the specific language
+   * governing permissions and limitations under the License.
+   */
+
+  // Remove authorization if changing hostnames (but not if just
+  // changing ports or protocols).  This matches the behavior of request:
+  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
+    request.headers.delete('authorization')
+    request.headers.delete('cookie')
+  }
+
+  // for POST request with 301/302 response, or any request with 303 response,
+  // use GET when following redirect
+  if (
+    response.status === 303 ||
+    (request.method === 'POST' && [301, 302].includes(response.status))
+  ) {
+    _opts.method = 'GET'
+    _opts.body = null
+    request.headers.delete('content-length')
+  }
+
+  _opts.headers = {}
+  request.headers.forEach((value, key) => {
+    _opts.headers[key] = value
+  })
+
+  _opts.counter = ++request.counter
+  const redirectReq = new Request(url.format(redirectUrl), _opts)
+  return {
+    request: redirectReq,
+    options: _opts,
+  }
+}
+
+const fetch = async (request, options) => {
+  const response = CachePolicy.storable(request, options)
+    ? await cache(request, options)
+    : await remote(request, options)
+
+  // if the request wasn't a GET or HEAD, and the response
+  // status is between 200 and 399 inclusive, invalidate the
+  // request url
+  if (!['GET', 'HEAD'].includes(request.method) &&
+      response.status >= 200 &&
+      response.status <= 399) {
+    await cache.invalidate(request, options)
+  }
+
+  if (!canFollowRedirect(request, response, options)) {
+    return response
+  }
+
+  const redirect = getRedirect(request, response, options)
+  return fetch(redirect.request, redirect.options)
+}
+
+module.exports = fetch
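
How the redirect handling above surfaces through the module's public `redirect` option, sketched against an imaginary endpoint that answers with a 301; the URL is invented and 'make-fetch-happen' is assumed to resolve to the entry point added just below.

    const makeFetchHappen = require('make-fetch-happen')

    async function redirectModes () {
      const url = 'https://registry.example.test/old-path'

      // 'follow' (default): getRedirect() builds a new request (POST+301/302 and any 303 become GET,
      // authorization/cookie are dropped on a hostname change) and fetch() recurses
      const followed = await makeFetchHappen(url, { redirect: 'follow' })

      // 'manual': canFollowRedirect() returns false and the 3xx response is returned as-is
      const manual = await makeFetchHappen(url, { redirect: 'manual' })

      // 'error': canFollowRedirect() throws a FetchError with code ENOREDIRECT
      const code = await makeFetchHappen(url, { redirect: 'error' }).catch(err => err.code)

      return { followed: followed.status, manual: manual.status, code }
    }
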
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/index.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/index.js
new file mode 100644
index 0000000000000..2f12e8e1b6113
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/index.js
@@ -0,0 +1,41 @@
+const { FetchError, Headers, Request, Response } = require('minipass-fetch')
+
+const configureOptions = require('./options.js')
+const fetch = require('./fetch.js')
+
+const makeFetchHappen = (url, opts) => {
+  const options = configureOptions(opts)
+
+  const request = new Request(url, options)
+  return fetch(request, options)
+}
+
+makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
+  if (typeof defaultUrl === 'object') {
+    defaultOptions = defaultUrl
+    defaultUrl = null
+  }
+
+  const defaultedFetch = (url, options = {}) => {
+    const finalUrl = url || defaultUrl
+    const finalOptions = {
+      ...defaultOptions,
+      ...options,
+      headers: {
+        ...defaultOptions.headers,
+        ...options.headers,
+      },
+    }
+    return wrappedFetch(finalUrl, finalOptions)
+  }
+
+  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
+    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
+  return defaultedFetch
+}
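+
+// Illustrative usage sketch (the cachePath value and URL below are assumed
+// examples, not part of the upstream file): a defaulted fetch reuses a base
+// config across calls and can itself be defaulted again.
+//   const cachedFetch = makeFetchHappen.defaults({ cachePath: '/tmp/example-cache' })
+//   cachedFetch('https://registry.npmjs.org/npm').then(res => res.json())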
+
+module.exports = makeFetchHappen
+module.exports.FetchError = FetchError
+module.exports.Headers = Headers
+module.exports.Request = Request
+module.exports.Response = Response
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/options.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/options.js
new file mode 100644
index 0000000000000..db51cc6324817
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/options.js
@@ -0,0 +1,59 @@
+const dns = require('dns')
+
+const conditionalHeaders = [
+  'if-modified-since',
+  'if-none-match',
+  'if-unmodified-since',
+  'if-match',
+  'if-range',
+]
+
+const configureOptions = (opts) => {
+  const { strictSSL, ...options } = { ...opts }
+  options.method = options.method ? options.method.toUpperCase() : 'GET'
+
+  if (strictSSL === undefined || strictSSL === null) {
+    options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
+  } else {
+    options.rejectUnauthorized = strictSSL !== false
+  }
+
+  if (!options.retry) {
+    options.retry = { retries: 0 }
+  } else if (typeof options.retry === 'string') {
+    const retries = parseInt(options.retry, 10)
+    if (isFinite(retries)) {
+      options.retry = { retries }
+    } else {
+      options.retry = { retries: 0 }
+    }
+  } else if (typeof options.retry === 'number') {
+    options.retry = { retries: options.retry }
+  } else {
+    options.retry = { retries: 0, ...options.retry }
+  }
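+  // For illustration (assumed values): retry: '3' and retry: 3 both normalize
+  // to { retries: 3 }; an object keeps its fields with retries defaulting to 0;
+  // an unparseable string or a missing value becomes { retries: 0 }.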
+
+  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
+
+  options.cache = options.cache || 'default'
+  if (options.cache === 'default') {
+    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
+      return conditionalHeaders.includes(name.toLowerCase())
+    })
+    if (hasConditionalHeader) {
+      options.cache = 'no-store'
+    }
+  }
+
+  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
+
+  // cacheManager is deprecated, but if it's set and
+  // cachePath is not we should copy it to the new field
+  if (options.cacheManager && !options.cachePath) {
+    options.cachePath = options.cacheManager
+  }
+
+  return options
+}
+
+module.exports = configureOptions
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js
new file mode 100644
index 0000000000000..b1d221b2d0ce3
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const MinipassPipeline = require('minipass-pipeline')
+
+class CachingMinipassPipeline extends MinipassPipeline {
+  #events = []
+  #data = new Map()
+
+  constructor (opts, ...streams) {
+    // CRITICAL: do NOT pass the streams to the call to super(), this will start
+    // the flow of data and potentially cause the events we need to catch to emit
+    // before we've finished our own setup. instead we call super() with no args,
+    // finish our setup, and then push the streams into ourselves to start the
+    // data flow
+    super()
+    this.#events = opts.events
+
+    /* istanbul ignore next - coverage disabled because this is pointless to test here */
+    if (streams.length) {
+      this.push(...streams)
+    }
+  }
+
+  on (event, handler) {
+    if (this.#events.includes(event) && this.#data.has(event)) {
+      return handler(...this.#data.get(event))
+    }
+
+    return super.on(event, handler)
+  }
+
+  emit (event, ...data) {
+    if (this.#events.includes(event)) {
+      this.#data.set(event, data)
+    }
+
+    return super.emit(event, ...data)
+  }
+}
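+
+// Illustrative sketch (stream and handler names are assumed): a listener added
+// after 'integrity' has already fired still receives the cached event data,
+// because on() replays what emit() stored instead of waiting for a re-emit.
+//   const pipeline = new CachingMinipassPipeline({ events: ['integrity'] }, sourceStream)
+//   // ...later, even if 'integrity' fired before anyone was listening:
+//   pipeline.on('integrity', sri => console.log('integrity:', sri))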
+
+module.exports = CachingMinipassPipeline
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js b/node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js
new file mode 100644
index 0000000000000..1d640e5380baa
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js
@@ -0,0 +1,132 @@
+const { Minipass } = require('minipass')
+const fetch = require('minipass-fetch')
+const promiseRetry = require('promise-retry')
+const ssri = require('ssri')
+const { log } = require('proc-log')
+
+const CachingMinipassPipeline = require('./pipeline.js')
+const { getAgent } = require('@npmcli/agent')
+const pkg = require('../package.json')
+
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+  'ECONNRESET', // remote socket closed on us
+  'ECONNREFUSED', // remote host refused to open connection
+  'EADDRINUSE', // failed to bind to a local port (proxy?)
+  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+  // from @npmcli/agent
+  'ECONNECTIONTIMEOUT',
+  'EIDLETIMEOUT',
+  'ERESPONSETIMEOUT',
+  'ETRANSFERTIMEOUT',
+  // Known codes we do NOT retry on:
+  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+  // EINVALIDPROXY // invalid protocol from @npmcli/agent
+  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
+]
+
+const RETRY_TYPES = [
+  'request-timeout',
+]
+
+// make a request directly to the remote source,
+// retrying certain classes of errors as well as
+// following redirects (through the cache if necessary)
+// and verifying response integrity
+const remoteFetch = (request, options) => {
+  // options.signal is intended for the fetch itself, not the agent. Attaching
+  // it to the agent will re-use that signal across multiple requests, which
+  // prevents any connections beyond the first one.
+  const agent = getAgent(request.url, { ...options, signal: undefined })
+  if (!request.headers.has('connection')) {
+    request.headers.set('connection', agent ? 'keep-alive' : 'close')
+  }
+
+  if (!request.headers.has('user-agent')) {
+    request.headers.set('user-agent', USER_AGENT)
+  }
+
+  // keep our own options since we're overriding the agent
+  // and the redirect mode
+  const _opts = {
+    ...options,
+    agent,
+    redirect: 'manual',
+  }
+
+  return promiseRetry(async (retryHandler, attemptNum) => {
+    const req = new fetch.Request(request, _opts)
+    try {
+      let res = await fetch(req, _opts)
+      if (_opts.integrity && res.status === 200) {
+        // we got a 200 response and the user has specified an expected
+        // integrity value, so wrap the response in an ssri stream to verify it
+        const integrityStream = ssri.integrityStream({
+          algorithms: _opts.algorithms,
+          integrity: _opts.integrity,
+          size: _opts.size,
+        })
+        const pipeline = new CachingMinipassPipeline({
+          events: ['integrity', 'size'],
+        }, res.body, integrityStream)
+        // we also propagate the integrity and size events out to the pipeline so we can use
+        // this new response body as an integrityEmitter for cacache
+        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
+        integrityStream.on('size', s => pipeline.emit('size', s))
+        res = new fetch.Response(pipeline, res)
+        // set an explicit flag so we know if our response body will emit integrity and size
+        res.body.hasIntegrityEmitter = true
+      }
+
+      res.headers.set('x-fetch-attempts', attemptNum)
+
+      // do not retry POST requests, or requests with a streaming body
+      // do retry requests with a 408, 420, 429 or 500+ status in the response
+      const isStream = Minipass.isStream(req.body)
+      const isRetriable = req.method !== 'POST' &&
+          !isStream &&
+          ([408, 420, 429].includes(res.status) || res.status >= 500)
+
+      if (isRetriable) {
+        if (typeof options.onRetry === 'function') {
+          options.onRetry(res)
+        }
+
+        /* eslint-disable-next-line max-len */
+        log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
+        return retryHandler(res)
+      }
+
+      return res
+    } catch (err) {
+      const code = (err.code === 'EPROMISERETRY')
+        ? err.retried.code
+        : err.code
+
+      // err.retried will be the thing that was thrown from above
+      // if it's a response, we just got a bad status code and we
+      // can re-throw to allow the retry
+      const isRetryError = err.retried instanceof fetch.Response ||
+        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
+
+      if (req.method === 'POST' || isRetryError) {
+        throw err
+      }
+
+      if (typeof options.onRetry === 'function') {
+        options.onRetry(err)
+      }
+
+      log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
+      return retryHandler(err)
+    }
+  }, options.retry).catch((err) => {
+    // don't reject for http errors, just return them
+    if (err.status >= 400 && err.type !== 'system') {
+      return err
+    }
+
+    throw err
+  })
+}
+
+module.exports = remoteFetch
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/package.json b/node_modules/pacote/node_modules/make-fetch-happen/package.json
new file mode 100644
index 0000000000000..1e27d4ee8a70e
--- /dev/null
+++ b/node_modules/pacote/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,74 @@
+{
+  "name": "make-fetch-happen",
+  "version": "15.0.1",
+  "description": "Opinionated, caching, retrying fetch client",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/make-fetch-happen.git"
+  },
+  "keywords": [
+    "http",
+    "request",
+    "fetch",
+    "mean girls",
+    "caching",
+    "cache",
+    "subresource integrity"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/agent": "^3.0.0",
+    "cacache": "^20.0.1",
+    "http-cache-semantics": "^4.1.1",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^4.0.0",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "negotiator": "^1.0.0",
+    "proc-log": "^5.0.0",
+    "promise-retry": "^2.0.1",
+    "ssri": "^12.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "nock": "^13.2.4",
+    "safe-buffer": "^5.2.1",
+    "standard-version": "^9.3.2",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "color": 1,
+    "files": "test/*.js",
+    "check-coverage": true,
+    "timeout": 60,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/pacote/node_modules/minimatch/LICENSE b/node_modules/pacote/node_modules/minimatch/LICENSE
new file mode 100644
index 0000000000000..1493534e60dce
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
new file mode 100644
index 0000000000000..5fc86bbd0116c
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
@@ -0,0 +1,14 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.assertValidPattern = void 0;
+const MAX_PATTERN_LENGTH = 1024 * 64;
+const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+exports.assertValidPattern = assertValidPattern;
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js
new file mode 100644
index 0000000000000..7b2109625eaeb
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js
@@ -0,0 +1,592 @@
+"use strict";
+// parse a single path portion
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.AST = void 0;
+const brace_expressions_js_1 = require("./brace-expressions.js");
+const unescape_js_1 = require("./unescape.js");
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of a extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                (0, unescape_js_1.unescape)(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            (0, unescape_js_1.unescape)(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
+    }
+}
+exports.AST = AST;
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js
new file mode 100644
index 0000000000000..0e13eefc4cfee
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js
@@ -0,0 +1,152 @@
+"use strict";
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parseClass = void 0;
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class is just no good.
+const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
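+// For illustration (traced from the code above): parseClass('[a-c]', 0) returns
+// ['[a-c]', false, 5, true]: the regexp source, no /u flag needed, five
+// characters consumed, and the class treated as magic.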
+exports.parseClass = parseClass;
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js
new file mode 100644
index 0000000000000..02a4f8a8e0a58
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js
@@ -0,0 +1,22 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.escape = void 0;
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
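+// For illustration (assumed input): escape('foo*bar') returns 'foo\\*bar',
+// while escape('foo*bar', { windowsPathsNoEscape: true }) returns 'foo[*]bar'.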
+exports.escape = escape;
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/index.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/index.js
new file mode 100644
index 0000000000000..f58fb8616aa9a
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/index.js
@@ -0,0 +1,1014 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
+const brace_expansion_1 = require("@isaacs/brace-expansion");
+const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
+const ast_js_1 = require("./ast.js");
+const escape_js_1 = require("./escape.js");
+const unescape_js_1 = require("./unescape.js");
+const minimatch = (p, pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+exports.minimatch = minimatch;
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+exports.minimatch.sep = exports.sep;
+exports.GLOBSTAR = Symbol('globstar **');
+exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
+exports.filter = filter;
+exports.minimatch.filter = exports.filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return exports.minimatch;
+    }
+    const orig = exports.minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: exports.GLOBSTAR,
+    });
+};
+exports.defaults = defaults;
+exports.minimatch.defaults = exports.defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+const braceExpand = (pattern, options = {}) => {
+    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+    // Thanks to Yeting Li  for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return (0, brace_expansion_1.expand)(pattern);
+};
+exports.braceExpand = braceExpand;
+exports.minimatch.braceExpand = exports.braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+exports.makeRe = makeRe;
+exports.minimatch.makeRe = exports.makeRe;
+const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
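+// For illustration (assumed inputs): match(['a.js', 'b.txt'], '*.js') returns
+// ['a.js']; with { nonull: true } and no matches, the pattern itself is returned.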
+exports.match = match;
+exports.minimatch.match = exports.match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doesn't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means the two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // don't need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === exports.GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return (0, exports.braceExpand)(this.pattern, this.options);
+    }
+    parse(pattern) {
+        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return exports.GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === exports.GLOBSTAR
+                        ? exports.GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== exports.GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== exports.GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = exports.GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return exports.minimatch.defaults(def).Minimatch;
+    }
+}
+exports.Minimatch = Minimatch;
+/* c8 ignore start */
+var ast_js_2 = require("./ast.js");
+Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
+var escape_js_2 = require("./escape.js");
+Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
+var unescape_js_2 = require("./unescape.js");
+Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
+/* c8 ignore stop */
+exports.minimatch.AST = ast_js_1.AST;
+exports.minimatch.Minimatch = Minimatch;
+exports.minimatch.escape = escape_js_1.escape;
+exports.minimatch.unescape = unescape_js_1.unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/package.json b/node_modules/pacote/node_modules/minimatch/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js
new file mode 100644
index 0000000000000..47c36bcee5a02
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unescape = void 0;
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * because `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+exports.unescape = unescape;
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js
new file mode 100644
index 0000000000000..7b534fc30200b
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js
@@ -0,0 +1,10 @@
+const MAX_PATTERN_LENGTH = 1024 * 64;
+export const assertValidPattern = (pattern) => {
+    if (typeof pattern !== 'string') {
+        throw new TypeError('invalid pattern');
+    }
+    if (pattern.length > MAX_PATTERN_LENGTH) {
+        throw new TypeError('pattern is too long');
+    }
+};
+//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/ast.js b/node_modules/pacote/node_modules/minimatch/dist/esm/ast.js
new file mode 100644
index 0000000000000..2d2bced6533de
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/ast.js
@@ -0,0 +1,588 @@
+// parse a single path portion
+import { parseClass } from './brace-expressions.js';
+import { unescape } from './unescape.js';
+const types = new Set(['!', '?', '+', '*', '@']);
+const isExtglobType = (c) => types.has(c);
+// Patterns that get prepended to bind to the start of either the
+// entire string, or just a single path portion, to prevent dots
+// and/or traversal patterns, when needed.
+// Exts don't need the ^ or / bit, because the root binds that already.
+const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
+const startNoDot = '(?!\\.)';
+// characters that indicate a start of pattern needs the "no dots" bit,
+// because a dot *might* be matched. ( is not in the list, because in
+// the case of a child extglob, it will handle the prevention itself.
+const addPatternStart = new Set(['[', '.']);
+// cases where traversal is A-OK, no dot prevention needed
+const justDots = new Set(['..', '.']);
+const reSpecials = new Set('().*{}+?[]^$\\!');
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// any single thing other than /
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// use + when we need to ensure that *something* matches, because the * is
+// the only thing in the path portion.
+const starNoEmpty = qmark + '+?';
+// remove the \ chars that we added if we end up doing a nonmagic compare
+// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
+export class AST {
+    type;
+    #root;
+    #hasMagic;
+    #uflag = false;
+    #parts = [];
+    #parent;
+    #parentIndex;
+    #negs;
+    #filledNegs = false;
+    #options;
+    #toString;
+    // set to true if it's an extglob with no children
+    // (which really means one child of '')
+    #emptyExt = false;
+    constructor(type, parent, options = {}) {
+        this.type = type;
+        // extglobs are inherently magical
+        if (type)
+            this.#hasMagic = true;
+        this.#parent = parent;
+        this.#root = this.#parent ? this.#parent.#root : this;
+        this.#options = this.#root === this ? options : this.#root.#options;
+        this.#negs = this.#root === this ? [] : this.#root.#negs;
+        if (type === '!' && !this.#root.#filledNegs)
+            this.#negs.push(this);
+        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
+    }
+    get hasMagic() {
+        /* c8 ignore start */
+        if (this.#hasMagic !== undefined)
+            return this.#hasMagic;
+        /* c8 ignore stop */
+        for (const p of this.#parts) {
+            if (typeof p === 'string')
+                continue;
+            if (p.type || p.hasMagic)
+                return (this.#hasMagic = true);
+        }
+        // note: will be undefined until we generate the regexp src and find out
+        return this.#hasMagic;
+    }
+    // reconstructs the pattern
+    toString() {
+        if (this.#toString !== undefined)
+            return this.#toString;
+        if (!this.type) {
+            return (this.#toString = this.#parts.map(p => String(p)).join(''));
+        }
+        else {
+            return (this.#toString =
+                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
+        }
+    }
+    #fillNegs() {
+        /* c8 ignore start */
+        if (this !== this.#root)
+            throw new Error('should only call on root');
+        if (this.#filledNegs)
+            return this;
+        /* c8 ignore stop */
+        // call toString() once to fill this out
+        this.toString();
+        this.#filledNegs = true;
+        let n;
+        while ((n = this.#negs.pop())) {
+            if (n.type !== '!')
+                continue;
+            // walk up the tree, appending everything that comes AFTER parentIndex
+            let p = n;
+            let pp = p.#parent;
+            while (pp) {
+                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
+                    for (const part of n.#parts) {
+                        /* c8 ignore start */
+                        if (typeof part === 'string') {
+                            throw new Error('string part in extglob AST??');
+                        }
+                        /* c8 ignore stop */
+                        part.copyIn(pp.#parts[i]);
+                    }
+                }
+                p = pp;
+                pp = p.#parent;
+            }
+        }
+        return this;
+    }
+    push(...parts) {
+        for (const p of parts) {
+            if (p === '')
+                continue;
+            /* c8 ignore start */
+            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
+                throw new Error('invalid part: ' + p);
+            }
+            /* c8 ignore stop */
+            this.#parts.push(p);
+        }
+    }
+    toJSON() {
+        const ret = this.type === null
+            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
+            : [this.type, ...this.#parts.map(p => p.toJSON())];
+        if (this.isStart() && !this.type)
+            ret.unshift([]);
+        if (this.isEnd() &&
+            (this === this.#root ||
+                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
+            ret.push({});
+        }
+        return ret;
+    }
+    isStart() {
+        if (this.#root === this)
+            return true;
+        // if (this.type) return !!this.#parent?.isStart()
+        if (!this.#parent?.isStart())
+            return false;
+        if (this.#parentIndex === 0)
+            return true;
+        // if everything AHEAD of this is a negation, then it's still the "start"
+        const p = this.#parent;
+        for (let i = 0; i < this.#parentIndex; i++) {
+            const pp = p.#parts[i];
+            if (!(pp instanceof AST && pp.type === '!')) {
+                return false;
+            }
+        }
+        return true;
+    }
+    isEnd() {
+        if (this.#root === this)
+            return true;
+        if (this.#parent?.type === '!')
+            return true;
+        if (!this.#parent?.isEnd())
+            return false;
+        if (!this.type)
+            return this.#parent?.isEnd();
+        // if not root, it'll always have a parent
+        /* c8 ignore start */
+        const pl = this.#parent ? this.#parent.#parts.length : 0;
+        /* c8 ignore stop */
+        return this.#parentIndex === pl - 1;
+    }
+    copyIn(part) {
+        if (typeof part === 'string')
+            this.push(part);
+        else
+            this.push(part.clone(this));
+    }
+    clone(parent) {
+        const c = new AST(this.type, parent);
+        for (const p of this.#parts) {
+            c.copyIn(p);
+        }
+        return c;
+    }
+    static #parseAST(str, ast, pos, opt) {
+        let escaping = false;
+        let inBrace = false;
+        let braceStart = -1;
+        let braceNeg = false;
+        if (ast.type === null) {
+            // outside of an extglob, append until we find a start
+            let i = pos;
+            let acc = '';
+            while (i < str.length) {
+                const c = str.charAt(i++);
+                // still accumulate escapes at this point, but we do ignore
+                // starts that are escaped
+                if (escaping || c === '\\') {
+                    escaping = !escaping;
+                    acc += c;
+                    continue;
+                }
+                if (inBrace) {
+                    if (i === braceStart + 1) {
+                        if (c === '^' || c === '!') {
+                            braceNeg = true;
+                        }
+                    }
+                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                        inBrace = false;
+                    }
+                    acc += c;
+                    continue;
+                }
+                else if (c === '[') {
+                    inBrace = true;
+                    braceStart = i;
+                    braceNeg = false;
+                    acc += c;
+                    continue;
+                }
+                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
+                    ast.push(acc);
+                    acc = '';
+                    const ext = new AST(c, ast);
+                    i = AST.#parseAST(str, ext, i, opt);
+                    ast.push(ext);
+                    continue;
+                }
+                acc += c;
+            }
+            ast.push(acc);
+            return i;
+        }
+        // some kind of extglob, pos is at the (
+        // find the next | or )
+        let i = pos + 1;
+        let part = new AST(null, ast);
+        const parts = [];
+        let acc = '';
+        while (i < str.length) {
+            const c = str.charAt(i++);
+            // still accumulate escapes at this point, but we do ignore
+            // starts that are escaped
+            if (escaping || c === '\\') {
+                escaping = !escaping;
+                acc += c;
+                continue;
+            }
+            if (inBrace) {
+                if (i === braceStart + 1) {
+                    if (c === '^' || c === '!') {
+                        braceNeg = true;
+                    }
+                }
+                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
+                    inBrace = false;
+                }
+                acc += c;
+                continue;
+            }
+            else if (c === '[') {
+                inBrace = true;
+                braceStart = i;
+                braceNeg = false;
+                acc += c;
+                continue;
+            }
+            if (isExtglobType(c) && str.charAt(i) === '(') {
+                part.push(acc);
+                acc = '';
+                const ext = new AST(c, part);
+                part.push(ext);
+                i = AST.#parseAST(str, ext, i, opt);
+                continue;
+            }
+            if (c === '|') {
+                part.push(acc);
+                acc = '';
+                parts.push(part);
+                part = new AST(null, ast);
+                continue;
+            }
+            if (c === ')') {
+                if (acc === '' && ast.#parts.length === 0) {
+                    ast.#emptyExt = true;
+                }
+                part.push(acc);
+                acc = '';
+                ast.push(...parts, part);
+                return i;
+            }
+            acc += c;
+        }
+        // unfinished extglob
+        // if we got here, it was a malformed extglob! not an extglob, but
+        // maybe something else in there.
+        ast.type = null;
+        ast.#hasMagic = undefined;
+        ast.#parts = [str.substring(pos - 1)];
+        return i;
+    }
+    static fromGlob(pattern, options = {}) {
+        const ast = new AST(null, undefined, options);
+        AST.#parseAST(pattern, ast, 0, options);
+        return ast;
+    }
+    // returns the regular expression if there's magic, or the unescaped
+    // string if not.
+    toMMPattern() {
+        // should only be called on root
+        /* c8 ignore start */
+        if (this !== this.#root)
+            return this.#root.toMMPattern();
+        /* c8 ignore stop */
+        const glob = this.toString();
+        const [re, body, hasMagic, uflag] = this.toRegExpSource();
+        // if we're in nocase mode, and not nocaseMagicOnly, then we do
+        // still need a regular expression if we have to case-insensitively
+        // match capital/lowercase characters.
+        const anyMagic = hasMagic ||
+            this.#hasMagic ||
+            (this.#options.nocase &&
+                !this.#options.nocaseMagicOnly &&
+                glob.toUpperCase() !== glob.toLowerCase());
+        if (!anyMagic) {
+            return body;
+        }
+        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
+        return Object.assign(new RegExp(`^${re}$`, flags), {
+            _src: re,
+            _glob: glob,
+        });
+    }
+    get options() {
+        return this.#options;
+    }
+    // returns the string match, the regexp source, whether there's magic
+    // in the regexp (so a regular expression is required) and whether or
+    // not the uflag is needed for the regular expression (for posix classes)
+    // TODO: instead of injecting the start/end at this point, just return
+    // the BODY of the regexp, along with the start/end portions suitable
+    // for binding the start/end in either a joined full-path makeRe context
+    // (where we bind to (^|/), or a standalone matchPart context (where
+    // we bind to ^, and not /).  Otherwise slashes get duped!
+    //
+    // In part-matching mode, the start is:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: ^(?!\.\.?$)
+    // - if dots allowed or not possible: ^
+    // - if dots possible and not allowed: ^(?!\.)
+    // end is:
+    // - if not isEnd(): nothing
+    // - else: $
+    //
+    // In full-path matching mode, we put the slash at the START of the
+    // pattern, so start is:
+    // - if first pattern: same as part-matching mode
+    // - if not isStart(): nothing
+    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
+    // - if dots allowed or not possible: /
+    // - if dots possible and not allowed: /(?!\.)
+    // end is:
+    // - if last pattern, same as part-matching mode
+    // - else nothing
+    //
+    // Always put the (?:$|/) on negated tails, though, because that has to be
+    // there to bind the end of the negated pattern portion, and it's easier to
+    // just stick it in now rather than try to inject it later in the middle of
+    // the pattern.
+    //
+    // We can just always return the same end, and leave it up to the caller
+    // to know whether it's going to be used joined or in parts.
+    // And, if the start is adjusted slightly, can do the same there:
+    // - if not isStart: nothing
+    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
+    // - if dots allowed or not possible: (?:/|^)
+    // - if dots possible and not allowed: (?:/|^)(?!\.)
+    //
+    // But it's better to have a simpler binding without a conditional, for
+    // performance, so probably better to return both start options.
+    //
+    // Then the caller just ignores the end if it's not the first pattern,
+    // and the start always gets applied.
+    //
+    // But that's always going to be $ if it's the ending pattern, or nothing,
+    // so the caller can just attach $ at the end of the pattern when building.
+    //
+    // So the todo is:
+    // - better detect what kind of start is needed
+    // - return both flavors of starting pattern
+    // - attach $ at the end of the pattern when creating the actual RegExp
+    //
+    // Ah, but wait, no, that all only applies to the root when the first pattern
+    // is not an extglob. If the first pattern IS an extglob, then we need all
+    // that dot prevention biz to live in the extglob portions, because eg
+    // +(*|.x*) can match .xy but not .yx.
+    //
+    // So, return the two flavors if it's #root and the first child is not an
+    // AST, otherwise leave it to the child AST to handle it, and there,
+    // use the (?:^|/) style of start binding.
+    //
+    // Even simplified further:
+    // - Since the start for a join is eg /(?!\.) and the start for a part
+    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
+    // or start or whatever) and prepend ^ or / at the Regexp construction.
+    toRegExpSource(allowDot) {
+        const dot = allowDot ?? !!this.#options.dot;
+        if (this.#root === this)
+            this.#fillNegs();
+        if (!this.type) {
+            const noEmpty = this.isStart() && this.isEnd();
+            const src = this.#parts
+                .map(p => {
+                const [re, _, hasMagic, uflag] = typeof p === 'string'
+                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
+                    : p.toRegExpSource(allowDot);
+                this.#hasMagic = this.#hasMagic || hasMagic;
+                this.#uflag = this.#uflag || uflag;
+                return re;
+            })
+                .join('');
+            let start = '';
+            if (this.isStart()) {
+                if (typeof this.#parts[0] === 'string') {
+                    // this is the string that will match the start of the pattern,
+                    // so we need to protect against dots and such.
+                    // '.' and '..' cannot match unless the pattern is that exactly,
+                    // even if it starts with . or dot:true is set.
+                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
+                    if (!dotTravAllowed) {
+                        const aps = addPatternStart;
+                        // check if we have a possibility of matching . or ..,
+                        // and prevent that.
+                        const needNoTrav = 
+                        // dots are allowed, and the pattern starts with [ or .
+                        (dot && aps.has(src.charAt(0))) ||
+                            // the pattern starts with \., and then [ or .
+                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
+                            // the pattern starts with \.\., and then [ or .
+                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
+                        // no need to prevent dots if it can't match a dot, or if a
+                        // sub-pattern will be preventing it anyway.
+                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
+                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
+                    }
+                }
+            }
+            // append the "end of path portion" pattern to negation tails
+            let end = '';
+            if (this.isEnd() &&
+                this.#root.#filledNegs &&
+                this.#parent?.type === '!') {
+                end = '(?:$|\\/)';
+            }
+            const final = start + src + end;
+            return [
+                final,
+                unescape(src),
+                (this.#hasMagic = !!this.#hasMagic),
+                this.#uflag,
+            ];
+        }
+        // We need to calculate the body *twice* if it's a repeat pattern
+        // at the start, once in nodot mode, then again in dot mode, so a
+        // pattern like *(?) can match 'x.y'
+        const repeated = this.type === '*' || this.type === '+';
+        // some kind of extglob
+        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
+        let body = this.#partsToRegExp(dot);
+        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
+            // invalid extglob, has to at least be *something* present, if it's
+            // the entire path portion.
+            const s = this.toString();
+            this.#parts = [s];
+            this.type = null;
+            this.#hasMagic = undefined;
+            return [s, unescape(this.toString()), false, false];
+        }
+        // XXX abstract out this map method
+        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
+            ? ''
+            : this.#partsToRegExp(true);
+        if (bodyDotAllowed === body) {
+            bodyDotAllowed = '';
+        }
+        if (bodyDotAllowed) {
+            body = `(?:${body})(?:${bodyDotAllowed})*?`;
+        }
+        // an empty !() is exactly equivalent to a starNoEmpty
+        let final = '';
+        if (this.type === '!' && this.#emptyExt) {
+            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
+        }
+        else {
+            const close = this.type === '!'
+                ? // !() must match something, but !(x) can match ''
+                    '))' +
+                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
+                        star +
+                        ')'
+                : this.type === '@'
+                    ? ')'
+                    : this.type === '?'
+                        ? ')?'
+                        : this.type === '+' && bodyDotAllowed
+                            ? ')'
+                            : this.type === '*' && bodyDotAllowed
+                                ? `)?`
+                                : `)${this.type}`;
+            final = start + body + close;
+        }
+        return [
+            final,
+            unescape(body),
+            (this.#hasMagic = !!this.#hasMagic),
+            this.#uflag,
+        ];
+    }
+    #partsToRegExp(dot) {
+        return this.#parts
+            .map(p => {
+            // extglob ASTs should only contain parent ASTs
+            /* c8 ignore start */
+            if (typeof p === 'string') {
+                throw new Error('string type in extglob ast??');
+            }
+            /* c8 ignore stop */
+            // can ignore hasMagic, because extglobs are already always magic
+            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
+            this.#uflag = this.#uflag || uflag;
+            return re;
+        })
+            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
+            .join('|');
+    }
+    static #parseGlob(glob, hasMagic, noEmpty = false) {
+        let escaping = false;
+        let re = '';
+        let uflag = false;
+        for (let i = 0; i < glob.length; i++) {
+            const c = glob.charAt(i);
+            if (escaping) {
+                escaping = false;
+                re += (reSpecials.has(c) ? '\\' : '') + c;
+                continue;
+            }
+            if (c === '\\') {
+                if (i === glob.length - 1) {
+                    re += '\\\\';
+                }
+                else {
+                    escaping = true;
+                }
+                continue;
+            }
+            if (c === '[') {
+                const [src, needUflag, consumed, magic] = parseClass(glob, i);
+                if (consumed) {
+                    re += src;
+                    uflag = uflag || needUflag;
+                    i += consumed - 1;
+                    hasMagic = hasMagic || magic;
+                    continue;
+                }
+            }
+            if (c === '*') {
+                if (noEmpty && glob === '*')
+                    re += starNoEmpty;
+                else
+                    re += star;
+                hasMagic = true;
+                continue;
+            }
+            if (c === '?') {
+                re += qmark;
+                hasMagic = true;
+                continue;
+            }
+            re += regExpEscape(c);
+        }
+        return [re, unescape(glob), !!hasMagic, uflag];
+    }
+}
+//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js
new file mode 100644
index 0000000000000..c629d6ae816e2
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js
@@ -0,0 +1,148 @@
+// translate the various posix character classes into unicode properties
+// this works across all unicode locales
+// { <posix class>: [<translation>, /u flag required, negated]
+const posixClasses = {
+    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
+    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
+    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
+    '[:blank:]': ['\\p{Zs}\\t', true],
+    '[:cntrl:]': ['\\p{Cc}', true],
+    '[:digit:]': ['\\p{Nd}', true],
+    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
+    '[:lower:]': ['\\p{Ll}', true],
+    '[:print:]': ['\\p{C}', true],
+    '[:punct:]': ['\\p{P}', true],
+    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
+    '[:upper:]': ['\\p{Lu}', true],
+    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
+    '[:xdigit:]': ['A-Fa-f0-9', false],
+};
+// only need to escape a few things inside of brace expressions
+// escapes: [ \ ] -
+const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
+// escape all regexp magic characters
+const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+// everything has already been escaped, we just have to join
+const rangesToString = (ranges) => ranges.join('');
+// takes a glob string at a posix brace expression, and returns
+// an equivalent regular expression source, and boolean indicating
+// whether the /u flag needs to be applied, and the number of chars
+// consumed to parse the character class.
+// This also removes out of order ranges, and returns ($.) if the
+// entire class just no good.
+export const parseClass = (glob, position) => {
+    const pos = position;
+    /* c8 ignore start */
+    if (glob.charAt(pos) !== '[') {
+        throw new Error('not in a brace expression');
+    }
+    /* c8 ignore stop */
+    const ranges = [];
+    const negs = [];
+    let i = pos + 1;
+    let sawStart = false;
+    let uflag = false;
+    let escaping = false;
+    let negate = false;
+    let endPos = pos;
+    let rangeStart = '';
+    WHILE: while (i < glob.length) {
+        const c = glob.charAt(i);
+        if ((c === '!' || c === '^') && i === pos + 1) {
+            negate = true;
+            i++;
+            continue;
+        }
+        if (c === ']' && sawStart && !escaping) {
+            endPos = i + 1;
+            break;
+        }
+        sawStart = true;
+        if (c === '\\') {
+            if (!escaping) {
+                escaping = true;
+                i++;
+                continue;
+            }
+            // escaped \ char, fall through and treat like normal char
+        }
+        if (c === '[' && !escaping) {
+            // either a posix class, a collation equivalent, or just a [
+            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
+                if (glob.startsWith(cls, i)) {
+                    // invalid, [a-[] is fine, but not [a-[:alpha]]
+                    if (rangeStart) {
+                        return ['$.', false, glob.length - pos, true];
+                    }
+                    i += cls.length;
+                    if (neg)
+                        negs.push(unip);
+                    else
+                        ranges.push(unip);
+                    uflag = uflag || u;
+                    continue WHILE;
+                }
+            }
+        }
+        // now it's just a normal character, effectively
+        escaping = false;
+        if (rangeStart) {
+            // throw this range away if it's not valid, but others
+            // can still match.
+            if (c > rangeStart) {
+                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
+            }
+            else if (c === rangeStart) {
+                ranges.push(braceEscape(c));
+            }
+            rangeStart = '';
+            i++;
+            continue;
+        }
+        // now might be the start of a range.
+        // can be either c-d or c-] or c] or c] at this point
+        if (glob.startsWith('-]', i + 1)) {
+            ranges.push(braceEscape(c + '-'));
+            i += 2;
+            continue;
+        }
+        if (glob.startsWith('-', i + 1)) {
+            rangeStart = c;
+            i += 2;
+            continue;
+        }
+        // not the start of a range, just a single character
+        ranges.push(braceEscape(c));
+        i++;
+    }
+    if (endPos < i) {
+        // didn't see the end of the class, not a valid class,
+        // but might still be valid as a literal match.
+        return ['', false, 0, false];
+    }
+    // if we got no ranges and no negates, then we have a range that
+    // cannot possibly match anything, and that poisons the whole glob
+    if (!ranges.length && !negs.length) {
+        return ['$.', false, glob.length - pos, true];
+    }
+    // if we got one positive range, and it's a single character, then that's
+    // not actually a magic pattern, it's just that one literal character.
+    // we should not treat that as "magic", we should just return the literal
+    // character. [_] is a perfectly valid way to escape glob magic chars.
+    if (negs.length === 0 &&
+        ranges.length === 1 &&
+        /^\\?.$/.test(ranges[0]) &&
+        !negate) {
+        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
+        return [regexpEscape(r), false, endPos - pos, false];
+    }
+    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
+    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
+    const comb = ranges.length && negs.length
+        ? '(' + sranges + '|' + snegs + ')'
+        : ranges.length
+            ? sranges
+            : snegs;
+    return [comb, uflag, endPos - pos, true];
+};
+//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/escape.js b/node_modules/pacote/node_modules/minimatch/dist/esm/escape.js
new file mode 100644
index 0000000000000..16f7c8c7bdc64
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/escape.js
@@ -0,0 +1,18 @@
+/**
+ * Escape all magic characters in a glob pattern.
+ *
+ * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
+ * option is used, then characters are escaped by wrapping in `[]`, because
+ * a magic character wrapped in a character class can only be satisfied by
+ * that exact character.  In this mode, `\` is _not_ escaped, because it is
+ * not interpreted as a magic character, but instead as a path separator.
+ */
+export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    // don't need to escape +@! because we escape the parens
+    // that make those magic, and escaping ! as [!] isn't valid,
+    // because [!]] is a valid glob class meaning not ']'.
+    return windowsPathsNoEscape
+        ? s.replace(/[?*()[\]]/g, '[$&]')
+        : s.replace(/[?*()[\]\\]/g, '\\$&');
+};
+//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/index.js b/node_modules/pacote/node_modules/minimatch/dist/esm/index.js
new file mode 100644
index 0000000000000..790d6c02a2f22
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/index.js
@@ -0,0 +1,1001 @@
+import { expand } from '@isaacs/brace-expansion';
+import { assertValidPattern } from './assert-valid-pattern.js';
+import { AST } from './ast.js';
+import { escape } from './escape.js';
+import { unescape } from './unescape.js';
+export const minimatch = (p, pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // shortcut: comments match nothing.
+    if (!options.nocomment && pattern.charAt(0) === '#') {
+        return false;
+    }
+    return new Minimatch(pattern, options).match(p);
+};
+// Optimized checking for the most common glob patterns.
+const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
+const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
+const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
+const starDotExtTestNocase = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
+};
+const starDotExtTestNocaseDot = (ext) => {
+    ext = ext.toLowerCase();
+    return (f) => f.toLowerCase().endsWith(ext);
+};
+const starDotStarRE = /^\*+\.\*+$/;
+const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
+const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
+const dotStarRE = /^\.\*+$/;
+const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
+const starRE = /^\*+$/;
+const starTest = (f) => f.length !== 0 && !f.startsWith('.');
+const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
+const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
+const qmarksTestNocase = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestNocaseDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    if (!ext)
+        return noext;
+    ext = ext.toLowerCase();
+    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
+};
+const qmarksTestDot = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExtDot([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTest = ([$0, ext = '']) => {
+    const noext = qmarksTestNoExt([$0]);
+    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
+};
+const qmarksTestNoExt = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && !f.startsWith('.');
+};
+const qmarksTestNoExtDot = ([$0]) => {
+    const len = $0.length;
+    return (f) => f.length === len && f !== '.' && f !== '..';
+};
+/* c8 ignore start */
+const defaultPlatform = (typeof process === 'object' && process
+    ? (typeof process.env === 'object' &&
+        process.env &&
+        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
+        process.platform
+    : 'posix');
+const path = {
+    win32: { sep: '\\' },
+    posix: { sep: '/' },
+};
+/* c8 ignore stop */
+export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
+minimatch.sep = sep;
+export const GLOBSTAR = Symbol('globstar **');
+minimatch.GLOBSTAR = GLOBSTAR;
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]';
+// * => any number of characters
+const star = qmark + '*?';
+// ** when dots are allowed.  Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
+export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
+minimatch.filter = filter;
+const ext = (a, b = {}) => Object.assign({}, a, b);
+export const defaults = (def) => {
+    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+        return minimatch;
+    }
+    const orig = minimatch;
+    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
+    return Object.assign(m, {
+        Minimatch: class Minimatch extends orig.Minimatch {
+            constructor(pattern, options = {}) {
+                super(pattern, ext(def, options));
+            }
+            static defaults(options) {
+                return orig.defaults(ext(def, options)).Minimatch;
+            }
+        },
+        AST: class AST extends orig.AST {
+            /* c8 ignore start */
+            constructor(type, parent, options = {}) {
+                super(type, parent, ext(def, options));
+            }
+            /* c8 ignore stop */
+            static fromGlob(pattern, options = {}) {
+                return orig.AST.fromGlob(pattern, ext(def, options));
+            }
+        },
+        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
+        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
+        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
+        defaults: (options) => orig.defaults(ext(def, options)),
+        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
+        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
+        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
+        sep: orig.sep,
+        GLOBSTAR: GLOBSTAR,
+    });
+};
+minimatch.defaults = defaults;
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+export const braceExpand = (pattern, options = {}) => {
+    assertValidPattern(pattern);
+    // Thanks to Yeting Li <https://github.com/yetingli> for
+    // improving this regexp to avoid a ReDOS vulnerability.
+    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+        // shortcut. no need to expand.
+        return [pattern];
+    }
+    return expand(pattern);
+};
+minimatch.braceExpand = braceExpand;
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion.  Otherwise, any series
+// of * is equivalent to a single *.  Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
+minimatch.makeRe = makeRe;
+export const match = (list, pattern, options = {}) => {
+    const mm = new Minimatch(pattern, options);
+    list = list.filter(f => mm.match(f));
+    if (mm.options.nonull && !list.length) {
+        list.push(pattern);
+    }
+    return list;
+};
+minimatch.match = match;
+// replace stuff like \* with *
+const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
+const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
+export class Minimatch {
+    options;
+    set;
+    pattern;
+    windowsPathsNoEscape;
+    nonegate;
+    negate;
+    comment;
+    empty;
+    preserveMultipleSlashes;
+    partial;
+    globSet;
+    globParts;
+    nocase;
+    isWindows;
+    platform;
+    windowsNoMagicRoot;
+    regexp;
+    constructor(pattern, options = {}) {
+        assertValidPattern(pattern);
+        options = options || {};
+        this.options = options;
+        this.pattern = pattern;
+        this.platform = options.platform || defaultPlatform;
+        this.isWindows = this.platform === 'win32';
+        this.windowsPathsNoEscape =
+            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
+        if (this.windowsPathsNoEscape) {
+            this.pattern = this.pattern.replace(/\\/g, '/');
+        }
+        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
+        this.regexp = null;
+        this.negate = false;
+        this.nonegate = !!options.nonegate;
+        this.comment = false;
+        this.empty = false;
+        this.partial = !!options.partial;
+        this.nocase = !!this.options.nocase;
+        this.windowsNoMagicRoot =
+            options.windowsNoMagicRoot !== undefined
+                ? options.windowsNoMagicRoot
+                : !!(this.isWindows && this.nocase);
+        this.globSet = [];
+        this.globParts = [];
+        this.set = [];
+        // make the set of regexps etc.
+        this.make();
+    }
+    hasMagic() {
+        if (this.options.magicalBraces && this.set.length > 1) {
+            return true;
+        }
+        for (const pattern of this.set) {
+            for (const part of pattern) {
+                if (typeof part !== 'string')
+                    return true;
+            }
+        }
+        return false;
+    }
+    debug(..._) { }
+    make() {
+        const pattern = this.pattern;
+        const options = this.options;
+        // empty patterns and comments match nothing.
+        if (!options.nocomment && pattern.charAt(0) === '#') {
+            this.comment = true;
+            return;
+        }
+        if (!pattern) {
+            this.empty = true;
+            return;
+        }
+        // step 1: figure out negation, etc.
+        this.parseNegate();
+        // step 2: expand braces
+        this.globSet = [...new Set(this.braceExpand())];
+        if (options.debug) {
+            this.debug = (...args) => console.error(...args);
+        }
+        this.debug(this.pattern, this.globSet);
+        // step 3: now we have a set, so turn each one into a series of
+        // path-portion matching patterns.
+        // These will be regexps, except in the case of "**", which is
+        // set to the GLOBSTAR object for globstar behavior,
+        // and will not contain any / characters
+        //
+        // First, we preprocess to make the glob pattern sets a bit simpler
+        // and deduped.  There are some perf-killing patterns that can cause
+        // problems with a glob walk, but we can simplify them down a bit.
+        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
+        this.globParts = this.preprocess(rawGlobParts);
+        this.debug(this.pattern, this.globParts);
+        // glob --> regexps
+        let set = this.globParts.map((s, _, __) => {
+            if (this.isWindows && this.windowsNoMagicRoot) {
+                // check if it's a drive or unc path.
+                const isUNC = s[0] === '' &&
+                    s[1] === '' &&
+                    (s[2] === '?' || !globMagic.test(s[2])) &&
+                    !globMagic.test(s[3]);
+                const isDrive = /^[a-z]:/i.test(s[0]);
+                if (isUNC) {
+                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
+                }
+                else if (isDrive) {
+                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
+                }
+            }
+            return s.map(ss => this.parse(ss));
+        });
+        this.debug(this.pattern, set);
+        // filter out everything that didn't compile properly.
+        this.set = set.filter(s => s.indexOf(false) === -1);
+        // do not treat the ? in UNC paths as magic
+        if (this.isWindows) {
+            for (let i = 0; i < this.set.length; i++) {
+                const p = this.set[i];
+                if (p[0] === '' &&
+                    p[1] === '' &&
+                    this.globParts[i][2] === '?' &&
+                    typeof p[3] === 'string' &&
+                    /^[a-z]:$/i.test(p[3])) {
+                    p[2] = '?';
+                }
+            }
+        }
+        this.debug(this.pattern, this.set);
+    }
+    // various transforms to equivalent pattern sets that are
+    // faster to process in a filesystem walk.  The goal is to
+    // eliminate what we can, and push all ** patterns as far
+    // to the right as possible, even if it increases the number
+    // of patterns that we have to process.
+    preprocess(globParts) {
+        // if we're not in globstar mode, then turn all ** into *
+        if (this.options.noglobstar) {
+            for (let i = 0; i < globParts.length; i++) {
+                for (let j = 0; j < globParts[i].length; j++) {
+                    if (globParts[i][j] === '**') {
+                        globParts[i][j] = '*';
+                    }
+                }
+            }
+        }
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            // aggressive optimization for the purpose of fs walking
+            globParts = this.firstPhasePreProcess(globParts);
+            globParts = this.secondPhasePreProcess(globParts);
+        }
+        else if (optimizationLevel >= 1) {
+            // just basic optimizations to remove some .. parts
+            globParts = this.levelOneOptimize(globParts);
+        }
+        else {
+            // just collapse multiple ** portions into one
+            globParts = this.adjascentGlobstarOptimize(globParts);
+        }
+        return globParts;
+    }
+    // just get rid of adjascent ** portions
+    adjascentGlobstarOptimize(globParts) {
+        return globParts.map(parts => {
+            let gs = -1;
+            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                let i = gs;
+                while (parts[i + 1] === '**') {
+                    i++;
+                }
+                if (i !== gs) {
+                    parts.splice(gs, i - gs);
+                }
+            }
+            return parts;
+        });
+    }
+    // get rid of adjascent ** and resolve .. portions
+    levelOneOptimize(globParts) {
+        return globParts.map(parts => {
+            parts = parts.reduce((set, part) => {
+                const prev = set[set.length - 1];
+                if (part === '**' && prev === '**') {
+                    return set;
+                }
+                if (part === '..') {
+                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
+                        set.pop();
+                        return set;
+                    }
+                }
+                set.push(part);
+                return set;
+            }, []);
+            return parts.length === 0 ? [''] : parts;
+        });
+    }
+    levelTwoFileOptimize(parts) {
+        if (!Array.isArray(parts)) {
+            parts = this.slashSplit(parts);
+        }
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/<e>/<rest> -> <pre>/<rest>
+            if (!this.preserveMultipleSlashes) {
+                for (let i = 1; i < parts.length - 1; i++) {
+                    const p = parts[i];
+                    // don't squeeze out UNC patterns
+                    if (i === 1 && p === '' && parts[0] === '')
+                        continue;
+                    if (p === '.' || p === '') {
+                        didSomething = true;
+                        parts.splice(i, 1);
+                        i--;
+                    }
+                }
+                if (parts[0] === '.' &&
+                    parts.length === 2 &&
+                    (parts[1] === '.' || parts[1] === '')) {
+                    didSomething = true;
+                    parts.pop();
+                }
+            }
+            // <pre>/<p>/../<rest> -> <pre>/<rest>
+            let dd = 0;
+            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                const p = parts[dd - 1];
+                if (p && p !== '.' && p !== '..' && p !== '**') {
+                    didSomething = true;
+                    parts.splice(dd - 1, 2);
+                    dd -= 2;
+                }
+            }
+        } while (didSomething);
+        return parts.length === 0 ? [''] : parts;
+    }
+    // First phase: single-pattern processing
+    // <pre> is 1 or more portions
+    // <rest> is 1 or more portions
+    // <p> is any portion other than ., .., '', or **
+    // <e> is . or ''
+    //
+    // **/.. is *brutal* for filesystem walking performance, because
+    // it effectively resets the recursive walk each time it occurs,
+    // and ** cannot be reduced out by a .. pattern part like a regexp
+    // or most strings (other than .., ., and '') can be.
+    //
+    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+    // <pre>/<e>/<rest> -> <pre>/<rest>
+    // <pre>/<p>/../<rest> -> <pre>/<rest>
+    // **/**/<rest> -> **/<rest>
+    //
+    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
+    // this WOULD be allowed if ** did follow symlinks, or * didn't
+    firstPhasePreProcess(globParts) {
+        let didSomething = false;
+        do {
+            didSomething = false;
+            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
+            for (let parts of globParts) {
+                let gs = -1;
+                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
+                    let gss = gs;
+                    while (parts[gss + 1] === '**') {
+                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
+                        gss++;
+                    }
+                    // eg, if gs is 2 and gss is 4, that means we have 3 **
+                    // parts, and can remove 2 of them.
+                    if (gss > gs) {
+                        parts.splice(gs + 1, gss - gs);
+                    }
+                    let next = parts[gs + 1];
+                    const p = parts[gs + 2];
+                    const p2 = parts[gs + 3];
+                    if (next !== '..')
+                        continue;
+                    if (!p ||
+                        p === '.' ||
+                        p === '..' ||
+                        !p2 ||
+                        p2 === '.' ||
+                        p2 === '..') {
+                        continue;
+                    }
+                    didSomething = true;
+                    // edit parts in place, and push the new one
+                    parts.splice(gs, 1);
+                    const other = parts.slice(0);
+                    other[gs] = '**';
+                    globParts.push(other);
+                    gs--;
+                }
+                // <pre>/<e>/<rest> -> <pre>/<rest>
+                if (!this.preserveMultipleSlashes) {
+                    for (let i = 1; i < parts.length - 1; i++) {
+                        const p = parts[i];
+                        // don't squeeze out UNC patterns
+                        if (i === 1 && p === '' && parts[0] === '')
+                            continue;
+                        if (p === '.' || p === '') {
+                            didSomething = true;
+                            parts.splice(i, 1);
+                            i--;
+                        }
+                    }
+                    if (parts[0] === '.' &&
+                        parts.length === 2 &&
+                        (parts[1] === '.' || parts[1] === '')) {
+                        didSomething = true;
+                        parts.pop();
+                    }
+                }
+                // <pre>/<p>/../<rest> -> <pre>/<rest>
+                let dd = 0;
+                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
+                    const p = parts[dd - 1];
+                    if (p && p !== '.' && p !== '..' && p !== '**') {
+                        didSomething = true;
+                        const needDot = dd === 1 && parts[dd + 1] === '**';
+                        const splin = needDot ? ['.'] : [];
+                        parts.splice(dd - 1, 2, ...splin);
+                        if (parts.length === 0)
+                            parts.push('');
+                        dd -= 2;
+                    }
+                }
+            }
+        } while (didSomething);
+        return globParts;
+    }
+    // second phase: multi-pattern dedupes
+    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
+    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
+    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
+    //
+    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
+    // ^-- not valid because ** doens't follow symlinks
+    secondPhasePreProcess(globParts) {
+        for (let i = 0; i < globParts.length - 1; i++) {
+            for (let j = i + 1; j < globParts.length; j++) {
+                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
+                if (matched) {
+                    globParts[i] = [];
+                    globParts[j] = matched;
+                    break;
+                }
+            }
+        }
+        return globParts.filter(gs => gs.length);
+    }
+    partsMatch(a, b, emptyGSMatch = false) {
+        let ai = 0;
+        let bi = 0;
+        let result = [];
+        let which = '';
+        while (ai < a.length && bi < b.length) {
+            if (a[ai] === b[bi]) {
+                result.push(which === 'b' ? b[bi] : a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
+                result.push(a[ai]);
+                ai++;
+            }
+            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
+                result.push(b[bi]);
+                bi++;
+            }
+            else if (a[ai] === '*' &&
+                b[bi] &&
+                (this.options.dot || !b[bi].startsWith('.')) &&
+                b[bi] !== '**') {
+                if (which === 'b')
+                    return false;
+                which = 'a';
+                result.push(a[ai]);
+                ai++;
+                bi++;
+            }
+            else if (b[bi] === '*' &&
+                a[ai] &&
+                (this.options.dot || !a[ai].startsWith('.')) &&
+                a[ai] !== '**') {
+                if (which === 'a')
+                    return false;
+                which = 'b';
+                result.push(b[bi]);
+                ai++;
+                bi++;
+            }
+            else {
+                return false;
+            }
+        }
+        // if we fall out of the loop, it means they two are identical
+        // as long as their lengths match
+        return a.length === b.length && result;
+    }
+    parseNegate() {
+        if (this.nonegate)
+            return;
+        const pattern = this.pattern;
+        let negate = false;
+        let negateOffset = 0;
+        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+            negate = !negate;
+            negateOffset++;
+        }
+        if (negateOffset)
+            this.pattern = pattern.slice(negateOffset);
+        this.negate = negate;
+    }
+    // set partial to true to test if, for example,
+    // "/a/b" matches the start of "/*/b/*/d"
+    // Partial means, if you run out of file before you run
+    // out of pattern, then that's fine, as long as all
+    // the parts match.
+    matchOne(file, pattern, partial = false) {
+        const options = this.options;
+        // UNC paths like //?/X:/... can match X:/... and vice versa
+        // Drive letters in absolute drive or unc paths are always compared
+        // case-insensitively.
+        if (this.isWindows) {
+            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
+            const fileUNC = !fileDrive &&
+                file[0] === '' &&
+                file[1] === '' &&
+                file[2] === '?' &&
+                /^[a-z]:$/i.test(file[3]);
+            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
+            const patternUNC = !patternDrive &&
+                pattern[0] === '' &&
+                pattern[1] === '' &&
+                pattern[2] === '?' &&
+                typeof pattern[3] === 'string' &&
+                /^[a-z]:$/i.test(pattern[3]);
+            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
+            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
+            if (typeof fdi === 'number' && typeof pdi === 'number') {
+                const [fd, pd] = [file[fdi], pattern[pdi]];
+                if (fd.toLowerCase() === pd.toLowerCase()) {
+                    pattern[pdi] = fd;
+                    if (pdi > fdi) {
+                        pattern = pattern.slice(pdi);
+                    }
+                    else if (fdi > pdi) {
+                        file = file.slice(fdi);
+                    }
+                }
+            }
+        }
+        // resolve and reduce . and .. portions in the file as well.
+        // dont' need to do the second phase, because it's only one string[]
+        const { optimizationLevel = 1 } = this.options;
+        if (optimizationLevel >= 2) {
+            file = this.levelTwoFileOptimize(file);
+        }
+        this.debug('matchOne', this, { file, pattern });
+        this.debug('matchOne', file.length, pattern.length);
+        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
+            this.debug('matchOne loop');
+            var p = pattern[pi];
+            var f = file[fi];
+            this.debug(pattern, p, f);
+            // should be impossible.
+            // some invalid regexp stuff in the set.
+            /* c8 ignore start */
+            if (p === false) {
+                return false;
+            }
+            /* c8 ignore stop */
+            if (p === GLOBSTAR) {
+                this.debug('GLOBSTAR', [pattern, p, f]);
+                // "**"
+                // a/**/b/**/c would match the following:
+                // a/b/x/y/z/c
+                // a/x/y/z/b/c
+                // a/b/x/b/x/c
+                // a/b/c
+                // To do this, take the rest of the pattern after
+                // the **, and see if it would match the file remainder.
+                // If so, return success.
+                // If not, the ** "swallows" a segment, and try again.
+                // This is recursively awful.
+                //
+                // a/**/b/**/c matching a/b/x/y/z/c
+                // - a matches a
+                // - doublestar
+                //   - matchOne(b/x/y/z/c, b/**/c)
+                //     - b matches b
+                //     - doublestar
+                //       - matchOne(x/y/z/c, c) -> no
+                //       - matchOne(y/z/c, c) -> no
+                //       - matchOne(z/c, c) -> no
+                //       - matchOne(c, c) yes, hit
+                var fr = fi;
+                var pr = pi + 1;
+                if (pr === pl) {
+                    this.debug('** at the end');
+                    // a ** at the end will just swallow the rest.
+                    // We have found a match.
+                    // however, it will not swallow /.x, unless
+                    // options.dot is set.
+                    // . and .. are *never* matched by **, for explosively
+                    // exponential reasons.
+                    for (; fi < fl; fi++) {
+                        if (file[fi] === '.' ||
+                            file[fi] === '..' ||
+                            (!options.dot && file[fi].charAt(0) === '.'))
+                            return false;
+                    }
+                    return true;
+                }
+                // ok, let's see if we can swallow whatever we can.
+                while (fr < fl) {
+                    var swallowee = file[fr];
+                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
+                    // XXX remove this slice.  Just pass the start index.
+                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+                        this.debug('globstar found match!', fr, fl, swallowee);
+                        // found a match.
+                        return true;
+                    }
+                    else {
+                        // can't swallow "." or ".." ever.
+                        // can only swallow ".foo" when explicitly asked.
+                        if (swallowee === '.' ||
+                            swallowee === '..' ||
+                            (!options.dot && swallowee.charAt(0) === '.')) {
+                            this.debug('dot detected!', file, fr, pattern, pr);
+                            break;
+                        }
+                        // ** swallows a segment, and continue.
+                        this.debug('globstar swallow a segment, and continue');
+                        fr++;
+                    }
+                }
+                // no match was found.
+                // However, in partial mode, we can't say this is necessarily over.
+                /* c8 ignore start */
+                if (partial) {
+                    // ran out of file
+                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
+                    if (fr === fl) {
+                        return true;
+                    }
+                }
+                /* c8 ignore stop */
+                return false;
+            }
+            // something other than **
+            // non-magic patterns just have to match exactly
+            // patterns with magic have been turned into regexps.
+            let hit;
+            if (typeof p === 'string') {
+                hit = f === p;
+                this.debug('string match', p, f, hit);
+            }
+            else {
+                hit = p.test(f);
+                this.debug('pattern match', p, f, hit);
+            }
+            if (!hit)
+                return false;
+        }
+        // Note: ending in / means that we'll get a final ""
+        // at the end of the pattern.  This can only match a
+        // corresponding "" at the end of the file.
+        // If the file ends in /, then it can only match a
+        // a pattern that ends in /, unless the pattern just
+        // doesn't have any more for it. But, a/b/ should *not*
+        // match "a/b/*", even though "" matches against the
+        // [^/]*? pattern, except in partial mode, where it might
+        // simply not be reached yet.
+        // However, a/b/ should still satisfy a/*
+        // now either we fell off the end of the pattern, or we're done.
+        if (fi === fl && pi === pl) {
+            // ran out of pattern and filename at the same time.
+            // an exact hit!
+            return true;
+        }
+        else if (fi === fl) {
+            // ran out of file, but still had pattern left.
+            // this is ok if we're doing the match as part of
+            // a glob fs traversal.
+            return partial;
+        }
+        else if (pi === pl) {
+            // ran out of pattern, still have file left.
+            // this is only acceptable if we're on the very last
+            // empty segment of a file with a trailing slash.
+            // a/* should match a/b/
+            return fi === fl - 1 && file[fi] === '';
+            /* c8 ignore start */
+        }
+        else {
+            // should be unreachable.
+            throw new Error('wtf?');
+        }
+        /* c8 ignore stop */
+    }
+    braceExpand() {
+        return braceExpand(this.pattern, this.options);
+    }
+    parse(pattern) {
+        assertValidPattern(pattern);
+        const options = this.options;
+        // shortcuts
+        if (pattern === '**')
+            return GLOBSTAR;
+        if (pattern === '')
+            return '';
+        // far and away, the most common glob pattern parts are
+        // *, *.*, and *.  Add a fast check method for those.
+        let m;
+        let fastTest = null;
+        if ((m = pattern.match(starRE))) {
+            fastTest = options.dot ? starTestDot : starTest;
+        }
+        else if ((m = pattern.match(starDotExtRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? starDotExtTestNocaseDot
+                    : starDotExtTestNocase
+                : options.dot
+                    ? starDotExtTestDot
+                    : starDotExtTest)(m[1]);
+        }
+        else if ((m = pattern.match(qmarksRE))) {
+            fastTest = (options.nocase
+                ? options.dot
+                    ? qmarksTestNocaseDot
+                    : qmarksTestNocase
+                : options.dot
+                    ? qmarksTestDot
+                    : qmarksTest)(m);
+        }
+        else if ((m = pattern.match(starDotStarRE))) {
+            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
+        }
+        else if ((m = pattern.match(dotStarRE))) {
+            fastTest = dotStarTest;
+        }
+        const re = AST.fromGlob(pattern, this.options).toMMPattern();
+        if (fastTest && typeof re === 'object') {
+            // Avoids overriding in frozen environments
+            Reflect.defineProperty(re, 'test', { value: fastTest });
+        }
+        return re;
+    }
+    makeRe() {
+        if (this.regexp || this.regexp === false)
+            return this.regexp;
+        // at this point, this.set is a 2d array of partial
+        // pattern strings, or "**".
+        //
+        // It's better to use .match().  This function shouldn't
+        // be used, really, but it's pretty convenient sometimes,
+        // when you just want to work with a regex.
+        const set = this.set;
+        if (!set.length) {
+            this.regexp = false;
+            return this.regexp;
+        }
+        const options = this.options;
+        const twoStar = options.noglobstar
+            ? star
+            : options.dot
+                ? twoStarDot
+                : twoStarNoDot;
+        const flags = new Set(options.nocase ? ['i'] : []);
+        // regexpify non-globstar patterns
+        // if ** is only item, then we just do one twoStar
+        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
+        // if ** is last, append (\/twoStar|) to previous
+        // if ** is in the middle, append (\/|\/twoStar\/) to previous
+        // then filter out GLOBSTAR symbols
+        let re = set
+            .map(pattern => {
+            const pp = pattern.map(p => {
+                if (p instanceof RegExp) {
+                    for (const f of p.flags.split(''))
+                        flags.add(f);
+                }
+                return typeof p === 'string'
+                    ? regExpEscape(p)
+                    : p === GLOBSTAR
+                        ? GLOBSTAR
+                        : p._src;
+            });
+            pp.forEach((p, i) => {
+                const next = pp[i + 1];
+                const prev = pp[i - 1];
+                if (p !== GLOBSTAR || prev === GLOBSTAR) {
+                    return;
+                }
+                if (prev === undefined) {
+                    if (next !== undefined && next !== GLOBSTAR) {
+                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
+                    }
+                    else {
+                        pp[i] = twoStar;
+                    }
+                }
+                else if (next === undefined) {
+                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
+                }
+                else if (next !== GLOBSTAR) {
+                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
+                    pp[i + 1] = GLOBSTAR;
+                }
+            });
+            return pp.filter(p => p !== GLOBSTAR).join('/');
+        })
+            .join('|');
+        // need to wrap in parens if we had more than one thing with |,
+        // otherwise only the first will be anchored to ^ and the last to $
+        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
+        // must match entire pattern
+        // ending in a * or ** will make it less strict.
+        re = '^' + open + re + close + '$';
+        // can match anything, as long as it's not this.
+        if (this.negate)
+            re = '^(?!' + re + ').+$';
+        try {
+            this.regexp = new RegExp(re, [...flags].join(''));
+            /* c8 ignore start */
+        }
+        catch (ex) {
+            // should be impossible
+            this.regexp = false;
+        }
+        /* c8 ignore stop */
+        return this.regexp;
+    }
+    slashSplit(p) {
+        // if p starts with // on windows, we preserve that
+        // so that UNC paths aren't broken.  Otherwise, any number of
+        // / characters are coalesced into one, unless
+        // preserveMultipleSlashes is set to true.
+        if (this.preserveMultipleSlashes) {
+            return p.split('/');
+        }
+        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
+            // add an extra '' for the one we lose
+            return ['', ...p.split(/\/+/)];
+        }
+        else {
+            return p.split(/\/+/);
+        }
+    }
+    match(f, partial = this.partial) {
+        this.debug('match', f, this.pattern);
+        // short-circuit in the case of busted things.
+        // comments, etc.
+        if (this.comment) {
+            return false;
+        }
+        if (this.empty) {
+            return f === '';
+        }
+        if (f === '/' && partial) {
+            return true;
+        }
+        const options = this.options;
+        // windows: need to use /, not \
+        if (this.isWindows) {
+            f = f.split('\\').join('/');
+        }
+        // treat the test path as a set of pathparts.
+        const ff = this.slashSplit(f);
+        this.debug(this.pattern, 'split', ff);
+        // just ONE of the pattern sets in this.set needs to match
+        // in order for it to be valid.  If negating, then just one
+        // match means that we have failed.
+        // Either way, return on the first hit.
+        const set = this.set;
+        this.debug(this.pattern, 'set', set);
+        // Find the basename of the path by looking for the last non-empty segment
+        let filename = ff[ff.length - 1];
+        if (!filename) {
+            for (let i = ff.length - 2; !filename && i >= 0; i--) {
+                filename = ff[i];
+            }
+        }
+        for (let i = 0; i < set.length; i++) {
+            const pattern = set[i];
+            let file = ff;
+            if (options.matchBase && pattern.length === 1) {
+                file = [filename];
+            }
+            const hit = this.matchOne(file, pattern, partial);
+            if (hit) {
+                if (options.flipNegate) {
+                    return true;
+                }
+                return !this.negate;
+            }
+        }
+        // didn't get any hits.  this is success if it's a negative
+        // pattern, failure otherwise.
+        if (options.flipNegate) {
+            return false;
+        }
+        return this.negate;
+    }
+    static defaults(def) {
+        return minimatch.defaults(def).Minimatch;
+    }
+}
+/* c8 ignore start */
+export { AST } from './ast.js';
+export { escape } from './escape.js';
+export { unescape } from './unescape.js';
+/* c8 ignore stop */
+minimatch.AST = AST;
+minimatch.Minimatch = Minimatch;
+minimatch.escape = escape;
+minimatch.unescape = unescape;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/package.json b/node_modules/pacote/node_modules/minimatch/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js b/node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js
new file mode 100644
index 0000000000000..0faf9a2b7306f
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js
@@ -0,0 +1,20 @@
+/**
+ * Un-escape a string that has been escaped with {@link escape}.
+ *
+ * If the {@link windowsPathsNoEscape} option is used, then square-brace
+ * escapes are removed, but not backslash escapes.  For example, it will turn
+ * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
+ * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
+ *
+ * When `windowsPathsNoEscape` is not set, then both brace escapes and
+ * backslash escapes are removed.
+ *
+ * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
+ * or unescaped.
+ */
+export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
+    return windowsPathsNoEscape
+        ? s.replace(/\[([^\/\\])\]/g, '$1')
+        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
+};
+//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/package.json b/node_modules/pacote/node_modules/minimatch/package.json
new file mode 100644
index 0000000000000..bfa2423f50b5e
--- /dev/null
+++ b/node_modules/pacote/node_modules/minimatch/package.json
@@ -0,0 +1,79 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
+  "name": "minimatch",
+  "description": "a glob matcher in javascript",
+  "version": "10.0.3",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/minimatch.git"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.2",
+    "@types/node": "^24.0.0",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.3.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "license": "ISC",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js",
+  "dependencies": {
+    "@isaacs/brace-expansion": "^5.0.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/minizlib/LICENSE b/node_modules/pacote/node_modules/minizlib/LICENSE
new file mode 100644
index 0000000000000..49f7efe431c9e
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
+Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
+Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js
new file mode 100644
index 0000000000000..dfc2c1957bfc9
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js
@@ -0,0 +1,123 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.constants = void 0;
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+const zlib_1 = __importDefault(require("zlib"));
+/* c8 ignore start */
+const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
+/* c8 ignore stop */
+exports.constants = Object.freeze(Object.assign(Object.create(null), {
+    Z_NO_FLUSH: 0,
+    Z_PARTIAL_FLUSH: 1,
+    Z_SYNC_FLUSH: 2,
+    Z_FULL_FLUSH: 3,
+    Z_FINISH: 4,
+    Z_BLOCK: 5,
+    Z_OK: 0,
+    Z_STREAM_END: 1,
+    Z_NEED_DICT: 2,
+    Z_ERRNO: -1,
+    Z_STREAM_ERROR: -2,
+    Z_DATA_ERROR: -3,
+    Z_MEM_ERROR: -4,
+    Z_BUF_ERROR: -5,
+    Z_VERSION_ERROR: -6,
+    Z_NO_COMPRESSION: 0,
+    Z_BEST_SPEED: 1,
+    Z_BEST_COMPRESSION: 9,
+    Z_DEFAULT_COMPRESSION: -1,
+    Z_FILTERED: 1,
+    Z_HUFFMAN_ONLY: 2,
+    Z_RLE: 3,
+    Z_FIXED: 4,
+    Z_DEFAULT_STRATEGY: 0,
+    DEFLATE: 1,
+    INFLATE: 2,
+    GZIP: 3,
+    GUNZIP: 4,
+    DEFLATERAW: 5,
+    INFLATERAW: 6,
+    UNZIP: 7,
+    BROTLI_DECODE: 8,
+    BROTLI_ENCODE: 9,
+    Z_MIN_WINDOWBITS: 8,
+    Z_MAX_WINDOWBITS: 15,
+    Z_DEFAULT_WINDOWBITS: 15,
+    Z_MIN_CHUNK: 64,
+    Z_MAX_CHUNK: Infinity,
+    Z_DEFAULT_CHUNK: 16384,
+    Z_MIN_MEMLEVEL: 1,
+    Z_MAX_MEMLEVEL: 9,
+    Z_DEFAULT_MEMLEVEL: 8,
+    Z_MIN_LEVEL: -1,
+    Z_MAX_LEVEL: 9,
+    Z_DEFAULT_LEVEL: -1,
+    BROTLI_OPERATION_PROCESS: 0,
+    BROTLI_OPERATION_FLUSH: 1,
+    BROTLI_OPERATION_FINISH: 2,
+    BROTLI_OPERATION_EMIT_METADATA: 3,
+    BROTLI_MODE_GENERIC: 0,
+    BROTLI_MODE_TEXT: 1,
+    BROTLI_MODE_FONT: 2,
+    BROTLI_DEFAULT_MODE: 0,
+    BROTLI_MIN_QUALITY: 0,
+    BROTLI_MAX_QUALITY: 11,
+    BROTLI_DEFAULT_QUALITY: 11,
+    BROTLI_MIN_WINDOW_BITS: 10,
+    BROTLI_MAX_WINDOW_BITS: 24,
+    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+    BROTLI_DEFAULT_WINDOW: 22,
+    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+    BROTLI_PARAM_MODE: 0,
+    BROTLI_PARAM_QUALITY: 1,
+    BROTLI_PARAM_LGWIN: 2,
+    BROTLI_PARAM_LGBLOCK: 3,
+    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+    BROTLI_PARAM_SIZE_HINT: 5,
+    BROTLI_PARAM_LARGE_WINDOW: 6,
+    BROTLI_PARAM_NPOSTFIX: 7,
+    BROTLI_PARAM_NDIRECT: 8,
+    BROTLI_DECODER_RESULT_ERROR: 0,
+    BROTLI_DECODER_RESULT_SUCCESS: 1,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+    BROTLI_DECODER_NO_ERROR: 0,
+    BROTLI_DECODER_SUCCESS: 1,
+    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants));
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js b/node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js
new file mode 100644
index 0000000000000..b4906d2783372
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js
@@ -0,0 +1,392 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
+const assert_1 = __importDefault(require("assert"));
+const buffer_1 = require("buffer");
+const minipass_1 = require("minipass");
+const realZlib = __importStar(require("zlib"));
+const constants_js_1 = require("./constants.js");
+var constants_js_2 = require("./constants.js");
+Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
+const OriginalBufferConcat = buffer_1.Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+    ? (makeNoOp) => {
+        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+    }
+    : (_) => { };
+const _superWrite = Symbol('_superWrite');
+class ZlibError extends Error {
+    code;
+    errno;
+    constructor(err) {
+        super('zlib: ' + err.message);
+        this.code = err.code;
+        this.errno = err.errno;
+        /* c8 ignore next */
+        if (!this.code)
+            this.code = 'ZLIB_ERROR';
+        this.message = 'zlib: ' + err.message;
+        Error.captureStackTrace(this, this.constructor);
+    }
+    get name() {
+        return 'ZlibError';
+    }
+}
+exports.ZlibError = ZlibError;
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _flushFlag = Symbol('flushFlag');
+class ZlibBase extends minipass_1.Minipass {
+    #sawError = false;
+    #ended = false;
+    #flushFlag;
+    #finishFlushFlag;
+    #fullFlushFlag;
+    #handle;
+    #onError;
+    get sawError() {
+        return this.#sawError;
+    }
+    get handle() {
+        return this.#handle;
+    }
+    /* c8 ignore start */
+    get flushFlag() {
+        return this.#flushFlag;
+    }
+    /* c8 ignore stop */
+    constructor(opts, mode) {
+        if (!opts || typeof opts !== 'object')
+            throw new TypeError('invalid options for ZlibBase constructor');
+        //@ts-ignore
+        super(opts);
+        /* c8 ignore start */
+        this.#flushFlag = opts.flush ?? 0;
+        this.#finishFlushFlag = opts.finishFlush ?? 0;
+        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
+        /* c8 ignore stop */
+        // this will throw if any options are invalid for the class selected
+        try {
+            // @types/node doesn't know that it exports the classes, but they're there
+            //@ts-ignore
+            this.#handle = new realZlib[mode](opts);
+        }
+        catch (er) {
+            // make sure that all errors get decorated properly
+            throw new ZlibError(er);
+        }
+        this.#onError = err => {
+            // no sense raising multiple errors, since we abort on the first one.
+            if (this.#sawError)
+                return;
+            this.#sawError = true;
+            // there is no way to cleanly recover.
+            // continuing only obscures problems.
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close);
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            (0, assert_1.default)(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = buffer_1.Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        (0, assert_1.default)(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle
+            ._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        passthroughBufferConcat(true);
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            passthroughBufferConcat(false);
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            passthroughBufferConcat(false);
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                ;
+                this.#handle._handle =
+                    nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+                this.#handle.removeAllListeners('error');
+                // make sure OUR error listener is still attached tho
+            }
+        }
+        if (this.#handle)
+            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+        let writeReturn;
+        if (result) {
+            if (Array.isArray(result) && result.length > 0) {
+                const r = result[0];
+                // The first buffer is always `handle._outBuffer`, which would be
+                // re-used for later invocations; so, we always have to copy that one.
+                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
+                for (let i = 1; i < result.length; i++) {
+                    writeReturn = this[_superWrite](result[i]);
+                }
+            }
+            else {
+                // either a single Buffer or an empty array
+                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
+            }
+        }
+        if (cb)
+            cb();
+        return writeReturn;
+    }
+}
+class Zlib extends ZlibBase {
+    #level;
+    #strategy;
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
+        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
+        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
+        super(opts, mode);
+        this.#level = opts.level;
+        this.#strategy = opts.strategy;
+    }
+    params(level, strategy) {
+        if (this.sawError)
+            return;
+        if (!this.handle)
+            throw new Error('cannot switch params when binding is closed');
+        // no way to test this without also not supporting params at all
+        /* c8 ignore start */
+        if (!this.handle.params)
+            throw new Error('not supported in this implementation');
+        /* c8 ignore stop */
+        if (this.#level !== level || this.#strategy !== strategy) {
+            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
+            (0, assert_1.default)(this.handle, 'zlib binding closed');
+            // .params() calls .flush(), but the latter is always async in the
+            // core zlib. We override .flush() temporarily to intercept that and
+            // flush synchronously.
+            const origFlush = this.handle.flush;
+            this.handle.flush = (flushFlag, cb) => {
+                /* c8 ignore start */
+                if (typeof flushFlag === 'function') {
+                    cb = flushFlag;
+                    flushFlag = this.flushFlag;
+                }
+                /* c8 ignore stop */
+                this.flush(flushFlag);
+                cb?.();
+            };
+            try {
+                ;
+                this.handle.params(level, strategy);
+            }
+            finally {
+                this.handle.flush = origFlush;
+            }
+            /* c8 ignore start */
+            if (this.handle) {
+                this.#level = level;
+                this.#strategy = strategy;
+            }
+            /* c8 ignore stop */
+        }
+    }
+}
+exports.Zlib = Zlib;
+// minimal 2-byte header
+class Deflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Deflate');
+    }
+}
+exports.Deflate = Deflate;
+class Inflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Inflate');
+    }
+}
+exports.Inflate = Inflate;
+class Gzip extends Zlib {
+    #portable;
+    constructor(opts) {
+        super(opts, 'Gzip');
+        this.#portable = opts && !!opts.portable;
+    }
+    [_superWrite](data) {
+        if (!this.#portable)
+            return super[_superWrite](data);
+        // we'll always get the header emitted in one first chunk
+        // overwrite the OS indicator byte with 0xFF
+        this.#portable = false;
+        data[9] = 255;
+        return super[_superWrite](data);
+    }
+}
+exports.Gzip = Gzip;
+class Gunzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Gunzip');
+    }
+}
+exports.Gunzip = Gunzip;
+// raw - no header
+class DeflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'DeflateRaw');
+    }
+}
+exports.DeflateRaw = DeflateRaw;
+class InflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'InflateRaw');
+    }
+}
+exports.InflateRaw = InflateRaw;
+// auto-detect header.
+class Unzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Unzip');
+    }
+}
+exports.Unzip = Unzip;
+class Brotli extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
+        opts.finishFlush =
+            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
+        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
+        super(opts, mode);
+    }
+}
+exports.Brotli = Brotli;
+class BrotliCompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliCompress');
+    }
+}
+exports.BrotliCompress = BrotliCompress;
+class BrotliDecompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliDecompress');
+    }
+}
+exports.BrotliDecompress = BrotliDecompress;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json b/node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/minizlib/dist/esm/constants.js b/node_modules/pacote/node_modules/minizlib/dist/esm/constants.js
new file mode 100644
index 0000000000000..7faf40be5068d
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/dist/esm/constants.js
@@ -0,0 +1,117 @@
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+import realZlib from 'zlib';
+/* c8 ignore start */
+const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
+/* c8 ignore stop */
+export const constants = Object.freeze(Object.assign(Object.create(null), {
+    Z_NO_FLUSH: 0,
+    Z_PARTIAL_FLUSH: 1,
+    Z_SYNC_FLUSH: 2,
+    Z_FULL_FLUSH: 3,
+    Z_FINISH: 4,
+    Z_BLOCK: 5,
+    Z_OK: 0,
+    Z_STREAM_END: 1,
+    Z_NEED_DICT: 2,
+    Z_ERRNO: -1,
+    Z_STREAM_ERROR: -2,
+    Z_DATA_ERROR: -3,
+    Z_MEM_ERROR: -4,
+    Z_BUF_ERROR: -5,
+    Z_VERSION_ERROR: -6,
+    Z_NO_COMPRESSION: 0,
+    Z_BEST_SPEED: 1,
+    Z_BEST_COMPRESSION: 9,
+    Z_DEFAULT_COMPRESSION: -1,
+    Z_FILTERED: 1,
+    Z_HUFFMAN_ONLY: 2,
+    Z_RLE: 3,
+    Z_FIXED: 4,
+    Z_DEFAULT_STRATEGY: 0,
+    DEFLATE: 1,
+    INFLATE: 2,
+    GZIP: 3,
+    GUNZIP: 4,
+    DEFLATERAW: 5,
+    INFLATERAW: 6,
+    UNZIP: 7,
+    BROTLI_DECODE: 8,
+    BROTLI_ENCODE: 9,
+    Z_MIN_WINDOWBITS: 8,
+    Z_MAX_WINDOWBITS: 15,
+    Z_DEFAULT_WINDOWBITS: 15,
+    Z_MIN_CHUNK: 64,
+    Z_MAX_CHUNK: Infinity,
+    Z_DEFAULT_CHUNK: 16384,
+    Z_MIN_MEMLEVEL: 1,
+    Z_MAX_MEMLEVEL: 9,
+    Z_DEFAULT_MEMLEVEL: 8,
+    Z_MIN_LEVEL: -1,
+    Z_MAX_LEVEL: 9,
+    Z_DEFAULT_LEVEL: -1,
+    BROTLI_OPERATION_PROCESS: 0,
+    BROTLI_OPERATION_FLUSH: 1,
+    BROTLI_OPERATION_FINISH: 2,
+    BROTLI_OPERATION_EMIT_METADATA: 3,
+    BROTLI_MODE_GENERIC: 0,
+    BROTLI_MODE_TEXT: 1,
+    BROTLI_MODE_FONT: 2,
+    BROTLI_DEFAULT_MODE: 0,
+    BROTLI_MIN_QUALITY: 0,
+    BROTLI_MAX_QUALITY: 11,
+    BROTLI_DEFAULT_QUALITY: 11,
+    BROTLI_MIN_WINDOW_BITS: 10,
+    BROTLI_MAX_WINDOW_BITS: 24,
+    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+    BROTLI_DEFAULT_WINDOW: 22,
+    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+    BROTLI_PARAM_MODE: 0,
+    BROTLI_PARAM_QUALITY: 1,
+    BROTLI_PARAM_LGWIN: 2,
+    BROTLI_PARAM_LGBLOCK: 3,
+    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+    BROTLI_PARAM_SIZE_HINT: 5,
+    BROTLI_PARAM_LARGE_WINDOW: 6,
+    BROTLI_PARAM_NPOSTFIX: 7,
+    BROTLI_PARAM_NDIRECT: 8,
+    BROTLI_DECODER_RESULT_ERROR: 0,
+    BROTLI_DECODER_RESULT_SUCCESS: 1,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+    BROTLI_DECODER_NO_ERROR: 0,
+    BROTLI_DECODER_SUCCESS: 1,
+    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants));
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/esm/index.js b/node_modules/pacote/node_modules/minizlib/dist/esm/index.js
new file mode 100644
index 0000000000000..f33586a8ab0ec
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/dist/esm/index.js
@@ -0,0 +1,340 @@
+import assert from 'assert';
+import { Buffer } from 'buffer';
+import { Minipass } from 'minipass';
+import * as realZlib from 'zlib';
+import { constants } from './constants.js';
+export { constants } from './constants.js';
+const OriginalBufferConcat = Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+    ? (makeNoOp) => {
+        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+    }
+    : (_) => { };
+const _superWrite = Symbol('_superWrite');
+export class ZlibError extends Error {
+    code;
+    errno;
+    constructor(err) {
+        super('zlib: ' + err.message);
+        this.code = err.code;
+        this.errno = err.errno;
+        /* c8 ignore next */
+        if (!this.code)
+            this.code = 'ZLIB_ERROR';
+        this.message = 'zlib: ' + err.message;
+        Error.captureStackTrace(this, this.constructor);
+    }
+    get name() {
+        return 'ZlibError';
+    }
+}
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _flushFlag = Symbol('flushFlag');
+class ZlibBase extends Minipass {
+    #sawError = false;
+    #ended = false;
+    #flushFlag;
+    #finishFlushFlag;
+    #fullFlushFlag;
+    #handle;
+    #onError;
+    get sawError() {
+        return this.#sawError;
+    }
+    get handle() {
+        return this.#handle;
+    }
+    /* c8 ignore start */
+    get flushFlag() {
+        return this.#flushFlag;
+    }
+    /* c8 ignore stop */
+    constructor(opts, mode) {
+        if (!opts || typeof opts !== 'object')
+            throw new TypeError('invalid options for ZlibBase constructor');
+        //@ts-ignore
+        super(opts);
+        /* c8 ignore start */
+        this.#flushFlag = opts.flush ?? 0;
+        this.#finishFlushFlag = opts.finishFlush ?? 0;
+        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
+        /* c8 ignore stop */
+        // this will throw if any options are invalid for the class selected
+        try {
+            // @types/node doesn't know that it exports the classes, but they're there
+            //@ts-ignore
+            this.#handle = new realZlib[mode](opts);
+        }
+        catch (er) {
+            // make sure that all errors get decorated properly
+            throw new ZlibError(er);
+        }
+        this.#onError = err => {
+            // no sense raising multiple errors, since we abort on the first one.
+            if (this.#sawError)
+                return;
+            this.#sawError = true;
+            // there is no way to cleanly recover.
+            // continuing only obscures problems.
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close);
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            assert(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        assert(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle
+            ._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        passthroughBufferConcat(true);
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            passthroughBufferConcat(false);
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            passthroughBufferConcat(false);
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                ;
+                this.#handle._handle =
+                    nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+                this.#handle.removeAllListeners('error');
+                // make sure OUR error listener is still attached tho
+            }
+        }
+        if (this.#handle)
+            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+        let writeReturn;
+        if (result) {
+            if (Array.isArray(result) && result.length > 0) {
+                const r = result[0];
+                // The first buffer is always `handle._outBuffer`, which would be
+                // re-used for later invocations; so, we always have to copy that one.
+                writeReturn = this[_superWrite](Buffer.from(r));
+                for (let i = 1; i < result.length; i++) {
+                    writeReturn = this[_superWrite](result[i]);
+                }
+            }
+            else {
+                // either a single Buffer or an empty array
+                writeReturn = this[_superWrite](Buffer.from(result));
+            }
+        }
+        if (cb)
+            cb();
+        return writeReturn;
+    }
+}
+export class Zlib extends ZlibBase {
+    #level;
+    #strategy;
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.Z_NO_FLUSH;
+        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
+        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
+        super(opts, mode);
+        this.#level = opts.level;
+        this.#strategy = opts.strategy;
+    }
+    params(level, strategy) {
+        if (this.sawError)
+            return;
+        if (!this.handle)
+            throw new Error('cannot switch params when binding is closed');
+        // no way to test this without also not supporting params at all
+        /* c8 ignore start */
+        if (!this.handle.params)
+            throw new Error('not supported in this implementation');
+        /* c8 ignore stop */
+        if (this.#level !== level || this.#strategy !== strategy) {
+            this.flush(constants.Z_SYNC_FLUSH);
+            assert(this.handle, 'zlib binding closed');
+            // .params() calls .flush(), but the latter is always async in the
+            // core zlib. We override .flush() temporarily to intercept that and
+            // flush synchronously.
+            const origFlush = this.handle.flush;
+            this.handle.flush = (flushFlag, cb) => {
+                /* c8 ignore start */
+                if (typeof flushFlag === 'function') {
+                    cb = flushFlag;
+                    flushFlag = this.flushFlag;
+                }
+                /* c8 ignore stop */
+                this.flush(flushFlag);
+                cb?.();
+            };
+            try {
+                ;
+                this.handle.params(level, strategy);
+            }
+            finally {
+                this.handle.flush = origFlush;
+            }
+            /* c8 ignore start */
+            if (this.handle) {
+                this.#level = level;
+                this.#strategy = strategy;
+            }
+            /* c8 ignore stop */
+        }
+    }
+}
+// minimal 2-byte header
+export class Deflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Deflate');
+    }
+}
+export class Inflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Inflate');
+    }
+}
+export class Gzip extends Zlib {
+    #portable;
+    constructor(opts) {
+        super(opts, 'Gzip');
+        this.#portable = opts && !!opts.portable;
+    }
+    [_superWrite](data) {
+        if (!this.#portable)
+            return super[_superWrite](data);
+        // we'll always get the header emitted in one first chunk
+        // overwrite the OS indicator byte with 0xFF
+        this.#portable = false;
+        data[9] = 255;
+        return super[_superWrite](data);
+    }
+}
+export class Gunzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Gunzip');
+    }
+}
+// raw - no header
+export class DeflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'DeflateRaw');
+    }
+}
+export class InflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'InflateRaw');
+    }
+}
+// auto-detect header.
+export class Unzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Unzip');
+    }
+}
+export class Brotli extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
+        opts.finishFlush =
+            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
+        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
+        super(opts, mode);
+    }
+}
+export class BrotliCompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliCompress');
+    }
+}
+export class BrotliDecompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliDecompress');
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/esm/package.json b/node_modules/pacote/node_modules/minizlib/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/minizlib/package.json b/node_modules/pacote/node_modules/minizlib/package.json
new file mode 100644
index 0000000000000..43cb855e15a5d
--- /dev/null
+++ b/node_modules/pacote/node_modules/minizlib/package.json
@@ -0,0 +1,80 @@
+{
+  "name": "minizlib",
+  "version": "3.0.2",
+  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+  "main": "./dist/commonjs/index.js",
+  "dependencies": {
+    "minipass": "^7.1.2"
+  },
+  "scripts": {
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minizlib.git"
+  },
+  "keywords": [
+    "zlib",
+    "gzip",
+    "gunzip",
+    "deflate",
+    "inflate",
+    "compression",
+    "zip",
+    "unzip"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "MIT",
+  "devDependencies": {
+    "@types/node": "^22.13.14",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.1"
+  },
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": ">= 18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/pacote/node_modules/mkdirp/LICENSE b/node_modules/pacote/node_modules/mkdirp/LICENSE
new file mode 100644
index 0000000000000..0a034db7a73b5
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/LICENSE
@@ -0,0 +1,21 @@
+Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
+
+This project is free software released under the MIT license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json b/node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json
new file mode 100644
index 0000000000000..9d04a66e16cd9
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json
@@ -0,0 +1,91 @@
+{
+    "name": "mkdirp",
+    "description": "Recursively mkdir, like `mkdir -p`",
+    "version": "3.0.1",
+    "keywords": [
+        "mkdir",
+        "directory",
+        "make dir",
+        "make",
+        "dir",
+        "recursive",
+        "native"
+    ],
+    "bin": "./dist/cjs/src/bin.js",
+    "main": "./dist/cjs/src/index.js",
+    "module": "./dist/mjs/index.js",
+    "types": "./dist/mjs/index.d.ts",
+    "exports": {
+        ".": {
+            "import": {
+                "types": "./dist/mjs/index.d.ts",
+                "default": "./dist/mjs/index.js"
+            },
+            "require": {
+                "types": "./dist/cjs/src/index.d.ts",
+                "default": "./dist/cjs/src/index.js"
+            }
+        }
+    },
+    "files": [
+        "dist"
+    ],
+    "scripts": {
+        "preversion": "npm test",
+        "postversion": "npm publish",
+        "prepublishOnly": "git push origin --follow-tags",
+        "preprepare": "rm -rf dist",
+        "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+        "postprepare": "bash fixup.sh",
+        "pretest": "npm run prepare",
+        "presnap": "npm run prepare",
+        "test": "c8 tap",
+        "snap": "c8 tap",
+        "format": "prettier --write . --loglevel warn",
+        "benchmark": "node benchmark/index.js",
+        "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+    },
+    "prettier": {
+        "semi": false,
+        "printWidth": 80,
+        "tabWidth": 2,
+        "useTabs": false,
+        "singleQuote": true,
+        "jsxSingleQuote": false,
+        "bracketSameLine": true,
+        "arrowParens": "avoid",
+        "endOfLine": "lf"
+    },
+    "devDependencies": {
+        "@types/brace-expansion": "^1.1.0",
+        "@types/node": "^18.11.9",
+        "@types/tap": "^15.0.7",
+        "c8": "^7.12.0",
+        "eslint-config-prettier": "^8.6.0",
+        "prettier": "^2.8.2",
+        "tap": "^16.3.3",
+        "ts-node": "^10.9.1",
+        "typedoc": "^0.23.21",
+        "typescript": "^4.9.3"
+    },
+    "tap": {
+        "coverage": false,
+        "node-arg": [
+            "--no-warnings",
+            "--loader",
+            "ts-node/esm"
+        ],
+        "ts": false
+    },
+    "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+    },
+    "repository": {
+        "type": "git",
+        "url": "https://github.com/isaacs/node-mkdirp.git"
+    },
+    "license": "MIT",
+    "engines": {
+        "node": ">=10"
+    }
+}
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js
new file mode 100755
index 0000000000000..757aae1fd96cb
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js
@@ -0,0 +1,80 @@
+#!/usr/bin/env node
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const package_json_1 = require("../package.json");
+const usage = () => `
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+  Create each supplied directory including any necessary parent directories
+  that don't yet exist.
+
+  If the directory already exists, do nothing.
+
+OPTIONS are:
+
+  -m       If a directory needs to be created, set the mode as an octal
+  --mode=  permission string.
+
+  -v --version   Print the mkdirp version number
+
+  -h --help      Print this helpful banner
+
+  -p --print     Print the first directories created for each path provided
+
+  --manual       Use manual implementation, even if native is available
+`;
+const dirs = [];
+const opts = {};
+let doPrint = false;
+let dashdash = false;
+let manual = false;
+for (const arg of process.argv.slice(2)) {
+    if (dashdash)
+        dirs.push(arg);
+    else if (arg === '--')
+        dashdash = true;
+    else if (arg === '--manual')
+        manual = true;
+    else if (/^-h/.test(arg) || /^--help/.test(arg)) {
+        console.log(usage());
+        process.exit(0);
+    }
+    else if (arg === '-v' || arg === '--version') {
+        console.log(package_json_1.version);
+        process.exit(0);
+    }
+    else if (arg === '-p' || arg === '--print') {
+        doPrint = true;
+    }
+    else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
+        // these don't get covered in CI, but work locally
+        // weird because the tests below show as passing in the output.
+        /* c8 ignore start */
+        const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
+        if (isNaN(mode)) {
+            console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
+            process.exit(1);
+        }
+        /* c8 ignore stop */
+        opts.mode = mode;
+    }
+    else
+        dirs.push(arg);
+}
+const index_js_1 = require("./index.js");
+const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
+if (dirs.length === 0) {
+    console.error(usage());
+}
+// these don't get covered in CI, but work locally
+/* c8 ignore start */
+Promise.all(dirs.map(dir => impl(dir, opts)))
+    .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
+    .catch(er => {
+    console.error(er.message);
+    if (er.code)
+        console.error('  code: ' + er.code);
+    process.exit(1);
+});
+/* c8 ignore stop */
+//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js
new file mode 100644
index 0000000000000..e831ef27cadc1
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.findMadeSync = exports.findMade = void 0;
+const path_1 = require("path");
+const findMade = async (opts, parent, path) => {
+    // we never want the 'made' return value to be a root directory
+    if (path === parent) {
+        return;
+    }
+    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
+    // will fail later
+    er => {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
+            : undefined;
+    });
+};
+exports.findMade = findMade;
+const findMadeSync = (opts, parent, path) => {
+    if (path === parent) {
+        return undefined;
+    }
+    try {
+        return opts.statSync(parent).isDirectory() ? path : undefined;
+    }
+    catch (er) {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
+            : undefined;
+    }
+};
+exports.findMadeSync = findMadeSync;
+//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js
new file mode 100644
index 0000000000000..ab9dc62cddda3
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js
@@ -0,0 +1,53 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
+const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
+const mkdirp_native_js_1 = require("./mkdirp-native.js");
+const opts_arg_js_1 = require("./opts-arg.js");
+const path_arg_js_1 = require("./path-arg.js");
+const use_native_js_1 = require("./use-native.js");
+/* c8 ignore start */
+var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
+Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
+Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
+var mkdirp_native_js_2 = require("./mkdirp-native.js");
+Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
+Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
+var use_native_js_2 = require("./use-native.js");
+Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
+Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
+/* c8 ignore stop */
+const mkdirpSync = (path, opts) => {
+    path = (0, path_arg_js_1.pathArg)(path);
+    const resolved = (0, opts_arg_js_1.optsArg)(opts);
+    return (0, use_native_js_1.useNativeSync)(resolved)
+        ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
+        : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
+};
+exports.mkdirpSync = mkdirpSync;
+exports.sync = exports.mkdirpSync;
+exports.manual = mkdirp_manual_js_1.mkdirpManual;
+exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
+exports.native = mkdirp_native_js_1.mkdirpNative;
+exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
+exports.mkdirp = Object.assign(async (path, opts) => {
+    path = (0, path_arg_js_1.pathArg)(path);
+    const resolved = (0, opts_arg_js_1.optsArg)(opts);
+    return (0, use_native_js_1.useNative)(resolved)
+        ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
+        : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
+}, {
+    mkdirpSync: exports.mkdirpSync,
+    mkdirpNative: mkdirp_native_js_1.mkdirpNative,
+    mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
+    mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
+    mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
+    sync: exports.mkdirpSync,
+    native: mkdirp_native_js_1.mkdirpNative,
+    nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
+    manual: mkdirp_manual_js_1.mkdirpManual,
+    manualSync: mkdirp_manual_js_1.mkdirpManualSync,
+    useNative: use_native_js_1.useNative,
+    useNativeSync: use_native_js_1.useNativeSync,
+});
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
new file mode 100644
index 0000000000000..d9bd1d8bb5a49
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
@@ -0,0 +1,79 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirpManual = exports.mkdirpManualSync = void 0;
+const path_1 = require("path");
+const opts_arg_js_1 = require("./opts-arg.js");
+const mkdirpManualSync = (path, options, made) => {
+    const parent = (0, path_1.dirname)(path);
+    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
+    if (parent === path) {
+        try {
+            return opts.mkdirSync(path, opts);
+        }
+        catch (er) {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+            return;
+        }
+    }
+    try {
+        opts.mkdirSync(path, opts);
+        return made || path;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
+            throw er;
+        }
+        try {
+            if (!opts.statSync(path).isDirectory())
+                throw er;
+        }
+        catch (_) {
+            throw er;
+        }
+    }
+};
+exports.mkdirpManualSync = mkdirpManualSync;
+exports.mkdirpManual = Object.assign(async (path, options, made) => {
+    const opts = (0, opts_arg_js_1.optsArg)(options);
+    opts.recursive = false;
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return opts.mkdirAsync(path, opts).catch(er => {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+        });
+    }
+    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
+            throw er;
+        }
+        return opts.statAsync(path).then(st => {
+            if (st.isDirectory()) {
+                return made;
+            }
+            else {
+                throw er;
+            }
+        }, () => {
+            throw er;
+        });
+    });
+}, { sync: exports.mkdirpManualSync });
+//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
new file mode 100644
index 0000000000000..9f00567d7cc20
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
+const path_1 = require("path");
+const find_made_js_1 = require("./find-made.js");
+const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
+const opts_arg_js_1 = require("./opts-arg.js");
+const mkdirpNativeSync = (path, options) => {
+    const opts = (0, opts_arg_js_1.optsArg)(options);
+    opts.recursive = true;
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return opts.mkdirSync(path, opts);
+    }
+    const made = (0, find_made_js_1.findMadeSync)(opts, path);
+    try {
+        opts.mkdirSync(path, opts);
+        return made;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }
+};
+exports.mkdirpNativeSync = mkdirpNativeSync;
+exports.mkdirpNative = Object.assign(async (path, options) => {
+    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return await opts.mkdirAsync(path, opts);
+    }
+    return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
+        .mkdirAsync(path, opts)
+        .then(m => made || m)
+        .catch(er => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }));
+}, { sync: exports.mkdirpNativeSync });
+//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js
new file mode 100644
index 0000000000000..e8f486c090595
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.optsArg = void 0;
+const fs_1 = require("fs");
+const optsArg = (opts) => {
+    if (!opts) {
+        opts = { mode: 0o777 };
+    }
+    else if (typeof opts === 'object') {
+        opts = { mode: 0o777, ...opts };
+    }
+    else if (typeof opts === 'number') {
+        opts = { mode: opts };
+    }
+    else if (typeof opts === 'string') {
+        opts = { mode: parseInt(opts, 8) };
+    }
+    else {
+        throw new TypeError('invalid options argument');
+    }
+    const resolved = opts;
+    const optsFs = opts.fs || {};
+    opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
+    opts.mkdirAsync = opts.mkdirAsync
+        ? opts.mkdirAsync
+        : async (path, options) => {
+            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
+        };
+    opts.stat = opts.stat || optsFs.stat || fs_1.stat;
+    opts.statAsync = opts.statAsync
+        ? opts.statAsync
+        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
+    opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
+    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
+    return resolved;
+};
+exports.optsArg = optsArg;
+//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js
new file mode 100644
index 0000000000000..a6b457f6e23d5
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.pathArg = void 0;
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
+const path_1 = require("path");
+const pathArg = (path) => {
+    if (/\0/.test(path)) {
+        // simulate same failure that node raises
+        throw Object.assign(new TypeError('path must be a string without null bytes'), {
+            path,
+            code: 'ERR_INVALID_ARG_VALUE',
+        });
+    }
+    path = (0, path_1.resolve)(path);
+    if (platform === 'win32') {
+        const badWinChars = /[*|"<>?:]/;
+        const { root } = (0, path_1.parse)(path);
+        if (badWinChars.test(path.substring(root.length))) {
+            throw Object.assign(new Error('Illegal characters in path.'), {
+                path,
+                code: 'EINVAL',
+            });
+        }
+    }
+    return path;
+};
+exports.pathArg = pathArg;
+//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js
new file mode 100644
index 0000000000000..550b3452688ee
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.useNative = exports.useNativeSync = void 0;
+const fs_1 = require("fs");
+const opts_arg_js_1 = require("./opts-arg.js");
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
+const versArr = version.replace(/^v/, '').split('.');
+const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
+exports.useNativeSync = !hasNative
+    ? () => false
+    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
+exports.useNative = Object.assign(!hasNative
+    ? () => false
+    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
+    sync: exports.useNativeSync,
+});
+//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js
new file mode 100644
index 0000000000000..3e72fd59a2c1f
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js
@@ -0,0 +1,30 @@
+import { dirname } from 'path';
+export const findMade = async (opts, parent, path) => {
+    // we never want the 'made' return value to be a root directory
+    if (path === parent) {
+        return;
+    }
+    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
+    // will fail later
+    er => {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? findMade(opts, dirname(parent), parent)
+            : undefined;
+    });
+};
+export const findMadeSync = (opts, parent, path) => {
+    if (path === parent) {
+        return undefined;
+    }
+    try {
+        return opts.statSync(parent).isDirectory() ? path : undefined;
+    }
+    catch (er) {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? findMadeSync(opts, dirname(parent), parent)
+            : undefined;
+    }
+};
+//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js
new file mode 100644
index 0000000000000..0217ecc8cdd83
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js
@@ -0,0 +1,43 @@
+import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
+import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
+import { optsArg } from './opts-arg.js';
+import { pathArg } from './path-arg.js';
+import { useNative, useNativeSync } from './use-native.js';
+/* c8 ignore start */
+export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
+export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
+export { useNative, useNativeSync } from './use-native.js';
+/* c8 ignore stop */
+export const mkdirpSync = (path, opts) => {
+    path = pathArg(path);
+    const resolved = optsArg(opts);
+    return useNativeSync(resolved)
+        ? mkdirpNativeSync(path, resolved)
+        : mkdirpManualSync(path, resolved);
+};
+export const sync = mkdirpSync;
+export const manual = mkdirpManual;
+export const manualSync = mkdirpManualSync;
+export const native = mkdirpNative;
+export const nativeSync = mkdirpNativeSync;
+export const mkdirp = Object.assign(async (path, opts) => {
+    path = pathArg(path);
+    const resolved = optsArg(opts);
+    return useNative(resolved)
+        ? mkdirpNative(path, resolved)
+        : mkdirpManual(path, resolved);
+}, {
+    mkdirpSync,
+    mkdirpNative,
+    mkdirpNativeSync,
+    mkdirpManual,
+    mkdirpManualSync,
+    sync: mkdirpSync,
+    native: mkdirpNative,
+    nativeSync: mkdirpNativeSync,
+    manual: mkdirpManual,
+    manualSync: mkdirpManualSync,
+    useNative,
+    useNativeSync,
+});
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
new file mode 100644
index 0000000000000..a4d044e02d3bf
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
@@ -0,0 +1,75 @@
+import { dirname } from 'path';
+import { optsArg } from './opts-arg.js';
+export const mkdirpManualSync = (path, options, made) => {
+    const parent = dirname(path);
+    const opts = { ...optsArg(options), recursive: false };
+    if (parent === path) {
+        try {
+            return opts.mkdirSync(path, opts);
+        }
+        catch (er) {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+            return;
+        }
+    }
+    try {
+        opts.mkdirSync(path, opts);
+        return made || path;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
+            throw er;
+        }
+        try {
+            if (!opts.statSync(path).isDirectory())
+                throw er;
+        }
+        catch (_) {
+            throw er;
+        }
+    }
+};
+export const mkdirpManual = Object.assign(async (path, options, made) => {
+    const opts = optsArg(options);
+    opts.recursive = false;
+    const parent = dirname(path);
+    if (parent === path) {
+        return opts.mkdirAsync(path, opts).catch(er => {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+        });
+    }
+    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
+            throw er;
+        }
+        return opts.statAsync(path).then(st => {
+            if (st.isDirectory()) {
+                return made;
+            }
+            else {
+                throw er;
+            }
+        }, () => {
+            throw er;
+        });
+    });
+}, { sync: mkdirpManualSync });
+//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js
new file mode 100644
index 0000000000000..99d10a5425dad
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js
@@ -0,0 +1,46 @@
+import { dirname } from 'path';
+import { findMade, findMadeSync } from './find-made.js';
+import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
+import { optsArg } from './opts-arg.js';
+export const mkdirpNativeSync = (path, options) => {
+    const opts = optsArg(options);
+    opts.recursive = true;
+    const parent = dirname(path);
+    if (parent === path) {
+        return opts.mkdirSync(path, opts);
+    }
+    const made = findMadeSync(opts, path);
+    try {
+        opts.mkdirSync(path, opts);
+        return made;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManualSync(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }
+};
+export const mkdirpNative = Object.assign(async (path, options) => {
+    const opts = { ...optsArg(options), recursive: true };
+    const parent = dirname(path);
+    if (parent === path) {
+        return await opts.mkdirAsync(path, opts);
+    }
+    return findMade(opts, path).then((made) => opts
+        .mkdirAsync(path, opts)
+        .then(m => made || m)
+        .catch(er => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManual(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }));
+}, { sync: mkdirpNativeSync });
+//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js
new file mode 100644
index 0000000000000..d47e2927fee4c
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js
@@ -0,0 +1,34 @@
+import { mkdir, mkdirSync, stat, statSync, } from 'fs';
+export const optsArg = (opts) => {
+    if (!opts) {
+        opts = { mode: 0o777 };
+    }
+    else if (typeof opts === 'object') {
+        opts = { mode: 0o777, ...opts };
+    }
+    else if (typeof opts === 'number') {
+        opts = { mode: opts };
+    }
+    else if (typeof opts === 'string') {
+        opts = { mode: parseInt(opts, 8) };
+    }
+    else {
+        throw new TypeError('invalid options argument');
+    }
+    const resolved = opts;
+    const optsFs = opts.fs || {};
+    opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
+    opts.mkdirAsync = opts.mkdirAsync
+        ? opts.mkdirAsync
+        : async (path, options) => {
+            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
+        };
+    opts.stat = opts.stat || optsFs.stat || stat;
+    opts.statAsync = opts.statAsync
+        ? opts.statAsync
+        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
+    opts.statSync = opts.statSync || optsFs.statSync || statSync;
+    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
+    return resolved;
+};
+//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json b/node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js
new file mode 100644
index 0000000000000..03539cc5a94f9
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js
@@ -0,0 +1,24 @@
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
+import { parse, resolve } from 'path';
+export const pathArg = (path) => {
+    if (/\0/.test(path)) {
+        // simulate same failure that node raises
+        throw Object.assign(new TypeError('path must be a string without null bytes'), {
+            path,
+            code: 'ERR_INVALID_ARG_VALUE',
+        });
+    }
+    path = resolve(path);
+    if (platform === 'win32') {
+        const badWinChars = /[*|"<>?:]/;
+        const { root } = parse(path);
+        if (badWinChars.test(path.substring(root.length))) {
+            throw Object.assign(new Error('Illegal characters in path.'), {
+                path,
+                code: 'EINVAL',
+            });
+        }
+    }
+    return path;
+};
+//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js
new file mode 100644
index 0000000000000..ad2093867eb74
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js
@@ -0,0 +1,14 @@
+import { mkdir, mkdirSync } from 'fs';
+import { optsArg } from './opts-arg.js';
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
+const versArr = version.replace(/^v/, '').split('.');
+const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
+export const useNativeSync = !hasNative
+    ? () => false
+    : (opts) => optsArg(opts).mkdirSync === mkdirSync;
+export const useNative = Object.assign(!hasNative
+    ? () => false
+    : (opts) => optsArg(opts).mkdir === mkdir, {
+    sync: useNativeSync,
+});
+//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/package.json b/node_modules/pacote/node_modules/mkdirp/package.json
new file mode 100644
index 0000000000000..f31ac3314d6f6
--- /dev/null
+++ b/node_modules/pacote/node_modules/mkdirp/package.json
@@ -0,0 +1,91 @@
+{
+  "name": "mkdirp",
+  "description": "Recursively mkdir, like `mkdir -p`",
+  "version": "3.0.1",
+  "keywords": [
+    "mkdir",
+    "directory",
+    "make dir",
+    "make",
+    "dir",
+    "recursive",
+    "native"
+  ],
+  "bin": "./dist/cjs/src/bin.js",
+  "main": "./dist/cjs/src/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/mjs/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/src/index.d.ts",
+        "default": "./dist/cjs/src/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.11.9",
+    "@types/tap": "^15.0.7",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
+    "prettier": "^2.8.2",
+    "tap": "^16.3.3",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/isaacs/node-mkdirp.git"
+  },
+  "license": "MIT",
+  "engines": {
+    "node": ">=10"
+  }
+}
diff --git a/node_modules/pacote/node_modules/negotiator/HISTORY.md b/node_modules/pacote/node_modules/negotiator/HISTORY.md
new file mode 100644
index 0000000000000..63d537d3f6811
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/HISTORY.md
@@ -0,0 +1,114 @@
+1.0.0 / 2024-08-31
+==================
+
+  * Drop support for node <18
+  * Added an option preferred encodings array #59
+
+0.6.3 / 2022-01-22
+==================
+
+  * Revert "Lazy-load modules from main entry point"
+
+0.6.2 / 2019-04-29
+==================
+
+  * Fix sorting charset, encoding, and language with extra parameters
+
+0.6.1 / 2016-05-02
+==================
+
+  * perf: improve `Accept` parsing speed
+  * perf: improve `Accept-Charset` parsing speed
+  * perf: improve `Accept-Encoding` parsing speed
+  * perf: improve `Accept-Language` parsing speed
+
+0.6.0 / 2015-09-29
+==================
+
+  * Fix including type extensions in parameters in `Accept` parsing
+  * Fix parsing `Accept` parameters with quoted equals
+  * Fix parsing `Accept` parameters with quoted semicolons
+  * Lazy-load modules from main entry point
+  * perf: delay type concatenation until needed
+  * perf: enable strict mode
+  * perf: hoist regular expressions
+  * perf: remove closures getting spec properties
+  * perf: remove a closure from media type parsing
+  * perf: remove property delete from media type parsing
+
+0.5.3 / 2015-05-10
+==================
+
+  * Fix media type parameter matching to be case-insensitive
+
+0.5.2 / 2015-05-06
+==================
+
+  * Fix comparing media types with quoted values
+  * Fix splitting media types with quoted commas
+
+0.5.1 / 2015-02-14
+==================
+
+  * Fix preference sorting to be stable for long acceptable lists
+
+0.5.0 / 2014-12-18
+==================
+
+  * Fix list return order when large accepted list
+  * Fix missing identity encoding when q=0 exists
+  * Remove dynamic building of Negotiator class
+
+0.4.9 / 2014-10-14
+==================
+
+  * Fix error when media type has invalid parameter
+
+0.4.8 / 2014-09-28
+==================
+
+  * Fix all negotiations to be case-insensitive
+  * Stable sort preferences of same quality according to client order
+  * Support Node.js 0.6
+
+0.4.7 / 2014-06-24
+==================
+
+  * Handle invalid provided languages
+  * Handle invalid provided media types
+
+0.4.6 / 2014-06-11
+==================
+
+  *  Order by specificity when quality is the same
+
+0.4.5 / 2014-05-29
+==================
+
+  * Fix regression in empty header handling
+
+0.4.4 / 2014-05-29
+==================
+
+  * Fix behaviors when headers are not present
+
+0.4.3 / 2014-04-16
+==================
+
+  * Handle slashes on media params correctly
+
+0.4.2 / 2014-02-28
+==================
+
+  * Fix media type sorting
+  * Handle media types params strictly
+
+0.4.1 / 2014-01-16
+==================
+
+  * Use most specific matches
+
+0.4.0 / 2014-01-09
+==================
+
+  * Remove preferred prefix from methods
diff --git a/node_modules/pacote/node_modules/negotiator/LICENSE b/node_modules/pacote/node_modules/negotiator/LICENSE
new file mode 100644
index 0000000000000..ea6b9e2e9ac25
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2012-2014 Federico Romero
+Copyright (c) 2012-2014 Isaac Z. Schlueter
+Copyright (c) 2014-2015 Douglas Christopher Wilson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/pacote/node_modules/negotiator/index.js b/node_modules/pacote/node_modules/negotiator/index.js
new file mode 100644
index 0000000000000..4f51315d6af4b
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/index.js
@@ -0,0 +1,83 @@
+/*!
+ * negotiator
+ * Copyright(c) 2012 Federico Romero
+ * Copyright(c) 2012-2014 Isaac Z. Schlueter
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+var preferredCharsets = require('./lib/charset')
+var preferredEncodings = require('./lib/encoding')
+var preferredLanguages = require('./lib/language')
+var preferredMediaTypes = require('./lib/mediaType')
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = Negotiator;
+module.exports.Negotiator = Negotiator;
+
+/**
+ * Create a Negotiator instance from a request.
+ * @param {object} request
+ * @public
+ */
+
+function Negotiator(request) {
+  if (!(this instanceof Negotiator)) {
+    return new Negotiator(request);
+  }
+
+  this.request = request;
+}
+
+Negotiator.prototype.charset = function charset(available) {
+  var set = this.charsets(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.charsets = function charsets(available) {
+  return preferredCharsets(this.request.headers['accept-charset'], available);
+};
+
+Negotiator.prototype.encoding = function encoding(available, opts) {
+  var set = this.encodings(available, opts);
+  return set && set[0];
+};
+
+Negotiator.prototype.encodings = function encodings(available, options) {
+  var opts = options || {};
+  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
+};
+
+Negotiator.prototype.language = function language(available) {
+  var set = this.languages(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.languages = function languages(available) {
+  return preferredLanguages(this.request.headers['accept-language'], available);
+};
+
+Negotiator.prototype.mediaType = function mediaType(available) {
+  var set = this.mediaTypes(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.mediaTypes = function mediaTypes(available) {
+  return preferredMediaTypes(this.request.headers.accept, available);
+};
+
+// Backwards compatibility
+Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
+Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
+Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
+Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
+Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
+Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
+Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
+Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/pacote/node_modules/negotiator/lib/charset.js b/node_modules/pacote/node_modules/negotiator/lib/charset.js
new file mode 100644
index 0000000000000..cdd014803474a
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/lib/charset.js
@@ -0,0 +1,169 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredCharsets;
+module.exports.preferredCharsets = preferredCharsets;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Charset header.
+ * @private
+ */
+
+function parseAcceptCharset(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var charset = parseCharset(accepts[i].trim(), i);
+
+    if (charset) {
+      accepts[j++] = charset;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a charset from the Accept-Charset header.
+ * @private
+ */
+
+function parseCharset(str, i) {
+  var match = simpleCharsetRegExp.exec(str);
+  if (!match) return null;
+
+  var charset = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    charset: charset,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a charset.
+ * @private
+ */
+
+function getCharsetPriority(charset, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(charset, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the charset.
+ * @private
+ */
+
+function specify(charset, spec, index) {
+  var s = 0;
+  if(spec.charset.toLowerCase() === charset.toLowerCase()){
+    s |= 1;
+  } else if (spec.charset !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+}
+
+/**
+ * Get the preferred charsets from an Accept-Charset header.
+ * @public
+ */
+
+function preferredCharsets(accept, provided) {
+  // RFC 2616 sec 14.2: no header = *
+  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all charsets
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullCharset);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getCharsetPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted charsets
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full charset string.
+ * @private
+ */
+
+function getFullCharset(spec) {
+  return spec.charset;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/pacote/node_modules/negotiator/lib/encoding.js b/node_modules/pacote/node_modules/negotiator/lib/encoding.js
new file mode 100644
index 0000000000000..9ebb633d67743
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/lib/encoding.js
@@ -0,0 +1,205 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredEncodings;
+module.exports.preferredEncodings = preferredEncodings;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Encoding header.
+ * @private
+ */
+
+function parseAcceptEncoding(accept) {
+  var accepts = accept.split(',');
+  var hasIdentity = false;
+  var minQuality = 1;
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var encoding = parseEncoding(accepts[i].trim(), i);
+
+    if (encoding) {
+      accepts[j++] = encoding;
+      hasIdentity = hasIdentity || specify('identity', encoding);
+      minQuality = Math.min(minQuality, encoding.q || 1);
+    }
+  }
+
+  if (!hasIdentity) {
+    /*
+     * If identity doesn't explicitly appear in the accept-encoding header,
+     * it's added to the list of acceptable encoding with the lowest q
+     */
+    accepts[j++] = {
+      encoding: 'identity',
+      q: minQuality,
+      i: i
+    };
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse an encoding from the Accept-Encoding header.
+ * @private
+ */
+
+function parseEncoding(str, i) {
+  var match = simpleEncodingRegExp.exec(str);
+  if (!match) return null;
+
+  var encoding = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    encoding: encoding,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of an encoding.
+ * @private
+ */
+
+function getEncodingPriority(encoding, accepted, index) {
+  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(encoding, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the encoding.
+ * @private
+ */
+
+function specify(encoding, spec, index) {
+  var s = 0;
+  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
+    s |= 1;
+  } else if (spec.encoding !== '*' ) {
+    return null
+  }
+
+  return {
+    encoding: encoding,
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred encodings from an Accept-Encoding header.
+ * @public
+ */
+
+function preferredEncodings(accept, provided, preferred) {
+  var accepts = parseAcceptEncoding(accept || '');
+
+  var comparator = preferred ? function comparator (a, b) {
+    if (a.q !== b.q) {
+      return b.q - a.q // higher quality first
+    }
+
+    var aPreferred = preferred.indexOf(a.encoding)
+    var bPreferred = preferred.indexOf(b.encoding)
+
+    if (aPreferred === -1 && bPreferred === -1) {
+      // consider the original specifity/order
+      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+    }
+
+    if (aPreferred !== -1 && bPreferred !== -1) {
+      return aPreferred - bPreferred // consider the preferred order
+    }
+
+    return aPreferred === -1 ? 1 : -1 // preferred first
+  } : compareSpecs;
+
+  if (!provided) {
+    // sorted list of all encodings
+    return accepts
+      .filter(isQuality)
+      .sort(comparator)
+      .map(getFullEncoding);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getEncodingPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted encodings
+  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
+}
+
+/**
+ * Get full encoding string.
+ * @private
+ */
+
+function getFullEncoding(spec) {
+  return spec.encoding;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/pacote/node_modules/negotiator/lib/language.js b/node_modules/pacote/node_modules/negotiator/lib/language.js
new file mode 100644
index 0000000000000..a23167252719b
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/lib/language.js
@@ -0,0 +1,179 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredLanguages;
+module.exports.preferredLanguages = preferredLanguages;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Language header.
+ * @private
+ */
+
+function parseAcceptLanguage(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var language = parseLanguage(accepts[i].trim(), i);
+
+    if (language) {
+      accepts[j++] = language;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a language from the Accept-Language header.
+ * @private
+ */
+
+function parseLanguage(str, i) {
+  var match = simpleLanguageRegExp.exec(str);
+  if (!match) return null;
+
+  var prefix = match[1]
+  var suffix = match[2]
+  var full = prefix
+
+  if (suffix) full += "-" + suffix;
+
+  var q = 1;
+  if (match[3]) {
+    var params = match[3].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].split('=');
+      if (p[0] === 'q') q = parseFloat(p[1]);
+    }
+  }
+
+  return {
+    prefix: prefix,
+    suffix: suffix,
+    q: q,
+    i: i,
+    full: full
+  };
+}
+
+/**
+ * Get the priority of a language.
+ * @private
+ */
+
+function getLanguagePriority(language, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(language, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the language.
+ * @private
+ */
+
+function specify(language, spec, index) {
+  var p = parseLanguage(language)
+  if (!p) return null;
+  var s = 0;
+  if(spec.full.toLowerCase() === p.full.toLowerCase()){
+    s |= 4;
+  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
+    s |= 2;
+  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
+    s |= 1;
+  } else if (spec.full !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred languages from an Accept-Language header.
+ * @public
+ */
+
+function preferredLanguages(accept, provided) {
+  // RFC 2616 sec 14.4: no header = *
+  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all languages
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullLanguage);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getLanguagePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted languages
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full language string.
+ * @private
+ */
+
+function getFullLanguage(spec) {
+  return spec.full;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/pacote/node_modules/negotiator/lib/mediaType.js b/node_modules/pacote/node_modules/negotiator/lib/mediaType.js
new file mode 100644
index 0000000000000..8e402ea88394c
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/lib/mediaType.js
@@ -0,0 +1,294 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredMediaTypes;
+module.exports.preferredMediaTypes = preferredMediaTypes;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept header.
+ * @private
+ */
+
+function parseAccept(accept) {
+  var accepts = splitMediaTypes(accept);
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var mediaType = parseMediaType(accepts[i].trim(), i);
+
+    if (mediaType) {
+      accepts[j++] = mediaType;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a media type from the Accept header.
+ * @private
+ */
+
+function parseMediaType(str, i) {
+  var match = simpleMediaTypeRegExp.exec(str);
+  if (!match) return null;
+
+  var params = Object.create(null);
+  var q = 1;
+  var subtype = match[2];
+  var type = match[1];
+
+  if (match[3]) {
+    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
+
+    for (var j = 0; j < kvps.length; j++) {
+      var pair = kvps[j];
+      var key = pair[0].toLowerCase();
+      var val = pair[1];
+
+      // get the value, unwrapping quotes
+      var value = val && val[0] === '"' && val[val.length - 1] === '"'
+        ? val.slice(1, -1)
+        : val;
+
+      if (key === 'q') {
+        q = parseFloat(value);
+        break;
+      }
+
+      // store parameter
+      params[key] = value;
+    }
+  }
+
+  return {
+    type: type,
+    subtype: subtype,
+    params: params,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a media type.
+ * @private
+ */
+
+function getMediaTypePriority(type, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(type, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the media type.
+ * @private
+ */
+
+function specify(type, spec, index) {
+  var p = parseMediaType(type);
+  var s = 0;
+
+  if (!p) {
+    return null;
+  }
+
+  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
+    s |= 4
+  } else if(spec.type != '*') {
+    return null;
+  }
+
+  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
+    s |= 2
+  } else if(spec.subtype != '*') {
+    return null;
+  }
+
+  var keys = Object.keys(spec.params);
+  if (keys.length > 0) {
+    if (keys.every(function (k) {
+      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
+    })) {
+      s |= 1
+    } else {
+      return null
+    }
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s,
+  }
+}
+
+/**
+ * Get the preferred media types from an Accept header.
+ * @public
+ */
+
+function preferredMediaTypes(accept, provided) {
+  // RFC 2616 sec 14.2: no header = */*
+  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all types
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullType);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getMediaTypePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted types
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full type string.
+ * @private
+ */
+
+function getFullType(spec) {
+  return spec.type + '/' + spec.subtype;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
+
+/**
+ * Count the number of quotes in a string.
+ * @private
+ */
+
+function quoteCount(string) {
+  var count = 0;
+  var index = 0;
+
+  while ((index = string.indexOf('"', index)) !== -1) {
+    count++;
+    index++;
+  }
+
+  return count;
+}
+
+/**
+ * Split a key value pair.
+ * @private
+ */
+
+function splitKeyValuePair(str) {
+  var index = str.indexOf('=');
+  var key;
+  var val;
+
+  if (index === -1) {
+    key = str;
+  } else {
+    key = str.slice(0, index);
+    val = str.slice(index + 1);
+  }
+
+  return [key, val];
+}
+
+/**
+ * Split an Accept header into media types.
+ * @private
+ */
+
+function splitMediaTypes(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 1, j = 0; i < accepts.length; i++) {
+    if (quoteCount(accepts[j]) % 2 == 0) {
+      accepts[++j] = accepts[i];
+    } else {
+      accepts[j] += ',' + accepts[i];
+    }
+  }
+
+  // trim accepts
+  accepts.length = j + 1;
+
+  return accepts;
+}
+
+/**
+ * Split a string of parameters.
+ * @private
+ */
+
+function splitParameters(str) {
+  var parameters = str.split(';');
+
+  for (var i = 1, j = 0; i < parameters.length; i++) {
+    if (quoteCount(parameters[j]) % 2 == 0) {
+      parameters[++j] = parameters[i];
+    } else {
+      parameters[j] += ';' + parameters[i];
+    }
+  }
+
+  // trim parameters
+  parameters.length = j + 1;
+
+  for (var i = 0; i < parameters.length; i++) {
+    parameters[i] = parameters[i].trim();
+  }
+
+  return parameters;
+}
diff --git a/node_modules/pacote/node_modules/negotiator/package.json b/node_modules/pacote/node_modules/negotiator/package.json
new file mode 100644
index 0000000000000..e4bdc1ef4f748
--- /dev/null
+++ b/node_modules/pacote/node_modules/negotiator/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "negotiator",
+  "description": "HTTP content negotiation",
+  "version": "1.0.0",
+  "contributors": [
+    "Douglas Christopher Wilson ",
+    "Federico Romero ",
+    "Isaac Z. Schlueter  (http://blog.izs.me/)"
+  ],
+  "license": "MIT",
+  "keywords": [
+    "http",
+    "content negotiation",
+    "accept",
+    "accept-language",
+    "accept-encoding",
+    "accept-charset"
+  ],
+  "repository": "jshttp/negotiator",
+  "devDependencies": {
+    "eslint": "7.32.0",
+    "eslint-plugin-markdown": "2.2.1",
+    "mocha": "9.1.3",
+    "nyc": "15.1.0"
+  },
+  "files": [
+    "lib/",
+    "HISTORY.md",
+    "LICENSE",
+    "index.js",
+    "README.md"
+  ],
+  "engines": {
+    "node": ">= 0.6"
+  },
+  "scripts": {
+    "lint": "eslint .",
+    "test": "mocha --reporter spec --check-leaks --bail test/",
+    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
+    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+    "test-cov": "nyc --reporter=html --reporter=text npm test"
+  }
+}
diff --git a/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/pacote/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js b/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..d409b7f1becfc
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const isWindows = process.platform === 'win32'
+
+const { URL } = require('node:url')
+// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
+const path = isWindows ? require('node:path/win32') : require('node:path')
+const { homedir } = require('node:os')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const validatePackageName = require('validate-npm-package-name')
+const { log } = require('proc-log')
+
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFileType = /[.](?:tgz|tar.gz|tar)$/i
+const isPortNumber = /:[0-9]+(\/|$)/i
+const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
+const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  // eslint-disable-next-line max-len
+  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+function isFileSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  if (spec.toLowerCase().startsWith('file:')) {
+    return true
+  }
+  if (isWindows) {
+    return isWindowsFile.test(spec)
+  }
+  // We never hit this in windows tests, obviously
+  /* istanbul ignore next */
+  return isPosixFile.test(spec)
+}
+
+function isAliasSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  return spec.toLowerCase().startsWith('npm:')
+}
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.name = name
+  }
+
+  if (!where) {
+    where = process.cwd()
+  }
+
+  if (isFileSpec(spec)) {
+    return fromFile(res, where)
+  } else if (isAliasSpec(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+class Result {
+  constructor (opts) {
+    this.type = opts.type
+    this.registry = opts.registry
+    this.where = opts.where
+    if (opts.raw == null) {
+      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
+    } else {
+      this.raw = opts.raw
+    }
+    this.name = undefined
+    this.escapedName = undefined
+    this.scope = undefined
+    this.rawSpec = opts.rawSpec || ''
+    this.saveSpec = opts.saveSpec
+    this.fetchSpec = opts.fetchSpec
+    if (opts.name) {
+      this.setName(opts.name)
+    }
+    this.gitRange = opts.gitRange
+    this.gitCommittish = opts.gitCommittish
+    this.gitSubdir = opts.gitSubdir
+    this.hosted = opts.hosted
+  }
+
+  // TODO move this to a getter/setter in a semver major
+  setName (name) {
+    const valid = validatePackageName(name)
+    if (!valid.validForOldPackages) {
+      throw invalidPackageName(name, valid, this.raw)
+    }
+
+    this.name = name
+    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+    this.escapedName = name.replace('/', '%2f')
+    return this
+  }
+
+  toString () {
+    const full = []
+    if (this.name != null && this.name !== '') {
+      full.push(this.name)
+    }
+    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+    if (spec != null && spec !== '') {
+      full.push(spec)
+    }
+    return full.length ? full.join('@') : this.raw
+  }
+
+  toJSON () {
+    const result = Object.assign({}, this)
+    delete result.hosted
+    return result
+  }
+}
+
+// sets res.gitCommittish, res.gitRange, and res.gitSubdir
+function setGitAttrs (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : the n it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+}
+
+// Taken from: EncodePathChars and lookup_table in src/node_url.cc
+// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
+// encodeURI mangles windows paths. We can't use it to encode paths.
+// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
+// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
+const encodedPathChars = new Map([
+  ['\0', '%00'],
+  ['\t', '%09'],
+  ['\n', '%0A'],
+  ['\r', '%0D'],
+  [' ', '%20'],
+  ['"', '%22'],
+  ['#', '%23'],
+  ['%', '%25'],
+  ['?', '%3F'],
+  ['[', '%5B'],
+  ['\\', isWindows ? '/' : '%5C'],
+  [']', '%5D'],
+  ['^', '%5E'],
+  ['|', '%7C'],
+  ['~', '%7E'],
+])
+
+function pathToFileURL (str) {
+  let result = ''
+  for (let i = 0; i < str.length; i++) {
+    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
+  }
+  if (result.startsWith('file:')) {
+    return result
+  }
+  return `file:${result}`
+}
+
+function fromFile (res, where) {
+  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  let rawSpec = pathToFileURL(res.rawSpec)
+
+  if (rawSpec.startsWith('file:/')) {
+    // XXX backwards compatibility lack of compliance with RFC 8089
+
+    // turn file://path into file:/path
+    if (/^file:\/\/[^/]/.test(rawSpec)) {
+      rawSpec = `file:/${rawSpec.slice(5)}`
+    }
+
+    // turn file:/../path into file:../path
+    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
+      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    }
+  }
+
+  let resolvedUrl
+  let specUrl
+  try {
+    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
+    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
+    specUrl = new URL(rawSpec)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8089')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawSpec.slice(5))) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
+  res.saveSpec = res.saveSpec.split('\\').join('/')
+  // Ignoring because this only happens in windows
+  /* istanbul ignore next */
+  if (res.saveSpec.startsWith('file://')) {
+    // normalization of \\win32\root paths can cause a double / which we don't want
+    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
+  }
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  setGitAttrs(res, hosted.committish)
+  return res
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function fromURL (res) {
+  let rawSpec = res.rawSpec
+  res.saveSpec = rawSpec
+  if (rawSpec.startsWith('git+ssh:')) {
+    // git ssh specifiers are overloaded to also use scp-style git
+    // specifiers, so we have to parse those out and treat them special.
+    // They are NOT true URIs, so we can't hand them to URL.
+
+    // This regex looks for things that look like:
+    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+    // ...and various combinations. The username in the beginning is *required*.
+    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+    // Filter out all-number "usernames" which are really port numbers
+    // They can either be :1234 :1234/ or :1234/path but not :12abc
+    if (matched && !matched[1].match(isPortNumber)) {
+      res.type = 'git'
+      setGitAttrs(res, matched[2])
+      res.fetchSpec = matched[1]
+      return res
+    }
+  } else if (rawSpec.startsWith('git+file://')) {
+    // URL can't handle windows paths
+    rawSpec = rawSpec.replace(/\\/g, '/')
+  }
+  const parsedUrl = new URL(rawSpec)
+  // check the protocol, and then see if it's git or not
+  switch (parsedUrl.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:':
+      res.type = 'git'
+      setGitAttrs(res, parsedUrl.hash.slice(1))
+      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
+        // URL can't handle drive letters on windows file paths, the host can't contain a :
+        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
+      } else {
+        parsedUrl.hash = ''
+        res.fetchSpec = parsedUrl.toString()
+      }
+      if (res.fetchSpec.startsWith('git+')) {
+        res.fetchSpec = res.fetchSpec.slice(4)
+      }
+      break
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  if (!subSpec.name) {
+    throw new Error('aliases must have a name')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components as we save based on the fetched
+  // version, not on the argument so this can't compute that.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
+
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/package.json b/node_modules/pacote/node_modules/npm-package-arg/package.json
similarity index 54%
rename from node_modules/pacote/node_modules/@npmcli/package-json/package.json
rename to node_modules/pacote/node_modules/npm-package-arg/package.json
index 263d67ff3bc5b..db6ce9074cfa2 100644
--- a/node_modules/pacote/node_modules/@npmcli/package-json/package.json
+++ b/node_modules/pacote/node_modules/npm-package-arg/package.json
@@ -1,25 +1,30 @@
 {
-  "name": "@npmcli/package-json",
-  "version": "6.2.0",
-  "description": "Programmatic API to update package.json",
-  "keywords": [
-    "npm",
-    "oss"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/package-json.git"
+  "name": "npm-package-arg",
+  "version": "13.0.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
   },
-  "license": "ISC",
-  "author": "GitHub Inc.",
-  "main": "lib/index.js",
   "files": [
     "bin/",
     "lib/"
   ],
+  "dependencies": {
+    "hosted-git-info": "^9.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^6.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.5",
+    "tap": "^16.0.1"
+  },
   "scripts": {
-    "snap": "tap",
     "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
     "lint": "npm run eslint",
     "lintfix": "npm run eslint -- --fix",
     "posttest": "npm run lint",
@@ -28,34 +33,29 @@
     "template-oss-apply": "template-oss-apply --force",
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
-  "dependencies": {
-    "@npmcli/git": "^6.0.0",
-    "glob": "^10.2.2",
-    "hosted-git-info": "^8.0.0",
-    "json-parse-even-better-errors": "^4.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.5.3",
-    "validate-npm-package-license": "^3.0.4"
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-package-arg.git"
   },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/template-oss": "4.23.6",
-    "read-package-json": "^7.0.0",
-    "read-package-json-fast": "^4.0.0",
-    "tap": "^16.0.1"
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
   },
+  "homepage": "https://github.com/npm/npm-package-arg",
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.6",
-    "publish": "true"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "tap": {
+    "branches": 97,
     "nyc-arg": [
       "--exclude",
       "tap-snapshots/**"
     ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.5",
+    "publish": true
   }
 }
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
new file mode 100644
index 0000000000000..985c78df7a9bf
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
@@ -0,0 +1,219 @@
+'use strict'
+
+const npa = require('npm-package-arg')
+const semver = require('semver')
+const { checkEngine } = require('npm-install-checks')
+const normalizeBin = require('npm-normalize-package-bin')
+
+const engineOk = (manifest, npmVersion, nodeVersion) => {
+  try {
+    checkEngine(manifest, npmVersion, nodeVersion)
+    return true
+  } catch (_) {
+    return false
+  }
+}
+
+const isBefore = (verTimes, ver, time) =>
+  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
+
+const avoidSemverOpt = { includePrerelease: true, loose: true }
+const shouldAvoid = (ver, avoid) =>
+  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
+
+const decorateAvoid = (result, avoid) =>
+  result && shouldAvoid(result.version, avoid)
+    ? { ...result, _shouldAvoid: true }
+    : result
+
+const pickManifest = (packument, wanted, opts) => {
+  const {
+    defaultTag = 'latest',
+    before = null,
+    nodeVersion = process.version,
+    npmVersion = null,
+    includeStaged = false,
+    avoid = null,
+    avoidStrict = false,
+  } = opts
+
+  const { name, time: verTimes } = packument
+  const versions = packument.versions || {}
+
+  if (avoidStrict) {
+    const looseOpts = {
+      ...opts,
+      avoidStrict: false,
+    }
+
+    const result = pickManifest(packument, wanted, looseOpts)
+    if (!result || !result._shouldAvoid) {
+      return result
+    }
+
+    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
+    if (!caret || !caret._shouldAvoid) {
+      return {
+        ...caret,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: false,
+      }
+    }
+
+    const star = pickManifest(packument, '*', looseOpts)
+    if (!star || !star._shouldAvoid) {
+      return {
+        ...star,
+        _outsideDependencyRange: true,
+        _isSemVerMajor: true,
+      }
+    }
+
+    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
+      code: 'ETARGET',
+      name,
+      wanted,
+      avoid,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const staged = (includeStaged && packument.stagedVersions &&
+    packument.stagedVersions.versions) || {}
+  const restricted = (packument.policyRestrictions &&
+    packument.policyRestrictions.versions) || {}
+
+  const time = before && verTimes ? +(new Date(before)) : Infinity
+  const spec = npa.resolve(name, wanted || defaultTag)
+  const type = spec.type
+  const distTags = packument['dist-tags'] || {}
+
+  if (type !== 'tag' && type !== 'version' && type !== 'range') {
+    throw new Error('Only tag, version, and range are supported')
+  }
+
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
+  if (wanted && type === 'tag') {
+    const ver = distTags[wanted]
+    // if the version in the dist-tags is before the before date, then
+    // we use that. Otherwise, we get the highest precedence version
+    // prior to the dist-tag.
+    if (isBefore(verTimes, ver, time)) {
+      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
+    } else {
+      return pickManifest(packument, `<=${ver}`, opts)
+    }
+  }
+
+  // similarly, if a specific version, then only that version will do
+  if (wanted && type === 'version') {
+    const ver = semver.clean(wanted, { loose: true })
+    const mani = versions[ver] || staged[ver] || restricted[ver]
+    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
+  }
+
+  // ok, sort based on our heuristics, and pick the best fit
+  const range = type === 'range' ? wanted : '*'
+
+  // if the range is *, then we prefer the 'latest' if available but skip
+  // this if it should be avoided; in that case we have to try a little
+  // harder.
+  const defaultVer = distTags[defaultTag]
+  if (defaultVer &&
+      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
+      !restricted[defaultVer] &&
+      !shouldAvoid(defaultVer, avoid)) {
+    const mani = versions[defaultVer]
+    const ok = mani &&
+      isBefore(verTimes, defaultVer, time) &&
+      engineOk(mani, npmVersion, nodeVersion) &&
+      !mani.deprecated &&
+      !staged[defaultVer]
+    if (ok) {
+      return mani
+    }
+  }
+
+  // ok, actually have to sort the list and take the winner
+  const allEntries = Object.entries(versions)
+    .concat(Object.entries(staged))
+    .concat(Object.entries(restricted))
+    .filter(([ver]) => isBefore(verTimes, ver, time))
+
+  if (!allEntries.length) {
+    throw Object.assign(new Error(`No versions available for ${name}`), {
+      code: 'ENOVERSIONS',
+      name,
+      type,
+      wanted,
+      before,
+      versions: Object.keys(versions),
+    })
+  }
+
+  const sortSemverOpt = { loose: true }
+  const entries = allEntries.filter(([ver]) =>
+    semver.satisfies(ver, range, { loose: true }))
+    .sort((a, b) => {
+      const [vera, mania] = a
+      const [verb, manib] = b
+      const notavoida = !shouldAvoid(vera, avoid)
+      const notavoidb = !shouldAvoid(verb, avoid)
+      const notrestra = !restricted[vera]
+      const notrestrb = !restricted[verb]
+      const notstagea = !staged[vera]
+      const notstageb = !staged[verb]
+      const notdepra = !mania.deprecated
+      const notdeprb = !manib.deprecated
+      const enginea = engineOk(mania, npmVersion, nodeVersion)
+      const engineb = engineOk(manib, npmVersion, nodeVersion)
+      // sort by:
+      // - not an avoided version
+      // - not restricted
+      // - not staged
+      // - not deprecated and engine ok
+      // - engine ok
+      // - not deprecated
+      // - semver
+      return (notavoidb - notavoida) ||
+        (notrestrb - notrestra) ||
+        (notstageb - notstagea) ||
+        ((notdeprb && engineb) - (notdepra && enginea)) ||
+        (engineb - enginea) ||
+        (notdeprb - notdepra) ||
+        semver.rcompare(vera, verb, sortSemverOpt)
+    })
+
+  return decorateAvoid(entries[0] && entries[0][1], avoid)
+}
+
+module.exports = (packument, wanted, opts = {}) => {
+  const mani = pickManifest(packument, wanted, opts)
+  const picked = mani && normalizeBin(mani)
+  const policyRestrictions = packument.policyRestrictions
+  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
+
+  if (picked && !restricted[picked.version]) {
+    return picked
+  }
+
+  const { before = null, defaultTag = 'latest' } = opts
+  const bstr = before ? new Date(before).toLocaleString() : ''
+  const { name } = packument
+  const pckg = `${name}@${wanted}` +
+    (before ? ` with a date before ${bstr}` : '')
+
+  const isForbidden = picked && !!restricted[picked.version]
+  const polMsg = isForbidden ? policyRestrictions.message : ''
+
+  const msg = !isForbidden ? `No matching version found for ${pckg}.`
+    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
+
+  const code = isForbidden ? 'E403' : 'ETARGET'
+  throw Object.assign(new Error(msg), {
+    code,
+    type: npa.resolve(packument.name, wanted).type,
+    wanted,
+    versions: Object.keys(packument.versions ?? {}),
+    name,
+    distTags: packument['dist-tags'],
+    defaultTag,
+  })
+}
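
For context, a small usage sketch of the module above, against an illustrative packument (not part of the patch):

    const pickManifest = require('npm-pick-manifest')

    const packument = {
      name: 'foo',
      'dist-tags': { latest: '1.1.0' },
      versions: {
        '1.0.0': { name: 'foo', version: '1.0.0' },
        '1.1.0': { name: 'foo', version: '1.1.0' },
      },
    }

    pickManifest(packument, '^1.0.0').version   // '1.1.0' (highest satisfying version)
    pickManifest(packument, 'latest').version   // '1.1.0' (dist-tag lookup)
    pickManifest(packument, '1.0.0').version    // '1.0.0' (exact version)
    // an unsatisfiable spec throws with code 'ETARGET':
    // pickManifest(packument, '^2.0.0')
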
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/package.json b/node_modules/pacote/node_modules/npm-pick-manifest/package.json
new file mode 100644
index 0000000000000..f1ca18ed32108
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-pick-manifest/package.json
@@ -0,0 +1,58 @@
+{
+  "name": "npm-pick-manifest",
+  "version": "11.0.1",
+  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
+  "main": "./lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "coverage": "tap",
+    "lint": "npm run eslint",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run eslint -- --fix",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-pick-manifest.git"
+  },
+  "keywords": [
+    "npm",
+    "semver",
+    "package manager"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "npm-install-checks": "^7.1.0",
+    "npm-normalize-package-bin": "^4.0.0",
+    "npm-package-arg": "^13.0.0",
+    "semver": "^7.3.5"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": true
+  }
+}
diff --git a/node_modules/pacote/node_modules/@npmcli/package-json/LICENSE b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
similarity index 87%
rename from node_modules/pacote/node_modules/@npmcli/package-json/LICENSE
rename to node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
index 6a1f3708f6d70..5fc208ff122e0 100644
--- a/node_modules/pacote/node_modules/@npmcli/package-json/LICENSE
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
@@ -1,6 +1,8 @@
+
+
 ISC License
 
-Copyright GitHub Inc.
+Copyright npm, Inc.
 
 Permission to use, copy, modify, and/or distribute this
 software for any purpose with or without fee is hereby
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
new file mode 100644
index 0000000000000..9270025fa8d90
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
@@ -0,0 +1,181 @@
+'use strict'
+const fs = require('fs')
+const npa = require('npm-package-arg')
+const { URL } = require('url')
+
+// Find the longest registry key that is used for some kind of auth
+// in the options.  Returns the registry key and the auth config.
+const regFromURI = (uri, opts) => {
+  const parsed = new URL(uri)
+  // try to find a config key indicating we have auth for this registry
+  // can be one of :_authToken, :_auth, :_password and :username, or
+  // :certfile and :keyfile
+  // We walk up the "path" until we're left with just //<host>[:<port>],
+  // stopping when we reach '//'.
+  let regKey = `//${parsed.host}${parsed.pathname}`
+  while (regKey.length > '//'.length) {
+    const authKey = hasAuth(regKey, opts)
+    // got some auth for this URI
+    if (authKey) {
+      return { regKey, authKey }
+    }
+
+    // can be either //host/some/path/:_auth or //host/some/path:_auth
+    // walk up by removing EITHER what's after the slash OR the slash itself
+    regKey = regKey.replace(/([^/]+|\/)$/, '')
+  }
+  return { regKey: false, authKey: null }
+}
+
+// Not only do we want to know if there is auth, but if we are calling `npm
+// logout` we want to know what config value specifically provided it.  This is
+// so we can look up where the config came from to delete it (i.e. user vs
+// project)
+const hasAuth = (regKey, opts) => {
+  if (opts[`${regKey}:_authToken`]) {
+    return '_authToken'
+  }
+  if (opts[`${regKey}:_auth`]) {
+    return '_auth'
+  }
+  if (opts[`${regKey}:username`] && opts[`${regKey}:_password`]) {
+    // 'password' can be inferred to also be present
+    return 'username'
+  }
+  if (opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`]) {
+    // 'keyfile' can be inferred to also be present
+    return 'certfile'
+  }
+  return false
+}
+
+const sameHost = (a, b) => {
+  const parsedA = new URL(a)
+  const parsedB = new URL(b)
+  return parsedA.host === parsedB.host
+}
+
+const getRegistry = opts => {
+  const { spec } = opts
+  const { scope: specScope, subSpec } = spec ? npa(spec) : {}
+  const subSpecScope = subSpec && subSpec.scope
+  const scope = subSpec ? subSpecScope : specScope
+  const scopeReg = scope && opts[`${scope}:registry`]
+  return scopeReg || opts.registry
+}
+
+const maybeReadFile = file => {
+  try {
+    return fs.readFileSync(file, 'utf8')
+  } catch (er) {
+    if (er.code !== 'ENOENT') {
+      throw er
+    }
+    return null
+  }
+}
+
+const getAuth = (uri, opts = {}) => {
+  const { forceAuth } = opts
+  if (!uri) {
+    throw new Error('URI is required')
+  }
+  const { regKey, authKey } = regFromURI(uri, forceAuth || opts)
+
+  // we are only allowed to use what's in forceAuth if specified
+  if (forceAuth && !regKey) {
+    return new Auth({
+      // if we force auth we don't want to refer back to anything in config
+      regKey: false,
+      authKey: null,
+      scopeAuthKey: null,
+      token: forceAuth._authToken || forceAuth.token,
+      username: forceAuth.username,
+      password: forceAuth._password || forceAuth.password,
+      auth: forceAuth._auth || forceAuth.auth,
+      certfile: forceAuth.certfile,
+      keyfile: forceAuth.keyfile,
+    })
+  }
+
+  // no auth for this URI, but might have it for the registry
+  if (!regKey) {
+    const registry = getRegistry(opts)
+    if (registry && uri !== registry && sameHost(uri, registry)) {
+      return getAuth(registry, opts)
+    } else if (registry !== opts.registry) {
+      // If making a tarball request to a different base URI than the
+      // registry where we logged in, but the same auth SHOULD be sent
+      // to that artifact host, then we track where it was coming in from,
+      // and warn the user if we get a 4xx error on it.
+      const { regKey: scopeAuthKey, authKey: _authKey } = regFromURI(registry, opts)
+      return new Auth({ scopeAuthKey, regKey: scopeAuthKey, authKey: _authKey })
+    }
+  }
+
+  const {
+    [`${regKey}:_authToken`]: token,
+    [`${regKey}:username`]: username,
+    [`${regKey}:_password`]: password,
+    [`${regKey}:_auth`]: auth,
+    [`${regKey}:certfile`]: certfile,
+    [`${regKey}:keyfile`]: keyfile,
+  } = opts
+
+  return new Auth({
+    scopeAuthKey: null,
+    regKey,
+    authKey,
+    token,
+    auth,
+    username,
+    password,
+    certfile,
+    keyfile,
+  })
+}
+
+class Auth {
+  constructor ({
+    token,
+    auth,
+    username,
+    password,
+    scopeAuthKey,
+    certfile,
+    keyfile,
+    regKey,
+    authKey,
+  }) {
+    // same as regKey but only present for scoped auth. Should have been named scopeRegKey
+    this.scopeAuthKey = scopeAuthKey
+    // `${regKey}:${authKey}` will get you back to the auth config that gave us auth
+    this.regKey = regKey
+    this.authKey = authKey
+    this.token = null
+    this.auth = null
+    this.isBasicAuth = false
+    this.cert = null
+    this.key = null
+    if (token) {
+      this.token = token
+    } else if (auth) {
+      this.auth = auth
+    } else if (username && password) {
+      const p = Buffer.from(password, 'base64').toString('utf8')
+      this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
+      this.isBasicAuth = true
+    }
+    // mTLS may be used in conjunction with another auth method above
+    if (certfile && keyfile) {
+      const cert = maybeReadFile(certfile, 'utf-8')
+      const key = maybeReadFile(keyfile, 'utf-8')
+      if (cert && key) {
+        this.cert = cert
+        this.key = key
+      }
+    }
+  }
+}
+
+module.exports = getAuth
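
As a rough illustration of the registry-key walking above (getAuth is re-exported from the package's index.js; registry and token values are illustrative):

    const { getAuth } = require('npm-registry-fetch')

    const opts = {
      registry: 'https://registry.npmjs.org/',
      '//registry.npmjs.org/:_authToken': 'npm_token_xxx',
    }

    // regFromURI walks /-/whoami up to //registry.npmjs.org/ and finds the token
    const auth = getAuth('https://registry.npmjs.org/-/whoami', opts)
    auth.token     // 'npm_token_xxx'
    auth.regKey    // '//registry.npmjs.org/'
    auth.authKey   // '_authToken'
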
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
new file mode 100644
index 0000000000000..2f183082ab2ce
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
@@ -0,0 +1,108 @@
+'use strict'
+
+const errors = require('./errors.js')
+const { Response } = require('minipass-fetch')
+const defaultOpts = require('./default-opts.js')
+const { log } = require('proc-log')
+const { redact: cleanUrl } = require('@npmcli/redact')
+
+/* eslint-disable-next-line max-len */
+const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
+const checkResponse =
+  async ({ method, uri, res, startTime, auth, opts }) => {
+    opts = { ...defaultOpts, ...opts }
+    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
+      log.notice('', res.headers.get('npm-notice'))
+    }
+
+    if (res.status >= 400) {
+      logRequest(method, res, startTime)
+      if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) {
+      // we didn't have auth for THIS request, but we do have auth for
+      // requests to the registry indicated by the spec's scope value.
+      // Warn the user.
+        log.warn('registry', `No auth for URI, but auth present for scoped registry.
+
+URI: ${uri}
+Scoped Registry Key: ${auth.scopeAuthKey}
+
+More info here: ${moreInfoUrl}`)
+      }
+      return checkErrors(method, res, startTime, opts)
+    } else {
+      res.body.on('end', () => logRequest(method, res, startTime, opts))
+      if (opts.ignoreBody) {
+        res.body.resume()
+        return new Response(null, res)
+      }
+      return res
+    }
+  }
+module.exports = checkResponse
+
+function logRequest (method, res, startTime) {
+  const elapsedTime = Date.now() - startTime
+  const attempt = res.headers.get('x-fetch-attempts')
+  const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
+  const cacheStatus = res.headers.get('x-local-cache-status')
+  const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
+  const urlStr = cleanUrl(res.url)
+
+  // If make-fetch-happen reports a cache hit, then there was no fetch
+  if (cacheStatus === 'hit') {
+    log.http(
+      'cache',
+      `${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+    )
+  } else {
+    log.http(
+      'fetch',
+      `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+    )
+  }
+}
+
+function checkErrors (method, res, startTime, opts) {
+  return res.buffer()
+    .catch(() => null)
+    .then(body => {
+      let parsed = body
+      try {
+        parsed = JSON.parse(body.toString('utf8'))
+      } catch {
+        // ignore errors
+      }
+      if (res.status === 401 && res.headers.get('www-authenticate')) {
+        const auth = res.headers.get('www-authenticate')
+          .split(/,\s*/)
+          .map(s => s.toLowerCase())
+        if (auth.indexOf('ipaddress') !== -1) {
+          throw new errors.HttpErrorAuthIPAddress(
+            method, res, parsed, opts.spec
+          )
+        } else if (auth.indexOf('otp') !== -1) {
+          throw new errors.HttpErrorAuthOTP(
+            method, res, parsed, opts.spec
+          )
+        } else {
+          throw new errors.HttpErrorAuthUnknown(
+            method, res, parsed, opts.spec
+          )
+        }
+      } else if (
+        res.status === 401 &&
+        body != null &&
+        /one-time pass/.test(body.toString('utf8'))
+      ) {
+        // Heuristic for malformed OTP responses that don't include the
+        // www-authenticate header.
+        throw new errors.HttpErrorAuthOTP(
+          method, res, parsed, opts.spec
+        )
+      } else {
+        throw new errors.HttpErrorGeneral(
+          method, res, parsed, opts.spec
+        )
+      }
+    })
+}
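
For reference, a minimal sketch of how the www-authenticate heuristic above classifies a 401 (header value is illustrative):

    // same parsing as checkErrors(): split the challenge list and lowercase it
    const challenges = 'Basic realm="npm", OTP'.split(/,\s*/).map(s => s.toLowerCase())
    challenges.indexOf('otp') !== -1   // true -> HttpErrorAuthOTP (code 'EOTP')
    // 'ipaddress' -> HttpErrorAuthIPAddress; anything else -> HttpErrorAuthUnknown
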
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
new file mode 100644
index 0000000000000..f0847f0b507e2
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
@@ -0,0 +1,19 @@
+const pkg = require('../package.json')
+module.exports = {
+  maxSockets: 12,
+  method: 'GET',
+  registry: 'https://registry.npmjs.org/',
+  timeout: 5 * 60 * 1000, // 5 minutes
+  strictSSL: true,
+  noProxy: process.env.NOPROXY,
+  userAgent: `${pkg.name
+    }@${
+      pkg.version
+    }/node@${
+      process.version
+    }+${
+      process.arch
+    } (${
+      process.platform
+    })`,
+}
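
For reference, the user-agent template above expands to a string of this shape (values are illustrative):

    // 'npm-registry-fetch@19.0.0/node@v22.9.0+x64 (linux)'
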
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
new file mode 100644
index 0000000000000..5bf6b012a24ef
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const { URL } = require('node:url')
+
+function packageName (href) {
+  try {
+    let basePath = new URL(href).pathname.slice(1)
+    if (!basePath.match(/^-/)) {
+      basePath = basePath.split('/')
+      var index = basePath.indexOf('_rewrite')
+      if (index === -1) {
+        index = basePath.length - 1
+      } else {
+        index++
+      }
+      return decodeURIComponent(basePath[index])
+    }
+  } catch {
+    // this is ok
+  }
+}
+
+class HttpErrorBase extends Error {
+  constructor (method, res, body, spec) {
+    super()
+    this.name = this.constructor.name
+    this.headers = typeof res.headers?.raw === 'function' ? res.headers.raw() : res.headers
+    this.statusCode = res.status
+    this.code = `E${res.status}`
+    this.method = method
+    this.uri = res.url
+    this.body = body
+    this.pkgid = spec ? spec.toString() : packageName(res.url)
+    Error.captureStackTrace(this, this.constructor)
+  }
+}
+
+class HttpErrorGeneral extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = `${res.status} ${res.statusText} - ${
+      this.method.toUpperCase()
+    } ${
+      this.spec || this.uri
+    }${
+      (body && body.error) ? ' - ' + body.error : ''
+    }`
+  }
+}
+
+class HttpErrorAuthOTP extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'OTP required for authentication'
+    this.code = 'EOTP'
+  }
+}
+
+class HttpErrorAuthIPAddress extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'Login is not allowed from your IP address'
+    this.code = 'EAUTHIP'
+  }
+}
+
+class HttpErrorAuthUnknown extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
+  }
+}
+
+module.exports = {
+  HttpErrorBase,
+  HttpErrorGeneral,
+  HttpErrorAuthOTP,
+  HttpErrorAuthIPAddress,
+  HttpErrorAuthUnknown,
+}
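
A short sketch of what packageName() above recovers from a registry URL (it is a file-local helper, so calling it directly is purely illustrative):

    // scoped names arrive percent-encoded in the path and are decoded back
    packageName('https://registry.npmjs.org/@scope%2fpkg')   // '@scope/pkg'
    packageName('https://registry.npmjs.org/abbrev')         // 'abbrev'
    // '-'-prefixed API paths are not package URLs, so nothing is returned
    packageName('https://registry.npmjs.org/-/v1/search')    // undefined
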
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
new file mode 100644
index 0000000000000..898c8125bfe0e
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
@@ -0,0 +1,247 @@
+'use strict'
+
+const { HttpErrorAuthOTP } = require('./errors.js')
+const checkResponse = require('./check-response.js')
+const getAuth = require('./auth.js')
+const fetch = require('make-fetch-happen')
+const JSONStream = require('./json-stream')
+const npa = require('npm-package-arg')
+const qs = require('querystring')
+const url = require('url')
+const zlib = require('minizlib')
+const { Minipass } = require('minipass')
+
+const defaultOpts = require('./default-opts.js')
+
+// WhatWG URL throws if it's not fully resolved
+const urlIsValid = u => {
+  try {
+    return !!new url.URL(u)
+  } catch (_) {
+    return false
+  }
+}
+
+module.exports = regFetch
+function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
+  const opts = {
+    ...defaultOpts,
+    ...opts_,
+  }
+
+  // if we did not get a fully qualified URI, then we look at the registry
+  // config or relevant scope to resolve it.
+  const uriValid = urlIsValid(uri)
+  let registry = opts.registry || defaultOpts.registry
+  if (!uriValid) {
+    registry = opts.registry = (
+      (opts.spec && pickRegistry(opts.spec, opts)) ||
+      opts.registry ||
+      registry
+    )
+    uri = `${
+      registry.trim().replace(/\/?$/g, '')
+    }/${
+      uri.trim().replace(/^\//, '')
+    }`
+    // asserts that this is now valid
+    new url.URL(uri)
+  }
+
+  const method = opts.method || 'GET'
+
+  // resolve the auth and headers for this request, taking into account the scope, the prefix of `uri`, etc
+  const startTime = Date.now()
+  const auth = getAuth(uri, opts)
+  const headers = getHeaders(uri, auth, opts)
+  let body = opts.body
+  const bodyIsStream = Minipass.isStream(body)
+  const bodyIsPromise = body &&
+    typeof body === 'object' &&
+    typeof body.then === 'function'
+
+  if (
+    body &&
+    !bodyIsStream &&
+    !bodyIsPromise &&
+    typeof body !== 'string' &&
+    !Buffer.isBuffer(body)
+  ) {
+    headers['content-type'] = headers['content-type'] || 'application/json'
+    body = JSON.stringify(body)
+  } else if (body && !headers['content-type']) {
+    headers['content-type'] = 'application/octet-stream'
+  }
+
+  if (opts.gzip) {
+    headers['content-encoding'] = 'gzip'
+    if (bodyIsStream) {
+      const gz = new zlib.Gzip()
+      body.on('error', /* istanbul ignore next: unlikely and hard to test */
+        err => gz.emit('error', err))
+      body = body.pipe(gz)
+    } else if (!bodyIsPromise) {
+      body = new zlib.Gzip().end(body).concat()
+    }
+  }
+
+  const parsed = new url.URL(uri)
+
+  if (opts.query) {
+    const q = typeof opts.query === 'string' ? qs.parse(opts.query)
+      : opts.query
+
+    Object.keys(q).forEach(key => {
+      if (q[key] !== undefined) {
+        parsed.searchParams.set(key, q[key])
+      }
+    })
+    uri = url.format(parsed)
+  }
+
+  if (parsed.searchParams.get('write') === 'true' && method === 'GET') {
+    // do not cache, because this GET is fetching a rev that will be
+    // used for a subsequent PUT or DELETE, so we need to conditionally
+    // update cache.
+    opts.offline = false
+    opts.preferOffline = false
+    opts.preferOnline = true
+  }
+
+  const doFetch = async fetchBody => {
+    const p = fetch(uri, {
+      agent: opts.agent,
+      algorithms: opts.algorithms,
+      body: fetchBody,
+      cache: getCacheMode(opts),
+      cachePath: opts.cache,
+      ca: opts.ca,
+      cert: auth.cert || opts.cert,
+      headers,
+      integrity: opts.integrity,
+      key: auth.key || opts.key,
+      localAddress: opts.localAddress,
+      maxSockets: opts.maxSockets,
+      memoize: opts.memoize,
+      method: method,
+      noProxy: opts.noProxy,
+      proxy: opts.httpsProxy || opts.proxy,
+      retry: opts.retry ? opts.retry : {
+        retries: opts.fetchRetries,
+        factor: opts.fetchRetryFactor,
+        minTimeout: opts.fetchRetryMintimeout,
+        maxTimeout: opts.fetchRetryMaxtimeout,
+      },
+      strictSSL: opts.strictSSL,
+      timeout: opts.timeout || 30 * 1000,
+    }).then(res => checkResponse({
+      method,
+      uri,
+      res,
+      registry,
+      startTime,
+      auth,
+      opts,
+    }))
+
+    if (typeof opts.otpPrompt === 'function') {
+      return p.catch(async er => {
+        if (er instanceof HttpErrorAuthOTP) {
+          let otp
+          // if otp fails to complete, we fail with that failure
+          try {
+            otp = await opts.otpPrompt()
+          } catch (_) {
+            // ignore this error
+          }
+          // if no otp provided, or otpPrompt errored, throw the original HTTP error
+          if (!otp) {
+            throw er
+          }
+          return regFetch(uri, { ...opts, otp })
+        }
+        throw er
+      })
+    } else {
+      return p
+    }
+  }
+
+  return Promise.resolve(body).then(doFetch)
+}
+
+module.exports.getAuth = getAuth
+
+module.exports.json = fetchJSON
+function fetchJSON (uri, opts) {
+  return regFetch(uri, opts).then(res => res.json())
+}
+
+module.exports.json.stream = fetchJSONStream
+function fetchJSONStream (uri, jsonPath,
+  /* istanbul ignore next */ opts_ = {}) {
+  const opts = { ...defaultOpts, ...opts_ }
+  const parser = JSONStream.parse(jsonPath, opts.mapJSON)
+  regFetch(uri, opts).then(res =>
+    res.body.on('error',
+      /* istanbul ignore next: unlikely and difficult to test */
+      er => parser.emit('error', er)).pipe(parser)
+  ).catch(er => parser.emit('error', er))
+  return parser
+}
+
+module.exports.pickRegistry = pickRegistry
+function pickRegistry (spec, opts = {}) {
+  spec = npa(spec)
+  let registry = spec.scope &&
+    opts[spec.scope.replace(/^@?/, '@') + ':registry']
+
+  if (!registry && opts.scope) {
+    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
+  }
+
+  if (!registry) {
+    registry = opts.registry || defaultOpts.registry
+  }
+
+  return registry
+}
+
+function getCacheMode (opts) {
+  return opts.offline ? 'only-if-cached'
+    : opts.preferOffline ? 'force-cache'
+    : opts.preferOnline ? 'no-cache'
+    : 'default'
+}
+
+function getHeaders (uri, auth, opts) {
+  const headers = Object.assign({
+    'user-agent': opts.userAgent,
+  }, opts.headers || {})
+
+  if (opts.authType) {
+    headers['npm-auth-type'] = opts.authType
+  }
+
+  if (opts.scope) {
+    headers['npm-scope'] = opts.scope
+  }
+
+  if (opts.npmSession) {
+    headers['npm-session'] = opts.npmSession
+  }
+
+  if (opts.npmCommand) {
+    headers['npm-command'] = opts.npmCommand
+  }
+
+  // If a tarball is hosted on a different place than the manifest, only send
+  // credentials on `alwaysAuth`
+  if (auth.token) {
+    headers.authorization = `Bearer ${auth.token}`
+  } else if (auth.auth) {
+    headers.authorization = `Basic ${auth.auth}`
+  }
+
+  if (opts.otp) {
+    headers['npm-otp'] = opts.otp
+  }
+
+  return headers
+}
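
For context, a minimal usage sketch of the exports above (registry URL and package name are illustrative):

    const regFetch = require('npm-registry-fetch')

    // a scoped registry wins over the default when configured
    regFetch.pickRegistry('@myorg/pkg', {
      '@myorg:registry': 'https://registry.example.com/',
    })  // 'https://registry.example.com/'

    // fetch and parse a packument; relative paths resolve against opts.registry
    // const packument = await regFetch.json('/abbrev', { registry: 'https://registry.npmjs.org/' })
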
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js
new file mode 100644
index 0000000000000..36b05ad4a20b9
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js
@@ -0,0 +1,223 @@
+const Parser = require('jsonparse')
+const { Minipass } = require('minipass')
+
+class JSONStreamError extends Error {
+  constructor (err, caller) {
+    super(err.message)
+    Error.captureStackTrace(this, caller || this.constructor)
+  }
+
+  get name () {
+    return 'JSONStreamError'
+  }
+}
+
+const check = (x, y) =>
+  typeof x === 'string' ? String(y) === x
+  : x && typeof x.test === 'function' ? x.test(y)
+  : typeof x === 'boolean' || typeof x === 'object' ? x
+  : typeof x === 'function' ? x(y)
+  : false
+
+class JSONStream extends Minipass {
+  #count = 0
+  #ending = false
+  #footer = null
+  #header = null
+  #map = null
+  #onTokenOriginal
+  #parser
+  #path = null
+  #root = null
+
+  constructor (opts) {
+    super({
+      ...opts,
+      objectMode: true,
+    })
+
+    const parser = this.#parser = new Parser()
+    parser.onValue = value => this.#onValue(value)
+    this.#onTokenOriginal = parser.onToken
+    parser.onToken = (token, value) => this.#onToken(token, value)
+    parser.onError = er => this.#onError(er)
+
+    this.#path = typeof opts.path === 'string'
+      ? opts.path.split('.').map(e =>
+        e === '$*' ? { emitKey: true }
+        : e === '*' ? true
+        : e === '' ? { recurse: true }
+        : e)
+      : Array.isArray(opts.path) && opts.path.length ? opts.path
+      : null
+
+    if (typeof opts.map === 'function') {
+      this.#map = opts.map
+    }
+  }
+
+  #setHeaderFooter (key, value) {
+    // header has not been emitted yet
+    if (this.#header !== false) {
+      this.#header = this.#header || {}
+      this.#header[key] = value
+    }
+
+    // footer has not been emitted yet but header has
+    if (this.#footer !== false && this.#header === false) {
+      this.#footer = this.#footer || {}
+      this.#footer[key] = value
+    }
+  }
+
+  #onError (er) {
+    // error will always happen during a write() call.
+    const caller = this.#ending ? this.end : this.write
+    this.#ending = false
+    return this.emit('error', new JSONStreamError(er, caller))
+  }
+
+  #onToken (token, value) {
+    const parser = this.#parser
+    this.#onTokenOriginal.call(this.#parser, token, value)
+    if (parser.stack.length === 0) {
+      if (this.#root) {
+        const root = this.#root
+        if (!this.#path) {
+          super.write(root)
+        }
+        this.#root = null
+        this.#count = 0
+      }
+    }
+  }
+
+  #onValue (value) {
+    const parser = this.#parser
+    // the LAST onValue encountered is the root object.
+    // just overwrite it each time.
+    this.#root = value
+
+    if (!this.#path) {
+      return
+    }
+
+    let i = 0 // iterates on path
+    let j = 0 // iterates on stack
+    let emitKey = false
+    while (i < this.#path.length) {
+      const key = this.#path[i]
+      j++
+
+      if (key && !key.recurse) {
+        const c = (j === parser.stack.length) ? parser : parser.stack[j]
+        if (!c) {
+          return
+        }
+        if (!check(key, c.key)) {
+          this.#setHeaderFooter(c.key, value)
+          return
+        }
+        emitKey = !!key.emitKey
+        i++
+      } else {
+        i++
+        if (i >= this.#path.length) {
+          return
+        }
+        const nextKey = this.#path[i]
+        if (!nextKey) {
+          return
+        }
+        while (true) {
+          const c = (j === parser.stack.length) ? parser : parser.stack[j]
+          if (!c) {
+            return
+          }
+          if (check(nextKey, c.key)) {
+            i++
+            if (!Object.isFrozen(parser.stack[j])) {
+              parser.stack[j].value = null
+            }
+            break
+          } else {
+            this.#setHeaderFooter(c.key, value)
+          }
+          j++
+        }
+      }
+    }
+
+    // emit header
+    if (this.#header) {
+      const header = this.#header
+      this.#header = false
+      this.emit('header', header)
+    }
+    if (j !== parser.stack.length) {
+      return
+    }
+
+    this.#count++
+    const actualPath = parser.stack.slice(1)
+      .map(e => e.key).concat([parser.key])
+    if (value !== null && value !== undefined) {
+      const data = this.#map ? this.#map(value, actualPath) : value
+      if (data !== null && data !== undefined) {
+        const emit = emitKey ? { value: data } : data
+        if (emitKey) {
+          emit.key = parser.key
+        }
+        super.write(emit)
+      }
+    }
+
+    if (parser.value) {
+      delete parser.value[parser.key]
+    }
+
+    for (const k of parser.stack) {
+      k.value = null
+    }
+  }
+
+  write (chunk, encoding) {
+    if (typeof chunk === 'string') {
+      chunk = Buffer.from(chunk, encoding)
+    } else if (!Buffer.isBuffer(chunk)) {
+      return this.emit('error', new TypeError(
+        'Can only parse JSON from string or buffer input'))
+    }
+    this.#parser.write(chunk)
+    return this.flowing
+  }
+
+  end (chunk, encoding) {
+    this.#ending = true
+    if (chunk) {
+      this.write(chunk, encoding)
+    }
+
+    const h = this.#header
+    this.#header = null
+    const f = this.#footer
+    this.#footer = null
+    if (h) {
+      this.emit('header', h)
+    }
+    if (f) {
+      this.emit('footer', f)
+    }
+    return super.end()
+  }
+
+  static get JSONStreamError () {
+    return JSONStreamError
+  }
+
+  static parse (path, map) {
+    return new JSONStream({ path, map })
+  }
+}
+
+module.exports = JSONStream
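
A small sketch of the path-matching behaviour above (the direct lib path is an assumption; the module is internal to the package):

    const JSONStream = require('npm-registry-fetch/lib/json-stream.js')

    // emit each element under `rows`; keys seen before the first match
    // ('total' here) are delivered via the 'header' event instead
    const parser = JSONStream.parse('rows.*')
    parser.on('header', h => console.log(h))       // { total: 2 }
    parser.on('data', row => console.log(row.id))  // 'a', then 'b'
    parser.end(JSON.stringify({ total: 2, rows: [{ id: 'a' }, { id: 'b' }] }))
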
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/package.json b/node_modules/pacote/node_modules/npm-registry-fetch/package.json
new file mode 100644
index 0000000000000..a8e954cdf3c14
--- /dev/null
+++ b/node_modules/pacote/node_modules/npm-registry-fetch/package.json
@@ -0,0 +1,68 @@
+{
+  "name": "npm-registry-fetch",
+  "version": "19.0.0",
+  "description": "Fetch-based http client for use with npm registry APIs",
+  "main": "lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "npmclilint": "npmcli-lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-registry-fetch.git"
+  },
+  "keywords": [
+    "npm",
+    "registry",
+    "fetch"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/redact": "^3.0.0",
+    "jsonparse": "^1.3.1",
+    "make-fetch-happen": "^15.0.0",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^4.0.0",
+    "minizlib": "^3.0.1",
+    "npm-package-arg": "^13.0.0",
+    "proc-log": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "cacache": "^20.0.0",
+    "nock": "^13.2.4",
+    "require-inject": "^1.4.4",
+    "ssri": "^12.0.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/pacote/node_modules/path-scurry/LICENSE.md b/node_modules/pacote/node_modules/path-scurry/LICENSE.md
new file mode 100644
index 0000000000000..c5402b9577a8c
--- /dev/null
+++ b/node_modules/pacote/node_modules/path-scurry/LICENSE.md
@@ -0,0 +1,55 @@
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/pacote/node_modules/path-scurry/dist/commonjs/index.js
new file mode 100644
index 0000000000000..af3e7595f577f
--- /dev/null
+++ b/node_modules/pacote/node_modules/path-scurry/dist/commonjs/index.js
@@ -0,0 +1,2016 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
+const lru_cache_1 = require("lru-cache");
+const node_path_1 = require("node:path");
+const node_url_1 = require("node:url");
+const fs_1 = require("fs");
+const actualFS = __importStar(require("node:fs"));
+const realpathSync = fs_1.realpathSync.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+const promises_1 = require("node:fs/promises");
+const minipass_1 = require("minipass");
+const defaultFS = {
+    lstatSync: fs_1.lstatSync,
+    readdir: fs_1.readdir,
+    readdirSync: fs_1.readdirSync,
+    readlinkSync: fs_1.readlinkSync,
+    realpathSync,
+    promises: {
+        lstat: promises_1.lstat,
+        readdir: promises_1.readdir,
+        readlink: promises_1.readlink,
+        realpath: promises_1.realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+class ResolveCache extends lru_cache_1.LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+exports.ResolveCache = ResolveCache;
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+class ChildrenCache extends lru_cache_1.LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+exports.ChildrenCache = ChildrenCache;
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix()
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
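+    /**
+     * Is the path of the given type?  e.g. `isType('Directory')` is
+     * equivalent to `isDirectory()`.
+     */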
+    isType(type) {
+        return this[`is${type}`]();
+    }
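+    /**
+     * Return the type of this path as a string, e.g. 'Directory' or 'File'.
+     */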
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been the subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
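+    // add a dirent from readdir() to the children list: promote a matching
+    // provisional child if one exists, otherwise create a new known child.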
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
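+    // copy the fields from an fs.Stats object onto this Path and update the
+    // type bits, marking LSTAT_CALLED.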
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
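+    // callbacks queued while a readdirCB() is in flight, flushed once the
+    // underlying fs.readdir settles.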
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
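+    /**
+     * Return true unless the entry is known to not exist, to not be a
+     * directory, or to otherwise be unable to have child entries.
+     */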
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
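+    /**
+     * Return true if this is a known directory that has not already been
+     * walked (ie, is not in the `dirs` set) and passes the walkFilter, if
+     * one was provided.
+     */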
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+exports.PathBase = PathBase;
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return node_path_1.win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+exports.PathWin32 = PathWin32;
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+exports.PathPosix = PathPosix;
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = (0, node_url_1.fileURLToPath)(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
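+    /**
+     * Read directory entries for the provided string or Path object.
+     *
+     * Returns Path entries when `withFileTypes` is true (the default), or
+     * name strings otherwise.  Returns an empty array if the entry cannot
+     * be read as a directory.
+     */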
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
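+    /**
+     * synchronous {@link PathScurryBase.readdir}
+     */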
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
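+    /**
+     * Resolve the link target for the provided string or Path object.
+     *
+     * Returns the target Path entry when `withFileTypes` is true, or its
+     * fullpath string otherwise, and `undefined` if the entry is not a
+     * symbolic link or the readlink fails.
+     */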
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
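+    /**
+     * synchronous {@link PathScurryBase.readlink}
+     */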
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
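+    /**
+     * Resolve the real path for the provided string or Path object.
+     *
+     * Returns the resolved Path entry when `withFileTypes` is true, or its
+     * fullpath string otherwise, and `undefined` if the realpath call fails.
+     */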
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
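+    /**
+     * synchronous {@link PathScurryBase.realpath}
+     */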
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
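+    /**
+     * Asynchronously walk the directory tree from the provided entry (or the
+     * cwd), resolving to an array of all entries found.  Honors the `filter`,
+     * `walkFilter`, `follow`, and `withFileTypes` options.
+     */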
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
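+    /**
+     * synchronous {@link PathScurryBase.walk}
+     */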
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
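+    /**
+     * Asynchronously iterate over the entries found by walking from the
+     * provided entry (or the cwd).  Honors the same options as walk().
+     */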
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
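+    /**
+     * synchronous {@link PathScurryBase.iterate}
+     */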
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
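+    /**
+     * Return a Minipass stream of the entries found by walking asynchronously
+     * from the provided entry (or the cwd).  Honors the same options as
+     * walk().
+     */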
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
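+    /**
+     * synchronous {@link PathScurryBase.stream}
+     */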
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new minipass_1.Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
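+    /**
+     * Set the current working directory of this PathScurry to the provided
+     * path string or Path entry.
+     */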
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+exports.PathScurryBase = PathScurryBase;
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case-insensitive matching, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get separator as the root, and driveFromUNC will return \
+        // In that case, mount \ on the root from the cwd.
+        return node_path_1.win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+exports.PathScurryWin32 = PathScurryWin32;
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+exports.PathScurryPosix = PathScurryPosix;
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+exports.PathScurryDarwin = PathScurryDarwin;
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/pacote/node_modules/path-scurry/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/path-scurry/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/esm/index.js b/node_modules/pacote/node_modules/path-scurry/dist/esm/index.js
new file mode 100644
index 0000000000000..42be74c37ad9d
--- /dev/null
+++ b/node_modules/pacote/node_modules/path-scurry/dist/esm/index.js
@@ -0,0 +1,1981 @@
+import { LRUCache } from 'lru-cache';
+import { posix, win32 } from 'node:path';
+import { fileURLToPath } from 'node:url';
+import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
+import * as actualFS from 'node:fs';
+const realpathSync = rps.native;
+// TODO: test perf of fs/promises realpath vs realpathCB,
+// since the promises one uses realpath.native
+import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
+import { Minipass } from 'minipass';
+const defaultFS = {
+    lstatSync,
+    readdir: readdirCB,
+    readdirSync,
+    readlinkSync,
+    realpathSync,
+    promises: {
+        lstat,
+        readdir,
+        readlink,
+        realpath,
+    },
+};
+// if they just gave us require('fs') then use our default
+const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
+    defaultFS
+    : {
+        ...defaultFS,
+        ...fsOption,
+        promises: {
+            ...defaultFS.promises,
+            ...(fsOption.promises || {}),
+        },
+    };
+// turn something like //?/c:/ into c:\
+const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
+const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
+// windows paths are separated by either / or \
+const eitherSep = /[\\\/]/;
+const UNKNOWN = 0; // may not even exist, for all we know
+const IFIFO = 0b0001;
+const IFCHR = 0b0010;
+const IFDIR = 0b0100;
+const IFBLK = 0b0110;
+const IFREG = 0b1000;
+const IFLNK = 0b1010;
+const IFSOCK = 0b1100;
+const IFMT = 0b1111;
+// mask to unset low 4 bits
+const IFMT_UNKNOWN = ~IFMT;
+// set after successfully calling readdir() and getting entries.
+const READDIR_CALLED = 0b0000_0001_0000;
+// set after a successful lstat()
+const LSTAT_CALLED = 0b0000_0010_0000;
+// set if an entry (or one of its parents) is definitely not a dir
+const ENOTDIR = 0b0000_0100_0000;
+// set if an entry (or one of its parents) does not exist
+// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
+const ENOENT = 0b0000_1000_0000;
+// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
+// set if we fail to readlink
+const ENOREADLINK = 0b0001_0000_0000;
+// set if we know realpath() will fail
+const ENOREALPATH = 0b0010_0000_0000;
+const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
+const TYPEMASK = 0b0011_1111_1111;
+const entToType = (s) => s.isFile() ? IFREG
+    : s.isDirectory() ? IFDIR
+        : s.isSymbolicLink() ? IFLNK
+            : s.isCharacterDevice() ? IFCHR
+                : s.isBlockDevice() ? IFBLK
+                    : s.isSocket() ? IFSOCK
+                        : s.isFIFO() ? IFIFO
+                            : UNKNOWN;
+// normalize unicode path names
+const normalizeCache = new Map();
+const normalize = (s) => {
+    const c = normalizeCache.get(s);
+    if (c)
+        return c;
+    const n = s.normalize('NFKD');
+    normalizeCache.set(s, n);
+    return n;
+};
+const normalizeNocaseCache = new Map();
+const normalizeNocase = (s) => {
+    const c = normalizeNocaseCache.get(s);
+    if (c)
+        return c;
+    const n = normalize(s.toLowerCase());
+    normalizeNocaseCache.set(s, n);
+    return n;
+};
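+// Illustrative note, not part of the upstream file: NFKD normalization makes
+// visually identical names compare equal even when their code points differ, e.g.
+//   normalize('\u00e9') === normalize('e\u0301')  // both reduce to 'e' + combining acute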
+/**
+ * An LRUCache for storing resolved path strings or Path objects.
+ * @internal
+ */
+export class ResolveCache extends LRUCache {
+    constructor() {
+        super({ max: 256 });
+    }
+}
+// In order to prevent blowing out the js heap by allocating hundreds of
+// thousands of Path entries when walking extremely large trees, the "children"
+// in this tree are represented by storing an array of Path entries in an
+// LRUCache, indexed by the parent.  At any time, Path.children() may return an
+// empty array, indicating that it doesn't know about any of its children, and
+// thus has to rebuild that cache.  This is fine, it just means that we don't
+// benefit as much from having the cached entries, but huge directory walks
+// don't blow out the stack, and smaller ones are still as fast as possible.
+//
+// It does impose some complexity when building up the readdir data, because we
+// need to pass a reference to the children array that we started with.
+/**
+ * an LRUCache for storing child entries.
+ * @internal
+ */
+export class ChildrenCache extends LRUCache {
+    constructor(maxSize = 16 * 1024) {
+        super({
+            maxSize,
+            // parent + children
+            sizeCalculation: a => a.length + 1,
+        });
+    }
+}
+const setAsCwd = Symbol('PathScurry setAsCwd');
+/**
+ * Path objects are sort of like a super-powered
+ * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
+ *
+ * Each one represents a single filesystem entry on disk, which may or may not
+ * exist. It includes methods for reading various types of information via
+ * lstat, readlink, and readdir, and caches all information to the greatest
+ * degree possible.
+ *
+ * Note that fs operations that would normally throw will instead return an
+ * "empty" value. This is in order to prevent excessive overhead from error
+ * stack traces.
+ */
+export class PathBase {
+    /**
+     * the basename of this path
+     *
+     * **Important**: *always* test the path name against any test string
+     * using the {@link isNamed} method, and not by directly comparing this
+     * string. Otherwise, unicode path strings that the system sees as identical
+     * will not be properly treated as the same path, leading to incorrect
+     * behavior and possible security issues.
+     */
+    name;
+    /**
+     * the Path entry corresponding to the path root.
+     *
+     * @internal
+     */
+    root;
+    /**
+     * All roots found within the current PathScurry family
+     *
+     * @internal
+     */
+    roots;
+    /**
+     * a reference to the parent path, or undefined in the case of root entries
+     *
+     * @internal
+     */
+    parent;
+    /**
+     * boolean indicating whether paths are compared case-insensitively
+     * @internal
+     */
+    nocase;
+    /**
+     * boolean indicating that this path is the current working directory
+     * of the PathScurry collection that contains it.
+     */
+    isCWD = false;
+    // potential default fs override
+    #fs;
+    // Stats fields
+    #dev;
+    get dev() {
+        return this.#dev;
+    }
+    #mode;
+    get mode() {
+        return this.#mode;
+    }
+    #nlink;
+    get nlink() {
+        return this.#nlink;
+    }
+    #uid;
+    get uid() {
+        return this.#uid;
+    }
+    #gid;
+    get gid() {
+        return this.#gid;
+    }
+    #rdev;
+    get rdev() {
+        return this.#rdev;
+    }
+    #blksize;
+    get blksize() {
+        return this.#blksize;
+    }
+    #ino;
+    get ino() {
+        return this.#ino;
+    }
+    #size;
+    get size() {
+        return this.#size;
+    }
+    #blocks;
+    get blocks() {
+        return this.#blocks;
+    }
+    #atimeMs;
+    get atimeMs() {
+        return this.#atimeMs;
+    }
+    #mtimeMs;
+    get mtimeMs() {
+        return this.#mtimeMs;
+    }
+    #ctimeMs;
+    get ctimeMs() {
+        return this.#ctimeMs;
+    }
+    #birthtimeMs;
+    get birthtimeMs() {
+        return this.#birthtimeMs;
+    }
+    #atime;
+    get atime() {
+        return this.#atime;
+    }
+    #mtime;
+    get mtime() {
+        return this.#mtime;
+    }
+    #ctime;
+    get ctime() {
+        return this.#ctime;
+    }
+    #birthtime;
+    get birthtime() {
+        return this.#birthtime;
+    }
+    #matchName;
+    #depth;
+    #fullpath;
+    #fullpathPosix;
+    #relative;
+    #relativePosix;
+    #type;
+    #children;
+    #linkTarget;
+    #realpath;
+    /**
+     * This property is for compatibility with the Dirent class as of
+     * Node v20, where Dirent['parentPath'] refers to the path of the
+     * directory that was passed to readdir. For root entries, it's the path
+     * to the entry itself.
+     */
+    get parentPath() {
+        return (this.parent || this).fullpath();
+    }
+    /**
+     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
+     * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
+     */
+    get path() {
+        return this.parentPath;
+    }
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        this.name = name;
+        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
+        this.#type = type & TYPEMASK;
+        this.nocase = nocase;
+        this.roots = roots;
+        this.root = root || this;
+        this.#children = children;
+        this.#fullpath = opts.fullpath;
+        this.#relative = opts.relative;
+        this.#relativePosix = opts.relativePosix;
+        this.parent = opts.parent;
+        if (this.parent) {
+            this.#fs = this.parent.#fs;
+        }
+        else {
+            this.#fs = fsFromOption(opts.fs);
+        }
+    }
+    /**
+     * Returns the depth of the Path object from its root.
+     *
+     * For example, a path at `/foo/bar` would have a depth of 2.
+     */
+    depth() {
+        if (this.#depth !== undefined)
+            return this.#depth;
+        if (!this.parent)
+            return (this.#depth = 0);
+        return (this.#depth = this.parent.depth() + 1);
+    }
+    /**
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Get the Path object referenced by the string path, resolved from this Path
+     */
+    resolve(path) {
+        if (!path) {
+            return this;
+        }
+        const rootPath = this.getRootString(path);
+        const dir = path.substring(rootPath.length);
+        const dirParts = dir.split(this.splitSep);
+        const result = rootPath ?
+            this.getRoot(rootPath).#resolveParts(dirParts)
+            : this.#resolveParts(dirParts);
+        return result;
+    }
+    #resolveParts(dirParts) {
+        let p = this;
+        for (const part of dirParts) {
+            p = p.child(part);
+        }
+        return p;
+    }
+    /**
+     * Returns the cached children Path objects, if still available.  If they
+     * have fallen out of the cache, then returns an empty array, and resets the
+     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
+     * lookup.
+     *
+     * @internal
+     */
+    children() {
+        const cached = this.#children.get(this);
+        if (cached) {
+            return cached;
+        }
+        const children = Object.assign([], { provisional: 0 });
+        this.#children.set(this, children);
+        this.#type &= ~READDIR_CALLED;
+        return children;
+    }
+    /**
+     * Resolves a path portion and returns or creates the child Path.
+     *
+     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
+     * `'..'`.
+     *
+     * This should not be called directly.  If `pathPart` contains any path
+     * separators, it will lead to unsafe undefined behavior.
+     *
+     * Use `Path.resolve()` instead.
+     *
+     * @internal
+     */
+    child(pathPart, opts) {
+        if (pathPart === '' || pathPart === '.') {
+            return this;
+        }
+        if (pathPart === '..') {
+            return this.parent || this;
+        }
+        // find the child
+        const children = this.children();
+        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
+        for (const p of children) {
+            if (p.#matchName === name) {
+                return p;
+            }
+        }
+        // didn't find it, create provisional child, since it might not
+        // actually exist.  If we know the parent isn't a dir, then
+        // in fact it CAN'T exist.
+        const s = this.parent ? this.sep : '';
+        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
+        const pchild = this.newChild(pathPart, UNKNOWN, {
+            ...opts,
+            parent: this,
+            fullpath,
+        });
+        if (!this.canReaddir()) {
+            pchild.#type |= ENOENT;
+        }
+        // don't have to update provisional, because if we have real children,
+        // then provisional is set to children.length, otherwise a lower number
+        children.push(pchild);
+        return pchild;
+    }
+    /**
+     * The relative path from the cwd. If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpath()
+     */
+    relative() {
+        if (this.isCWD)
+            return '';
+        if (this.#relative !== undefined) {
+            return this.#relative;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relative = this.name);
+        }
+        const pv = p.relative();
+        return pv + (!pv || !p.parent ? '' : this.sep) + name;
+    }
+    /**
+     * The relative path from the cwd, using / as the path separator.
+     * If it does not share an ancestor with
+     * the cwd, then this ends up being equivalent to the fullpathPosix().
+     * On posix systems, this is identical to relative().
+     */
+    relativePosix() {
+        if (this.sep === '/')
+            return this.relative();
+        if (this.isCWD)
+            return '';
+        if (this.#relativePosix !== undefined)
+            return this.#relativePosix;
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#relativePosix = this.fullpathPosix());
+        }
+        const pv = p.relativePosix();
+        return pv + (!pv || !p.parent ? '' : '/') + name;
+    }
+    /**
+     * The fully resolved path string for this Path entry
+     */
+    fullpath() {
+        if (this.#fullpath !== undefined) {
+            return this.#fullpath;
+        }
+        const name = this.name;
+        const p = this.parent;
+        if (!p) {
+            return (this.#fullpath = this.name);
+        }
+        const pv = p.fullpath();
+        const fp = pv + (!p.parent ? '' : this.sep) + name;
+        return (this.#fullpath = fp);
+    }
+    /**
+     * On platforms other than windows, this is identical to fullpath.
+     *
+     * On windows, this is overridden to return the forward-slash form of the
+     * full UNC path.
+     */
+    fullpathPosix() {
+        if (this.#fullpathPosix !== undefined)
+            return this.#fullpathPosix;
+        if (this.sep === '/')
+            return (this.#fullpathPosix = this.fullpath());
+        if (!this.parent) {
+            const p = this.fullpath().replace(/\\/g, '/');
+            if (/^[a-z]:\//i.test(p)) {
+                return (this.#fullpathPosix = `//?/${p}`);
+            }
+            else {
+                return (this.#fullpathPosix = p);
+            }
+        }
+        const p = this.parent;
+        const pfpp = p.fullpathPosix();
+        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
+        return (this.#fullpathPosix = fpp);
+    }
+    /**
+     * Is the Path of an unknown type?
+     *
+     * Note that we might know *something* about it if there has been a previous
+     * filesystem operation, for example that it does not exist, or is not a
+     * link, or whether it has child entries.
+     */
+    isUnknown() {
+        return (this.#type & IFMT) === UNKNOWN;
+    }
+    isType(type) {
+        return this[`is${type}`]();
+    }
+    getType() {
+        return (this.isUnknown() ? 'Unknown'
+            : this.isDirectory() ? 'Directory'
+                : this.isFile() ? 'File'
+                    : this.isSymbolicLink() ? 'SymbolicLink'
+                        : this.isFIFO() ? 'FIFO'
+                            : this.isCharacterDevice() ? 'CharacterDevice'
+                                : this.isBlockDevice() ? 'BlockDevice'
+                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
+                                        : 'Unknown');
+        /* c8 ignore stop */
+    }
+    /**
+     * Is the Path a regular file?
+     */
+    isFile() {
+        return (this.#type & IFMT) === IFREG;
+    }
+    /**
+     * Is the Path a directory?
+     */
+    isDirectory() {
+        return (this.#type & IFMT) === IFDIR;
+    }
+    /**
+     * Is the path a character device?
+     */
+    isCharacterDevice() {
+        return (this.#type & IFMT) === IFCHR;
+    }
+    /**
+     * Is the path a block device?
+     */
+    isBlockDevice() {
+        return (this.#type & IFMT) === IFBLK;
+    }
+    /**
+     * Is the path a FIFO pipe?
+     */
+    isFIFO() {
+        return (this.#type & IFMT) === IFIFO;
+    }
+    /**
+     * Is the path a socket?
+     */
+    isSocket() {
+        return (this.#type & IFMT) === IFSOCK;
+    }
+    /**
+     * Is the path a symbolic link?
+     */
+    isSymbolicLink() {
+        return (this.#type & IFLNK) === IFLNK;
+    }
+    /**
+     * Return the entry if it has been the subject of a successful lstat, or
+     * undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* simply
+     * mean that we haven't called lstat on it.
+     */
+    lstatCached() {
+        return this.#type & LSTAT_CALLED ? this : undefined;
+    }
+    /**
+     * Return the cached link target if the entry has been the subject of a
+     * successful readlink, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readlink() has been called at some point.
+     */
+    readlinkCached() {
+        return this.#linkTarget;
+    }
+    /**
+     * Returns the cached realpath target if the entry has been the subject
+     * of a successful realpath, or undefined otherwise.
+     *
+     * Does not read the filesystem, so an undefined result *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * realpath() has been called at some point.
+     */
+    realpathCached() {
+        return this.#realpath;
+    }
+    /**
+     * Returns the cached child Path entries array if the entry has been the
+     * subject of a successful readdir(), or [] otherwise.
+     *
+     * Does not read the filesystem, so an empty array *could* just mean we
+     * don't have any cached data. Only use it if you are very sure that a
+     * readdir() has been called recently enough to still be valid.
+     */
+    readdirCached() {
+        const children = this.children();
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
+     * any indication that readlink will definitely fail.
+     *
+     * Returns false if the path is known to not be a symlink, if a previous
+     * readlink failed, or if the entry does not exist.
+     */
+    canReadlink() {
+        if (this.#linkTarget)
+            return true;
+        if (!this.parent)
+            return false;
+        // cases where it cannot possibly succeed
+        const ifmt = this.#type & IFMT;
+        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
+            this.#type & ENOREADLINK ||
+            this.#type & ENOENT);
+    }
+    /**
+     * Return true if readdir has previously been successfully called on this
+     * path, indicating that cachedReaddir() is likely valid.
+     */
+    calledReaddir() {
+        return !!(this.#type & READDIR_CALLED);
+    }
+    /**
+     * Returns true if the path is known to not exist. That is, a previous lstat
+     * or readdir failed to verify its existence when that would have been
+     * expected, or a parent entry was marked either enoent or enotdir.
+     */
+    isENOENT() {
+        return !!(this.#type & ENOENT);
+    }
+    /**
+     * Return true if the path is a match for the given path name.  This handles
+     * case sensitivity and unicode normalization.
+     *
+     * Note: even on case-sensitive systems, it is **not** safe to test the
+     * equality of the `.name` property to determine whether a given pathname
+     * matches, due to unicode normalization mismatches.
+     *
+     * Always use this method instead of testing the `path.name` property
+     * directly.
+     */
+    isNamed(n) {
+        return !this.nocase ?
+            this.#matchName === normalize(n)
+            : this.#matchName === normalizeNocase(n);
+    }
+    /**
+     * Return the Path object corresponding to the target of a symbolic link.
+     *
+     * If the Path is not a symbolic link, or if the readlink call fails for any
+     * reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     */
+    async readlink() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = await this.#fs.promises.readlink(this.fullpath());
+            const linkTarget = (await this.parent.realpath())?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    /**
+     * Synchronous {@link PathBase.readlink}
+     */
+    readlinkSync() {
+        const target = this.#linkTarget;
+        if (target) {
+            return target;
+        }
+        if (!this.canReadlink()) {
+            return undefined;
+        }
+        /* c8 ignore start */
+        // already covered by the canReadlink test, here for ts grumples
+        if (!this.parent) {
+            return undefined;
+        }
+        /* c8 ignore stop */
+        try {
+            const read = this.#fs.readlinkSync(this.fullpath());
+            const linkTarget = this.parent.realpathSync()?.resolve(read);
+            if (linkTarget) {
+                return (this.#linkTarget = linkTarget);
+            }
+        }
+        catch (er) {
+            this.#readlinkFail(er.code);
+            return undefined;
+        }
+    }
+    #readdirSuccess(children) {
+        // succeeded, mark readdir called bit
+        this.#type |= READDIR_CALLED;
+        // mark all remaining provisional children as ENOENT
+        for (let p = children.provisional; p < children.length; p++) {
+            const c = children[p];
+            if (c)
+                c.#markENOENT();
+        }
+    }
+    #markENOENT() {
+        // mark as UNKNOWN and ENOENT
+        if (this.#type & ENOENT)
+            return;
+        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
+        this.#markChildrenENOENT();
+    }
+    #markChildrenENOENT() {
+        // all children are provisional and do not exist
+        const children = this.children();
+        children.provisional = 0;
+        for (const p of children) {
+            p.#markENOENT();
+        }
+    }
+    #markENOREALPATH() {
+        this.#type |= ENOREALPATH;
+        this.#markENOTDIR();
+    }
+    // save the information when we know the entry is not a dir
+    #markENOTDIR() {
+        // entry is not a directory, so any children can't exist.
+        // this *should* be impossible, since any children created
+        // after it's been marked ENOTDIR should be marked ENOENT,
+        // so it won't even get to this point.
+        /* c8 ignore start */
+        if (this.#type & ENOTDIR)
+            return;
+        /* c8 ignore stop */
+        let t = this.#type;
+        // this could happen if we stat a dir, then delete it,
+        // then try to read it or one of its children.
+        if ((t & IFMT) === IFDIR)
+            t &= IFMT_UNKNOWN;
+        this.#type = t | ENOTDIR;
+        this.#markChildrenENOENT();
+    }
+    #readdirFail(code = '') {
+        // markENOTDIR and markENOENT also set provisional=0
+        if (code === 'ENOTDIR' || code === 'EPERM') {
+            this.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            this.#markENOENT();
+        }
+        else {
+            this.children().provisional = 0;
+        }
+    }
+    #lstatFail(code = '') {
+        // Windows just raises ENOENT in this case, disable for win CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR') {
+            // already know it has a parent by this point
+            const p = this.parent;
+            p.#markENOTDIR();
+        }
+        else if (code === 'ENOENT') {
+            /* c8 ignore stop */
+            this.#markENOENT();
+        }
+    }
+    #readlinkFail(code = '') {
+        let ter = this.#type;
+        ter |= ENOREADLINK;
+        if (code === 'ENOENT')
+            ter |= ENOENT;
+        // windows gets a weird error when you try to readlink a file
+        if (code === 'EINVAL' || code === 'UNKNOWN') {
+            // exists, but not a symlink, we don't know WHAT it is, so remove
+            // all IFMT bits.
+            ter &= IFMT_UNKNOWN;
+        }
+        this.#type = ter;
+        // windows just gets ENOENT in this case.  We do cover the case,
+        // just disabled because it's impossible on Windows CI
+        /* c8 ignore start */
+        if (code === 'ENOTDIR' && this.parent) {
+            this.parent.#markENOTDIR();
+        }
+        /* c8 ignore stop */
+    }
+    #readdirAddChild(e, c) {
+        return (this.#readdirMaybePromoteChild(e, c) ||
+            this.#readdirAddNewChild(e, c));
+    }
+    #readdirAddNewChild(e, c) {
+        // alloc new entry at head, so it's never provisional
+        const type = entToType(e);
+        const child = this.newChild(e.name, type, { parent: this });
+        const ifmt = child.#type & IFMT;
+        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
+            child.#type |= ENOTDIR;
+        }
+        c.unshift(child);
+        c.provisional++;
+        return child;
+    }
+    #readdirMaybePromoteChild(e, c) {
+        for (let p = c.provisional; p < c.length; p++) {
+            const pchild = c[p];
+            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
+            if (name !== pchild.#matchName) {
+                continue;
+            }
+            return this.#readdirPromoteChild(e, pchild, p, c);
+        }
+    }
+    #readdirPromoteChild(e, p, index, c) {
+        const v = p.name;
+        // retain any other flags, but set ifmt from dirent
+        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
+        // case sensitivity fixing when we learn the true name.
+        if (v !== e.name)
+            p.name = e.name;
+        // just advance provisional index (potentially off the list),
+        // otherwise we have to splice/pop it out and re-insert at head
+        if (index !== c.provisional) {
+            if (index === c.length - 1)
+                c.pop();
+            else
+                c.splice(index, 1);
+            c.unshift(p);
+        }
+        c.provisional++;
+        return p;
+    }
+    /**
+     * Call lstat() on this Path, and update all known information that can be
+     * determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    /**
+     * synchronous {@link PathBase.lstat}
+     */
+    lstatSync() {
+        if ((this.#type & ENOENT) === 0) {
+            try {
+                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
+                return this;
+            }
+            catch (er) {
+                this.#lstatFail(er.code);
+            }
+        }
+    }
+    #applyStat(st) {
+        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
+        this.#atime = atime;
+        this.#atimeMs = atimeMs;
+        this.#birthtime = birthtime;
+        this.#birthtimeMs = birthtimeMs;
+        this.#blksize = blksize;
+        this.#blocks = blocks;
+        this.#ctime = ctime;
+        this.#ctimeMs = ctimeMs;
+        this.#dev = dev;
+        this.#gid = gid;
+        this.#ino = ino;
+        this.#mode = mode;
+        this.#mtime = mtime;
+        this.#mtimeMs = mtimeMs;
+        this.#nlink = nlink;
+        this.#rdev = rdev;
+        this.#size = size;
+        this.#uid = uid;
+        const ifmt = entToType(st);
+        // retain any other flags, but set the ifmt
+        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
+        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
+            this.#type |= ENOTDIR;
+        }
+    }
+    #onReaddirCB = [];
+    #readdirCBInFlight = false;
+    #callOnReaddirCB(children) {
+        this.#readdirCBInFlight = false;
+        const cbs = this.#onReaddirCB.slice();
+        this.#onReaddirCB.length = 0;
+        cbs.forEach(cb => cb(null, children));
+    }
+    /**
+     * Standard node-style callback interface to get list of directory entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     *
+     * @param cb The callback called with (er, entries).  Note that the `er`
+     * param is somewhat extraneous, as all readdir() errors are handled and
+     * simply result in an empty set of entries being returned.
+     * @param allowZalgo Boolean indicating that immediately known results should
+     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
+     * zalgo at your peril, the dark pony lord is devious and unforgiving.
+     */
+    readdirCB(cb, allowZalgo = false) {
+        if (!this.canReaddir()) {
+            if (allowZalgo)
+                cb(null, []);
+            else
+                queueMicrotask(() => cb(null, []));
+            return;
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            const c = children.slice(0, children.provisional);
+            if (allowZalgo)
+                cb(null, c);
+            else
+                queueMicrotask(() => cb(null, c));
+            return;
+        }
+        // don't have to worry about zalgo at this point.
+        this.#onReaddirCB.push(cb);
+        if (this.#readdirCBInFlight) {
+            return;
+        }
+        this.#readdirCBInFlight = true;
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
+            if (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            else {
+                // if we didn't get an error, we always get entries.
+                //@ts-ignore
+                for (const e of entries) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            this.#callOnReaddirCB(children.slice(0, children.provisional));
+            return;
+        });
+    }
+    #asyncReaddirInFlight;
+    /**
+     * Return an array of known child entries.
+     *
+     * If the Path cannot or does not contain any children, then an empty array
+     * is returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async readdir() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        if (this.#asyncReaddirInFlight) {
+            await this.#asyncReaddirInFlight;
+        }
+        else {
+            /* c8 ignore start */
+            let resolve = () => { };
+            /* c8 ignore stop */
+            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
+            try {
+                for (const e of await this.#fs.promises.readdir(fullpath, {
+                    withFileTypes: true,
+                })) {
+                    this.#readdirAddChild(e, children);
+                }
+                this.#readdirSuccess(children);
+            }
+            catch (er) {
+                this.#readdirFail(er.code);
+                children.provisional = 0;
+            }
+            this.#asyncReaddirInFlight = undefined;
+            resolve();
+        }
+        return children.slice(0, children.provisional);
+    }
+    /**
+     * synchronous {@link PathBase.readdir}
+     */
+    readdirSync() {
+        if (!this.canReaddir()) {
+            return [];
+        }
+        const children = this.children();
+        if (this.calledReaddir()) {
+            return children.slice(0, children.provisional);
+        }
+        // else read the directory, fill up children
+        // de-provisionalize any provisional children.
+        const fullpath = this.fullpath();
+        try {
+            for (const e of this.#fs.readdirSync(fullpath, {
+                withFileTypes: true,
+            })) {
+                this.#readdirAddChild(e, children);
+            }
+            this.#readdirSuccess(children);
+        }
+        catch (er) {
+            this.#readdirFail(er.code);
+            children.provisional = 0;
+        }
+        return children.slice(0, children.provisional);
+    }
+    canReaddir() {
+        if (this.#type & ENOCHILD)
+            return false;
+        const ifmt = IFMT & this.#type;
+        // we always set ENOTDIR when setting IFMT, so should be impossible
+        /* c8 ignore start */
+        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
+            return false;
+        }
+        /* c8 ignore stop */
+        return true;
+    }
+    shouldWalk(dirs, walkFilter) {
+        return ((this.#type & IFDIR) === IFDIR &&
+            !(this.#type & ENOCHILD) &&
+            !dirs.has(this) &&
+            (!walkFilter || walkFilter(this)));
+    }
+    /**
+     * Return the Path object corresponding to path as resolved
+     * by realpath(3).
+     *
+     * If the realpath call fails for any reason, `undefined` is returned.
+     *
+     * Result is cached, and thus may be outdated if the filesystem is mutated.
+     * On success, returns a Path object.
+     */
+    async realpath() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = await this.#fs.promises.realpath(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Synchronous {@link realpath}
+     */
+    realpathSync() {
+        if (this.#realpath)
+            return this.#realpath;
+        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
+            return undefined;
+        try {
+            const rp = this.#fs.realpathSync(this.fullpath());
+            return (this.#realpath = this.resolve(rp));
+        }
+        catch (_) {
+            this.#markENOREALPATH();
+        }
+    }
+    /**
+     * Internal method to mark this Path object as the scurry cwd,
+     * called by {@link PathScurry#chdir}
+     *
+     * @internal
+     */
+    [setAsCwd](oldCwd) {
+        if (oldCwd === this)
+            return;
+        oldCwd.isCWD = false;
+        this.isCWD = true;
+        const changed = new Set([]);
+        let rp = [];
+        let p = this;
+        while (p && p.parent) {
+            changed.add(p);
+            p.#relative = rp.join(this.sep);
+            p.#relativePosix = rp.join('/');
+            p = p.parent;
+            rp.push('..');
+        }
+        // now un-memoize parents of old cwd
+        p = oldCwd;
+        while (p && p.parent && !changed.has(p)) {
+            p.#relative = undefined;
+            p.#relativePosix = undefined;
+            p = p.parent;
+        }
+    }
+}
+/**
+ * Path class used on win32 systems
+ *
+ * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
+ * as the path separator for parsing paths.
+ */
+export class PathWin32 extends PathBase {
+    /**
+     * Separator for generating path strings.
+     */
+    sep = '\\';
+    /**
+     * Separator for parsing path strings.
+     */
+    splitSep = eitherSep;
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return win32.parse(path).root;
+    }
+    /**
+     * @internal
+     */
+    getRoot(rootPath) {
+        rootPath = uncToDrive(rootPath.toUpperCase());
+        if (rootPath === this.root.name) {
+            return this.root;
+        }
+        // ok, not that one, check if it matches another we know about
+        for (const [compare, root] of Object.entries(this.roots)) {
+            if (this.sameRoot(rootPath, compare)) {
+                return (this.roots[rootPath] = root);
+            }
+        }
+        // otherwise, have to create a new one.
+        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
+    }
+    /**
+     * @internal
+     */
+    sameRoot(rootPath, compare = this.root.name) {
+        // windows can (rarely) have case-sensitive filesystem, but
+        // UNC and drive letters are always case-insensitive, and canonically
+        // represented uppercase.
+        rootPath = rootPath
+            .toUpperCase()
+            .replace(/\//g, '\\')
+            .replace(uncDriveRegexp, '$1\\');
+        return rootPath === compare;
+    }
+}
+/**
+ * Path class used on all posix systems.
+ *
+ * Uses `'/'` as the path separator.
+ */
+export class PathPosix extends PathBase {
+    /**
+     * separator for parsing path strings
+     */
+    splitSep = '/';
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    /**
+     * Do not create new Path objects directly.  They should always be accessed
+     * via the PathScurry class or other methods on the Path class.
+     *
+     * @internal
+     */
+    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
+        super(name, type, root, roots, nocase, children, opts);
+    }
+    /**
+     * @internal
+     */
+    getRootString(path) {
+        return path.startsWith('/') ? '/' : '';
+    }
+    /**
+     * @internal
+     */
+    getRoot(_rootPath) {
+        return this.root;
+    }
+    /**
+     * @internal
+     */
+    newChild(name, type = UNKNOWN, opts = {}) {
+        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
+    }
+}
+/**
+ * The base class for all PathScurry classes, providing the interface for path
+ * resolution and filesystem operations.
+ *
+ * Typically, you should *not* instantiate this class directly, but rather one
+ * of the platform-specific classes, or the exported {@link PathScurry} which
+ * defaults to the current platform.
+ */
+export class PathScurryBase {
+    /**
+     * The root Path entry for the current working directory of this Scurry
+     */
+    root;
+    /**
+     * The string path for the root of this Scurry's current working directory
+     */
+    rootPath;
+    /**
+     * A collection of all roots encountered, referenced by rootPath
+     */
+    roots;
+    /**
+     * The Path entry corresponding to this PathScurry's current working directory.
+     */
+    cwd;
+    #resolveCache;
+    #resolvePosixCache;
+    #children;
+    /**
+     * Perform path comparisons case-insensitively.
+     *
+     * Defaults true on Darwin and Windows systems, false elsewhere.
+     */
+    nocase;
+    #fs;
+    /**
+     * This class should not be instantiated directly.
+     *
+     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
+     *
+     * @internal
+     */
+    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
+        this.#fs = fsFromOption(fs);
+        if (cwd instanceof URL || cwd.startsWith('file://')) {
+            cwd = fileURLToPath(cwd);
+        }
+        // resolve and split root, and then add to the store.
+        // this is the only time we call path.resolve()
+        const cwdPath = pathImpl.resolve(cwd);
+        this.roots = Object.create(null);
+        this.rootPath = this.parseRootPath(cwdPath);
+        this.#resolveCache = new ResolveCache();
+        this.#resolvePosixCache = new ResolveCache();
+        this.#children = new ChildrenCache(childrenCacheSize);
+        const split = cwdPath.substring(this.rootPath.length).split(sep);
+        // resolve('/') leaves '', splits to [''], we don't want that.
+        if (split.length === 1 && !split[0]) {
+            split.pop();
+        }
+        /* c8 ignore start */
+        if (nocase === undefined) {
+            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
+        }
+        /* c8 ignore stop */
+        this.nocase = nocase;
+        this.root = this.newRoot(this.#fs);
+        this.roots[this.rootPath] = this.root;
+        let prev = this.root;
+        let len = split.length - 1;
+        const joinSep = pathImpl.sep;
+        let abs = this.rootPath;
+        let sawFirst = false;
+        for (const part of split) {
+            const l = len--;
+            prev = prev.child(part, {
+                relative: new Array(l).fill('..').join(joinSep),
+                relativePosix: new Array(l).fill('..').join('/'),
+                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
+            });
+            sawFirst = true;
+        }
+        this.cwd = prev;
+    }
+    /**
+     * Get the depth of a provided path, string, or the cwd
+     */
+    depth(path = this.cwd) {
+        if (typeof path === 'string') {
+            path = this.cwd.resolve(path);
+        }
+        return path.depth();
+    }
+    /**
+     * Return the cache of child entries.  Exposed so subclasses can create
+     * child Path objects in a platform-specific way.
+     *
+     * @internal
+     */
+    childrenCache() {
+        return this.#children;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolve(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolveCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpath();
+        this.#resolveCache.set(r, result);
+        return result;
+    }
+    /**
+     * Resolve one or more path strings to a resolved string, returning
+     * the posix path.  Identical to .resolve() on posix systems, but on
+     * windows will return a forward-slash separated UNC path.
+     *
+     * Same interface as require('path').resolve.
+     *
+     * Much faster than path.resolve() when called multiple times for the same
+     * path, because the resolved Path objects are cached.  Much slower
+     * otherwise.
+     */
+    resolvePosix(...paths) {
+        // first figure out the minimum number of paths we have to test
+        // we always start at cwd, but any absolutes will bump the start
+        let r = '';
+        for (let i = paths.length - 1; i >= 0; i--) {
+            const p = paths[i];
+            if (!p || p === '.')
+                continue;
+            r = r ? `${p}/${r}` : p;
+            if (this.isAbsolute(p)) {
+                break;
+            }
+        }
+        const cached = this.#resolvePosixCache.get(r);
+        if (cached !== undefined) {
+            return cached;
+        }
+        const result = this.cwd.resolve(r).fullpathPosix();
+        this.#resolvePosixCache.set(r, result);
+        return result;
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or entry
+     */
+    relative(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relative();
+    }
+    /**
+     * find the relative path from the cwd to the supplied path string or
+     * entry, using / as the path delimiter, even on Windows.
+     */
+    relativePosix(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.relativePosix();
+    }
+    /**
+     * Return the basename for the provided string or Path object
+     */
+    basename(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.name;
+    }
+    /**
+     * Return the dirname for the provided string or Path object
+     */
+    dirname(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return (entry.parent || entry).fullpath();
+    }
+    async readdir(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else {
+            const p = await entry.readdir();
+            return withFileTypes ? p : p.map(e => e.name);
+        }
+    }
+    readdirSync(entry = this.cwd, opts = {
+        withFileTypes: true,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true } = opts;
+        if (!entry.canReaddir()) {
+            return [];
+        }
+        else if (withFileTypes) {
+            return entry.readdirSync();
+        }
+        else {
+            return entry.readdirSync().map(e => e.name);
+        }
+    }
+    /**
+     * Call lstat() on the string or Path object, and update all known
+     * information that can be determined.
+     *
+     * Note that unlike `fs.lstat()`, the returned value does not contain some
+     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
+     * information is required, you will need to call `fs.lstat` yourself.
+     *
+     * If the Path refers to a nonexistent file, or if the lstat call fails for
+     * any reason, `undefined` is returned.  Otherwise the updated Path object is
+     * returned.
+     *
+     * Results are cached, and thus may be out of date if the filesystem is
+     * mutated.
+     */
+    async lstat(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstat();
+    }
+    /**
+     * synchronous {@link PathScurryBase.lstat}
+     */
+    lstatSync(entry = this.cwd) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        return entry.lstatSync();
+    }
+    async readlink(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.readlink();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    readlinkSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.readlinkSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async realpath(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = await entry.realpath();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    realpathSync(entry = this.cwd, { withFileTypes } = {
+        withFileTypes: false,
+    }) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            withFileTypes = entry.withFileTypes;
+            entry = this.cwd;
+        }
+        const e = entry.realpathSync();
+        return withFileTypes ? e : e?.fullpath();
+    }
+    async walk(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const walk = (dir, cb) => {
+            dirs.add(dir);
+            dir.readdirCB((er, entries) => {
+                /* c8 ignore start */
+                if (er) {
+                    return cb(er);
+                }
+                /* c8 ignore stop */
+                let len = entries.length;
+                if (!len)
+                    return cb();
+                const next = () => {
+                    if (--len === 0) {
+                        cb();
+                    }
+                };
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        results.push(withFileTypes ? e : e.fullpath());
+                    }
+                    if (follow && e.isSymbolicLink()) {
+                        e.realpath()
+                            .then(r => (r?.isUnknown() ? r.lstat() : r))
+                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
+                    }
+                    else {
+                        if (e.shouldWalk(dirs, walkFilter)) {
+                            walk(e, next);
+                        }
+                        else {
+                            next();
+                        }
+                    }
+                }
+            }, true); // zalgooooooo
+        };
+        const start = entry;
+        return new Promise((res, rej) => {
+            walk(start, er => {
+                /* c8 ignore start */
+                if (er)
+                    return rej(er);
+                /* c8 ignore stop */
+                res(results);
+            });
+        });
+    }
+    walkSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = [];
+        if (!filter || filter(entry)) {
+            results.push(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    results.push(withFileTypes ? e : e.fullpath());
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+        return results;
+    }
+    /**
+     * Support for `for await`
+     *
+     * Alias for {@link PathScurryBase.iterate}
+     *
+     * Note: As of Node 19, this is very slow, compared to other methods of
+     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
+     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
+     */
+    [Symbol.asyncIterator]() {
+        return this.iterate();
+    }
+    iterate(entry = this.cwd, options = {}) {
+        // iterating async over the stream is significantly more performant,
+        // especially in the warm-cache scenario, because it buffers up directory
+        // entries in the background instead of waiting for a yield for each one.
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            options = entry;
+            entry = this.cwd;
+        }
+        return this.stream(entry, options)[Symbol.asyncIterator]();
+    }
+    /**
+     * Iterating over a PathScurry performs a synchronous walk.
+     *
+     * Alias for {@link PathScurryBase.iterateSync}
+     */
+    [Symbol.iterator]() {
+        return this.iterateSync();
+    }
+    *iterateSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        if (!filter || filter(entry)) {
+            yield withFileTypes ? entry : entry.fullpath();
+        }
+        const dirs = new Set([entry]);
+        for (const dir of dirs) {
+            const entries = dir.readdirSync();
+            for (const e of entries) {
+                if (!filter || filter(e)) {
+                    yield withFileTypes ? e : e.fullpath();
+                }
+                let r = e;
+                if (e.isSymbolicLink()) {
+                    if (!(follow && (r = e.realpathSync())))
+                        continue;
+                    if (r.isUnknown())
+                        r.lstatSync();
+                }
+                if (r.shouldWalk(dirs, walkFilter)) {
+                    dirs.add(r);
+                }
+            }
+        }
+    }
+    stream(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const dirs = new Set();
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const onReaddir = (er, entries, didRealpaths = false) => {
+                    /* c8 ignore start */
+                    if (er)
+                        return results.emit('error', er);
+                    /* c8 ignore stop */
+                    if (follow && !didRealpaths) {
+                        const promises = [];
+                        for (const e of entries) {
+                            if (e.isSymbolicLink()) {
+                                promises.push(e
+                                    .realpath()
+                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
+                            }
+                        }
+                        if (promises.length) {
+                            Promise.all(promises).then(() => onReaddir(null, entries, true));
+                            return;
+                        }
+                    }
+                    for (const e of entries) {
+                        if (e && (!filter || filter(e))) {
+                            if (!results.write(withFileTypes ? e : e.fullpath())) {
+                                paused = true;
+                            }
+                        }
+                    }
+                    processing--;
+                    for (const e of entries) {
+                        const r = e.realpathCached() || e;
+                        if (r.shouldWalk(dirs, walkFilter)) {
+                            queue.push(r);
+                        }
+                    }
+                    if (paused && !results.flowing) {
+                        results.once('drain', process);
+                    }
+                    else if (!sync) {
+                        process();
+                    }
+                };
+                // zalgo containment
+                let sync = true;
+                dir.readdirCB(onReaddir, true);
+                sync = false;
+            }
+        };
+        process();
+        return results;
+    }
+    streamSync(entry = this.cwd, opts = {}) {
+        if (typeof entry === 'string') {
+            entry = this.cwd.resolve(entry);
+        }
+        else if (!(entry instanceof PathBase)) {
+            opts = entry;
+            entry = this.cwd;
+        }
+        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
+        const results = new Minipass({ objectMode: true });
+        const dirs = new Set();
+        if (!filter || filter(entry)) {
+            results.write(withFileTypes ? entry : entry.fullpath());
+        }
+        const queue = [entry];
+        let processing = 0;
+        const process = () => {
+            let paused = false;
+            while (!paused) {
+                const dir = queue.shift();
+                if (!dir) {
+                    if (processing === 0)
+                        results.end();
+                    return;
+                }
+                processing++;
+                dirs.add(dir);
+                const entries = dir.readdirSync();
+                for (const e of entries) {
+                    if (!filter || filter(e)) {
+                        if (!results.write(withFileTypes ? e : e.fullpath())) {
+                            paused = true;
+                        }
+                    }
+                }
+                processing--;
+                for (const e of entries) {
+                    let r = e;
+                    if (e.isSymbolicLink()) {
+                        if (!(follow && (r = e.realpathSync())))
+                            continue;
+                        if (r.isUnknown())
+                            r.lstatSync();
+                    }
+                    if (r.shouldWalk(dirs, walkFilter)) {
+                        queue.push(r);
+                    }
+                }
+            }
+            if (paused && !results.flowing)
+                results.once('drain', process);
+        };
+        process();
+        return results;
+    }
+    chdir(path = this.cwd) {
+        const oldCwd = this.cwd;
+        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
+        this.cwd[setAsCwd](oldCwd);
+    }
+}
+/**
+ * Windows implementation of {@link PathScurryBase}
+ *
+ * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
+ * {@link PathWin32} for Path objects.
+ */
+export class PathScurryWin32 extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '\\';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, win32, '\\', { ...opts, nocase });
+        this.nocase = nocase;
+        for (let p = this.cwd; p; p = p.parent) {
+            p.nocase = this.nocase;
+        }
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(dir) {
+        // if the path starts with a single separator, it's not a UNC, and we'll
+        // just get separator as the root, and driveFromUNC will return \
+        // In that case, mount \ on the root from the cwd.
+        return win32.parse(dir).root.toUpperCase();
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for all posix systems other than Darwin.
+ *
+ * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryPosix extends PathScurryBase {
+    /**
+     * separator for generating path strings
+     */
+    sep = '/';
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = false } = opts;
+        super(cwd, posix, '/', { ...opts, nocase });
+        this.nocase = nocase;
+    }
+    /**
+     * @internal
+     */
+    parseRootPath(_dir) {
+        return '/';
+    }
+    /**
+     * @internal
+     */
+    newRoot(fs) {
+        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
+    }
+    /**
+     * Return true if the provided path string is an absolute path
+     */
+    isAbsolute(p) {
+        return p.startsWith('/');
+    }
+}
+/**
+ * {@link PathScurryBase} implementation for Darwin (macOS) systems.
+ *
+ * Defaults to case-insensitive matching, uses `'/'` for generating path
+ * strings.
+ *
+ * Uses {@link PathPosix} for Path objects.
+ */
+export class PathScurryDarwin extends PathScurryPosix {
+    constructor(cwd = process.cwd(), opts = {}) {
+        const { nocase = true } = opts;
+        super(cwd, { ...opts, nocase });
+    }
+}
+/**
+ * Default {@link PathBase} implementation for the current platform.
+ *
+ * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
+ */
+export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
+/**
+ * Default {@link PathScurryBase} implementation for the current platform.
+ *
+ * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
+ * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
+ */
+export const PathScurry = process.platform === 'win32' ? PathScurryWin32
+    : process.platform === 'darwin' ? PathScurryDarwin
+        : PathScurryPosix;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
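
For context, a minimal usage sketch of the PathScurry walk API vendored above (illustrative only, not part of the vendored file; assumes the package's CommonJS entry point and a readable working directory):

    const { PathScurry } = require('path-scurry')

    // Synchronous walk: iterating a PathScurry instance delegates to
    // iterateSync(), which yields Path objects because withFileTypes
    // defaults to true.
    const pw = new PathScurry(process.cwd())
    for (const entry of pw) {
      console.log(entry.fullpath())
    }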
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/esm/package.json b/node_modules/pacote/node_modules/path-scurry/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/path-scurry/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/path-scurry/package.json b/node_modules/pacote/node_modules/path-scurry/package.json
new file mode 100644
index 0000000000000..c3cb39dced545
--- /dev/null
+++ b/node_modules/pacote/node_modules/path-scurry/package.json
@@ -0,0 +1,88 @@
+{
+  "name": "path-scurry",
+  "version": "2.0.0",
+  "description": "walk paths fast and efficiently",
+  "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
+  "main": "./dist/commonjs/index.js",
+  "type": "module",
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "license": "BlueOak-1.0.0",
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
+    "bench": "bash ./scripts/bench.sh"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@nodelib/fs.walk": "^2.0.0",
+    "@types/node": "^20.14.10",
+    "mkdirp": "^3.0.0",
+    "prettier": "^3.3.2",
+    "rimraf": "^5.0.8",
+    "tap": "^20.0.3",
+    "ts-node": "^10.9.2",
+    "tshy": "^2.0.1",
+    "typedoc": "^0.26.3",
+    "typescript": "^5.5.3"
+  },
+  "tap": {
+    "typecheck": true
+  },
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/path-scurry"
+  },
+  "dependencies": {
+    "lru-cache": "^11.0.0",
+    "minipass": "^7.1.2"
+  },
+  "tshy": {
+    "selfLink": false,
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "types": "./dist/commonjs/index.d.ts",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/pacote/node_modules/sigstore/LICENSE b/node_modules/pacote/node_modules/sigstore/LICENSE
new file mode 100644
index 0000000000000..e9e7c1679a09d
--- /dev/null
+++ b/node_modules/pacote/node_modules/sigstore/LICENSE
@@ -0,0 +1,202 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2023 The Sigstore Authors
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
diff --git a/node_modules/pacote/node_modules/sigstore/dist/config.js b/node_modules/pacote/node_modules/sigstore/dist/config.js
new file mode 100644
index 0000000000000..e8b2392f97f23
--- /dev/null
+++ b/node_modules/pacote/node_modules/sigstore/dist/config.js
@@ -0,0 +1,120 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
+exports.createBundleBuilder = createBundleBuilder;
+exports.createKeyFinder = createKeyFinder;
+exports.createVerificationPolicy = createVerificationPolicy;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const core_1 = require("@sigstore/core");
+const sign_1 = require("@sigstore/sign");
+const verify_1 = require("@sigstore/verify");
+exports.DEFAULT_RETRY = { retries: 2 };
+exports.DEFAULT_TIMEOUT = 5000;
+function createBundleBuilder(bundleType, options) {
+    const bundlerOptions = {
+        signer: initSigner(options),
+        witnesses: initWitnesses(options),
+    };
+    switch (bundleType) {
+        case 'messageSignature':
+            return new sign_1.MessageSignatureBundleBuilder(bundlerOptions);
+        case 'dsseEnvelope':
+            return new sign_1.DSSEBundleBuilder({
+                ...bundlerOptions,
+                certificateChain: options.legacyCompatibility,
+            });
+    }
+}
+// Translates the public KeySelector type into the KeyFinderFunc type needed by
+// the verifier.
+function createKeyFinder(keySelector) {
+    return (hint) => {
+        const key = keySelector(hint);
+        if (!key) {
+            throw new verify_1.VerificationError({
+                code: 'PUBLIC_KEY_ERROR',
+                message: `key not found: ${hint}`,
+            });
+        }
+        return {
+            publicKey: core_1.crypto.createPublicKey(key),
+            validFor: () => true,
+        };
+    };
+}
+function createVerificationPolicy(options) {
+    const policy = {};
+    const san = options.certificateIdentityEmail || options.certificateIdentityURI;
+    if (san) {
+        policy.subjectAlternativeName = san;
+    }
+    if (options.certificateIssuer) {
+        policy.extensions = { issuer: options.certificateIssuer };
+    }
+    return policy;
+}
+// Instantiate the FulcioSigner based on the supplied options.
+function initSigner(options) {
+    return new sign_1.FulcioSigner({
+        fulcioBaseURL: options.fulcioURL,
+        identityProvider: options.identityProvider || initIdentityProvider(options),
+        retry: options.retry ?? exports.DEFAULT_RETRY,
+        timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
+    });
+}
+// Instantiate an identity provider based on the supplied options. If an
+// explicit identity token is provided, use that. Otherwise, use the CI
+// context provider.
+function initIdentityProvider(options) {
+    const token = options.identityToken;
+    if (token) {
+        /* istanbul ignore next */
+        return { getToken: () => Promise.resolve(token) };
+    }
+    else {
+        return new sign_1.CIContextProvider('sigstore');
+    }
+}
+// Instantiate a collection of witnesses based on the supplied options.
+function initWitnesses(options) {
+    const witnesses = [];
+    if (isRekorEnabled(options)) {
+        witnesses.push(new sign_1.RekorWitness({
+            rekorBaseURL: options.rekorURL,
+            entryType: options.legacyCompatibility ? 'intoto' : 'dsse',
+            fetchOnConflict: false,
+            retry: options.retry ?? exports.DEFAULT_RETRY,
+            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
+        }));
+    }
+    if (isTSAEnabled(options)) {
+        witnesses.push(new sign_1.TSAWitness({
+            tsaBaseURL: options.tsaServerURL,
+            retry: options.retry ?? exports.DEFAULT_RETRY,
+            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
+        }));
+    }
+    return witnesses;
+}
+// Type assertion to ensure that Rekor is enabled
+function isRekorEnabled(options) {
+    return options.tlogUpload !== false;
+}
+// Type assertion to ensure that TSA is enabled
+function isTSAEnabled(options) {
+    return options.tsaServerURL !== undefined;
+}
diff --git a/node_modules/pacote/node_modules/sigstore/dist/index.js b/node_modules/pacote/node_modules/sigstore/dist/index.js
new file mode 100644
index 0000000000000..7f6a5cf86bbfc
--- /dev/null
+++ b/node_modules/pacote/node_modules/sigstore/dist/index.js
@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0;
+/*
+Copyright 2022 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+var bundle_1 = require("@sigstore/bundle");
+Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } });
+var sign_1 = require("@sigstore/sign");
+Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } });
+Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } });
+Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } });
+var tuf_1 = require("@sigstore/tuf");
+Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } });
+var verify_1 = require("@sigstore/verify");
+Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return verify_1.PolicyError; } });
+Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return verify_1.VerificationError; } });
+var sigstore_1 = require("./sigstore");
+Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } });
+Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } });
+Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } });
+Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } });
diff --git a/node_modules/pacote/node_modules/sigstore/dist/sigstore.js b/node_modules/pacote/node_modules/sigstore/dist/sigstore.js
new file mode 100644
index 0000000000000..cb4c66b38111b
--- /dev/null
+++ b/node_modules/pacote/node_modules/sigstore/dist/sigstore.js
@@ -0,0 +1,112 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sign = sign;
+exports.attest = attest;
+exports.verify = verify;
+exports.createVerifier = createVerifier;
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+const bundle_1 = require("@sigstore/bundle");
+const tuf = __importStar(require("@sigstore/tuf"));
+const verify_1 = require("@sigstore/verify");
+const config = __importStar(require("./config"));
+async function sign(payload, 
+/* istanbul ignore next */
+options = {}) {
+    const bundler = config.createBundleBuilder('messageSignature', options);
+    const bundle = await bundler.create({ data: payload });
+    return (0, bundle_1.bundleToJSON)(bundle);
+}
+async function attest(payload, payloadType, 
+/* istanbul ignore next */
+options = {}) {
+    const bundler = config.createBundleBuilder('dsseEnvelope', options);
+    const bundle = await bundler.create({ data: payload, type: payloadType });
+    return (0, bundle_1.bundleToJSON)(bundle);
+}
+async function verify(bundle, dataOrOptions, options) {
+    let data;
+    if (Buffer.isBuffer(dataOrOptions)) {
+        data = dataOrOptions;
+    }
+    else {
+        options = dataOrOptions;
+    }
+    return createVerifier(options).then((verifier) => verifier.verify(bundle, data));
+}
+async function createVerifier(
+/* istanbul ignore next */
+options = {}) {
+    const trustedRoot = await tuf.getTrustedRoot({
+        mirrorURL: options.tufMirrorURL,
+        rootPath: options.tufRootPath,
+        cachePath: options.tufCachePath,
+        forceCache: options.tufForceCache,
+        retry: options.retry ?? config.DEFAULT_RETRY,
+        timeout: options.timeout ?? config.DEFAULT_TIMEOUT,
+    });
+    const keyFinder = options.keySelector
+        ? config.createKeyFinder(options.keySelector)
+        : undefined;
+    const trustMaterial = (0, verify_1.toTrustMaterial)(trustedRoot, keyFinder);
+    const verifierOptions = {
+        ctlogThreshold: options.ctLogThreshold,
+        tlogThreshold: options.tlogThreshold,
+    };
+    const verifier = new verify_1.Verifier(trustMaterial, verifierOptions);
+    const policy = config.createVerificationPolicy(options);
+    return {
+        verify: (bundle, payload) => {
+            const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle);
+            const signedEntity = (0, verify_1.toSignedEntity)(deserializedBundle, payload);
+            verifier.verify(signedEntity, policy);
+            return;
+        },
+    };
+}
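
For context, a minimal sketch of the sign/verify entry points defined above (illustrative only, not part of the vendored file; assumes an ambient OIDC identity for Fulcio and network access to the public Sigstore services):

    const { sign, verify } = require('sigstore')

    async function main () {
      const payload = Buffer.from('hello, world')
      // sign() resolves to a serialized bundle (the bundleToJSON output)
      const bundle = await sign(payload)
      // verify() resolves on success and rejects (e.g. with a
      // VerificationError) when the bundle does not check out
      await verify(bundle, payload)
    }

    main()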
diff --git a/node_modules/pacote/node_modules/sigstore/package.json b/node_modules/pacote/node_modules/sigstore/package.json
new file mode 100644
index 0000000000000..b036dc787c75c
--- /dev/null
+++ b/node_modules/pacote/node_modules/sigstore/package.json
@@ -0,0 +1,47 @@
+{
+  "name": "sigstore",
+  "version": "4.0.0",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "clean": "shx rm -rf dist *.tsbuildinfo",
+    "build": "tsc --build",
+    "test": "jest"
+  },
+  "files": [
+    "dist",
+    "store"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/sigstore-js.git"
+  },
+  "bugs": {
+    "url": "https://github.com/sigstore/sigstore-js/issues"
+  },
+  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme",
+  "publishConfig": {
+    "provenance": true
+  },
+  "devDependencies": {
+    "@sigstore/rekor-types": "^4.0.0",
+    "@sigstore/jest": "^0.0.0",
+    "@sigstore/mock": "^0.11.0",
+    "@tufjs/repo-mock": "^3.0.1",
+    "@types/make-fetch-happen": "^10.0.4"
+  },
+  "dependencies": {
+    "@sigstore/bundle": "^4.0.0",
+    "@sigstore/core": "^3.0.0",
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "@sigstore/sign": "^4.0.0",
+    "@sigstore/tuf": "^4.0.0",
+    "@sigstore/verify": "^3.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/tar/LICENSE b/node_modules/pacote/node_modules/tar/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/create.js b/node_modules/pacote/node_modules/tar/dist/commonjs/create.js
new file mode 100644
index 0000000000000..3190afc48318f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/create.js
@@ -0,0 +1,83 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.create = void 0;
+const fs_minipass_1 = require("@isaacs/fs-minipass");
+const node_path_1 = __importDefault(require("node:path"));
+const list_js_1 = require("./list.js");
+const make_command_js_1 = require("./make-command.js");
+const pack_js_1 = require("./pack.js");
+const createFileSync = (opt, files) => {
+    const p = new pack_js_1.PackSync(opt);
+    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const createFile = (opt, files) => {
+    const p = new pack_js_1.Pack(opt);
+    const stream = new fs_minipass_1.WriteStream(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    const promise = new Promise((res, rej) => {
+        stream.on('error', rej);
+        stream.on('close', res);
+        p.on('error', rej);
+    });
+    addFilesAsync(p, files);
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            (0, list_js_1.list)({
+                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await (0, list_js_1.list)({
+                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => {
+                    p.add(entry);
+                },
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+const createSync = (opt, files) => {
+    const p = new pack_js_1.PackSync(opt);
+    addFilesSync(p, files);
+    return p;
+};
+const createAsync = (opt, files) => {
+    const p = new pack_js_1.Pack(opt);
+    addFilesAsync(p, files);
+    return p;
+};
+exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
+    if (!files?.length) {
+        throw new TypeError('no paths specified to add to archive');
+    }
+});
+//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js
new file mode 100644
index 0000000000000..d703a7772be3a
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CwdError = void 0;
+class CwdError extends Error {
+    path;
+    code;
+    syscall = 'chdir';
+    constructor(path, code) {
+        super(`${code}: Cannot cd into '${path}'`);
+        this.path = path;
+        this.code = code;
+    }
+    get name() {
+        return 'CwdError';
+    }
+}
+exports.CwdError = CwdError;
+//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/extract.js b/node_modules/pacote/node_modules/tar/dist/commonjs/extract.js
new file mode 100644
index 0000000000000..f848cbcbf779e
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/extract.js
@@ -0,0 +1,78 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extract = void 0;
+// tar -x
+const fsm = __importStar(require("@isaacs/fs-minipass"));
+const node_fs_1 = __importDefault(require("node:fs"));
+const list_js_1 = require("./list.js");
+const make_command_js_1 = require("./make-command.js");
+const unpack_js_1 = require("./unpack.js");
+const extractFileSync = (opt) => {
+    const u = new unpack_js_1.UnpackSync(opt);
+    const file = opt.file;
+    const stat = node_fs_1.default.statSync(file);
+    // This trades a zero-byte read() syscall for a stat
+    // However, it will usually result in less memory allocation
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const stream = new fsm.ReadStreamSync(file, {
+        readSize: readSize,
+        size: stat.size,
+    });
+    stream.pipe(u);
+};
+const extractFile = (opt, _) => {
+    const u = new unpack_js_1.Unpack(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        u.on('error', reject);
+        u.on('close', resolve);
+        // This trades a zero-byte read() syscall for a stat
+        // However, it will usually result in less memory allocation
+        node_fs_1.default.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(u);
+            }
+        });
+    });
+    return p;
+};
+exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
+    if (files?.length)
+        (0, list_js_1.filesFilter)(opt, files);
+});
+//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js
new file mode 100644
index 0000000000000..94add8f6b2231
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js
@@ -0,0 +1,29 @@
+"use strict";
+// Get the appropriate flag to use for creating files
+// We use fmap on Windows platforms for files less than
+// 512kb.  This is a fairly low limit, but avoids making
+// things slower in some cases.  Since most of what this
+// library is used for is extracting tarballs of many
+// relatively small files in npm packages and the like,
+// it can be a big boost on Windows platforms.
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getWriteFlag = void 0;
+const fs_1 = __importDefault(require("fs"));
+const platform = process.env.__FAKE_PLATFORM__ || process.platform;
+const isWindows = platform === 'win32';
+/* c8 ignore start */
+const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
+const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
+    fs_1.default.constants.UV_FS_O_FILEMAP ||
+    0;
+/* c8 ignore stop */
+const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
+const fMapLimit = 512 * 1024;
+const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
+exports.getWriteFlag = !fMapEnabled ?
+    () => 'w'
+    : (size) => (size < fMapLimit ? fMapFlag : 'w');
+//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/header.js b/node_modules/pacote/node_modules/tar/dist/commonjs/header.js
new file mode 100644
index 0000000000000..b3a48037b849a
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/header.js
@@ -0,0 +1,306 @@
+"use strict";
+// parse a 512-byte header block to a data object, or vice-versa
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Header = void 0;
+const node_path_1 = require("node:path");
+const large = __importStar(require("./large-numbers.js"));
+const types = __importStar(require("./types.js"));
+class Header {
+    cksumValid = false;
+    needPax = false;
+    nullBlock = false;
+    block;
+    path;
+    mode;
+    uid;
+    gid;
+    size;
+    cksum;
+    #type = 'Unsupported';
+    linkpath;
+    uname;
+    gname;
+    devmaj = 0;
+    devmin = 0;
+    atime;
+    ctime;
+    mtime;
+    charset;
+    comment;
+    constructor(data, off = 0, ex, gex) {
+        if (Buffer.isBuffer(data)) {
+            this.decode(data, off || 0, ex, gex);
+        }
+        else if (data) {
+            this.#slurp(data);
+        }
+    }
+    decode(buf, off, ex, gex) {
+        if (!off) {
+            off = 0;
+        }
+        if (!buf || !(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        this.path = decString(buf, off, 100);
+        this.mode = decNumber(buf, off + 100, 8);
+        this.uid = decNumber(buf, off + 108, 8);
+        this.gid = decNumber(buf, off + 116, 8);
+        this.size = decNumber(buf, off + 124, 12);
+        this.mtime = decDate(buf, off + 136, 12);
+        this.cksum = decNumber(buf, off + 148, 12);
+        // if we have extended or global extended headers, apply them now
+        // See https://github.com/npm/node-tar/pull/187
+        // Apply global before local, so it overrides
+        if (gex)
+            this.#slurp(gex, true);
+        if (ex)
+            this.#slurp(ex);
+        // old tar versions marked dirs as a file with a trailing /
+        const t = decString(buf, off + 156, 1);
+        if (types.isCode(t)) {
+            this.#type = t || '0';
+        }
+        if (this.#type === '0' && this.path.slice(-1) === '/') {
+            this.#type = '5';
+        }
+        // tar implementations sometimes incorrectly put the stat(dir).size
+        // as the size in the tarball, even though Directory entries are
+        // not able to have any body at all.  In the very rare chance that
+        // it actually DOES have a body, we weren't going to do anything with
+        // it anyway, and it'll just be a warning about an invalid header.
+        if (this.#type === '5') {
+            this.size = 0;
+        }
+        this.linkpath = decString(buf, off + 157, 100);
+        if (buf.subarray(off + 257, off + 265).toString() ===
+            'ustar\u000000') {
+            this.uname = decString(buf, off + 265, 32);
+            this.gname = decString(buf, off + 297, 32);
+            /* c8 ignore start */
+            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
+            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+            /* c8 ignore stop */
+            if (buf[off + 475] !== 0) {
+                // definitely a prefix, definitely >130 chars.
+                const prefix = decString(buf, off + 345, 155);
+                this.path = prefix + '/' + this.path;
+            }
+            else {
+                const prefix = decString(buf, off + 345, 130);
+                if (prefix) {
+                    this.path = prefix + '/' + this.path;
+                }
+                this.atime = decDate(buf, off + 476, 12);
+                this.ctime = decDate(buf, off + 488, 12);
+            }
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksumValid = sum === this.cksum;
+        if (this.cksum === undefined && sum === 8 * 0x20) {
+            this.nullBlock = true;
+        }
+    }
+    #slurp(ex, gex = false) {
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex) ||
+                (k === 'linkpath' && gex) ||
+                k === 'global');
+        })));
+    }
+    encode(buf, off = 0) {
+        if (!buf) {
+            buf = this.block = Buffer.alloc(512);
+        }
+        if (this.#type === 'Unsupported') {
+            this.#type = '0';
+        }
+        if (!(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        const prefixSize = this.ctime || this.atime ? 130 : 155;
+        const split = splitPrefix(this.path || '', prefixSize);
+        const path = split[0];
+        const prefix = split[1];
+        this.needPax = !!split[2];
+        this.needPax = encString(buf, off, 100, path) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 124, 12, this.size) || this.needPax;
+        this.needPax =
+            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
+        buf[off + 156] = this.#type.charCodeAt(0);
+        this.needPax =
+            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
+        buf.write('ustar\u000000', off + 257, 8);
+        this.needPax =
+            encString(buf, off + 265, 32, this.uname) || this.needPax;
+        this.needPax =
+            encString(buf, off + 297, 32, this.gname) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
+        this.needPax =
+            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
+        if (buf[off + 475] !== 0) {
+            this.needPax =
+                encString(buf, off + 345, 155, prefix) || this.needPax;
+        }
+        else {
+            this.needPax =
+                encString(buf, off + 345, 130, prefix) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 476, 12, this.atime) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksum = sum;
+        encNumber(buf, off + 148, 8, this.cksum);
+        this.cksumValid = true;
+        return this.needPax;
+    }
+    get type() {
+        return (this.#type === 'Unsupported' ?
+            this.#type
+            : types.name.get(this.#type));
+    }
+    get typeKey() {
+        return this.#type;
+    }
+    set type(type) {
+        const c = String(types.code.get(type));
+        if (types.isCode(c) || c === 'Unsupported') {
+            this.#type = c;
+        }
+        else if (types.isCode(type)) {
+            this.#type = type;
+        }
+        else {
+            throw new TypeError('invalid entry type: ' + type);
+        }
+    }
+}
+exports.Header = Header;
+const splitPrefix = (p, prefixSize) => {
+    const pathSize = 100;
+    let pp = p;
+    let prefix = '';
+    let ret = undefined;
+    const root = node_path_1.posix.parse(p).root || '.';
+    if (Buffer.byteLength(pp) < pathSize) {
+        ret = [pp, prefix, false];
+    }
+    else {
+        // first set prefix to the dir, and path to the base
+        prefix = node_path_1.posix.dirname(pp);
+        pp = node_path_1.posix.basename(pp);
+        do {
+            if (Buffer.byteLength(pp) <= pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // both fit!
+                ret = [pp, prefix, false];
+            }
+            else if (Buffer.byteLength(pp) > pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // prefix fits in prefix, but path doesn't fit in path
+                ret = [pp.slice(0, pathSize - 1), prefix, true];
+            }
+            else {
+                // make path take a bit from prefix
+                pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp);
+                prefix = node_path_1.posix.dirname(prefix);
+            }
+        } while (prefix !== root && ret === undefined);
+        // at this point, found no resolution, just truncate
+        if (!ret) {
+            ret = [p.slice(0, pathSize - 1), '', true];
+        }
+    }
+    return ret;
+};
+const decString = (buf, off, size) => buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*/, '');
+const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
+const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
+const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
+    large.parse(buf.subarray(off, off + size))
+    : decSmallNumber(buf, off, size);
+const nanUndef = (value) => (isNaN(value) ? undefined : value);
+const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*$/, '')
+    .trim(), 8));
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+    12: 0o77777777777,
+    8: 0o7777777,
+};
+const encNumber = (buf, off, size, num) => num === undefined ? false
+    : num > MAXNUM[size] || num < 0 ?
+        (large.encode(num, buf.subarray(off, off + size)), true)
+        : (encSmallNumber(buf, off, size, num), false);
+const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
+const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
+const padOctal = (str, size) => (str.length === size - 1 ?
+    str
+    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
+const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0');
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
+    str.length !== Buffer.byteLength(str) || str.length > size));
+//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/index.js b/node_modules/pacote/node_modules/tar/dist/commonjs/index.js
new file mode 100644
index 0000000000000..e93ed5ad54aa6
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/index.js
@@ -0,0 +1,54 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
+__exportStar(require("./create.js"), exports);
+var create_js_1 = require("./create.js");
+Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } });
+__exportStar(require("./extract.js"), exports);
+var extract_js_1 = require("./extract.js");
+Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } });
+__exportStar(require("./header.js"), exports);
+__exportStar(require("./list.js"), exports);
+var list_js_1 = require("./list.js");
+Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } });
+// classes
+__exportStar(require("./pack.js"), exports);
+__exportStar(require("./parse.js"), exports);
+__exportStar(require("./pax.js"), exports);
+__exportStar(require("./read-entry.js"), exports);
+__exportStar(require("./replace.js"), exports);
+var replace_js_1 = require("./replace.js");
+Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } });
+exports.types = __importStar(require("./types.js"));
+__exportStar(require("./unpack.js"), exports);
+__exportStar(require("./update.js"), exports);
+var update_js_1 = require("./update.js");
+Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } });
+__exportStar(require("./write-entry.js"), exports);
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js
new file mode 100644
index 0000000000000..5b07aa7f71b48
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js
@@ -0,0 +1,99 @@
+"use strict";
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive.
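+// For example (illustrative): 8589934592 (one past the 11-digit octal limit)
+// is written as 0x80 followed by the value's big-endian bytes, while -1 is
+// written as 0xff followed by all 0xff bytes (two's complement).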
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parse = exports.encode = void 0;
+const encode = (num, buf) => {
+    if (!Number.isSafeInteger(num)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('cannot encode number outside of javascript safe integer range');
+    }
+    else if (num < 0) {
+        encodeNegative(num, buf);
+    }
+    else {
+        encodePositive(num, buf);
+    }
+    return buf;
+};
+exports.encode = encode;
+const encodePositive = (num, buf) => {
+    buf[0] = 0x80;
+    for (var i = buf.length; i > 1; i--) {
+        buf[i - 1] = num & 0xff;
+        num = Math.floor(num / 0x100);
+    }
+};
+const encodeNegative = (num, buf) => {
+    buf[0] = 0xff;
+    var flipped = false;
+    num = num * -1;
+    for (var i = buf.length; i > 1; i--) {
+        var byte = num & 0xff;
+        num = Math.floor(num / 0x100);
+        if (flipped) {
+            buf[i - 1] = onesComp(byte);
+        }
+        else if (byte === 0) {
+            buf[i - 1] = 0;
+        }
+        else {
+            flipped = true;
+            buf[i - 1] = twosComp(byte);
+        }
+    }
+};
+const parse = (buf) => {
+    const pre = buf[0];
+    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
+        : pre === 0xff ? twos(buf)
+            : null;
+    if (value === null) {
+        throw Error('invalid base256 encoding');
+    }
+    if (!Number.isSafeInteger(value)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('parsed number outside of javascript safe integer range');
+    }
+    return value;
+};
+exports.parse = parse;
+const twos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    var flipped = false;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        var f;
+        if (flipped) {
+            f = onesComp(byte);
+        }
+        else if (byte === 0) {
+            f = byte;
+        }
+        else {
+            flipped = true;
+            f = twosComp(byte);
+        }
+        if (f !== 0) {
+            sum -= f * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const pos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        if (byte !== 0) {
+            sum += byte * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const onesComp = (byte) => (0xff ^ byte) & 0xff;
+const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
+//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/list.js b/node_modules/pacote/node_modules/tar/dist/commonjs/list.js
new file mode 100644
index 0000000000000..3cd34bb4bad48
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/list.js
@@ -0,0 +1,136 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.list = exports.filesFilter = void 0;
+// tar -t
+const fsm = __importStar(require("@isaacs/fs-minipass"));
+const node_fs_1 = __importDefault(require("node:fs"));
+const path_1 = require("path");
+const make_command_js_1 = require("./make-command.js");
+const parse_js_1 = require("./parse.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const onReadEntryFunction = (opt) => {
+    const onReadEntry = opt.onReadEntry;
+    opt.onReadEntry =
+        onReadEntry ?
+            e => {
+                onReadEntry(e);
+                e.resume();
+            }
+            : e => e.resume();
+};
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+    const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true]));
+    const filter = opt.filter;
+    const mapHas = (file, r = '') => {
+        const root = r || (0, path_1.parse)(file).root || '.';
+        let ret;
+        if (file === root)
+            ret = false;
+        else {
+            const m = map.get(file);
+            if (m !== undefined) {
+                ret = m;
+            }
+            else {
+                ret = mapHas((0, path_1.dirname)(file), root);
+            }
+        }
+        map.set(file, ret);
+        return ret;
+    };
+    opt.filter =
+        filter ?
+            (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file))
+            : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file));
+};
+exports.filesFilter = filesFilter;
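+// Illustrative: filesFilter(opt, ['lib']) lets 'lib', 'lib/a.js', and
+// 'lib/sub/b.js' through while rejecting entries such as 'README.md',
+// because mapHas() walks up dirname() until it hits a listed path or the root.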
+const listFileSync = (opt) => {
+    const p = new parse_js_1.Parser(opt);
+    const file = opt.file;
+    let fd;
+    try {
+        const stat = node_fs_1.default.statSync(file);
+        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+        if (stat.size < readSize) {
+            p.end(node_fs_1.default.readFileSync(file));
+        }
+        else {
+            let pos = 0;
+            const buf = Buffer.allocUnsafe(readSize);
+            fd = node_fs_1.default.openSync(file, 'r');
+            while (pos < stat.size) {
+                const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
+                pos += bytesRead;
+                p.write(buf.subarray(0, bytesRead));
+            }
+            p.end();
+        }
+    }
+    finally {
+        if (typeof fd === 'number') {
+            try {
+                node_fs_1.default.closeSync(fd);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+    }
+};
+const listFile = (opt, _files) => {
+    const parse = new parse_js_1.Parser(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        parse.on('error', reject);
+        parse.on('end', resolve);
+        node_fs_1.default.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(parse);
+            }
+        });
+    });
+    return p;
+};
+exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => {
+    if (files?.length)
+        (0, exports.filesFilter)(opt, files);
+    if (!opt.noResume)
+        onReadEntryFunction(opt);
+});
+//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js b/node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js
new file mode 100644
index 0000000000000..1814319e78bc6
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.makeCommand = void 0;
+const options_js_1 = require("./options.js");
+const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
+    return Object.assign((opt_ = [], entries, cb) => {
+        if (Array.isArray(opt_)) {
+            entries = opt_;
+            opt_ = {};
+        }
+        if (typeof entries === 'function') {
+            cb = entries;
+            entries = undefined;
+        }
+        if (!entries) {
+            entries = [];
+        }
+        else {
+            entries = Array.from(entries);
+        }
+        const opt = (0, options_js_1.dealias)(opt_);
+        validate?.(opt, entries);
+        if ((0, options_js_1.isSyncFile)(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncFile(opt, entries);
+        }
+        else if ((0, options_js_1.isAsyncFile)(opt)) {
+            const p = asyncFile(opt, entries);
+            // weirdness to make TS happy
+            const c = cb ? cb : undefined;
+            return c ? p.then(() => c(), c) : p;
+        }
+        else if ((0, options_js_1.isSyncNoFile)(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncNoFile(opt, entries);
+        }
+        else if ((0, options_js_1.isAsyncNoFile)(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback only supported with file option');
+            }
+            return asyncNoFile(opt, entries);
+            /* c8 ignore start */
+        }
+        else {
+            throw new Error('impossible options??');
+        }
+        /* c8 ignore stop */
+    }, {
+        syncFile,
+        asyncFile,
+        syncNoFile,
+        asyncNoFile,
+        validate,
+    });
+};
+exports.makeCommand = makeCommand;
+//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js
new file mode 100644
index 0000000000000..2b13ecbab6723
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js
@@ -0,0 +1,209 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirSync = exports.mkdir = void 0;
+const chownr_1 = require("chownr");
+const fs_1 = __importDefault(require("fs"));
+const mkdirp_1 = require("mkdirp");
+const node_path_1 = __importDefault(require("node:path"));
+const cwd_error_js_1 = require("./cwd-error.js");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+const symlink_error_js_1 = require("./symlink-error.js");
+const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
+const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
+const checkCwd = (dir, cb) => {
+    fs_1.default.stat(dir, (er, st) => {
+        if (er || !st.isDirectory()) {
+            er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
+        }
+        cb(er);
+    });
+};
+/**
+ * Wrapper around mkdirp for tar's needs.
+ *
+ * The main purpose is to avoid creating directories if we know that
+ * they already exist (and track which ones exist for this purpose),
+ * and prevent entries from being extracted into symlinked folders,
+ * if `preservePaths` is not set.
+ */
+const mkdir = (dir, opt, cb) => {
+    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o0700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
+    const done = (er, created) => {
+        if (er) {
+            cb(er);
+        }
+        else {
+            cSet(cache, dir, true);
+            if (created && doChown) {
+                (0, chownr_1.chownr)(created, uid, gid, er => done(er));
+            }
+            else if (needChmod) {
+                fs_1.default.chmod(dir, mode, cb);
+            }
+            else {
+                cb();
+            }
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        return checkCwd(dir, done);
+    }
+    if (preserve) {
+        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
+        done);
+    }
+    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
+    const parts = sub.split('/');
+    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
+};
+exports.mkdir = mkdir;
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+    if (!parts.length) {
+        return cb(null, created);
+    }
+    const p = parts.shift();
+    const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
+    if (cGet(cache, part)) {
+        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+};
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
+    if (er) {
+        fs_1.default.lstat(part, (statEr, st) => {
+            if (statEr) {
+                statEr.path =
+                    statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
+                cb(statEr);
+            }
+            else if (st.isDirectory()) {
+                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+            }
+            else if (unlink) {
+                fs_1.default.unlink(part, er => {
+                    if (er) {
+                        return cb(er);
+                    }
+                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+                });
+            }
+            else if (st.isSymbolicLink()) {
+                return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
+            }
+            else {
+                cb(er);
+            }
+        });
+    }
+    else {
+        created = created || part;
+        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+};
+const checkCwdSync = (dir) => {
+    let ok = false;
+    let code = undefined;
+    try {
+        ok = fs_1.default.statSync(dir).isDirectory();
+    }
+    catch (er) {
+        code = er?.code;
+    }
+    finally {
+        if (!ok) {
+            throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
+        }
+    }
+};
+const mkdirSync = (dir, opt) => {
+    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
+    const done = (created) => {
+        cSet(cache, dir, true);
+        if (created && doChown) {
+            (0, chownr_1.chownrSync)(created, uid, gid);
+        }
+        if (needChmod) {
+            fs_1.default.chmodSync(dir, mode);
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        checkCwdSync(cwd);
+        return done();
+    }
+    if (preserve) {
+        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
+    }
+    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
+    const parts = sub.split('/');
+    let created = undefined;
+    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
+        part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
+        if (cGet(cache, part)) {
+            continue;
+        }
+        try {
+            fs_1.default.mkdirSync(part, mode);
+            created = created || part;
+            cSet(cache, part, true);
+        }
+        catch (er) {
+            const st = fs_1.default.lstatSync(part);
+            if (st.isDirectory()) {
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (unlink) {
+                fs_1.default.unlinkSync(part);
+                fs_1.default.mkdirSync(part, mode);
+                created = created || part;
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (st.isSymbolicLink()) {
+                return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
+            }
+        }
+    }
+    return done(created);
+};
+exports.mkdirSync = mkdirSync;
+//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js
new file mode 100644
index 0000000000000..49dd727961d29
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.modeFix = void 0;
+const modeFix = (mode, isDir, portable) => {
+    mode &= 0o7777;
+    // in portable mode, use the minimum reasonable umask
+    // if this system creates files with 0o664 by default
+    // (as some linux distros do), then we'll write the
+    // archive with 0o644 instead.  Also, don't ever create
+    // a file that is not readable/writable by the owner.
+    if (portable) {
+        mode = (mode | 0o600) & ~0o22;
+    }
+    // if dirs are readable, then they should be listable
+    if (isDir) {
+        if (mode & 0o400) {
+            mode |= 0o100;
+        }
+        if (mode & 0o40) {
+            mode |= 0o10;
+        }
+        if (mode & 0o4) {
+            mode |= 0o1;
+        }
+    }
+    return mode;
+};
+exports.modeFix = modeFix;
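+// Illustrative: modeFix(0o664, false, true) yields 0o644 (portable mode
+// strips group/other write bits), and modeFix(0o600, true, false) yields
+// 0o700 (a readable directory is made listable).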
+//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js
new file mode 100644
index 0000000000000..2f08ce46d98c4
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizeUnicode = void 0;
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+const normalizeCache = Object.create(null);
+const { hasOwnProperty } = Object.prototype;
+const normalizeUnicode = (s) => {
+    if (!hasOwnProperty.call(normalizeCache, s)) {
+        normalizeCache[s] = s.normalize('NFD');
+    }
+    return normalizeCache[s];
+};
+exports.normalizeUnicode = normalizeUnicode;
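+// Illustrative: normalizeUnicode('café') returns the NFD form, where the
+// accented character is decomposed into a base letter plus a combining mark;
+// repeated calls for the same string are served from normalizeCache.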
+//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js
new file mode 100644
index 0000000000000..b0c7aaa9f2d17
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js
@@ -0,0 +1,12 @@
+"use strict";
+// on windows, either \ or / are valid directory separators.
+// on unix, \ is a valid character in filenames.
+// so, on windows, and only on windows, we replace all \ chars with /,
+// so that we can use / as our one and only directory separator char.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizeWindowsPath = void 0;
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+exports.normalizeWindowsPath = platform !== 'win32' ?
+    (p) => p
+    : (p) => p && p.replace(/\\/g, '/');
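+// Illustrative: on win32, normalizeWindowsPath('C:\\some\\dir') returns
+// 'C:/some/dir'; on other platforms the input is returned unchanged.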
+//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/options.js b/node_modules/pacote/node_modules/tar/dist/commonjs/options.js
new file mode 100644
index 0000000000000..4cd06505bc72b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/options.js
@@ -0,0 +1,66 @@
+"use strict";
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
+const argmap = new Map([
+    ['C', 'cwd'],
+    ['f', 'file'],
+    ['z', 'gzip'],
+    ['P', 'preservePaths'],
+    ['U', 'unlink'],
+    ['strip-components', 'strip'],
+    ['stripComponents', 'strip'],
+    ['keep-newer', 'newer'],
+    ['keepNewer', 'newer'],
+    ['keep-newer-files', 'newer'],
+    ['keepNewerFiles', 'newer'],
+    ['k', 'keep'],
+    ['keep-existing', 'keep'],
+    ['keepExisting', 'keep'],
+    ['m', 'noMtime'],
+    ['no-mtime', 'noMtime'],
+    ['p', 'preserveOwner'],
+    ['L', 'follow'],
+    ['h', 'follow'],
+    ['onentry', 'onReadEntry'],
+]);
+const isSyncFile = (o) => !!o.sync && !!o.file;
+exports.isSyncFile = isSyncFile;
+const isAsyncFile = (o) => !o.sync && !!o.file;
+exports.isAsyncFile = isAsyncFile;
+const isSyncNoFile = (o) => !!o.sync && !o.file;
+exports.isSyncNoFile = isSyncNoFile;
+const isAsyncNoFile = (o) => !o.sync && !o.file;
+exports.isAsyncNoFile = isAsyncNoFile;
+const isSync = (o) => !!o.sync;
+exports.isSync = isSync;
+const isAsync = (o) => !o.sync;
+exports.isAsync = isAsync;
+const isFile = (o) => !!o.file;
+exports.isFile = isFile;
+const isNoFile = (o) => !o.file;
+exports.isNoFile = isNoFile;
+const dealiasKey = (k) => {
+    const d = argmap.get(k);
+    if (d)
+        return d;
+    return k;
+};
+const dealias = (opt = {}) => {
+    if (!opt)
+        return {};
+    const result = {};
+    for (const [key, v] of Object.entries(opt)) {
+        // TS doesn't know that aliases are going to always be the same type
+        const k = dealiasKey(key);
+        result[k] = v;
+    }
+    // affordance for deprecated noChmod -> chmod
+    if (result.chmod === undefined && result.noChmod === false) {
+        result.chmod = true;
+    }
+    delete result.noChmod;
+    return result;
+};
+exports.dealias = dealias;
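+// Illustrative: dealias({ C: '/tmp', f: 'out.tar', z: true }) returns
+// { cwd: '/tmp', file: 'out.tar', gzip: true }.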
+//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/pack.js b/node_modules/pacote/node_modules/tar/dist/commonjs/pack.js
new file mode 100644
index 0000000000000..303e93063c2db
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/pack.js
@@ -0,0 +1,477 @@
+"use strict";
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
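+// A minimal sketch (assuming node:fs for the output stream):
+//   const fs = require('node:fs')
+//   new Pack({ cwd: '/some/project', gzip: true })
+//     .add('src')
+//     .end()
+//     .pipe(fs.createWriteStream('out.tgz'))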
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PackSync = exports.Pack = exports.PackJob = void 0;
+const fs_1 = __importDefault(require("fs"));
+const write_entry_js_1 = require("./write-entry.js");
+class PackJob {
+    path;
+    absolute;
+    entry;
+    stat;
+    readdir;
+    pending = false;
+    ignore = false;
+    piped = false;
+    constructor(path, absolute) {
+        this.path = path || './';
+        this.absolute = absolute;
+    }
+}
+exports.PackJob = PackJob;
+const minipass_1 = require("minipass");
+const zlib = __importStar(require("minizlib"));
+const yallist_1 = require("yallist");
+const read_entry_js_1 = require("./read-entry.js");
+const warn_method_js_1 = require("./warn-method.js");
+const EOF = Buffer.alloc(1024);
+const ONSTAT = Symbol('onStat');
+const ENDED = Symbol('ended');
+const QUEUE = Symbol('queue');
+const CURRENT = Symbol('current');
+const PROCESS = Symbol('process');
+const PROCESSING = Symbol('processing');
+const PROCESSJOB = Symbol('processJob');
+const JOBS = Symbol('jobs');
+const JOBDONE = Symbol('jobDone');
+const ADDFSENTRY = Symbol('addFSEntry');
+const ADDTARENTRY = Symbol('addTarEntry');
+const STAT = Symbol('stat');
+const READDIR = Symbol('readdir');
+const ONREADDIR = Symbol('onreaddir');
+const PIPE = Symbol('pipe');
+const ENTRY = Symbol('entry');
+const ENTRYOPT = Symbol('entryOpt');
+const WRITEENTRYCLASS = Symbol('writeEntryClass');
+const WRITE = Symbol('write');
+const ONDRAIN = Symbol('ondrain');
+const path_1 = __importDefault(require("path"));
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+class Pack extends minipass_1.Minipass {
+    opt;
+    cwd;
+    maxReadSize;
+    preservePaths;
+    strict;
+    noPax;
+    prefix;
+    linkCache;
+    statCache;
+    file;
+    portable;
+    zip;
+    readdirCache;
+    noDirRecurse;
+    follow;
+    noMtime;
+    mtime;
+    filter;
+    jobs;
+    [WRITEENTRYCLASS];
+    onWriteEntry;
+    [QUEUE];
+    [JOBS] = 0;
+    [PROCESSING] = false;
+    [ENDED] = false;
+    constructor(opt = {}) {
+        //@ts-ignore
+        super();
+        this.opt = opt;
+        this.file = opt.file || '';
+        this.cwd = opt.cwd || process.cwd();
+        this.maxReadSize = opt.maxReadSize;
+        this.preservePaths = !!opt.preservePaths;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.readdirCache = opt.readdirCache || new Map();
+        this.onWriteEntry = opt.onWriteEntry;
+        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        this.portable = !!opt.portable;
+        if (opt.gzip || opt.brotli) {
+            if (opt.gzip && opt.brotli) {
+                throw new TypeError('gzip and brotli are mutually exclusive');
+            }
+            if (opt.gzip) {
+                if (typeof opt.gzip !== 'object') {
+                    opt.gzip = {};
+                }
+                if (this.portable) {
+                    opt.gzip.portable = true;
+                }
+                this.zip = new zlib.Gzip(opt.gzip);
+            }
+            if (opt.brotli) {
+                if (typeof opt.brotli !== 'object') {
+                    opt.brotli = {};
+                }
+                this.zip = new zlib.BrotliCompress(opt.brotli);
+            }
+            /* c8 ignore next */
+            if (!this.zip)
+                throw new Error('impossible');
+            const zip = this.zip;
+            zip.on('data', chunk => super.write(chunk));
+            zip.on('end', () => super.end());
+            zip.on('drain', () => this[ONDRAIN]());
+            this.on('resume', () => zip.resume());
+        }
+        else {
+            this.on('drain', this[ONDRAIN]);
+        }
+        this.noDirRecurse = !!opt.noDirRecurse;
+        this.follow = !!opt.follow;
+        this.noMtime = !!opt.noMtime;
+        if (opt.mtime)
+            this.mtime = opt.mtime;
+        this.filter =
+            typeof opt.filter === 'function' ? opt.filter : () => true;
+        this[QUEUE] = new yallist_1.Yallist();
+        this[JOBS] = 0;
+        this.jobs = Number(opt.jobs) || 4;
+        this[PROCESSING] = false;
+        this[ENDED] = false;
+    }
+    [WRITE](chunk) {
+        return super.write(chunk);
+    }
+    add(path) {
+        this.write(path);
+        return this;
+    }
+    end(path, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof path === 'function') {
+            cb = path;
+            path = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (path) {
+            this.add(path);
+        }
+        this[ENDED] = true;
+        this[PROCESS]();
+        /* c8 ignore next */
+        if (cb)
+            cb();
+        return this;
+    }
+    write(path) {
+        if (this[ENDED]) {
+            throw new Error('write after end');
+        }
+        if (path instanceof read_entry_js_1.ReadEntry) {
+            this[ADDTARENTRY](path);
+        }
+        else {
+            this[ADDFSENTRY](path);
+        }
+        return this.flowing;
+    }
+    [ADDTARENTRY](p) {
+        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
+        // in this case, we don't have to wait for the stat
+        if (!this.filter(p.path, p)) {
+            p.resume();
+        }
+        else {
+            const job = new PackJob(p.path, absolute);
+            job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
+            job.entry.on('end', () => this[JOBDONE](job));
+            this[JOBS] += 1;
+            this[QUEUE].push(job);
+        }
+        this[PROCESS]();
+    }
+    [ADDFSENTRY](p) {
+        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
+        this[QUEUE].push(new PackJob(p, absolute));
+        this[PROCESS]();
+    }
+    [STAT](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        const stat = this.follow ? 'stat' : 'lstat';
+        fs_1.default[stat](job.absolute, (er, stat) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                this.emit('error', er);
+            }
+            else {
+                this[ONSTAT](job, stat);
+            }
+        });
+    }
+    [ONSTAT](job, stat) {
+        this.statCache.set(job.absolute, stat);
+        job.stat = stat;
+        // now that we have the stat, we can filter it.
+        if (!this.filter(job.path, stat)) {
+            job.ignore = true;
+        }
+        this[PROCESS]();
+    }
+    [READDIR](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        fs_1.default.readdir(job.absolute, (er, entries) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADDIR](job, entries);
+        });
+    }
+    [ONREADDIR](job, entries) {
+        this.readdirCache.set(job.absolute, entries);
+        job.readdir = entries;
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        if (this[PROCESSING]) {
+            return;
+        }
+        this[PROCESSING] = true;
+        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
+            this[PROCESSJOB](w.value);
+            if (w.value.ignore) {
+                const p = w.next;
+                this[QUEUE].removeNode(w);
+                w.next = p;
+            }
+        }
+        this[PROCESSING] = false;
+        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+            if (this.zip) {
+                this.zip.end(EOF);
+            }
+            else {
+                super.write(EOF);
+                super.end();
+            }
+        }
+    }
+    get [CURRENT]() {
+        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
+    }
+    [JOBDONE](_job) {
+        this[QUEUE].shift();
+        this[JOBS] -= 1;
+        this[PROCESS]();
+    }
+    [PROCESSJOB](job) {
+        if (job.pending) {
+            return;
+        }
+        if (job.entry) {
+            if (job === this[CURRENT] && !job.piped) {
+                this[PIPE](job);
+            }
+            return;
+        }
+        if (!job.stat) {
+            const sc = this.statCache.get(job.absolute);
+            if (sc) {
+                this[ONSTAT](job, sc);
+            }
+            else {
+                this[STAT](job);
+            }
+        }
+        if (!job.stat) {
+            return;
+        }
+        // filtered out!
+        if (job.ignore) {
+            return;
+        }
+        if (!this.noDirRecurse &&
+            job.stat.isDirectory() &&
+            !job.readdir) {
+            const rc = this.readdirCache.get(job.absolute);
+            if (rc) {
+                this[ONREADDIR](job, rc);
+            }
+            else {
+                this[READDIR](job);
+            }
+            if (!job.readdir) {
+                return;
+            }
+        }
+        // we know it doesn't have an entry, because that got checked above
+        job.entry = this[ENTRY](job);
+        if (!job.entry) {
+            job.ignore = true;
+            return;
+        }
+        if (job === this[CURRENT] && !job.piped) {
+            this[PIPE](job);
+        }
+    }
+    [ENTRYOPT](job) {
+        return {
+            onwarn: (code, msg, data) => this.warn(code, msg, data),
+            noPax: this.noPax,
+            cwd: this.cwd,
+            absolute: job.absolute,
+            preservePaths: this.preservePaths,
+            maxReadSize: this.maxReadSize,
+            strict: this.strict,
+            portable: this.portable,
+            linkCache: this.linkCache,
+            statCache: this.statCache,
+            noMtime: this.noMtime,
+            mtime: this.mtime,
+            prefix: this.prefix,
+            onWriteEntry: this.onWriteEntry,
+        };
+    }
+    [ENTRY](job) {
+        this[JOBS] += 1;
+        try {
+            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
+            return e
+                .on('end', () => this[JOBDONE](job))
+                .on('error', er => this.emit('error', er));
+        }
+        catch (er) {
+            this.emit('error', er);
+        }
+    }
+    [ONDRAIN]() {
+        if (this[CURRENT] && this[CURRENT].entry) {
+            this[CURRENT].entry.resume();
+        }
+    }
+    // like .pipe() but using super, because our write() is special
+    [PIPE](job) {
+        job.piped = true;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        const source = job.entry;
+        const zip = this.zip;
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                if (!zip.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                if (!super.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+    }
+    pause() {
+        if (this.zip) {
+            this.zip.pause();
+        }
+        return super.pause();
+    }
+    warn(code, message, data = {}) {
+        (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+}
+exports.Pack = Pack;
+class PackSync extends Pack {
+    sync = true;
+    constructor(opt) {
+        super(opt);
+        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
+    }
+    // pause/resume are no-ops in sync streams.
+    pause() { }
+    resume() { }
+    [STAT](job) {
+        const stat = this.follow ? 'statSync' : 'lstatSync';
+        this[ONSTAT](job, fs_1.default[stat](job.absolute));
+    }
+    [READDIR](job) {
+        this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
+    }
+    // gotta get it all in this tick
+    [PIPE](job) {
+        const source = job.entry;
+        const zip = this.zip;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('Cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                zip.write(chunk);
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                super[WRITE](chunk);
+            });
+        }
+    }
+}
+exports.PackSync = PackSync;
+//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/package.json b/node_modules/pacote/node_modules/tar/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/parse.js b/node_modules/pacote/node_modules/tar/dist/commonjs/parse.js
new file mode 100644
index 0000000000000..9746a25899e6e
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/parse.js
@@ -0,0 +1,599 @@
+"use strict";
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in.  We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet. This can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry.  The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
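+// A minimal usage sketch (assuming an uncompressed archive.tar on disk):
+//   const fs = require('node:fs')
+//   const p = new Parser({
+//     onReadEntry: entry => { console.log(entry.path); entry.resume() },
+//   })
+//   fs.createReadStream('archive.tar').pipe(p)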
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Parser = void 0;
+const events_1 = require("events");
+const minizlib_1 = require("minizlib");
+const yallist_1 = require("yallist");
+const header_js_1 = require("./header.js");
+const pax_js_1 = require("./pax.js");
+const read_entry_js_1 = require("./read-entry.js");
+const warn_method_js_1 = require("./warn-method.js");
+const maxMetaEntrySize = 1024 * 1024;
+const gzipHeader = Buffer.from([0x1f, 0x8b]);
+const STATE = Symbol('state');
+const WRITEENTRY = Symbol('writeEntry');
+const READENTRY = Symbol('readEntry');
+const NEXTENTRY = Symbol('nextEntry');
+const PROCESSENTRY = Symbol('processEntry');
+const EX = Symbol('extendedHeader');
+const GEX = Symbol('globalExtendedHeader');
+const META = Symbol('meta');
+const EMITMETA = Symbol('emitMeta');
+const BUFFER = Symbol('buffer');
+const QUEUE = Symbol('queue');
+const ENDED = Symbol('ended');
+const EMITTEDEND = Symbol('emittedEnd');
+const EMIT = Symbol('emit');
+const UNZIP = Symbol('unzip');
+const CONSUMECHUNK = Symbol('consumeChunk');
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
+const CONSUMEBODY = Symbol('consumeBody');
+const CONSUMEMETA = Symbol('consumeMeta');
+const CONSUMEHEADER = Symbol('consumeHeader');
+const CONSUMING = Symbol('consuming');
+const BUFFERCONCAT = Symbol('bufferConcat');
+const MAYBEEND = Symbol('maybeEnd');
+const WRITING = Symbol('writing');
+const ABORTED = Symbol('aborted');
+const DONE = Symbol('onDone');
+const SAW_VALID_ENTRY = Symbol('sawValidEntry');
+const SAW_NULL_BLOCK = Symbol('sawNullBlock');
+const SAW_EOF = Symbol('sawEOF');
+const CLOSESTREAM = Symbol('closeStream');
+const noop = () => true;
+class Parser extends events_1.EventEmitter {
+    file;
+    strict;
+    maxMetaEntrySize;
+    filter;
+    brotli;
+    writable = true;
+    readable = false;
+    [QUEUE] = new yallist_1.Yallist();
+    [BUFFER];
+    [READENTRY];
+    [WRITEENTRY];
+    [STATE] = 'begin';
+    [META] = '';
+    [EX];
+    [GEX];
+    [ENDED] = false;
+    [UNZIP];
+    [ABORTED] = false;
+    [SAW_VALID_ENTRY];
+    [SAW_NULL_BLOCK] = false;
+    [SAW_EOF] = false;
+    [WRITING] = false;
+    [CONSUMING] = false;
+    [EMITTEDEND] = false;
+    constructor(opt = {}) {
+        super();
+        this.file = opt.file || '';
+        // these BADARCHIVE errors can't be detected early. listen on DONE.
+        this.on(DONE, () => {
+            if (this[STATE] === 'begin' ||
+                this[SAW_VALID_ENTRY] === false) {
+                // either less than 1 block of data, or all entries were invalid.
+                // Either way, probably not even a tarball.
+                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
+            }
+        });
+        if (opt.ondone) {
+            this.on(DONE, opt.ondone);
+        }
+        else {
+            this.on(DONE, () => {
+                this.emit('prefinish');
+                this.emit('finish');
+                this.emit('end');
+            });
+        }
+        this.strict = !!opt.strict;
+        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
+        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
+        // Unlike gzip, brotli doesn't have any magic bytes to identify it
+        // Users need to explicitly tell us they're extracting a brotli file
+        // Or we infer from the file extension
+        const isTBR = opt.file &&
+            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
+        // if it's a tbr file it MIGHT be brotli, but we don't know until
+        // we look at it and verify it's not a valid tar file.
+        this.brotli =
+            !opt.gzip && opt.brotli !== undefined ? opt.brotli
+                : isTBR ? undefined
+                    : false;
+        // have to set this so that streams are ok piping into it
+        this.on('end', () => this[CLOSESTREAM]());
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        if (typeof opt.onReadEntry === 'function') {
+            this.on('entry', opt.onReadEntry);
+        }
+    }
+    warn(code, message, data = {}) {
+        (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+    [CONSUMEHEADER](chunk, position) {
+        if (this[SAW_VALID_ENTRY] === undefined) {
+            this[SAW_VALID_ENTRY] = false;
+        }
+        let header;
+        try {
+            header = new header_js_1.Header(chunk, position, this[EX], this[GEX]);
+        }
+        catch (er) {
+            return this.warn('TAR_ENTRY_INVALID', er);
+        }
+        if (header.nullBlock) {
+            if (this[SAW_NULL_BLOCK]) {
+                this[SAW_EOF] = true;
+                // ending an archive with no entries. Pointless, but legal.
+                if (this[STATE] === 'begin') {
+                    this[STATE] = 'header';
+                }
+                this[EMIT]('eof');
+            }
+            else {
+                this[SAW_NULL_BLOCK] = true;
+                this[EMIT]('nullBlock');
+            }
+        }
+        else {
+            this[SAW_NULL_BLOCK] = false;
+            if (!header.cksumValid) {
+                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
+            }
+            else if (!header.path) {
+                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
+            }
+            else {
+                const type = header.type;
+                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
+                        header,
+                    });
+                }
+                else if (!/^(Symbolic)?Link$/.test(type) &&
+                    !/^(Global)?ExtendedHeader$/.test(type) &&
+                    header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
+                        header,
+                    });
+                }
+                else {
+                    const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX]));
+                    // we do this for meta & ignored entries as well, because they
+                    // are still valid tar, or else we wouldn't know to ignore them
+                    if (!this[SAW_VALID_ENTRY]) {
+                        if (entry.remain) {
+                            // this might be the one!
+                            const onend = () => {
+                                if (!entry.invalid) {
+                                    this[SAW_VALID_ENTRY] = true;
+                                }
+                            };
+                            entry.on('end', onend);
+                        }
+                        else {
+                            this[SAW_VALID_ENTRY] = true;
+                        }
+                    }
+                    if (entry.meta) {
+                        if (entry.size > this.maxMetaEntrySize) {
+                            entry.ignore = true;
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = 'ignore';
+                            entry.resume();
+                        }
+                        else if (entry.size > 0) {
+                            this[META] = '';
+                            entry.on('data', c => (this[META] += c));
+                            this[STATE] = 'meta';
+                        }
+                    }
+                    else {
+                        this[EX] = undefined;
+                        entry.ignore =
+                            entry.ignore || !this.filter(entry.path, entry);
+                        if (entry.ignore) {
+                            // probably valid, just not something we care about
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = entry.remain ? 'ignore' : 'header';
+                            entry.resume();
+                        }
+                        else {
+                            if (entry.remain) {
+                                this[STATE] = 'body';
+                            }
+                            else {
+                                this[STATE] = 'header';
+                                entry.end();
+                            }
+                            if (!this[READENTRY]) {
+                                this[QUEUE].push(entry);
+                                this[NEXTENTRY]();
+                            }
+                            else {
+                                this[QUEUE].push(entry);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    [CLOSESTREAM]() {
+        queueMicrotask(() => this.emit('close'));
+    }
+    [PROCESSENTRY](entry) {
+        let go = true;
+        if (!entry) {
+            this[READENTRY] = undefined;
+            go = false;
+        }
+        else if (Array.isArray(entry)) {
+            const [ev, ...args] = entry;
+            this.emit(ev, ...args);
+        }
+        else {
+            this[READENTRY] = entry;
+            this.emit('entry', entry);
+            if (!entry.emittedEnd) {
+                entry.on('end', () => this[NEXTENTRY]());
+                go = false;
+            }
+        }
+        return go;
+    }
+    [NEXTENTRY]() {
+        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
+        if (!this[QUEUE].length) {
+            // At this point, there's nothing in the queue, but we may have an
+            // entry which is being consumed (readEntry).
+            // If we don't, then we definitely can handle more data.
+            // If we do, and either it's flowing, or it has never had any data
+            // written to it, then it needs more.
+            // The only other possibility is that it has returned false from a
+            // write() call, so we wait for the next drain to continue.
+            const re = this[READENTRY];
+            const drainNow = !re || re.flowing || re.size === re.remain;
+            if (drainNow) {
+                if (!this[WRITING]) {
+                    this.emit('drain');
+                }
+            }
+            else {
+                re.once('drain', () => this.emit('drain'));
+            }
+        }
+    }
+    [CONSUMEBODY](chunk, position) {
+        // write up to but no more than writeEntry.blockRemain
+        const entry = this[WRITEENTRY];
+        /* c8 ignore start */
+        if (!entry) {
+            throw new Error('attempt to consume body without entry??');
+        }
+        const br = entry.blockRemain ?? 0;
+        /* c8 ignore stop */
+        const c = br >= chunk.length && position === 0 ?
+            chunk
+            : chunk.subarray(position, position + br);
+        entry.write(c);
+        if (!entry.blockRemain) {
+            this[STATE] = 'header';
+            this[WRITEENTRY] = undefined;
+            entry.end();
+        }
+        return c.length;
+    }
+    [CONSUMEMETA](chunk, position) {
+        const entry = this[WRITEENTRY];
+        const ret = this[CONSUMEBODY](chunk, position);
+        // if we finished, then the entry is reset
+        if (!this[WRITEENTRY] && entry) {
+            this[EMITMETA](entry);
+        }
+        return ret;
+    }
+    [EMIT](ev, data, extra) {
+        if (!this[QUEUE].length && !this[READENTRY]) {
+            this.emit(ev, data, extra);
+        }
+        else {
+            this[QUEUE].push([ev, data, extra]);
+        }
+    }
+    [EMITMETA](entry) {
+        this[EMIT]('meta', this[META]);
+        switch (entry.type) {
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false);
+                break;
+            case 'GlobalExtendedHeader':
+                this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true);
+                break;
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath': {
+                const ex = this[EX] ?? Object.create(null);
+                this[EX] = ex;
+                ex.path = this[META].replace(/\0.*/, '');
+                break;
+            }
+            case 'NextFileHasLongLinkpath': {
+                const ex = this[EX] || Object.create(null);
+                this[EX] = ex;
+                ex.linkpath = this[META].replace(/\0.*/, '');
+                break;
+            }
+            /* c8 ignore start */
+            default:
+                throw new Error('unknown meta: ' + entry.type);
+            /* c8 ignore stop */
+        }
+    }
+    abort(error) {
+        this[ABORTED] = true;
+        this.emit('abort', error);
+        // always throws, even in non-strict mode
+        this.warn('TAR_ABORT', error, { recoverable: false });
+    }
+    write(chunk, encoding, cb) {
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, 
+            /* c8 ignore next */
+            typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        if (this[ABORTED]) {
+            /* c8 ignore next */
+            cb?.();
+            return false;
+        }
+        // first write, might be gzipped
+        const needSniff = this[UNZIP] === undefined ||
+            (this.brotli === undefined && this[UNZIP] === false);
+        if (needSniff && chunk) {
+            if (this[BUFFER]) {
+                chunk = Buffer.concat([this[BUFFER], chunk]);
+                this[BUFFER] = undefined;
+            }
+            if (chunk.length < gzipHeader.length) {
+                this[BUFFER] = chunk;
+                /* c8 ignore next */
+                cb?.();
+                return true;
+            }
+            // look for gzip header
+            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
+                if (chunk[i] !== gzipHeader[i]) {
+                    this[UNZIP] = false;
+                }
+            }
+            const maybeBrotli = this.brotli === undefined;
+            if (this[UNZIP] === false && maybeBrotli) {
+                // read the first header to see if it's a valid tar file. If so,
+                // we can safely assume that it's not actually brotli, despite the
+                // .tbr or .tar.br file extension.
+                // if we ended before getting a full chunk, yes, def brotli
+                if (chunk.length < 512) {
+                    if (this[ENDED]) {
+                        this.brotli = true;
+                    }
+                    else {
+                        this[BUFFER] = chunk;
+                        /* c8 ignore next */
+                        cb?.();
+                        return true;
+                    }
+                }
+                else {
+                    // if it's tar, it's pretty reliably not brotli; the
+                    // chances of that happening are astronomically small.
+                    try {
+                        new header_js_1.Header(chunk.subarray(0, 512));
+                        this.brotli = false;
+                    }
+                    catch (_) {
+                        this.brotli = true;
+                    }
+                }
+            }
+            if (this[UNZIP] === undefined ||
+                (this[UNZIP] === false && this.brotli)) {
+                const ended = this[ENDED];
+                this[ENDED] = false;
+                this[UNZIP] =
+                    this[UNZIP] === undefined ?
+                        new minizlib_1.Unzip({})
+                        : new minizlib_1.BrotliDecompress({});
+                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
+                this[UNZIP].on('error', er => this.abort(er));
+                this[UNZIP].on('end', () => {
+                    this[ENDED] = true;
+                    this[CONSUMECHUNK]();
+                });
+                this[WRITING] = true;
+                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
+                this[WRITING] = false;
+                cb?.();
+                return ret;
+            }
+        }
+        this[WRITING] = true;
+        if (this[UNZIP]) {
+            this[UNZIP].write(chunk);
+        }
+        else {
+            this[CONSUMECHUNK](chunk);
+        }
+        this[WRITING] = false;
+        // return false if there's a queue, or if the current entry isn't flowing
+        const ret = this[QUEUE].length ? false
+            : this[READENTRY] ? this[READENTRY].flowing
+                : true;
+        // if we have no queue, then that means a clogged READENTRY
+        if (!ret && !this[QUEUE].length) {
+            this[READENTRY]?.once('drain', () => this.emit('drain'));
+        }
+        /* c8 ignore next */
+        cb?.();
+        return ret;
+    }
+    [BUFFERCONCAT](c) {
+        if (c && !this[ABORTED]) {
+            this[BUFFER] =
+                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
+        }
+    }
+    [MAYBEEND]() {
+        if (this[ENDED] &&
+            !this[EMITTEDEND] &&
+            !this[ABORTED] &&
+            !this[CONSUMING]) {
+            this[EMITTEDEND] = true;
+            const entry = this[WRITEENTRY];
+            if (entry && entry.blockRemain) {
+                // truncated, likely a damaged file
+                const have = this[BUFFER] ? this[BUFFER].length : 0;
+                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
+                if (this[BUFFER]) {
+                    entry.write(this[BUFFER]);
+                }
+                entry.end();
+            }
+            this[EMIT](DONE);
+        }
+    }
+    [CONSUMECHUNK](chunk) {
+        if (this[CONSUMING] && chunk) {
+            this[BUFFERCONCAT](chunk);
+        }
+        else if (!chunk && !this[BUFFER]) {
+            this[MAYBEEND]();
+        }
+        else if (chunk) {
+            this[CONSUMING] = true;
+            if (this[BUFFER]) {
+                this[BUFFERCONCAT](chunk);
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            else {
+                this[CONSUMECHUNKSUB](chunk);
+            }
+            while (this[BUFFER] &&
+                this[BUFFER]?.length >= 512 &&
+                !this[ABORTED] &&
+                !this[SAW_EOF]) {
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            this[CONSUMING] = false;
+        }
+        if (!this[BUFFER] || this[ENDED]) {
+            this[MAYBEEND]();
+        }
+    }
+    [CONSUMECHUNKSUB](chunk) {
+        // we know that we are in CONSUMING mode, so anything written goes into
+        // the buffer.  Advance the position and put any remainder in the buffer.
+        let position = 0;
+        const length = chunk.length;
+        while (position + 512 <= length &&
+            !this[ABORTED] &&
+            !this[SAW_EOF]) {
+            switch (this[STATE]) {
+                case 'begin':
+                case 'header':
+                    this[CONSUMEHEADER](chunk, position);
+                    position += 512;
+                    break;
+                case 'ignore':
+                case 'body':
+                    position += this[CONSUMEBODY](chunk, position);
+                    break;
+                case 'meta':
+                    position += this[CONSUMEMETA](chunk, position);
+                    break;
+                /* c8 ignore start */
+                default:
+                    throw new Error('invalid state: ' + this[STATE]);
+                /* c8 ignore stop */
+            }
+        }
+        if (position < length) {
+            if (this[BUFFER]) {
+                this[BUFFER] = Buffer.concat([
+                    chunk.subarray(position),
+                    this[BUFFER],
+                ]);
+            }
+            else {
+                this[BUFFER] = chunk.subarray(position);
+            }
+        }
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (cb)
+            this.once('finish', cb);
+        if (!this[ABORTED]) {
+            if (this[UNZIP]) {
+                /* c8 ignore start */
+                if (chunk)
+                    this[UNZIP].write(chunk);
+                /* c8 ignore stop */
+                this[UNZIP].end();
+            }
+            else {
+                this[ENDED] = true;
+                if (this.brotli === undefined)
+                    chunk = chunk || Buffer.alloc(0);
+                if (chunk)
+                    this.write(chunk);
+                this[MAYBEEND]();
+            }
+        }
+        return this;
+    }
+}
+exports.Parser = Parser;
+//# sourceMappingURL=parse.js.map
\ No newline at end of file
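
The Parser above exposes the archive as a plain byte sink: raw, gzipped, or brotli-compressed tar data goes in through write()/end(), and each file comes back out as a paused 'entry' stream. A minimal consumption sketch, assuming `Parser` is re-exported from the tar package root and using a hypothetical `archive.tgz`:

```js
const fs = require('node:fs');
// assumption: the compiled Parser above is re-exported from the package root
const { Parser } = require('tar');

const parser = new Parser();

parser.on('entry', entry => {
  // entries start out paused; resume (or pipe) them so parsing can continue
  console.log(entry.type, entry.path, entry.size);
  entry.resume();
});

parser.on('end', () => console.log('archive fully parsed'));

// the first chunk is sniffed for gzip/brotli magic bytes, so compressed and
// uncompressed archives are written the same way
fs.createReadStream('archive.tgz').pipe(parser);
```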
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js
new file mode 100644
index 0000000000000..9ff391c44092c
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js
@@ -0,0 +1,170 @@
+"use strict";
+// A path exclusive reservation system
+// reserve([list, of, paths], fn)
+// When the fn is first in line for all its paths, it
+// is called with a cb that clears the reservation.
+//
+// Used by async unpack to avoid clobbering paths in use,
+// while still allowing maximal safe parallelization.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PathReservations = void 0;
+const node_path_1 = require("node:path");
+const normalize_unicode_js_1 = require("./normalize-unicode.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+// return a set of parent dirs for a given path
+// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c']
+const getDirs = (path) => {
+    const dirs = path
+        .split('/')
+        .slice(0, -1)
+        .reduce((set, path) => {
+        const s = set[set.length - 1];
+        if (s !== undefined) {
+            path = (0, node_path_1.join)(s, path);
+        }
+        set.push(path || '/');
+        return set;
+    }, []);
+    return dirs;
+};
+class PathReservations {
+    // path => [function or Set]
+    // A Set object means a directory reservation
+    // A fn is a direct reservation on that path
+    #queues = new Map();
+    // fn => {paths:[path,...], dirs:[path, ...]}
+    #reservations = new Map();
+    // functions currently running
+    #running = new Set();
+    reserve(paths, fn) {
+        paths =
+            isWindows ?
+                ['win32 parallelization disabled']
+                : paths.map(p => {
+                    // don't need normPath, because we skip this entirely for windows
+                    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase();
+                });
+        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
+        this.#reservations.set(fn, { dirs, paths });
+        for (const p of paths) {
+            const q = this.#queues.get(p);
+            if (!q) {
+                this.#queues.set(p, [fn]);
+            }
+            else {
+                q.push(fn);
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            if (!q) {
+                this.#queues.set(dir, [new Set([fn])]);
+            }
+            else {
+                const l = q[q.length - 1];
+                if (l instanceof Set) {
+                    l.add(fn);
+                }
+                else {
+                    q.push(new Set([fn]));
+                }
+            }
+        }
+        return this.#run(fn);
+    }
+    // return the queues for each path the function cares about
+    // fn => {paths, dirs}
+    #getQueues(fn) {
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('function does not have any path reservations');
+        }
+        /* c8 ignore stop */
+        return {
+            paths: res.paths.map((path) => this.#queues.get(path)),
+            dirs: [...res.dirs].map(path => this.#queues.get(path)),
+        };
+    }
+    // check if fn is first in line for all its paths, and is
+    // included in the first set for all its dir queues
+    check(fn) {
+        const { paths, dirs } = this.#getQueues(fn);
+        return (paths.every(q => q && q[0] === fn) &&
+            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
+    }
+    // run the function if it's first in line and not already running
+    #run(fn) {
+        if (this.#running.has(fn) || !this.check(fn)) {
+            return false;
+        }
+        this.#running.add(fn);
+        fn(() => this.#clear(fn));
+        return true;
+    }
+    #clear(fn) {
+        if (!this.#running.has(fn)) {
+            return false;
+        }
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('invalid reservation');
+        }
+        /* c8 ignore stop */
+        const { paths, dirs } = res;
+        const next = new Set();
+        for (const path of paths) {
+            const q = this.#queues.get(path);
+            /* c8 ignore start */
+            if (!q || q?.[0] !== fn) {
+                continue;
+            }
+            /* c8 ignore stop */
+            const q0 = q[1];
+            if (!q0) {
+                this.#queues.delete(path);
+                continue;
+            }
+            q.shift();
+            if (typeof q0 === 'function') {
+                next.add(q0);
+            }
+            else {
+                for (const f of q0) {
+                    next.add(f);
+                }
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            const q0 = q?.[0];
+            /* c8 ignore next - type safety only */
+            if (!q || !(q0 instanceof Set))
+                continue;
+            if (q0.size === 1 && q.length === 1) {
+                this.#queues.delete(dir);
+                continue;
+            }
+            else if (q0.size === 1) {
+                q.shift();
+                // next one must be a function,
+                // or else the Set would've been reused
+                const n = q[0];
+                if (typeof n === 'function') {
+                    next.add(n);
+                }
+            }
+            else {
+                q0.delete(fn);
+            }
+        }
+        this.#running.delete(fn);
+        next.forEach(fn => this.#run(fn));
+        return true;
+    }
+}
+exports.PathReservations = PathReservations;
+//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
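
The header comment describes the whole contract: reserve() queues a function behind every path (and parent directory) it touches, runs it once it is first in line, and hands it a callback that releases the reservation. A small sketch of that behavior, with an illustrative require path and made-up extraction paths:

```js
const { PathReservations } = require('./path-reservations.js'); // illustrative path

const reservations = new PathReservations();

// first reservation on a path runs immediately; calling done() releases it
reservations.reserve(['/extract/pkg/index.js'], done => {
  setTimeout(() => {
    console.log('first writer finished');
    done();
  }, 50);
});

// same path: queued until the first reservation calls done()
reservations.reserve(['/extract/pkg/index.js'], done => {
  console.log('second writer runs only after the first');
  done();
});

// unrelated path: not blocked, runs right away
reservations.reserve(['/extract/other/file.txt'], done => {
  console.log('independent path');
  done();
});
```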
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/pax.js b/node_modules/pacote/node_modules/tar/dist/commonjs/pax.js
new file mode 100644
index 0000000000000..d30c0f3efbe9e
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/pax.js
@@ -0,0 +1,158 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Pax = void 0;
+const node_path_1 = require("node:path");
+const header_js_1 = require("./header.js");
+class Pax {
+    atime;
+    mtime;
+    ctime;
+    charset;
+    comment;
+    gid;
+    uid;
+    gname;
+    uname;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    path;
+    size;
+    mode;
+    global;
+    constructor(obj, global = false) {
+        this.atime = obj.atime;
+        this.charset = obj.charset;
+        this.comment = obj.comment;
+        this.ctime = obj.ctime;
+        this.dev = obj.dev;
+        this.gid = obj.gid;
+        this.global = global;
+        this.gname = obj.gname;
+        this.ino = obj.ino;
+        this.linkpath = obj.linkpath;
+        this.mtime = obj.mtime;
+        this.nlink = obj.nlink;
+        this.path = obj.path;
+        this.size = obj.size;
+        this.uid = obj.uid;
+        this.uname = obj.uname;
+    }
+    encode() {
+        const body = this.encodeBody();
+        if (body === '') {
+            return Buffer.allocUnsafe(0);
+        }
+        const bodyLen = Buffer.byteLength(body);
+        // round up to 512 bytes
+        // add 512 for header
+        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
+        const buf = Buffer.allocUnsafe(bufLen);
+        // 0-fill the header section, it might not hit every field
+        for (let i = 0; i < 512; i++) {
+            buf[i] = 0;
+        }
+        new header_js_1.Header({
+            // XXX split the path
+            // then the path should be PaxHeader + basename, but less than 99,
+            // prepend with the dirname
+            /* c8 ignore start */
+            path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
+            /* c8 ignore stop */
+            mode: this.mode || 0o644,
+            uid: this.uid,
+            gid: this.gid,
+            size: bodyLen,
+            mtime: this.mtime,
+            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+            linkpath: '',
+            uname: this.uname || '',
+            gname: this.gname || '',
+            devmaj: 0,
+            devmin: 0,
+            atime: this.atime,
+            ctime: this.ctime,
+        }).encode(buf);
+        buf.write(body, 512, bodyLen, 'utf8');
+        // null pad after the body
+        for (let i = bodyLen + 512; i < buf.length; i++) {
+            buf[i] = 0;
+        }
+        return buf;
+    }
+    encodeBody() {
+        return (this.encodeField('path') +
+            this.encodeField('ctime') +
+            this.encodeField('atime') +
+            this.encodeField('dev') +
+            this.encodeField('ino') +
+            this.encodeField('nlink') +
+            this.encodeField('charset') +
+            this.encodeField('comment') +
+            this.encodeField('gid') +
+            this.encodeField('gname') +
+            this.encodeField('linkpath') +
+            this.encodeField('mtime') +
+            this.encodeField('size') +
+            this.encodeField('uid') +
+            this.encodeField('uname'));
+    }
+    encodeField(field) {
+        if (this[field] === undefined) {
+            return '';
+        }
+        const r = this[field];
+        const v = r instanceof Date ? r.getTime() / 1000 : r;
+        const s = ' ' +
+            (field === 'dev' || field === 'ino' || field === 'nlink' ?
+                'SCHILY.'
+                : '') +
+            field +
+            '=' +
+            v +
+            '\n';
+        const byteLen = Buffer.byteLength(s);
+        // the length prefix counts the entire record, including its own
+        // digits; if adding the digit count pushes the total past the next
+        // power of ten, one more digit is needed
+        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
+        if (byteLen + digits >= Math.pow(10, digits)) {
+            digits += 1;
+        }
+        const len = digits + byteLen;
+        return len + s;
+    }
+    static parse(str, ex, g = false) {
+        return new Pax(merge(parseKV(str), ex), g);
+    }
+}
+exports.Pax = Pax;
+const merge = (a, b) => b ? Object.assign({}, b, a) : a;
+const parseKV = (str) => str
+    .replace(/\n$/, '')
+    .split('\n')
+    .reduce(parseKVLine, Object.create(null));
+const parseKVLine = (set, line) => {
+    const n = parseInt(line, 10);
+    // XXX Values with \n in them will fail this.
+    // Refactor to not be a naive line-by-line parse.
+    if (n !== Buffer.byteLength(line) + 1) {
+        return set;
+    }
+    line = line.slice((n + ' ').length);
+    const kv = line.split('=');
+    const r = kv.shift();
+    if (!r) {
+        return set;
+    }
+    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
+    const v = kv.join('=');
+    set[k] =
+        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
+            new Date(Number(v) * 1000)
+            : /^[0-9]+$/.test(v) ? +v
+                : v;
+    return set;
+};
+//# sourceMappingURL=pax.js.map
\ No newline at end of file
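
Each pax record produced above has the shape `<len> <key>=<value>\n`, where `<len>` is the byte length of the entire record, including the digits of `<len>` itself; that self-referential count is what the digit arithmetic in encodeField() implements. A small sketch with made-up values and an illustrative require path:

```js
const { Pax } = require('./pax.js'); // illustrative path

const pax = new Pax({
  path: 'some/deeply/nested/really-long-file-name.txt',
  mtime: new Date(1700000000 * 1000),
});

// each record looks like "20 mtime=1700000000\n": the leading 20 counts the
// two digits, the space, "mtime=1700000000", and the trailing newline
console.log(pax.encodeBody());

// encode() prepends a 512-byte ExtendedHeader block and pads the body out to
// a 512-byte boundary, so the buffer length is always a multiple of 512
console.log(pax.encode().length);
```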
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js
new file mode 100644
index 0000000000000..15e2d55c938a4
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js
@@ -0,0 +1,140 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ReadEntry = void 0;
+const minipass_1 = require("minipass");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+class ReadEntry extends minipass_1.Minipass {
+    extended;
+    globalExtended;
+    header;
+    startBlockSize;
+    blockRemain;
+    remain;
+    type;
+    meta = false;
+    ignore = false;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    size = 0;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    invalid = false;
+    absolute;
+    unsupported = false;
+    constructor(header, ex, gex) {
+        super({});
+        // read entries always start life paused.  this is to avoid the
+        // situation where Minipass's auto-ending empty streams results
+        // in an entry ending before we're ready for it.
+        this.pause();
+        this.extended = ex;
+        this.globalExtended = gex;
+        this.header = header;
+        /* c8 ignore start */
+        this.remain = header.size ?? 0;
+        /* c8 ignore stop */
+        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
+        this.blockRemain = this.startBlockSize;
+        this.type = header.type;
+        switch (this.type) {
+            case 'File':
+            case 'OldFile':
+            case 'Link':
+            case 'SymbolicLink':
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'Directory':
+            case 'FIFO':
+            case 'ContiguousFile':
+            case 'GNUDumpDir':
+                break;
+            case 'NextFileHasLongLinkpath':
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath':
+            case 'GlobalExtendedHeader':
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this.meta = true;
+                break;
+            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+            // it may be worth doing the same, but with a warning.
+            default:
+                this.ignore = true;
+        }
+        /* c8 ignore start */
+        if (!header.path) {
+            throw new Error('no path provided for tar.ReadEntry');
+        }
+        /* c8 ignore stop */
+        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
+        this.mode = header.mode;
+        if (this.mode) {
+            this.mode = this.mode & 0o7777;
+        }
+        this.uid = header.uid;
+        this.gid = header.gid;
+        this.uname = header.uname;
+        this.gname = header.gname;
+        this.size = this.remain;
+        this.mtime = header.mtime;
+        this.atime = header.atime;
+        this.ctime = header.ctime;
+        /* c8 ignore start */
+        this.linkpath =
+            header.linkpath ?
+                (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
+                : undefined;
+        /* c8 ignore stop */
+        this.uname = header.uname;
+        this.gname = header.gname;
+        if (ex) {
+            this.#slurp(ex);
+        }
+        if (gex) {
+            this.#slurp(gex, true);
+        }
+    }
+    write(data) {
+        const writeLen = data.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        const r = this.remain;
+        const br = this.blockRemain;
+        this.remain = Math.max(0, r - writeLen);
+        this.blockRemain = Math.max(0, br - writeLen);
+        if (this.ignore) {
+            return true;
+        }
+        if (r >= writeLen) {
+            return super.write(data);
+        }
+        // r < writeLen
+        return super.write(data.subarray(0, r));
+    }
+    #slurp(ex, gex = false) {
+        if (ex.path)
+            ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
+        if (ex.linkpath)
+            ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex));
+        })));
+    }
+}
+exports.ReadEntry = ReadEntry;
+//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/replace.js b/node_modules/pacote/node_modules/tar/dist/commonjs/replace.js
new file mode 100644
index 0000000000000..262deecd12f9f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/replace.js
@@ -0,0 +1,231 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.replace = void 0;
+// tar -r
+const fs_minipass_1 = require("@isaacs/fs-minipass");
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const header_js_1 = require("./header.js");
+const list_js_1 = require("./list.js");
+const make_command_js_1 = require("./make-command.js");
+const options_js_1 = require("./options.js");
+const pack_js_1 = require("./pack.js");
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is, jump forward by the specified size (round up to 512)
+// and try again.
+// Write the new Pack stream starting there.
+const replaceSync = (opt, files) => {
+    const p = new pack_js_1.PackSync(opt);
+    let threw = true;
+    let fd;
+    let position;
+    try {
+        try {
+            fd = node_fs_1.default.openSync(opt.file, 'r+');
+        }
+        catch (er) {
+            if (er?.code === 'ENOENT') {
+                fd = node_fs_1.default.openSync(opt.file, 'w+');
+            }
+            else {
+                throw er;
+            }
+        }
+        const st = node_fs_1.default.fstatSync(fd);
+        const headBuf = Buffer.alloc(512);
+        POSITION: for (position = 0; position < st.size; position += 512) {
+            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
+                if (position === 0 &&
+                    headBuf[0] === 0x1f &&
+                    headBuf[1] === 0x8b) {
+                    throw new Error('cannot append to compressed archives');
+                }
+                if (!bytes) {
+                    break POSITION;
+                }
+            }
+            const h = new header_js_1.Header(headBuf);
+            if (!h.cksumValid) {
+                break;
+            }
+            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
+            if (position + entryBlockSize + 512 > st.size) {
+                break;
+            }
+            // the 512 for the header we just parsed will be added as well
+            // also jump ahead all the blocks for the body
+            position += entryBlockSize;
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+        }
+        threw = false;
+        streamSync(opt, p, position, fd, files);
+    }
+    finally {
+        if (threw) {
+            try {
+                node_fs_1.default.closeSync(fd);
+            }
+            catch (er) { }
+        }
+    }
+};
+const streamSync = (opt, p, position, fd, files) => {
+    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
+        fd: fd,
+        start: position,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const replaceAsync = (opt, files) => {
+    files = Array.from(files);
+    const p = new pack_js_1.Pack(opt);
+    const getPos = (fd, size, cb_) => {
+        const cb = (er, pos) => {
+            if (er) {
+                node_fs_1.default.close(fd, _ => cb_(er));
+            }
+            else {
+                cb_(null, pos);
+            }
+        };
+        let position = 0;
+        if (size === 0) {
+            return cb(null, 0);
+        }
+        let bufPos = 0;
+        const headBuf = Buffer.alloc(512);
+        const onread = (er, bytes) => {
+            if (er || typeof bytes === 'undefined') {
+                return cb(er);
+            }
+            bufPos += bytes;
+            if (bufPos < 512 && bytes) {
+                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
+            }
+            if (position === 0 &&
+                headBuf[0] === 0x1f &&
+                headBuf[1] === 0x8b) {
+                return cb(new Error('cannot append to compressed archives'));
+            }
+            // truncated header
+            if (bufPos < 512) {
+                return cb(null, position);
+            }
+            const h = new header_js_1.Header(headBuf);
+            if (!h.cksumValid) {
+                return cb(null, position);
+            }
+            /* c8 ignore next */
+            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
+            if (position + entryBlockSize + 512 > size) {
+                return cb(null, position);
+            }
+            position += entryBlockSize + 512;
+            if (position >= size) {
+                return cb(null, position);
+            }
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+            bufPos = 0;
+            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
+        };
+        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
+    };
+    const promise = new Promise((resolve, reject) => {
+        p.on('error', reject);
+        let flag = 'r+';
+        const onopen = (er, fd) => {
+            if (er && er.code === 'ENOENT' && flag === 'r+') {
+                flag = 'w+';
+                return node_fs_1.default.open(opt.file, flag, onopen);
+            }
+            if (er || !fd) {
+                return reject(er);
+            }
+            node_fs_1.default.fstat(fd, (er, st) => {
+                if (er) {
+                    return node_fs_1.default.close(fd, () => reject(er));
+                }
+                getPos(fd, st.size, (er, position) => {
+                    if (er) {
+                        return reject(er);
+                    }
+                    const stream = new fs_minipass_1.WriteStream(opt.file, {
+                        fd: fd,
+                        start: position,
+                    });
+                    p.pipe(stream);
+                    stream.on('error', reject);
+                    stream.on('close', resolve);
+                    addFilesAsync(p, files);
+                });
+            });
+        };
+        node_fs_1.default.open(opt.file, flag, onopen);
+    });
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            (0, list_js_1.list)({
+                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await (0, list_js_1.list)({
+                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, 
+/* c8 ignore start */
+() => {
+    throw new TypeError('file is required');
+}, () => {
+    throw new TypeError('file is required');
+}, 
+/* c8 ignore stop */
+(opt, entries) => {
+    if (!(0, options_js_1.isFile)(opt)) {
+        throw new TypeError('file is required');
+    }
+    if (opt.gzip ||
+        opt.brotli ||
+        opt.file.endsWith('.br') ||
+        opt.file.endsWith('.tbr')) {
+        throw new TypeError('cannot append to compressed archives');
+    }
+    if (!entries?.length) {
+        throw new TypeError('no paths specified to add/replace');
+    }
+});
+//# sourceMappingURL=replace.js.map
\ No newline at end of file
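
As the comment at the top of the file says, replace scans header blocks from the start of the archive, stops at the first invalid checksum or truncated entry, and writes the new Pack stream from that offset, which is what `tar -r` does. A usage sketch, assuming `replace` is re-exported from the tar package root and treating the file names as placeholders:

```js
const { replace } = require('tar'); // assumption: re-export of the module above

async function appendToArchive() {
  // append an entry after the last complete entry in an existing archive
  await replace({ file: 'archive.tar' }, ['build/output.js']);

  // compressed archives are rejected up front by the option validation
  try {
    await replace({ file: 'archive.tgz', gzip: true }, ['build/output.js']);
  } catch (er) {
    console.error(er.message); // "cannot append to compressed archives"
  }
}

appendToArchive().catch(console.error);
```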
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js
new file mode 100644
index 0000000000000..bb7639c35a110
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.stripAbsolutePath = void 0;
+// unix absolute paths are also absolute on win32, so we use this for both
+const node_path_1 = require("node:path");
+const { isAbsolute, parse } = node_path_1.win32;
+// returns [root, stripped]
+// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
+// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
+// explicitly if it's the first character.
+// drive-specific relative paths on Windows get their root stripped off even
+// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
+const stripAbsolutePath = (path) => {
+    let r = '';
+    let parsed = parse(path);
+    while (isAbsolute(path) || parsed.root) {
+        // windows will think that //x/y/z has a "root" of //x/y/
+        // but strip the //?/C:/ off of //?/C:/path
+        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
+            '/'
+            : parsed.root;
+        path = path.slice(root.length);
+        r += root;
+        parsed = parse(path);
+    }
+    return [r, path];
+};
+exports.stripAbsolutePath = stripAbsolutePath;
+//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
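
A few concrete inputs and outputs for stripAbsolutePath, matching the cases already called out in the comment above (the require path is illustrative):

```js
const { stripAbsolutePath } = require('./strip-absolute-path.js'); // illustrative path

console.log(stripAbsolutePath('/etc/passwd'));       // [ '/', 'etc/passwd' ]
console.log(stripAbsolutePath('c:../foo'));          // [ 'c:', '../foo' ]
console.log(stripAbsolutePath('relative/file.txt')); // [ '', 'relative/file.txt' ]
```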
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
new file mode 100644
index 0000000000000..6fa74ad6a4ac9
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.stripTrailingSlashes = void 0;
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+const stripTrailingSlashes = (str) => {
+    let i = str.length - 1;
+    let slashesStart = -1;
+    while (i > -1 && str.charAt(i) === '/') {
+        slashesStart = i;
+        i--;
+    }
+    return slashesStart === -1 ? str : str.slice(0, slashesStart);
+};
+exports.stripTrailingSlashes = stripTrailingSlashes;
+//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js
new file mode 100644
index 0000000000000..cc19ac1a2e3c6
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SymlinkError = void 0;
+class SymlinkError extends Error {
+    path;
+    symlink;
+    syscall = 'symlink';
+    code = 'TAR_SYMLINK_ERROR';
+    constructor(symlink, path) {
+        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
+        this.symlink = symlink;
+        this.path = path;
+    }
+    get name() {
+        return 'SymlinkError';
+    }
+}
+exports.SymlinkError = SymlinkError;
+//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/types.js b/node_modules/pacote/node_modules/tar/dist/commonjs/types.js
new file mode 100644
index 0000000000000..cb9b684e843b7
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/types.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.code = exports.name = exports.isName = exports.isCode = void 0;
+const isCode = (c) => exports.name.has(c);
+exports.isCode = isCode;
+const isName = (c) => exports.code.has(c);
+exports.isName = isName;
+// map types from key to human-friendly name
+exports.name = new Map([
+    ['0', 'File'],
+    // same as File
+    ['', 'OldFile'],
+    ['1', 'Link'],
+    ['2', 'SymbolicLink'],
+    // Devices and FIFOs aren't fully supported
+    // they are parsed, but skipped when unpacking
+    ['3', 'CharacterDevice'],
+    ['4', 'BlockDevice'],
+    ['5', 'Directory'],
+    ['6', 'FIFO'],
+    // same as File
+    ['7', 'ContiguousFile'],
+    // pax headers
+    ['g', 'GlobalExtendedHeader'],
+    ['x', 'ExtendedHeader'],
+    // vendor-specific stuff
+    // skip
+    ['A', 'SolarisACL'],
+    // like 5, but with data, which should be skipped
+    ['D', 'GNUDumpDir'],
+    // metadata only, skip
+    ['I', 'Inode'],
+    // data = link path of next file
+    ['K', 'NextFileHasLongLinkpath'],
+    // data = path of next file
+    ['L', 'NextFileHasLongPath'],
+    // skip
+    ['M', 'ContinuationFile'],
+    // like L
+    ['N', 'OldGnuLongPath'],
+    // skip
+    ['S', 'SparseFile'],
+    // skip
+    ['V', 'TapeVolumeHeader'],
+    // like x
+    ['X', 'OldExtendedHeader'],
+]);
+// map the other direction
+exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
+//# sourceMappingURL=types.js.map
\ No newline at end of file
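
The two maps translate between the single-character type codes stored in tar headers and the readable names used by the rest of the code, and isCode/isName check membership in each direction. For example (the require path is illustrative):

```js
const { name, code, isCode, isName } = require('./types.js'); // illustrative path

console.log(name.get('5'));              // 'Directory'
console.log(code.get('SymbolicLink'));   // '2'
console.log(isCode('x'));                // true: pax ExtendedHeader
console.log(isName('TapeVolumeHeader')); // true: parsed, but skipped when unpacking
console.log(isCode('q'));                // false: not a known type code
```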
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js b/node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js
new file mode 100644
index 0000000000000..edf8acbb18c40
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js
@@ -0,0 +1,919 @@
+"use strict";
+// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
+// but the path reservations are required to avoid race conditions where
+// parallelized unpack ops may mess with one another, due to dependencies
+// (like a Link depending on its target) or destructive operations (like
+// clobbering an fs object to create one of a different type.)
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.UnpackSync = exports.Unpack = void 0;
+const fsm = __importStar(require("@isaacs/fs-minipass"));
+const node_assert_1 = __importDefault(require("node:assert"));
+const node_crypto_1 = require("node:crypto");
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const get_write_flag_js_1 = require("./get-write-flag.js");
+const mkdir_js_1 = require("./mkdir.js");
+const normalize_unicode_js_1 = require("./normalize-unicode.js");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+const parse_js_1 = require("./parse.js");
+const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const wc = __importStar(require("./winchars.js"));
+const path_reservations_js_1 = require("./path-reservations.js");
+const ONENTRY = Symbol('onEntry');
+const CHECKFS = Symbol('checkFs');
+const CHECKFS2 = Symbol('checkFs2');
+const PRUNECACHE = Symbol('pruneCache');
+const ISREUSABLE = Symbol('isReusable');
+const MAKEFS = Symbol('makeFs');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const LINK = Symbol('link');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const UNSUPPORTED = Symbol('unsupported');
+const CHECKPATH = Symbol('checkPath');
+const MKDIR = Symbol('mkdir');
+const ONERROR = Symbol('onError');
+const PENDING = Symbol('pending');
+const PEND = Symbol('pend');
+const UNPEND = Symbol('unpend');
+const ENDED = Symbol('ended');
+const MAYBECLOSE = Symbol('maybeClose');
+const SKIP = Symbol('skip');
+const DOCHOWN = Symbol('doChown');
+const UID = Symbol('uid');
+const GID = Symbol('gid');
+const CHECKED_CWD = Symbol('checkedCwd');
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+const DEFAULT_MAX_DEPTH = 1024;
+// Unlinks on Windows are not atomic.
+//
+// This means that if you have a file entry, followed by another
+// file entry with an identical name, and you cannot re-use the file
+// (because it's a hardlink, or because unlink:true is set, or it's
+// Windows, which does not have useful nlink values), then the unlink
+// will be committed to the disk AFTER the new file has been written
+// over the old one, deleting the new file.
+//
+// To work around this, on Windows systems, we rename the file and then
+// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
+// know of a better way to do this, given windows' non-atomic unlink
+// semantics.
+//
+// See: https://github.com/npm/node-tar/issues/183
+/* c8 ignore start */
+const unlinkFile = (path, cb) => {
+    if (!isWindows) {
+        return node_fs_1.default.unlink(path, cb);
+    }
+    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
+    node_fs_1.default.rename(path, name, er => {
+        if (er) {
+            return cb(er);
+        }
+        node_fs_1.default.unlink(name, cb);
+    });
+};
+/* c8 ignore stop */
+/* c8 ignore start */
+const unlinkFileSync = (path) => {
+    if (!isWindows) {
+        return node_fs_1.default.unlinkSync(path);
+    }
+    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
+    node_fs_1.default.renameSync(path, name);
+    node_fs_1.default.unlinkSync(name);
+};
+/* c8 ignore stop */
+// this.gid, entry.gid, this.processUid
+const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
+    : b !== undefined && b === b >>> 0 ? b
+        : c;
+// clear the cache if it's a case-insensitive unicode-squashing match.
+// we can't know if the current file system is case-sensitive or supports
+// unicode fully, so we check for similarity on the maximally compatible
+// representation.  Err on the side of pruning, since all it's doing is
+// preventing lstats, and it's not the end of the world if we get a false
+// positive.
+// Note that on windows, we always drop the entire cache whenever a
+// symbolic link is encountered, because 8.3 filenames are impossible
+// to reason about, and collisions are hazards rather than just failures.
+const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
+// remove all cache entries matching ${abs}/**
+const pruneCache = (cache, abs) => {
+    abs = cacheKeyNormalize(abs);
+    for (const path of cache.keys()) {
+        const pnorm = cacheKeyNormalize(path);
+        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
+            cache.delete(path);
+        }
+    }
+};
+const dropCache = (cache) => {
+    for (const key of cache.keys()) {
+        cache.delete(key);
+    }
+};
+class Unpack extends parse_js_1.Parser {
+    [ENDED] = false;
+    [CHECKED_CWD] = false;
+    [PENDING] = 0;
+    reservations = new path_reservations_js_1.PathReservations();
+    transform;
+    writable = true;
+    readable = false;
+    dirCache;
+    uid;
+    gid;
+    setOwner;
+    preserveOwner;
+    processGid;
+    processUid;
+    maxDepth;
+    forceChown;
+    win32;
+    newer;
+    keep;
+    noMtime;
+    preservePaths;
+    unlink;
+    cwd;
+    strip;
+    processUmask;
+    umask;
+    dmode;
+    fmode;
+    chmod;
+    constructor(opt = {}) {
+        opt.ondone = () => {
+            this[ENDED] = true;
+            this[MAYBECLOSE]();
+        };
+        super(opt);
+        this.transform = opt.transform;
+        this.dirCache = opt.dirCache || new Map();
+        this.chmod = !!opt.chmod;
+        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+            // need both or neither
+            if (typeof opt.uid !== 'number' ||
+                typeof opt.gid !== 'number') {
+                throw new TypeError('cannot set owner without number uid and gid');
+            }
+            if (opt.preserveOwner) {
+                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
+            }
+            this.uid = opt.uid;
+            this.gid = opt.gid;
+            this.setOwner = true;
+        }
+        else {
+            this.uid = undefined;
+            this.gid = undefined;
+            this.setOwner = false;
+        }
+        // default true for root
+        if (opt.preserveOwner === undefined &&
+            typeof opt.uid !== 'number') {
+            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
+        }
+        else {
+            this.preserveOwner = !!opt.preserveOwner;
+        }
+        this.processUid =
+            (this.preserveOwner || this.setOwner) && process.getuid ?
+                process.getuid()
+                : undefined;
+        this.processGid =
+            (this.preserveOwner || this.setOwner) && process.getgid ?
+                process.getgid()
+                : undefined;
+        // prevent excessively deep nesting of subfolders
+        // set to `Infinity` to remove this restriction
+        this.maxDepth =
+            typeof opt.maxDepth === 'number' ?
+                opt.maxDepth
+                : DEFAULT_MAX_DEPTH;
+        // mostly just for testing, but useful in some cases.
+        // Forcibly trigger a chown on every entry, no matter what
+        this.forceChown = opt.forceChown === true;
+        this.win32 = !!opt.win32 || isWindows;
+        this.on('entry', entry => this[ONENTRY](entry));
+    }
+    // a bad or damaged archive is a warning for Parser, but an error
+    // when extracting.  Mark those errors as unrecoverable, because
+    // the Unpack contract cannot be met.
+    warn(code, msg, data = {}) {
+        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
+            data.recoverable = false;
+        }
+        return super.warn(code, msg, data);
+    }
+    [MAYBECLOSE]() {
+        if (this[ENDED] && this[PENDING] === 0) {
+            this.emit('prefinish');
+            this.emit('finish');
+            this.emit('end');
+        }
+    }
+    [CHECKPATH](entry) {
+        const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
+        const parts = p.split('/');
+        if (this.strip) {
+            if (parts.length < this.strip) {
+                return false;
+            }
+            if (entry.type === 'Link') {
+                const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
+                if (linkparts.length >= this.strip) {
+                    entry.linkpath = linkparts.slice(this.strip).join('/');
+                }
+                else {
+                    return false;
+                }
+            }
+            parts.splice(0, this.strip);
+            entry.path = parts.join('/');
+        }
+        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
+            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
+                entry,
+                path: p,
+                depth: parts.length,
+                maxDepth: this.maxDepth,
+            });
+            return false;
+        }
+        if (!this.preservePaths) {
+            if (parts.includes('..') ||
+                /* c8 ignore next */
+                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
+                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
+                    entry,
+                    path: p,
+                });
+                return false;
+            }
+            // strip off the root
+            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
+            if (root) {
+                entry.path = String(stripped);
+                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
+                    entry,
+                    path: p,
+                });
+            }
+        }
+        if (node_path_1.default.isAbsolute(entry.path)) {
+            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
+        }
+        else {
+            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
+        }
+        // if we somehow ended up with a path that escapes the cwd, and we are
+        // not in preservePaths mode, then something is fishy!  This should have
+        // been prevented above, so ignore this for coverage.
+        /* c8 ignore start - defense in depth */
+        if (!this.preservePaths &&
+            typeof entry.absolute === 'string' &&
+            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
+            entry.absolute !== this.cwd) {
+            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
+                entry,
+                path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
+                resolvedPath: entry.absolute,
+                cwd: this.cwd,
+            });
+            return false;
+        }
+        /* c8 ignore stop */
+        // an archive can set properties on the extraction directory, but it
+        // may not replace the cwd with a different kind of thing entirely.
+        if (entry.absolute === this.cwd &&
+            entry.type !== 'Directory' &&
+            entry.type !== 'GNUDumpDir') {
+            return false;
+        }
+        // only encode : chars that aren't drive letter indicators
+        if (this.win32) {
+            const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
+            entry.absolute =
+                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
+            const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
+            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
+        }
+        return true;
+    }
+    [ONENTRY](entry) {
+        if (!this[CHECKPATH](entry)) {
+            return entry.resume();
+        }
+        node_assert_1.default.equal(typeof entry.absolute, 'string');
+        switch (entry.type) {
+            case 'Directory':
+            case 'GNUDumpDir':
+                if (entry.mode) {
+                    entry.mode = entry.mode | 0o700;
+                }
+            // eslint-disable-next-line no-fallthrough
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+            case 'Link':
+            case 'SymbolicLink':
+                return this[CHECKFS](entry);
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'FIFO':
+            default:
+                return this[UNSUPPORTED](entry);
+        }
+    }
+    [ONERROR](er, entry) {
+        // Cwd has to exist, or else nothing works. That's serious.
+        // Other errors are warnings, which raise the error in strict
+        // mode, but otherwise continue on.
+        if (er.name === 'CwdError') {
+            this.emit('error', er);
+        }
+        else {
+            this.warn('TAR_ENTRY_ERROR', er, { entry });
+            this[UNPEND]();
+            entry.resume();
+        }
+    }
+    [MKDIR](dir, mode, cb) {
+        (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
+            uid: this.uid,
+            gid: this.gid,
+            processUid: this.processUid,
+            processGid: this.processGid,
+            umask: this.processUmask,
+            preserve: this.preservePaths,
+            unlink: this.unlink,
+            cache: this.dirCache,
+            cwd: this.cwd,
+            mode: mode,
+        }, cb);
+    }
+    [DOCHOWN](entry) {
+        // in preserve owner mode, chown if the entry doesn't match process
+        // in set owner mode, chown if setting doesn't match process
+        return (this.forceChown ||
+            (this.preserveOwner &&
+                ((typeof entry.uid === 'number' &&
+                    entry.uid !== this.processUid) ||
+                    (typeof entry.gid === 'number' &&
+                        entry.gid !== this.processGid))) ||
+            (typeof this.uid === 'number' &&
+                this.uid !== this.processUid) ||
+            (typeof this.gid === 'number' && this.gid !== this.processGid));
+    }
+    [UID](entry) {
+        return uint32(this.uid, entry.uid, this.processUid);
+    }
+    [GID](entry) {
+        return uint32(this.gid, entry.gid, this.processGid);
+    }
+    [FILE](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const stream = new fsm.WriteStream(String(entry.absolute), {
+            // slight lie, but it can be numeric flags
+            flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
+            mode: mode,
+            autoClose: false,
+        });
+        stream.on('error', (er) => {
+            if (stream.fd) {
+                node_fs_1.default.close(stream.fd, () => { });
+            }
+            // flush all the data out so that we aren't left hanging
+            // if the error wasn't actually fatal.  otherwise the parse
+            // is blocked, and we never proceed.
+            stream.write = () => true;
+            this[ONERROR](er, entry);
+            fullyDone();
+        });
+        let actions = 1;
+        const done = (er) => {
+            if (er) {
+                /* c8 ignore start - we should always have a fd by now */
+                if (stream.fd) {
+                    node_fs_1.default.close(stream.fd, () => { });
+                }
+                /* c8 ignore stop */
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            if (--actions === 0) {
+                if (stream.fd !== undefined) {
+                    node_fs_1.default.close(stream.fd, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                        }
+                        else {
+                            this[UNPEND]();
+                        }
+                        fullyDone();
+                    });
+                }
+            }
+        };
+        stream.on('finish', () => {
+            // if futimes fails, try utimes
+            // if utimes fails, fail with the original error
+            // same for fchown/chown
+            const abs = String(entry.absolute);
+            const fd = stream.fd;
+            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
+                actions++;
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                node_fs_1.default.futimes(fd, atime, mtime, er => er ?
+                    node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
+                    : done());
+            }
+            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
+                actions++;
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                if (typeof uid === 'number' && typeof gid === 'number') {
+                    node_fs_1.default.fchown(fd, uid, gid, er => er ?
+                        node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
+                        : done());
+                }
+            }
+            done();
+        });
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => {
+                this[ONERROR](er, entry);
+                fullyDone();
+            });
+            entry.pipe(tx);
+        }
+        tx.pipe(stream);
+    }
+    [DIRECTORY](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        this[MKDIR](String(entry.absolute), mode, er => {
+            if (er) {
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            let actions = 1;
+            const done = () => {
+                if (--actions === 0) {
+                    fullyDone();
+                    this[UNPEND]();
+                    entry.resume();
+                }
+            };
+            if (entry.mtime && !this.noMtime) {
+                actions++;
+                node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
+            }
+            if (this[DOCHOWN](entry)) {
+                actions++;
+                node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
+            }
+            done();
+        });
+    }
+    [UNSUPPORTED](entry) {
+        entry.unsupported = true;
+        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
+        entry.resume();
+    }
+    [SYMLINK](entry, done) {
+        this[LINK](entry, String(entry.linkpath), 'symlink', done);
+    }
+    [HARDLINK](entry, done) {
+        const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
+        this[LINK](entry, linkpath, 'link', done);
+    }
+    [PEND]() {
+        this[PENDING]++;
+    }
+    [UNPEND]() {
+        this[PENDING]--;
+        this[MAYBECLOSE]();
+    }
+    [SKIP](entry) {
+        this[UNPEND]();
+        entry.resume();
+    }
+    // Check if we can reuse an existing filesystem entry safely and
+    // overwrite it, rather than unlinking and recreating
+    // Windows doesn't report a useful nlink, so we just never reuse entries
+    [ISREUSABLE](entry, st) {
+        return (entry.type === 'File' &&
+            !this.unlink &&
+            st.isFile() &&
+            st.nlink <= 1 &&
+            !isWindows);
+    }
+    // check if a thing is there, and if so, try to clobber it
+    [CHECKFS](entry) {
+        this[PEND]();
+        const paths = [entry.path];
+        if (entry.linkpath) {
+            paths.push(entry.linkpath);
+        }
+        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
+    }
+    [PRUNECACHE](entry) {
+        // if we are not creating a directory, and the path is in the dirCache,
+        // then that means we are about to delete the directory we created
+        // previously, and it is no longer going to be a directory, and neither
+        // is any of its children.
+        // If a symbolic link is encountered, all bets are off.  There is no
+        // reasonable way to sanitize the cache in such a way we will be able to
+        // avoid having filesystem collisions.  If this happens with a non-symlink
+        // entry, it'll just fail to unpack, but a symlink to a directory, using an
+        // 8.3 shortname or certain unicode attacks, can evade detection and lead
+        // to arbitrary writes to anywhere on the system.
+        if (entry.type === 'SymbolicLink') {
+            dropCache(this.dirCache);
+        }
+        else if (entry.type !== 'Directory') {
+            pruneCache(this.dirCache, String(entry.absolute));
+        }
+    }
+    [CHECKFS2](entry, fullyDone) {
+        this[PRUNECACHE](entry);
+        const done = (er) => {
+            this[PRUNECACHE](entry);
+            fullyDone(er);
+        };
+        const checkCwd = () => {
+            this[MKDIR](this.cwd, this.dmode, er => {
+                if (er) {
+                    this[ONERROR](er, entry);
+                    done();
+                    return;
+                }
+                this[CHECKED_CWD] = true;
+                start();
+            });
+        };
+        const start = () => {
+            if (entry.absolute !== this.cwd) {
+                const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
+                if (parent !== this.cwd) {
+                    return this[MKDIR](parent, this.dmode, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                            done();
+                            return;
+                        }
+                        afterMakeParent();
+                    });
+                }
+            }
+            afterMakeParent();
+        };
+        const afterMakeParent = () => {
+            node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
+                if (st &&
+                    (this.keep ||
+                        /* c8 ignore next */
+                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+                    this[SKIP](entry);
+                    done();
+                    return;
+                }
+                if (lstatEr || this[ISREUSABLE](entry, st)) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                if (st.isDirectory()) {
+                    if (entry.type === 'Directory') {
+                        const needChmod = this.chmod &&
+                            entry.mode &&
+                            (st.mode & 0o7777) !== entry.mode;
+                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
+                        if (!needChmod) {
+                            return afterChmod();
+                        }
+                        return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
+                    }
+                    // Not a dir entry, have to remove it.
+                    // NB: the only way to end up with an entry that is the cwd
+                    // itself, in such a way that == does not detect, is a
+                    // tricky windows absolute path with UNC or 8.3 parts (and
+                    // preservePaths:true, or else it will have been stripped).
+                    // In that case, the user has opted out of path protections
+                    // explicitly, so if they blow away the cwd, c'est la vie.
+                    if (entry.absolute !== this.cwd) {
+                        return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
+                    }
+                }
+                // not a dir, and not reusable
+                // don't remove if the cwd, we want that error
+                if (entry.absolute === this.cwd) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
+            });
+        };
+        if (this[CHECKED_CWD]) {
+            start();
+        }
+        else {
+            checkCwd();
+        }
+    }
+    [MAKEFS](er, entry, done) {
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        switch (entry.type) {
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+                return this[FILE](entry, done);
+            case 'Link':
+                return this[HARDLINK](entry, done);
+            case 'SymbolicLink':
+                return this[SYMLINK](entry, done);
+            case 'Directory':
+            case 'GNUDumpDir':
+                return this[DIRECTORY](entry, done);
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        // XXX: get the type ('symlink' or 'junction') for windows
+        node_fs_1.default[link](linkpath, String(entry.absolute), er => {
+            if (er) {
+                this[ONERROR](er, entry);
+            }
+            else {
+                this[UNPEND]();
+                entry.resume();
+            }
+            done();
+        });
+    }
+}
+exports.Unpack = Unpack;
+const callSync = (fn) => {
+    try {
+        return [null, fn()];
+    }
+    catch (er) {
+        return [er, null];
+    }
+};
+class UnpackSync extends Unpack {
+    sync = true;
+    [MAKEFS](er, entry) {
+        return super[MAKEFS](er, entry, () => { });
+    }
+    [CHECKFS](entry) {
+        this[PRUNECACHE](entry);
+        if (!this[CHECKED_CWD]) {
+            const er = this[MKDIR](this.cwd, this.dmode);
+            if (er) {
+                return this[ONERROR](er, entry);
+            }
+            this[CHECKED_CWD] = true;
+        }
+        // don't bother to make the parent if the current entry is the cwd,
+        // we've already checked it.
+        if (entry.absolute !== this.cwd) {
+            const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
+            if (parent !== this.cwd) {
+                const mkParent = this[MKDIR](parent, this.dmode);
+                if (mkParent) {
+                    return this[ONERROR](mkParent, entry);
+                }
+            }
+        }
+        const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
+        if (st &&
+            (this.keep ||
+                /* c8 ignore next */
+                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+            return this[SKIP](entry);
+        }
+        if (lstatEr || this[ISREUSABLE](entry, st)) {
+            return this[MAKEFS](null, entry);
+        }
+        if (st.isDirectory()) {
+            if (entry.type === 'Directory') {
+                const needChmod = this.chmod &&
+                    entry.mode &&
+                    (st.mode & 0o7777) !== entry.mode;
+                const [er] = needChmod ?
+                    callSync(() => {
+                        node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
+                    })
+                    : [];
+                return this[MAKEFS](er, entry);
+            }
+            // not a dir entry, have to remove it
+            const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
+            this[MAKEFS](er, entry);
+        }
+        // not a dir, and not reusable.
+        // don't remove if it's the cwd, since we want that error.
+        const [er] = entry.absolute === this.cwd ?
+            []
+            : callSync(() => unlinkFileSync(String(entry.absolute)));
+        this[MAKEFS](er, entry);
+    }
+    [FILE](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const oner = (er) => {
+            let closeError;
+            try {
+                node_fs_1.default.closeSync(fd);
+            }
+            catch (e) {
+                closeError = e;
+            }
+            if (er || closeError) {
+                this[ONERROR](er || closeError, entry);
+            }
+            done();
+        };
+        let fd;
+        try {
+            fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
+        }
+        catch (er) {
+            return oner(er);
+        }
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => this[ONERROR](er, entry));
+            entry.pipe(tx);
+        }
+        tx.on('data', (chunk) => {
+            try {
+                node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
+            }
+            catch (er) {
+                oner(er);
+            }
+        });
+        tx.on('end', () => {
+            let er = null;
+            // try both, falling futimes back to utimes
+            // if either fails, handle the first error
+            if (entry.mtime && !this.noMtime) {
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                try {
+                    node_fs_1.default.futimesSync(fd, atime, mtime);
+                }
+                catch (futimeser) {
+                    try {
+                        node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
+                    }
+                    catch (utimeser) {
+                        er = futimeser;
+                    }
+                }
+            }
+            if (this[DOCHOWN](entry)) {
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                try {
+                    node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
+                }
+                catch (fchowner) {
+                    try {
+                        node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
+                    }
+                    catch (chowner) {
+                        er = er || fchowner;
+                    }
+                }
+            }
+            oner(er);
+        });
+    }
+    [DIRECTORY](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        const er = this[MKDIR](String(entry.absolute), mode);
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        if (entry.mtime && !this.noMtime) {
+            try {
+                node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+        if (this[DOCHOWN](entry)) {
+            try {
+                node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
+            }
+            catch (er) { }
+        }
+        done();
+        entry.resume();
+    }
+    [MKDIR](dir, mode) {
+        try {
+            return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
+                uid: this.uid,
+                gid: this.gid,
+                processUid: this.processUid,
+                processGid: this.processGid,
+                umask: this.processUmask,
+                preserve: this.preservePaths,
+                unlink: this.unlink,
+                cache: this.dirCache,
+                cwd: this.cwd,
+                mode: mode,
+            });
+        }
+        catch (er) {
+            return er;
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        const ls = `${link}Sync`;
+        try {
+            node_fs_1.default[ls](linkpath, String(entry.absolute));
+            done();
+            entry.resume();
+        }
+        catch (er) {
+            return this[ONERROR](er, entry);
+        }
+    }
+}
+exports.UnpackSync = UnpackSync;
+//# sourceMappingURL=unpack.js.map
\ No newline at end of file
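
Note: the Unpack class added above is the streaming extraction sink used by the extract command. A minimal usage sketch, assuming the package is installed as `tar` and an `./archive.tar` exists (the paths and the warn listener here are illustrative only):

    // Pipe a tarball into an Unpack stream; warnings surface as 'warn' events.
    const fs = require('node:fs')
    const { Unpack } = require('tar')
    const unpack = new Unpack({ cwd: './dest' })
    unpack.on('warn', (code, message) => console.warn(code, message))
    fs.createReadStream('./archive.tar').pipe(unpack)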
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/update.js b/node_modules/pacote/node_modules/tar/dist/commonjs/update.js
new file mode 100644
index 0000000000000..7687896f4bfee
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/update.js
@@ -0,0 +1,33 @@
+"use strict";
+// tar -u
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.update = void 0;
+const make_command_js_1 = require("./make-command.js");
+const replace_js_1 = require("./replace.js");
+// just call tar.r with the filter and mtimeCache
+exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => {
+    replace_js_1.replace.validate?.(opt, entries);
+    mtimeFilter(opt);
+});
+const mtimeFilter = (opt) => {
+    const filter = opt.filter;
+    if (!opt.mtimeCache) {
+        opt.mtimeCache = new Map();
+    }
+    opt.filter =
+        filter ?
+            (path, stat) => filter(path, stat) &&
+                !(
+                /* c8 ignore start */
+                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                    (stat.mtime ?? 0))
+                /* c8 ignore stop */
+                )
+            : (path, stat) => !(
+            /* c8 ignore start */
+            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                (stat.mtime ?? 0))
+            /* c8 ignore stop */
+            );
+};
+//# sourceMappingURL=update.js.map
\ No newline at end of file
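
Note: update.js above implements `tar -u` by delegating to replace and wrapping the user's filter so an entry is only re-added when it is newer than what the archive already records. A hedged usage sketch, assuming `./backup.tar` already exists and `notes.txt` is a file under the cwd:

    // tar.u re-adds notes.txt only if it is newer than the copy in backup.tar.
    const tar = require('tar')
    tar.u({ file: 'backup.tar', cwd: '.' }, ['notes.txt'])
      .then(() => console.log('archive updated'))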
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js
new file mode 100644
index 0000000000000..f25502776e36a
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js
@@ -0,0 +1,31 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.warnMethod = void 0;
+const warnMethod = (self, code, message, data = {}) => {
+    if (self.file) {
+        data.file = self.file;
+    }
+    if (self.cwd) {
+        data.cwd = self.cwd;
+    }
+    data.code =
+        (message instanceof Error &&
+            message.code) ||
+            code;
+    data.tarCode = code;
+    if (!self.strict && data.recoverable !== false) {
+        if (message instanceof Error) {
+            data = Object.assign(message, data);
+            message = message.message;
+        }
+        self.emit('warn', code, message, data);
+    }
+    else if (message instanceof Error) {
+        self.emit('error', Object.assign(message, data));
+    }
+    else {
+        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
+    }
+};
+exports.warnMethod = warnMethod;
+//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
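
Note: warnMethod above routes recoverable problems to a 'warn' event (or to 'error' in strict mode), tagging them with `tarCode`. A hedged sketch of observing those warnings via the `onwarn` option, assuming `./suspicious.tar` exists:

    // onwarn receives (code, message, data); data.tarCode is set by warnMethod.
    const tar = require('tar')
    tar.x({
      file: 'suspicious.tar',
      cwd: './out',
      onwarn: (code, message, data) => console.warn(code, message, data.tarCode),
    })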
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js b/node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js
new file mode 100644
index 0000000000000..c0a4405812929
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js
@@ -0,0 +1,14 @@
+"use strict";
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.decode = exports.encode = void 0;
+const raw = ['|', '<', '>', '?', ':'];
+const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
+const toWin = new Map(raw.map((char, i) => [char, win[i]]));
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
+const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
+exports.encode = encode;
+const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
+exports.decode = decode;
+//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js
new file mode 100644
index 0000000000000..45b7efeb79502
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js
@@ -0,0 +1,689 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0;
+const fs_1 = __importDefault(require("fs"));
+const minipass_1 = require("minipass");
+const path_1 = __importDefault(require("path"));
+const header_js_1 = require("./header.js");
+const mode_fix_js_1 = require("./mode-fix.js");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+const options_js_1 = require("./options.js");
+const pax_js_1 = require("./pax.js");
+const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const warn_method_js_1 = require("./warn-method.js");
+const winchars = __importStar(require("./winchars.js"));
+const prefixPath = (path, prefix) => {
+    if (!prefix) {
+        return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path);
+    }
+    path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, '');
+    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path;
+};
+const maxReadSize = 16 * 1024 * 1024;
+const PROCESS = Symbol('process');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const HEADER = Symbol('header');
+const READ = Symbol('read');
+const LSTAT = Symbol('lstat');
+const ONLSTAT = Symbol('onlstat');
+const ONREAD = Symbol('onread');
+const ONREADLINK = Symbol('onreadlink');
+const OPENFILE = Symbol('openfile');
+const ONOPENFILE = Symbol('onopenfile');
+const CLOSE = Symbol('close');
+const MODE = Symbol('mode');
+const AWAITDRAIN = Symbol('awaitDrain');
+const ONDRAIN = Symbol('ondrain');
+const PREFIX = Symbol('prefix');
+class WriteEntry extends minipass_1.Minipass {
+    path;
+    portable;
+    myuid = (process.getuid && process.getuid()) || 0;
+    // until node has builtin pwnam functions, this'll have to do
+    myuser = process.env.USER || '';
+    maxReadSize;
+    linkCache;
+    statCache;
+    preservePaths;
+    cwd;
+    strict;
+    mtime;
+    noPax;
+    noMtime;
+    prefix;
+    fd;
+    blockLen = 0;
+    blockRemain = 0;
+    buf;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    offset = 0;
+    win32;
+    absolute;
+    header;
+    type;
+    linkpath;
+    stat;
+    onWriteEntry;
+    #hadError = false;
+    constructor(p, opt_ = {}) {
+        const opt = (0, options_js_1.dealias)(opt_);
+        super();
+        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p);
+        // suppress atime, ctime, uid, gid, uname, gname
+        this.portable = !!opt.portable;
+        this.maxReadSize = opt.maxReadSize || maxReadSize;
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.preservePaths = !!opt.preservePaths;
+        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd());
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.mtime = opt.mtime;
+        this.prefix =
+            opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined;
+        this.onWriteEntry = opt.onWriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.win32 = !!opt.win32 || process.platform === 'win32';
+        if (this.win32) {
+            // force the \ to / normalization, since we might not *actually*
+            // be on windows, but want \ to be considered a path separator.
+            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
+            p = p.replace(/\\/g, '/');
+        }
+        this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p));
+        if (this.path === '') {
+            this.path = './';
+        }
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        const cs = this.statCache.get(this.absolute);
+        if (cs) {
+            this[ONLSTAT](cs);
+        }
+        else {
+            this[LSTAT]();
+        }
+    }
+    warn(code, message, data = {}) {
+        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+    emit(ev, ...data) {
+        if (ev === 'error') {
+            this.#hadError = true;
+        }
+        return super.emit(ev, ...data);
+    }
+    [LSTAT]() {
+        fs_1.default.lstat(this.absolute, (er, stat) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONLSTAT](stat);
+        });
+    }
+    [ONLSTAT](stat) {
+        this.statCache.set(this.absolute, stat);
+        this.stat = stat;
+        if (!stat.isFile()) {
+            stat.size = 0;
+        }
+        this.type = getType(stat);
+        this.emit('stat', stat);
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        switch (this.type) {
+            case 'File':
+                return this[FILE]();
+            case 'Directory':
+                return this[DIRECTORY]();
+            case 'SymbolicLink':
+                return this[SYMLINK]();
+            // unsupported types are ignored.
+            default:
+                return this.end();
+        }
+    }
+    [MODE](mode) {
+        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [HEADER]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot write header before stat');
+        }
+        /* c8 ignore stop */
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.onWriteEntry?.(this);
+        this.header = new header_js_1.Header({
+            path: this[PREFIX](this.path),
+            // only apply the prefix to hard links.
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this[MODE](this.stat.mode),
+            uid: this.portable ? undefined : this.stat.uid,
+            gid: this.portable ? undefined : this.stat.gid,
+            size: this.stat.size,
+            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
+            /* c8 ignore next */
+            type: this.type === 'Unsupported' ? undefined : this.type,
+            uname: this.portable ? undefined
+                : this.stat.uid === this.myuid ? this.myuser
+                    : '',
+            atime: this.portable ? undefined : this.stat.atime,
+            ctime: this.portable ? undefined : this.stat.ctime,
+        });
+        if (this.header.encode() && !this.noPax) {
+            super.write(new pax_js_1.Pax({
+                atime: this.portable ? undefined : this.header.atime,
+                ctime: this.portable ? undefined : this.header.ctime,
+                gid: this.portable ? undefined : this.header.gid,
+                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.header.size,
+                uid: this.portable ? undefined : this.header.uid,
+                uname: this.portable ? undefined : this.header.uname,
+                dev: this.portable ? undefined : this.stat.dev,
+                ino: this.portable ? undefined : this.stat.ino,
+                nlink: this.portable ? undefined : this.stat.nlink,
+            }).encode());
+        }
+        const block = this.header?.block;
+        /* c8 ignore start */
+        if (!block) {
+            throw new Error('failed to encode header');
+        }
+        /* c8 ignore stop */
+        super.write(block);
+    }
+    [DIRECTORY]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create directory entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.path.slice(-1) !== '/') {
+            this.path += '/';
+        }
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [SYMLINK]() {
+        fs_1.default.readlink(this.absolute, (er, linkpath) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADLINK](linkpath);
+        });
+    }
+    [ONREADLINK](linkpath) {
+        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath);
+        this[HEADER]();
+        this.end();
+    }
+    [HARDLINK](linkpath) {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create link entry without stat');
+        }
+        /* c8 ignore stop */
+        this.type = 'Link';
+        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath));
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [FILE]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create file entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.stat.nlink > 1) {
+            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
+            const linkpath = this.linkCache.get(linkKey);
+            if (linkpath?.indexOf(this.cwd) === 0) {
+                return this[HARDLINK](linkpath);
+            }
+            this.linkCache.set(linkKey, this.absolute);
+        }
+        this[HEADER]();
+        if (this.stat.size === 0) {
+            return this.end();
+        }
+        this[OPENFILE]();
+    }
+    [OPENFILE]() {
+        fs_1.default.open(this.absolute, 'r', (er, fd) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONOPENFILE](fd);
+        });
+    }
+    [ONOPENFILE](fd) {
+        this.fd = fd;
+        if (this.#hadError) {
+            return this[CLOSE]();
+        }
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('should stat before calling onopenfile');
+        }
+        /* c8 ignore stop */
+        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
+        this.blockRemain = this.blockLen;
+        const bufLen = Math.min(this.blockLen, this.maxReadSize);
+        this.buf = Buffer.allocUnsafe(bufLen);
+        this.offset = 0;
+        this.pos = 0;
+        this.remain = this.stat.size;
+        this.length = this.buf.length;
+        this[READ]();
+    }
+    [READ]() {
+        const { fd, buf, offset, length, pos } = this;
+        if (fd === undefined || buf === undefined) {
+            throw new Error('cannot read file without first opening');
+        }
+        fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+            if (er) {
+                // ignoring the error from close(2) is a bad practice, but at
+                // this point we already have an error, don't need another one
+                return this[CLOSE](() => this.emit('error', er));
+            }
+            this[ONREAD](bytesRead);
+        });
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs_1.default.close(this.fd, cb);
+    }
+    [ONREAD](bytesRead) {
+        if (bytesRead <= 0 && this.remain > 0) {
+            const er = Object.assign(new Error('encountered unexpected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        if (bytesRead > this.remain) {
+            const er = Object.assign(new Error('did not encounter expected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('should have created buffer prior to reading');
+        }
+        /* c8 ignore stop */
+        // null out the rest of the buffer, if we could fit the block padding
+        // at the end of this loop, we've incremented bytesRead and this.remain
+        // to be incremented up to the blockRemain level, as if we had expected
+        // to get a null-padded file, and read it until the end.  then we will
+        // decrement both remain and blockRemain by bytesRead, and know that we
+        // reached the expected EOF, without any null buffer to append.
+        if (bytesRead === this.remain) {
+            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
+                this.buf[i + this.offset] = 0;
+                bytesRead++;
+                this.remain++;
+            }
+        }
+        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
+            this.buf
+            : this.buf.subarray(this.offset, this.offset + bytesRead);
+        const flushed = this.write(chunk);
+        if (!flushed) {
+            this[AWAITDRAIN](() => this[ONDRAIN]());
+        }
+        else {
+            this[ONDRAIN]();
+        }
+    }
+    [AWAITDRAIN](cb) {
+        this.once('drain', cb);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        if (this.blockRemain < chunk.length) {
+            const er = Object.assign(new Error('writing more data than expected'), {
+                path: this.absolute,
+            });
+            return this.emit('error', er);
+        }
+        this.remain -= chunk.length;
+        this.blockRemain -= chunk.length;
+        this.pos += chunk.length;
+        this.offset += chunk.length;
+        return super.write(chunk, null, cb);
+    }
+    [ONDRAIN]() {
+        if (!this.remain) {
+            if (this.blockRemain) {
+                super.write(Buffer.alloc(this.blockRemain));
+            }
+            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('buffer lost somehow in ONDRAIN');
+        }
+        /* c8 ignore stop */
+        if (this.offset >= this.length) {
+            // if we only have a smaller bit left to read, alloc a smaller buffer
+            // otherwise, keep it the same length it was before.
+            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
+            this.offset = 0;
+        }
+        this.length = this.buf.length - this.offset;
+        this[READ]();
+    }
+}
+exports.WriteEntry = WriteEntry;
+class WriteEntrySync extends WriteEntry {
+    sync = true;
+    [LSTAT]() {
+        this[ONLSTAT](fs_1.default.lstatSync(this.absolute));
+    }
+    [SYMLINK]() {
+        this[ONREADLINK](fs_1.default.readlinkSync(this.absolute));
+    }
+    [OPENFILE]() {
+        this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r'));
+    }
+    [READ]() {
+        let threw = true;
+        try {
+            const { fd, buf, offset, length, pos } = this;
+            /* c8 ignore start */
+            if (fd === undefined || buf === undefined) {
+                throw new Error('fd and buf must be set in READ method');
+            }
+            /* c8 ignore stop */
+            const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos);
+            this[ONREAD](bytesRead);
+            threw = false;
+        }
+        finally {
+            // ignoring the error from close(2) is a bad practice, but at
+            // this point we already have an error, don't need another one
+            if (threw) {
+                try {
+                    this[CLOSE](() => { });
+                }
+                catch (er) { }
+            }
+        }
+    }
+    [AWAITDRAIN](cb) {
+        cb();
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs_1.default.closeSync(this.fd);
+        cb();
+    }
+}
+exports.WriteEntrySync = WriteEntrySync;
+class WriteEntryTar extends minipass_1.Minipass {
+    blockLen = 0;
+    blockRemain = 0;
+    buf = 0;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    preservePaths;
+    portable;
+    strict;
+    noPax;
+    noMtime;
+    readEntry;
+    type;
+    prefix;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    header;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    size;
+    onWriteEntry;
+    warn(code, message, data = {}) {
+        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+    constructor(readEntry, opt_ = {}) {
+        const opt = (0, options_js_1.dealias)(opt_);
+        super();
+        this.preservePaths = !!opt.preservePaths;
+        this.portable = !!opt.portable;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.onWriteEntry = opt.onWriteEntry;
+        this.readEntry = readEntry;
+        const { type } = readEntry;
+        /* c8 ignore start */
+        if (type === 'Unsupported') {
+            throw new Error('writing entry that should be ignored');
+        }
+        /* c8 ignore stop */
+        this.type = type;
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.prefix = opt.prefix;
+        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path);
+        this.mode =
+            readEntry.mode !== undefined ?
+                this[MODE](readEntry.mode)
+                : undefined;
+        this.uid = this.portable ? undefined : readEntry.uid;
+        this.gid = this.portable ? undefined : readEntry.gid;
+        this.uname = this.portable ? undefined : readEntry.uname;
+        this.gname = this.portable ? undefined : readEntry.gname;
+        this.size = readEntry.size;
+        this.mtime =
+            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
+        this.atime = this.portable ? undefined : readEntry.atime;
+        this.ctime = this.portable ? undefined : readEntry.ctime;
+        this.linkpath =
+            readEntry.linkpath !== undefined ?
+                (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath)
+                : undefined;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.remain = readEntry.size;
+        this.blockRemain = readEntry.startBlockSize;
+        this.onWriteEntry?.(this);
+        this.header = new header_js_1.Header({
+            path: this[PREFIX](this.path),
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this.mode,
+            uid: this.portable ? undefined : this.uid,
+            gid: this.portable ? undefined : this.gid,
+            size: this.size,
+            mtime: this.noMtime ? undefined : this.mtime,
+            type: this.type,
+            uname: this.portable ? undefined : this.uname,
+            atime: this.portable ? undefined : this.atime,
+            ctime: this.portable ? undefined : this.ctime,
+        });
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        if (this.header.encode() && !this.noPax) {
+            super.write(new pax_js_1.Pax({
+                atime: this.portable ? undefined : this.atime,
+                ctime: this.portable ? undefined : this.ctime,
+                gid: this.portable ? undefined : this.gid,
+                mtime: this.noMtime ? undefined : this.mtime,
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.size,
+                uid: this.portable ? undefined : this.uid,
+                uname: this.portable ? undefined : this.uname,
+                dev: this.portable ? undefined : this.readEntry.dev,
+                ino: this.portable ? undefined : this.readEntry.ino,
+                nlink: this.portable ? undefined : this.readEntry.nlink,
+            }).encode());
+        }
+        const b = this.header?.block;
+        /* c8 ignore start */
+        if (!b)
+            throw new Error('failed to encode header');
+        /* c8 ignore stop */
+        super.write(b);
+        readEntry.pipe(this);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [MODE](mode) {
+        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        const writeLen = chunk.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        this.blockRemain -= writeLen;
+        return super.write(chunk, cb);
+    }
+    end(chunk, encoding, cb) {
+        if (this.blockRemain) {
+            super.write(Buffer.alloc(this.blockRemain));
+        }
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding ?? 'utf8');
+        }
+        if (cb)
+            this.once('finish', cb);
+        chunk ? super.end(chunk, cb) : super.end(cb);
+        /* c8 ignore stop */
+        return this;
+    }
+}
+exports.WriteEntryTar = WriteEntryTar;
+const getType = (stat) => stat.isFile() ? 'File'
+    : stat.isDirectory() ? 'Directory'
+        : stat.isSymbolicLink() ? 'SymbolicLink'
+            : 'Unsupported';
+//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
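
Note: WriteEntry above streams file contents in 512-byte-aligned blocks; [ONOPENFILE] rounds the file size up to whole blocks and [ONDRAIN] pads the tail with zeros. A small sketch of that block math:

    // File data is padded out to a whole number of 512-byte blocks.
    const blockLenFor = size => 512 * Math.ceil(size / 512)
    console.log(blockLenFor(0))    // 0
    console.log(blockLenFor(1))    // 512
    console.log(blockLenFor(513))  // 1024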
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/create.js b/node_modules/pacote/node_modules/tar/dist/esm/create.js
new file mode 100644
index 0000000000000..512a9911d70d5
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/create.js
@@ -0,0 +1,77 @@
+import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
+import path from 'node:path';
+import { list } from './list.js';
+import { makeCommand } from './make-command.js';
+import { Pack, PackSync } from './pack.js';
+const createFileSync = (opt, files) => {
+    const p = new PackSync(opt);
+    const stream = new WriteStreamSync(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const createFile = (opt, files) => {
+    const p = new Pack(opt);
+    const stream = new WriteStream(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    const promise = new Promise((res, rej) => {
+        stream.on('error', rej);
+        stream.on('close', res);
+        p.on('error', rej);
+    });
+    addFilesAsync(p, files);
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            list({
+                file: path.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await list({
+                file: path.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => {
+                    p.add(entry);
+                },
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+const createSync = (opt, files) => {
+    const p = new PackSync(opt);
+    addFilesSync(p, files);
+    return p;
+};
+const createAsync = (opt, files) => {
+    const p = new Pack(opt);
+    addFilesAsync(p, files);
+    return p;
+};
+export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
+    if (!files?.length) {
+        throw new TypeError('no paths specified to add to archive');
+    }
+});
+//# sourceMappingURL=create.js.map
\ No newline at end of file
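
Note: in create.js above, addFilesSync/addFilesAsync treat a path starting with `@` as an existing archive whose entries are re-added. A hedged ESM usage sketch, assuming `./src` exists and `./old.tar` is a readable tarball:

    // Packs ./src and also copies every entry out of old.tar into combined.tar.
    import { create } from 'tar'
    await create({ file: 'combined.tar', cwd: '.' }, ['src', '@old.tar'])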
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js b/node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js
new file mode 100644
index 0000000000000..289a066b8e031
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js
@@ -0,0 +1,14 @@
+export class CwdError extends Error {
+    path;
+    code;
+    syscall = 'chdir';
+    constructor(path, code) {
+        super(`${code}: Cannot cd into '${path}'`);
+        this.path = path;
+        this.code = code;
+    }
+    get name() {
+        return 'CwdError';
+    }
+}
+//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/extract.js b/node_modules/pacote/node_modules/tar/dist/esm/extract.js
new file mode 100644
index 0000000000000..2274feef26e78
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/extract.js
@@ -0,0 +1,49 @@
+// tar -x
+import * as fsm from '@isaacs/fs-minipass';
+import fs from 'node:fs';
+import { filesFilter } from './list.js';
+import { makeCommand } from './make-command.js';
+import { Unpack, UnpackSync } from './unpack.js';
+const extractFileSync = (opt) => {
+    const u = new UnpackSync(opt);
+    const file = opt.file;
+    const stat = fs.statSync(file);
+    // This trades a zero-byte read() syscall for a stat
+    // However, it will usually result in less memory allocation
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const stream = new fsm.ReadStreamSync(file, {
+        readSize: readSize,
+        size: stat.size,
+    });
+    stream.pipe(u);
+};
+const extractFile = (opt, _) => {
+    const u = new Unpack(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        u.on('error', reject);
+        u.on('close', resolve);
+        // This trades a zero-byte read() syscall for a stat
+        // However, it will usually result in less memory allocation
+        fs.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(u);
+            }
+        });
+    });
+    return p;
+};
+export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => {
+    if (files?.length)
+        filesFilter(opt, files);
+});
+//# sourceMappingURL=extract.js.map
\ No newline at end of file
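
Note: extract.js above stats the archive first so it can stream it with a sized ReadStream, and a trailing path list installs a filter via filesFilter. A hedged ESM usage sketch, assuming `./release.tar` exists and contains `package/README.md`:

    // With `file` set and no `sync`, extract returns a promise; the list limits
    // extraction to the named entries.
    import { extract } from 'tar'
    await extract({ file: 'release.tar', cwd: './out' }, ['package/README.md'])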
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js
new file mode 100644
index 0000000000000..2c7f3e8b28fda
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js
@@ -0,0 +1,23 @@
+// Get the appropriate flag to use for creating files
+// We use fmap on Windows platforms for files less than
+// 512kb.  This is a fairly low limit, but avoids making
+// things slower in some cases.  Since most of what this
+// library is used for is extracting tarballs of many
+// relatively small files in npm packages and the like,
+// it can be a big boost on Windows platforms.
+import fs from 'fs';
+const platform = process.env.__FAKE_PLATFORM__ || process.platform;
+const isWindows = platform === 'win32';
+/* c8 ignore start */
+const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants;
+const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
+    fs.constants.UV_FS_O_FILEMAP ||
+    0;
+/* c8 ignore stop */
+const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
+const fMapLimit = 512 * 1024;
+const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
+export const getWriteFlag = !fMapEnabled ?
+    () => 'w'
+    : (size) => (size < fMapLimit ? fMapFlag : 'w');
+//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
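
Note: get-write-flag.js above opts small files into the Windows filemap optimization when UV_FS_O_FILEMAP is available, and falls back to plain 'w' otherwise. A standalone sketch of that selection logic (names here are illustrative, not the module's exports):

    // Small files on Windows get the FILEMAP open flags; everything else gets 'w'.
    import fs from 'node:fs'
    const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants
    const fMapEnabled = process.platform === 'win32' && !!UV_FS_O_FILEMAP
    const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY
    const writeFlagFor = size =>
      fMapEnabled && size < 512 * 1024 ? fMapFlag : 'w'
    console.log(writeFlagFor(1024))             // numeric flags on Windows, 'w' elsewhere
    console.log(writeFlagFor(10 * 1024 * 1024)) // 'w'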
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/header.js b/node_modules/pacote/node_modules/tar/dist/esm/header.js
new file mode 100644
index 0000000000000..e15192b14b16e
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/header.js
@@ -0,0 +1,279 @@
+// parse a 512-byte header block to a data object, or vice-versa
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+import { posix as pathModule } from 'node:path';
+import * as large from './large-numbers.js';
+import * as types from './types.js';
+export class Header {
+    cksumValid = false;
+    needPax = false;
+    nullBlock = false;
+    block;
+    path;
+    mode;
+    uid;
+    gid;
+    size;
+    cksum;
+    #type = 'Unsupported';
+    linkpath;
+    uname;
+    gname;
+    devmaj = 0;
+    devmin = 0;
+    atime;
+    ctime;
+    mtime;
+    charset;
+    comment;
+    constructor(data, off = 0, ex, gex) {
+        if (Buffer.isBuffer(data)) {
+            this.decode(data, off || 0, ex, gex);
+        }
+        else if (data) {
+            this.#slurp(data);
+        }
+    }
+    decode(buf, off, ex, gex) {
+        if (!off) {
+            off = 0;
+        }
+        if (!buf || !(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        this.path = decString(buf, off, 100);
+        this.mode = decNumber(buf, off + 100, 8);
+        this.uid = decNumber(buf, off + 108, 8);
+        this.gid = decNumber(buf, off + 116, 8);
+        this.size = decNumber(buf, off + 124, 12);
+        this.mtime = decDate(buf, off + 136, 12);
+        this.cksum = decNumber(buf, off + 148, 12);
+        // if we have extended or global extended headers, apply them now
+        // See https://github.com/npm/node-tar/pull/187
+        // Apply global before local, so it overrides
+        if (gex)
+            this.#slurp(gex, true);
+        if (ex)
+            this.#slurp(ex);
+        // old tar versions marked dirs as a file with a trailing /
+        const t = decString(buf, off + 156, 1);
+        if (types.isCode(t)) {
+            this.#type = t || '0';
+        }
+        if (this.#type === '0' && this.path.slice(-1) === '/') {
+            this.#type = '5';
+        }
+        // tar implementations sometimes incorrectly put the stat(dir).size
+        // as the size in the tarball, even though Directory entries are
+        // not able to have any body at all.  In the very rare chance that
+        // it actually DOES have a body, we weren't going to do anything with
+        // it anyway, and it'll just be a warning about an invalid header.
+        if (this.#type === '5') {
+            this.size = 0;
+        }
+        this.linkpath = decString(buf, off + 157, 100);
+        if (buf.subarray(off + 257, off + 265).toString() ===
+            'ustar\u000000') {
+            this.uname = decString(buf, off + 265, 32);
+            this.gname = decString(buf, off + 297, 32);
+            /* c8 ignore start */
+            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
+            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+            /* c8 ignore stop */
+            if (buf[off + 475] !== 0) {
+                // definitely a prefix, definitely >130 chars.
+                const prefix = decString(buf, off + 345, 155);
+                this.path = prefix + '/' + this.path;
+            }
+            else {
+                const prefix = decString(buf, off + 345, 130);
+                if (prefix) {
+                    this.path = prefix + '/' + this.path;
+                }
+                this.atime = decDate(buf, off + 476, 12);
+                this.ctime = decDate(buf, off + 488, 12);
+            }
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksumValid = sum === this.cksum;
+        if (this.cksum === undefined && sum === 8 * 0x20) {
+            this.nullBlock = true;
+        }
+    }
+    #slurp(ex, gex = false) {
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex) ||
+                (k === 'linkpath' && gex) ||
+                k === 'global');
+        })));
+    }
+    encode(buf, off = 0) {
+        if (!buf) {
+            buf = this.block = Buffer.alloc(512);
+        }
+        if (this.#type === 'Unsupported') {
+            this.#type = '0';
+        }
+        if (!(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        const prefixSize = this.ctime || this.atime ? 130 : 155;
+        const split = splitPrefix(this.path || '', prefixSize);
+        const path = split[0];
+        const prefix = split[1];
+        this.needPax = !!split[2];
+        this.needPax = encString(buf, off, 100, path) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 124, 12, this.size) || this.needPax;
+        this.needPax =
+            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
+        buf[off + 156] = this.#type.charCodeAt(0);
+        this.needPax =
+            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
+        buf.write('ustar\u000000', off + 257, 8);
+        this.needPax =
+            encString(buf, off + 265, 32, this.uname) || this.needPax;
+        this.needPax =
+            encString(buf, off + 297, 32, this.gname) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
+        this.needPax =
+            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
+        if (buf[off + 475] !== 0) {
+            this.needPax =
+                encString(buf, off + 345, 155, prefix) || this.needPax;
+        }
+        else {
+            this.needPax =
+                encString(buf, off + 345, 130, prefix) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 476, 12, this.atime) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksum = sum;
+        encNumber(buf, off + 148, 8, this.cksum);
+        this.cksumValid = true;
+        return this.needPax;
+    }
+    get type() {
+        return (this.#type === 'Unsupported' ?
+            this.#type
+            : types.name.get(this.#type));
+    }
+    get typeKey() {
+        return this.#type;
+    }
+    set type(type) {
+        const c = String(types.code.get(type));
+        if (types.isCode(c) || c === 'Unsupported') {
+            this.#type = c;
+        }
+        else if (types.isCode(type)) {
+            this.#type = type;
+        }
+        else {
+            throw new TypeError('invalid entry type: ' + type);
+        }
+    }
+}
+const splitPrefix = (p, prefixSize) => {
+    const pathSize = 100;
+    let pp = p;
+    let prefix = '';
+    let ret = undefined;
+    const root = pathModule.parse(p).root || '.';
+    if (Buffer.byteLength(pp) < pathSize) {
+        ret = [pp, prefix, false];
+    }
+    else {
+        // first set prefix to the dir, and path to the base
+        prefix = pathModule.dirname(pp);
+        pp = pathModule.basename(pp);
+        do {
+            if (Buffer.byteLength(pp) <= pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // both fit!
+                ret = [pp, prefix, false];
+            }
+            else if (Buffer.byteLength(pp) > pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // prefix fits in prefix, but path doesn't fit in path
+                ret = [pp.slice(0, pathSize - 1), prefix, true];
+            }
+            else {
+                // make path take a bit from prefix
+                pp = pathModule.join(pathModule.basename(prefix), pp);
+                prefix = pathModule.dirname(prefix);
+            }
+        } while (prefix !== root && ret === undefined);
+        // at this point, found no resolution, just truncate
+        if (!ret) {
+            ret = [p.slice(0, pathSize - 1), '', true];
+        }
+    }
+    return ret;
+};
+const decString = (buf, off, size) => buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*/, '');
+const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
+const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
+const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
+    large.parse(buf.subarray(off, off + size))
+    : decSmallNumber(buf, off, size);
+const nanUndef = (value) => (isNaN(value) ? undefined : value);
+const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*$/, '')
+    .trim(), 8));
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+    12: 0o77777777777,
+    8: 0o7777777,
+};
+const encNumber = (buf, off, size, num) => num === undefined ? false
+    : num > MAXNUM[size] || num < 0 ?
+        (large.encode(num, buf.subarray(off, off + size)), true)
+        : (encSmallNumber(buf, off, size, num), false);
+const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
+const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
+const padOctal = (str, size) => (str.length === size - 1 ?
+    str
+    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
+const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0');
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
+    str.length !== Buffer.byteLength(str) || str.length > size));
+//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/index.js b/node_modules/pacote/node_modules/tar/dist/esm/index.js
new file mode 100644
index 0000000000000..1bac6415c8d73
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/index.js
@@ -0,0 +1,20 @@
+export * from './create.js';
+export { create as c } from './create.js';
+export * from './extract.js';
+export { extract as x } from './extract.js';
+export * from './header.js';
+export * from './list.js';
+export { list as t } from './list.js';
+// classes
+export * from './pack.js';
+export * from './parse.js';
+export * from './pax.js';
+export * from './read-entry.js';
+export * from './replace.js';
+export { replace as r } from './replace.js';
+export * as types from './types.js';
+export * from './unpack.js';
+export * from './update.js';
+export { update as u } from './update.js';
+export * from './write-entry.js';
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js b/node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js
new file mode 100644
index 0000000000000..4f2f7e5f14fc1
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js
@@ -0,0 +1,94 @@
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive.
+export const encode = (num, buf) => {
+    if (!Number.isSafeInteger(num)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('cannot encode number outside of javascript safe integer range');
+    }
+    else if (num < 0) {
+        encodeNegative(num, buf);
+    }
+    else {
+        encodePositive(num, buf);
+    }
+    return buf;
+};
+const encodePositive = (num, buf) => {
+    buf[0] = 0x80;
+    for (var i = buf.length; i > 1; i--) {
+        buf[i - 1] = num & 0xff;
+        num = Math.floor(num / 0x100);
+    }
+};
+const encodeNegative = (num, buf) => {
+    buf[0] = 0xff;
+    var flipped = false;
+    num = num * -1;
+    for (var i = buf.length; i > 1; i--) {
+        var byte = num & 0xff;
+        num = Math.floor(num / 0x100);
+        if (flipped) {
+            buf[i - 1] = onesComp(byte);
+        }
+        else if (byte === 0) {
+            buf[i - 1] = 0;
+        }
+        else {
+            flipped = true;
+            buf[i - 1] = twosComp(byte);
+        }
+    }
+};
+export const parse = (buf) => {
+    const pre = buf[0];
+    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
+        : pre === 0xff ? twos(buf)
+            : null;
+    if (value === null) {
+        throw Error('invalid base256 encoding');
+    }
+    if (!Number.isSafeInteger(value)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('parsed number outside of javascript safe integer range');
+    }
+    return value;
+};
+const twos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    var flipped = false;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        var f;
+        if (flipped) {
+            f = onesComp(byte);
+        }
+        else if (byte === 0) {
+            f = byte;
+        }
+        else {
+            flipped = true;
+            f = twosComp(byte);
+        }
+        if (f !== 0) {
+            sum -= f * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const pos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        if (byte !== 0) {
+            sum += byte * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const onesComp = (byte) => (0xff ^ byte) & 0xff;
+const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
+//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/list.js b/node_modules/pacote/node_modules/tar/dist/esm/list.js
new file mode 100644
index 0000000000000..f49068400b6c9
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/list.js
@@ -0,0 +1,106 @@
+// tar -t
+import * as fsm from '@isaacs/fs-minipass';
+import fs from 'node:fs';
+import { dirname, parse } from 'path';
+import { makeCommand } from './make-command.js';
+import { Parser } from './parse.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+const onReadEntryFunction = (opt) => {
+    const onReadEntry = opt.onReadEntry;
+    opt.onReadEntry =
+        onReadEntry ?
+            e => {
+                onReadEntry(e);
+                e.resume();
+            }
+            : e => e.resume();
+};
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+export const filesFilter = (opt, files) => {
+    const map = new Map(files.map(f => [stripTrailingSlashes(f), true]));
+    const filter = opt.filter;
+    const mapHas = (file, r = '') => {
+        const root = r || parse(file).root || '.';
+        let ret;
+        if (file === root)
+            ret = false;
+        else {
+            const m = map.get(file);
+            if (m !== undefined) {
+                ret = m;
+            }
+            else {
+                ret = mapHas(dirname(file), root);
+            }
+        }
+        map.set(file, ret);
+        return ret;
+    };
+    opt.filter =
+        filter ?
+            (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file))
+            : file => mapHas(stripTrailingSlashes(file));
+};
+const listFileSync = (opt) => {
+    const p = new Parser(opt);
+    const file = opt.file;
+    let fd;
+    try {
+        const stat = fs.statSync(file);
+        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+        if (stat.size < readSize) {
+            p.end(fs.readFileSync(file));
+        }
+        else {
+            let pos = 0;
+            const buf = Buffer.allocUnsafe(readSize);
+            fd = fs.openSync(file, 'r');
+            while (pos < stat.size) {
+                const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
+                pos += bytesRead;
+                p.write(buf.subarray(0, bytesRead));
+            }
+            p.end();
+        }
+    }
+    finally {
+        if (typeof fd === 'number') {
+            try {
+                fs.closeSync(fd);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+    }
+};
+const listFile = (opt, _files) => {
+    const parse = new Parser(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        parse.on('error', reject);
+        parse.on('end', resolve);
+        fs.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(parse);
+            }
+        });
+    });
+    return p;
+};
+export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => {
+    if (files?.length)
+        filesFilter(opt, files);
+    if (!opt.noResume)
+        onReadEntryFunction(opt);
+});
+//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/make-command.js b/node_modules/pacote/node_modules/tar/dist/esm/make-command.js
new file mode 100644
index 0000000000000..f2f737bca78fd
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/make-command.js
@@ -0,0 +1,57 @@
+import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js';
+export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
+    return Object.assign((opt_ = [], entries, cb) => {
+        if (Array.isArray(opt_)) {
+            entries = opt_;
+            opt_ = {};
+        }
+        if (typeof entries === 'function') {
+            cb = entries;
+            entries = undefined;
+        }
+        if (!entries) {
+            entries = [];
+        }
+        else {
+            entries = Array.from(entries);
+        }
+        const opt = dealias(opt_);
+        validate?.(opt, entries);
+        if (isSyncFile(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncFile(opt, entries);
+        }
+        else if (isAsyncFile(opt)) {
+            const p = asyncFile(opt, entries);
+            // weirdness to make TS happy
+            const c = cb ? cb : undefined;
+            return c ? p.then(() => c(), c) : p;
+        }
+        else if (isSyncNoFile(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncNoFile(opt, entries);
+        }
+        else if (isAsyncNoFile(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback only supported with file option');
+            }
+            return asyncNoFile(opt, entries);
+            /* c8 ignore start */
+        }
+        else {
+            throw new Error('impossible options??');
+        }
+        /* c8 ignore stop */
+    }, {
+        syncFile,
+        asyncFile,
+        syncNoFile,
+        asyncNoFile,
+        validate,
+    });
+};
+//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/mkdir.js b/node_modules/pacote/node_modules/tar/dist/esm/mkdir.js
new file mode 100644
index 0000000000000..13498ef0082f0
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/mkdir.js
@@ -0,0 +1,201 @@
+import { chownr, chownrSync } from 'chownr';
+import fs from 'fs';
+import { mkdirp, mkdirpSync } from 'mkdirp';
+import path from 'node:path';
+import { CwdError } from './cwd-error.js';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+import { SymlinkError } from './symlink-error.js';
+const cGet = (cache, key) => cache.get(normalizeWindowsPath(key));
+const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val);
+const checkCwd = (dir, cb) => {
+    fs.stat(dir, (er, st) => {
+        if (er || !st.isDirectory()) {
+            er = new CwdError(dir, er?.code || 'ENOTDIR');
+        }
+        cb(er);
+    });
+};
+/**
+ * Wrapper around mkdirp for tar's needs.
+ *
+ * The main purpose is to avoid creating directories if we know that
+ * they already exist (and track which ones exist for this purpose),
+ * and prevent entries from being extracted into symlinked folders,
+ * if `preservePaths` is not set.
+ */
+export const mkdir = (dir, opt, cb) => {
+    dir = normalizeWindowsPath(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o0700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = normalizeWindowsPath(opt.cwd);
+    const done = (er, created) => {
+        if (er) {
+            cb(er);
+        }
+        else {
+            cSet(cache, dir, true);
+            if (created && doChown) {
+                chownr(created, uid, gid, er => done(er));
+            }
+            else if (needChmod) {
+                fs.chmod(dir, mode, cb);
+            }
+            else {
+                cb();
+            }
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        return checkCwd(dir, done);
+    }
+    if (preserve) {
+        return mkdirp(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
+        done);
+    }
+    const sub = normalizeWindowsPath(path.relative(cwd, dir));
+    const parts = sub.split('/');
+    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
+};
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+    if (!parts.length) {
+        return cb(null, created);
+    }
+    const p = parts.shift();
+    const part = normalizeWindowsPath(path.resolve(base + '/' + p));
+    if (cGet(cache, part)) {
+        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+};
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
+    if (er) {
+        fs.lstat(part, (statEr, st) => {
+            if (statEr) {
+                statEr.path =
+                    statEr.path && normalizeWindowsPath(statEr.path);
+                cb(statEr);
+            }
+            else if (st.isDirectory()) {
+                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+            }
+            else if (unlink) {
+                fs.unlink(part, er => {
+                    if (er) {
+                        return cb(er);
+                    }
+                    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+                });
+            }
+            else if (st.isSymbolicLink()) {
+                return cb(new SymlinkError(part, part + '/' + parts.join('/')));
+            }
+            else {
+                cb(er);
+            }
+        });
+    }
+    else {
+        created = created || part;
+        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+};
+const checkCwdSync = (dir) => {
+    let ok = false;
+    let code = undefined;
+    try {
+        ok = fs.statSync(dir).isDirectory();
+    }
+    catch (er) {
+        code = er?.code;
+    }
+    finally {
+        if (!ok) {
+            throw new CwdError(dir, code ?? 'ENOTDIR');
+        }
+    }
+};
+export const mkdirSync = (dir, opt) => {
+    dir = normalizeWindowsPath(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = normalizeWindowsPath(opt.cwd);
+    const done = (created) => {
+        cSet(cache, dir, true);
+        if (created && doChown) {
+            chownrSync(created, uid, gid);
+        }
+        if (needChmod) {
+            fs.chmodSync(dir, mode);
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        checkCwdSync(cwd);
+        return done();
+    }
+    if (preserve) {
+        return done(mkdirpSync(dir, mode) ?? undefined);
+    }
+    const sub = normalizeWindowsPath(path.relative(cwd, dir));
+    const parts = sub.split('/');
+    let created = undefined;
+    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
+        part = normalizeWindowsPath(path.resolve(part));
+        if (cGet(cache, part)) {
+            continue;
+        }
+        try {
+            fs.mkdirSync(part, mode);
+            created = created || part;
+            cSet(cache, part, true);
+        }
+        catch (er) {
+            const st = fs.lstatSync(part);
+            if (st.isDirectory()) {
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (unlink) {
+                fs.unlinkSync(part);
+                fs.mkdirSync(part, mode);
+                created = created || part;
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (st.isSymbolicLink()) {
+                return new SymlinkError(part, part + '/' + parts.join('/'));
+            }
+        }
+    }
+    return done(created);
+};
+//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js b/node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js
new file mode 100644
index 0000000000000..5fd3bb88c1cb2
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js
@@ -0,0 +1,25 @@
+export const modeFix = (mode, isDir, portable) => {
+    mode &= 0o7777;
+    // in portable mode, use the minimum reasonable umask
+    // if this system creates files with 0o664 by default
+    // (as some linux distros do), then we'll write the
+    // archive with 0o644 instead.  Also, don't ever create
+    // a file that is not readable/writable by the owner.
+    if (portable) {
+        mode = (mode | 0o600) & ~0o22;
+    }
+    // if dirs are readable, then they should be listable
+    if (isDir) {
+        if (mode & 0o400) {
+            mode |= 0o100;
+        }
+        if (mode & 0o40) {
+            mode |= 0o10;
+        }
+        if (mode & 0o4) {
+            mode |= 0o1;
+        }
+    }
+    return mode;
+};
+//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js
new file mode 100644
index 0000000000000..94e5095476d6e
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js
@@ -0,0 +1,13 @@
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+const normalizeCache = Object.create(null);
+const { hasOwnProperty } = Object.prototype;
+export const normalizeUnicode = (s) => {
+    if (!hasOwnProperty.call(normalizeCache, s)) {
+        normalizeCache[s] = s.normalize('NFD');
+    }
+    return normalizeCache[s];
+};
+//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js
new file mode 100644
index 0000000000000..2d97d2b884e62
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js
@@ -0,0 +1,9 @@
+// on windows, either \ or / are valid directory separators.
+// on unix, \ is a valid character in filenames.
+// so, on windows, and only on windows, we replace all \ chars with /,
+// so that we can use / as our one and only directory separator char.
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+export const normalizeWindowsPath = platform !== 'win32' ?
+    (p) => p
+    : (p) => p && p.replace(/\\/g, '/');
+//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/options.js b/node_modules/pacote/node_modules/tar/dist/esm/options.js
new file mode 100644
index 0000000000000..a006d36c23c92
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/options.js
@@ -0,0 +1,54 @@
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+const argmap = new Map([
+    ['C', 'cwd'],
+    ['f', 'file'],
+    ['z', 'gzip'],
+    ['P', 'preservePaths'],
+    ['U', 'unlink'],
+    ['strip-components', 'strip'],
+    ['stripComponents', 'strip'],
+    ['keep-newer', 'newer'],
+    ['keepNewer', 'newer'],
+    ['keep-newer-files', 'newer'],
+    ['keepNewerFiles', 'newer'],
+    ['k', 'keep'],
+    ['keep-existing', 'keep'],
+    ['keepExisting', 'keep'],
+    ['m', 'noMtime'],
+    ['no-mtime', 'noMtime'],
+    ['p', 'preserveOwner'],
+    ['L', 'follow'],
+    ['h', 'follow'],
+    ['onentry', 'onReadEntry'],
+]);
+export const isSyncFile = (o) => !!o.sync && !!o.file;
+export const isAsyncFile = (o) => !o.sync && !!o.file;
+export const isSyncNoFile = (o) => !!o.sync && !o.file;
+export const isAsyncNoFile = (o) => !o.sync && !o.file;
+export const isSync = (o) => !!o.sync;
+export const isAsync = (o) => !o.sync;
+export const isFile = (o) => !!o.file;
+export const isNoFile = (o) => !o.file;
+const dealiasKey = (k) => {
+    const d = argmap.get(k);
+    if (d)
+        return d;
+    return k;
+};
+export const dealias = (opt = {}) => {
+    if (!opt)
+        return {};
+    const result = {};
+    for (const [key, v] of Object.entries(opt)) {
+        // TS doesn't know that aliases are going to always be the same type
+        const k = dealiasKey(key);
+        result[k] = v;
+    }
+    // affordance for deprecated noChmod -> chmod
+    if (result.chmod === undefined && result.noChmod === false) {
+        result.chmod = true;
+    }
+    delete result.noChmod;
+    return result;
+};
+//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/pack.js b/node_modules/pacote/node_modules/tar/dist/esm/pack.js
new file mode 100644
index 0000000000000..f59f32f94201f
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/pack.js
@@ -0,0 +1,445 @@
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
+import fs from 'fs';
+import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js';
+export class PackJob {
+    path;
+    absolute;
+    entry;
+    stat;
+    readdir;
+    pending = false;
+    ignore = false;
+    piped = false;
+    constructor(path, absolute) {
+        this.path = path || './';
+        this.absolute = absolute;
+    }
+}
+import { Minipass } from 'minipass';
+import * as zlib from 'minizlib';
+import { Yallist } from 'yallist';
+import { ReadEntry } from './read-entry.js';
+import { warnMethod, } from './warn-method.js';
+const EOF = Buffer.alloc(1024);
+const ONSTAT = Symbol('onStat');
+const ENDED = Symbol('ended');
+const QUEUE = Symbol('queue');
+const CURRENT = Symbol('current');
+const PROCESS = Symbol('process');
+const PROCESSING = Symbol('processing');
+const PROCESSJOB = Symbol('processJob');
+const JOBS = Symbol('jobs');
+const JOBDONE = Symbol('jobDone');
+const ADDFSENTRY = Symbol('addFSEntry');
+const ADDTARENTRY = Symbol('addTarEntry');
+const STAT = Symbol('stat');
+const READDIR = Symbol('readdir');
+const ONREADDIR = Symbol('onreaddir');
+const PIPE = Symbol('pipe');
+const ENTRY = Symbol('entry');
+const ENTRYOPT = Symbol('entryOpt');
+const WRITEENTRYCLASS = Symbol('writeEntryClass');
+const WRITE = Symbol('write');
+const ONDRAIN = Symbol('ondrain');
+import path from 'path';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+export class Pack extends Minipass {
+    opt;
+    cwd;
+    maxReadSize;
+    preservePaths;
+    strict;
+    noPax;
+    prefix;
+    linkCache;
+    statCache;
+    file;
+    portable;
+    zip;
+    readdirCache;
+    noDirRecurse;
+    follow;
+    noMtime;
+    mtime;
+    filter;
+    jobs;
+    [WRITEENTRYCLASS];
+    onWriteEntry;
+    [QUEUE];
+    [JOBS] = 0;
+    [PROCESSING] = false;
+    [ENDED] = false;
+    constructor(opt = {}) {
+        //@ts-ignore
+        super();
+        this.opt = opt;
+        this.file = opt.file || '';
+        this.cwd = opt.cwd || process.cwd();
+        this.maxReadSize = opt.maxReadSize;
+        this.preservePaths = !!opt.preservePaths;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.prefix = normalizeWindowsPath(opt.prefix || '');
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.readdirCache = opt.readdirCache || new Map();
+        this.onWriteEntry = opt.onWriteEntry;
+        this[WRITEENTRYCLASS] = WriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        this.portable = !!opt.portable;
+        if (opt.gzip || opt.brotli) {
+            if (opt.gzip && opt.brotli) {
+                throw new TypeError('gzip and brotli are mutually exclusive');
+            }
+            if (opt.gzip) {
+                if (typeof opt.gzip !== 'object') {
+                    opt.gzip = {};
+                }
+                if (this.portable) {
+                    opt.gzip.portable = true;
+                }
+                this.zip = new zlib.Gzip(opt.gzip);
+            }
+            if (opt.brotli) {
+                if (typeof opt.brotli !== 'object') {
+                    opt.brotli = {};
+                }
+                this.zip = new zlib.BrotliCompress(opt.brotli);
+            }
+            /* c8 ignore next */
+            if (!this.zip)
+                throw new Error('impossible');
+            const zip = this.zip;
+            zip.on('data', chunk => super.write(chunk));
+            zip.on('end', () => super.end());
+            zip.on('drain', () => this[ONDRAIN]());
+            this.on('resume', () => zip.resume());
+        }
+        else {
+            this.on('drain', this[ONDRAIN]);
+        }
+        this.noDirRecurse = !!opt.noDirRecurse;
+        this.follow = !!opt.follow;
+        this.noMtime = !!opt.noMtime;
+        if (opt.mtime)
+            this.mtime = opt.mtime;
+        this.filter =
+            typeof opt.filter === 'function' ? opt.filter : () => true;
+        this[QUEUE] = new Yallist();
+        this[JOBS] = 0;
+        this.jobs = Number(opt.jobs) || 4;
+        this[PROCESSING] = false;
+        this[ENDED] = false;
+    }
+    [WRITE](chunk) {
+        return super.write(chunk);
+    }
+    add(path) {
+        this.write(path);
+        return this;
+    }
+    end(path, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof path === 'function') {
+            cb = path;
+            path = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (path) {
+            this.add(path);
+        }
+        this[ENDED] = true;
+        this[PROCESS]();
+        /* c8 ignore next */
+        if (cb)
+            cb();
+        return this;
+    }
+    write(path) {
+        if (this[ENDED]) {
+            throw new Error('write after end');
+        }
+        if (path instanceof ReadEntry) {
+            this[ADDTARENTRY](path);
+        }
+        else {
+            this[ADDFSENTRY](path);
+        }
+        return this.flowing;
+    }
+    [ADDTARENTRY](p) {
+        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path));
+        // in this case, we don't have to wait for the stat
+        if (!this.filter(p.path, p)) {
+            p.resume();
+        }
+        else {
+            const job = new PackJob(p.path, absolute);
+            job.entry = new WriteEntryTar(p, this[ENTRYOPT](job));
+            job.entry.on('end', () => this[JOBDONE](job));
+            this[JOBS] += 1;
+            this[QUEUE].push(job);
+        }
+        this[PROCESS]();
+    }
+    [ADDFSENTRY](p) {
+        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p));
+        this[QUEUE].push(new PackJob(p, absolute));
+        this[PROCESS]();
+    }
+    [STAT](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        const stat = this.follow ? 'stat' : 'lstat';
+        fs[stat](job.absolute, (er, stat) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                this.emit('error', er);
+            }
+            else {
+                this[ONSTAT](job, stat);
+            }
+        });
+    }
+    [ONSTAT](job, stat) {
+        this.statCache.set(job.absolute, stat);
+        job.stat = stat;
+        // now we have the stat, we can filter it.
+        if (!this.filter(job.path, stat)) {
+            job.ignore = true;
+        }
+        this[PROCESS]();
+    }
+    [READDIR](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        fs.readdir(job.absolute, (er, entries) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADDIR](job, entries);
+        });
+    }
+    [ONREADDIR](job, entries) {
+        this.readdirCache.set(job.absolute, entries);
+        job.readdir = entries;
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        if (this[PROCESSING]) {
+            return;
+        }
+        this[PROCESSING] = true;
+        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
+            this[PROCESSJOB](w.value);
+            if (w.value.ignore) {
+                const p = w.next;
+                this[QUEUE].removeNode(w);
+                w.next = p;
+            }
+        }
+        this[PROCESSING] = false;
+        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+            if (this.zip) {
+                this.zip.end(EOF);
+            }
+            else {
+                super.write(EOF);
+                super.end();
+            }
+        }
+    }
+    get [CURRENT]() {
+        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
+    }
+    [JOBDONE](_job) {
+        this[QUEUE].shift();
+        this[JOBS] -= 1;
+        this[PROCESS]();
+    }
+    [PROCESSJOB](job) {
+        if (job.pending) {
+            return;
+        }
+        if (job.entry) {
+            if (job === this[CURRENT] && !job.piped) {
+                this[PIPE](job);
+            }
+            return;
+        }
+        if (!job.stat) {
+            const sc = this.statCache.get(job.absolute);
+            if (sc) {
+                this[ONSTAT](job, sc);
+            }
+            else {
+                this[STAT](job);
+            }
+        }
+        if (!job.stat) {
+            return;
+        }
+        // filtered out!
+        if (job.ignore) {
+            return;
+        }
+        if (!this.noDirRecurse &&
+            job.stat.isDirectory() &&
+            !job.readdir) {
+            const rc = this.readdirCache.get(job.absolute);
+            if (rc) {
+                this[ONREADDIR](job, rc);
+            }
+            else {
+                this[READDIR](job);
+            }
+            if (!job.readdir) {
+                return;
+            }
+        }
+        // we know it doesn't have an entry, because that got checked above
+        job.entry = this[ENTRY](job);
+        if (!job.entry) {
+            job.ignore = true;
+            return;
+        }
+        if (job === this[CURRENT] && !job.piped) {
+            this[PIPE](job);
+        }
+    }
+    [ENTRYOPT](job) {
+        return {
+            onwarn: (code, msg, data) => this.warn(code, msg, data),
+            noPax: this.noPax,
+            cwd: this.cwd,
+            absolute: job.absolute,
+            preservePaths: this.preservePaths,
+            maxReadSize: this.maxReadSize,
+            strict: this.strict,
+            portable: this.portable,
+            linkCache: this.linkCache,
+            statCache: this.statCache,
+            noMtime: this.noMtime,
+            mtime: this.mtime,
+            prefix: this.prefix,
+            onWriteEntry: this.onWriteEntry,
+        };
+    }
+    [ENTRY](job) {
+        this[JOBS] += 1;
+        try {
+            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
+            return e
+                .on('end', () => this[JOBDONE](job))
+                .on('error', er => this.emit('error', er));
+        }
+        catch (er) {
+            this.emit('error', er);
+        }
+    }
+    [ONDRAIN]() {
+        if (this[CURRENT] && this[CURRENT].entry) {
+            this[CURRENT].entry.resume();
+        }
+    }
+    // like .pipe() but using super, because our write() is special
+    [PIPE](job) {
+        job.piped = true;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        const source = job.entry;
+        const zip = this.zip;
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                if (!zip.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                if (!super.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+    }
+    pause() {
+        if (this.zip) {
+            this.zip.pause();
+        }
+        return super.pause();
+    }
+    warn(code, message, data = {}) {
+        warnMethod(this, code, message, data);
+    }
+}
+export class PackSync extends Pack {
+    sync = true;
+    constructor(opt) {
+        super(opt);
+        this[WRITEENTRYCLASS] = WriteEntrySync;
+    }
+    // pause/resume are no-ops in sync streams.
+    pause() { }
+    resume() { }
+    [STAT](job) {
+        const stat = this.follow ? 'statSync' : 'lstatSync';
+        this[ONSTAT](job, fs[stat](job.absolute));
+    }
+    [READDIR](job) {
+        this[ONREADDIR](job, fs.readdirSync(job.absolute));
+    }
+    // gotta get it all in this tick
+    [PIPE](job) {
+        const source = job.entry;
+        const zip = this.zip;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('Cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                zip.write(chunk);
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                super[WRITE](chunk);
+            });
+        }
+    }
+}
+//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/package.json b/node_modules/pacote/node_modules/tar/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/parse.js b/node_modules/pacote/node_modules/tar/dist/esm/parse.js
new file mode 100644
index 0000000000000..cce430479cd0c
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/parse.js
@@ -0,0 +1,595 @@
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in.  We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet this can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry.  The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
+import { EventEmitter as EE } from 'events';
+import { BrotliDecompress, Unzip } from 'minizlib';
+import { Yallist } from 'yallist';
+import { Header } from './header.js';
+import { Pax } from './pax.js';
+import { ReadEntry } from './read-entry.js';
+import { warnMethod, } from './warn-method.js';
+const maxMetaEntrySize = 1024 * 1024;
+const gzipHeader = Buffer.from([0x1f, 0x8b]);
+const STATE = Symbol('state');
+const WRITEENTRY = Symbol('writeEntry');
+const READENTRY = Symbol('readEntry');
+const NEXTENTRY = Symbol('nextEntry');
+const PROCESSENTRY = Symbol('processEntry');
+const EX = Symbol('extendedHeader');
+const GEX = Symbol('globalExtendedHeader');
+const META = Symbol('meta');
+const EMITMETA = Symbol('emitMeta');
+const BUFFER = Symbol('buffer');
+const QUEUE = Symbol('queue');
+const ENDED = Symbol('ended');
+const EMITTEDEND = Symbol('emittedEnd');
+const EMIT = Symbol('emit');
+const UNZIP = Symbol('unzip');
+const CONSUMECHUNK = Symbol('consumeChunk');
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
+const CONSUMEBODY = Symbol('consumeBody');
+const CONSUMEMETA = Symbol('consumeMeta');
+const CONSUMEHEADER = Symbol('consumeHeader');
+const CONSUMING = Symbol('consuming');
+const BUFFERCONCAT = Symbol('bufferConcat');
+const MAYBEEND = Symbol('maybeEnd');
+const WRITING = Symbol('writing');
+const ABORTED = Symbol('aborted');
+const DONE = Symbol('onDone');
+const SAW_VALID_ENTRY = Symbol('sawValidEntry');
+const SAW_NULL_BLOCK = Symbol('sawNullBlock');
+const SAW_EOF = Symbol('sawEOF');
+const CLOSESTREAM = Symbol('closeStream');
+const noop = () => true;
+export class Parser extends EE {
+    file;
+    strict;
+    maxMetaEntrySize;
+    filter;
+    brotli;
+    writable = true;
+    readable = false;
+    [QUEUE] = new Yallist();
+    [BUFFER];
+    [READENTRY];
+    [WRITEENTRY];
+    [STATE] = 'begin';
+    [META] = '';
+    [EX];
+    [GEX];
+    [ENDED] = false;
+    [UNZIP];
+    [ABORTED] = false;
+    [SAW_VALID_ENTRY];
+    [SAW_NULL_BLOCK] = false;
+    [SAW_EOF] = false;
+    [WRITING] = false;
+    [CONSUMING] = false;
+    [EMITTEDEND] = false;
+    constructor(opt = {}) {
+        super();
+        this.file = opt.file || '';
+        // these BADARCHIVE errors can't be detected early. listen on DONE.
+        this.on(DONE, () => {
+            if (this[STATE] === 'begin' ||
+                this[SAW_VALID_ENTRY] === false) {
+                // either less than 1 block of data, or all entries were invalid.
+                // Either way, probably not even a tarball.
+                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
+            }
+        });
+        if (opt.ondone) {
+            this.on(DONE, opt.ondone);
+        }
+        else {
+            this.on(DONE, () => {
+                this.emit('prefinish');
+                this.emit('finish');
+                this.emit('end');
+            });
+        }
+        this.strict = !!opt.strict;
+        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
+        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
+        // Unlike gzip, brotli doesn't have any magic bytes to identify it
+        // Users need to explicitly tell us they're extracting a brotli file
+        // Or we infer from the file extension
+        const isTBR = opt.file &&
+            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
+        // if it's a tbr file it MIGHT be brotli, but we don't know until
+        // we look at it and verify it's not a valid tar file.
+        this.brotli =
+            !opt.gzip && opt.brotli !== undefined ? opt.brotli
+                : isTBR ? undefined
+                    : false;
+        // have to set this so that streams are ok piping into it
+        this.on('end', () => this[CLOSESTREAM]());
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        if (typeof opt.onReadEntry === 'function') {
+            this.on('entry', opt.onReadEntry);
+        }
+    }
+    warn(code, message, data = {}) {
+        warnMethod(this, code, message, data);
+    }
+    [CONSUMEHEADER](chunk, position) {
+        if (this[SAW_VALID_ENTRY] === undefined) {
+            this[SAW_VALID_ENTRY] = false;
+        }
+        let header;
+        try {
+            header = new Header(chunk, position, this[EX], this[GEX]);
+        }
+        catch (er) {
+            return this.warn('TAR_ENTRY_INVALID', er);
+        }
+        if (header.nullBlock) {
+            if (this[SAW_NULL_BLOCK]) {
+                this[SAW_EOF] = true;
+                // ending an archive with no entries.  pointless, but legal.
+                if (this[STATE] === 'begin') {
+                    this[STATE] = 'header';
+                }
+                this[EMIT]('eof');
+            }
+            else {
+                this[SAW_NULL_BLOCK] = true;
+                this[EMIT]('nullBlock');
+            }
+        }
+        else {
+            this[SAW_NULL_BLOCK] = false;
+            if (!header.cksumValid) {
+                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
+            }
+            else if (!header.path) {
+                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
+            }
+            else {
+                const type = header.type;
+                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
+                        header,
+                    });
+                }
+                else if (!/^(Symbolic)?Link$/.test(type) &&
+                    !/^(Global)?ExtendedHeader$/.test(type) &&
+                    header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
+                        header,
+                    });
+                }
+                else {
+                    const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX]));
+                    // we do this for meta & ignored entries as well, because they
+                    // are still valid tar, or else we wouldn't know to ignore them
+                    if (!this[SAW_VALID_ENTRY]) {
+                        if (entry.remain) {
+                            // this might be the one!
+                            const onend = () => {
+                                if (!entry.invalid) {
+                                    this[SAW_VALID_ENTRY] = true;
+                                }
+                            };
+                            entry.on('end', onend);
+                        }
+                        else {
+                            this[SAW_VALID_ENTRY] = true;
+                        }
+                    }
+                    if (entry.meta) {
+                        if (entry.size > this.maxMetaEntrySize) {
+                            entry.ignore = true;
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = 'ignore';
+                            entry.resume();
+                        }
+                        else if (entry.size > 0) {
+                            this[META] = '';
+                            entry.on('data', c => (this[META] += c));
+                            this[STATE] = 'meta';
+                        }
+                    }
+                    else {
+                        this[EX] = undefined;
+                        entry.ignore =
+                            entry.ignore || !this.filter(entry.path, entry);
+                        if (entry.ignore) {
+                            // probably valid, just not something we care about
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = entry.remain ? 'ignore' : 'header';
+                            entry.resume();
+                        }
+                        else {
+                            if (entry.remain) {
+                                this[STATE] = 'body';
+                            }
+                            else {
+                                this[STATE] = 'header';
+                                entry.end();
+                            }
+                            if (!this[READENTRY]) {
+                                this[QUEUE].push(entry);
+                                this[NEXTENTRY]();
+                            }
+                            else {
+                                this[QUEUE].push(entry);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    [CLOSESTREAM]() {
+        queueMicrotask(() => this.emit('close'));
+    }
+    [PROCESSENTRY](entry) {
+        let go = true;
+        if (!entry) {
+            this[READENTRY] = undefined;
+            go = false;
+        }
+        else if (Array.isArray(entry)) {
+            const [ev, ...args] = entry;
+            this.emit(ev, ...args);
+        }
+        else {
+            this[READENTRY] = entry;
+            this.emit('entry', entry);
+            if (!entry.emittedEnd) {
+                entry.on('end', () => this[NEXTENTRY]());
+                go = false;
+            }
+        }
+        return go;
+    }
+    [NEXTENTRY]() {
+        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
+        if (!this[QUEUE].length) {
+            // At this point, there's nothing in the queue, but we may have an
+            // entry which is being consumed (readEntry).
+            // If we don't, then we definitely can handle more data.
+            // If we do, and either it's flowing, or it has never had any data
+            // written to it, then it needs more.
+            // The only other possibility is that it has returned false from a
+            // write() call, so we wait for the next drain to continue.
+            const re = this[READENTRY];
+            const drainNow = !re || re.flowing || re.size === re.remain;
+            if (drainNow) {
+                if (!this[WRITING]) {
+                    this.emit('drain');
+                }
+            }
+            else {
+                re.once('drain', () => this.emit('drain'));
+            }
+        }
+    }
+    [CONSUMEBODY](chunk, position) {
+        // write up to but no more than writeEntry.blockRemain
+        const entry = this[WRITEENTRY];
+        /* c8 ignore start */
+        if (!entry) {
+            throw new Error('attempt to consume body without entry??');
+        }
+        const br = entry.blockRemain ?? 0;
+        /* c8 ignore stop */
+        const c = br >= chunk.length && position === 0 ?
+            chunk
+            : chunk.subarray(position, position + br);
+        entry.write(c);
+        if (!entry.blockRemain) {
+            this[STATE] = 'header';
+            this[WRITEENTRY] = undefined;
+            entry.end();
+        }
+        return c.length;
+    }
+    [CONSUMEMETA](chunk, position) {
+        const entry = this[WRITEENTRY];
+        const ret = this[CONSUMEBODY](chunk, position);
+        // if we finished, then the entry is reset
+        if (!this[WRITEENTRY] && entry) {
+            this[EMITMETA](entry);
+        }
+        return ret;
+    }
+    [EMIT](ev, data, extra) {
+        if (!this[QUEUE].length && !this[READENTRY]) {
+            this.emit(ev, data, extra);
+        }
+        else {
+            this[QUEUE].push([ev, data, extra]);
+        }
+    }
+    [EMITMETA](entry) {
+        this[EMIT]('meta', this[META]);
+        switch (entry.type) {
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this[EX] = Pax.parse(this[META], this[EX], false);
+                break;
+            case 'GlobalExtendedHeader':
+                this[GEX] = Pax.parse(this[META], this[GEX], true);
+                break;
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath': {
+                const ex = this[EX] ?? Object.create(null);
+                this[EX] = ex;
+                ex.path = this[META].replace(/\0.*/, '');
+                break;
+            }
+            case 'NextFileHasLongLinkpath': {
+                const ex = this[EX] || Object.create(null);
+                this[EX] = ex;
+                ex.linkpath = this[META].replace(/\0.*/, '');
+                break;
+            }
+            /* c8 ignore start */
+            default:
+                throw new Error('unknown meta: ' + entry.type);
+            /* c8 ignore stop */
+        }
+    }
+    abort(error) {
+        this[ABORTED] = true;
+        this.emit('abort', error);
+        // always throws, even in non-strict mode
+        this.warn('TAR_ABORT', error, { recoverable: false });
+    }
+    write(chunk, encoding, cb) {
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, 
+            /* c8 ignore next */
+            typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        if (this[ABORTED]) {
+            /* c8 ignore next */
+            cb?.();
+            return false;
+        }
+        // first write, might be gzipped
+        const needSniff = this[UNZIP] === undefined ||
+            (this.brotli === undefined && this[UNZIP] === false);
+        if (needSniff && chunk) {
+            if (this[BUFFER]) {
+                chunk = Buffer.concat([this[BUFFER], chunk]);
+                this[BUFFER] = undefined;
+            }
+            if (chunk.length < gzipHeader.length) {
+                this[BUFFER] = chunk;
+                /* c8 ignore next */
+                cb?.();
+                return true;
+            }
+            // look for gzip header
+            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
+                if (chunk[i] !== gzipHeader[i]) {
+                    this[UNZIP] = false;
+                }
+            }
+            const maybeBrotli = this.brotli === undefined;
+            if (this[UNZIP] === false && maybeBrotli) {
+                // read the first header to see if it's a valid tar file. If so,
+                // we can safely assume that it's not actually brotli, despite the
+                // .tbr or .tar.br file extension.
+                // if we ended before getting a full chunk, yes, def brotli
+                if (chunk.length < 512) {
+                    if (this[ENDED]) {
+                        this.brotli = true;
+                    }
+                    else {
+                        this[BUFFER] = chunk;
+                        /* c8 ignore next */
+                        cb?.();
+                        return true;
+                    }
+                }
+                else {
+                    // if it's tar, it's pretty reliably not brotli, chances of
+                    // that happening are astronomical.
+                    try {
+                        new Header(chunk.subarray(0, 512));
+                        this.brotli = false;
+                    }
+                    catch (_) {
+                        this.brotli = true;
+                    }
+                }
+            }
+            if (this[UNZIP] === undefined ||
+                (this[UNZIP] === false && this.brotli)) {
+                const ended = this[ENDED];
+                this[ENDED] = false;
+                this[UNZIP] =
+                    this[UNZIP] === undefined ?
+                        new Unzip({})
+                        : new BrotliDecompress({});
+                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
+                this[UNZIP].on('error', er => this.abort(er));
+                this[UNZIP].on('end', () => {
+                    this[ENDED] = true;
+                    this[CONSUMECHUNK]();
+                });
+                this[WRITING] = true;
+                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
+                this[WRITING] = false;
+                cb?.();
+                return ret;
+            }
+        }
+        this[WRITING] = true;
+        if (this[UNZIP]) {
+            this[UNZIP].write(chunk);
+        }
+        else {
+            this[CONSUMECHUNK](chunk);
+        }
+        this[WRITING] = false;
+        // return false if there's a queue, or if the current entry isn't flowing
+        const ret = this[QUEUE].length ? false
+            : this[READENTRY] ? this[READENTRY].flowing
+                : true;
+        // if we have no queue, then that means a clogged READENTRY
+        if (!ret && !this[QUEUE].length) {
+            this[READENTRY]?.once('drain', () => this.emit('drain'));
+        }
+        /* c8 ignore next */
+        cb?.();
+        return ret;
+    }
+    [BUFFERCONCAT](c) {
+        if (c && !this[ABORTED]) {
+            this[BUFFER] =
+                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
+        }
+    }
+    [MAYBEEND]() {
+        if (this[ENDED] &&
+            !this[EMITTEDEND] &&
+            !this[ABORTED] &&
+            !this[CONSUMING]) {
+            this[EMITTEDEND] = true;
+            const entry = this[WRITEENTRY];
+            if (entry && entry.blockRemain) {
+                // truncated, likely a damaged file
+                const have = this[BUFFER] ? this[BUFFER].length : 0;
+                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
+                if (this[BUFFER]) {
+                    entry.write(this[BUFFER]);
+                }
+                entry.end();
+            }
+            this[EMIT](DONE);
+        }
+    }
+    [CONSUMECHUNK](chunk) {
+        if (this[CONSUMING] && chunk) {
+            this[BUFFERCONCAT](chunk);
+        }
+        else if (!chunk && !this[BUFFER]) {
+            this[MAYBEEND]();
+        }
+        else if (chunk) {
+            this[CONSUMING] = true;
+            if (this[BUFFER]) {
+                this[BUFFERCONCAT](chunk);
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            else {
+                this[CONSUMECHUNKSUB](chunk);
+            }
+            while (this[BUFFER] &&
+                this[BUFFER]?.length >= 512 &&
+                !this[ABORTED] &&
+                !this[SAW_EOF]) {
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            this[CONSUMING] = false;
+        }
+        if (!this[BUFFER] || this[ENDED]) {
+            this[MAYBEEND]();
+        }
+    }
+    [CONSUMECHUNKSUB](chunk) {
+        // we know that we are in CONSUMING mode, so anything written goes into
+        // the buffer.  Advance the position and put any remainder in the buffer.
+        let position = 0;
+        const length = chunk.length;
+        while (position + 512 <= length &&
+            !this[ABORTED] &&
+            !this[SAW_EOF]) {
+            switch (this[STATE]) {
+                case 'begin':
+                case 'header':
+                    this[CONSUMEHEADER](chunk, position);
+                    position += 512;
+                    break;
+                case 'ignore':
+                case 'body':
+                    position += this[CONSUMEBODY](chunk, position);
+                    break;
+                case 'meta':
+                    position += this[CONSUMEMETA](chunk, position);
+                    break;
+                /* c8 ignore start */
+                default:
+                    throw new Error('invalid state: ' + this[STATE]);
+                /* c8 ignore stop */
+            }
+        }
+        if (position < length) {
+            if (this[BUFFER]) {
+                this[BUFFER] = Buffer.concat([
+                    chunk.subarray(position),
+                    this[BUFFER],
+                ]);
+            }
+            else {
+                this[BUFFER] = chunk.subarray(position);
+            }
+        }
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (cb)
+            this.once('finish', cb);
+        if (!this[ABORTED]) {
+            if (this[UNZIP]) {
+                /* c8 ignore start */
+                if (chunk)
+                    this[UNZIP].write(chunk);
+                /* c8 ignore stop */
+                this[UNZIP].end();
+            }
+            else {
+                this[ENDED] = true;
+                if (this.brotli === undefined)
+                    chunk = chunk || Buffer.alloc(0);
+                if (chunk)
+                    this.write(chunk);
+                this[MAYBEEND]();
+            }
+        }
+        return this;
+    }
+}
+//# sourceMappingURL=parse.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js b/node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js
new file mode 100644
index 0000000000000..e63b9c91e9a80
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js
@@ -0,0 +1,166 @@
+// A path exclusive reservation system
+// reserve([list, of, paths], fn)
+// When the fn is first in line for all its paths, it
+// is called with a cb that clears the reservation.
+//
+// Used by async unpack to avoid clobbering paths in use,
+// while still allowing maximal safe parallelization.
+import { join } from 'node:path';
+import { normalizeUnicode } from './normalize-unicode.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+// return a set of parent dirs for a given path
+// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
+const getDirs = (path) => {
+    const dirs = path
+        .split('/')
+        .slice(0, -1)
+        .reduce((set, path) => {
+        const s = set[set.length - 1];
+        if (s !== undefined) {
+            path = join(s, path);
+        }
+        set.push(path || '/');
+        return set;
+    }, []);
+    return dirs;
+};
+export class PathReservations {
+    // path => [function or Set]
+    // A Set object means a directory reservation
+    // A fn is a direct reservation on that path
+    #queues = new Map();
+    // fn => {paths:[path,...], dirs:[path, ...]}
+    #reservations = new Map();
+    // functions currently running
+    #running = new Set();
+    reserve(paths, fn) {
+        paths =
+            isWindows ?
+                ['win32 parallelization disabled']
+                : paths.map(p => {
+                    // don't need normPath, because we skip this entirely for windows
+                    return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase();
+                });
+        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
+        this.#reservations.set(fn, { dirs, paths });
+        for (const p of paths) {
+            const q = this.#queues.get(p);
+            if (!q) {
+                this.#queues.set(p, [fn]);
+            }
+            else {
+                q.push(fn);
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            if (!q) {
+                this.#queues.set(dir, [new Set([fn])]);
+            }
+            else {
+                const l = q[q.length - 1];
+                if (l instanceof Set) {
+                    l.add(fn);
+                }
+                else {
+                    q.push(new Set([fn]));
+                }
+            }
+        }
+        return this.#run(fn);
+    }
+    // return the queues for each path the function cares about
+    // fn => {paths, dirs}
+    #getQueues(fn) {
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('function does not have any path reservations');
+        }
+        /* c8 ignore stop */
+        return {
+            paths: res.paths.map((path) => this.#queues.get(path)),
+            dirs: [...res.dirs].map(path => this.#queues.get(path)),
+        };
+    }
+    // check if fn is first in line for all its paths, and is
+    // included in the first set for all its dir queues
+    check(fn) {
+        const { paths, dirs } = this.#getQueues(fn);
+        return (paths.every(q => q && q[0] === fn) &&
+            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
+    }
+    // run the function if it's first in line and not already running
+    #run(fn) {
+        if (this.#running.has(fn) || !this.check(fn)) {
+            return false;
+        }
+        this.#running.add(fn);
+        fn(() => this.#clear(fn));
+        return true;
+    }
+    #clear(fn) {
+        if (!this.#running.has(fn)) {
+            return false;
+        }
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('invalid reservation');
+        }
+        /* c8 ignore stop */
+        const { paths, dirs } = res;
+        const next = new Set();
+        for (const path of paths) {
+            const q = this.#queues.get(path);
+            /* c8 ignore start */
+            if (!q || q?.[0] !== fn) {
+                continue;
+            }
+            /* c8 ignore stop */
+            const q0 = q[1];
+            if (!q0) {
+                this.#queues.delete(path);
+                continue;
+            }
+            q.shift();
+            if (typeof q0 === 'function') {
+                next.add(q0);
+            }
+            else {
+                for (const f of q0) {
+                    next.add(f);
+                }
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            const q0 = q?.[0];
+            /* c8 ignore next - type safety only */
+            if (!q || !(q0 instanceof Set))
+                continue;
+            if (q0.size === 1 && q.length === 1) {
+                this.#queues.delete(dir);
+                continue;
+            }
+            else if (q0.size === 1) {
+                q.shift();
+                // next one must be a function,
+                // or else the Set would've been reused
+                const n = q[0];
+                if (typeof n === 'function') {
+                    next.add(n);
+                }
+            }
+            else {
+                q0.delete(fn);
+            }
+        }
+        this.#running.delete(fn);
+        next.forEach(fn => this.#run(fn));
+        return true;
+    }
+}
+//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/pax.js b/node_modules/pacote/node_modules/tar/dist/esm/pax.js
new file mode 100644
index 0000000000000..832808f344da5
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/pax.js
@@ -0,0 +1,154 @@
+import { basename } from 'node:path';
+import { Header } from './header.js';
+export class Pax {
+    atime;
+    mtime;
+    ctime;
+    charset;
+    comment;
+    gid;
+    uid;
+    gname;
+    uname;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    path;
+    size;
+    mode;
+    global;
+    constructor(obj, global = false) {
+        this.atime = obj.atime;
+        this.charset = obj.charset;
+        this.comment = obj.comment;
+        this.ctime = obj.ctime;
+        this.dev = obj.dev;
+        this.gid = obj.gid;
+        this.global = global;
+        this.gname = obj.gname;
+        this.ino = obj.ino;
+        this.linkpath = obj.linkpath;
+        this.mtime = obj.mtime;
+        this.nlink = obj.nlink;
+        this.path = obj.path;
+        this.size = obj.size;
+        this.uid = obj.uid;
+        this.uname = obj.uname;
+    }
+    encode() {
+        const body = this.encodeBody();
+        if (body === '') {
+            return Buffer.allocUnsafe(0);
+        }
+        const bodyLen = Buffer.byteLength(body);
+        // round up to 512 bytes
+        // add 512 for header
+        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
+        const buf = Buffer.allocUnsafe(bufLen);
+        // 0-fill the header section, it might not hit every field
+        for (let i = 0; i < 512; i++) {
+            buf[i] = 0;
+        }
+        new Header({
+            // XXX split the path
+            // then the path should be PaxHeader + basename, but less than 99,
+            // prepend with the dirname
+            /* c8 ignore start */
+            path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99),
+            /* c8 ignore stop */
+            mode: this.mode || 0o644,
+            uid: this.uid,
+            gid: this.gid,
+            size: bodyLen,
+            mtime: this.mtime,
+            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+            linkpath: '',
+            uname: this.uname || '',
+            gname: this.gname || '',
+            devmaj: 0,
+            devmin: 0,
+            atime: this.atime,
+            ctime: this.ctime,
+        }).encode(buf);
+        buf.write(body, 512, bodyLen, 'utf8');
+        // null pad after the body
+        for (let i = bodyLen + 512; i < buf.length; i++) {
+            buf[i] = 0;
+        }
+        return buf;
+    }
+    encodeBody() {
+        return (this.encodeField('path') +
+            this.encodeField('ctime') +
+            this.encodeField('atime') +
+            this.encodeField('dev') +
+            this.encodeField('ino') +
+            this.encodeField('nlink') +
+            this.encodeField('charset') +
+            this.encodeField('comment') +
+            this.encodeField('gid') +
+            this.encodeField('gname') +
+            this.encodeField('linkpath') +
+            this.encodeField('mtime') +
+            this.encodeField('size') +
+            this.encodeField('uid') +
+            this.encodeField('uname'));
+    }
+    encodeField(field) {
+        if (this[field] === undefined) {
+            return '';
+        }
+        const r = this[field];
+        const v = r instanceof Date ? r.getTime() / 1000 : r;
+        const s = ' ' +
+            (field === 'dev' || field === 'ino' || field === 'nlink' ?
+                'SCHILY.'
+                : '') +
+            field +
+            '=' +
+            v +
+            '\n';
+        const byteLen = Buffer.byteLength(s);
+        // the digits includes the length of the digits in ascii base-10
+        // so if it's 9 characters, then adding 1 for the 9 makes it 10
+        // which makes it 11 chars.
+        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
+        if (byteLen + digits >= Math.pow(10, digits)) {
+            digits += 1;
+        }
+        const len = digits + byteLen;
+        return len + s;
+    }
+    static parse(str, ex, g = false) {
+        return new Pax(merge(parseKV(str), ex), g);
+    }
+}
+const merge = (a, b) => b ? Object.assign({}, b, a) : a;
+const parseKV = (str) => str
+    .replace(/\n$/, '')
+    .split('\n')
+    .reduce(parseKVLine, Object.create(null));
+const parseKVLine = (set, line) => {
+    const n = parseInt(line, 10);
+    // XXX Values with \n in them will fail this.
+    // Refactor to not be a naive line-by-line parse.
+    if (n !== Buffer.byteLength(line) + 1) {
+        return set;
+    }
+    line = line.slice((n + ' ').length);
+    const kv = line.split('=');
+    const r = kv.shift();
+    if (!r) {
+        return set;
+    }
+    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
+    const v = kv.join('=');
+    set[k] =
+        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
+            new Date(Number(v) * 1000)
+            : /^[0-9]+$/.test(v) ? +v
+                : v;
+    return set;
+};
+//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/read-entry.js b/node_modules/pacote/node_modules/tar/dist/esm/read-entry.js
new file mode 100644
index 0000000000000..23cc673e61087
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/read-entry.js
@@ -0,0 +1,136 @@
+import { Minipass } from 'minipass';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+export class ReadEntry extends Minipass {
+    extended;
+    globalExtended;
+    header;
+    startBlockSize;
+    blockRemain;
+    remain;
+    type;
+    meta = false;
+    ignore = false;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    size = 0;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    invalid = false;
+    absolute;
+    unsupported = false;
+    constructor(header, ex, gex) {
+        super({});
+        // read entries always start life paused.  this is to avoid the
+        // situation where Minipass's auto-ending empty streams results
+        // in an entry ending before we're ready for it.
+        this.pause();
+        this.extended = ex;
+        this.globalExtended = gex;
+        this.header = header;
+        /* c8 ignore start */
+        this.remain = header.size ?? 0;
+        /* c8 ignore stop */
+        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
+        this.blockRemain = this.startBlockSize;
+        this.type = header.type;
+        switch (this.type) {
+            case 'File':
+            case 'OldFile':
+            case 'Link':
+            case 'SymbolicLink':
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'Directory':
+            case 'FIFO':
+            case 'ContiguousFile':
+            case 'GNUDumpDir':
+                break;
+            case 'NextFileHasLongLinkpath':
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath':
+            case 'GlobalExtendedHeader':
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this.meta = true;
+                break;
+            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+            // it may be worth doing the same, but with a warning.
+            default:
+                this.ignore = true;
+        }
+        /* c8 ignore start */
+        if (!header.path) {
+            throw new Error('no path provided for tar.ReadEntry');
+        }
+        /* c8 ignore stop */
+        this.path = normalizeWindowsPath(header.path);
+        this.mode = header.mode;
+        if (this.mode) {
+            this.mode = this.mode & 0o7777;
+        }
+        this.uid = header.uid;
+        this.gid = header.gid;
+        this.uname = header.uname;
+        this.gname = header.gname;
+        this.size = this.remain;
+        this.mtime = header.mtime;
+        this.atime = header.atime;
+        this.ctime = header.ctime;
+        /* c8 ignore start */
+        this.linkpath =
+            header.linkpath ?
+                normalizeWindowsPath(header.linkpath)
+                : undefined;
+        /* c8 ignore stop */
+        this.uname = header.uname;
+        this.gname = header.gname;
+        if (ex) {
+            this.#slurp(ex);
+        }
+        if (gex) {
+            this.#slurp(gex, true);
+        }
+    }
+    write(data) {
+        const writeLen = data.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        const r = this.remain;
+        const br = this.blockRemain;
+        this.remain = Math.max(0, r - writeLen);
+        this.blockRemain = Math.max(0, br - writeLen);
+        if (this.ignore) {
+            return true;
+        }
+        if (r >= writeLen) {
+            return super.write(data);
+        }
+        // r < writeLen
+        return super.write(data.subarray(0, r));
+    }
+    #slurp(ex, gex = false) {
+        if (ex.path)
+            ex.path = normalizeWindowsPath(ex.path);
+        if (ex.linkpath)
+            ex.linkpath = normalizeWindowsPath(ex.linkpath);
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex));
+        })));
+    }
+}
+//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/replace.js b/node_modules/pacote/node_modules/tar/dist/esm/replace.js
new file mode 100644
index 0000000000000..bab622bfdf1f1
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/replace.js
@@ -0,0 +1,225 @@
+// tar -r
+import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
+import fs from 'node:fs';
+import path from 'node:path';
+import { Header } from './header.js';
+import { list } from './list.js';
+import { makeCommand } from './make-command.js';
+import { isFile, } from './options.js';
+import { Pack, PackSync } from './pack.js';
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is, jump forward by the specified size (round up to 512)
+// and try again.
+// Write the new Pack stream starting there.
+const replaceSync = (opt, files) => {
+    const p = new PackSync(opt);
+    let threw = true;
+    let fd;
+    let position;
+    try {
+        try {
+            fd = fs.openSync(opt.file, 'r+');
+        }
+        catch (er) {
+            if (er?.code === 'ENOENT') {
+                fd = fs.openSync(opt.file, 'w+');
+            }
+            else {
+                throw er;
+            }
+        }
+        const st = fs.fstatSync(fd);
+        const headBuf = Buffer.alloc(512);
+        POSITION: for (position = 0; position < st.size; position += 512) {
+            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+                bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
+                if (position === 0 &&
+                    headBuf[0] === 0x1f &&
+                    headBuf[1] === 0x8b) {
+                    throw new Error('cannot append to compressed archives');
+                }
+                if (!bytes) {
+                    break POSITION;
+                }
+            }
+            const h = new Header(headBuf);
+            if (!h.cksumValid) {
+                break;
+            }
+            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
+            if (position + entryBlockSize + 512 > st.size) {
+                break;
+            }
+            // the 512 for the header we just parsed will be added as well
+            // also jump ahead all the blocks for the body
+            position += entryBlockSize;
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+        }
+        threw = false;
+        streamSync(opt, p, position, fd, files);
+    }
+    finally {
+        if (threw) {
+            try {
+                fs.closeSync(fd);
+            }
+            catch (er) { }
+        }
+    }
+};
+const streamSync = (opt, p, position, fd, files) => {
+    const stream = new WriteStreamSync(opt.file, {
+        fd: fd,
+        start: position,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const replaceAsync = (opt, files) => {
+    files = Array.from(files);
+    const p = new Pack(opt);
+    const getPos = (fd, size, cb_) => {
+        const cb = (er, pos) => {
+            if (er) {
+                fs.close(fd, _ => cb_(er));
+            }
+            else {
+                cb_(null, pos);
+            }
+        };
+        let position = 0;
+        if (size === 0) {
+            return cb(null, 0);
+        }
+        let bufPos = 0;
+        const headBuf = Buffer.alloc(512);
+        const onread = (er, bytes) => {
+            if (er || typeof bytes === 'undefined') {
+                return cb(er);
+            }
+            bufPos += bytes;
+            if (bufPos < 512 && bytes) {
+                return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
+            }
+            if (position === 0 &&
+                headBuf[0] === 0x1f &&
+                headBuf[1] === 0x8b) {
+                return cb(new Error('cannot append to compressed archives'));
+            }
+            // truncated header
+            if (bufPos < 512) {
+                return cb(null, position);
+            }
+            const h = new Header(headBuf);
+            if (!h.cksumValid) {
+                return cb(null, position);
+            }
+            /* c8 ignore next */
+            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
+            if (position + entryBlockSize + 512 > size) {
+                return cb(null, position);
+            }
+            position += entryBlockSize + 512;
+            if (position >= size) {
+                return cb(null, position);
+            }
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+            bufPos = 0;
+            fs.read(fd, headBuf, 0, 512, position, onread);
+        };
+        fs.read(fd, headBuf, 0, 512, position, onread);
+    };
+    const promise = new Promise((resolve, reject) => {
+        p.on('error', reject);
+        let flag = 'r+';
+        const onopen = (er, fd) => {
+            if (er && er.code === 'ENOENT' && flag === 'r+') {
+                flag = 'w+';
+                return fs.open(opt.file, flag, onopen);
+            }
+            if (er || !fd) {
+                return reject(er);
+            }
+            fs.fstat(fd, (er, st) => {
+                if (er) {
+                    return fs.close(fd, () => reject(er));
+                }
+                getPos(fd, st.size, (er, position) => {
+                    if (er) {
+                        return reject(er);
+                    }
+                    const stream = new WriteStream(opt.file, {
+                        fd: fd,
+                        start: position,
+                    });
+                    p.pipe(stream);
+                    stream.on('error', reject);
+                    stream.on('close', resolve);
+                    addFilesAsync(p, files);
+                });
+            });
+        };
+        fs.open(opt.file, flag, onopen);
+    });
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            list({
+                file: path.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await list({
+                file: path.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+export const replace = makeCommand(replaceSync, replaceAsync, 
+/* c8 ignore start */
+() => {
+    throw new TypeError('file is required');
+}, () => {
+    throw new TypeError('file is required');
+}, 
+/* c8 ignore stop */
+(opt, entries) => {
+    if (!isFile(opt)) {
+        throw new TypeError('file is required');
+    }
+    if (opt.gzip ||
+        opt.brotli ||
+        opt.file.endsWith('.br') ||
+        opt.file.endsWith('.tbr')) {
+        throw new TypeError('cannot append to compressed archives');
+    }
+    if (!entries?.length) {
+        throw new TypeError('no paths specified to add/replace');
+    }
+});
+//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js
new file mode 100644
index 0000000000000..cce5ff80b00db
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js
@@ -0,0 +1,25 @@
+// unix absolute paths are also absolute on win32, so we use this for both
+import { win32 } from 'node:path';
+const { isAbsolute, parse } = win32;
+// returns [root, stripped]
+// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
+// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
+// explicitly if it's the first character.
+// drive-specific relative paths on Windows get their root stripped off even
+// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
+export const stripAbsolutePath = (path) => {
+    let r = '';
+    let parsed = parse(path);
+    while (isAbsolute(path) || parsed.root) {
+        // windows will think that //x/y/z has a "root" of //x/y/
+        // but strip the //?/C:/ off of //?/C:/path
+        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
+            '/'
+            : parsed.root;
+        path = path.slice(root.length);
+        r += root;
+        parsed = parse(path);
+    }
+    return [r, path];
+};
+//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js
new file mode 100644
index 0000000000000..ace4218a7547b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js
@@ -0,0 +1,14 @@
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+export const stripTrailingSlashes = (str) => {
+    let i = str.length - 1;
+    let slashesStart = -1;
+    while (i > -1 && str.charAt(i) === '/') {
+        slashesStart = i;
+        i--;
+    }
+    return slashesStart === -1 ? str : str.slice(0, slashesStart);
+};
+//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js b/node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js
new file mode 100644
index 0000000000000..d31766e2e0afa
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js
@@ -0,0 +1,15 @@
+export class SymlinkError extends Error {
+    path;
+    symlink;
+    syscall = 'symlink';
+    code = 'TAR_SYMLINK_ERROR';
+    constructor(symlink, path) {
+        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
+        this.symlink = symlink;
+        this.path = path;
+    }
+    get name() {
+        return 'SymlinkError';
+    }
+}
+//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/types.js b/node_modules/pacote/node_modules/tar/dist/esm/types.js
new file mode 100644
index 0000000000000..27b982ae1e092
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/types.js
@@ -0,0 +1,45 @@
+export const isCode = (c) => name.has(c);
+export const isName = (c) => code.has(c);
+// map types from key to human-friendly name
+export const name = new Map([
+    ['0', 'File'],
+    // same as File
+    ['', 'OldFile'],
+    ['1', 'Link'],
+    ['2', 'SymbolicLink'],
+    // Devices and FIFOs aren't fully supported
+    // they are parsed, but skipped when unpacking
+    ['3', 'CharacterDevice'],
+    ['4', 'BlockDevice'],
+    ['5', 'Directory'],
+    ['6', 'FIFO'],
+    // same as File
+    ['7', 'ContiguousFile'],
+    // pax headers
+    ['g', 'GlobalExtendedHeader'],
+    ['x', 'ExtendedHeader'],
+    // vendor-specific stuff
+    // skip
+    ['A', 'SolarisACL'],
+    // like 5, but with data, which should be skipped
+    ['D', 'GNUDumpDir'],
+    // metadata only, skip
+    ['I', 'Inode'],
+    // data = link path of next file
+    ['K', 'NextFileHasLongLinkpath'],
+    // data = path of next file
+    ['L', 'NextFileHasLongPath'],
+    // skip
+    ['M', 'ContinuationFile'],
+    // like L
+    ['N', 'OldGnuLongPath'],
+    // skip
+    ['S', 'SparseFile'],
+    // skip
+    ['V', 'TapeVolumeHeader'],
+    // like x
+    ['X', 'OldExtendedHeader'],
+]);
+// map the other direction
+export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]]));
+//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/unpack.js b/node_modules/pacote/node_modules/tar/dist/esm/unpack.js
new file mode 100644
index 0000000000000..6e744cfc1a6f9
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/unpack.js
@@ -0,0 +1,888 @@
+// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
+// but the path reservations are required to avoid race conditions where
+// parallelized unpack ops may mess with one another, due to dependencies
+// (like a Link depending on its target) or destructive operations (like
+// clobbering an fs object to create one of a different type.)
+import * as fsm from '@isaacs/fs-minipass';
+import assert from 'node:assert';
+import { randomBytes } from 'node:crypto';
+import fs from 'node:fs';
+import path from 'node:path';
+import { getWriteFlag } from './get-write-flag.js';
+import { mkdir, mkdirSync } from './mkdir.js';
+import { normalizeUnicode } from './normalize-unicode.js';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+import { Parser } from './parse.js';
+import { stripAbsolutePath } from './strip-absolute-path.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+import * as wc from './winchars.js';
+import { PathReservations } from './path-reservations.js';
+const ONENTRY = Symbol('onEntry');
+const CHECKFS = Symbol('checkFs');
+const CHECKFS2 = Symbol('checkFs2');
+const PRUNECACHE = Symbol('pruneCache');
+const ISREUSABLE = Symbol('isReusable');
+const MAKEFS = Symbol('makeFs');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const LINK = Symbol('link');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const UNSUPPORTED = Symbol('unsupported');
+const CHECKPATH = Symbol('checkPath');
+const MKDIR = Symbol('mkdir');
+const ONERROR = Symbol('onError');
+const PENDING = Symbol('pending');
+const PEND = Symbol('pend');
+const UNPEND = Symbol('unpend');
+const ENDED = Symbol('ended');
+const MAYBECLOSE = Symbol('maybeClose');
+const SKIP = Symbol('skip');
+const DOCHOWN = Symbol('doChown');
+const UID = Symbol('uid');
+const GID = Symbol('gid');
+const CHECKED_CWD = Symbol('checkedCwd');
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+const DEFAULT_MAX_DEPTH = 1024;
+// Unlinks on Windows are not atomic.
+//
+// This means that if you have a file entry, followed by another
+// file entry with an identical name, and you cannot re-use the file
+// (because it's a hardlink, or because unlink:true is set, or it's
+// Windows, which does not have useful nlink values), then the unlink
+// will be committed to the disk AFTER the new file has been written
+// over the old one, deleting the new file.
+//
+// To work around this, on Windows systems, we rename the file and then
+// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
+// know of a better way to do this, given windows' non-atomic unlink
+// semantics.
+//
+// See: https://github.com/npm/node-tar/issues/183
+/* c8 ignore start */
+const unlinkFile = (path, cb) => {
+    if (!isWindows) {
+        return fs.unlink(path, cb);
+    }
+    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
+    fs.rename(path, name, er => {
+        if (er) {
+            return cb(er);
+        }
+        fs.unlink(name, cb);
+    });
+};
+/* c8 ignore stop */
+/* c8 ignore start */
+const unlinkFileSync = (path) => {
+    if (!isWindows) {
+        return fs.unlinkSync(path);
+    }
+    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
+    fs.renameSync(path, name);
+    fs.unlinkSync(name);
+};
+/* c8 ignore stop */
+// this.gid, entry.gid, this.processUid
+const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
+    : b !== undefined && b === b >>> 0 ? b
+        : c;
+// clear the cache if it's a case-insensitive unicode-squashing match.
+// we can't know if the current file system is case-sensitive or supports
+// unicode fully, so we check for similarity on the maximally compatible
+// representation.  Err on the side of pruning, since all it's doing is
+// preventing lstats, and it's not the end of the world if we get a false
+// positive.
+// Note that on windows, we always drop the entire cache whenever a
+// symbolic link is encountered, because 8.3 filenames are impossible
+// to reason about, and collisions are hazards rather than just failures.
+const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
+// remove all cache entries matching ${abs}/**
+const pruneCache = (cache, abs) => {
+    abs = cacheKeyNormalize(abs);
+    for (const path of cache.keys()) {
+        const pnorm = cacheKeyNormalize(path);
+        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
+            cache.delete(path);
+        }
+    }
+};
+const dropCache = (cache) => {
+    for (const key of cache.keys()) {
+        cache.delete(key);
+    }
+};
+export class Unpack extends Parser {
+    [ENDED] = false;
+    [CHECKED_CWD] = false;
+    [PENDING] = 0;
+    reservations = new PathReservations();
+    transform;
+    writable = true;
+    readable = false;
+    dirCache;
+    uid;
+    gid;
+    setOwner;
+    preserveOwner;
+    processGid;
+    processUid;
+    maxDepth;
+    forceChown;
+    win32;
+    newer;
+    keep;
+    noMtime;
+    preservePaths;
+    unlink;
+    cwd;
+    strip;
+    processUmask;
+    umask;
+    dmode;
+    fmode;
+    chmod;
+    constructor(opt = {}) {
+        opt.ondone = () => {
+            this[ENDED] = true;
+            this[MAYBECLOSE]();
+        };
+        super(opt);
+        this.transform = opt.transform;
+        this.dirCache = opt.dirCache || new Map();
+        this.chmod = !!opt.chmod;
+        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+            // need both or neither
+            if (typeof opt.uid !== 'number' ||
+                typeof opt.gid !== 'number') {
+                throw new TypeError('cannot set owner without number uid and gid');
+            }
+            if (opt.preserveOwner) {
+                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
+            }
+            this.uid = opt.uid;
+            this.gid = opt.gid;
+            this.setOwner = true;
+        }
+        else {
+            this.uid = undefined;
+            this.gid = undefined;
+            this.setOwner = false;
+        }
+        // default true for root
+        if (opt.preserveOwner === undefined &&
+            typeof opt.uid !== 'number') {
+            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
+        }
+        else {
+            this.preserveOwner = !!opt.preserveOwner;
+        }
+        this.processUid =
+            (this.preserveOwner || this.setOwner) && process.getuid ?
+                process.getuid()
+                : undefined;
+        this.processGid =
+            (this.preserveOwner || this.setOwner) && process.getgid ?
+                process.getgid()
+                : undefined;
+        // prevent excessively deep nesting of subfolders
+        // set to `Infinity` to remove this restriction
+        this.maxDepth =
+            typeof opt.maxDepth === 'number' ?
+                opt.maxDepth
+                : DEFAULT_MAX_DEPTH;
+        // mostly just for testing, but useful in some cases.
+        // Forcibly trigger a chown on every entry, no matter what
+        this.forceChown = opt.forceChown === true;
+        this.on('entry', entry => this[ONENTRY](entry));
+    }
+    // a bad or damaged archive is a warning for Parser, but an error
+    // when extracting.  Mark those errors as unrecoverable, because
+    // the Unpack contract cannot be met.
+    warn(code, msg, data = {}) {
+        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
+            data.recoverable = false;
+        }
+        return super.warn(code, msg, data);
+    }
+    [MAYBECLOSE]() {
+        if (this[ENDED] && this[PENDING] === 0) {
+            this.emit('prefinish');
+            this.emit('finish');
+            this.emit('end');
+        }
+    }
+    [CHECKPATH](entry) {
+        const p = normalizeWindowsPath(entry.path);
+        const parts = p.split('/');
+        if (this.strip) {
+            if (parts.length < this.strip) {
+                return false;
+            }
+            if (entry.type === 'Link') {
+                const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/');
+                if (linkparts.length >= this.strip) {
+                    entry.linkpath = linkparts.slice(this.strip).join('/');
+                }
+                else {
+                    return false;
+                }
+            }
+            parts.splice(0, this.strip);
+            entry.path = parts.join('/');
+        }
+        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
+            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
+                entry,
+                path: p,
+                depth: parts.length,
+                maxDepth: this.maxDepth,
+            });
+            return false;
+        }
+        if (!this.preservePaths) {
+            if (parts.includes('..') ||
+                /* c8 ignore next */
+                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
+                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
+                    entry,
+                    path: p,
+                });
+                return false;
+            }
+            // strip off the root
+            const [root, stripped] = stripAbsolutePath(p);
+            if (root) {
+                entry.path = String(stripped);
+                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
+                    entry,
+                    path: p,
+                });
+            }
+        }
+        if (path.isAbsolute(entry.path)) {
+            entry.absolute = normalizeWindowsPath(path.resolve(entry.path));
+        }
+        else {
+            entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path));
+        }
+        // if we somehow ended up with a path that escapes the cwd, and we are
+        // not in preservePaths mode, then something is fishy!  This should have
+        // been prevented above, so ignore this for coverage.
+        /* c8 ignore start - defense in depth */
+        if (!this.preservePaths &&
+            typeof entry.absolute === 'string' &&
+            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
+            entry.absolute !== this.cwd) {
+            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
+                entry,
+                path: normalizeWindowsPath(entry.path),
+                resolvedPath: entry.absolute,
+                cwd: this.cwd,
+            });
+            return false;
+        }
+        /* c8 ignore stop */
+        // an archive can set properties on the extraction directory, but it
+        // may not replace the cwd with a different kind of thing entirely.
+        if (entry.absolute === this.cwd &&
+            entry.type !== 'Directory' &&
+            entry.type !== 'GNUDumpDir') {
+            return false;
+        }
+        // only encode : chars that aren't drive letter indicators
+        if (this.win32) {
+            const { root: aRoot } = path.win32.parse(String(entry.absolute));
+            entry.absolute =
+                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
+            const { root: pRoot } = path.win32.parse(entry.path);
+            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
+        }
+        return true;
+    }
+    [ONENTRY](entry) {
+        if (!this[CHECKPATH](entry)) {
+            return entry.resume();
+        }
+        assert.equal(typeof entry.absolute, 'string');
+        switch (entry.type) {
+            case 'Directory':
+            case 'GNUDumpDir':
+                if (entry.mode) {
+                    entry.mode = entry.mode | 0o700;
+                }
+            // eslint-disable-next-line no-fallthrough
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+            case 'Link':
+            case 'SymbolicLink':
+                return this[CHECKFS](entry);
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'FIFO':
+            default:
+                return this[UNSUPPORTED](entry);
+        }
+    }
+    [ONERROR](er, entry) {
+        // Cwd has to exist, or else nothing works. That's serious.
+        // Other errors are warnings, which raise the error in strict
+        // mode, but otherwise continue on.
+        if (er.name === 'CwdError') {
+            this.emit('error', er);
+        }
+        else {
+            this.warn('TAR_ENTRY_ERROR', er, { entry });
+            this[UNPEND]();
+            entry.resume();
+        }
+    }
+    [MKDIR](dir, mode, cb) {
+        mkdir(normalizeWindowsPath(dir), {
+            uid: this.uid,
+            gid: this.gid,
+            processUid: this.processUid,
+            processGid: this.processGid,
+            umask: this.processUmask,
+            preserve: this.preservePaths,
+            unlink: this.unlink,
+            cache: this.dirCache,
+            cwd: this.cwd,
+            mode: mode,
+        }, cb);
+    }
+    [DOCHOWN](entry) {
+        // in preserve owner mode, chown if the entry doesn't match process
+        // in set owner mode, chown if setting doesn't match process
+        return (this.forceChown ||
+            (this.preserveOwner &&
+                ((typeof entry.uid === 'number' &&
+                    entry.uid !== this.processUid) ||
+                    (typeof entry.gid === 'number' &&
+                        entry.gid !== this.processGid))) ||
+            (typeof this.uid === 'number' &&
+                this.uid !== this.processUid) ||
+            (typeof this.gid === 'number' && this.gid !== this.processGid));
+    }
+    [UID](entry) {
+        return uint32(this.uid, entry.uid, this.processUid);
+    }
+    [GID](entry) {
+        return uint32(this.gid, entry.gid, this.processGid);
+    }
+    [FILE](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const stream = new fsm.WriteStream(String(entry.absolute), {
+            // slight lie, but it can be numeric flags
+            flags: getWriteFlag(entry.size),
+            mode: mode,
+            autoClose: false,
+        });
+        stream.on('error', (er) => {
+            if (stream.fd) {
+                fs.close(stream.fd, () => { });
+            }
+            // flush all the data out so that we aren't left hanging
+            // if the error wasn't actually fatal.  otherwise the parse
+            // is blocked, and we never proceed.
+            stream.write = () => true;
+            this[ONERROR](er, entry);
+            fullyDone();
+        });
+        let actions = 1;
+        const done = (er) => {
+            if (er) {
+                /* c8 ignore start - we should always have a fd by now */
+                if (stream.fd) {
+                    fs.close(stream.fd, () => { });
+                }
+                /* c8 ignore stop */
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            if (--actions === 0) {
+                if (stream.fd !== undefined) {
+                    fs.close(stream.fd, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                        }
+                        else {
+                            this[UNPEND]();
+                        }
+                        fullyDone();
+                    });
+                }
+            }
+        };
+        stream.on('finish', () => {
+            // if futimes fails, try utimes
+            // if utimes fails, fail with the original error
+            // same for fchown/chown
+            const abs = String(entry.absolute);
+            const fd = stream.fd;
+            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
+                actions++;
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                fs.futimes(fd, atime, mtime, er => er ?
+                    fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
+                    : done());
+            }
+            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
+                actions++;
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                if (typeof uid === 'number' && typeof gid === 'number') {
+                    fs.fchown(fd, uid, gid, er => er ?
+                        fs.chown(abs, uid, gid, er2 => done(er2 && er))
+                        : done());
+                }
+            }
+            done();
+        });
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => {
+                this[ONERROR](er, entry);
+                fullyDone();
+            });
+            entry.pipe(tx);
+        }
+        tx.pipe(stream);
+    }
+    [DIRECTORY](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        this[MKDIR](String(entry.absolute), mode, er => {
+            if (er) {
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            let actions = 1;
+            const done = () => {
+                if (--actions === 0) {
+                    fullyDone();
+                    this[UNPEND]();
+                    entry.resume();
+                }
+            };
+            if (entry.mtime && !this.noMtime) {
+                actions++;
+                fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
+            }
+            if (this[DOCHOWN](entry)) {
+                actions++;
+                fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
+            }
+            done();
+        });
+    }
+    [UNSUPPORTED](entry) {
+        entry.unsupported = true;
+        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
+        entry.resume();
+    }
+    [SYMLINK](entry, done) {
+        this[LINK](entry, String(entry.linkpath), 'symlink', done);
+    }
+    [HARDLINK](entry, done) {
+        const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath)));
+        this[LINK](entry, linkpath, 'link', done);
+    }
+    [PEND]() {
+        this[PENDING]++;
+    }
+    [UNPEND]() {
+        this[PENDING]--;
+        this[MAYBECLOSE]();
+    }
+    [SKIP](entry) {
+        this[UNPEND]();
+        entry.resume();
+    }
+    // Check if we can reuse an existing filesystem entry safely and
+    // overwrite it, rather than unlinking and recreating
+    // Windows doesn't report a useful nlink, so we just never reuse entries
+    [ISREUSABLE](entry, st) {
+        return (entry.type === 'File' &&
+            !this.unlink &&
+            st.isFile() &&
+            st.nlink <= 1 &&
+            !isWindows);
+    }
+    // check if a thing is there, and if so, try to clobber it
+    [CHECKFS](entry) {
+        this[PEND]();
+        const paths = [entry.path];
+        if (entry.linkpath) {
+            paths.push(entry.linkpath);
+        }
+        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
+    }
+    [PRUNECACHE](entry) {
+        // if we are not creating a directory, and the path is in the dirCache,
+        // then that means we are about to delete the directory we created
+        // previously, and it is no longer going to be a directory, and neither
+        // is any of its children.
+        // If a symbolic link is encountered, all bets are off.  There is no
+        // reasonable way to sanitize the cache in such a way we will be able to
+        // avoid having filesystem collisions.  If this happens with a non-symlink
+        // entry, it'll just fail to unpack, but a symlink to a directory, using an
+        // 8.3 shortname or certain unicode attacks, can evade detection and lead
+        // to arbitrary writes to anywhere on the system.
+        if (entry.type === 'SymbolicLink') {
+            dropCache(this.dirCache);
+        }
+        else if (entry.type !== 'Directory') {
+            pruneCache(this.dirCache, String(entry.absolute));
+        }
+    }
+    [CHECKFS2](entry, fullyDone) {
+        this[PRUNECACHE](entry);
+        const done = (er) => {
+            this[PRUNECACHE](entry);
+            fullyDone(er);
+        };
+        const checkCwd = () => {
+            this[MKDIR](this.cwd, this.dmode, er => {
+                if (er) {
+                    this[ONERROR](er, entry);
+                    done();
+                    return;
+                }
+                this[CHECKED_CWD] = true;
+                start();
+            });
+        };
+        const start = () => {
+            if (entry.absolute !== this.cwd) {
+                const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
+                if (parent !== this.cwd) {
+                    return this[MKDIR](parent, this.dmode, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                            done();
+                            return;
+                        }
+                        afterMakeParent();
+                    });
+                }
+            }
+            afterMakeParent();
+        };
+        const afterMakeParent = () => {
+            fs.lstat(String(entry.absolute), (lstatEr, st) => {
+                if (st &&
+                    (this.keep ||
+                        /* c8 ignore next */
+                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+                    this[SKIP](entry);
+                    done();
+                    return;
+                }
+                if (lstatEr || this[ISREUSABLE](entry, st)) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                if (st.isDirectory()) {
+                    if (entry.type === 'Directory') {
+                        const needChmod = this.chmod &&
+                            entry.mode &&
+                            (st.mode & 0o7777) !== entry.mode;
+                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
+                        if (!needChmod) {
+                            return afterChmod();
+                        }
+                        return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
+                    }
+                    // Not a dir entry, have to remove it.
+                    // NB: the only way to end up with an entry that is the cwd
+                    // itself, in such a way that == does not detect, is a
+                    // tricky windows absolute path with UNC or 8.3 parts (and
+                    // preservePaths:true, or else it will have been stripped).
+                    // In that case, the user has opted out of path protections
+                    // explicitly, so if they blow away the cwd, c'est la vie.
+                    if (entry.absolute !== this.cwd) {
+                        return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
+                    }
+                }
+                // not a dir, and not reusable
+                // don't remove if the cwd, we want that error
+                if (entry.absolute === this.cwd) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
+            });
+        };
+        if (this[CHECKED_CWD]) {
+            start();
+        }
+        else {
+            checkCwd();
+        }
+    }
+    [MAKEFS](er, entry, done) {
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        switch (entry.type) {
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+                return this[FILE](entry, done);
+            case 'Link':
+                return this[HARDLINK](entry, done);
+            case 'SymbolicLink':
+                return this[SYMLINK](entry, done);
+            case 'Directory':
+            case 'GNUDumpDir':
+                return this[DIRECTORY](entry, done);
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        // XXX: get the type ('symlink' or 'junction') for windows
+        fs[link](linkpath, String(entry.absolute), er => {
+            if (er) {
+                this[ONERROR](er, entry);
+            }
+            else {
+                this[UNPEND]();
+                entry.resume();
+            }
+            done();
+        });
+    }
+}
+const callSync = (fn) => {
+    try {
+        return [null, fn()];
+    }
+    catch (er) {
+        return [er, null];
+    }
+};
+export class UnpackSync extends Unpack {
+    sync = true;
+    [MAKEFS](er, entry) {
+        return super[MAKEFS](er, entry, () => { });
+    }
+    [CHECKFS](entry) {
+        this[PRUNECACHE](entry);
+        if (!this[CHECKED_CWD]) {
+            const er = this[MKDIR](this.cwd, this.dmode);
+            if (er) {
+                return this[ONERROR](er, entry);
+            }
+            this[CHECKED_CWD] = true;
+        }
+        // don't bother to make the parent if the current entry is the cwd,
+        // we've already checked it.
+        if (entry.absolute !== this.cwd) {
+            const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
+            if (parent !== this.cwd) {
+                const mkParent = this[MKDIR](parent, this.dmode);
+                if (mkParent) {
+                    return this[ONERROR](mkParent, entry);
+                }
+            }
+        }
+        const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute)));
+        if (st &&
+            (this.keep ||
+                /* c8 ignore next */
+                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+            return this[SKIP](entry);
+        }
+        if (lstatEr || this[ISREUSABLE](entry, st)) {
+            return this[MAKEFS](null, entry);
+        }
+        if (st.isDirectory()) {
+            if (entry.type === 'Directory') {
+                const needChmod = this.chmod &&
+                    entry.mode &&
+                    (st.mode & 0o7777) !== entry.mode;
+                const [er] = needChmod ?
+                    callSync(() => {
+                        fs.chmodSync(String(entry.absolute), Number(entry.mode));
+                    })
+                    : [];
+                return this[MAKEFS](er, entry);
+            }
+            // not a dir entry, have to remove it
+            const [er] = callSync(() => fs.rmdirSync(String(entry.absolute)));
+            this[MAKEFS](er, entry);
+        }
+        // not a dir, and not reusable.
+        // don't remove if it's the cwd, since we want that error.
+        const [er] = entry.absolute === this.cwd ?
+            []
+            : callSync(() => unlinkFileSync(String(entry.absolute)));
+        this[MAKEFS](er, entry);
+    }
+    [FILE](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const oner = (er) => {
+            let closeError;
+            try {
+                fs.closeSync(fd);
+            }
+            catch (e) {
+                closeError = e;
+            }
+            if (er || closeError) {
+                this[ONERROR](er || closeError, entry);
+            }
+            done();
+        };
+        let fd;
+        try {
+            fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
+        }
+        catch (er) {
+            return oner(er);
+        }
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => this[ONERROR](er, entry));
+            entry.pipe(tx);
+        }
+        tx.on('data', (chunk) => {
+            try {
+                fs.writeSync(fd, chunk, 0, chunk.length);
+            }
+            catch (er) {
+                oner(er);
+            }
+        });
+        tx.on('end', () => {
+            let er = null;
+            // try both, falling futimes back to utimes
+            // if either fails, handle the first error
+            if (entry.mtime && !this.noMtime) {
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                try {
+                    fs.futimesSync(fd, atime, mtime);
+                }
+                catch (futimeser) {
+                    try {
+                        fs.utimesSync(String(entry.absolute), atime, mtime);
+                    }
+                    catch (utimeser) {
+                        er = futimeser;
+                    }
+                }
+            }
+            if (this[DOCHOWN](entry)) {
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                try {
+                    fs.fchownSync(fd, Number(uid), Number(gid));
+                }
+                catch (fchowner) {
+                    try {
+                        fs.chownSync(String(entry.absolute), Number(uid), Number(gid));
+                    }
+                    catch (chowner) {
+                        er = er || fchowner;
+                    }
+                }
+            }
+            oner(er);
+        });
+    }
+    [DIRECTORY](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        const er = this[MKDIR](String(entry.absolute), mode);
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        if (entry.mtime && !this.noMtime) {
+            try {
+                fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+        if (this[DOCHOWN](entry)) {
+            try {
+                fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
+            }
+            catch (er) { }
+        }
+        done();
+        entry.resume();
+    }
+    [MKDIR](dir, mode) {
+        try {
+            return mkdirSync(normalizeWindowsPath(dir), {
+                uid: this.uid,
+                gid: this.gid,
+                processUid: this.processUid,
+                processGid: this.processGid,
+                umask: this.processUmask,
+                preserve: this.preservePaths,
+                unlink: this.unlink,
+                cache: this.dirCache,
+                cwd: this.cwd,
+                mode: mode,
+            });
+        }
+        catch (er) {
+            return er;
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        const ls = `${link}Sync`;
+        try {
+            fs[ls](linkpath, String(entry.absolute));
+            done();
+            entry.resume();
+        }
+        catch (er) {
+            return this[ONERROR](er, entry);
+        }
+    }
+}
+//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/update.js b/node_modules/pacote/node_modules/tar/dist/esm/update.js
new file mode 100644
index 0000000000000..21398e9766663
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/update.js
@@ -0,0 +1,30 @@
+// tar -u
+import { makeCommand } from './make-command.js';
+import { replace as r } from './replace.js';
+// just call tar.r with the filter and mtimeCache
+export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => {
+    r.validate?.(opt, entries);
+    mtimeFilter(opt);
+});
+const mtimeFilter = (opt) => {
+    const filter = opt.filter;
+    if (!opt.mtimeCache) {
+        opt.mtimeCache = new Map();
+    }
+    opt.filter =
+        filter ?
+            (path, stat) => filter(path, stat) &&
+                !(
+                /* c8 ignore start */
+                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                    (stat.mtime ?? 0))
+                /* c8 ignore stop */
+                )
+            : (path, stat) => !(
+            /* c8 ignore start */
+            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                (stat.mtime ?? 0))
+            /* c8 ignore stop */
+            );
+};
+//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/warn-method.js b/node_modules/pacote/node_modules/tar/dist/esm/warn-method.js
new file mode 100644
index 0000000000000..13e798afefc85
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/warn-method.js
@@ -0,0 +1,27 @@
+export const warnMethod = (self, code, message, data = {}) => {
+    if (self.file) {
+        data.file = self.file;
+    }
+    if (self.cwd) {
+        data.cwd = self.cwd;
+    }
+    data.code =
+        (message instanceof Error &&
+            message.code) ||
+            code;
+    data.tarCode = code;
+    if (!self.strict && data.recoverable !== false) {
+        if (message instanceof Error) {
+            data = Object.assign(message, data);
+            message = message.message;
+        }
+        self.emit('warn', code, message, data);
+    }
+    else if (message instanceof Error) {
+        self.emit('error', Object.assign(message, data));
+    }
+    else {
+        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
+    }
+};
+//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/winchars.js b/node_modules/pacote/node_modules/tar/dist/esm/winchars.js
new file mode 100644
index 0000000000000..c41eb86d69a4b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/winchars.js
@@ -0,0 +1,9 @@
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+const raw = ['|', '<', '>', '?', ':'];
+const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
+const toWin = new Map(raw.map((char, i) => [char, win[i]]));
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
+export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
+export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
+//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/write-entry.js b/node_modules/pacote/node_modules/tar/dist/esm/write-entry.js
new file mode 100644
index 0000000000000..9028cd676b4cd
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/dist/esm/write-entry.js
@@ -0,0 +1,657 @@
+import fs from 'fs';
+import { Minipass } from 'minipass';
+import path from 'path';
+import { Header } from './header.js';
+import { modeFix } from './mode-fix.js';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+import { dealias, } from './options.js';
+import { Pax } from './pax.js';
+import { stripAbsolutePath } from './strip-absolute-path.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+import { warnMethod, } from './warn-method.js';
+import * as winchars from './winchars.js';
+const prefixPath = (path, prefix) => {
+    if (!prefix) {
+        return normalizeWindowsPath(path);
+    }
+    path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '');
+    return stripTrailingSlashes(prefix) + '/' + path;
+};
+const maxReadSize = 16 * 1024 * 1024;
+const PROCESS = Symbol('process');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const HEADER = Symbol('header');
+const READ = Symbol('read');
+const LSTAT = Symbol('lstat');
+const ONLSTAT = Symbol('onlstat');
+const ONREAD = Symbol('onread');
+const ONREADLINK = Symbol('onreadlink');
+const OPENFILE = Symbol('openfile');
+const ONOPENFILE = Symbol('onopenfile');
+const CLOSE = Symbol('close');
+const MODE = Symbol('mode');
+const AWAITDRAIN = Symbol('awaitDrain');
+const ONDRAIN = Symbol('ondrain');
+const PREFIX = Symbol('prefix');
+export class WriteEntry extends Minipass {
+    path;
+    portable;
+    myuid = (process.getuid && process.getuid()) || 0;
+    // until node has builtin pwnam functions, this'll have to do
+    myuser = process.env.USER || '';
+    maxReadSize;
+    linkCache;
+    statCache;
+    preservePaths;
+    cwd;
+    strict;
+    mtime;
+    noPax;
+    noMtime;
+    prefix;
+    fd;
+    blockLen = 0;
+    blockRemain = 0;
+    buf;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    offset = 0;
+    win32;
+    absolute;
+    header;
+    type;
+    linkpath;
+    stat;
+    onWriteEntry;
+    #hadError = false;
+    constructor(p, opt_ = {}) {
+        const opt = dealias(opt_);
+        super();
+        this.path = normalizeWindowsPath(p);
+        // suppress atime, ctime, uid, gid, uname, gname
+        this.portable = !!opt.portable;
+        this.maxReadSize = opt.maxReadSize || maxReadSize;
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.preservePaths = !!opt.preservePaths;
+        this.cwd = normalizeWindowsPath(opt.cwd || process.cwd());
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.mtime = opt.mtime;
+        this.prefix =
+            opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined;
+        this.onWriteEntry = opt.onWriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = stripAbsolutePath(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.win32 = !!opt.win32 || process.platform === 'win32';
+        if (this.win32) {
+            // force the \ to / normalization, since we might not *actually*
+            // be on windows, but want \ to be considered a path separator.
+            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
+            p = p.replace(/\\/g, '/');
+        }
+        this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p));
+        if (this.path === '') {
+            this.path = './';
+        }
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        const cs = this.statCache.get(this.absolute);
+        if (cs) {
+            this[ONLSTAT](cs);
+        }
+        else {
+            this[LSTAT]();
+        }
+    }
+    warn(code, message, data = {}) {
+        return warnMethod(this, code, message, data);
+    }
+    emit(ev, ...data) {
+        if (ev === 'error') {
+            this.#hadError = true;
+        }
+        return super.emit(ev, ...data);
+    }
+    [LSTAT]() {
+        fs.lstat(this.absolute, (er, stat) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONLSTAT](stat);
+        });
+    }
+    [ONLSTAT](stat) {
+        this.statCache.set(this.absolute, stat);
+        this.stat = stat;
+        if (!stat.isFile()) {
+            stat.size = 0;
+        }
+        this.type = getType(stat);
+        this.emit('stat', stat);
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        switch (this.type) {
+            case 'File':
+                return this[FILE]();
+            case 'Directory':
+                return this[DIRECTORY]();
+            case 'SymbolicLink':
+                return this[SYMLINK]();
+            // unsupported types are ignored.
+            default:
+                return this.end();
+        }
+    }
+    [MODE](mode) {
+        return modeFix(mode, this.type === 'Directory', this.portable);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [HEADER]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot write header before stat');
+        }
+        /* c8 ignore stop */
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.onWriteEntry?.(this);
+        this.header = new Header({
+            path: this[PREFIX](this.path),
+            // only apply the prefix to hard links.
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this[MODE](this.stat.mode),
+            uid: this.portable ? undefined : this.stat.uid,
+            gid: this.portable ? undefined : this.stat.gid,
+            size: this.stat.size,
+            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
+            /* c8 ignore next */
+            type: this.type === 'Unsupported' ? undefined : this.type,
+            uname: this.portable ? undefined
+                : this.stat.uid === this.myuid ? this.myuser
+                    : '',
+            atime: this.portable ? undefined : this.stat.atime,
+            ctime: this.portable ? undefined : this.stat.ctime,
+        });
+        if (this.header.encode() && !this.noPax) {
+            super.write(new Pax({
+                atime: this.portable ? undefined : this.header.atime,
+                ctime: this.portable ? undefined : this.header.ctime,
+                gid: this.portable ? undefined : this.header.gid,
+                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.header.size,
+                uid: this.portable ? undefined : this.header.uid,
+                uname: this.portable ? undefined : this.header.uname,
+                dev: this.portable ? undefined : this.stat.dev,
+                ino: this.portable ? undefined : this.stat.ino,
+                nlink: this.portable ? undefined : this.stat.nlink,
+            }).encode());
+        }
+        const block = this.header?.block;
+        /* c8 ignore start */
+        if (!block) {
+            throw new Error('failed to encode header');
+        }
+        /* c8 ignore stop */
+        super.write(block);
+    }
+    [DIRECTORY]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create directory entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.path.slice(-1) !== '/') {
+            this.path += '/';
+        }
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [SYMLINK]() {
+        fs.readlink(this.absolute, (er, linkpath) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADLINK](linkpath);
+        });
+    }
+    [ONREADLINK](linkpath) {
+        this.linkpath = normalizeWindowsPath(linkpath);
+        this[HEADER]();
+        this.end();
+    }
+    [HARDLINK](linkpath) {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create link entry without stat');
+        }
+        /* c8 ignore stop */
+        this.type = 'Link';
+        this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [FILE]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create file entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.stat.nlink > 1) {
+            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
+            const linkpath = this.linkCache.get(linkKey);
+            if (linkpath?.indexOf(this.cwd) === 0) {
+                return this[HARDLINK](linkpath);
+            }
+            this.linkCache.set(linkKey, this.absolute);
+        }
+        this[HEADER]();
+        if (this.stat.size === 0) {
+            return this.end();
+        }
+        this[OPENFILE]();
+    }
+    [OPENFILE]() {
+        fs.open(this.absolute, 'r', (er, fd) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONOPENFILE](fd);
+        });
+    }
+    [ONOPENFILE](fd) {
+        this.fd = fd;
+        if (this.#hadError) {
+            return this[CLOSE]();
+        }
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('should stat before calling onopenfile');
+        }
+        /* c8 ignore start */
+        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
+        this.blockRemain = this.blockLen;
+        const bufLen = Math.min(this.blockLen, this.maxReadSize);
+        this.buf = Buffer.allocUnsafe(bufLen);
+        this.offset = 0;
+        this.pos = 0;
+        this.remain = this.stat.size;
+        this.length = this.buf.length;
+        this[READ]();
+    }
+    [READ]() {
+        const { fd, buf, offset, length, pos } = this;
+        if (fd === undefined || buf === undefined) {
+            throw new Error('cannot read file without first opening');
+        }
+        fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+            if (er) {
+                // ignoring the error from close(2) is a bad practice, but at
+                // this point we already have an error, don't need another one
+                return this[CLOSE](() => this.emit('error', er));
+            }
+            this[ONREAD](bytesRead);
+        });
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs.close(this.fd, cb);
+    }
+    [ONREAD](bytesRead) {
+        if (bytesRead <= 0 && this.remain > 0) {
+            const er = Object.assign(new Error('encountered unexpected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        if (bytesRead > this.remain) {
+            const er = Object.assign(new Error('did not encounter expected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('should have created buffer prior to reading');
+        }
+        /* c8 ignore stop */
+        // null out the rest of the buffer, if we could fit the block padding
+        // at the end of this loop, we've incremented bytesRead and this.remain
+        // to be incremented up to the blockRemain level, as if we had expected
+        // to get a null-padded file, and read it until the end.  then we will
+        // decrement both remain and blockRemain by bytesRead, and know that we
+        // reached the expected EOF, without any null buffer to append.
+        if (bytesRead === this.remain) {
+            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
+                this.buf[i + this.offset] = 0;
+                bytesRead++;
+                this.remain++;
+            }
+        }
+        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
+            this.buf
+            : this.buf.subarray(this.offset, this.offset + bytesRead);
+        const flushed = this.write(chunk);
+        if (!flushed) {
+            this[AWAITDRAIN](() => this[ONDRAIN]());
+        }
+        else {
+            this[ONDRAIN]();
+        }
+    }
+    [AWAITDRAIN](cb) {
+        this.once('drain', cb);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        if (this.blockRemain < chunk.length) {
+            const er = Object.assign(new Error('writing more data than expected'), {
+                path: this.absolute,
+            });
+            return this.emit('error', er);
+        }
+        this.remain -= chunk.length;
+        this.blockRemain -= chunk.length;
+        this.pos += chunk.length;
+        this.offset += chunk.length;
+        return super.write(chunk, null, cb);
+    }
+    [ONDRAIN]() {
+        if (!this.remain) {
+            if (this.blockRemain) {
+                super.write(Buffer.alloc(this.blockRemain));
+            }
+            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('buffer lost somehow in ONDRAIN');
+        }
+        /* c8 ignore stop */
+        if (this.offset >= this.length) {
+            // if we only have a smaller bit left to read, alloc a smaller buffer
+            // otherwise, keep it the same length it was before.
+            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
+            this.offset = 0;
+        }
+        this.length = this.buf.length - this.offset;
+        this[READ]();
+    }
+}
+export class WriteEntrySync extends WriteEntry {
+    sync = true;
+    [LSTAT]() {
+        this[ONLSTAT](fs.lstatSync(this.absolute));
+    }
+    [SYMLINK]() {
+        this[ONREADLINK](fs.readlinkSync(this.absolute));
+    }
+    [OPENFILE]() {
+        this[ONOPENFILE](fs.openSync(this.absolute, 'r'));
+    }
+    [READ]() {
+        let threw = true;
+        try {
+            const { fd, buf, offset, length, pos } = this;
+            /* c8 ignore start */
+            if (fd === undefined || buf === undefined) {
+                throw new Error('fd and buf must be set in READ method');
+            }
+            /* c8 ignore stop */
+            const bytesRead = fs.readSync(fd, buf, offset, length, pos);
+            this[ONREAD](bytesRead);
+            threw = false;
+        }
+        finally {
+            // ignoring the error from close(2) is a bad practice, but at
+            // this point we already have an error, don't need another one
+            if (threw) {
+                try {
+                    this[CLOSE](() => { });
+                }
+                catch (er) { }
+            }
+        }
+    }
+    [AWAITDRAIN](cb) {
+        cb();
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs.closeSync(this.fd);
+        cb();
+    }
+}
+export class WriteEntryTar extends Minipass {
+    blockLen = 0;
+    blockRemain = 0;
+    buf = 0;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    preservePaths;
+    portable;
+    strict;
+    noPax;
+    noMtime;
+    readEntry;
+    type;
+    prefix;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    header;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    size;
+    onWriteEntry;
+    warn(code, message, data = {}) {
+        return warnMethod(this, code, message, data);
+    }
+    constructor(readEntry, opt_ = {}) {
+        const opt = dealias(opt_);
+        super();
+        this.preservePaths = !!opt.preservePaths;
+        this.portable = !!opt.portable;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.onWriteEntry = opt.onWriteEntry;
+        this.readEntry = readEntry;
+        const { type } = readEntry;
+        /* c8 ignore start */
+        if (type === 'Unsupported') {
+            throw new Error('writing entry that should be ignored');
+        }
+        /* c8 ignore stop */
+        this.type = type;
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.prefix = opt.prefix;
+        this.path = normalizeWindowsPath(readEntry.path);
+        this.mode =
+            readEntry.mode !== undefined ?
+                this[MODE](readEntry.mode)
+                : undefined;
+        this.uid = this.portable ? undefined : readEntry.uid;
+        this.gid = this.portable ? undefined : readEntry.gid;
+        this.uname = this.portable ? undefined : readEntry.uname;
+        this.gname = this.portable ? undefined : readEntry.gname;
+        this.size = readEntry.size;
+        this.mtime =
+            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
+        this.atime = this.portable ? undefined : readEntry.atime;
+        this.ctime = this.portable ? undefined : readEntry.ctime;
+        this.linkpath =
+            readEntry.linkpath !== undefined ?
+                normalizeWindowsPath(readEntry.linkpath)
+                : undefined;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = stripAbsolutePath(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.remain = readEntry.size;
+        this.blockRemain = readEntry.startBlockSize;
+        this.onWriteEntry?.(this);
+        this.header = new Header({
+            path: this[PREFIX](this.path),
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this.mode,
+            uid: this.portable ? undefined : this.uid,
+            gid: this.portable ? undefined : this.gid,
+            size: this.size,
+            mtime: this.noMtime ? undefined : this.mtime,
+            type: this.type,
+            uname: this.portable ? undefined : this.uname,
+            atime: this.portable ? undefined : this.atime,
+            ctime: this.portable ? undefined : this.ctime,
+        });
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        if (this.header.encode() && !this.noPax) {
+            super.write(new Pax({
+                atime: this.portable ? undefined : this.atime,
+                ctime: this.portable ? undefined : this.ctime,
+                gid: this.portable ? undefined : this.gid,
+                mtime: this.noMtime ? undefined : this.mtime,
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.size,
+                uid: this.portable ? undefined : this.uid,
+                uname: this.portable ? undefined : this.uname,
+                dev: this.portable ? undefined : this.readEntry.dev,
+                ino: this.portable ? undefined : this.readEntry.ino,
+                nlink: this.portable ? undefined : this.readEntry.nlink,
+            }).encode());
+        }
+        const b = this.header?.block;
+        /* c8 ignore start */
+        if (!b)
+            throw new Error('failed to encode header');
+        /* c8 ignore stop */
+        super.write(b);
+        readEntry.pipe(this);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [MODE](mode) {
+        return modeFix(mode, this.type === 'Directory', this.portable);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        const writeLen = chunk.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        this.blockRemain -= writeLen;
+        return super.write(chunk, cb);
+    }
+    end(chunk, encoding, cb) {
+        if (this.blockRemain) {
+            super.write(Buffer.alloc(this.blockRemain));
+        }
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding ?? 'utf8');
+        }
+        if (cb)
+            this.once('finish', cb);
+        chunk ? super.end(chunk, cb) : super.end(cb);
+        /* c8 ignore stop */
+        return this;
+    }
+}
+const getType = (stat) => stat.isFile() ? 'File'
+    : stat.isDirectory() ? 'Directory'
+        : stat.isSymbolicLink() ? 'SymbolicLink'
+            : 'Unsupported';
+//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/package.json b/node_modules/pacote/node_modules/tar/package.json
new file mode 100644
index 0000000000000..0283103ee9eaf
--- /dev/null
+++ b/node_modules/pacote/node_modules/tar/package.json
@@ -0,0 +1,325 @@
+{
+  "author": "Isaac Z. Schlueter",
+  "name": "tar",
+  "description": "tar for node",
+  "version": "7.4.3",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/isaacs/node-tar.git"
+  },
+  "scripts": {
+    "genparse": "node scripts/generate-parse-fixtures.js",
+    "snap": "tap",
+    "test": "tap",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "tshy",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "dependencies": {
+    "@isaacs/fs-minipass": "^4.0.0",
+    "chownr": "^3.0.0",
+    "minipass": "^7.1.2",
+    "minizlib": "^3.0.1",
+    "mkdirp": "^3.0.1",
+    "yallist": "^5.0.0"
+  },
+  "devDependencies": {
+    "chmodr": "^1.2.0",
+    "end-of-stream": "^1.4.3",
+    "events-to-array": "^2.0.3",
+    "mutate-fs": "^2.1.1",
+    "nock": "^13.5.4",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.13"
+  },
+  "license": "ISC",
+  "engines": {
+    "node": ">=18"
+  },
+  "files": [
+    "dist"
+  ],
+  "tap": {
+    "coverage-map": "map.js",
+    "timeout": 0,
+    "typecheck": true
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts",
+      "./c": "./src/create.ts",
+      "./create": "./src/create.ts",
+      "./replace": "./src/create.ts",
+      "./r": "./src/create.ts",
+      "./list": "./src/list.ts",
+      "./t": "./src/list.ts",
+      "./update": "./src/update.ts",
+      "./u": "./src/update.ts",
+      "./extract": "./src/extract.ts",
+      "./x": "./src/extract.ts",
+      "./pack": "./src/pack.ts",
+      "./unpack": "./src/unpack.ts",
+      "./parse": "./src/parse.ts",
+      "./read-entry": "./src/read-entry.ts",
+      "./write-entry": "./src/write-entry.ts",
+      "./header": "./src/header.ts",
+      "./pax": "./src/pax.ts",
+      "./types": "./src/types.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "source": "./src/index.ts",
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "source": "./src/index.ts",
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./c": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./create": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./replace": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./r": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./list": {
+      "import": {
+        "source": "./src/list.ts",
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "source": "./src/list.ts",
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
+    "./t": {
+      "import": {
+        "source": "./src/list.ts",
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "source": "./src/list.ts",
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
+    "./update": {
+      "import": {
+        "source": "./src/update.ts",
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "source": "./src/update.ts",
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
+    "./u": {
+      "import": {
+        "source": "./src/update.ts",
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "source": "./src/update.ts",
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
+    "./extract": {
+      "import": {
+        "source": "./src/extract.ts",
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "source": "./src/extract.ts",
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
+    "./x": {
+      "import": {
+        "source": "./src/extract.ts",
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "source": "./src/extract.ts",
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
+    "./pack": {
+      "import": {
+        "source": "./src/pack.ts",
+        "types": "./dist/esm/pack.d.ts",
+        "default": "./dist/esm/pack.js"
+      },
+      "require": {
+        "source": "./src/pack.ts",
+        "types": "./dist/commonjs/pack.d.ts",
+        "default": "./dist/commonjs/pack.js"
+      }
+    },
+    "./unpack": {
+      "import": {
+        "source": "./src/unpack.ts",
+        "types": "./dist/esm/unpack.d.ts",
+        "default": "./dist/esm/unpack.js"
+      },
+      "require": {
+        "source": "./src/unpack.ts",
+        "types": "./dist/commonjs/unpack.d.ts",
+        "default": "./dist/commonjs/unpack.js"
+      }
+    },
+    "./parse": {
+      "import": {
+        "source": "./src/parse.ts",
+        "types": "./dist/esm/parse.d.ts",
+        "default": "./dist/esm/parse.js"
+      },
+      "require": {
+        "source": "./src/parse.ts",
+        "types": "./dist/commonjs/parse.d.ts",
+        "default": "./dist/commonjs/parse.js"
+      }
+    },
+    "./read-entry": {
+      "import": {
+        "source": "./src/read-entry.ts",
+        "types": "./dist/esm/read-entry.d.ts",
+        "default": "./dist/esm/read-entry.js"
+      },
+      "require": {
+        "source": "./src/read-entry.ts",
+        "types": "./dist/commonjs/read-entry.d.ts",
+        "default": "./dist/commonjs/read-entry.js"
+      }
+    },
+    "./write-entry": {
+      "import": {
+        "source": "./src/write-entry.ts",
+        "types": "./dist/esm/write-entry.d.ts",
+        "default": "./dist/esm/write-entry.js"
+      },
+      "require": {
+        "source": "./src/write-entry.ts",
+        "types": "./dist/commonjs/write-entry.d.ts",
+        "default": "./dist/commonjs/write-entry.js"
+      }
+    },
+    "./header": {
+      "import": {
+        "source": "./src/header.ts",
+        "types": "./dist/esm/header.d.ts",
+        "default": "./dist/esm/header.js"
+      },
+      "require": {
+        "source": "./src/header.ts",
+        "types": "./dist/commonjs/header.d.ts",
+        "default": "./dist/commonjs/header.js"
+      }
+    },
+    "./pax": {
+      "import": {
+        "source": "./src/pax.ts",
+        "types": "./dist/esm/pax.d.ts",
+        "default": "./dist/esm/pax.js"
+      },
+      "require": {
+        "source": "./src/pax.ts",
+        "types": "./dist/commonjs/pax.d.ts",
+        "default": "./dist/commonjs/pax.js"
+      }
+    },
+    "./types": {
+      "import": {
+        "source": "./src/types.ts",
+        "types": "./dist/esm/types.d.ts",
+        "default": "./dist/esm/types.js"
+      },
+      "require": {
+        "source": "./src/types.ts",
+        "types": "./dist/commonjs/types.d.ts",
+        "default": "./dist/commonjs/types.js"
+      }
+    }
+  },
+  "type": "module",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts"
+}
diff --git a/node_modules/pacote/node_modules/tuf-js/LICENSE b/node_modules/pacote/node_modules/tuf-js/LICENSE
new file mode 100644
index 0000000000000..420700f5d3765
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 GitHub and the TUF Contributors
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/config.js b/node_modules/pacote/node_modules/tuf-js/dist/config.js
new file mode 100644
index 0000000000000..c66d76af86b98
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/config.js
@@ -0,0 +1,15 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.defaultConfig = void 0;
+exports.defaultConfig = {
+    maxRootRotations: 256,
+    maxDelegations: 32,
+    rootMaxLength: 512000, //bytes
+    timestampMaxLength: 16384, // bytes
+    snapshotMaxLength: 2000000, // bytes
+    targetsMaxLength: 5000000, // bytes
+    prefixTargetsWithHash: true,
+    fetchTimeout: 100000, // milliseconds
+    fetchRetries: undefined,
+    fetchRetry: 2,
+};
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/error.js b/node_modules/pacote/node_modules/tuf-js/dist/error.js
new file mode 100644
index 0000000000000..3a3c26a068a95
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/error.js
@@ -0,0 +1,49 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0;
+// An error about insufficient values
+class ValueError extends Error {
+}
+exports.ValueError = ValueError;
+class RuntimeError extends Error {
+}
+exports.RuntimeError = RuntimeError;
+class PersistError extends Error {
+}
+exports.PersistError = PersistError;
+// An error with a repository's state, such as a missing file.
+// It covers all exceptions that come from the repository side when
+// looking from the perspective of users of metadata API or ngclient.
+class RepositoryError extends Error {
+}
+exports.RepositoryError = RepositoryError;
+// An error for metadata that contains an invalid version number.
+class BadVersionError extends RepositoryError {
+}
+exports.BadVersionError = BadVersionError;
+// An error for metadata containing a previously verified version number.
+class EqualVersionError extends BadVersionError {
+}
+exports.EqualVersionError = EqualVersionError;
+// Indicate that a TUF Metadata file has expired.
+class ExpiredMetadataError extends RepositoryError {
+}
+exports.ExpiredMetadataError = ExpiredMetadataError;
+//----- Download Errors -------------------------------------------------------
+// An error occurred while attempting to download a file.
+class DownloadError extends Error {
+}
+exports.DownloadError = DownloadError;
+// Indicate that a mismatch of lengths was seen while downloading a file
+class DownloadLengthMismatchError extends DownloadError {
+}
+exports.DownloadLengthMismatchError = DownloadLengthMismatchError;
+// Returned by FetcherInterface implementations for HTTP errors.
+class DownloadHTTPError extends DownloadError {
+    statusCode;
+    constructor(message, statusCode) {
+        super(message);
+        this.statusCode = statusCode;
+    }
+}
+exports.DownloadHTTPError = DownloadHTTPError;
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js b/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js
new file mode 100644
index 0000000000000..b964135c7b008
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js
@@ -0,0 +1,86 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DefaultFetcher = exports.BaseFetcher = void 0;
+const debug_1 = __importDefault(require("debug"));
+const fs_1 = __importDefault(require("fs"));
+const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
+const util_1 = __importDefault(require("util"));
+const error_1 = require("./error");
+const tmpfile_1 = require("./utils/tmpfile");
+const log = (0, debug_1.default)('tuf:fetch');
+class BaseFetcher {
+    // Download file from given URL. The file is downloaded to a temporary
+    // location and then passed to the given handler. The handler is responsible
+    // for moving the file to its final location. The temporary file is deleted
+    // after the handler returns.
+    async downloadFile(url, maxLength, handler) {
+        return (0, tmpfile_1.withTempFile)(async (tmpFile) => {
+            const reader = await this.fetch(url);
+            let numberOfBytesReceived = 0;
+            const fileStream = fs_1.default.createWriteStream(tmpFile);
+            // Read the stream a chunk at a time so that we can check
+            // the length of the file as we go
+            try {
+                for await (const chunk of reader) {
+                    numberOfBytesReceived += chunk.length;
+                    if (numberOfBytesReceived > maxLength) {
+                        throw new error_1.DownloadLengthMismatchError('Max length reached');
+                    }
+                    await writeBufferToStream(fileStream, chunk);
+                }
+            }
+            finally {
+                // Make sure we always close the stream
+                // eslint-disable-next-line @typescript-eslint/unbound-method
+                await util_1.default.promisify(fileStream.close).bind(fileStream)();
+            }
+            return handler(tmpFile);
+        });
+    }
+    // Download bytes from given URL.
+    async downloadBytes(url, maxLength) {
+        return this.downloadFile(url, maxLength, async (file) => {
+            const stream = fs_1.default.createReadStream(file);
+            const chunks = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+            return Buffer.concat(chunks);
+        });
+    }
+}
+exports.BaseFetcher = BaseFetcher;
+class DefaultFetcher extends BaseFetcher {
+    timeout;
+    retry;
+    constructor(options = {}) {
+        super();
+        this.timeout = options.timeout;
+        this.retry = options.retry;
+    }
+    async fetch(url) {
+        log('GET %s', url);
+        const response = await (0, make_fetch_happen_1.default)(url, {
+            timeout: this.timeout,
+            retry: this.retry,
+        });
+        if (!response.ok || !response?.body) {
+            throw new error_1.DownloadHTTPError('Failed to download', response.status);
+        }
+        return response.body;
+    }
+}
+exports.DefaultFetcher = DefaultFetcher;
+const writeBufferToStream = async (stream, buffer) => {
+    return new Promise((resolve, reject) => {
+        stream.write(buffer, (err) => {
+            if (err) {
+                reject(err);
+            }
+            resolve(true);
+        });
+    });
+};
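
DefaultFetcher is the make-fetch-happen-backed implementation that the Updater falls back to when no custom fetcher is supplied; downloadBytes streams through a temp file and enforces the byte limit as it goes. A small usage sketch with hypothetical values:

const { DefaultFetcher } = require('./fetcher');

const fetcher = new DefaultFetcher({ timeout: 30000, retry: 2 });

// cap the download at 16 KiB; DownloadLengthMismatchError is thrown past that
fetcher
  .downloadBytes('https://example.com/metadata/timestamp.json', 16384)
  .then((bytes) => console.log('fetched %d bytes', bytes.length))
  .catch((err) => console.error('download failed', err));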
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/index.js b/node_modules/pacote/node_modules/tuf-js/dist/index.js
new file mode 100644
index 0000000000000..5a83b91f355d8
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/index.js
@@ -0,0 +1,9 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0;
+var models_1 = require("@tufjs/models");
+Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } });
+var fetcher_1 = require("./fetcher");
+Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } });
+var updater_1 = require("./updater");
+Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } });
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/store.js b/node_modules/pacote/node_modules/tuf-js/dist/store.js
new file mode 100644
index 0000000000000..1b1669029a8db
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/store.js
@@ -0,0 +1,219 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TrustedMetadataStore = void 0;
+const models_1 = require("@tufjs/models");
+const error_1 = require("./error");
+class TrustedMetadataStore {
+    trustedSet = {};
+    referenceTime;
+    constructor(rootData) {
+        // Client workflow 5.1: record fixed update start time
+        this.referenceTime = new Date();
+        // Client workflow 5.2: load trusted root metadata
+        this.loadTrustedRoot(rootData);
+    }
+    get root() {
+        if (!this.trustedSet.root) {
+            throw new ReferenceError('No trusted root metadata');
+        }
+        return this.trustedSet.root;
+    }
+    get timestamp() {
+        return this.trustedSet.timestamp;
+    }
+    get snapshot() {
+        return this.trustedSet.snapshot;
+    }
+    get targets() {
+        return this.trustedSet.targets;
+    }
+    getRole(name) {
+        return this.trustedSet[name];
+    }
+    updateRoot(bytesBuffer) {
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+        const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
+        if (newRoot.signed.type != models_1.MetadataKind.Root) {
+            throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`);
+        }
+        // Client workflow 5.4: check for arbitrary software attack
+        this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot);
+        // Client workflow 5.5: check for rollback attack
+        if (newRoot.signed.version != this.root.signed.version + 1) {
+            throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`);
+        }
+        // Check that new root is signed by self
+        newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot);
+        // Client workflow 5.7: set new root as trusted root
+        this.trustedSet.root = newRoot;
+        return newRoot;
+    }
+    updateTimestamp(bytesBuffer) {
+        if (this.snapshot) {
+            throw new error_1.RuntimeError('Cannot update timestamp after snapshot');
+        }
+        if (this.root.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('Final root.json is expired');
+        }
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+        const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data);
+        if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) {
+            throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`);
+        }
+        // Client workflow 5.4.2: check for arbitrary software attack
+        this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp);
+        if (this.timestamp) {
+            // Prevent rolling back timestamp version
+            // Client workflow 5.4.3.1: check for rollback attack
+            if (newTimestamp.signed.version < this.timestamp.signed.version) {
+                throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`);
+            }
+            //  Keep using old timestamp if versions are equal.
+            if (newTimestamp.signed.version === this.timestamp.signed.version) {
+                throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`);
+            }
+            // Prevent rolling back snapshot version
+            // Client workflow 5.4.3.2: check for rollback attack
+            const snapshotMeta = this.timestamp.signed.snapshotMeta;
+            const newSnapshotMeta = newTimestamp.signed.snapshotMeta;
+            if (newSnapshotMeta.version < snapshotMeta.version) {
+                throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`);
+            }
+        }
+        // expiry not checked to allow old timestamp to be used for rollback
+        // protection of new timestamp: expiry is checked in updateSnapshot
+        this.trustedSet.timestamp = newTimestamp;
+        // Client workflow 5.4.4: check for freeze attack
+        this.checkFinalTimestamp();
+        return newTimestamp;
+    }
+    updateSnapshot(bytesBuffer, trusted = false) {
+        if (!this.timestamp) {
+            throw new error_1.RuntimeError('Cannot update snapshot before timestamp');
+        }
+        if (this.targets) {
+            throw new error_1.RuntimeError('Cannot update snapshot after targets');
+        }
+        // Snapshot cannot be loaded if final timestamp is expired
+        this.checkFinalTimestamp();
+        const snapshotMeta = this.timestamp.signed.snapshotMeta;
+        // Verify non-trusted data against the hashes in timestamp, if any.
+        // Trusted snapshot data has already been verified once.
+        // Client workflow 5.5.2: check against timestamp role's snapshot hash
+        if (!trusted) {
+            snapshotMeta.verify(bytesBuffer);
+        }
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+        const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data);
+        if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) {
+            throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`);
+        }
+        // Client workflow 5.5.3: check for arbitrary software attack
+        this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot);
+        // version check against meta version (5.5.4) is deferred to allow old
+        // snapshot to be used in rollback protection
+        // Client workflow 5.5.5: check for rollback attack
+        if (this.snapshot) {
+            Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => {
+                const newFileInfo = newSnapshot.signed.meta[fileName];
+                if (!newFileInfo) {
+                    throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`);
+                }
+                if (newFileInfo.version < fileInfo.version) {
+                    throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`);
+                }
+            });
+        }
+        this.trustedSet.snapshot = newSnapshot;
+        // snapshot is loaded, but we raise if it's not valid _final_ snapshot
+        // Client workflow 5.5.4 & 5.5.6
+        this.checkFinalSnapsnot();
+        return newSnapshot;
+    }
+    updateDelegatedTargets(bytesBuffer, roleName, delegatorName) {
+        if (!this.snapshot) {
+            throw new error_1.RuntimeError('Cannot update delegated targets before snapshot');
+        }
+        // Targets cannot be loaded if final snapshot is expired or its version
+        // does not match meta version in timestamp.
+        this.checkFinalSnapsnot();
+        const delegator = this.trustedSet[delegatorName];
+        if (!delegator) {
+            throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`);
+        }
+        // Extract metadata for the delegated role from snapshot
+        const meta = this.snapshot.signed.meta?.[`${roleName}.json`];
+        if (!meta) {
+            throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`);
+        }
+        // Client workflow 5.6.2: check against snapshot role's targets hash
+        meta.verify(bytesBuffer);
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+        const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data);
+        if (newDelegate.signed.type != models_1.MetadataKind.Targets) {
+            throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`);
+        }
+        // Client workflow 5.6.3: check for arbitrary software attack
+        delegator.verifyDelegate(roleName, newDelegate);
+        // Client workflow 5.6.4: Check against snapshot role’s targets version
+        const version = newDelegate.signed.version;
+        if (version != meta.version) {
+            throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`);
+        }
+        // Client workflow 5.6.5: check for a freeze attack
+        if (newDelegate.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`);
+        }
+        this.trustedSet[roleName] = newDelegate;
+    }
+    // Verifies and loads data as trusted root metadata.
+    // Note that an expired initial root is still considered valid.
+    loadTrustedRoot(bytesBuffer) {
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
+        const data = JSON.parse(bytesBuffer.toString('utf8'));
+        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
+        const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
+        if (root.signed.type != models_1.MetadataKind.Root) {
+            throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`);
+        }
+        root.verifyDelegate(models_1.MetadataKind.Root, root);
+        this.trustedSet['root'] = root;
+    }
+    checkFinalTimestamp() {
+        // Timestamp MUST be loaded
+        if (!this.timestamp) {
+            throw new ReferenceError('No trusted timestamp metadata');
+        }
+        // Client workflow 5.4.4: check for freeze attack
+        if (this.timestamp.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('Final timestamp.json is expired');
+        }
+    }
+    checkFinalSnapsnot() {
+        // Snapshot and timestamp MUST be loaded
+        if (!this.snapshot) {
+            throw new ReferenceError('No trusted snapshot metadata');
+        }
+        if (!this.timestamp) {
+            throw new ReferenceError('No trusted timestamp metadata');
+        }
+        // Client workflow 5.5.6: check for freeze attack
+        if (this.snapshot.signed.isExpired(this.referenceTime)) {
+            throw new error_1.ExpiredMetadataError('snapshot.json is expired');
+        }
+        // Client workflow 5.5.4: check against timestamp role’s snapshot version
+        const snapshotMeta = this.timestamp.signed.snapshotMeta;
+        if (this.snapshot.signed.version !== snapshotMeta.version) {
+            throw new error_1.BadVersionError("Snapshot version doesn't match timestamp");
+        }
+    }
+}
+exports.TrustedMetadataStore = TrustedMetadataStore;
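
TrustedMetadataStore enforces the TUF client-workflow ordering: a trusted root is required at construction, and timestamp, snapshot, and targets must be loaded in that order or a RuntimeError is thrown. A hedged sketch with hypothetical metadata buffers (the Updater below is the real driver of these calls):

const { TrustedMetadataStore } = require('./store');

// rootBytes, timestampBytes, snapshotBytes and targetsBytes are hypothetical
// Buffers of signed metadata obtained elsewhere (for example via a fetcher).
function loadTrustedChain(rootBytes, timestampBytes, snapshotBytes, targetsBytes) {
  const store = new TrustedMetadataStore(rootBytes);             // workflow 5.1 / 5.2
  store.updateTimestamp(timestampBytes);                         // workflow 5.4
  store.updateSnapshot(snapshotBytes);                           // workflow 5.5
  store.updateDelegatedTargets(targetsBytes, 'targets', 'root'); // workflow 5.6
  return store.targets;
}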
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/updater.js b/node_modules/pacote/node_modules/tuf-js/dist/updater.js
new file mode 100644
index 0000000000000..32046e4bec417
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/updater.js
@@ -0,0 +1,368 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Updater = void 0;
+const models_1 = require("@tufjs/models");
+const debug_1 = __importDefault(require("debug"));
+const fs = __importStar(require("fs"));
+const path = __importStar(require("path"));
+const config_1 = require("./config");
+const error_1 = require("./error");
+const fetcher_1 = require("./fetcher");
+const store_1 = require("./store");
+const url = __importStar(require("./utils/url"));
+const log = (0, debug_1.default)('tuf:cache');
+class Updater {
+    dir;
+    metadataBaseUrl;
+    targetDir;
+    targetBaseUrl;
+    forceCache;
+    trustedSet;
+    config;
+    fetcher;
+    constructor(options) {
+        const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options;
+        this.dir = metadataDir;
+        this.metadataBaseUrl = metadataBaseUrl;
+        this.targetDir = targetDir;
+        this.targetBaseUrl = targetBaseUrl;
+        this.forceCache = options.forceCache ?? false;
+        const data = this.loadLocalMetadata(models_1.MetadataKind.Root);
+        this.trustedSet = new store_1.TrustedMetadataStore(data);
+        this.config = { ...config_1.defaultConfig, ...config };
+        this.fetcher =
+            fetcher ||
+                new fetcher_1.DefaultFetcher({
+                    timeout: this.config.fetchTimeout,
+                    retry: this.config.fetchRetries ?? this.config.fetchRetry,
+                });
+    }
+    // refresh and load the metadata before downloading the target
+    // refresh should be called once after the client is initialized
+    async refresh() {
+        // If forceCache is true, try to load the timestamp from local storage
+        // without fetching it from the remote. Otherwise, load the root and
+        // timestamp from the remote per the TUF spec.
+        if (this.forceCache) {
+            // If anything fails, load the root and timestamp from the remote. This
+            // should cover any situation where the local metadata is corrupted or
+            // expired.
+            try {
+                await this.loadTimestamp({ checkRemote: false });
+            }
+            catch (error) {
+                await this.loadRoot();
+                await this.loadTimestamp();
+            }
+        }
+        else {
+            await this.loadRoot();
+            await this.loadTimestamp();
+        }
+        await this.loadSnapshot();
+        await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root);
+    }
+    // Returns the TargetFile instance with information for the given target path.
+    //
+    // Implicitly calls refresh if it hasn't already been called.
+    async getTargetInfo(targetPath) {
+        if (!this.trustedSet.targets) {
+            await this.refresh();
+        }
+        return this.preorderDepthFirstWalk(targetPath);
+    }
+    async downloadTarget(targetInfo, filePath, targetBaseUrl) {
+        const targetPath = filePath || this.generateTargetPath(targetInfo);
+        if (!targetBaseUrl) {
+            if (!this.targetBaseUrl) {
+                throw new error_1.ValueError('Target base URL not set');
+            }
+            targetBaseUrl = this.targetBaseUrl;
+        }
+        let targetFilePath = targetInfo.path;
+        const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot;
+        if (consistentSnapshot && this.config.prefixTargetsWithHash) {
+            const hashes = Object.values(targetInfo.hashes);
+            const { dir, base } = path.parse(targetFilePath);
+            const filename = `${hashes[0]}.${base}`;
+            targetFilePath = dir ? `${dir}/${filename}` : filename;
+        }
+        const targetUrl = url.join(targetBaseUrl, targetFilePath);
+        // Client workflow 5.7.3: download target file
+        await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => {
+            // Verify hashes and length of downloaded file
+            await targetInfo.verify(fs.createReadStream(fileName));
+            // Copy file to target path
+            log('WRITE %s', targetPath);
+            fs.copyFileSync(fileName, targetPath);
+        });
+        return targetPath;
+    }
+    async findCachedTarget(targetInfo, filePath) {
+        if (!filePath) {
+            filePath = this.generateTargetPath(targetInfo);
+        }
+        try {
+            if (fs.existsSync(filePath)) {
+                await targetInfo.verify(fs.createReadStream(filePath));
+                return filePath;
+            }
+        }
+        catch (error) {
+            return; // File not found
+        }
+        return; // File not found
+    }
+    loadLocalMetadata(fileName) {
+        const filePath = path.join(this.dir, `${fileName}.json`);
+        log('READ %s', filePath);
+        return fs.readFileSync(filePath);
+    }
+    // Sequentially load and persist on local disk every newer root metadata
+    // version available on the remote.
+    // Client workflow 5.3: update root role
+    async loadRoot() {
+        // Client workflow 5.3.2: version of trusted root metadata file
+        const rootVersion = this.trustedSet.root.signed.version;
+        const lowerBound = rootVersion + 1;
+        const upperBound = lowerBound + this.config.maxRootRotations;
+        for (let version = lowerBound; version < upperBound; version++) {
+            const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`);
+            try {
+                // Client workflow 5.3.3: download new root metadata file
+                const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength);
+                // Client workflow 5.3.4 - 5.4.7
+                this.trustedSet.updateRoot(bytesData);
+                // Client workflow 5.3.8: persist root metadata file
+                this.persistMetadata(models_1.MetadataKind.Root, bytesData);
+            }
+            catch (error) {
+                if (error instanceof error_1.DownloadHTTPError) {
+                    //  404/403 means current root is newest available
+                    if ([403, 404].includes(error.statusCode)) {
+                        break;
+                    }
+                }
+                throw error;
+            }
+        }
+    }
+    // Load local and remote timestamp metadata.
+    // Client workflow 5.4: update timestamp role
+    async loadTimestamp({ checkRemote } = { checkRemote: true }) {
+        // Load local and remote timestamp metadata
+        try {
+            const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp);
+            this.trustedSet.updateTimestamp(data);
+            // If checkRemote is disabled, return here to avoid fetching the remote
+            // timestamp metadata.
+            if (!checkRemote) {
+                return;
+            }
+        }
+        catch (error) {
+            // continue
+        }
+        // Load from remote (whether local load succeeded or not)
+        const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json');
+        // Client workflow 5.4.1: download timestamp metadata file
+        const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength);
+        try {
+            // Client workflow 5.4.2 - 5.4.4
+            this.trustedSet.updateTimestamp(bytesData);
+        }
+        catch (error) {
+            // If the new timestamp version is the same as the current one, discard the new one.
+            // This is normal and should NOT raise an error.
+            if (error instanceof error_1.EqualVersionError) {
+                return;
+            }
+            // Re-raise any other error
+            throw error;
+        }
+        // Client workflow 5.4.5: persist timestamp metadata
+        this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData);
+    }
+    // Load local and remote snapshot metadata.
+    // Client workflow 5.5: update snapshot role
+    async loadSnapshot() {
+        // Load local (and if needed remote) snapshot metadata
+        try {
+            const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot);
+            this.trustedSet.updateSnapshot(data, true);
+        }
+        catch (error) {
+            if (!this.trustedSet.timestamp) {
+                throw new ReferenceError('No timestamp metadata');
+            }
+            const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta;
+            const maxLength = snapshotMeta.length || this.config.snapshotMaxLength;
+            const version = this.trustedSet.root.signed.consistentSnapshot
+                ? snapshotMeta.version
+                : undefined;
+            const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json');
+            try {
+                // Client workflow 5.5.1: download snapshot metadata file
+                const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength);
+                // Client workflow 5.5.2 - 5.5.6
+                this.trustedSet.updateSnapshot(bytesData);
+                // Client workflow 5.5.7: persist snapshot metadata file
+                this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData);
+            }
+            catch (error) {
+                throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`);
+            }
+        }
+    }
+    // Load local and remote targets metadata.
+    // Client workflow 5.6: update targets role
+    async loadTargets(role, parentRole) {
+        if (this.trustedSet.getRole(role)) {
+            return this.trustedSet.getRole(role);
+        }
+        try {
+            const buffer = this.loadLocalMetadata(role);
+            this.trustedSet.updateDelegatedTargets(buffer, role, parentRole);
+        }
+        catch (error) {
+            // Local 'role' does not exist or is invalid: update from remote
+            if (!this.trustedSet.snapshot) {
+                throw new ReferenceError('No snapshot metadata');
+            }
+            const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`];
+            // TODO: use length for fetching
+            const maxLength = metaInfo.length || this.config.targetsMaxLength;
+            const version = this.trustedSet.root.signed.consistentSnapshot
+                ? metaInfo.version
+                : undefined;
+            const encodedRole = encodeURIComponent(role);
+            const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${encodedRole}.json` : `${encodedRole}.json`);
+            try {
+                // Client workflow 5.6.1: download targets metadata file
+                const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength);
+                // Client workflow 5.6.2 - 5.6.6
+                this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole);
+                // Client workflow 5.6.7: persist targets metadata file
+                this.persistMetadata(role, bytesData);
+            }
+            catch (error) {
+                throw new error_1.RuntimeError(`Unable to load targets error ${error}`);
+            }
+        }
+        return this.trustedSet.getRole(role);
+    }
+    async preorderDepthFirstWalk(targetPath) {
+        // Interrogates the tree of target delegations in order of appearance
+        // (which implicitly order trustworthiness), and returns the matching
+        // target found in the most trusted role.
+        // List of delegations to be interrogated. A (role, parent role) pair
+        // is needed to load and verify the delegated targets metadata.
+        const delegationsToVisit = [
+            {
+                roleName: models_1.MetadataKind.Targets,
+                parentRoleName: models_1.MetadataKind.Root,
+            },
+        ];
+        const visitedRoleNames = new Set();
+        // Client workflow 5.6.7: preorder depth-first traversal of the graph of
+        // target delegations
+        while (visitedRoleNames.size <= this.config.maxDelegations &&
+            delegationsToVisit.length > 0) {
+            //  Pop the role name from the top of the stack.
+            const { roleName, parentRoleName } = delegationsToVisit.pop();
+            // Skip the role if it has already been visited, to prevent cycles.
+            // Client workflow 5.6.7.1: skip already-visited roles
+            if (visitedRoleNames.has(roleName)) {
+                continue;
+            }
+            // The metadata for 'role_name' must be downloaded/updated before
+            // its targets, delegations, and child roles can be inspected.
+            const targets = (await this.loadTargets(roleName, parentRoleName))
+                ?.signed;
+            if (!targets) {
+                continue;
+            }
+            const target = targets.targets?.[targetPath];
+            if (target) {
+                return target;
+            }
+            // After preorder check, add current role to set of visited roles.
+            visitedRoleNames.add(roleName);
+            if (targets.delegations) {
+                const childRolesToVisit = [];
+                // NOTE: This may be a slow operation if there are many delegated roles.
+                const rolesForTarget = targets.delegations.rolesForTarget(targetPath);
+                for (const { role: childName, terminating } of rolesForTarget) {
+                    childRolesToVisit.push({
+                        roleName: childName,
+                        parentRoleName: roleName,
+                    });
+                    // Client workflow 5.6.7.2.1
+                    if (terminating) {
+                        delegationsToVisit.splice(0); // empty the array
+                        break;
+                    }
+                }
+                childRolesToVisit.reverse();
+                delegationsToVisit.push(...childRolesToVisit);
+            }
+        }
+        return; // no matching target found
+    }
+    generateTargetPath(targetInfo) {
+        if (!this.targetDir) {
+            throw new error_1.ValueError('Target directory not set');
+        }
+        // URL encode target path
+        const filePath = encodeURIComponent(targetInfo.path);
+        return path.join(this.targetDir, filePath);
+    }
+    persistMetadata(metaDataName, bytesData) {
+        const encodedName = encodeURIComponent(metaDataName);
+        try {
+            const filePath = path.join(this.dir, `${encodedName}.json`);
+            log('WRITE %s', filePath);
+            fs.writeFileSync(filePath, bytesData.toString('utf8'));
+        }
+        catch (error) {
+            throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`);
+        }
+    }
+}
+exports.Updater = Updater;
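
End to end, a consumer points the Updater at a local metadata directory that already contains a trusted root.json plus remote metadata and target base URLs, refreshes, and then resolves and downloads targets. A hedged sketch with hypothetical paths and URLs:

const { Updater } = require('./index'); // or require('tuf-js') when installed

async function fetchTarget(targetName) {
  const updater = new Updater({
    metadataDir: '/tmp/tuf-cache',        // must already hold a trusted root.json
    metadataBaseUrl: 'https://example.com/metadata',
    targetDir: '/tmp/tuf-targets',
    targetBaseUrl: 'https://example.com/targets',
  });
  await updater.refresh(); // root -> timestamp -> snapshot -> targets
  const targetInfo = await updater.getTargetInfo(targetName);
  if (!targetInfo) {
    throw new Error(`${targetName} not found in any trusted targets role`);
  }
  // reuse a previously downloaded copy if it still verifies, otherwise download
  const cached = await updater.findCachedTarget(targetInfo);
  return cached || updater.downloadTarget(targetInfo);
}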
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js
new file mode 100644
index 0000000000000..923eef6044bcc
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js
@@ -0,0 +1,25 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.withTempFile = void 0;
+const promises_1 = __importDefault(require("fs/promises"));
+const os_1 = __importDefault(require("os"));
+const path_1 = __importDefault(require("path"));
+// Invokes the given handler with the path to a temporary file. The file
+// is deleted after the handler returns.
+const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile')));
+exports.withTempFile = withTempFile;
+// Invokes the given handler with a temporary directory. The directory is
+// deleted after the handler returns.
+const withTempDir = async (handler) => {
+    const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir());
+    const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep);
+    try {
+        return await handler(dir);
+    }
+    finally {
+        await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 });
+    }
+};
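
withTempFile hands the handler a path inside a freshly created temporary directory and removes the whole directory afterwards, even if the handler throws, so callers never have to clean up. A small sketch:

const { withTempFile } = require('./tmpfile');
const fs = require('fs/promises');

// write and read back a scratch file; the directory is removed afterwards
withTempFile(async (tmpPath) => {
  await fs.writeFile(tmpPath, 'scratch data');
  return fs.readFile(tmpPath, 'utf8');
}).then((contents) => console.log(contents));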
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js b/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js
new file mode 100644
index 0000000000000..359d1f3ef385b
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js
@@ -0,0 +1,13 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.join = join;
+const url_1 = require("url");
+function join(base, path) {
+    return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString();
+}
+function ensureTrailingSlash(path) {
+    return path.endsWith('/') ? path : path + '/';
+}
+function removeLeadingSlash(path) {
+    return path.startsWith('/') ? path.slice(1) : path;
+}
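
join only normalizes the slash between the two halves before handing the result to the WHATWG URL parser, so callers can pass bases and paths with or without slashes. For example:

const { join } = require('./url');

// both calls yield 'https://example.com/metadata/1.root.json'
join('https://example.com/metadata', '1.root.json');
join('https://example.com/metadata/', '/1.root.json');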
diff --git a/node_modules/pacote/node_modules/tuf-js/package.json b/node_modules/pacote/node_modules/tuf-js/package.json
new file mode 100644
index 0000000000000..c7f53556ac152
--- /dev/null
+++ b/node_modules/pacote/node_modules/tuf-js/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "tuf-js",
+  "version": "4.0.0",
+  "description": "JavaScript implementation of The Update Framework (TUF)",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "scripts": {
+    "build": "tsc --build tsconfig.build.json",
+    "clean": "rm -rf dist && rm tsconfig.build.tsbuildinfo",
+    "test": "jest"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/theupdateframework/tuf-js.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "keywords": [
+    "tuf",
+    "security",
+    "update"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/theupdateframework/tuf-js/issues"
+  },
+  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
+  "devDependencies": {
+    "@tufjs/repo-mock": "4.0.0",
+    "@types/debug": "^4.1.12",
+    "@types/make-fetch-happen": "^10.0.4"
+  },
+  "dependencies": {
+    "@tufjs/models": "4.0.0",
+    "debug": "^4.4.1",
+    "make-fetch-happen": "^15.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/yallist/LICENSE.md b/node_modules/pacote/node_modules/yallist/LICENSE.md
new file mode 100644
index 0000000000000..881248b6d7f0c
--- /dev/null
+++ b/node_modules/pacote/node_modules/yallist/LICENSE.md
@@ -0,0 +1,63 @@
+All packages under `src/` are licensed according to the terms in
+their respective `LICENSE` or `LICENSE.md` files.
+
+The remainder of this project is licensed under the Blue Oak
+Model License, as follows:
+
+-----
+
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/pacote/node_modules/yallist/dist/commonjs/index.js b/node_modules/pacote/node_modules/yallist/dist/commonjs/index.js
new file mode 100644
index 0000000000000..c1e1e4741689d
--- /dev/null
+++ b/node_modules/pacote/node_modules/yallist/dist/commonjs/index.js
@@ -0,0 +1,384 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Node = exports.Yallist = void 0;
+class Yallist {
+    tail;
+    head;
+    length = 0;
+    static create(list = []) {
+        return new Yallist(list);
+    }
+    constructor(list = []) {
+        for (const item of list) {
+            this.push(item);
+        }
+    }
+    *[Symbol.iterator]() {
+        for (let walker = this.head; walker; walker = walker.next) {
+            yield walker.value;
+        }
+    }
+    removeNode(node) {
+        if (node.list !== this) {
+            throw new Error('removing node which does not belong to this list');
+        }
+        const next = node.next;
+        const prev = node.prev;
+        if (next) {
+            next.prev = prev;
+        }
+        if (prev) {
+            prev.next = next;
+        }
+        if (node === this.head) {
+            this.head = next;
+        }
+        if (node === this.tail) {
+            this.tail = prev;
+        }
+        this.length--;
+        node.next = undefined;
+        node.prev = undefined;
+        node.list = undefined;
+        return next;
+    }
+    unshiftNode(node) {
+        if (node === this.head) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const head = this.head;
+        node.list = this;
+        node.next = head;
+        if (head) {
+            head.prev = node;
+        }
+        this.head = node;
+        if (!this.tail) {
+            this.tail = node;
+        }
+        this.length++;
+    }
+    pushNode(node) {
+        if (node === this.tail) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const tail = this.tail;
+        node.list = this;
+        node.prev = tail;
+        if (tail) {
+            tail.next = node;
+        }
+        this.tail = node;
+        if (!this.head) {
+            this.head = node;
+        }
+        this.length++;
+    }
+    push(...args) {
+        for (let i = 0, l = args.length; i < l; i++) {
+            push(this, args[i]);
+        }
+        return this.length;
+    }
+    unshift(...args) {
+        for (var i = 0, l = args.length; i < l; i++) {
+            unshift(this, args[i]);
+        }
+        return this.length;
+    }
+    pop() {
+        if (!this.tail) {
+            return undefined;
+        }
+        const res = this.tail.value;
+        const t = this.tail;
+        this.tail = this.tail.prev;
+        if (this.tail) {
+            this.tail.next = undefined;
+        }
+        else {
+            this.head = undefined;
+        }
+        t.list = undefined;
+        this.length--;
+        return res;
+    }
+    shift() {
+        if (!this.head) {
+            return undefined;
+        }
+        const res = this.head.value;
+        const h = this.head;
+        this.head = this.head.next;
+        if (this.head) {
+            this.head.prev = undefined;
+        }
+        else {
+            this.tail = undefined;
+        }
+        h.list = undefined;
+        this.length--;
+        return res;
+    }
+    forEach(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.head, i = 0; !!walker; i++) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.next;
+        }
+    }
+    forEachReverse(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.prev;
+        }
+    }
+    get(n) {
+        let i = 0;
+        let walker = this.head;
+        for (; !!walker && i < n; i++) {
+            walker = walker.next;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    getReverse(n) {
+        let i = 0;
+        let walker = this.tail;
+        for (; !!walker && i < n; i++) {
+            // abort out of the list early if we hit a cycle
+            walker = walker.prev;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    map(fn, thisp) {
+        thisp = thisp || this;
+        const res = new Yallist();
+        for (let walker = this.head; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.next;
+        }
+        return res;
+    }
+    mapReverse(fn, thisp) {
+        thisp = thisp || this;
+        var res = new Yallist();
+        for (let walker = this.tail; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.prev;
+        }
+        return res;
+    }
+    reduce(fn, initial) {
+        let acc;
+        let walker = this.head;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.head) {
+            walker = this.head.next;
+            acc = this.head.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (var i = 0; !!walker; i++) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.next;
+        }
+        return acc;
+    }
+    reduceReverse(fn, initial) {
+        let acc;
+        let walker = this.tail;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.tail) {
+            walker = this.tail.prev;
+            acc = this.tail.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (let i = this.length - 1; !!walker; i--) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.prev;
+        }
+        return acc;
+    }
+    toArray() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.head; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.next;
+        }
+        return arr;
+    }
+    toArrayReverse() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.tail; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.prev;
+        }
+        return arr;
+    }
+    slice(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let walker = this.head;
+        let i = 0;
+        for (i = 0; !!walker && i < from; i++) {
+            walker = walker.next;
+        }
+        for (; !!walker && i < to; i++, walker = walker.next) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    sliceReverse(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let i = this.length;
+        let walker = this.tail;
+        for (; !!walker && i > to; i--) {
+            walker = walker.prev;
+        }
+        for (; !!walker && i > from; i--, walker = walker.prev) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    splice(start, deleteCount = 0, ...nodes) {
+        if (start > this.length) {
+            start = this.length - 1;
+        }
+        if (start < 0) {
+            start = this.length + start;
+        }
+        let walker = this.head;
+        for (let i = 0; !!walker && i < start; i++) {
+            walker = walker.next;
+        }
+        const ret = [];
+        for (let i = 0; !!walker && i < deleteCount; i++) {
+            ret.push(walker.value);
+            walker = this.removeNode(walker);
+        }
+        if (!walker) {
+            walker = this.tail;
+        }
+        else if (walker !== this.tail) {
+            walker = walker.prev;
+        }
+        for (const v of nodes) {
+            walker = insertAfter(this, walker, v);
+        }
+        return ret;
+    }
+    reverse() {
+        const head = this.head;
+        const tail = this.tail;
+        for (let walker = head; !!walker; walker = walker.prev) {
+            const p = walker.prev;
+            walker.prev = walker.next;
+            walker.next = p;
+        }
+        this.head = tail;
+        this.tail = head;
+        return this;
+    }
+}
+exports.Yallist = Yallist;
+// insertAfter undefined means "make the node the new head of list"
+function insertAfter(self, node, value) {
+    const prev = node;
+    const next = node ? node.next : self.head;
+    const inserted = new Node(value, prev, next, self);
+    if (inserted.next === undefined) {
+        self.tail = inserted;
+    }
+    if (inserted.prev === undefined) {
+        self.head = inserted;
+    }
+    self.length++;
+    return inserted;
+}
+function push(self, item) {
+    self.tail = new Node(item, self.tail, undefined, self);
+    if (!self.head) {
+        self.head = self.tail;
+    }
+    self.length++;
+}
+function unshift(self, item) {
+    self.head = new Node(item, undefined, self.head, self);
+    if (!self.tail) {
+        self.tail = self.head;
+    }
+    self.length++;
+}
+class Node {
+    list;
+    next;
+    prev;
+    value;
+    constructor(value, prev, next, list) {
+        this.list = list;
+        this.value = value;
+        if (prev) {
+            prev.next = this;
+            this.prev = prev;
+        }
+        else {
+            this.prev = undefined;
+        }
+        if (next) {
+            next.prev = this;
+            this.next = next;
+        }
+        else {
+            this.next = undefined;
+        }
+    }
+}
+exports.Node = Node;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
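
Yallist exposes an array-like surface (push/pop/shift/unshift, map, slice, splice, reduce) on top of a doubly linked list, plus O(1) removal of a node you already hold via removeNode. A short sketch of the CommonJS build:

const { Yallist } = require('./index'); // or require('yallist') when installed

const list = Yallist.create(['a', 'b', 'c']);
list.push('d');              // a b c d
list.unshift('start');       // start a b c d
list.splice(1, 2, 'x');      // drop 'a' and 'b', insert 'x' -> start x c d
console.log(list.toArray()); // [ 'start', 'x', 'c', 'd' ]
console.log(list.length);    // 4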
diff --git a/node_modules/pacote/node_modules/yallist/dist/commonjs/package.json b/node_modules/pacote/node_modules/yallist/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/pacote/node_modules/yallist/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/pacote/node_modules/yallist/dist/esm/index.js b/node_modules/pacote/node_modules/yallist/dist/esm/index.js
new file mode 100644
index 0000000000000..3d81c5113b93a
--- /dev/null
+++ b/node_modules/pacote/node_modules/yallist/dist/esm/index.js
@@ -0,0 +1,379 @@
+export class Yallist {
+    tail;
+    head;
+    length = 0;
+    static create(list = []) {
+        return new Yallist(list);
+    }
+    constructor(list = []) {
+        for (const item of list) {
+            this.push(item);
+        }
+    }
+    *[Symbol.iterator]() {
+        for (let walker = this.head; walker; walker = walker.next) {
+            yield walker.value;
+        }
+    }
+    removeNode(node) {
+        if (node.list !== this) {
+            throw new Error('removing node which does not belong to this list');
+        }
+        const next = node.next;
+        const prev = node.prev;
+        if (next) {
+            next.prev = prev;
+        }
+        if (prev) {
+            prev.next = next;
+        }
+        if (node === this.head) {
+            this.head = next;
+        }
+        if (node === this.tail) {
+            this.tail = prev;
+        }
+        this.length--;
+        node.next = undefined;
+        node.prev = undefined;
+        node.list = undefined;
+        return next;
+    }
+    unshiftNode(node) {
+        if (node === this.head) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const head = this.head;
+        node.list = this;
+        node.next = head;
+        if (head) {
+            head.prev = node;
+        }
+        this.head = node;
+        if (!this.tail) {
+            this.tail = node;
+        }
+        this.length++;
+    }
+    pushNode(node) {
+        if (node === this.tail) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const tail = this.tail;
+        node.list = this;
+        node.prev = tail;
+        if (tail) {
+            tail.next = node;
+        }
+        this.tail = node;
+        if (!this.head) {
+            this.head = node;
+        }
+        this.length++;
+    }
+    push(...args) {
+        for (let i = 0, l = args.length; i < l; i++) {
+            push(this, args[i]);
+        }
+        return this.length;
+    }
+    unshift(...args) {
+        for (var i = 0, l = args.length; i < l; i++) {
+            unshift(this, args[i]);
+        }
+        return this.length;
+    }
+    pop() {
+        if (!this.tail) {
+            return undefined;
+        }
+        const res = this.tail.value;
+        const t = this.tail;
+        this.tail = this.tail.prev;
+        if (this.tail) {
+            this.tail.next = undefined;
+        }
+        else {
+            this.head = undefined;
+        }
+        t.list = undefined;
+        this.length--;
+        return res;
+    }
+    shift() {
+        if (!this.head) {
+            return undefined;
+        }
+        const res = this.head.value;
+        const h = this.head;
+        this.head = this.head.next;
+        if (this.head) {
+            this.head.prev = undefined;
+        }
+        else {
+            this.tail = undefined;
+        }
+        h.list = undefined;
+        this.length--;
+        return res;
+    }
+    forEach(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.head, i = 0; !!walker; i++) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.next;
+        }
+    }
+    forEachReverse(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.prev;
+        }
+    }
+    get(n) {
+        let i = 0;
+        let walker = this.head;
+        for (; !!walker && i < n; i++) {
+            walker = walker.next;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    getReverse(n) {
+        let i = 0;
+        let walker = this.tail;
+        for (; !!walker && i < n; i++) {
+            // abort out of the list early if we hit a cycle
+            walker = walker.prev;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    map(fn, thisp) {
+        thisp = thisp || this;
+        const res = new Yallist();
+        for (let walker = this.head; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.next;
+        }
+        return res;
+    }
+    mapReverse(fn, thisp) {
+        thisp = thisp || this;
+        var res = new Yallist();
+        for (let walker = this.tail; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.prev;
+        }
+        return res;
+    }
+    reduce(fn, initial) {
+        let acc;
+        let walker = this.head;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.head) {
+            walker = this.head.next;
+            acc = this.head.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (var i = 0; !!walker; i++) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.next;
+        }
+        return acc;
+    }
+    reduceReverse(fn, initial) {
+        let acc;
+        let walker = this.tail;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.tail) {
+            walker = this.tail.prev;
+            acc = this.tail.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (let i = this.length - 1; !!walker; i--) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.prev;
+        }
+        return acc;
+    }
+    toArray() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.head; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.next;
+        }
+        return arr;
+    }
+    toArrayReverse() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.tail; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.prev;
+        }
+        return arr;
+    }
+    slice(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let walker = this.head;
+        let i = 0;
+        for (i = 0; !!walker && i < from; i++) {
+            walker = walker.next;
+        }
+        for (; !!walker && i < to; i++, walker = walker.next) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    sliceReverse(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let i = this.length;
+        let walker = this.tail;
+        for (; !!walker && i > to; i--) {
+            walker = walker.prev;
+        }
+        for (; !!walker && i > from; i--, walker = walker.prev) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    splice(start, deleteCount = 0, ...nodes) {
+        if (start > this.length) {
+            start = this.length - 1;
+        }
+        if (start < 0) {
+            start = this.length + start;
+        }
+        let walker = this.head;
+        for (let i = 0; !!walker && i < start; i++) {
+            walker = walker.next;
+        }
+        const ret = [];
+        for (let i = 0; !!walker && i < deleteCount; i++) {
+            ret.push(walker.value);
+            walker = this.removeNode(walker);
+        }
+        if (!walker) {
+            walker = this.tail;
+        }
+        else if (walker !== this.tail) {
+            walker = walker.prev;
+        }
+        for (const v of nodes) {
+            walker = insertAfter(this, walker, v);
+        }
+        return ret;
+    }
+    reverse() {
+        const head = this.head;
+        const tail = this.tail;
+        for (let walker = head; !!walker; walker = walker.prev) {
+            const p = walker.prev;
+            walker.prev = walker.next;
+            walker.next = p;
+        }
+        this.head = tail;
+        this.tail = head;
+        return this;
+    }
+}
+// insertAfter undefined means "make the node the new head of list"
+function insertAfter(self, node, value) {
+    const prev = node;
+    const next = node ? node.next : self.head;
+    const inserted = new Node(value, prev, next, self);
+    if (inserted.next === undefined) {
+        self.tail = inserted;
+    }
+    if (inserted.prev === undefined) {
+        self.head = inserted;
+    }
+    self.length++;
+    return inserted;
+}
+function push(self, item) {
+    self.tail = new Node(item, self.tail, undefined, self);
+    if (!self.head) {
+        self.head = self.tail;
+    }
+    self.length++;
+}
+function unshift(self, item) {
+    self.head = new Node(item, undefined, self.head, self);
+    if (!self.tail) {
+        self.tail = self.head;
+    }
+    self.length++;
+}
+export class Node {
+    list;
+    next;
+    prev;
+    value;
+    constructor(value, prev, next, list) {
+        this.list = list;
+        this.value = value;
+        if (prev) {
+            prev.next = this;
+            this.prev = prev;
+        }
+        else {
+            this.prev = undefined;
+        }
+        if (next) {
+            next.prev = this;
+            this.next = next;
+        }
+        else {
+            this.next = undefined;
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
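
// Editorial aside, not part of the patch: a minimal, hypothetical usage sketch of the
// Yallist doubly linked list whose ESM build is vendored in the hunk above. It exercises
// only methods visible in that hunk (push, slice, splice, reverse, toArray, toArrayReverse)
// and assumes the named `Yallist` export and that `yallist` resolves to this v5 build.
import { Yallist } from 'yallist'

const list = new Yallist()
for (const v of ['a', 'b', 'c', 'd']) {
  list.push(v)                              // append one value at the tail per call
}

console.log(list.length)                    // 4
console.log(list.toArray())                 // [ 'a', 'b', 'c', 'd' ]
console.log(list.toArrayReverse())          // [ 'd', 'c', 'b', 'a' ]

// slice(from, to) copies a sub-range into a new Yallist without mutating the original
console.log(list.slice(1, 3).toArray())     // [ 'b', 'c' ]

// splice(start, deleteCount, ...values) removes and inserts in place,
// returning the removed values as a plain array
const removed = list.splice(1, 1, 'x')
console.log(removed)                        // [ 'b' ]
console.log(list.toArray())                 // [ 'a', 'x', 'c', 'd' ]

// reverse() flips the list in place and returns the same list
console.log(list.reverse().toArray())       // [ 'd', 'c', 'x', 'a' ]
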
diff --git a/node_modules/pacote/node_modules/yallist/dist/esm/package.json b/node_modules/pacote/node_modules/yallist/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/pacote/node_modules/yallist/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/yallist/package.json b/node_modules/pacote/node_modules/yallist/package.json
new file mode 100644
index 0000000000000..2f5247808bbea
--- /dev/null
+++ b/node_modules/pacote/node_modules/yallist/package.json
@@ -0,0 +1,68 @@
+{
+  "name": "yallist",
+  "version": "5.0.0",
+  "description": "Yet Another Linked List",
+  "files": [
+    "dist"
+  ],
+  "devDependencies": {
+    "prettier": "^3.2.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.13"
+  },
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
+    "typedoc": "typedoc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/yallist.git"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "BlueOak-1.0.0",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=18"
+  }
+}
diff --git a/node_modules/pacote/package.json b/node_modules/pacote/package.json
index 422be5f5452dc..3cc141a104796 100644
--- a/node_modules/pacote/package.json
+++ b/node_modules/pacote/package.json
@@ -1,6 +1,6 @@
 {
   "name": "pacote",
-  "version": "21.0.0",
+  "version": "21.0.3",
   "description": "JavaScript package downloader",
   "author": "GitHub Inc.",
   "bin": {
@@ -26,10 +26,10 @@
     ]
   },
   "devDependencies": {
-    "@npmcli/arborist": "^8.0.0",
+    "@npmcli/arborist": "^9.0.2",
     "@npmcli/eslint-config": "^5.0.0",
     "@npmcli/template-oss": "4.23.4",
-    "hosted-git-info": "^8.0.0",
+    "hosted-git-info": "^9.0.0",
     "mutate-fs": "^2.1.1",
     "nock": "^13.2.4",
     "npm-registry-mock": "^1.3.2",
@@ -46,23 +46,23 @@
     "git"
   ],
   "dependencies": {
-    "@npmcli/git": "^6.0.0",
+    "@npmcli/git": "^7.0.0",
     "@npmcli/installed-package-contents": "^3.0.0",
-    "@npmcli/package-json": "^6.0.0",
+    "@npmcli/package-json": "^7.0.0",
     "@npmcli/promise-spawn": "^8.0.0",
-    "@npmcli/run-script": "^9.0.0",
-    "cacache": "^19.0.0",
+    "@npmcli/run-script": "^10.0.0",
+    "cacache": "^20.0.0",
     "fs-minipass": "^3.0.0",
     "minipass": "^7.0.2",
-    "npm-package-arg": "^12.0.0",
-    "npm-packlist": "^10.0.0",
-    "npm-pick-manifest": "^10.0.0",
-    "npm-registry-fetch": "^18.0.0",
+    "npm-package-arg": "^13.0.0",
+    "npm-packlist": "^10.0.1",
+    "npm-pick-manifest": "^11.0.1",
+    "npm-registry-fetch": "^19.0.0",
     "proc-log": "^5.0.0",
     "promise-retry": "^2.0.1",
-    "sigstore": "^3.0.0",
+    "sigstore": "^4.0.0",
     "ssri": "^12.0.0",
-    "tar": "^6.1.11"
+    "tar": "^7.4.3"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"
diff --git a/package-lock.json b/package-lock.json
index 08949f5429bec..bee1772f17416 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -135,7 +135,7 @@
         "npm-registry-fetch": "^18.0.2",
         "npm-user-validate": "^3.0.0",
         "p-map": "^7.0.3",
-        "pacote": "^21.0.0",
+        "pacote": "^21.0.3",
         "parse-conflict-json": "^4.0.0",
         "proc-log": "^5.0.0",
         "qrcode-terminal": "^0.12.0",
@@ -2013,7 +2013,7 @@
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.3",
         "npm-package-arg": "^12.0.0",
-        "pacote": "^21.0.0",
+        "pacote": "^21.0.2",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -5232,7 +5232,6 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.1.0.tgz",
       "integrity": "sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag==",
-      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
         "@sigstore/protobuf-specs": "^0.4.0"
@@ -5245,7 +5244,6 @@
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz",
       "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==",
-      "inBundle": true,
       "license": "Apache-2.0",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
@@ -5265,7 +5263,6 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.1.0.tgz",
       "integrity": "sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw==",
-      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
         "@sigstore/bundle": "^3.1.0",
@@ -5297,7 +5294,6 @@
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.1.1.tgz",
       "integrity": "sha512-hVJD77oT67aowHxwT4+M6PGOp+E2LtLdTK3+FC0lBO9T7sYwItDMXZ7Z07IDCvR1M717a4axbIWckrW67KMP/w==",
-      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
         "@sigstore/bundle": "^3.1.0",
@@ -9657,7 +9653,7 @@
       "version": "7.0.0",
       "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-7.0.0.tgz",
       "integrity": "sha512-T4gbf83A4NH95zvhVYZc+qWocBBGlpzUXLPGurJggw/WIOwicfXJChLDP/iBZnN5WqROSu5Bm3hhle4z8a8YGQ==",
-      "inBundle": true,
+      "dev": true,
       "license": "ISC",
       "dependencies": {
         "minimatch": "^9.0.0"
@@ -12711,18 +12707,47 @@
       }
     },
     "node_modules/npm-packlist": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.0.tgz",
-      "integrity": "sha512-rht9U6nS8WOBDc53eipZNPo5qkAV4X2rhKE2Oj1DYUQ3DieXfj0mKkVmjnf3iuNdtMd8WfLdi2L6ASkD/8a+Kg==",
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.1.tgz",
+      "integrity": "sha512-vaC03b2PqJA6QqmwHi1jNU8fAPXEnnyv4j/W4PVfgm24C4/zZGSVut3z0YUeN0WIFCo1oGOL02+6LbvFK7JL4Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "ignore-walk": "^7.0.0"
+        "ignore-walk": "^8.0.0"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/npm-packlist/node_modules/ignore-walk": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-8.0.0.tgz",
+      "integrity": "sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "minimatch": "^10.0.3"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-packlist/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/brace-expansion": "^5.0.0"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/npm-pick-manifest": {
       "version": "10.0.0",
       "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
@@ -13357,29 +13382,29 @@
       "license": "BlueOak-1.0.0"
     },
     "node_modules/pacote": {
-      "version": "21.0.0",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.0.tgz",
-      "integrity": "sha512-lcqexq73AMv6QNLo7SOpz0JJoaGdS3rBFgF122NZVl1bApo2mfu+XzUBU/X/XsiJu+iUmKpekRayqQYAs+PhkA==",
+      "version": "21.0.3",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.3.tgz",
+      "integrity": "sha512-itdFlanxO0nmQv4ORsvA9K1wv40IPfB9OmWqfaJWvoJ30VKyHsqNgDVeG+TVhI7Gk7XW8slUy7cA9r6dF5qohw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^6.0.0",
+        "@npmcli/git": "^7.0.0",
         "@npmcli/installed-package-contents": "^3.0.0",
-        "@npmcli/package-json": "^6.0.0",
+        "@npmcli/package-json": "^7.0.0",
         "@npmcli/promise-spawn": "^8.0.0",
-        "@npmcli/run-script": "^9.0.0",
-        "cacache": "^19.0.0",
+        "@npmcli/run-script": "^10.0.0",
+        "cacache": "^20.0.0",
         "fs-minipass": "^3.0.0",
         "minipass": "^7.0.2",
-        "npm-package-arg": "^12.0.0",
-        "npm-packlist": "^10.0.0",
-        "npm-pick-manifest": "^10.0.0",
-        "npm-registry-fetch": "^18.0.0",
+        "npm-package-arg": "^13.0.0",
+        "npm-packlist": "^10.0.1",
+        "npm-pick-manifest": "^11.0.1",
+        "npm-registry-fetch": "^19.0.0",
         "proc-log": "^5.0.0",
         "promise-retry": "^2.0.1",
-        "sigstore": "^3.0.0",
+        "sigstore": "^4.0.0",
         "ssri": "^12.0.0",
-        "tar": "^6.1.11"
+        "tar": "^7.4.3"
       },
       "bin": {
         "pacote": "bin/index.js"
@@ -13388,25 +13413,458 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/@npmcli/package-json": {
-      "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+    "node_modules/pacote/node_modules/@npmcli/git": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.0.tgz",
+      "integrity": "sha512-vnz7BVGtOctJAIHouCJdvWBhsTVSICMeUgZo2c7XAi5d5Rrl80S1H7oPym7K03cRuinK5Q6s2dw36+PgXQTcMA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^6.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^8.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
+        "@npmcli/promise-spawn": "^8.0.0",
+        "ini": "^5.0.0",
+        "lru-cache": "^11.2.1",
+        "npm-pick-manifest": "^11.0.1",
         "proc-log": "^5.0.0",
-        "semver": "^7.5.3",
-        "validate-npm-package-license": "^3.0.4"
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^5.0.0"
       },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@npmcli/run-script": {
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.0.tgz",
+      "integrity": "sha512-vaQj4nccJbAslopIvd49pQH2NhUp7G9pY4byUtmwhe37ZZuubGrx0eB9hW2F37uVNRuDDK6byFGXF+7JCuMSZg==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/node-gyp": "^4.0.0",
+        "@npmcli/package-json": "^7.0.0",
+        "@npmcli/promise-spawn": "^8.0.0",
+        "node-gyp": "^11.0.0",
+        "proc-log": "^5.0.0",
+        "which": "^5.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@sigstore/bundle": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz",
+      "integrity": "sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@sigstore/protobuf-specs": "^0.5.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@sigstore/core": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.0.0.tgz",
+      "integrity": "sha512-NgbJ+aW9gQl/25+GIEGYcCyi8M+ng2/5X04BMuIgoDfgvp18vDcoNHOQjQsG9418HGNYRxG3vfEXaR1ayD37gg==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
+      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
+      "inBundle": true,
+      "license": "Apache-2.0",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/pacote/node_modules/@sigstore/sign": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-4.0.0.tgz",
+      "integrity": "sha512-5+IadiqPzRRMfvftHONzpeH2EzlDNuBiTMC3Lx7+9tLqn/4xbWVfSZA+YaOzKCn86k5BWfJ+aGO9v+pQmIyxqQ==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@sigstore/bundle": "^4.0.0",
+        "@sigstore/core": "^3.0.0",
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "make-fetch-happen": "^15.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@sigstore/tuf": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
+      "integrity": "sha512-0QFuWDHOQmz7t66gfpfNO6aEjoFrdhkJaej/AOqb4kqWZVbPWFZifXZzkxyQBB1OwTbkhdT3LNpMFxwkTvf+2w==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "tuf-js": "^4.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@sigstore/verify": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-3.0.0.tgz",
+      "integrity": "sha512-moXtHH33AobOhTZF8xcX1MpOFqdvfCk7v6+teJL8zymBiDXwEsQH6XG9HGx2VIxnJZNm4cNSzflTLDnQLmIdmw==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@sigstore/bundle": "^4.0.0",
+        "@sigstore/core": "^3.0.0",
+        "@sigstore/protobuf-specs": "^0.5.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@tufjs/models": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
+      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "@tufjs/canonical-json": "2.0.0",
+        "minimatch": "^9.0.5"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/pacote/node_modules/cacache": {
+      "version": "20.0.1",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-20.0.1.tgz",
+      "integrity": "sha512-+7LYcYGBYoNqTp1Rv7Ny1YjUo5E0/ftkQtraH3vkfAGgVHc+ouWdC8okAwQgQR7EVIdW6JTzTmhKFwzb+4okAQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/fs": "^4.0.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^11.0.3",
+        "lru-cache": "^11.1.0",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^2.0.1",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^7.0.2",
+        "ssri": "^12.0.0",
+        "unique-filename": "^4.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/chownr": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
+      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/pacote/node_modules/glob": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
+      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "foreground-child": "^3.3.1",
+        "jackspeak": "^4.1.1",
+        "minimatch": "^10.0.3",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^2.0.0"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/pacote/node_modules/hosted-git-info": {
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
+      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^11.1.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/jackspeak": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/pacote/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
+    "node_modules/pacote/node_modules/make-fetch-happen": {
+      "version": "15.0.1",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
+      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/agent": "^3.0.0",
+        "cacache": "^20.0.1",
+        "http-cache-semantics": "^4.1.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^1.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "ssri": "^12.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/brace-expansion": "^5.0.0"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/pacote/node_modules/minizlib": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
+      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/pacote/node_modules/mkdirp": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
+      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
+      "inBundle": true,
+      "license": "MIT",
+      "bin": {
+        "mkdirp": "dist/cjs/src/bin.js"
+      },
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/pacote/node_modules/negotiator": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
+    "node_modules/pacote/node_modules/npm-package-arg": {
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
+      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^9.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/npm-pick-manifest": {
+      "version": "11.0.1",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.1.tgz",
+      "integrity": "sha512-HnU7FYSWbo7dTVHtK0G+BXbZ0aIfxz/aUCVLN0979Ec6rGUX5cJ6RbgVx5fqb5G31ufz+BVFA7y1SkRTPVNoVQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "npm-install-checks": "^7.1.0",
+        "npm-normalize-package-bin": "^4.0.0",
+        "npm-package-arg": "^13.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/npm-registry-fetch": {
+      "version": "19.0.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.0.0.tgz",
+      "integrity": "sha512-DFxSAemHUwT/POaXAOY4NJmEWBPB0oKbwD6FFDE9hnt1nORkt/FXvgjD4hQjoKoHw9u0Ezws9SPXwV7xE/Gyww==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/redact": "^3.0.0",
+        "jsonparse": "^1.3.1",
+        "make-fetch-happen": "^15.0.0",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minizlib": "^3.0.1",
+        "npm-package-arg": "^13.0.0",
+        "proc-log": "^5.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/path-scurry": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
+      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^11.0.0",
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/pacote/node_modules/sigstore": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.0.0.tgz",
+      "integrity": "sha512-Gw/FgHtrLM9WP8P5lLcSGh9OQcrTruWCELAiS48ik1QbL0cH+dfjomiRTUE9zzz+D1N6rOLkwXUvVmXZAsNE0Q==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@sigstore/bundle": "^4.0.0",
+        "@sigstore/core": "^3.0.0",
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "@sigstore/sign": "^4.0.0",
+        "@sigstore/tuf": "^4.0.0",
+        "@sigstore/verify": "^3.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/tar": {
+      "version": "7.4.3",
+      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
+      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/fs-minipass": "^4.0.0",
+        "chownr": "^3.0.0",
+        "minipass": "^7.1.2",
+        "minizlib": "^3.0.1",
+        "mkdirp": "^3.0.1",
+        "yallist": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/pacote/node_modules/tuf-js": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.0.0.tgz",
+      "integrity": "sha512-Lq7ieeGvXDXwpoSmOSgLWVdsGGV9J4a77oDTAPe/Ltrqnnm/ETaRlBAQTH5JatEh8KXuE6sddf9qAv1Q2282Hg==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "@tufjs/models": "4.0.0",
+        "debug": "^4.4.1",
+        "make-fetch-happen": "^15.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/pacote/node_modules/yallist": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
+      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "engines": {
+        "node": ">=18"
+      }
+    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
@@ -14871,7 +15329,6 @@
       "version": "3.1.0",
       "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.1.0.tgz",
       "integrity": "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q==",
-      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
         "@sigstore/bundle": "^3.1.0",
@@ -19199,7 +19656,7 @@
         "npm-package-arg": "^12.0.0",
         "npm-pick-manifest": "^10.0.0",
         "npm-registry-fetch": "^18.0.1",
-        "pacote": "^21.0.0",
+        "pacote": "^21.0.2",
         "parse-conflict-json": "^4.0.0",
         "proc-log": "^5.0.0",
         "proggy": "^3.0.0",
@@ -19279,7 +19736,7 @@
         "diff": "^7.0.0",
         "minimatch": "^9.0.4",
         "npm-package-arg": "^12.0.0",
-        "pacote": "^21.0.0",
+        "pacote": "^21.0.2",
         "tar": "^6.2.1"
       },
       "devDependencies": {
@@ -19300,7 +19757,7 @@
         "@npmcli/run-script": "^9.0.1",
         "ci-info": "^4.0.0",
         "npm-package-arg": "^12.0.0",
-        "pacote": "^21.0.0",
+        "pacote": "^21.0.2",
         "proc-log": "^5.0.0",
         "promise-retry": "^2.0.1",
         "read": "^4.0.0",
@@ -19362,7 +19819,7 @@
         "@npmcli/arborist": "^9.1.4",
         "@npmcli/run-script": "^9.0.1",
         "npm-package-arg": "^12.0.0",
-        "pacote": "^21.0.0"
+        "pacote": "^21.0.2"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
diff --git a/package.json b/package.json
index 1d0ae432724a8..473636b417d33 100644
--- a/package.json
+++ b/package.json
@@ -102,7 +102,7 @@
     "npm-registry-fetch": "^18.0.2",
     "npm-user-validate": "^3.0.0",
     "p-map": "^7.0.3",
-    "pacote": "^21.0.0",
+    "pacote": "^21.0.3",
     "parse-conflict-json": "^4.0.0",
     "proc-log": "^5.0.0",
     "qrcode-terminal": "^0.12.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 53458f7469ca1..940dad5cc7948 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -26,7 +26,7 @@
     "npm-package-arg": "^12.0.0",
     "npm-pick-manifest": "^10.0.0",
     "npm-registry-fetch": "^18.0.1",
-    "pacote": "^21.0.0",
+    "pacote": "^21.0.2",
     "parse-conflict-json": "^4.0.0",
     "proc-log": "^5.0.0",
     "proggy": "^3.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 87c467b5a9783..13c7f7cc7dd6f 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -53,7 +53,7 @@
     "diff": "^7.0.0",
     "minimatch": "^9.0.4",
     "npm-package-arg": "^12.0.0",
-    "pacote": "^21.0.0",
+    "pacote": "^21.0.2",
     "tar": "^6.2.1"
   },
   "templateOSS": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index b5b59c8248fc6..ca9e4f2c9c7aa 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -65,7 +65,7 @@
     "@npmcli/run-script": "^9.0.1",
     "ci-info": "^4.0.0",
     "npm-package-arg": "^12.0.0",
-    "pacote": "^21.0.0",
+    "pacote": "^21.0.2",
     "proc-log": "^5.0.0",
     "promise-retry": "^2.0.1",
     "read": "^4.0.0",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index a48d3d983707e..f2cfda0d76e79 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -40,7 +40,7 @@
     "@npmcli/arborist": "^9.1.4",
     "@npmcli/run-script": "^9.0.1",
     "npm-package-arg": "^12.0.0",
-    "pacote": "^21.0.0"
+    "pacote": "^21.0.2"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"

From cb36a8ad38df37579f59cf794d6c23ed7274fba9 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:21:42 -0700
Subject: [PATCH 08/63] deps: init-package-json@8.2.2

---
 node_modules/.gitignore                       |    6 +-
 .../node_modules/@npmcli/package-json/LICENSE |   18 -
 .../@npmcli/package-json/lib/index.js         |  286 ---
 .../package-json/lib/normalize-data.js        |  257 ---
 .../@npmcli/package-json/lib/normalize.js     |  601 -------
 .../@npmcli/package-json/lib/read-package.js  |   39 -
 .../@npmcli/package-json/lib/sort.js          |  101 --
 .../package-json/lib/update-dependencies.js   |   75 -
 .../package-json/lib/update-scripts.js        |   29 -
 .../package-json/lib/update-workspaces.js     |   26 -
 .../node_modules/hosted-git-info/LICENSE      |   13 +
 .../hosted-git-info/lib/from-url.js           |  122 ++
 .../node_modules/hosted-git-info/lib/hosts.js |  231 +++
 .../node_modules/hosted-git-info/lib/index.js |  227 +++
 .../hosted-git-info/lib/parse-url.js          |   78 +
 .../node_modules/hosted-git-info/package.json |   61 +
 .../node_modules/lru-cache/LICENSE            |   15 +
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    3 +
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ++++++++++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    3 +
 .../node_modules/lru-cache/package.json       |  113 ++
 .../node_modules/npm-package-arg/LICENSE      |   15 +
 .../node_modules/npm-package-arg/lib/npa.js   |  481 +++++
 .../package.json                              |   68 +-
 node_modules/init-package-json/package.json   |   10 +-
 package-lock.json                             |   58 +-
 package.json                                  |    2 +-
 30 files changed, 4572 insertions(+), 1494 deletions(-)
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js
 delete mode 100644 node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js
 create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/init-package-json/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.min.js
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/package.json
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.min.js
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/dist/esm/package.json
 create mode 100644 node_modules/init-package-json/node_modules/lru-cache/package.json
 create mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
 rename node_modules/init-package-json/node_modules/{@npmcli/package-json => npm-package-arg}/package.json (54%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 1477ba9c79d32..991015407c23e 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -126,9 +126,9 @@
 !/init-package-json
 !/init-package-json/node_modules/
 /init-package-json/node_modules/*
-!/init-package-json/node_modules/@npmcli/
-/init-package-json/node_modules/@npmcli/*
-!/init-package-json/node_modules/@npmcli/package-json
+!/init-package-json/node_modules/hosted-git-info
+!/init-package-json/node_modules/lru-cache
+!/init-package-json/node_modules/npm-package-arg
 !/ip-address
 !/ip-regex
 !/is-cidr
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE b/node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE
deleted file mode 100644
index 6a1f3708f6d70..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-ISC License
-
-Copyright GitHub Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js
deleted file mode 100644
index 7eff602d73a3f..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/index.js
+++ /dev/null
@@ -1,286 +0,0 @@
-const { readFile, writeFile } = require('node:fs/promises')
-const { resolve } = require('node:path')
-const parseJSON = require('json-parse-even-better-errors')
-
-const updateDeps = require('./update-dependencies.js')
-const updateScripts = require('./update-scripts.js')
-const updateWorkspaces = require('./update-workspaces.js')
-const normalize = require('./normalize.js')
-const { read, parse } = require('./read-package.js')
-const { packageSort } = require('./sort.js')
-
-// a list of handy specialized helper functions that take
-// care of special cases that are handled by the npm cli
-const knownSteps = new Set([
-  updateDeps,
-  updateScripts,
-  updateWorkspaces,
-])
-
-// list of all keys that are handled by "knownSteps" helpers
-const knownKeys = new Set([
-  ...updateDeps.knownKeys,
-  'scripts',
-  'workspaces',
-])
-
-class PackageJson {
-  static normalizeSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'optionalDedupe',
-    'scripts',
-    'funding',
-    'bin',
-  ])
-
-  // npm pkg fix
-  static fixSteps = Object.freeze([
-    'binRefs',
-    'bundleDependencies',
-    'bundleDependenciesFalse',
-    'fixName',
-    'fixNameField',
-    'fixVersionField',
-    'fixRepositoryField',
-    'fixDependencies',
-    'devDependencies',
-    'scriptpath',
-  ])
-
-  static prepareSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'bundleDependenciesDeleteFalse',
-    'gypfile',
-    'serverjs',
-    'scriptpath',
-    'authors',
-    'readme',
-    'mans',
-    'binDir',
-    'gitHead',
-    'fillTypes',
-    'normalizeData',
-    'binRefs',
-  ])
-
-  // create a new empty package.json, so we can save at the given path even
-  // though we didn't start from a parsed file
-  static async create (path, opts = {}) {
-    const p = new PackageJson()
-    await p.create(path)
-    if (opts.data) {
-      return p.update(opts.data)
-    }
-    return p
-  }
-
-  // Loads a package.json at given path and JSON parses
-  static async load (path, opts = {}) {
-    const p = new PackageJson()
-    // Avoid try/catch if we aren't going to create
-    if (!opts.create) {
-      return p.load(path)
-    }
-
-    try {
-      return await p.load(path)
-    } catch (err) {
-      if (!err.message.startsWith('Could not read package.json')) {
-        throw err
-      }
-      return await p.create(path)
-    }
-  }
-
-  // npm pkg fix
-  static async fix (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.fix(opts)
-  }
-
-  // read-package-json compatible behavior
-  static async prepare (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.prepare(opts)
-  }
-
-  // read-package-json-fast compatible behavior
-  static async normalize (path, opts) {
-    const p = new PackageJson()
-    await p.load(path)
-    return p.normalize(opts)
-  }
-
-  #path
-  #manifest
-  #readFileContent = ''
-  #canSave = true
-
-  // Load content from given path
-  async load (path, parseIndex) {
-    this.#path = path
-    let parseErr
-    try {
-      this.#readFileContent = await read(this.filename)
-    } catch (err) {
-      if (!parseIndex) {
-        throw err
-      }
-      parseErr = err
-    }
-
-    if (parseErr) {
-      const indexFile = resolve(this.path, 'index.js')
-      let indexFileContent
-      try {
-        indexFileContent = await readFile(indexFile, 'utf8')
-      } catch (err) {
-        throw parseErr
-      }
-      try {
-        this.fromComment(indexFileContent)
-      } catch (err) {
-        throw parseErr
-      }
-      // This wasn't a package.json so prevent saving
-      this.#canSave = false
-      return this
-    }
-
-    return this.fromJSON(this.#readFileContent)
-  }
-
-  // Load data from a JSON string/buffer
-  fromJSON (data) {
-    this.#manifest = parse(data)
-    return this
-  }
-
-  fromContent (data) {
-    this.#manifest = data
-    this.#canSave = false
-    return this
-  }
-
-  // Load data from a comment
-  // /**package { "name": "foo", "version": "1.2.3", ... } **/
-  fromComment (data) {
-    data = data.split(/^\/\*\*package(?:\s|$)/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[1]
-    data = data.split(/\*\*\/$/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[0]
-    data = data.replace(/^\s*\*/mg, '')
-
-    this.#manifest = parseJSON(data)
-    return this
-  }
-
-  get content () {
-    return this.#manifest
-  }
-
-  get path () {
-    return this.#path
-  }
-
-  get filename () {
-    if (this.path) {
-      return resolve(this.path, 'package.json')
-    }
-    return undefined
-  }
-
-  create (path) {
-    this.#path = path
-    this.#manifest = {}
-    return this
-  }
-
-  // This should be the ONLY way to set content in the manifest
-  update (content) {
-    if (!this.content) {
-      throw new Error('Can not update without content.  Please `load` or `create`')
-    }
-
-    for (const step of knownSteps) {
-      this.#manifest = step({ content, originalContent: this.content })
-    }
-
-    // unknown properties will just be overwitten
-    for (const [key, value] of Object.entries(content)) {
-      if (!knownKeys.has(key)) {
-        this.content[key] = value
-      }
-    }
-
-    return this
-  }
-
-  async save ({ sort } = {}) {
-    if (!this.#canSave) {
-      throw new Error('No package.json to save to')
-    }
-    const {
-      [Symbol.for('indent')]: indent,
-      [Symbol.for('newline')]: newline,
-      ...rest
-    } = this.content
-
-    const format = indent === undefined ? '  ' : indent
-    const eol = newline === undefined ? '\n' : newline
-
-    const content = sort ? packageSort(rest) : rest
-
-    const fileContent = `${
-      JSON.stringify(content, null, format)
-    }\n`
-      .replace(/\n/g, eol)
-
-    if (fileContent.trim() !== this.#readFileContent.trim()) {
-      const written = await writeFile(this.filename, fileContent)
-      this.#readFileContent = fileContent
-      return written
-    }
-  }
-
-  async normalize (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.normalizeSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async prepare (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.prepareSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async fix (opts = {}) {
-    // This one is not overridable
-    opts.steps = this.constructor.fixSteps
-    await normalize(this, opts)
-    return this
-  }
-}
-
-module.exports = PackageJson
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js
deleted file mode 100644
index 79b0bafbcd3a4..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize-data.js
+++ /dev/null
@@ -1,257 +0,0 @@
-// Originally normalize-package-data
-
-const url = require('node:url')
-const hostedGitInfo = require('hosted-git-info')
-const validateLicense = require('validate-npm-package-license')
-
-const typos = {
-  dependancies: 'dependencies',
-  dependecies: 'dependencies',
-  depdenencies: 'dependencies',
-  devEependencies: 'devDependencies',
-  depends: 'dependencies',
-  'dev-dependencies': 'devDependencies',
-  devDependences: 'devDependencies',
-  devDepenencies: 'devDependencies',
-  devdependencies: 'devDependencies',
-  repostitory: 'repository',
-  repo: 'repository',
-  prefereGlobal: 'preferGlobal',
-  hompage: 'homepage',
-  hampage: 'homepage',
-  autohr: 'author',
-  autor: 'author',
-  contributers: 'contributors',
-  publicationConfig: 'publishConfig',
-  script: 'scripts',
-}
-
-const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (description) {
-  // the first block of text before the first heading that isn't the first line heading
-  const lines = description.trim().split('\n')
-  let start = 0
-  // skip initial empty lines and lines that start with #
-  while (lines[start]?.trim().match(/^(#|$)/)) {
-    start++
-  }
-  let end = start + 1
-  // keep going till we get to the end or an empty line
-  while (end < lines.length && lines[end].trim()) {
-    end++
-  }
-  return lines.slice(start, end).join(' ').trim()
-}
-
-function stringifyPerson (person) {
-  if (typeof person !== 'string') {
-    const name = person.name || ''
-    const u = person.url || person.web
-    const wrappedUrl = u ? (' (' + u + ')') : ''
-    const e = person.email || person.mail
-    const wrappedEmail = e ? (' <' + e + '>') : ''
-    person = name + wrappedEmail + wrappedUrl
-  }
-  const matchedName = person.match(/^([^(<]+)/)
-  const matchedUrl = person.match(/\(([^()]+)\)/)
-  const matchedEmail = person.match(/<([^<>]+)>/)
-  const parsed = {}
-  if (matchedName?.[0].trim()) {
-    parsed.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    parsed.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    parsed.url = matchedUrl[1]
-  }
-  return parsed
-}
-
-function normalizeData (data, changes) {
-  // fixDescriptionField
-  if (data.description && typeof data.description !== 'string') {
-    changes?.push(`'description' field should be a string`)
-    delete data.description
-  }
-  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
-    data.description = extractDescription(data.readme)
-  }
-  if (data.description === undefined) {
-    delete data.description
-  }
-  if (!data.description) {
-    changes?.push('No description')
-  }
-
-  // fixModulesField
-  if (data.modules) {
-    changes?.push(`modules field is deprecated`)
-    delete data.modules
-  }
-
-  // fixFilesField
-  const files = data.files
-  if (files && !Array.isArray(files)) {
-    changes?.push(`Invalid 'files' member`)
-    delete data.files
-  } else if (data.files) {
-    data.files = data.files.filter(function (file) {
-      if (!file || typeof file !== 'string') {
-        changes?.push(`Invalid filename in 'files' list: ${file}`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-
-  // fixManField
-  if (data.man && typeof data.man === 'string') {
-    data.man = [data.man]
-  }
-
-  // fixBugsField
-  if (!data.bugs && data.repository?.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted && hosted.bugs()) {
-      data.bugs = { url: hosted.bugs() }
-    }
-  } else if (data.bugs) {
-    if (typeof data.bugs === 'string') {
-      if (isEmail(data.bugs)) {
-        data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-      } else if (url.parse(data.bugs).protocol) {
-        data.bugs = { url: data.bugs }
-      } else {
-        changes?.push(`Bug string field must be url, email, or {email,url}`)
-      }
-    } else {
-      for (const k in data.bugs) {
-        if (['web', 'name'].includes(k)) {
-          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
-          data.bugs.url = data.bugs[k]
-          delete data.bugs[k]
-        }
-      }
-      const oldBugs = data.bugs
-      data.bugs = {}
-      if (oldBugs.url) {
-        /* eslint-disable-next-line node/no-deprecated-api */
-        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-          data.bugs.url = oldBugs.url
-        } else {
-          changes?.push('bugs.url field must be a string url. Deleted.')
-        }
-      }
-      if (oldBugs.email) {
-        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-          data.bugs.email = oldBugs.email
-        } else {
-          changes?.push('bugs.email field must be a string email. Deleted.')
-        }
-      }
-    }
-    if (!data.bugs.email && !data.bugs.url) {
-      delete data.bugs
-      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
-    }
-  }
-  // fixKeywordsField
-  if (typeof data.keywords === 'string') {
-    data.keywords = data.keywords.split(/,\s+/)
-  }
-  if (data.keywords && !Array.isArray(data.keywords)) {
-    delete data.keywords
-    changes?.push(`keywords should be an array of strings`)
-  } else if (data.keywords) {
-    data.keywords = data.keywords.filter(function (kw) {
-      if (typeof kw !== 'string' || !kw) {
-        changes?.push(`keywords should be an array of strings`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-  // fixBundleDependenciesField
-  const bdd = 'bundledDependencies'
-  const bd = 'bundleDependencies'
-  if (data[bdd] && !data[bd]) {
-    data[bd] = data[bdd]
-    delete data[bdd]
-  }
-  if (data[bd] && !Array.isArray(data[bd])) {
-    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
-    delete data[bd]
-  } else if (data[bd]) {
-    data[bd] = data[bd].filter(function (filtered) {
-      if (!filtered || typeof filtered !== 'string') {
-        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
-        return false
-      } else {
-        if (!data.dependencies) {
-          data.dependencies = {}
-        }
-        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
-          data.dependencies[filtered] = '*'
-        }
-        return true
-      }
-    })
-  }
-  // fixHomepageField
-  if (!data.homepage && data.repository && data.repository.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted) {
-      data.homepage = hosted.docs()
-    }
-  }
-  if (data.homepage) {
-    if (typeof data.homepage !== 'string') {
-      changes?.push('homepage field must be a string url. Deleted.')
-      delete data.homepage
-    } else {
-      /* eslint-disable-next-line node/no-deprecated-api */
-      if (!url.parse(data.homepage).protocol) {
-        data.homepage = 'http://' + data.homepage
-      }
-    }
-  }
-  // fixReadmeField
-  if (!data.readme) {
-    changes?.push('No README data')
-    data.readme = 'ERROR: No README data found!'
-  }
-  // fixLicenseField
-  const license = data.license || data.licence
-  if (!license) {
-    changes?.push('No license field.')
-  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
-    changes?.push('license should be a valid SPDX license expression')
-  } else if (!validateLicense(license).validForNewPackages) {
-    changes?.push('license should be a valid SPDX license expression')
-  }
-  // fixPeople
-  if (data.author) {
-    data.author = stringifyPerson(data.author)
-  }
-  ['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(stringifyPerson)
-  })
-  // fixTypos
-  for (const d in typos) {
-    if (Object.prototype.hasOwnProperty.call(data, d)) {
-      changes?.push(`${d} should probably be ${typos[d]}.`)
-    }
-  }
-}
-
-module.exports = { normalizeData }
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js
deleted file mode 100644
index 845f6753a9a00..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/normalize.js
+++ /dev/null
@@ -1,601 +0,0 @@
-const valid = require('semver/functions/valid')
-const clean = require('semver/functions/clean')
-const fs = require('node:fs/promises')
-const path = require('node:path')
-const { log } = require('proc-log')
-const moduleBuiltin = require('node:module')
-
-/**
- * @type {import('hosted-git-info')}
- */
-let _hostedGitInfo
-function lazyHostedGitInfo () {
-  if (!_hostedGitInfo) {
-    _hostedGitInfo = require('hosted-git-info')
-  }
-  return _hostedGitInfo
-}
-
-/**
- * @type {import('glob').glob}
- */
-let _glob
-function lazyLoadGlob () {
-  if (!_glob) {
-    _glob = require('glob').glob
-  }
-  return _glob
-}
-
-// used to be npm-normalize-package-bin
-function normalizePackageBin (pkg, changes) {
-  if (pkg.bin) {
-    if (typeof pkg.bin === 'string' && pkg.name) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = { [pkg.name]: pkg.bin }
-    } else if (Array.isArray(pkg.bin)) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = pkg.bin.reduce((acc, k) => {
-        acc[path.basename(k)] = k
-        return acc
-      }, {})
-    }
-    if (typeof pkg.bin === 'object') {
-      for (const binKey in pkg.bin) {
-        if (typeof pkg.bin[binKey] !== 'string') {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-        const base = path.basename(secureAndUnixifyPath(binKey))
-        if (!base) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
-
-        if (!binTarget) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        if (base !== binKey) {
-          delete pkg.bin[binKey]
-          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
-        }
-        if (binTarget !== pkg.bin[binKey]) {
-          changes?.push(`"bin[${base}]" script name was cleaned`)
-        }
-        pkg.bin[base] = binTarget
-      }
-
-      if (Object.keys(pkg.bin).length === 0) {
-        changes?.push('empty "bin" was removed')
-        delete pkg.bin
-      }
-
-      return pkg
-    }
-  }
-  delete pkg.bin
-}
-
-function normalizePackageMan (pkg, changes) {
-  if (pkg.man) {
-    const mans = []
-    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
-      if (typeof man !== 'string') {
-        changes?.push(`removed invalid "man [${man}]"`)
-      } else {
-        mans.push(secureAndUnixifyPath(man))
-      }
-    }
-
-    if (!mans.length) {
-      changes?.push('empty "man" was removed')
-    } else {
-      pkg.man = mans
-      return pkg
-    }
-  }
-  delete pkg.man
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  const rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function unixifyPath (ref) {
-  return ref.replace(/\\|:/g, '/')
-}
-
-function secureAndUnixifyPath (ref) {
-  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
-  return secured.startsWith('./') ? '' : secured
-}
-
-// We don't want the `changes` array in here by default because this is a hot
-// path for parsing packuments during install.  So the calling method passes it
-// in if it wants to track changes.
-const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
-  if (!pkg.content) {
-    throw new Error('Can not normalize without content')
-  }
-  const data = pkg.content
-  const scripts = data.scripts || {}
-  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
-
-  // name and version are load bearing so we have to clean them up first
-  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
-    if (!data.name && !strict) {
-      changes?.push('Missing "name" field was set to an empty string')
-      data.name = ''
-    } else {
-      if (typeof data.name !== 'string') {
-        throw new Error('name field must be a string.')
-      }
-      if (!strict) {
-        const name = data.name.trim()
-        if (data.name !== name) {
-          changes?.push(`Whitespace was trimmed from "name"`)
-          data.name = name
-        }
-      }
-
-      if (data.name.startsWith('.') ||
-        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
-        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
-        data.name.toLowerCase() === 'node_modules' ||
-        data.name.toLowerCase() === 'favicon.ico') {
-        throw new Error('Invalid name: ' + JSON.stringify(data.name))
-      }
-    }
-  }
-
-  if (steps.includes('fixName')) {
-    // Check for conflicts with builtin modules
-    if (moduleBuiltin.builtinModules.includes(data.name)) {
-      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
-    }
-  }
-
-  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    const loose = !strict
-    if (!data.version) {
-      data.version = ''
-    } else {
-      if (!valid(data.version, loose)) {
-        throw new Error(`Invalid version: "${data.version}"`)
-      }
-      const version = clean(data.version, loose)
-      if (version !== data.version) {
-        changes?.push(`"version" was cleaned and set to "${version}"`)
-        data.version = version
-      }
-    }
-  }
-  // remove attributes that start with "_"
-  if (steps.includes('_attributes')) {
-    for (const key in data) {
-      if (key.startsWith('_')) {
-        changes?.push(`"${key}" was removed`)
-        delete pkg.content[key]
-      }
-    }
-  }
-
-  // build the "_id" attribute
-  if (steps.includes('_id')) {
-    if (data.name && data.version) {
-      changes?.push(`"_id" was set to ${pkgId}`)
-      data._id = pkgId
-    }
-  }
-
-  // fix bundledDependencies typo
-  // normalize bundleDependencies
-  if (steps.includes('bundledDependencies')) {
-    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
-      data.bundleDependencies = data.bundledDependencies
-    }
-    changes?.push(`Deleted incorrect "bundledDependencies"`)
-    delete data.bundledDependencies
-  }
-  // expand "bundleDependencies: true or translate from object"
-  if (steps.includes('bundleDependencies')) {
-    const bd = data.bundleDependencies
-    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
-      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
-      data.bundleDependencies = []
-    } else if (bd === true) {
-      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
-      data.bundleDependencies = Object.keys(data.dependencies || {})
-    } else if (bd && typeof bd === 'object') {
-      if (!Array.isArray(bd)) {
-        changes?.push(`"bundleDependencies" was changed from an object to an array`)
-        data.bundleDependencies = Object.keys(bd)
-      }
-    } else if ('bundleDependencies' in data) {
-      changes?.push(`"bundleDependencies" was removed`)
-      delete data.bundleDependencies
-    }
-  }
-
-  // it was once common practice to list deps both in optionalDependencies and
-  // in dependencies, to support npm versions that did not know about
-  // optionalDependencies.  This is no longer a relevant need, so duplicating
-  // the deps in two places is unnecessary and excessive.
-  if (steps.includes('optionalDedupe')) {
-    if (data.dependencies &&
-      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
-      for (const name in data.optionalDependencies) {
-        changes?.push(`optionalDependencies."${name}" was removed`)
-        delete data.dependencies[name]
-      }
-      if (!Object.keys(data.dependencies).length) {
-        changes?.push(`Empty "optionalDependencies" was removed`)
-        delete data.dependencies
-      }
-    }
-  }
-
-  // add "install" attribute if any "*.gyp" files exist
-  if (steps.includes('gypfile')) {
-    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
-      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
-      if (files.length) {
-        scripts.install = 'node-gyp rebuild'
-        data.scripts = scripts
-        data.gypfile = true
-        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
-        changes?.push(`"gypfile" was set to "true"`)
-      }
-    }
-  }
-
-  // add "start" attribute if "server.js" exists
-  if (steps.includes('serverjs') && !scripts.start) {
-    try {
-      await fs.access(path.join(pkg.path, 'server.js'))
-      scripts.start = 'node server.js'
-      data.scripts = scripts
-      changes?.push('"scripts.start" was set to "node server.js"')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // strip "node_modules/.bin" from scripts entries
-  // remove invalid scripts entries (non-strings)
-  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
-    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
-    if (typeof data.scripts === 'object') {
-      for (const name in data.scripts) {
-        if (typeof data.scripts[name] !== 'string') {
-          delete data.scripts[name]
-          changes?.push(`Invalid scripts."${name}" was removed`)
-        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
-          data.scripts[name] = data.scripts[name].replace(spre, '')
-          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
-        }
-      }
-    } else {
-      changes?.push(`Removed invalid "scripts"`)
-      delete data.scripts
-    }
-  }
-
-  if (steps.includes('funding')) {
-    if (data.funding && typeof data.funding === 'string') {
-      data.funding = { url: data.funding }
-      changes?.push(`"funding" was changed to an object with a url attribute`)
-    }
-  }
-
-  // populate "authors" attribute
-  if (steps.includes('authors') && !data.contributors) {
-    try {
-      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
-      const authors = authorData.split(/\r?\n/g)
-        .map(line => line.replace(/^\s*#.*$/, '').trim())
-        .filter(line => line)
-      data.contributors = authors
-      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // populate "readme" attribute
-  if (steps.includes('readme') && !data.readme) {
-    const mdre = /\.m?a?r?k?d?o?w?n?$/i
-    const files = await lazyLoadGlob()('{README,README.*}', {
-      cwd: pkg.path,
-      nocase: true,
-      mark: true,
-    })
-    let readmeFile
-    for (const file of files) {
-      // don't accept directories.
-      if (!file.endsWith(path.sep)) {
-        if (file.match(mdre)) {
-          readmeFile = file
-          break
-        }
-        if (file.endsWith('README')) {
-          readmeFile = file
-        }
-      }
-    }
-    if (readmeFile) {
-      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
-      data.readme = readmeData
-      data.readmeFilename = readmeFile
-      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
-      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
-    }
-    if (!data.readme) {
-      data.readme = 'ERROR: No README data found!'
-    }
-  }
-
-  // expand directories.man
-  if (steps.includes('mans')) {
-    if (data.directories?.man && !data.man) {
-      const manDir = secureAndUnixifyPath(data.directories.man)
-      const cwd = path.resolve(pkg.path, manDir)
-      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
-      data.man = files.map(man =>
-        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
-      )
-    }
-    normalizePackageMan(data, changes)
-  }
-
-  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
-    normalizePackageBin(data, changes)
-  }
-
-  // expand "directories.bin"
-  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
-    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
-    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
-    data.bin = bins.reduce((acc, binFile) => {
-      if (binFile && !binFile.startsWith('.')) {
-        const binName = path.basename(binFile)
-        acc[binName] = path.join(data.directories.bin, binFile)
-      }
-      return acc
-    }, {})
-    // *sigh*
-    normalizePackageBin(data, changes)
-  }
-
-  // populate "gitHead" attribute
-  if (steps.includes('gitHead') && !data.gitHead) {
-    const git = require('@npmcli/git')
-    const gitRoot = await git.find({ cwd: pkg.path, root })
-    let head
-    if (gitRoot) {
-      try {
-        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
-      } catch (err) {
-      // do nothing
-      }
-    }
-    let headData
-    if (head) {
-      if (head.startsWith('ref: ')) {
-        const headRef = head.replace(/^ref: /, '').trim()
-        const headFile = path.resolve(gitRoot, '.git', headRef)
-        try {
-          headData = await fs.readFile(headFile, 'utf8')
-          headData = headData.replace(/^ref: /, '').trim()
-        } catch (err) {
-          // do nothing
-        }
-        if (!headData) {
-          const packFile = path.resolve(gitRoot, '.git/packed-refs')
-          try {
-            let refs = await fs.readFile(packFile, 'utf8')
-            if (refs) {
-              refs = refs.split('\n')
-              for (let i = 0; i < refs.length; i++) {
-                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
-                if (match && match[2].trim() === headRef) {
-                  headData = match[1]
-                  break
-                }
-              }
-            }
-          } catch {
-            // do nothing
-          }
-        }
-      } else {
-        headData = head.trim()
-      }
-    }
-    if (headData) {
-      data.gitHead = headData
-    }
-  }
-
-  // populate "types" attribute
-  if (steps.includes('fillTypes')) {
-    const index = data.main || 'index.js'
-
-    if (typeof index !== 'string') {
-      throw new TypeError('The "main" attribute must be of type string.')
-    }
-
-    // TODO exports is much more complicated than this in verbose format
-    // We need to support for instance
-
-    // "exports": {
-    //   ".": [
-    //     {
-    //       "default": "./lib/npm.js"
-    //     },
-    //     "./lib/npm.js"
-    //   ],
-    //   "./package.json": "./package.json"
-    // },
-    // as well as conditional exports
-
-    // if (data.exports && typeof data.exports === 'string') {
-    //   index = data.exports
-    // }
-
-    // if (data.exports && data.exports['.']) {
-    //   index = data.exports['.']
-    //   if (typeof index !== 'string') {
-    //   }
-    // }
-    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
-    const dts = `./${extless}.d.ts`
-    const hasDTSFields = 'types' in data || 'typings' in data
-    if (!hasDTSFields) {
-      try {
-        await fs.access(path.join(pkg.path, dts))
-        data.types = dts.split(path.sep).join('/')
-      } catch {
-        // do nothing
-      }
-    }
-  }
-
-  // "normalizeData" from "read-package-json", which was just a call through to
-  // "normalize-package-data".  We only call the "fixer" functions because
-  // outside of that it was also clobbering _id (which we already conditionally
-  // do) and also adding the gypfile script (which we also already
-  // conditionally do)
-
-  // Some steps are isolated so we can do a limited subset of these in `fix`
-  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
-    if (data.repositories) {
-      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
-      data.repository = data.repositories[0]
-    }
-    if (data.repository) {
-      if (typeof data.repository === 'string') {
-        changes?.push('"repository" was changed from a string to an object')
-        data.repository = {
-          type: 'git',
-          url: data.repository,
-        }
-      }
-      if (data.repository.url) {
-        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
-        let r
-        if (hosted) {
-          if (hosted.getDefaultRepresentation() === 'shortcut') {
-            r = hosted.https()
-          } else {
-            r = hosted.toString()
-          }
-          if (r !== data.repository.url) {
-            changes?.push(`"repository.url" was normalized to "${r}"`)
-            data.repository.url = r
-          }
-        }
-      }
-    }
-  }
-
-  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
-    // peerDependencies?
-    // devDependencies is meaningless here, it's ignored on an installed package
-    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
-      if (data[type]) {
-        let secondWarning = true
-        if (typeof data[type] === 'string') {
-          changes?.push(`"${type}" was converted from a string into an object`)
-          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
-          secondWarning = false
-        }
-        if (Array.isArray(data[type])) {
-          if (secondWarning) {
-            changes?.push(`"${type}" was converted from an array into an object`)
-          }
-          const o = {}
-          for (const d of data[type]) {
-            if (typeof d === 'string') {
-              const dep = d.trim().split(/(:?[@\s><=])/)
-              const dn = dep.shift()
-              const dv = dep.join('').replace(/^@/, '').trim()
-              o[dn] = dv
-            }
-          }
-          data[type] = o
-        }
-      }
-    }
-    // normalize-package-data used to put optional dependencies BACK into
-    // dependencies here, we no longer do this
-
-    for (const deps of ['dependencies', 'devDependencies']) {
-      if (deps in data) {
-        if (!data[deps] || typeof data[deps] !== 'object') {
-          changes?.push(`Removed invalid "${deps}"`)
-          delete data[deps]
-        } else {
-          for (const d in data[deps]) {
-            const r = data[deps][d]
-            if (typeof r !== 'string') {
-              changes?.push(`Removed invalid "${deps}.${d}"`)
-              delete data[deps][d]
-            }
-            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
-            if (hosted && hosted !== data[deps][d]) {
-              changes?.push(`Normalized git reference to "${deps}.${d}"`)
-              data[deps][d] = hosted.toString()
-            }
-          }
-        }
-      }
-    }
-  }
-
-  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
-  if (steps.includes('normalizeData')) {
-    const { normalizeData } = require('./normalize-data.js')
-    normalizeData(data, changes)
-  }
-
-  // Warn if the bin references don't point to anything.  This might be better
-  // in normalize-package-data if it had access to the file path.
-  if (steps.includes('binRefs') && data.bin instanceof Object) {
-    for (const key in data.bin) {
-      try {
-        await fs.access(path.resolve(pkg.path, data.bin[key]))
-      } catch {
-        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
-        // XXX: should a future breaking change delete bin entries that cannot be accessed?
-      }
-    }
-  }
-}
-
-module.exports = normalize
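
For reference, a minimal sketch of how the steps-based normalize() above is driven. The require path and the pkg shape ({ path, content }) follow the code in this diff; the step names are the ones the function checks, and expected output is derived from the branches shown above.

    const normalize = require('./normalize.js')

    const pkg = { path: process.cwd(), content: { name: 'demo-pkg', version: 'v1.0.0' } }
    const changes = [] // hot-path callers omit this array; steps still run, nothing is recorded

    normalize(pkg, { strict: false, steps: ['fixNameField', 'fixVersionField'], changes })
      .then(() => {
        console.log(pkg.content.version) // '1.0.0' -- the "v" prefix was cleaned
        console.log(changes)             // e.g. [ '"version" was cleaned and set to "1.0.0"' ]
      })
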
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js
deleted file mode 100644
index d6c86ce388e6c..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/read-package.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// This is JUST the code needed to open a package.json file and parse it.
-// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
-
-const { readFile } = require('fs/promises')
-const parseJSON = require('json-parse-even-better-errors')
-
-async function read (filename) {
-  try {
-    const data = await readFile(filename, 'utf8')
-    return data
-  } catch (err) {
-    err.message = `Could not read package.json: ${err}`
-    throw err
-  }
-}
-
-function parse (data) {
-  try {
-    const content = parseJSON(data)
-    return content
-  } catch (err) {
-    err.message = `Invalid package.json: ${err}`
-    throw err
-  }
-}
-
-// This is what most external libs will use.
-// PackageJson will call read and parse separately
-async function readPackage (filename) {
-  const data = await read(filename)
-  const content = parse(data)
-  return content
-}
-
-module.exports = {
-  read,
-  parse,
-  readPackage,
-}
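
A usage sketch for the helpers above (the relative require path is assumed to match this file):

    const { readPackage, parse } = require('./read-package.js')

    // read + parse in one step; errors are re-labelled as shown above
    readPackage('./package.json')
      .then(content => console.log(content.name))
      .catch(err => console.error(err.message)) // "Could not read package.json: ..." or "Invalid package.json: ..."

    // parse() alone works on JSON text obtained some other way
    const content = parse('{"name":"demo","version":"1.0.0"}')
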
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js
deleted file mode 100644
index 0bd0d5199da58..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/sort.js
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * arbitrary sort order for package.json largely pulled from:
- * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
- *
- * cross checked with:
- * https://github.com/npm/types/blob/main/types/index.d.ts#L104
- * https://docs.npmjs.com/cli/configuring-npm/package-json
- */
-function packageSort (json) {
-  const {
-    name,
-    version,
-    private: isPrivate,
-    description,
-    keywords,
-    homepage,
-    bugs,
-    repository,
-    funding,
-    license,
-    author,
-    maintainers,
-    contributors,
-    type,
-    imports,
-    exports,
-    main,
-    browser,
-    types,
-    bin,
-    man,
-    directories,
-    files,
-    workspaces,
-    scripts,
-    config,
-    dependencies,
-    devDependencies,
-    peerDependencies,
-    peerDependenciesMeta,
-    optionalDependencies,
-    bundledDependencies,
-    bundleDependencies,
-    engines,
-    os,
-    cpu,
-    publishConfig,
-    devEngines,
-    licenses,
-    overrides,
-    ...rest
-  } = json
-
-  return {
-    ...(typeof name !== 'undefined' ? { name } : {}),
-    ...(typeof version !== 'undefined' ? { version } : {}),
-    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
-    ...(typeof description !== 'undefined' ? { description } : {}),
-    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
-    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
-    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
-    ...(typeof repository !== 'undefined' ? { repository } : {}),
-    ...(typeof funding !== 'undefined' ? { funding } : {}),
-    ...(typeof license !== 'undefined' ? { license } : {}),
-    ...(typeof author !== 'undefined' ? { author } : {}),
-    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
-    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
-    ...(typeof type !== 'undefined' ? { type } : {}),
-    ...(typeof imports !== 'undefined' ? { imports } : {}),
-    ...(typeof exports !== 'undefined' ? { exports } : {}),
-    ...(typeof main !== 'undefined' ? { main } : {}),
-    ...(typeof browser !== 'undefined' ? { browser } : {}),
-    ...(typeof types !== 'undefined' ? { types } : {}),
-    ...(typeof bin !== 'undefined' ? { bin } : {}),
-    ...(typeof man !== 'undefined' ? { man } : {}),
-    ...(typeof directories !== 'undefined' ? { directories } : {}),
-    ...(typeof files !== 'undefined' ? { files } : {}),
-    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
-    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
-    ...(typeof config !== 'undefined' ? { config } : {}),
-    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
-    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
-    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
-    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
-    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
-    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
-    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
-    ...(typeof engines !== 'undefined' ? { engines } : {}),
-    ...(typeof os !== 'undefined' ? { os } : {}),
-    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
-    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
-    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
-    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
-    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
-    ...rest,
-  }
-}
-
-module.exports = {
-  packageSort,
-}
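
A sketch of the effect of packageSort() above: known keys come out in the fixed order, anything unrecognized is appended at the end.

    const { packageSort } = require('./sort.js')

    const sorted = packageSort({ scripts: { test: 'tap' }, name: 'demo', notAKnownKey: true, version: '1.0.0' })
    console.log(Object.keys(sorted)) // [ 'name', 'version', 'scripts', 'notAKnownKey' ]
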
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js
deleted file mode 100644
index 7259949ab661d..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-dependencies.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const depTypes = new Set([
-  'dependencies',
-  'optionalDependencies',
-  'devDependencies',
-  'peerDependencies',
-])
-
-// sort alphabetically all types of deps for a given package
-const orderDeps = (content) => {
-  for (const type of depTypes) {
-    if (content && content[type]) {
-      content[type] = Object.keys(content[type])
-        .sort((a, b) => a.localeCompare(b, 'en'))
-        .reduce((res, key) => {
-          res[key] = content[type][key]
-          return res
-        }, {})
-    }
-  }
-  return content
-}
-
-const updateDependencies = ({ content, originalContent }) => {
-  const pkg = orderDeps({
-    ...content,
-  })
-
-  // optionalDependencies don't need to be repeated in two places
-  if (pkg.dependencies) {
-    if (pkg.optionalDependencies) {
-      for (const name of Object.keys(pkg.optionalDependencies)) {
-        delete pkg.dependencies[name]
-      }
-    }
-  }
-
-  const result = { ...originalContent }
-
-  // loop through all types of dependencies and update package json pkg
-  for (const type of depTypes) {
-    if (pkg[type]) {
-      result[type] = pkg[type]
-    }
-
-    // prune empty type props from resulting object
-    const emptyDepType =
-      pkg[type]
-      && typeof pkg === 'object'
-      && Object.keys(pkg[type]).length === 0
-    if (emptyDepType) {
-      delete result[type]
-    }
-  }
-
-  // if original package.json had dep in peerDeps AND deps, preserve that.
-  const { dependencies: origProd, peerDependencies: origPeer } =
-    originalContent || {}
-  const { peerDependencies: newPeer } = result
-  if (origProd && origPeer && newPeer) {
-    // we have original prod/peer deps, and new peer deps
-    // copy over any that were in both in the original
-    for (const name of Object.keys(origPeer)) {
-      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
-        result.dependencies = result.dependencies || {}
-        result.dependencies[name] = newPeer[name]
-      }
-    }
-  }
-
-  return result
-}
-
-updateDependencies.knownKeys = depTypes
-
-module.exports = updateDependencies
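
A sketch of what updateDependencies() above does with a content/originalContent pair: each dependency block is sorted alphabetically, and names duplicated in optionalDependencies are dropped from dependencies.

    const updateDependencies = require('./update-dependencies.js')

    const result = updateDependencies({
      content: {
        dependencies: { semver: '^7.0.0', abbrev: '^2.0.0', once: '^1.4.0' },
        optionalDependencies: { once: '^1.4.0' },
      },
      originalContent: {},
    })
    console.log(result.dependencies)         // { abbrev: '^2.0.0', semver: '^7.0.0' }
    console.log(result.optionalDependencies) // { once: '^1.4.0' }
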
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js
deleted file mode 100644
index 30495e54cc3c7..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-scripts.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const updateScripts = ({ content, originalContent = {} }) => {
-  const newScripts = content.scripts
-
-  if (!newScripts) {
-    return originalContent
-  }
-
-  // validate scripts content being appended
-  const hasInvalidScripts = () =>
-    Object.entries(newScripts)
-      .some(([key, value]) =>
-        typeof key !== 'string' || typeof value !== 'string')
-  if (hasInvalidScripts()) {
-    throw Object.assign(
-      new TypeError(
-        'package.json scripts should be a key-value pair of strings.'),
-      { code: 'ESCRIPTSINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    scripts: {
-      ...newScripts,
-    },
-  }
-}
-
-module.exports = updateScripts
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js
deleted file mode 100644
index 04bf63230636f..0000000000000
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/lib/update-workspaces.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const updateWorkspaces = ({ content, originalContent = {} }) => {
-  const newWorkspaces = content.workspaces
-
-  if (!newWorkspaces) {
-    return originalContent
-  }
-
-  // validate workspaces content being appended
-  const hasInvalidWorkspaces = () =>
-    newWorkspaces.some(w => !(typeof w === 'string'))
-  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
-    throw Object.assign(
-      new TypeError('workspaces should be an array of strings.'),
-      { code: 'EWORKSPACESINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    workspaces: [
-      ...newWorkspaces,
-    ],
-  }
-}
-
-module.exports = updateWorkspaces
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
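
Roughly how the shorthand rules above play out through the package's public entry point (lib/index.js later in this diff); the behaviour sketched here follows the checks in isGitHubShorthand():

    const hostedGitInfo = require('hosted-git-info')

    hostedGitInfo.fromUrl('npm/cli').shortcut()              // 'github:npm/cli' -- user/repo shorthand
    hostedGitInfo.fromUrl('npm/cli#v11.0.0').committish      // 'v11.0.0' -- # starts the committish
    hostedGitInfo.fromUrl('./npm/cli')                       // undefined -- leading . reads as a relative path
    hostedGitInfo.fromUrl('git@github.com:npm/cli.git').type // 'github' -- not shorthand, parsed as an ssh url
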
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..2a88e95927772
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,231 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase()
+  .replace(/^\W+/g, '') // strip leading non-characters
+  .replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,

+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: () => null,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
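
Each entry above is merged over the shared defaults, so a host only overrides what it needs. A sketch of the resulting templates for github, via the public API:

    const hostedGitInfo = require('hosted-git-info')

    const info = hostedGitInfo.fromUrl('github:npm/cli#latest')
    info.browse()  // 'https://github.com/npm/cli/tree/latest'            -- browsetemplate + github treepath
    info.bugs()    // 'https://github.com/npm/cli/issues'                 -- bugstemplate from the defaults
    info.tarball() // 'https://codeload.github.com/npm/cli/tar.gz/latest' -- github-specific override
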
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..2a7100dcee6e7
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,227 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRUCache({ max: 1000 })
+
+function unknownHostedUrl (url) {
+  try {
+    const {
+      protocol,
+      hostname,
+      pathname,
+    } = new URL(url)
+
+    if (!hostname) {
+      return null
+    }
+
+    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
+    const path = pathname.replace(/\.git$/, '')
+    return `${proto}//${hostname}${path}`
+  } catch {
+    return null
+  }
+}
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static fromManifest (manifest, opts = {}) {
+    if (!manifest || typeof manifest !== 'object') {
+      return
+    }
+
+    const r = manifest.repository
+    // TODO: look into also checking the `bugs`/`homepage` URLs
+
+    const rurl = r && (
+      typeof r === 'string'
+        ? r
+        : typeof r === 'object' && typeof r.url === 'string'
+          ? r.url
+          : null
+    )
+
+    if (!rurl) {
+      throw new Error('no repository')
+    }
+
+    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
+    if (info) {
+      return info
+    }
+    const unk = unknownHostedUrl(rurl)
+    return GitHost.fromUrl(unk, opts) || unk
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
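
A usage sketch for the class above: fromUrl() results are memoized in the LRU cache keyed by url + options, and browse()/browseFile() differ for GitHub exactly as the comment describes (tree vs blob paths).

    const GitHost = require('hosted-git-info')

    const info = GitHost.fromUrl('https://github.com/npm/cli')
    info.browse('docs')              // 'https://github.com/npm/cli/tree/HEAD/docs'
    info.browseFile('package.json')  // 'https://github.com/npm/cli/blob/HEAD/package.json'

    GitHost.fromManifest({ repository: 'github:npm/cli' }).https() // 'git+https://github.com/npm/cli.git'
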
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore @ that come after the first hash, since that denotes the start
+  // of a committish which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
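
A sketch of the scp-style correction above, using the parseUrl static exposed on the main export (lib/index.js in this diff):

    const GitHost = require('hosted-git-info')

    const parsed = GitHost.parseUrl('git@github.com:npm/cli.git#semver:^11.0.0')
    parsed.protocol // 'git+ssh:'        -- scp-style input, so git+ssh:// is assumed
    parsed.pathname // '/npm/cli.git'    -- the last : before the # was rewritten to /
    parsed.hash     // '#semver:^11.0.0' -- colons after the # are left alone
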
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/package.json b/node_modules/init-package-json/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..5883a7d308d79
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/hosted-git-info/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "hosted-git-info",
+  "version": "9.0.0",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run eslint -- --fix",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "dependencies": {
+    "lru-cache": "^11.1.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/init-package-json/node_modules/lru-cache/LICENSE b/node_modules/init-package-json/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
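For orientation, a minimal sketch of how the LRUCache class built above is typically used; the values, keys, and the bare `lru-cache` require specifier are illustrative assumptions, not something this patch adds or changes.

// Minimal sketch, not part of the diff: illustrative use of the vendored class.
const { LRUCache } = require('lru-cache') // specifier assumed for illustration

// The constructor above requires at least one of max, maxSize, or ttl.
const cache = new LRUCache({ max: 100, ttl: 60_000 })

cache.set('pkg', { name: 'example' })   // add or replace an entry
cache.get('pkg')                        // returns the value, updates recency
cache.has('pkg')                        // true; does not update recency
cache.getRemainingTTL('pkg')            // ms left before the entry goes stale

// fetch() resolves misses through an async fetchMethod, if one is configured.
const fetching = new LRUCache({
  max: 50,
  fetchMethod: async (key) => `value for ${key}`, // hypothetical loader
})
// fetching.fetch('missing').then(v => console.log(v))

As the doc comments above note, dump() and load() round-trip entries using start timestamps relative to a portable Date.now(), so a dumped cache can be restored later even when performance.now() is the active clock.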
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.js b/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of
+     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
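Note on the vendored file above: it is the compiled CommonJS build of lru-cache v11 pulled in for init-package-json, so the methods shown (has, peek, fetch, memo, get, delete, clear) are the package's public surface. As a rough usage sketch only — not part of the patch, and assuming the LRUCache named export that the ESM build below also exposes:

// Illustrative sketch, not part of the vendored file or this patch.
const { LRUCache } = require('lru-cache')

const cache = new LRUCache({
  max: 100,     // at least one of max, maxSize, or ttl is required
  ttl: 60_000,  // entries turn stale after 60 seconds
  // fetchMethod enables cache.fetch() for async population;
  // it is called as (key, staleValue, { signal, options, context })
  fetchMethod: async (key, staleValue, { signal }) => {
    return `value for ${key}` // e.g. a network lookup honoring `signal`
  },
  // memoMethod enables cache.memo() for synchronous compute-and-store
  memoMethod: (key) => key.toUpperCase(),
})

async function demo () {
  cache.set('a', 1)
  cache.get('a')         // 1, and updates recency
  cache.has('a')         // true; no recency update unless updateAgeOnHas is set
  cache.peek('a')        // 1; no recency update, stale entries are not deleted
  cache.memo('b')        // 'B', computed once via memoMethod, then cached
  await cache.fetch('c') // runs fetchMethod for a missing or stale key
  cache.delete('a')      // true if the key was present
  cache.clear()          // drop everything (dispose handlers still run)
}
demo()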
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/esm/package.json b/node_modules/init-package-json/node_modules/lru-cache/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/init-package-json/node_modules/lru-cache/package.json b/node_modules/init-package-json/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
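The exports map above also publishes a prebuilt minified bundle under the ./min subpath; a consumer could opt into it like this (illustrative only, not part of the patch):

// Illustrative: resolves to dist/esm/index.min.js (or the minified
// CommonJS build under require) per the conditional exports above.
import { LRUCache } from 'lru-cache/min'

const tiny = new LRUCache({ max: 10 })
tiny.set('key', 'value')
console.log(tiny.get('key')) // 'value'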
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE b/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..d409b7f1becfc
--- /dev/null
+++ b/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const isWindows = process.platform === 'win32'
+
+const { URL } = require('node:url')
+// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
+const path = isWindows ? require('node:path/win32') : require('node:path')
+const { homedir } = require('node:os')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const validatePackageName = require('validate-npm-package-name')
+const { log } = require('proc-log')
+
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFileType = /[.](?:tgz|tar.gz|tar)$/i
+const isPortNumber = /:[0-9]+(\/|$)/i
+const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
+const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  // eslint-disable-next-line max-len
+  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+function isFileSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  if (spec.toLowerCase().startsWith('file:')) {
+    return true
+  }
+  if (isWindows) {
+    return isWindowsFile.test(spec)
+  }
+  // We never hit this in windows tests, obviously
+  /* istanbul ignore next */
+  return isPosixFile.test(spec)
+}
+
+function isAliasSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  return spec.toLowerCase().startsWith('npm:')
+}
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.name = name
+  }
+
+  if (!where) {
+    where = process.cwd()
+  }
+
+  if (isFileSpec(spec)) {
+    return fromFile(res, where)
+  } else if (isAliasSpec(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+class Result {
+  constructor (opts) {
+    this.type = opts.type
+    this.registry = opts.registry
+    this.where = opts.where
+    if (opts.raw == null) {
+      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
+    } else {
+      this.raw = opts.raw
+    }
+    this.name = undefined
+    this.escapedName = undefined
+    this.scope = undefined
+    this.rawSpec = opts.rawSpec || ''
+    this.saveSpec = opts.saveSpec
+    this.fetchSpec = opts.fetchSpec
+    if (opts.name) {
+      this.setName(opts.name)
+    }
+    this.gitRange = opts.gitRange
+    this.gitCommittish = opts.gitCommittish
+    this.gitSubdir = opts.gitSubdir
+    this.hosted = opts.hosted
+  }
+
+  // TODO move this to a getter/setter in a semver major
+  setName (name) {
+    const valid = validatePackageName(name)
+    if (!valid.validForOldPackages) {
+      throw invalidPackageName(name, valid, this.raw)
+    }
+
+    this.name = name
+    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+    this.escapedName = name.replace('/', '%2f')
+    return this
+  }
+
+  toString () {
+    const full = []
+    if (this.name != null && this.name !== '') {
+      full.push(this.name)
+    }
+    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+    if (spec != null && spec !== '') {
+      full.push(spec)
+    }
+    return full.length ? full.join('@') : this.raw
+  }
+
+  toJSON () {
+    const result = Object.assign({}, this)
+    delete result.hosted
+    return result
+  }
+}
+
+// sets res.gitCommittish, res.gitRange, and res.gitSubdir
+function setGitAttrs (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+}
+
+// Taken from: EncodePathChars and lookup_table in src/node_url.cc
+// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
+// encodeURI mangles windows paths. We can't use it to encode paths.
+// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
+// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
+const encodedPathChars = new Map([
+  ['\0', '%00'],
+  ['\t', '%09'],
+  ['\n', '%0A'],
+  ['\r', '%0D'],
+  [' ', '%20'],
+  ['"', '%22'],
+  ['#', '%23'],
+  ['%', '%25'],
+  ['?', '%3F'],
+  ['[', '%5B'],
+  ['\\', isWindows ? '/' : '%5C'],
+  [']', '%5D'],
+  ['^', '%5E'],
+  ['|', '%7C'],
+  ['~', '%7E'],
+])
+
+function pathToFileURL (str) {
+  let result = ''
+  for (let i = 0; i < str.length; i++) {
+    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
+  }
+  if (result.startsWith('file:')) {
+    return result
+  }
+  return `file:${result}`
+}
+
+function fromFile (res, where) {
+  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  let rawSpec = pathToFileURL(res.rawSpec)
+
+  if (rawSpec.startsWith('file:/')) {
+    // XXX backwards compatibility lack of compliance with RFC 8089
+
+    // turn file://path into file:/path
+    if (/^file:\/\/[^/]/.test(rawSpec)) {
+      rawSpec = `file:/${rawSpec.slice(5)}`
+    }
+
+    // turn file:/../path into file:../path
+    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
+      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    }
+  }
+
+  let resolvedUrl
+  let specUrl
+  try {
+    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
+    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
+    specUrl = new URL(rawSpec)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8089')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawSpec.slice(5))) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
+  res.saveSpec = res.saveSpec.split('\\').join('/')
+  // Ignoring because this only happens in windows
+  /* istanbul ignore next */
+  if (res.saveSpec.startsWith('file://')) {
+    // normalization of \\win32\root paths can cause a double / which we don't want
+    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
+  }
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  setGitAttrs(res, hosted.committish)
+  return res
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function fromURL (res) {
+  let rawSpec = res.rawSpec
+  res.saveSpec = rawSpec
+  if (rawSpec.startsWith('git+ssh:')) {
+    // git ssh specifiers are overloaded to also use scp-style git
+    // specifiers, so we have to parse those out and treat them special.
+    // They are NOT true URIs, so we can't hand them to URL.
+
+    // This regex looks for things that look like:
+    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+    // ...and various combinations. The username in the beginning is *required*.
+    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+    // Filter out all-number "usernames" which are really port numbers
+    // They can either be :1234 :1234/ or :1234/path but not :12abc
+    if (matched && !matched[1].match(isPortNumber)) {
+      res.type = 'git'
+      setGitAttrs(res, matched[2])
+      res.fetchSpec = matched[1]
+      return res
+    }
+  } else if (rawSpec.startsWith('git+file://')) {
+    // URL can't handle windows paths
+    rawSpec = rawSpec.replace(/\\/g, '/')
+  }
+  const parsedUrl = new URL(rawSpec)
+  // check the protocol, and then see if it's git or not
+  switch (parsedUrl.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:':
+      res.type = 'git'
+      setGitAttrs(res, parsedUrl.hash.slice(1))
+      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
+        // URL can't handle drive letters on windows file paths, the host can't contain a :
+        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
+      } else {
+        parsedUrl.hash = ''
+        res.fetchSpec = parsedUrl.toString()
+      }
+      if (res.fetchSpec.startsWith('git+')) {
+        res.fetchSpec = res.fetchSpec.slice(4)
+      }
+      break
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  if (!subSpec.name) {
+    throw new Error('aliases must have a name')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components: we save based on the fetched
+  // version, not on the argument, so it can't be computed here.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
+
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
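For context on the file above: npa() turns the strings that can be passed to `npm install` into Result objects whose `type` drives the rest of resolution. A rough sketch of the shapes it produces, based only on the source shown (illustrative, not part of the patch):

// Illustrative sketch based on the npa.js source above.
const npa = require('npm-package-arg')

npa('foo@^1.2.3')
// => Result { type: 'range', name: 'foo', rawSpec: '^1.2.3',
//             fetchSpec: '^1.2.3', registry: true, saveSpec: null, ... }

npa('foo@next')
// => type 'tag' (any registry spec that is not a semver version or range)

npa('npm:bar@2.x')
// => type 'alias', with the real target parsed into .subSpec

npa('github:npm/cli#semver:^10')
// => type 'git', with .hosted set and .gitRange === '^10'

npa('./pkgs/local-dep', '/some/project')
// => type 'directory'; .fetchSpec is resolved against the second (where) argument

npa.toPurl('foo@1.2.3')
// => 'pkg:npm/foo@1.2.3'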
diff --git a/node_modules/init-package-json/node_modules/@npmcli/package-json/package.json b/node_modules/init-package-json/node_modules/npm-package-arg/package.json
similarity index 54%
rename from node_modules/init-package-json/node_modules/@npmcli/package-json/package.json
rename to node_modules/init-package-json/node_modules/npm-package-arg/package.json
index 263d67ff3bc5b..db6ce9074cfa2 100644
--- a/node_modules/init-package-json/node_modules/@npmcli/package-json/package.json
+++ b/node_modules/init-package-json/node_modules/npm-package-arg/package.json
@@ -1,25 +1,30 @@
 {
-  "name": "@npmcli/package-json",
-  "version": "6.2.0",
-  "description": "Programmatic API to update package.json",
-  "keywords": [
-    "npm",
-    "oss"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/package-json.git"
+  "name": "npm-package-arg",
+  "version": "13.0.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
   },
-  "license": "ISC",
-  "author": "GitHub Inc.",
-  "main": "lib/index.js",
   "files": [
     "bin/",
     "lib/"
   ],
+  "dependencies": {
+    "hosted-git-info": "^9.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^6.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.5",
+    "tap": "^16.0.1"
+  },
   "scripts": {
-    "snap": "tap",
     "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
     "lint": "npm run eslint",
     "lintfix": "npm run eslint -- --fix",
     "posttest": "npm run lint",
@@ -28,34 +33,29 @@
     "template-oss-apply": "template-oss-apply --force",
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
-  "dependencies": {
-    "@npmcli/git": "^6.0.0",
-    "glob": "^10.2.2",
-    "hosted-git-info": "^8.0.0",
-    "json-parse-even-better-errors": "^4.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.5.3",
-    "validate-npm-package-license": "^3.0.4"
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-package-arg.git"
   },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/template-oss": "4.23.6",
-    "read-package-json": "^7.0.0",
-    "read-package-json-fast": "^4.0.0",
-    "tap": "^16.0.1"
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
   },
+  "homepage": "https://github.com/npm/npm-package-arg",
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.6",
-    "publish": "true"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "tap": {
+    "branches": 97,
     "nyc-arg": [
       "--exclude",
       "tap-snapshots/**"
     ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.5",
+    "publish": true
   }
 }
diff --git a/node_modules/init-package-json/package.json b/node_modules/init-package-json/package.json
index 722e74fc16cb0..de404b658c7b7 100644
--- a/node_modules/init-package-json/package.json
+++ b/node_modules/init-package-json/package.json
@@ -1,6 +1,6 @@
 {
   "name": "init-package-json",
-  "version": "8.2.1",
+  "version": "8.2.2",
   "main": "lib/init-package-json.js",
   "scripts": {
     "test": "tap",
@@ -20,13 +20,13 @@
   "license": "ISC",
   "description": "A node module to get your node module started",
   "dependencies": {
-    "@npmcli/package-json": "^6.1.0",
-    "npm-package-arg": "^12.0.0",
+    "@npmcli/package-json": "^7.0.0",
+    "npm-package-arg": "^13.0.0",
     "promzard": "^2.0.0",
     "read": "^4.0.0",
-    "semver": "^7.3.5",
+    "semver": "^7.7.2",
     "validate-npm-package-license": "^3.0.4",
-    "validate-npm-package-name": "^6.0.0"
+    "validate-npm-package-name": "^6.0.2"
   },
   "devDependencies": {
     "@npmcli/config": "^10.0.0",
diff --git a/package-lock.json b/package-lock.json
index bee1772f17416..f810694a88ff3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -106,7 +106,7 @@
         "graceful-fs": "^4.2.11",
         "hosted-git-info": "^8.1.0",
         "ini": "^5.0.0",
-        "init-package-json": "^8.2.1",
+        "init-package-json": "^8.2.2",
         "is-cidr": "^5.1.1",
         "json-parse-even-better-errors": "^4.0.0",
         "libnpmaccess": "^10.0.1",
@@ -9750,41 +9750,61 @@
       }
     },
     "node_modules/init-package-json": {
-      "version": "8.2.1",
-      "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-8.2.1.tgz",
-      "integrity": "sha512-8lhupwQjiwCJzwVILceTq0Kvyj+0cFun0jvmMz0TwCFFgCAqLV6tZl07VAexh8YFOWwIN9jxN+XHkW27fy1nZg==",
+      "version": "8.2.2",
+      "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-8.2.2.tgz",
+      "integrity": "sha512-pXVMn67Jdw2hPKLCuJZj62NC9B2OIDd1R3JwZXTHXuEnfN3Uq5kJbKOSld6YEU+KOGfMD82EzxFTYz5o0SSJoA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/package-json": "^6.1.0",
-        "npm-package-arg": "^12.0.0",
+        "@npmcli/package-json": "^7.0.0",
+        "npm-package-arg": "^13.0.0",
         "promzard": "^2.0.0",
         "read": "^4.0.0",
-        "semver": "^7.3.5",
+        "semver": "^7.7.2",
         "validate-npm-package-license": "^3.0.4",
-        "validate-npm-package-name": "^6.0.0"
+        "validate-npm-package-name": "^6.0.2"
       },
       "engines": {
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/init-package-json/node_modules/@npmcli/package-json": {
-      "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
+    "node_modules/init-package-json/node_modules/hosted-git-info": {
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
+      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^6.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^8.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
+        "lru-cache": "^11.1.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/init-package-json/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
+    "node_modules/init-package-json/node_modules/npm-package-arg": {
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
+      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^9.0.0",
         "proc-log": "^5.0.0",
-        "semver": "^7.5.3",
-        "validate-npm-package-license": "^3.0.4"
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/internal-slot": {
diff --git a/package.json b/package.json
index 473636b417d33..a0e3eb626d4c2 100644
--- a/package.json
+++ b/package.json
@@ -73,7 +73,7 @@
     "graceful-fs": "^4.2.11",
     "hosted-git-info": "^8.1.0",
     "ini": "^5.0.0",
-    "init-package-json": "^8.2.1",
+    "init-package-json": "^8.2.2",
     "is-cidr": "^5.1.1",
     "json-parse-even-better-errors": "^4.0.0",
     "libnpmaccess": "^10.0.1",

From 6b4c5f92865230ed9a260cd3e8486bf3991120eb Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:24:52 -0700
Subject: [PATCH 09/63] deps: @npmcli/run-script@10.0.0

---
 node_modules/.gitignore                       |   5 -
 .../node_modules/@npmcli/package-json/LICENSE |  18 -
 .../@npmcli/package-json/lib/index.js         | 286 ---------
 .../package-json/lib/normalize-data.js        | 257 --------
 .../@npmcli/package-json/lib/normalize.js     | 601 ------------------
 .../@npmcli/package-json/lib/read-package.js  |  39 --
 .../@npmcli/package-json/lib/sort.js          | 101 ---
 .../package-json/lib/update-dependencies.js   |  75 ---
 .../package-json/lib/update-scripts.js        |  29 -
 .../package-json/lib/update-workspaces.js     |  26 -
 .../@npmcli/package-json/package.json         |  61 --
 node_modules/@npmcli/run-script/package.json  |  10 +-
 .../node_modules/@npmcli/run-script/LICENSE   |  15 -
 .../run-script/lib/is-server-package.js       |  11 -
 .../@npmcli/run-script/lib/make-spawn-args.js |  53 --
 .../run-script/lib/node-gyp-bin/node-gyp      |   2 -
 .../run-script/lib/node-gyp-bin/node-gyp.cmd  |   1 -
 .../@npmcli/run-script/lib/package-envs.js    |  29 -
 .../@npmcli/run-script/lib/run-script-pkg.js  | 114 ----
 .../@npmcli/run-script/lib/run-script.js      |  15 -
 .../@npmcli/run-script/lib/set-path.js        |  45 --
 .../@npmcli/run-script/lib/signal-manager.js  |  50 --
 .../run-script/lib/validate-options.js        |  39 --
 .../@npmcli/run-script/package.json           |  54 --
 package-lock.json                             |  58 +-
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 workspaces/libnpmversion/package.json         |   2 +-
 30 files changed, 20 insertions(+), 1984 deletions(-)
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js
 delete mode 100644 node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
 delete mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
 delete mode 100755 node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/run-script/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 991015407c23e..03122be7ec29b 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -51,11 +51,6 @@
 !/@npmcli/query
 !/@npmcli/redact
 !/@npmcli/run-script
-!/@npmcli/run-script/node_modules/
-/@npmcli/run-script/node_modules/*
-!/@npmcli/run-script/node_modules/@npmcli/
-/@npmcli/run-script/node_modules/@npmcli/*
-!/@npmcli/run-script/node_modules/@npmcli/package-json
 !/@pkgjs/
 /@pkgjs/*
 !/@pkgjs/parseargs
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE
deleted file mode 100644
index 6a1f3708f6d70..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/LICENSE
+++ /dev/null
@@ -1,18 +0,0 @@
-ISC License
-
-Copyright GitHub Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js
deleted file mode 100644
index 7eff602d73a3f..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/index.js
+++ /dev/null
@@ -1,286 +0,0 @@
-const { readFile, writeFile } = require('node:fs/promises')
-const { resolve } = require('node:path')
-const parseJSON = require('json-parse-even-better-errors')
-
-const updateDeps = require('./update-dependencies.js')
-const updateScripts = require('./update-scripts.js')
-const updateWorkspaces = require('./update-workspaces.js')
-const normalize = require('./normalize.js')
-const { read, parse } = require('./read-package.js')
-const { packageSort } = require('./sort.js')
-
-// a list of handy specialized helper functions that take
-// care of special cases that are handled by the npm cli
-const knownSteps = new Set([
-  updateDeps,
-  updateScripts,
-  updateWorkspaces,
-])
-
-// list of all keys that are handled by "knownSteps" helpers
-const knownKeys = new Set([
-  ...updateDeps.knownKeys,
-  'scripts',
-  'workspaces',
-])
-
-class PackageJson {
-  static normalizeSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'optionalDedupe',
-    'scripts',
-    'funding',
-    'bin',
-  ])
-
-  // npm pkg fix
-  static fixSteps = Object.freeze([
-    'binRefs',
-    'bundleDependencies',
-    'bundleDependenciesFalse',
-    'fixName',
-    'fixNameField',
-    'fixVersionField',
-    'fixRepositoryField',
-    'fixDependencies',
-    'devDependencies',
-    'scriptpath',
-  ])
-
-  static prepareSteps = Object.freeze([
-    '_id',
-    '_attributes',
-    'bundledDependencies',
-    'bundleDependencies',
-    'bundleDependenciesDeleteFalse',
-    'gypfile',
-    'serverjs',
-    'scriptpath',
-    'authors',
-    'readme',
-    'mans',
-    'binDir',
-    'gitHead',
-    'fillTypes',
-    'normalizeData',
-    'binRefs',
-  ])
-
-  // create a new empty package.json, so we can save at the given path even
-  // though we didn't start from a parsed file
-  static async create (path, opts = {}) {
-    const p = new PackageJson()
-    await p.create(path)
-    if (opts.data) {
-      return p.update(opts.data)
-    }
-    return p
-  }
-
-  // Loads a package.json at given path and JSON parses
-  static async load (path, opts = {}) {
-    const p = new PackageJson()
-    // Avoid try/catch if we aren't going to create
-    if (!opts.create) {
-      return p.load(path)
-    }
-
-    try {
-      return await p.load(path)
-    } catch (err) {
-      if (!err.message.startsWith('Could not read package.json')) {
-        throw err
-      }
-      return await p.create(path)
-    }
-  }
-
-  // npm pkg fix
-  static async fix (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.fix(opts)
-  }
-
-  // read-package-json compatible behavior
-  static async prepare (path, opts) {
-    const p = new PackageJson()
-    await p.load(path, true)
-    return p.prepare(opts)
-  }
-
-  // read-package-json-fast compatible behavior
-  static async normalize (path, opts) {
-    const p = new PackageJson()
-    await p.load(path)
-    return p.normalize(opts)
-  }
-
-  #path
-  #manifest
-  #readFileContent = ''
-  #canSave = true
-
-  // Load content from given path
-  async load (path, parseIndex) {
-    this.#path = path
-    let parseErr
-    try {
-      this.#readFileContent = await read(this.filename)
-    } catch (err) {
-      if (!parseIndex) {
-        throw err
-      }
-      parseErr = err
-    }
-
-    if (parseErr) {
-      const indexFile = resolve(this.path, 'index.js')
-      let indexFileContent
-      try {
-        indexFileContent = await readFile(indexFile, 'utf8')
-      } catch (err) {
-        throw parseErr
-      }
-      try {
-        this.fromComment(indexFileContent)
-      } catch (err) {
-        throw parseErr
-      }
-      // This wasn't a package.json so prevent saving
-      this.#canSave = false
-      return this
-    }
-
-    return this.fromJSON(this.#readFileContent)
-  }
-
-  // Load data from a JSON string/buffer
-  fromJSON (data) {
-    this.#manifest = parse(data)
-    return this
-  }
-
-  fromContent (data) {
-    this.#manifest = data
-    this.#canSave = false
-    return this
-  }
-
-  // Load data from a comment
-  // /**package { "name": "foo", "version": "1.2.3", ... } **/
-  fromComment (data) {
-    data = data.split(/^\/\*\*package(?:\s|$)/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[1]
-    data = data.split(/\*\*\/$/m)
-
-    if (data.length < 2) {
-      throw new Error('File has no package in comments')
-    }
-    data = data[0]
-    data = data.replace(/^\s*\*/mg, '')
-
-    this.#manifest = parseJSON(data)
-    return this
-  }
-
-  get content () {
-    return this.#manifest
-  }
-
-  get path () {
-    return this.#path
-  }
-
-  get filename () {
-    if (this.path) {
-      return resolve(this.path, 'package.json')
-    }
-    return undefined
-  }
-
-  create (path) {
-    this.#path = path
-    this.#manifest = {}
-    return this
-  }
-
-  // This should be the ONLY way to set content in the manifest
-  update (content) {
-    if (!this.content) {
-      throw new Error('Can not update without content.  Please `load` or `create`')
-    }
-
-    for (const step of knownSteps) {
-      this.#manifest = step({ content, originalContent: this.content })
-    }
-
-    // unknown properties will just be overwitten
-    for (const [key, value] of Object.entries(content)) {
-      if (!knownKeys.has(key)) {
-        this.content[key] = value
-      }
-    }
-
-    return this
-  }
-
-  async save ({ sort } = {}) {
-    if (!this.#canSave) {
-      throw new Error('No package.json to save to')
-    }
-    const {
-      [Symbol.for('indent')]: indent,
-      [Symbol.for('newline')]: newline,
-      ...rest
-    } = this.content
-
-    const format = indent === undefined ? '  ' : indent
-    const eol = newline === undefined ? '\n' : newline
-
-    const content = sort ? packageSort(rest) : rest
-
-    const fileContent = `${
-      JSON.stringify(content, null, format)
-    }\n`
-      .replace(/\n/g, eol)
-
-    if (fileContent.trim() !== this.#readFileContent.trim()) {
-      const written = await writeFile(this.filename, fileContent)
-      this.#readFileContent = fileContent
-      return written
-    }
-  }
-
-  async normalize (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.normalizeSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async prepare (opts = {}) {
-    if (!opts.steps) {
-      opts.steps = this.constructor.prepareSteps
-    }
-    await normalize(this, opts)
-    return this
-  }
-
-  async fix (opts = {}) {
-    // This one is not overridable
-    opts.steps = this.constructor.fixSteps
-    await normalize(this, opts)
-    return this
-  }
-}
-
-module.exports = PackageJson
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js
deleted file mode 100644
index 79b0bafbcd3a4..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize-data.js
+++ /dev/null
@@ -1,257 +0,0 @@
-// Originally normalize-package-data
-
-const url = require('node:url')
-const hostedGitInfo = require('hosted-git-info')
-const validateLicense = require('validate-npm-package-license')
-
-const typos = {
-  dependancies: 'dependencies',
-  dependecies: 'dependencies',
-  depdenencies: 'dependencies',
-  devEependencies: 'devDependencies',
-  depends: 'dependencies',
-  'dev-dependencies': 'devDependencies',
-  devDependences: 'devDependencies',
-  devDepenencies: 'devDependencies',
-  devdependencies: 'devDependencies',
-  repostitory: 'repository',
-  repo: 'repository',
-  prefereGlobal: 'preferGlobal',
-  hompage: 'homepage',
-  hampage: 'homepage',
-  autohr: 'author',
-  autor: 'author',
-  contributers: 'contributors',
-  publicationConfig: 'publishConfig',
-  script: 'scripts',
-}
-
-const isEmail = str => str.includes('@') && (str.indexOf('@') < str.lastIndexOf('.'))
-
-// Extracts description from contents of a readme file in markdown format
-function extractDescription (description) {
-  // the first block of text before the first heading that isn't the first line heading
-  const lines = description.trim().split('\n')
-  let start = 0
-  // skip initial empty lines and lines that start with #
-  while (lines[start]?.trim().match(/^(#|$)/)) {
-    start++
-  }
-  let end = start + 1
-  // keep going till we get to the end or an empty line
-  while (end < lines.length && lines[end].trim()) {
-    end++
-  }
-  return lines.slice(start, end).join(' ').trim()
-}
-
-function stringifyPerson (person) {
-  if (typeof person !== 'string') {
-    const name = person.name || ''
-    const u = person.url || person.web
-    const wrappedUrl = u ? (' (' + u + ')') : ''
-    const e = person.email || person.mail
-    const wrappedEmail = e ? (' <' + e + '>') : ''
-    person = name + wrappedEmail + wrappedUrl
-  }
-  const matchedName = person.match(/^([^(<]+)/)
-  const matchedUrl = person.match(/\(([^()]+)\)/)
-  const matchedEmail = person.match(/<([^<>]+)>/)
-  const parsed = {}
-  if (matchedName?.[0].trim()) {
-    parsed.name = matchedName[0].trim()
-  }
-  if (matchedEmail) {
-    parsed.email = matchedEmail[1]
-  }
-  if (matchedUrl) {
-    parsed.url = matchedUrl[1]
-  }
-  return parsed
-}
-
-function normalizeData (data, changes) {
-  // fixDescriptionField
-  if (data.description && typeof data.description !== 'string') {
-    changes?.push(`'description' field should be a string`)
-    delete data.description
-  }
-  if (data.readme && !data.description && data.readme !== 'ERROR: No README data found!') {
-    data.description = extractDescription(data.readme)
-  }
-  if (data.description === undefined) {
-    delete data.description
-  }
-  if (!data.description) {
-    changes?.push('No description')
-  }
-
-  // fixModulesField
-  if (data.modules) {
-    changes?.push(`modules field is deprecated`)
-    delete data.modules
-  }
-
-  // fixFilesField
-  const files = data.files
-  if (files && !Array.isArray(files)) {
-    changes?.push(`Invalid 'files' member`)
-    delete data.files
-  } else if (data.files) {
-    data.files = data.files.filter(function (file) {
-      if (!file || typeof file !== 'string') {
-        changes?.push(`Invalid filename in 'files' list: ${file}`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-
-  // fixManField
-  if (data.man && typeof data.man === 'string') {
-    data.man = [data.man]
-  }
-
-  // fixBugsField
-  if (!data.bugs && data.repository?.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted && hosted.bugs()) {
-      data.bugs = { url: hosted.bugs() }
-    }
-  } else if (data.bugs) {
-    if (typeof data.bugs === 'string') {
-      if (isEmail(data.bugs)) {
-        data.bugs = { email: data.bugs }
-        /* eslint-disable-next-line node/no-deprecated-api */
-      } else if (url.parse(data.bugs).protocol) {
-        data.bugs = { url: data.bugs }
-      } else {
-        changes?.push(`Bug string field must be url, email, or {email,url}`)
-      }
-    } else {
-      for (const k in data.bugs) {
-        if (['web', 'name'].includes(k)) {
-          changes?.push(`bugs['${k}'] should probably be bugs['url'].`)
-          data.bugs.url = data.bugs[k]
-          delete data.bugs[k]
-        }
-      }
-      const oldBugs = data.bugs
-      data.bugs = {}
-      if (oldBugs.url) {
-        /* eslint-disable-next-line node/no-deprecated-api */
-        if (typeof (oldBugs.url) === 'string' && url.parse(oldBugs.url).protocol) {
-          data.bugs.url = oldBugs.url
-        } else {
-          changes?.push('bugs.url field must be a string url. Deleted.')
-        }
-      }
-      if (oldBugs.email) {
-        if (typeof (oldBugs.email) === 'string' && isEmail(oldBugs.email)) {
-          data.bugs.email = oldBugs.email
-        } else {
-          changes?.push('bugs.email field must be a string email. Deleted.')
-        }
-      }
-    }
-    if (!data.bugs.email && !data.bugs.url) {
-      delete data.bugs
-      changes?.push('Normalized value of bugs field is an empty object. Deleted.')
-    }
-  }
-  // fixKeywordsField
-  if (typeof data.keywords === 'string') {
-    data.keywords = data.keywords.split(/,\s+/)
-  }
-  if (data.keywords && !Array.isArray(data.keywords)) {
-    delete data.keywords
-    changes?.push(`keywords should be an array of strings`)
-  } else if (data.keywords) {
-    data.keywords = data.keywords.filter(function (kw) {
-      if (typeof kw !== 'string' || !kw) {
-        changes?.push(`keywords should be an array of strings`)
-        return false
-      } else {
-        return true
-      }
-    })
-  }
-  // fixBundleDependenciesField
-  const bdd = 'bundledDependencies'
-  const bd = 'bundleDependencies'
-  if (data[bdd] && !data[bd]) {
-    data[bd] = data[bdd]
-    delete data[bdd]
-  }
-  if (data[bd] && !Array.isArray(data[bd])) {
-    changes?.push(`Invalid 'bundleDependencies' list. Must be array of package names`)
-    delete data[bd]
-  } else if (data[bd]) {
-    data[bd] = data[bd].filter(function (filtered) {
-      if (!filtered || typeof filtered !== 'string') {
-        changes?.push(`Invalid bundleDependencies member: ${filtered}`)
-        return false
-      } else {
-        if (!data.dependencies) {
-          data.dependencies = {}
-        }
-        if (!Object.prototype.hasOwnProperty.call(data.dependencies, filtered)) {
-          changes?.push(`Non-dependency in bundleDependencies: ${filtered}`)
-          data.dependencies[filtered] = '*'
-        }
-        return true
-      }
-    })
-  }
-  // fixHomepageField
-  if (!data.homepage && data.repository && data.repository.url) {
-    const hosted = hostedGitInfo.fromUrl(data.repository.url)
-    if (hosted) {
-      data.homepage = hosted.docs()
-    }
-  }
-  if (data.homepage) {
-    if (typeof data.homepage !== 'string') {
-      changes?.push('homepage field must be a string url. Deleted.')
-      delete data.homepage
-    } else {
-      /* eslint-disable-next-line node/no-deprecated-api */
-      if (!url.parse(data.homepage).protocol) {
-        data.homepage = 'http://' + data.homepage
-      }
-    }
-  }
-  // fixReadmeField
-  if (!data.readme) {
-    changes?.push('No README data')
-    data.readme = 'ERROR: No README data found!'
-  }
-  // fixLicenseField
-  const license = data.license || data.licence
-  if (!license) {
-    changes?.push('No license field.')
-  } else if (typeof (license) !== 'string' || license.length < 1 || license.trim() === '') {
-    changes?.push('license should be a valid SPDX license expression')
-  } else if (!validateLicense(license).validForNewPackages) {
-    changes?.push('license should be a valid SPDX license expression')
-  }
-  // fixPeople
-  if (data.author) {
-    data.author = stringifyPerson(data.author)
-  }
-  ['maintainers', 'contributors'].forEach(function (set) {
-    if (!Array.isArray(data[set])) {
-      return
-    }
-    data[set] = data[set].map(stringifyPerson)
-  })
-  // fixTypos
-  for (const d in typos) {
-    if (Object.prototype.hasOwnProperty.call(data, d)) {
-      changes?.push(`${d} should probably be ${typos[d]}.`)
-    }
-  }
-}
-
-module.exports = { normalizeData }
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js
deleted file mode 100644
index 845f6753a9a00..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/normalize.js
+++ /dev/null
@@ -1,601 +0,0 @@
-const valid = require('semver/functions/valid')
-const clean = require('semver/functions/clean')
-const fs = require('node:fs/promises')
-const path = require('node:path')
-const { log } = require('proc-log')
-const moduleBuiltin = require('node:module')
-
-/**
- * @type {import('hosted-git-info')}
- */
-let _hostedGitInfo
-function lazyHostedGitInfo () {
-  if (!_hostedGitInfo) {
-    _hostedGitInfo = require('hosted-git-info')
-  }
-  return _hostedGitInfo
-}
-
-/**
- * @type {import('glob').glob}
- */
-let _glob
-function lazyLoadGlob () {
-  if (!_glob) {
-    _glob = require('glob').glob
-  }
-  return _glob
-}
-
-// used to be npm-normalize-package-bin
-function normalizePackageBin (pkg, changes) {
-  if (pkg.bin) {
-    if (typeof pkg.bin === 'string' && pkg.name) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = { [pkg.name]: pkg.bin }
-    } else if (Array.isArray(pkg.bin)) {
-      changes?.push('"bin" was converted to an object')
-      pkg.bin = pkg.bin.reduce((acc, k) => {
-        acc[path.basename(k)] = k
-        return acc
-      }, {})
-    }
-    if (typeof pkg.bin === 'object') {
-      for (const binKey in pkg.bin) {
-        if (typeof pkg.bin[binKey] !== 'string') {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-        const base = path.basename(secureAndUnixifyPath(binKey))
-        if (!base) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        const binTarget = secureAndUnixifyPath(pkg.bin[binKey])
-
-        if (!binTarget) {
-          delete pkg.bin[binKey]
-          changes?.push(`removed invalid "bin[${binKey}]"`)
-          continue
-        }
-
-        if (base !== binKey) {
-          delete pkg.bin[binKey]
-          changes?.push(`"bin[${binKey}]" was renamed to "bin[${base}]"`)
-        }
-        if (binTarget !== pkg.bin[binKey]) {
-          changes?.push(`"bin[${base}]" script name was cleaned`)
-        }
-        pkg.bin[base] = binTarget
-      }
-
-      if (Object.keys(pkg.bin).length === 0) {
-        changes?.push('empty "bin" was removed')
-        delete pkg.bin
-      }
-
-      return pkg
-    }
-  }
-  delete pkg.bin
-}
-
-function normalizePackageMan (pkg, changes) {
-  if (pkg.man) {
-    const mans = []
-    for (const man of (Array.isArray(pkg.man) ? pkg.man : [pkg.man])) {
-      if (typeof man !== 'string') {
-        changes?.push(`removed invalid "man [${man}]"`)
-      } else {
-        mans.push(secureAndUnixifyPath(man))
-      }
-    }
-
-    if (!mans.length) {
-      changes?.push('empty "man" was removed')
-    } else {
-      pkg.man = mans
-      return pkg
-    }
-  }
-  delete pkg.man
-}
-
-function isCorrectlyEncodedName (spec) {
-  return !spec.match(/[/@\s+%:]/) &&
-    spec === encodeURIComponent(spec)
-}
-
-function isValidScopedPackageName (spec) {
-  if (spec.charAt(0) !== '@') {
-    return false
-  }
-
-  const rest = spec.slice(1).split('/')
-  if (rest.length !== 2) {
-    return false
-  }
-
-  return rest[0] && rest[1] &&
-    rest[0] === encodeURIComponent(rest[0]) &&
-    rest[1] === encodeURIComponent(rest[1])
-}
-
-function unixifyPath (ref) {
-  return ref.replace(/\\|:/g, '/')
-}
-
-function secureAndUnixifyPath (ref) {
-  const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref))))
-  return secured.startsWith('./') ? '' : secured
-}
-
-// We don't want the `changes` array in here by default because this is a hot
-// path for parsing packuments during install.  So the calling method passes it
-// in if it wants to track changes.
-const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) => {
-  if (!pkg.content) {
-    throw new Error('Can not normalize without content')
-  }
-  const data = pkg.content
-  const scripts = data.scripts || {}
-  const pkgId = `${data.name ?? ''}@${data.version ?? ''}`
-
-  // name and version are load bearing so we have to clean them up first
-  if (steps.includes('fixName') || steps.includes('fixNameField') || steps.includes('normalizeData')) {
-    if (!data.name && !strict) {
-      changes?.push('Missing "name" field was set to an empty string')
-      data.name = ''
-    } else {
-      if (typeof data.name !== 'string') {
-        throw new Error('name field must be a string.')
-      }
-      if (!strict) {
-        const name = data.name.trim()
-        if (data.name !== name) {
-          changes?.push(`Whitespace was trimmed from "name"`)
-          data.name = name
-        }
-      }
-
-      if (data.name.startsWith('.') ||
-        !(isValidScopedPackageName(data.name) || isCorrectlyEncodedName(data.name)) ||
-        (strict && (!allowLegacyCase) && data.name !== data.name.toLowerCase()) ||
-        data.name.toLowerCase() === 'node_modules' ||
-        data.name.toLowerCase() === 'favicon.ico') {
-        throw new Error('Invalid name: ' + JSON.stringify(data.name))
-      }
-    }
-  }
-
-  if (steps.includes('fixName')) {
-    // Check for conflicts with builtin modules
-    if (moduleBuiltin.builtinModules.includes(data.name)) {
-      log.warn('package-json', pkgId, `Package name "${data.name}" conflicts with a Node.js built-in module name`)
-    }
-  }
-
-  if (steps.includes('fixVersionField') || steps.includes('normalizeData')) {
-    // allow "loose" semver 1.0 versions in non-strict mode
-    // enforce strict semver 2.0 compliance in strict mode
-    const loose = !strict
-    if (!data.version) {
-      data.version = ''
-    } else {
-      if (!valid(data.version, loose)) {
-        throw new Error(`Invalid version: "${data.version}"`)
-      }
-      const version = clean(data.version, loose)
-      if (version !== data.version) {
-        changes?.push(`"version" was cleaned and set to "${version}"`)
-        data.version = version
-      }
-    }
-  }
-  // remove attributes that start with "_"
-  if (steps.includes('_attributes')) {
-    for (const key in data) {
-      if (key.startsWith('_')) {
-        changes?.push(`"${key}" was removed`)
-        delete pkg.content[key]
-      }
-    }
-  }
-
-  // build the "_id" attribute
-  if (steps.includes('_id')) {
-    if (data.name && data.version) {
-      changes?.push(`"_id" was set to ${pkgId}`)
-      data._id = pkgId
-    }
-  }
-
-  // fix bundledDependencies typo
-  // normalize bundleDependencies
-  if (steps.includes('bundledDependencies')) {
-    if (data.bundleDependencies === undefined && data.bundledDependencies !== undefined) {
-      data.bundleDependencies = data.bundledDependencies
-    }
-    changes?.push(`Deleted incorrect "bundledDependencies"`)
-    delete data.bundledDependencies
-  }
-  // expand "bundleDependencies: true or translate from object"
-  if (steps.includes('bundleDependencies')) {
-    const bd = data.bundleDependencies
-    if (bd === false && !steps.includes('bundleDependenciesDeleteFalse')) {
-      changes?.push(`"bundleDependencies" was changed from "false" to "[]"`)
-      data.bundleDependencies = []
-    } else if (bd === true) {
-      changes?.push(`"bundleDependencies" was auto-populated from "dependencies"`)
-      data.bundleDependencies = Object.keys(data.dependencies || {})
-    } else if (bd && typeof bd === 'object') {
-      if (!Array.isArray(bd)) {
-        changes?.push(`"bundleDependencies" was changed from an object to an array`)
-        data.bundleDependencies = Object.keys(bd)
-      }
-    } else if ('bundleDependencies' in data) {
-      changes?.push(`"bundleDependencies" was removed`)
-      delete data.bundleDependencies
-    }
-  }
-
-  // it was once common practice to list deps both in optionalDependencies and
-  // in dependencies, to support npm versions that did not know about
-  // optionalDependencies.  This is no longer a relevant need, so duplicating
-  // the deps in two places is unnecessary and excessive.
-  if (steps.includes('optionalDedupe')) {
-    if (data.dependencies &&
-      data.optionalDependencies && typeof data.optionalDependencies === 'object') {
-      for (const name in data.optionalDependencies) {
-        changes?.push(`optionalDependencies."${name}" was removed`)
-        delete data.dependencies[name]
-      }
-      if (!Object.keys(data.dependencies).length) {
-        changes?.push(`Empty "optionalDependencies" was removed`)
-        delete data.dependencies
-      }
-    }
-  }
-
-  // add "install" attribute if any "*.gyp" files exist
-  if (steps.includes('gypfile')) {
-    if (!scripts.install && !scripts.preinstall && data.gypfile !== false) {
-      const files = await lazyLoadGlob()('*.gyp', { cwd: pkg.path })
-      if (files.length) {
-        scripts.install = 'node-gyp rebuild'
-        data.scripts = scripts
-        data.gypfile = true
-        changes?.push(`"scripts.install" was set to "node-gyp rebuild"`)
-        changes?.push(`"gypfile" was set to "true"`)
-      }
-    }
-  }
-
-  // add "start" attribute if "server.js" exists
-  if (steps.includes('serverjs') && !scripts.start) {
-    try {
-      await fs.access(path.join(pkg.path, 'server.js'))
-      scripts.start = 'node server.js'
-      data.scripts = scripts
-      changes?.push('"scripts.start" was set to "node server.js"')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // strip "node_modules/.bin" from scripts entries
-  // remove invalid scripts entries (non-strings)
-  if ((steps.includes('scripts') || steps.includes('scriptpath')) && data.scripts !== undefined) {
-    const spre = /^(\.[/\\])?node_modules[/\\].bin[\\/]/
-    if (typeof data.scripts === 'object') {
-      for (const name in data.scripts) {
-        if (typeof data.scripts[name] !== 'string') {
-          delete data.scripts[name]
-          changes?.push(`Invalid scripts."${name}" was removed`)
-        } else if (steps.includes('scriptpath') && spre.test(data.scripts[name])) {
-          data.scripts[name] = data.scripts[name].replace(spre, '')
-          changes?.push(`scripts entry "${name}" was fixed to remove node_modules/.bin reference`)
-        }
-      }
-    } else {
-      changes?.push(`Removed invalid "scripts"`)
-      delete data.scripts
-    }
-  }
-
-  if (steps.includes('funding')) {
-    if (data.funding && typeof data.funding === 'string') {
-      data.funding = { url: data.funding }
-      changes?.push(`"funding" was changed to an object with a url attribute`)
-    }
-  }
-
-  // populate "authors" attribute
-  if (steps.includes('authors') && !data.contributors) {
-    try {
-      const authorData = await fs.readFile(path.join(pkg.path, 'AUTHORS'), 'utf8')
-      const authors = authorData.split(/\r?\n/g)
-        .map(line => line.replace(/^\s*#.*$/, '').trim())
-        .filter(line => line)
-      data.contributors = authors
-      changes?.push('"contributors" was auto-populated with the contents of the "AUTHORS" file')
-    } catch {
-      // do nothing
-    }
-  }
-
-  // populate "readme" attribute
-  if (steps.includes('readme') && !data.readme) {
-    const mdre = /\.m?a?r?k?d?o?w?n?$/i
-    const files = await lazyLoadGlob()('{README,README.*}', {
-      cwd: pkg.path,
-      nocase: true,
-      mark: true,
-    })
-    let readmeFile
-    for (const file of files) {
-      // don't accept directories.
-      if (!file.endsWith(path.sep)) {
-        if (file.match(mdre)) {
-          readmeFile = file
-          break
-        }
-        if (file.endsWith('README')) {
-          readmeFile = file
-        }
-      }
-    }
-    if (readmeFile) {
-      const readmeData = await fs.readFile(path.join(pkg.path, readmeFile), 'utf8')
-      data.readme = readmeData
-      data.readmeFilename = readmeFile
-      changes?.push(`"readme" was set to the contents of ${readmeFile}`)
-      changes?.push(`"readmeFilename" was set to ${readmeFile}`)
-    }
-    if (!data.readme) {
-      data.readme = 'ERROR: No README data found!'
-    }
-  }
-
-  // expand directories.man
-  if (steps.includes('mans')) {
-    if (data.directories?.man && !data.man) {
-      const manDir = secureAndUnixifyPath(data.directories.man)
-      const cwd = path.resolve(pkg.path, manDir)
-      const files = await lazyLoadGlob()('**/*.[0-9]', { cwd })
-      data.man = files.map(man =>
-        path.relative(pkg.path, path.join(cwd, man)).split(path.sep).join('/')
-      )
-    }
-    normalizePackageMan(data, changes)
-  }
-
-  if (steps.includes('bin') || steps.includes('binDir') || steps.includes('binRefs')) {
-    normalizePackageBin(data, changes)
-  }
-
-  // expand "directories.bin"
-  if (steps.includes('binDir') && data.directories?.bin && !data.bin) {
-    const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin))
-    const bins = await lazyLoadGlob()('**', { cwd: binsDir })
-    data.bin = bins.reduce((acc, binFile) => {
-      if (binFile && !binFile.startsWith('.')) {
-        const binName = path.basename(binFile)
-        acc[binName] = path.join(data.directories.bin, binFile)
-      }
-      return acc
-    }, {})
-    // *sigh*
-    normalizePackageBin(data, changes)
-  }
-
-  // populate "gitHead" attribute
-  if (steps.includes('gitHead') && !data.gitHead) {
-    const git = require('@npmcli/git')
-    const gitRoot = await git.find({ cwd: pkg.path, root })
-    let head
-    if (gitRoot) {
-      try {
-        head = await fs.readFile(path.resolve(gitRoot, '.git/HEAD'), 'utf8')
-      } catch (err) {
-      // do nothing
-      }
-    }
-    let headData
-    if (head) {
-      if (head.startsWith('ref: ')) {
-        const headRef = head.replace(/^ref: /, '').trim()
-        const headFile = path.resolve(gitRoot, '.git', headRef)
-        try {
-          headData = await fs.readFile(headFile, 'utf8')
-          headData = headData.replace(/^ref: /, '').trim()
-        } catch (err) {
-          // do nothing
-        }
-        if (!headData) {
-          const packFile = path.resolve(gitRoot, '.git/packed-refs')
-          try {
-            let refs = await fs.readFile(packFile, 'utf8')
-            if (refs) {
-              refs = refs.split('\n')
-              for (let i = 0; i < refs.length; i++) {
-                const match = refs[i].match(/^([0-9a-f]{40}) (.+)$/)
-                if (match && match[2].trim() === headRef) {
-                  headData = match[1]
-                  break
-                }
-              }
-            }
-          } catch {
-            // do nothing
-          }
-        }
-      } else {
-        headData = head.trim()
-      }
-    }
-    if (headData) {
-      data.gitHead = headData
-    }
-  }
-
-  // populate "types" attribute
-  if (steps.includes('fillTypes')) {
-    const index = data.main || 'index.js'
-
-    if (typeof index !== 'string') {
-      throw new TypeError('The "main" attribute must be of type string.')
-    }
-
-    // TODO exports is much more complicated than this in verbose format
-    // We need to support for instance
-
-    // "exports": {
-    //   ".": [
-    //     {
-    //       "default": "./lib/npm.js"
-    //     },
-    //     "./lib/npm.js"
-    //   ],
-    //   "./package.json": "./package.json"
-    // },
-    // as well as conditional exports
-
-    // if (data.exports && typeof data.exports === 'string') {
-    //   index = data.exports
-    // }
-
-    // if (data.exports && data.exports['.']) {
-    //   index = data.exports['.']
-    //   if (typeof index !== 'string') {
-    //   }
-    // }
-    const extless = path.join(path.dirname(index), path.basename(index, path.extname(index)))
-    const dts = `./${extless}.d.ts`
-    const hasDTSFields = 'types' in data || 'typings' in data
-    if (!hasDTSFields) {
-      try {
-        await fs.access(path.join(pkg.path, dts))
-        data.types = dts.split(path.sep).join('/')
-      } catch {
-        // do nothing
-      }
-    }
-  }
-
-  // "normalizeData" from "read-package-json", which was just a call through to
-  // "normalize-package-data".  We only call the "fixer" functions because
-  // outside of that it was also clobbering _id (which we already conditionally
-  // do) and also adding the gypfile script (which we also already
-  // conditionally do)
-
-  // Some steps are isolated so we can do a limited subset of these in `fix`
-  if (steps.includes('fixRepositoryField') || steps.includes('normalizeData')) {
-    if (data.repositories) {
-      changes?.push(`"repository" was set to the first entry in "repositories" (${data.repository})`)
-      data.repository = data.repositories[0]
-    }
-    if (data.repository) {
-      if (typeof data.repository === 'string') {
-        changes?.push('"repository" was changed from a string to an object')
-        data.repository = {
-          type: 'git',
-          url: data.repository,
-        }
-      }
-      if (data.repository.url) {
-        const hosted = lazyHostedGitInfo().fromUrl(data.repository.url)
-        let r
-        if (hosted) {
-          if (hosted.getDefaultRepresentation() === 'shortcut') {
-            r = hosted.https()
-          } else {
-            r = hosted.toString()
-          }
-          if (r !== data.repository.url) {
-            changes?.push(`"repository.url" was normalized to "${r}"`)
-            data.repository.url = r
-          }
-        }
-      }
-    }
-  }
-
-  if (steps.includes('fixDependencies') || steps.includes('normalizeData')) {
-    // peerDependencies?
-    // devDependencies is meaningless here, it's ignored on an installed package
-    for (const type of ['dependencies', 'devDependencies', 'optionalDependencies']) {
-      if (data[type]) {
-        let secondWarning = true
-        if (typeof data[type] === 'string') {
-          changes?.push(`"${type}" was converted from a string into an object`)
-          data[type] = data[type].trim().split(/[\n\r\s\t ,]+/)
-          secondWarning = false
-        }
-        if (Array.isArray(data[type])) {
-          if (secondWarning) {
-            changes?.push(`"${type}" was converted from an array into an object`)
-          }
-          const o = {}
-          for (const d of data[type]) {
-            if (typeof d === 'string') {
-              const dep = d.trim().split(/(:?[@\s><=])/)
-              const dn = dep.shift()
-              const dv = dep.join('').replace(/^@/, '').trim()
-              o[dn] = dv
-            }
-          }
-          data[type] = o
-        }
-      }
-    }
-    // normalize-package-data used to put optional dependencies BACK into
-    // dependencies here, we no longer do this
-
-    for (const deps of ['dependencies', 'devDependencies']) {
-      if (deps in data) {
-        if (!data[deps] || typeof data[deps] !== 'object') {
-          changes?.push(`Removed invalid "${deps}"`)
-          delete data[deps]
-        } else {
-          for (const d in data[deps]) {
-            const r = data[deps][d]
-            if (typeof r !== 'string') {
-              changes?.push(`Removed invalid "${deps}.${d}"`)
-              delete data[deps][d]
-            }
-            const hosted = lazyHostedGitInfo().fromUrl(data[deps][d])?.toString()
-            if (hosted && hosted !== data[deps][d]) {
-              changes?.push(`Normalized git reference to "${deps}.${d}"`)
-              data[deps][d] = hosted.toString()
-            }
-          }
-        }
-      }
-    }
-  }
-
-  // TODO some of this is duplicated in other steps here, a future breaking change may be able to remove the duplicates involved in this step
-  if (steps.includes('normalizeData')) {
-    const { normalizeData } = require('./normalize-data.js')
-    normalizeData(data, changes)
-  }
-
-  // Warn if the bin references don't point to anything.  This might be better
-  // in normalize-package-data if it had access to the file path.
-  if (steps.includes('binRefs') && data.bin instanceof Object) {
-    for (const key in data.bin) {
-      try {
-        await fs.access(path.resolve(pkg.path, data.bin[key]))
-      } catch {
-        log.warn('package-json', pkgId, `No bin file found at ${data.bin[key]}`)
-        // XXX: should a future breaking change delete bin entries that cannot be accessed?
-      }
-    }
-  }
-}
-
-module.exports = normalize
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js
deleted file mode 100644
index d6c86ce388e6c..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/read-package.js
+++ /dev/null
@@ -1,39 +0,0 @@
-// This is JUST the code needed to open a package.json file and parse it.
-// It's isolated out so that code needing to parse a package.json file can do so in the same way as this module does, without needing to require the whole module, or needing to require the underlying parsing library.
-
-const { readFile } = require('fs/promises')
-const parseJSON = require('json-parse-even-better-errors')
-
-async function read (filename) {
-  try {
-    const data = await readFile(filename, 'utf8')
-    return data
-  } catch (err) {
-    err.message = `Could not read package.json: ${err}`
-    throw err
-  }
-}
-
-function parse (data) {
-  try {
-    const content = parseJSON(data)
-    return content
-  } catch (err) {
-    err.message = `Invalid package.json: ${err}`
-    throw err
-  }
-}
-
-// This is what most external libs will use.
-// PackageJson will call read and parse separately
-async function readPackage (filename) {
-  const data = await read(filename)
-  const content = parse(data)
-  return content
-}
-
-module.exports = {
-  read,
-  parse,
-  readPackage,
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js
deleted file mode 100644
index 0bd0d5199da58..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/sort.js
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * arbitrary sort order for package.json largely pulled from:
- * https://github.com/keithamus/sort-package-json/blob/main/defaultRules.md
- *
- * cross checked with:
- * https://github.com/npm/types/blob/main/types/index.d.ts#L104
- * https://docs.npmjs.com/cli/configuring-npm/package-json
- */
-function packageSort (json) {
-  const {
-    name,
-    version,
-    private: isPrivate,
-    description,
-    keywords,
-    homepage,
-    bugs,
-    repository,
-    funding,
-    license,
-    author,
-    maintainers,
-    contributors,
-    type,
-    imports,
-    exports,
-    main,
-    browser,
-    types,
-    bin,
-    man,
-    directories,
-    files,
-    workspaces,
-    scripts,
-    config,
-    dependencies,
-    devDependencies,
-    peerDependencies,
-    peerDependenciesMeta,
-    optionalDependencies,
-    bundledDependencies,
-    bundleDependencies,
-    engines,
-    os,
-    cpu,
-    publishConfig,
-    devEngines,
-    licenses,
-    overrides,
-    ...rest
-  } = json
-
-  return {
-    ...(typeof name !== 'undefined' ? { name } : {}),
-    ...(typeof version !== 'undefined' ? { version } : {}),
-    ...(typeof isPrivate !== 'undefined' ? { private: isPrivate } : {}),
-    ...(typeof description !== 'undefined' ? { description } : {}),
-    ...(typeof keywords !== 'undefined' ? { keywords } : {}),
-    ...(typeof homepage !== 'undefined' ? { homepage } : {}),
-    ...(typeof bugs !== 'undefined' ? { bugs } : {}),
-    ...(typeof repository !== 'undefined' ? { repository } : {}),
-    ...(typeof funding !== 'undefined' ? { funding } : {}),
-    ...(typeof license !== 'undefined' ? { license } : {}),
-    ...(typeof author !== 'undefined' ? { author } : {}),
-    ...(typeof maintainers !== 'undefined' ? { maintainers } : {}),
-    ...(typeof contributors !== 'undefined' ? { contributors } : {}),
-    ...(typeof type !== 'undefined' ? { type } : {}),
-    ...(typeof imports !== 'undefined' ? { imports } : {}),
-    ...(typeof exports !== 'undefined' ? { exports } : {}),
-    ...(typeof main !== 'undefined' ? { main } : {}),
-    ...(typeof browser !== 'undefined' ? { browser } : {}),
-    ...(typeof types !== 'undefined' ? { types } : {}),
-    ...(typeof bin !== 'undefined' ? { bin } : {}),
-    ...(typeof man !== 'undefined' ? { man } : {}),
-    ...(typeof directories !== 'undefined' ? { directories } : {}),
-    ...(typeof files !== 'undefined' ? { files } : {}),
-    ...(typeof workspaces !== 'undefined' ? { workspaces } : {}),
-    ...(typeof scripts !== 'undefined' ? { scripts } : {}),
-    ...(typeof config !== 'undefined' ? { config } : {}),
-    ...(typeof dependencies !== 'undefined' ? { dependencies } : {}),
-    ...(typeof devDependencies !== 'undefined' ? { devDependencies } : {}),
-    ...(typeof peerDependencies !== 'undefined' ? { peerDependencies } : {}),
-    ...(typeof peerDependenciesMeta !== 'undefined' ? { peerDependenciesMeta } : {}),
-    ...(typeof optionalDependencies !== 'undefined' ? { optionalDependencies } : {}),
-    ...(typeof bundledDependencies !== 'undefined' ? { bundledDependencies } : {}),
-    ...(typeof bundleDependencies !== 'undefined' ? { bundleDependencies } : {}),
-    ...(typeof engines !== 'undefined' ? { engines } : {}),
-    ...(typeof os !== 'undefined' ? { os } : {}),
-    ...(typeof cpu !== 'undefined' ? { cpu } : {}),
-    ...(typeof publishConfig !== 'undefined' ? { publishConfig } : {}),
-    ...(typeof devEngines !== 'undefined' ? { devEngines } : {}),
-    ...(typeof licenses !== 'undefined' ? { licenses } : {}),
-    ...(typeof overrides !== 'undefined' ? { overrides } : {}),
-    ...rest,
-  }
-}
-
-module.exports = {
-  packageSort,
-}
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js
deleted file mode 100644
index 7259949ab661d..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-dependencies.js
+++ /dev/null
@@ -1,75 +0,0 @@
-const depTypes = new Set([
-  'dependencies',
-  'optionalDependencies',
-  'devDependencies',
-  'peerDependencies',
-])
-
-// sort alphabetically all types of deps for a given package
-const orderDeps = (content) => {
-  for (const type of depTypes) {
-    if (content && content[type]) {
-      content[type] = Object.keys(content[type])
-        .sort((a, b) => a.localeCompare(b, 'en'))
-        .reduce((res, key) => {
-          res[key] = content[type][key]
-          return res
-        }, {})
-    }
-  }
-  return content
-}
-
-const updateDependencies = ({ content, originalContent }) => {
-  const pkg = orderDeps({
-    ...content,
-  })
-
-  // optionalDependencies don't need to be repeated in two places
-  if (pkg.dependencies) {
-    if (pkg.optionalDependencies) {
-      for (const name of Object.keys(pkg.optionalDependencies)) {
-        delete pkg.dependencies[name]
-      }
-    }
-  }
-
-  const result = { ...originalContent }
-
-  // loop through all types of dependencies and update package json pkg
-  for (const type of depTypes) {
-    if (pkg[type]) {
-      result[type] = pkg[type]
-    }
-
-    // prune empty type props from resulting object
-    const emptyDepType =
-      pkg[type]
-      && typeof pkg === 'object'
-      && Object.keys(pkg[type]).length === 0
-    if (emptyDepType) {
-      delete result[type]
-    }
-  }
-
-  // if original package.json had dep in peerDeps AND deps, preserve that.
-  const { dependencies: origProd, peerDependencies: origPeer } =
-    originalContent || {}
-  const { peerDependencies: newPeer } = result
-  if (origProd && origPeer && newPeer) {
-    // we have original prod/peer deps, and new peer deps
-    // copy over any that were in both in the original
-    for (const name of Object.keys(origPeer)) {
-      if (origProd[name] !== undefined && newPeer[name] !== undefined) {
-        result.dependencies = result.dependencies || {}
-        result.dependencies[name] = newPeer[name]
-      }
-    }
-  }
-
-  return result
-}
-
-updateDependencies.knownKeys = depTypes
-
-module.exports = updateDependencies
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js
deleted file mode 100644
index 30495e54cc3c7..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-scripts.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const updateScripts = ({ content, originalContent = {} }) => {
-  const newScripts = content.scripts
-
-  if (!newScripts) {
-    return originalContent
-  }
-
-  // validate scripts content being appended
-  const hasInvalidScripts = () =>
-    Object.entries(newScripts)
-      .some(([key, value]) =>
-        typeof key !== 'string' || typeof value !== 'string')
-  if (hasInvalidScripts()) {
-    throw Object.assign(
-      new TypeError(
-        'package.json scripts should be a key-value pair of strings.'),
-      { code: 'ESCRIPTSINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    scripts: {
-      ...newScripts,
-    },
-  }
-}
-
-module.exports = updateScripts
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js
deleted file mode 100644
index 04bf63230636f..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/lib/update-workspaces.js
+++ /dev/null
@@ -1,26 +0,0 @@
-const updateWorkspaces = ({ content, originalContent = {} }) => {
-  const newWorkspaces = content.workspaces
-
-  if (!newWorkspaces) {
-    return originalContent
-  }
-
-  // validate workspaces content being appended
-  const hasInvalidWorkspaces = () =>
-    newWorkspaces.some(w => !(typeof w === 'string'))
-  if (!newWorkspaces.length || hasInvalidWorkspaces()) {
-    throw Object.assign(
-      new TypeError('workspaces should be an array of strings.'),
-      { code: 'EWORKSPACESINVALID' }
-    )
-  }
-
-  return {
-    ...originalContent,
-    workspaces: [
-      ...newWorkspaces,
-    ],
-  }
-}
-
-module.exports = updateWorkspaces
diff --git a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json b/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json
deleted file mode 100644
index 263d67ff3bc5b..0000000000000
--- a/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "@npmcli/package-json",
-  "version": "6.2.0",
-  "description": "Programmatic API to update package.json",
-  "keywords": [
-    "npm",
-    "oss"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/package-json.git"
-  },
-  "license": "ISC",
-  "author": "GitHub Inc.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "snap": "tap",
-    "test": "tap",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "dependencies": {
-    "@npmcli/git": "^6.0.0",
-    "glob": "^10.2.2",
-    "hosted-git-info": "^8.0.0",
-    "json-parse-even-better-errors": "^4.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.5.3",
-    "validate-npm-package-license": "^3.0.4"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/template-oss": "4.23.6",
-    "read-package-json": "^7.0.0",
-    "read-package-json-fast": "^4.0.0",
-    "tap": "^16.0.1"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.6",
-    "publish": "true"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/run-script/package.json b/node_modules/@npmcli/run-script/package.json
index 6003a73943ecf..2873f7cbf91c5 100644
--- a/node_modules/@npmcli/run-script/package.json
+++ b/node_modules/@npmcli/run-script/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/run-script",
-  "version": "9.1.0",
+  "version": "10.0.0",
   "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
   "author": "GitHub Inc.",
   "license": "ISC",
@@ -16,13 +16,13 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.24.1",
+    "@npmcli/template-oss": "4.25.0",
     "spawk": "^1.8.1",
     "tap": "^16.0.1"
   },
   "dependencies": {
     "@npmcli/node-gyp": "^4.0.0",
-    "@npmcli/package-json": "^6.0.0",
+    "@npmcli/package-json": "^7.0.0",
     "@npmcli/promise-spawn": "^8.0.0",
     "node-gyp": "^11.0.0",
     "proc-log": "^5.0.0",
@@ -38,11 +38,11 @@
     "url": "git+https://github.com/npm/run-script.git"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.1",
+    "version": "4.25.0",
     "publish": "true"
   },
   "tap": {
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE b/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
deleted file mode 100644
index c36c40d4898d5..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/is-server-package.js
+++ /dev/null
@@ -1,11 +0,0 @@
-const { stat } = require('node:fs/promises')
-const { resolve } = require('node:path')
-
-module.exports = async path => {
-  try {
-    const st = await stat(resolve(path, 'server.js'))
-    return st.isFile()
-  } catch (er) {
-    return false
-  }
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
deleted file mode 100644
index 1c9f02c062f72..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/make-spawn-args.js
+++ /dev/null
@@ -1,53 +0,0 @@
-/* eslint camelcase: "off" */
-const setPATH = require('./set-path.js')
-const { resolve } = require('path')
-
-let npm_config_node_gyp
-
-const makeSpawnArgs = options => {
-  const {
-    args,
-    binPaths,
-    cmd,
-    env,
-    event,
-    nodeGyp,
-    path,
-    scriptShell = true,
-    stdio,
-    stdioString,
-  } = options
-
-  if (nodeGyp) {
-    // npm already pulled this from env and passes it in to options
-    npm_config_node_gyp = nodeGyp
-  } else if (env.npm_config_node_gyp) {
-    // legacy mode for standalone user
-    npm_config_node_gyp = env.npm_config_node_gyp
-  } else {
-    // default
-    npm_config_node_gyp = require.resolve('node-gyp/bin/node-gyp.js')
-  }
-
-  const spawnEnv = setPATH(path, binPaths, {
-    // we need to at least save the PATH environment var
-    ...process.env,
-    ...env,
-    npm_package_json: resolve(path, 'package.json'),
-    npm_lifecycle_event: event,
-    npm_lifecycle_script: cmd,
-    npm_config_node_gyp,
-  })
-
-  const spawnOpts = {
-    env: spawnEnv,
-    stdioString,
-    stdio,
-    cwd: path,
-    shell: scriptShell,
-  }
-
-  return [cmd, args, spawnOpts]
-}
-
-module.exports = makeSpawnArgs
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
deleted file mode 100755
index 5bec64d961a3a..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env sh
-node "$npm_config_node_gyp" "$@"
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd b/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
deleted file mode 100755
index 4c6987ac9868b..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/node-gyp-bin/node-gyp.cmd
+++ /dev/null
@@ -1 +0,0 @@
-@node "%npm_config_node_gyp%" %*
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
deleted file mode 100644
index 612f850fb076c..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/package-envs.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const packageEnvs = (vals, prefix, env = {}) => {
-  for (const [key, val] of Object.entries(vals)) {
-    if (val === undefined) {
-      continue
-    } else if (val === null || val === false) {
-      env[`${prefix}${key}`] = ''
-    } else if (Array.isArray(val)) {
-      val.forEach((item, index) => {
-        packageEnvs({ [`${key}_${index}`]: item }, `${prefix}`, env)
-      })
-    } else if (typeof val === 'object') {
-      packageEnvs(val, `${prefix}${key}_`, env)
-    } else {
-      env[`${prefix}${key}`] = String(val)
-    }
-  }
-  return env
-}
-
-// https://github.com/npm/rfcs/pull/183 defines which fields we put into the environment
-module.exports = pkg => {
-  return packageEnvs({
-    name: pkg.name,
-    version: pkg.version,
-    config: pkg.config,
-    engines: pkg.engines,
-    bin: pkg.bin,
-  }, 'npm_package_')
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
deleted file mode 100644
index 161caebb98d97..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script-pkg.js
+++ /dev/null
@@ -1,114 +0,0 @@
-const makeSpawnArgs = require('./make-spawn-args.js')
-const promiseSpawn = require('@npmcli/promise-spawn')
-const packageEnvs = require('./package-envs.js')
-const { isNodeGypPackage, defaultGypInstallScript } = require('@npmcli/node-gyp')
-const signalManager = require('./signal-manager.js')
-const isServerPackage = require('./is-server-package.js')
-
-const runScriptPkg = async options => {
-  const {
-    args = [],
-    binPaths = false,
-    env = {},
-    event,
-    nodeGyp,
-    path,
-    pkg,
-    scriptShell,
-    // how long to wait for a process.kill signal
-    // only exposed here so that we can make the test go a bit faster.
-    signalTimeout = 500,
-    stdio = 'pipe',
-    stdioString,
-  } = options
-
-  const { scripts = {}, gypfile } = pkg
-  let cmd = null
-  if (options.cmd) {
-    cmd = options.cmd
-  } else if (pkg.scripts && pkg.scripts[event]) {
-    cmd = pkg.scripts[event]
-  } else if (
-    // If there is no preinstall or install script, default to rebuilding node-gyp packages.
-    event === 'install' &&
-    !scripts.install &&
-    !scripts.preinstall &&
-    gypfile !== false &&
-    await isNodeGypPackage(path)
-  ) {
-    cmd = defaultGypInstallScript
-  } else if (event === 'start' && await isServerPackage(path)) {
-    cmd = 'node server.js'
-  }
-
-  if (!cmd) {
-    return { code: 0, signal: null }
-  }
-
-  let inputEnd = () => {}
-  if (stdio === 'inherit') {
-    let banner
-    if (pkg._id) {
-      banner = `\n> ${pkg._id} ${event}\n`
-    } else {
-      banner = `\n> ${event}\n`
-    }
-    banner += `> ${cmd.trim().replace(/\n/g, '\n> ')}`
-    if (args.length) {
-      banner += ` ${args.join(' ')}`
-    }
-    banner += '\n'
-    const { output, input } = require('proc-log')
-    output.standard(banner)
-    inputEnd = input.start()
-  }
-
-  const [spawnShell, spawnArgs, spawnOpts] = makeSpawnArgs({
-    args,
-    binPaths,
-    cmd,
-    env: { ...env, ...packageEnvs(pkg) },
-    event,
-    nodeGyp,
-    path,
-    scriptShell,
-    stdio,
-    stdioString,
-  })
-
-  const p = promiseSpawn(spawnShell, spawnArgs, spawnOpts, {
-    event,
-    script: cmd,
-    pkgid: pkg._id,
-    path,
-  })
-
-  if (stdio === 'inherit') {
-    signalManager.add(p.process)
-  }
-
-  if (p.stdin) {
-    p.stdin.end()
-  }
-
-  return p.catch(er => {
-    const { signal } = er
-    // coverage disabled because win32 never emits signals
-    /* istanbul ignore next */
-    if (stdio === 'inherit' && signal) {
-      // by the time we reach here, the child has already exited. we send the
-      // signal back to ourselves again so that npm will exit with the same
-      // status as the child
-      process.kill(process.pid, signal)
-
-      // just in case we don't die, reject after 500ms
-      // this also keeps the node process open long enough to actually
-      // get the signal, rather than terminating gracefully.
-      return new Promise((res, rej) => setTimeout(() => rej(er), signalTimeout))
-    } else {
-      throw er
-    }
-  }).finally(inputEnd)
-}
-
-module.exports = runScriptPkg
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
deleted file mode 100644
index b00304c8d6e7f..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/run-script.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const PackageJson = require('@npmcli/package-json')
-const runScriptPkg = require('./run-script-pkg.js')
-const validateOptions = require('./validate-options.js')
-const isServerPackage = require('./is-server-package.js')
-
-const runScript = async options => {
-  validateOptions(options)
-  if (options.pkg) {
-    return runScriptPkg(options)
-  }
-  const { content: pkg } = await PackageJson.normalize(options.path)
-  return runScriptPkg({ ...options, pkg })
-}
-
-module.exports = Object.assign(runScript, { isServerPackage })
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
deleted file mode 100644
index c59c270d9969a..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/set-path.js
+++ /dev/null
@@ -1,45 +0,0 @@
-const { resolve, dirname, delimiter } = require('path')
-// the path here is relative, even though it does not need to be
-// in order to make the posix tests pass in windows
-const nodeGypPath = resolve(__dirname, '../lib/node-gyp-bin')
-
-// Windows typically calls its PATH environ 'Path', but this is not
-// guaranteed, nor is it guaranteed to be the only one.  Merge them
-// all together in the order they appear in the object.
-const setPATH = (projectPath, binPaths, env) => {
-  const PATH = Object.keys(env).filter(p => /^path$/i.test(p) && env[p])
-    .map(p => env[p].split(delimiter))
-    .reduce((set, p) => set.concat(p.filter(concatted => !set.includes(concatted))), [])
-    .join(delimiter)
-
-  const pathArr = []
-  if (binPaths) {
-    pathArr.push(...binPaths)
-  }
-  // unshift the ./node_modules/.bin from every folder
-  // walk up until dirname() does nothing, at the root
-  // XXX we should specify a cwd that we don't go above
-  let p = projectPath
-  let pp
-  do {
-    pathArr.push(resolve(p, 'node_modules', '.bin'))
-    pp = p
-    p = dirname(p)
-  } while (p !== pp)
-  pathArr.push(nodeGypPath, PATH)
-
-  const pathVal = pathArr.join(delimiter)
-
-  // XXX include the node-gyp-bin path somehow?  Probably better for
-  // npm or arborist or whoever to just provide that by putting it in
-  // the PATH environ, since that's preserved anyway.
-  for (const key of Object.keys(env)) {
-    if (/^path$/i.test(key)) {
-      env[key] = pathVal
-    }
-  }
-
-  return env
-}
-
-module.exports = setPATH
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
deleted file mode 100644
index a099a4af2b9be..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/signal-manager.js
+++ /dev/null
@@ -1,50 +0,0 @@
-const runningProcs = new Set()
-let handlersInstalled = false
-
-const forwardedSignals = [
-  'SIGINT',
-  'SIGTERM',
-]
-
-// no-op, this is so receiving the signal doesn't cause us to exit immediately
-// instead, we exit after all children have exited when we re-send the signal
-// to ourselves. see the catch handler at the bottom of run-script-pkg.js
-const handleSignal = signal => {
-  for (const proc of runningProcs) {
-    proc.kill(signal)
-  }
-}
-
-const setupListeners = () => {
-  for (const signal of forwardedSignals) {
-    process.on(signal, handleSignal)
-  }
-  handlersInstalled = true
-}
-
-const cleanupListeners = () => {
-  if (runningProcs.size === 0) {
-    for (const signal of forwardedSignals) {
-      process.removeListener(signal, handleSignal)
-    }
-    handlersInstalled = false
-  }
-}
-
-const add = proc => {
-  runningProcs.add(proc)
-  if (!handlersInstalled) {
-    setupListeners()
-  }
-
-  proc.once('exit', () => {
-    runningProcs.delete(proc)
-    cleanupListeners()
-  })
-}
-
-module.exports = {
-  add,
-  handleSignal,
-  forwardedSignals,
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js b/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
deleted file mode 100644
index 8d855916ecd15..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/lib/validate-options.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const validateOptions = options => {
-  if (typeof options !== 'object' || !options) {
-    throw new TypeError('invalid options object provided to runScript')
-  }
-
-  const {
-    event,
-    path,
-    scriptShell,
-    env = {},
-    stdio = 'pipe',
-    args = [],
-    cmd,
-  } = options
-
-  if (!event || typeof event !== 'string') {
-    throw new TypeError('valid event not provided to runScript')
-  }
-  if (!path || typeof path !== 'string') {
-    throw new TypeError('valid path not provided to runScript')
-  }
-  if (scriptShell !== undefined && typeof scriptShell !== 'string') {
-    throw new TypeError('invalid scriptShell option provided to runScript')
-  }
-  if (typeof env !== 'object' || !env) {
-    throw new TypeError('invalid env option provided to runScript')
-  }
-  if (typeof stdio !== 'string' && !Array.isArray(stdio)) {
-    throw new TypeError('invalid stdio option provided to runScript')
-  }
-  if (!Array.isArray(args) || args.some(a => typeof a !== 'string')) {
-    throw new TypeError('invalid args option provided to runScript')
-  }
-  if (cmd !== undefined && typeof cmd !== 'string') {
-    throw new TypeError('invalid cmd option provided to runScript')
-  }
-}
-
-module.exports = validateOptions
diff --git a/node_modules/pacote/node_modules/@npmcli/run-script/package.json b/node_modules/pacote/node_modules/@npmcli/run-script/package.json
deleted file mode 100644
index 2873f7cbf91c5..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/run-script/package.json
+++ /dev/null
@@ -1,54 +0,0 @@
-{
-  "name": "@npmcli/run-script",
-  "version": "10.0.0",
-  "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)",
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "test": "tap",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "spawk": "^1.8.1",
-    "tap": "^16.0.1"
-  },
-  "dependencies": {
-    "@npmcli/node-gyp": "^4.0.0",
-    "@npmcli/package-json": "^7.0.0",
-    "@npmcli/promise-spawn": "^8.0.0",
-    "node-gyp": "^11.0.0",
-    "proc-log": "^5.0.0",
-    "which": "^5.0.0"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "main": "lib/run-script.js",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/run-script.git"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index f810694a88ff3..9b7ab8ca534b5 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -92,7 +92,7 @@
         "@npmcli/package-json": "^7.0.1",
         "@npmcli/promise-spawn": "^8.0.2",
         "@npmcli/redact": "^3.2.2",
-        "@npmcli/run-script": "^9.1.0",
+        "@npmcli/run-script": "^10.0.0",
         "@sigstore/tuf": "^3.1.1",
         "abbrev": "^3.0.1",
         "archy": "~1.0.0",
@@ -3423,7 +3423,6 @@
       "version": "6.0.3",
       "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
       "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
-      "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/promise-spawn": "^8.0.0",
@@ -3802,40 +3801,21 @@
       }
     },
     "node_modules/@npmcli/run-script": {
-      "version": "9.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-9.1.0.tgz",
-      "integrity": "sha512-aoNSbxtkePXUlbZB+anS1LqsJdctG5n3UVhfU47+CDdwMi6uNTBMF9gPcQRnqghQd2FGzcwwIFBruFMxjhBewg==",
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.0.tgz",
+      "integrity": "sha512-vaQj4nccJbAslopIvd49pQH2NhUp7G9pY4byUtmwhe37ZZuubGrx0eB9hW2F37uVNRuDDK6byFGXF+7JCuMSZg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/node-gyp": "^4.0.0",
-        "@npmcli/package-json": "^6.0.0",
+        "@npmcli/package-json": "^7.0.0",
         "@npmcli/promise-spawn": "^8.0.0",
         "node-gyp": "^11.0.0",
         "proc-log": "^5.0.0",
         "which": "^5.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/run-script/node_modules/@npmcli/package-json": {
-      "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/git": "^6.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^8.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.5.3",
-        "validate-npm-package-license": "^3.0.4"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@npmcli/smoke-tests": {
@@ -13453,24 +13433,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/@npmcli/run-script": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.0.tgz",
-      "integrity": "sha512-vaQj4nccJbAslopIvd49pQH2NhUp7G9pY4byUtmwhe37ZZuubGrx0eB9hW2F37uVNRuDDK6byFGXF+7JCuMSZg==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/node-gyp": "^4.0.0",
-        "@npmcli/package-json": "^7.0.0",
-        "@npmcli/promise-spawn": "^8.0.0",
-        "node-gyp": "^11.0.0",
-        "proc-log": "^5.0.0",
-        "which": "^5.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/@sigstore/bundle": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz",
@@ -19663,7 +19625,7 @@
         "@npmcli/package-json": "^7.0.0",
         "@npmcli/query": "^4.0.0",
         "@npmcli/redact": "^3.0.0",
-        "@npmcli/run-script": "^9.0.1",
+        "@npmcli/run-script": "^10.0.0",
         "bin-links": "^5.0.0",
         "cacache": "^19.0.1",
         "common-ancestor-path": "^1.0.1",
@@ -19774,7 +19736,7 @@
       "dependencies": {
         "@npmcli/arborist": "^9.1.4",
         "@npmcli/package-json": "^7.0.0",
-        "@npmcli/run-script": "^9.0.1",
+        "@npmcli/run-script": "^10.0.0",
         "ci-info": "^4.0.0",
         "npm-package-arg": "^12.0.0",
         "pacote": "^21.0.2",
@@ -19837,7 +19799,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/arborist": "^9.1.4",
-        "@npmcli/run-script": "^9.0.1",
+        "@npmcli/run-script": "^10.0.0",
         "npm-package-arg": "^12.0.0",
         "pacote": "^21.0.2"
       },
@@ -19914,7 +19876,7 @@
       "license": "ISC",
       "dependencies": {
         "@npmcli/git": "^6.0.1",
-        "@npmcli/run-script": "^9.0.1",
+        "@npmcli/run-script": "^10.0.0",
         "json-parse-even-better-errors": "^4.0.0",
         "proc-log": "^5.0.0",
         "semver": "^7.3.7"
diff --git a/package.json b/package.json
index a0e3eb626d4c2..dc712e13a6022 100644
--- a/package.json
+++ b/package.json
@@ -59,7 +59,7 @@
     "@npmcli/package-json": "^7.0.1",
     "@npmcli/promise-spawn": "^8.0.2",
     "@npmcli/redact": "^3.2.2",
-    "@npmcli/run-script": "^9.1.0",
+    "@npmcli/run-script": "^10.0.0",
     "@sigstore/tuf": "^3.1.1",
     "abbrev": "^3.0.1",
     "archy": "~1.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 940dad5cc7948..d59dc679f162f 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -13,7 +13,7 @@
     "@npmcli/package-json": "^7.0.0",
     "@npmcli/query": "^4.0.0",
     "@npmcli/redact": "^3.0.0",
-    "@npmcli/run-script": "^9.0.1",
+    "@npmcli/run-script": "^10.0.0",
     "bin-links": "^5.0.0",
     "cacache": "^19.0.1",
     "common-ancestor-path": "^1.0.1",
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index ca9e4f2c9c7aa..687b02f7dc126 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -62,7 +62,7 @@
   "dependencies": {
     "@npmcli/arborist": "^9.1.4",
     "@npmcli/package-json": "^7.0.0",
-    "@npmcli/run-script": "^9.0.1",
+    "@npmcli/run-script": "^10.0.0",
     "ci-info": "^4.0.0",
     "npm-package-arg": "^12.0.0",
     "pacote": "^21.0.2",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index f2cfda0d76e79..5fd5f945f2a39 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -38,7 +38,7 @@
   "homepage": "https://npmjs.com/package/libnpmpack",
   "dependencies": {
     "@npmcli/arborist": "^9.1.4",
-    "@npmcli/run-script": "^9.0.1",
+    "@npmcli/run-script": "^10.0.0",
     "npm-package-arg": "^12.0.0",
     "pacote": "^21.0.2"
   },
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 2ceebf979aafa..6d6c774570644 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -39,7 +39,7 @@
   },
   "dependencies": {
     "@npmcli/git": "^6.0.1",
-    "@npmcli/run-script": "^9.0.1",
+    "@npmcli/run-script": "^10.0.0",
     "json-parse-even-better-errors": "^4.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.3.7"

From da81a3702fdf7ea2dc7223fc6ece4c7a19e32ad1 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:28:44 -0700
Subject: [PATCH 10/63] deps: cacache@20.0.1

---
 node_modules/.gitignore                       |   32 +-
 .../node_modules/cacache/LICENSE.md           |    0
 .../node_modules/cacache/lib/content/path.js  |    0
 .../node_modules/cacache/lib/content/read.js  |    0
 .../node_modules/cacache/lib/content/rm.js    |    0
 .../node_modules/cacache/lib/content/write.js |    0
 .../node_modules/cacache/lib/entry-index.js   |    0
 .../node_modules/cacache/lib/get.js           |    0
 .../node_modules/cacache/lib/index.js         |    0
 .../node_modules/cacache/lib/memoization.js   |    0
 .../node_modules/cacache/lib/put.js           |    0
 .../node_modules/cacache/lib/rm.js            |    0
 .../node_modules/cacache/lib/util/glob.js     |    0
 .../cacache/lib/util/hash-to-segments.js      |    0
 .../node_modules/cacache/lib/util/tmp.js      |    0
 .../node_modules/cacache/lib/verify.js        |    0
 .../node_modules/cacache/package.json         |   13 +-
 .../node_modules/chownr/LICENSE.md            |    0
 .../chownr/dist/commonjs/index.js             |    0
 .../chownr/dist/commonjs/package.json         |    0
 .../node_modules/chownr/dist/esm/index.js     |    0
 .../node_modules/chownr/dist/esm/package.json |    0
 .../node_modules/chownr/package.json          |    0
 .../node_modules/minizlib/LICENSE             |    0
 .../minizlib/dist/commonjs/constants.js       |    0
 .../minizlib/dist/commonjs/index.js           |    0
 .../minizlib/dist/commonjs/package.json       |    0
 .../minizlib/dist/esm/constants.js            |    0
 .../node_modules/minizlib/dist/esm/index.js   |    0
 .../minizlib/dist/esm/package.json            |    0
 .../node_modules/minizlib/package.json        |    0
 .../node_modules/mkdirp/LICENSE               |    0
 .../node_modules/mkdirp/dist/cjs/package.json |    0
 .../node_modules/mkdirp/dist/cjs/src/bin.js   |    0
 .../mkdirp/dist/cjs/src/find-made.js          |    0
 .../node_modules/mkdirp/dist/cjs/src/index.js |    0
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |    0
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |    0
 .../mkdirp/dist/cjs/src/opts-arg.js           |    0
 .../mkdirp/dist/cjs/src/path-arg.js           |    0
 .../mkdirp/dist/cjs/src/use-native.js         |    0
 .../node_modules/mkdirp/dist/mjs/find-made.js |    0
 .../node_modules/mkdirp/dist/mjs/index.js     |    0
 .../mkdirp/dist/mjs/mkdirp-manual.js          |    0
 .../mkdirp/dist/mjs/mkdirp-native.js          |    0
 .../node_modules/mkdirp/dist/mjs/opts-arg.js  |    0
 .../node_modules/mkdirp/dist/mjs/package.json |    0
 .../node_modules/mkdirp/dist/mjs/path-arg.js  |    0
 .../mkdirp/dist/mjs/use-native.js             |    0
 .../node_modules/mkdirp/package.json          |    0
 .../node_modules/tar/LICENSE                  |    0
 .../node_modules/tar/dist/commonjs/create.js  |    0
 .../tar/dist/commonjs/cwd-error.js            |    0
 .../node_modules/tar/dist/commonjs/extract.js |    0
 .../tar/dist/commonjs/get-write-flag.js       |    0
 .../node_modules/tar/dist/commonjs/header.js  |    0
 .../node_modules/tar/dist/commonjs/index.js   |    0
 .../tar/dist/commonjs/large-numbers.js        |    0
 .../node_modules/tar/dist/commonjs/list.js    |    0
 .../tar/dist/commonjs/make-command.js         |    0
 .../node_modules/tar/dist/commonjs/mkdir.js   |    0
 .../tar/dist/commonjs/mode-fix.js             |    0
 .../tar/dist/commonjs/normalize-unicode.js    |    0
 .../dist/commonjs/normalize-windows-path.js   |    0
 .../node_modules/tar/dist/commonjs/options.js |    0
 .../node_modules/tar/dist/commonjs/pack.js    |    0
 .../tar/dist/commonjs/package.json            |    0
 .../node_modules/tar/dist/commonjs/parse.js   |    0
 .../tar/dist/commonjs/path-reservations.js    |    0
 .../node_modules/tar/dist/commonjs/pax.js     |    0
 .../tar/dist/commonjs/read-entry.js           |    0
 .../node_modules/tar/dist/commonjs/replace.js |    0
 .../tar/dist/commonjs/strip-absolute-path.js  |    0
 .../dist/commonjs/strip-trailing-slashes.js   |    0
 .../tar/dist/commonjs/symlink-error.js        |    0
 .../node_modules/tar/dist/commonjs/types.js   |    0
 .../node_modules/tar/dist/commonjs/unpack.js  |    0
 .../node_modules/tar/dist/commonjs/update.js  |    0
 .../tar/dist/commonjs/warn-method.js          |    0
 .../tar/dist/commonjs/winchars.js             |    0
 .../tar/dist/commonjs/write-entry.js          |    0
 .../node_modules/tar/dist/esm/create.js       |    0
 .../node_modules/tar/dist/esm/cwd-error.js    |    0
 .../node_modules/tar/dist/esm/extract.js      |    0
 .../tar/dist/esm/get-write-flag.js            |    0
 .../node_modules/tar/dist/esm/header.js       |    0
 .../node_modules/tar/dist/esm/index.js        |    0
 .../tar/dist/esm/large-numbers.js             |    0
 .../node_modules/tar/dist/esm/list.js         |    0
 .../node_modules/tar/dist/esm/make-command.js |    0
 .../node_modules/tar/dist/esm/mkdir.js        |    0
 .../node_modules/tar/dist/esm/mode-fix.js     |    0
 .../tar/dist/esm/normalize-unicode.js         |    0
 .../tar/dist/esm/normalize-windows-path.js    |    0
 .../node_modules/tar/dist/esm/options.js      |    0
 .../node_modules/tar/dist/esm/pack.js         |    0
 .../node_modules/tar/dist/esm/package.json    |    0
 .../node_modules/tar/dist/esm/parse.js        |    0
 .../tar/dist/esm/path-reservations.js         |    0
 .../node_modules/tar/dist/esm/pax.js          |    0
 .../node_modules/tar/dist/esm/read-entry.js   |    0
 .../node_modules/tar/dist/esm/replace.js      |    0
 .../tar/dist/esm/strip-absolute-path.js       |    0
 .../tar/dist/esm/strip-trailing-slashes.js    |    0
 .../tar/dist/esm/symlink-error.js             |    0
 .../node_modules/tar/dist/esm/types.js        |    0
 .../node_modules/tar/dist/esm/unpack.js       |    0
 .../node_modules/tar/dist/esm/update.js       |    0
 .../node_modules/tar/dist/esm/warn-method.js  |    0
 .../node_modules/tar/dist/esm/winchars.js     |    0
 .../node_modules/tar/dist/esm/write-entry.js  |    0
 .../node_modules/tar/package.json             |    0
 .../node_modules/yallist/LICENSE.md           |    0
 .../yallist/dist/commonjs/index.js            |    0
 .../yallist/dist/commonjs/package.json        |    0
 .../node_modules/yallist/dist/esm/index.js    |    0
 .../yallist/dist/esm/package.json             |    0
 .../node_modules/yallist/package.json         |    0
 .../node_modules/glob/LICENSE                 |    0
 .../node_modules/glob/dist/commonjs/glob.js   |    0
 .../glob/dist/commonjs/has-magic.js           |    0
 .../node_modules/glob/dist/commonjs/ignore.js |    0
 .../node_modules/glob/dist/commonjs/index.js  |    0
 .../glob}/dist/commonjs/package.json          |    0
 .../glob/dist/commonjs/pattern.js             |    0
 .../glob/dist/commonjs/processor.js           |    0
 .../node_modules/glob/dist/commonjs/walker.js |    0
 .../node_modules/glob/dist/esm/bin.d.mts      |    0
 .../node_modules/glob/dist/esm/bin.mjs        |    0
 .../node_modules/glob/dist/esm/glob.js        |    0
 .../node_modules/glob/dist/esm/has-magic.js   |    0
 .../node_modules/glob/dist/esm/ignore.js      |    0
 .../node_modules/glob/dist/esm/index.js       |    0
 .../node_modules/glob}/dist/esm/package.json  |    0
 .../node_modules/glob/dist/esm/pattern.js     |    0
 .../node_modules/glob/dist/esm/processor.js   |    0
 .../node_modules/glob/dist/esm/walker.js      |    0
 .../node_modules/glob/package.json            |    0
 .../node_modules/jackspeak/LICENSE.md         |    0
 .../jackspeak/dist/commonjs/index.js          |    0
 .../jackspeak}/dist/commonjs/package.json     |    0
 .../node_modules/jackspeak/dist/esm/index.js  |    0
 .../jackspeak}/dist/esm/package.json          |    0
 .../node_modules/jackspeak/package.json       |    0
 .../node_modules/lru-cache}/LICENSE           |    2 +-
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache}/dist/commonjs/package.json     |    0
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ++++++++++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache}/dist/esm/package.json          |    0
 .../node_modules/lru-cache/package.json       |  113 ++
 .../node_modules/minimatch/LICENSE            |    0
 .../dist/commonjs/assert-valid-pattern.js     |    0
 .../minimatch/dist/commonjs/ast.js            |    0
 .../dist/commonjs/brace-expressions.js        |    0
 .../minimatch/dist/commonjs/escape.js         |    0
 .../minimatch/dist/commonjs/index.js          |    0
 .../minimatch/dist/commonjs/package.json      |    0
 .../minimatch/dist/commonjs/unescape.js       |    0
 .../dist/esm/assert-valid-pattern.js          |    0
 .../node_modules/minimatch/dist/esm/ast.js    |    0
 .../minimatch/dist/esm/brace-expressions.js   |    0
 .../node_modules/minimatch/dist/esm/escape.js |    0
 .../node_modules/minimatch/dist/esm/index.js  |    0
 .../minimatch/dist/esm/package.json           |    0
 .../minimatch/dist/esm/unescape.js            |    0
 .../node_modules/minimatch/package.json       |    0
 .../node_modules/path-scurry/LICENSE.md       |    0
 .../path-scurry/dist/commonjs/index.js        |    0
 .../path-scurry/dist/commonjs/package.json    |    0
 .../path-scurry/dist/esm/index.js             |    0
 .../path-scurry/dist/esm/package.json         |    0
 .../node_modules/path-scurry/package.json     |    0
 node_modules/cacache/package.json             |   13 +-
 .../node_modules/cacache/LICENSE.md           |   16 +
 .../node_modules/cacache/lib/content/path.js  |   29 +
 .../node_modules/cacache/lib/content/read.js  |  165 ++
 .../node_modules/cacache/lib/content/rm.js    |   18 +
 .../node_modules/cacache/lib/content/write.js |  206 +++
 .../node_modules/cacache/lib/entry-index.js   |  336 ++++
 .../node_modules/cacache/lib/get.js           |  170 ++
 .../node_modules/cacache/lib/index.js         |   42 +
 .../node_modules/cacache/lib/memoization.js   |   72 +
 .../node_modules/cacache/lib/put.js           |   80 +
 .../node_modules/cacache/lib/rm.js            |   31 +
 .../node_modules/cacache/lib/util/glob.js     |    7 +
 .../cacache/lib/util/hash-to-segments.js      |    7 +
 .../node_modules/cacache/lib/util/tmp.js      |   26 +
 .../node_modules/cacache/lib/verify.js        |  258 +++
 .../node_modules/cacache/package.json         |   83 +
 .../node_modules/chownr/LICENSE.md            |   63 +
 .../chownr/dist/commonjs/index.js             |   93 +
 .../chownr/dist/commonjs/package.json         |    3 +
 .../node_modules/chownr/dist/esm/index.js     |   85 +
 .../node_modules/chownr/dist/esm/package.json |    3 +
 .../node_modules/chownr/package.json          |   69 +
 .../node_modules/minizlib/LICENSE             |   26 +
 .../minizlib/dist/commonjs/constants.js       |  123 ++
 .../minizlib/dist/commonjs/index.js           |  392 +++++
 .../minizlib/dist/commonjs/package.json       |    3 +
 .../minizlib/dist/esm/constants.js            |  117 ++
 .../node_modules/minizlib/dist/esm/index.js   |  340 ++++
 .../minizlib/dist/esm/package.json            |    3 +
 .../node_modules/minizlib}/package.json       |  106 +-
 .../node_modules/mkdirp/LICENSE               |   21 +
 .../node_modules/mkdirp/dist/cjs/package.json |   91 +
 .../node_modules/mkdirp/dist/cjs/src/bin.js   |   80 +
 .../mkdirp/dist/cjs/src/find-made.js          |   35 +
 .../node_modules/mkdirp/dist/cjs/src/index.js |   53 +
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |   79 +
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |   50 +
 .../mkdirp/dist/cjs/src/opts-arg.js           |   38 +
 .../mkdirp/dist/cjs/src/path-arg.js           |   28 +
 .../mkdirp/dist/cjs/src/use-native.js         |   17 +
 .../node_modules/mkdirp/dist/mjs/find-made.js |   30 +
 .../node_modules/mkdirp/dist/mjs/index.js     |   43 +
 .../mkdirp/dist/mjs/mkdirp-manual.js          |   75 +
 .../mkdirp/dist/mjs/mkdirp-native.js          |   46 +
 .../node_modules/mkdirp/dist/mjs/opts-arg.js  |   34 +
 .../node_modules/mkdirp/dist/mjs/package.json |    3 +
 .../node_modules/mkdirp/dist/mjs/path-arg.js  |   24 +
 .../mkdirp/dist/mjs/use-native.js             |   14 +
 .../node_modules/mkdirp/package.json          |   91 +
 .../node_modules/tar/LICENSE                  |   15 +
 .../node_modules/tar/dist/commonjs/create.js  |   83 +
 .../tar/dist/commonjs/cwd-error.js            |   18 +
 .../node_modules/tar/dist/commonjs/extract.js |   78 +
 .../tar/dist/commonjs/get-write-flag.js       |   29 +
 .../node_modules/tar/dist/commonjs/header.js  |  306 ++++
 .../node_modules/tar/dist/commonjs/index.js   |   54 +
 .../tar/dist/commonjs/large-numbers.js        |   99 ++
 .../node_modules/tar/dist/commonjs/list.js    |  136 ++
 .../tar/dist/commonjs/make-command.js         |   61 +
 .../node_modules/tar/dist/commonjs/mkdir.js   |  209 +++
 .../tar/dist/commonjs/mode-fix.js             |   29 +
 .../tar/dist/commonjs/normalize-unicode.js    |   17 +
 .../dist/commonjs/normalize-windows-path.js   |   12 +
 .../node_modules/tar/dist/commonjs/options.js |   66 +
 .../node_modules/tar/dist/commonjs/pack.js    |  477 +++++
 .../tar/dist/commonjs/package.json            |    3 +
 .../node_modules/tar/dist/commonjs/parse.js   |  599 +++++++
 .../tar/dist/commonjs/path-reservations.js    |  170 ++
 .../node_modules/tar/dist/commonjs/pax.js     |  158 ++
 .../tar/dist/commonjs/read-entry.js           |  140 ++
 .../node_modules/tar/dist/commonjs/replace.js |  231 +++
 .../tar/dist/commonjs/strip-absolute-path.js  |   29 +
 .../dist/commonjs/strip-trailing-slashes.js   |   18 +
 .../tar/dist/commonjs/symlink-error.js        |   19 +
 .../node_modules/tar/dist/commonjs/types.js   |   50 +
 .../node_modules/tar/dist/commonjs/unpack.js  |  919 ++++++++++
 .../node_modules/tar/dist/commonjs/update.js  |   33 +
 .../tar/dist/commonjs/warn-method.js          |   31 +
 .../tar/dist/commonjs/winchars.js             |   14 +
 .../tar/dist/commonjs/write-entry.js          |  689 ++++++++
 .../node_modules/tar/dist/esm/create.js       |   77 +
 .../node_modules/tar/dist/esm/cwd-error.js    |   14 +
 .../node_modules/tar/dist/esm/extract.js      |   49 +
 .../tar/dist/esm/get-write-flag.js            |   23 +
 .../node_modules/tar/dist/esm/header.js       |  279 +++
 .../node_modules/tar/dist/esm/index.js        |   20 +
 .../tar/dist/esm/large-numbers.js             |   94 +
 .../node_modules/tar/dist/esm/list.js         |  106 ++
 .../node_modules/tar/dist/esm/make-command.js |   57 +
 .../node_modules/tar/dist/esm/mkdir.js        |  201 +++
 .../node_modules/tar/dist/esm/mode-fix.js     |   25 +
 .../tar/dist/esm/normalize-unicode.js         |   13 +
 .../tar/dist/esm/normalize-windows-path.js    |    9 +
 .../node_modules/tar/dist/esm/options.js      |   54 +
 .../node_modules/tar/dist/esm/pack.js         |  445 +++++
 .../node_modules/tar/dist/esm/package.json    |    3 +
 .../node_modules/tar/dist/esm/parse.js        |  595 +++++++
 .../tar/dist/esm/path-reservations.js         |  166 ++
 .../node_modules/tar/dist/esm/pax.js          |  154 ++
 .../node_modules/tar/dist/esm/read-entry.js   |  136 ++
 .../node_modules/tar/dist/esm/replace.js      |  225 +++
 .../tar/dist/esm/strip-absolute-path.js       |   25 +
 .../tar/dist/esm/strip-trailing-slashes.js    |   14 +
 .../tar/dist/esm/symlink-error.js             |   15 +
 .../node_modules/tar/dist/esm/types.js        |   45 +
 .../node_modules/tar/dist/esm/unpack.js       |  888 ++++++++++
 .../node_modules/tar/dist/esm/update.js       |   30 +
 .../node_modules/tar/dist/esm/warn-method.js  |   27 +
 .../node_modules/tar/dist/esm/winchars.js     |    9 +
 .../node_modules/tar/dist/esm/write-entry.js  |  657 +++++++
 .../node_modules/tar/package.json             |  325 ++++
 .../node_modules/yallist/LICENSE.md           |   63 +
 .../yallist/dist/commonjs/index.js            |  384 ++++
 .../yallist/dist/commonjs/package.json        |    3 +
 .../node_modules/yallist/dist/esm/index.js    |  379 ++++
 .../yallist/dist/esm/package.json             |    3 +
 .../node_modules/yallist/package.json         |   68 +
 .../minimatch/dist/commonjs/index.js          | 1017 -----------
 .../node_modules/minimatch/dist/esm/index.js  | 1001 -----------
 .../dist/commonjs/assert-valid-pattern.js     |   14 -
 .../minimatch/dist/commonjs/ast.js            |  592 -------
 .../dist/commonjs/brace-expressions.js        |  152 --
 .../minimatch/dist/commonjs/escape.js         |   22 -
 .../minimatch/dist/commonjs/unescape.js       |   24 -
 .../dist/esm/assert-valid-pattern.js          |   10 -
 .../node_modules/minimatch/dist/esm/ast.js    |  588 -------
 .../minimatch/dist/esm/brace-expressions.js   |  148 --
 .../node_modules/minimatch/dist/esm/escape.js |   18 -
 .../minimatch/dist/esm/unescape.js            |   20 -
 package-lock.json                             |  403 +++--
 package.json                                  |    2 +-
 workspaces/arborist/package.json              |    2 +-
 307 files changed, 17759 insertions(+), 3851 deletions(-)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/LICENSE.md (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/content/path.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/content/read.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/content/rm.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/content/write.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/entry-index.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/get.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/index.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/memoization.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/put.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/rm.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/util/glob.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/util/hash-to-segments.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/util/tmp.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/lib/verify.js (100%)
 rename node_modules/{pacote => @npmcli/metavuln-calculator}/node_modules/cacache/package.json (90%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/chownr/LICENSE.md (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/chownr/dist/commonjs/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/chownr/dist/commonjs/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/chownr/dist/esm/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/chownr/dist/esm/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/chownr/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/LICENSE (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/dist/commonjs/constants.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/dist/commonjs/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/dist/commonjs/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/dist/esm/constants.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/dist/esm/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/dist/esm/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/minizlib/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/LICENSE (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/bin.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/find-made.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/opts-arg.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/path-arg.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/cjs/src/use-native.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/find-made.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/mkdirp-manual.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/mkdirp-native.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/opts-arg.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/path-arg.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/dist/mjs/use-native.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/mkdirp/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/LICENSE (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/create.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/cwd-error.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/extract.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/get-write-flag.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/header.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/large-numbers.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/list.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/make-command.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/mkdir.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/mode-fix.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/normalize-unicode.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/normalize-windows-path.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/options.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/pack.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/parse.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/path-reservations.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/pax.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/read-entry.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/replace.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/strip-absolute-path.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/strip-trailing-slashes.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/symlink-error.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/types.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/unpack.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/update.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/warn-method.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/winchars.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/commonjs/write-entry.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/create.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/cwd-error.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/extract.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/get-write-flag.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/header.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/large-numbers.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/list.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/make-command.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/mkdir.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/mode-fix.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/normalize-unicode.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/normalize-windows-path.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/options.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/pack.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/parse.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/path-reservations.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/pax.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/read-entry.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/replace.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/strip-absolute-path.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/strip-trailing-slashes.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/symlink-error.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/types.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/unpack.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/update.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/warn-method.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/winchars.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/dist/esm/write-entry.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/tar/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/yallist/LICENSE.md (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/yallist/dist/commonjs/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/yallist/dist/commonjs/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/yallist/dist/esm/index.js (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/yallist/dist/esm/package.json (100%)
 rename node_modules/{cacache => @npmcli/metavuln-calculator}/node_modules/yallist/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/LICENSE (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/glob.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/has-magic.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/ignore.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/index.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models/node_modules/minimatch => cacache/node_modules/glob}/dist/commonjs/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/pattern.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/processor.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/commonjs/walker.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/bin.d.mts (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/bin.mjs (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/glob.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/has-magic.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/ignore.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/index.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models/node_modules/minimatch => cacache/node_modules/glob}/dist/esm/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/pattern.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/processor.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/dist/esm/walker.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/glob/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/jackspeak/LICENSE.md (100%)
 rename node_modules/{pacote => cacache}/node_modules/jackspeak/dist/commonjs/index.js (100%)
 rename node_modules/{pacote/node_modules/glob => cacache/node_modules/jackspeak}/dist/commonjs/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/jackspeak/dist/esm/index.js (100%)
 rename node_modules/{pacote/node_modules/glob => cacache/node_modules/jackspeak}/dist/esm/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/jackspeak/package.json (100%)
 rename node_modules/{pacote/node_modules/minimatch => cacache/node_modules/lru-cache}/LICENSE (92%)
 create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js
 rename node_modules/{pacote/node_modules/jackspeak => cacache/node_modules/lru-cache}/dist/commonjs/package.json (100%)
 create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js
 rename node_modules/{pacote/node_modules/jackspeak => cacache/node_modules/lru-cache}/dist/esm/package.json (100%)
 create mode 100644 node_modules/cacache/node_modules/lru-cache/package.json
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/LICENSE (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/commonjs/ast.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/commonjs/brace-expressions.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/commonjs/escape.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/minimatch/dist/commonjs/index.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/minimatch/dist/commonjs/package.json (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/commonjs/unescape.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/esm/assert-valid-pattern.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/esm/ast.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/esm/brace-expressions.js (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/esm/escape.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/minimatch/dist/esm/index.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/minimatch/dist/esm/package.json (100%)
 rename node_modules/{pacote/node_modules/@tufjs/models => cacache}/node_modules/minimatch/dist/esm/unescape.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/minimatch/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/path-scurry/LICENSE.md (100%)
 rename node_modules/{pacote => cacache}/node_modules/path-scurry/dist/commonjs/index.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/path-scurry/dist/commonjs/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/path-scurry/dist/esm/index.js (100%)
 rename node_modules/{pacote => cacache}/node_modules/path-scurry/dist/esm/package.json (100%)
 rename node_modules/{pacote => cacache}/node_modules/path-scurry/package.json (100%)
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/cacache/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md
 create mode 100644 node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/chownr/dist/esm/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/chownr/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/LICENSE
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json
 rename node_modules/{pacote/node_modules/@tufjs/models/node_modules/minimatch => make-fetch-happen/node_modules/minizlib}/package.json (56%)
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json
 create mode 100755 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/LICENSE
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/tar/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md
 create mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json
 create mode 100644 node_modules/make-fetch-happen/node_modules/yallist/package.json
 delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/ast.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/escape.js
 delete mode 100644 node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 03122be7ec29b..7fedfe7f3b4bc 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -31,6 +31,14 @@
 !/@npmcli/map-workspaces/node_modules/minimatch
 !/@npmcli/map-workspaces/node_modules/path-scurry
 !/@npmcli/metavuln-calculator
+!/@npmcli/metavuln-calculator/node_modules/
+/@npmcli/metavuln-calculator/node_modules/*
+!/@npmcli/metavuln-calculator/node_modules/cacache
+!/@npmcli/metavuln-calculator/node_modules/chownr
+!/@npmcli/metavuln-calculator/node_modules/minizlib
+!/@npmcli/metavuln-calculator/node_modules/mkdirp
+!/@npmcli/metavuln-calculator/node_modules/tar
+!/@npmcli/metavuln-calculator/node_modules/yallist
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
@@ -79,11 +87,11 @@
 !/cacache
 !/cacache/node_modules/
 /cacache/node_modules/*
-!/cacache/node_modules/chownr
-!/cacache/node_modules/minizlib
-!/cacache/node_modules/mkdirp
-!/cacache/node_modules/tar
-!/cacache/node_modules/yallist
+!/cacache/node_modules/glob
+!/cacache/node_modules/jackspeak
+!/cacache/node_modules/lru-cache
+!/cacache/node_modules/minimatch
+!/cacache/node_modules/path-scurry
 !/chalk
 !/chownr
 !/ci-info
@@ -140,7 +148,13 @@
 !/make-fetch-happen
 !/make-fetch-happen/node_modules/
 /make-fetch-happen/node_modules/*
+!/make-fetch-happen/node_modules/cacache
+!/make-fetch-happen/node_modules/chownr
+!/make-fetch-happen/node_modules/minizlib
+!/make-fetch-happen/node_modules/mkdirp
 !/make-fetch-happen/node_modules/negotiator
+!/make-fetch-happen/node_modules/tar
+!/make-fetch-happen/node_modules/yallist
 !/minimatch
 !/minipass-collect
 !/minipass-fetch
@@ -214,24 +228,16 @@
 !/pacote/node_modules/@tufjs/
 /pacote/node_modules/@tufjs/*
 !/pacote/node_modules/@tufjs/models
-!/pacote/node_modules/@tufjs/models/node_modules/
-/pacote/node_modules/@tufjs/models/node_modules/*
-!/pacote/node_modules/@tufjs/models/node_modules/minimatch
-!/pacote/node_modules/cacache
 !/pacote/node_modules/chownr
-!/pacote/node_modules/glob
 !/pacote/node_modules/hosted-git-info
-!/pacote/node_modules/jackspeak
 !/pacote/node_modules/lru-cache
 !/pacote/node_modules/make-fetch-happen
-!/pacote/node_modules/minimatch
 !/pacote/node_modules/minizlib
 !/pacote/node_modules/mkdirp
 !/pacote/node_modules/negotiator
 !/pacote/node_modules/npm-package-arg
 !/pacote/node_modules/npm-pick-manifest
 !/pacote/node_modules/npm-registry-fetch
-!/pacote/node_modules/path-scurry
 !/pacote/node_modules/sigstore
 !/pacote/node_modules/tar
 !/pacote/node_modules/tuf-js
diff --git a/node_modules/pacote/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/LICENSE.md
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/content/path.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/content/read.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/content/rm.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/content/write.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/entry-index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/get.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/get.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/memoization.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/put.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/put.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/rm.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/glob.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/util/glob.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/util/hash-to-segments.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/util/tmp.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
diff --git a/node_modules/pacote/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
similarity index 100%
rename from node_modules/pacote/node_modules/cacache/lib/verify.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
diff --git a/node_modules/pacote/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
similarity index 90%
rename from node_modules/pacote/node_modules/cacache/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
index 6eec0a8375e5c..ebb0f3f8ed410 100644
--- a/node_modules/pacote/node_modules/cacache/package.json
+++ b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
 {
   "name": "cacache",
-  "version": "20.0.1",
+  "version": "19.0.1",
   "cache-version": {
     "content": "2",
     "index": "5"
@@ -48,28 +48,29 @@
   "dependencies": {
     "@npmcli/fs": "^4.0.0",
     "fs-minipass": "^3.0.0",
-    "glob": "^11.0.3",
-    "lru-cache": "^11.1.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^10.0.1",
     "minipass": "^7.0.3",
     "minipass-collect": "^2.0.1",
     "minipass-flush": "^1.0.5",
     "minipass-pipeline": "^1.2.4",
     "p-map": "^7.0.2",
     "ssri": "^12.0.0",
+    "tar": "^7.4.3",
     "unique-filename": "^4.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.23.3",
     "tap": "^16.0.0"
   },
   "engines": {
-    "node": "^20.17.0 || >=22.9.0"
+    "node": "^18.17.0 || >=20.5.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "windowsCI": false,
-    "version": "4.25.0",
+    "version": "4.23.3",
     "publish": "true"
   },
   "author": "GitHub Inc.",
diff --git a/node_modules/cacache/node_modules/chownr/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/LICENSE.md
similarity index 100%
rename from node_modules/cacache/node_modules/chownr/LICENSE.md
rename to node_modules/@npmcli/metavuln-calculator/node_modules/chownr/LICENSE.md
diff --git a/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/chownr/dist/commonjs/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/index.js
diff --git a/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/chownr/dist/commonjs/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/chownr/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/chownr/dist/esm/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/index.js
diff --git a/node_modules/cacache/node_modules/chownr/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/chownr/dist/esm/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/package.json
diff --git a/node_modules/cacache/node_modules/chownr/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/chownr/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/chownr/package.json
diff --git a/node_modules/cacache/node_modules/minizlib/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/LICENSE
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/LICENSE
diff --git a/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/constants.js
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/constants.js
diff --git a/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/index.js
diff --git a/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/constants.js
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/dist/esm/constants.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/constants.js
diff --git a/node_modules/cacache/node_modules/minizlib/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/dist/esm/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/index.js
diff --git a/node_modules/cacache/node_modules/minizlib/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/dist/esm/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/package.json
diff --git a/node_modules/cacache/node_modules/minizlib/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/minizlib/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/package.json
diff --git a/node_modules/cacache/node_modules/mkdirp/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/LICENSE
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/LICENSE
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/package.json
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/bin.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/bin.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/find-made.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/find-made.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/index.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/opts-arg.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/opts-arg.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/path-arg.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/path-arg.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/use-native.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/use-native.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/find-made.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/find-made.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/index.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-native.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-native.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/opts-arg.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/opts-arg.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/package.json
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/path-arg.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/path-arg.js
diff --git a/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/use-native.js
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/use-native.js
diff --git a/node_modules/cacache/node_modules/mkdirp/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/mkdirp/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/package.json
diff --git a/node_modules/cacache/node_modules/tar/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/LICENSE
similarity index 100%
rename from node_modules/cacache/node_modules/tar/LICENSE
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/LICENSE
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/create.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/create.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/create.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/create.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/cwd-error.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/cwd-error.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/extract.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/extract.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/extract.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/get-write-flag.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/get-write-flag.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/header.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/header.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/header.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/header.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/index.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/large-numbers.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/large-numbers.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/list.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/list.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/list.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/list.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/make-command.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/make-command.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mkdir.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mkdir.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mode-fix.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mode-fix.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-unicode.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-unicode.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-windows-path.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-windows-path.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/options.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/options.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/options.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pack.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/pack.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pack.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/parse.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/parse.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/parse.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/path-reservations.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/path-reservations.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pax.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/pax.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pax.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/read-entry.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/read-entry.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/replace.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/replace.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/replace.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-absolute-path.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-absolute-path.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/symlink-error.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/symlink-error.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/types.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/types.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/types.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/unpack.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/unpack.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/update.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/update.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/update.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/update.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/warn-method.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/warn-method.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/winchars.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/winchars.js
diff --git a/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/write-entry.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/write-entry.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/create.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/create.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/create.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/create.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/cwd-error.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/cwd-error.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/extract.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/extract.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/extract.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/extract.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/get-write-flag.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/get-write-flag.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/header.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/header.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/header.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/header.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/index.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/large-numbers.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/large-numbers.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/list.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/list.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/list.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/list.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/make-command.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/make-command.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/make-command.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/make-command.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mkdir.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/mkdir.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mkdir.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mode-fix.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mode-fix.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-unicode.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-unicode.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-windows-path.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-windows-path.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/options.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/options.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/options.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/pack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pack.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/pack.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pack.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/package.json
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/parse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/parse.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/parse.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/parse.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/path-reservations.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/path-reservations.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/pax.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pax.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/pax.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pax.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/read-entry.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/read-entry.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/read-entry.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/replace.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/replace.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/replace.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/replace.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-absolute-path.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-absolute-path.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-trailing-slashes.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-trailing-slashes.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/symlink-error.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/symlink-error.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/types.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/types.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/types.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/unpack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/unpack.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/unpack.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/unpack.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/update.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/update.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/update.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/update.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/warn-method.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/warn-method.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/warn-method.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/winchars.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/winchars.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/winchars.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/winchars.js
diff --git a/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/write-entry.js
similarity index 100%
rename from node_modules/cacache/node_modules/tar/dist/esm/write-entry.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/write-entry.js
diff --git a/node_modules/cacache/node_modules/tar/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/tar/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/tar/package.json
diff --git a/node_modules/cacache/node_modules/yallist/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/LICENSE.md
similarity index 100%
rename from node_modules/cacache/node_modules/yallist/LICENSE.md
rename to node_modules/@npmcli/metavuln-calculator/node_modules/yallist/LICENSE.md
diff --git a/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/yallist/dist/commonjs/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/index.js
diff --git a/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/yallist/dist/commonjs/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/yallist/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/index.js
similarity index 100%
rename from node_modules/cacache/node_modules/yallist/dist/esm/index.js
rename to node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/index.js
diff --git a/node_modules/cacache/node_modules/yallist/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/yallist/dist/esm/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/package.json
diff --git a/node_modules/cacache/node_modules/yallist/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/package.json
similarity index 100%
rename from node_modules/cacache/node_modules/yallist/package.json
rename to node_modules/@npmcli/metavuln-calculator/node_modules/yallist/package.json
diff --git a/node_modules/pacote/node_modules/glob/LICENSE b/node_modules/cacache/node_modules/glob/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/glob/LICENSE
rename to node_modules/cacache/node_modules/glob/LICENSE
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/glob.js b/node_modules/cacache/node_modules/glob/dist/commonjs/glob.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/glob.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/glob.js
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/cacache/node_modules/glob/dist/commonjs/has-magic.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/has-magic.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/has-magic.js
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/ignore.js b/node_modules/cacache/node_modules/glob/dist/commonjs/ignore.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/ignore.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/ignore.js
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/index.js b/node_modules/cacache/node_modules/glob/dist/commonjs/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/index.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/index.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json b/node_modules/cacache/node_modules/glob/dist/commonjs/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json
rename to node_modules/cacache/node_modules/glob/dist/commonjs/package.json
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/pattern.js b/node_modules/cacache/node_modules/glob/dist/commonjs/pattern.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/pattern.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/pattern.js
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/processor.js b/node_modules/cacache/node_modules/glob/dist/commonjs/processor.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/processor.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/processor.js
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/walker.js b/node_modules/cacache/node_modules/glob/dist/commonjs/walker.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/walker.js
rename to node_modules/cacache/node_modules/glob/dist/commonjs/walker.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/bin.d.mts b/node_modules/cacache/node_modules/glob/dist/esm/bin.d.mts
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/bin.d.mts
rename to node_modules/cacache/node_modules/glob/dist/esm/bin.d.mts
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/bin.mjs b/node_modules/cacache/node_modules/glob/dist/esm/bin.mjs
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/bin.mjs
rename to node_modules/cacache/node_modules/glob/dist/esm/bin.mjs
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/glob.js b/node_modules/cacache/node_modules/glob/dist/esm/glob.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/glob.js
rename to node_modules/cacache/node_modules/glob/dist/esm/glob.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/has-magic.js b/node_modules/cacache/node_modules/glob/dist/esm/has-magic.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/has-magic.js
rename to node_modules/cacache/node_modules/glob/dist/esm/has-magic.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/ignore.js b/node_modules/cacache/node_modules/glob/dist/esm/ignore.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/ignore.js
rename to node_modules/cacache/node_modules/glob/dist/esm/ignore.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/index.js b/node_modules/cacache/node_modules/glob/dist/esm/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/index.js
rename to node_modules/cacache/node_modules/glob/dist/esm/index.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json b/node_modules/cacache/node_modules/glob/dist/esm/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json
rename to node_modules/cacache/node_modules/glob/dist/esm/package.json
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/pattern.js b/node_modules/cacache/node_modules/glob/dist/esm/pattern.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/pattern.js
rename to node_modules/cacache/node_modules/glob/dist/esm/pattern.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/processor.js b/node_modules/cacache/node_modules/glob/dist/esm/processor.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/processor.js
rename to node_modules/cacache/node_modules/glob/dist/esm/processor.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/walker.js b/node_modules/cacache/node_modules/glob/dist/esm/walker.js
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/walker.js
rename to node_modules/cacache/node_modules/glob/dist/esm/walker.js
diff --git a/node_modules/pacote/node_modules/glob/package.json b/node_modules/cacache/node_modules/glob/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/glob/package.json
rename to node_modules/cacache/node_modules/glob/package.json
diff --git a/node_modules/pacote/node_modules/jackspeak/LICENSE.md b/node_modules/cacache/node_modules/jackspeak/LICENSE.md
similarity index 100%
rename from node_modules/pacote/node_modules/jackspeak/LICENSE.md
rename to node_modules/cacache/node_modules/jackspeak/LICENSE.md
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/cacache/node_modules/jackspeak/dist/commonjs/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/jackspeak/dist/commonjs/index.js
rename to node_modules/cacache/node_modules/jackspeak/dist/commonjs/index.js
diff --git a/node_modules/pacote/node_modules/glob/dist/commonjs/package.json b/node_modules/cacache/node_modules/jackspeak/dist/commonjs/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/commonjs/package.json
rename to node_modules/cacache/node_modules/jackspeak/dist/commonjs/package.json
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/esm/index.js b/node_modules/cacache/node_modules/jackspeak/dist/esm/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/jackspeak/dist/esm/index.js
rename to node_modules/cacache/node_modules/jackspeak/dist/esm/index.js
diff --git a/node_modules/pacote/node_modules/glob/dist/esm/package.json b/node_modules/cacache/node_modules/jackspeak/dist/esm/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/glob/dist/esm/package.json
rename to node_modules/cacache/node_modules/jackspeak/dist/esm/package.json
diff --git a/node_modules/pacote/node_modules/jackspeak/package.json b/node_modules/cacache/node_modules/jackspeak/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/jackspeak/package.json
rename to node_modules/cacache/node_modules/jackspeak/package.json
diff --git a/node_modules/pacote/node_modules/minimatch/LICENSE b/node_modules/cacache/node_modules/lru-cache/LICENSE
similarity index 92%
rename from node_modules/pacote/node_modules/minimatch/LICENSE
rename to node_modules/cacache/node_modules/lru-cache/LICENSE
index 1493534e60dce..f785757cd63f8 100644
--- a/node_modules/pacote/node_modules/minimatch/LICENSE
+++ b/node_modules/cacache/node_modules/lru-cache/LICENSE
@@ -1,6 +1,6 @@
 The ISC License
 
-Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Will return false if the item is stale, even though it is technically
+     * in the cache.
+     *
+     * Check if a key is in the cache, without updating the recency of
+     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
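+    // Illustrative fetch() sketch (not part of the upstream lru-cache source;
+    // `fetchUser` is a hypothetical async loader):
+    //
+    //   const users = new LRUCache({
+    //     max: 100,
+    //     fetchMethod: async (key, staleValue, { signal }) => fetchUser(key, { signal }),
+    //   })
+    //   const status = {}
+    //   await users.fetch('id-1', { status })   // status.fetch === 'miss', fetchMethod runs
+    //   await users.fetch('id-1', { status })   // status.fetch === 'hit', served from cache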
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
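+    // Illustrative memo() sketch (not part of the upstream lru-cache source;
+    // `computeExpensive` is a hypothetical synchronous function):
+    //
+    //   const reports = new LRUCache({
+    //     max: 50,
+    //     memoMethod: (key, oldValue, { context }) => computeExpensive(key, context),
+    //   })
+    //   reports.memo('summary', { context: { year: 2025 } })   // computed, then cached
+    //   reports.memo('summary')                                // returned from cache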
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a stale-while-refetching situation.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
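+    // Illustrative staleness sketch for get() (not part of the upstream
+    // lru-cache source; timings are approximate):
+    //
+    //   const c = new LRUCache({ max: 10, ttl: 100, allowStale: true })
+    //   c.set('a', 1)
+    //   c.get('a')     // 1, and 'a' becomes the most recently used entry
+    //   // ...more than 100ms later the entry is stale:
+    //   c.get('a')     // 1 returned one last time, then the entry is deleted
+    //   c.get('a')     // undefined -- it was removed by the stale get above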
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
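+    // Illustrative recency sketch (not part of the upstream lru-cache source);
+    // every hit runs through #moveToTail above, so iteration order tracks recency:
+    //
+    //   const c = new LRUCache({ max: 3 })
+    //   c.set('a', 1); c.set('b', 2); c.set('c', 3)
+    //   c.get('a')                    // 'a' is moved to the tail (most recently used)
+    //   const order = [...c.keys()]   // ['a', 'c', 'b'], most to least recent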
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
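+    // Illustrative delete() sketch (not part of the upstream lru-cache source;
+    // the dispose callback is only an example):
+    //
+    //   const c = new LRUCache({
+    //     max: 5,
+    //     dispose: (value, key, reason) => console.log('disposed', key, reason),
+    //   })
+    //   c.set('a', 1)
+    //   c.delete('a')   // true  -- logs: disposed a delete
+    //   c.delete('a')   // false -- nothing left to remove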
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/jackspeak/dist/commonjs/package.json
rename to node_modules/cacache/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/esm/index.js b/node_modules/cacache/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
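+// Illustrative results of the helper above (not part of the upstream source):
+//
+//   getUintArray(200)      // -> Uint8Array   (200 <= 2^8)
+//   getUintArray(70_000)   // -> Uint32Array  (too large for Uint16Array)
+//   getUintArray(3.5)      // -> null         (not a positive integer)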
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
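+    // Illustrative constructor sketches (not part of the upstream lru-cache
+    // source; the sizeCalculation shown assumes string values):
+    //
+    //   new LRUCache({ max: 500 })                                       // bound by entry count
+    //   new LRUCache({ maxSize: 1024, sizeCalculation: s => s.length })  // bound by total size
+    //   new LRUCache({ ttl: 60_000, ttlAutopurge: true })                // bound by time
+    //   new LRUCache({})   // throws: at least one of max, maxSize, or ttl is required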
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to the ttlResolution window so we're
+        // not hitting that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
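+    // Illustrative TTL sketch (not part of the upstream lru-cache source;
+    // timings are approximate):
+    //
+    //   const c = new LRUCache({ max: 10, ttl: 1000 })
+    //   c.set('a', 1)
+    //   c.set('b', 2, { ttl: 50 })     // per-entry TTL override
+    //   c.getRemainingTTL('a')         // roughly 1000 (ms remaining)
+    //   c.getRemainingTTL('missing')   // 0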
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
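+    // Illustrative size-tracking sketch (not part of the upstream lru-cache
+    // source; assumes string values so `s.length` is a sensible size):
+    //
+    //   const c = new LRUCache({ maxSize: 100, sizeCalculation: s => s.length })
+    //   c.set('a', 'x'.repeat(60))
+    //   c.set('b', 'y'.repeat(60))   // evicts 'a' to stay within maxSize
+    //   c.calculatedSize             // 60
+    //   c.set('big', 'z'.repeat(200))
+    //   c.has('big')                 // false -- exceeds maxEntrySize (defaults to maxSize)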
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
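+    // Illustrative iteration sketch (not part of the upstream lru-cache source):
+    //
+    //   const c = new LRUCache({ max: 3 })
+    //   c.set('a', 1); c.set('b', 2); c.set('c', 3)
+    //   const keys = [...c.keys()]       // ['c', 'b', 'a'], most to least recent
+    //   const oldest = [...c.rvalues()]  // [1, 2, 3], least to most recent
+    //   for (const [k, v] of c) { /* same order as c.entries() */ }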
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
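+    // Illustrative find()/forEach() sketch (not part of the upstream lru-cache source):
+    //
+    //   const c = new LRUCache({ max: 10 })
+    //   c.set('a', { n: 1 }); c.set('b', { n: 2 })
+    //   c.find(v => v.n > 1)                        // { n: 2 } (found via get(), so recency updates)
+    //   c.forEach((v, k) => console.log(k, v.n))    // logs: b 2, then a 1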
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
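+    // Usage sketch (illustrative only; assumes a TTL-enabled cache such as
+    // `new LRUCache({ max: 100, ttl: 1000 })`):
+    //
+    //   const removedAny = cache.purgeStale()   // true if any expired entry was deleted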
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
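+    // Usage sketch (illustrative only; `cache` is an assumed LRUCache instance):
+    //
+    //   const meta = cache.info('a')
+    //   // => undefined if 'a' is not cached; otherwise { value } plus ttl/start/size
+    //   //    when the cache tracks them; a negative ttl means the entry has expired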
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
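+    // Usage sketch (illustrative only): dump()/load() round-trip a cache, e.g. across
+    // processes, for JSON-serializable keys and values; `src` and `dst` are assumed
+    // LRUCache instances created with the same options:
+    //
+    //   const snapshot = JSON.stringify(src.dump())
+    //   dst.load(JSON.parse(snapshot))   // clears dst, then restores entries with adjusted TTL starts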
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}; `undefined` is never stored in the cache.
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
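+    // Usage sketch (illustrative only; `cache` is an assumed LRUCache instance):
+    //
+    //   cache.set('a', 1)                     // add or update; returns the cache for chaining
+    //   cache.set('b', 2, { ttl: 60000 })     // per-entry TTL override
+    //   cache.set('a', undefined)             // alias for cache.delete('a')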
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
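+    // Usage sketch (illustrative only; `cache` is an assumed LRUCache instance):
+    //
+    //   const oldest = cache.pop()   // evicts and returns the least recently used value,
+    //                                // or undefined when the cache is empty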
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of
+     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
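+    // Usage sketch (illustrative only; `cache` is an assumed LRUCache instance):
+    //
+    //   const status = {}
+    //   cache.has('a', { status })   // false for both missing and stale entries;
+    //   status.has                   // 'hit', 'stale', or 'miss' disambiguates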
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
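+    // Usage sketch (illustrative only; `cache` is an assumed LRUCache instance):
+    //
+    //   cache.peek('a')                         // read without touching recency
+    //   cache.peek('a', { allowStale: true })   // also return an expired-but-present value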
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
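+    // Usage sketch (illustrative only; assumes a cache constructed with a caller-supplied
+    // async loader, e.g. `new LRUCache({ max: 100, fetchMethod: async (key) => loadUpstream(key) })`
+    // where loadUpstream is a hypothetical placeholder):
+    //
+    //   const v = await cache.fetch('a')                              // hit, in-flight, or background fetch
+    //   const fresh = await cache.fetch('a', { forceRefresh: true })  // re-fetch even on a fresh hit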
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
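+    // Usage sketch (illustrative only; assumes a cache constructed with a caller-supplied
+    // memoMethod, e.g. `new LRUCache({ max: 100, memoMethod: (key) => compute(key) })`
+    // where compute is a hypothetical placeholder):
+    //
+    //   cache.memo('a')                          // computes once, then serves the cached value
+    //   cache.memo('a', { forceRefresh: true })  // recomputes even if 'a' is cached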
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
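+    // Usage sketch (illustrative only; `cache` is an assumed LRUCache instance):
+    //
+    //   cache.get('a')                         // value or undefined; updates recency
+    //   cache.get('a', { allowStale: true })   // may return a stale value instead of undefined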
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/pacote/node_modules/jackspeak/dist/esm/package.json b/node_modules/cacache/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/jackspeak/dist/esm/package.json
rename to node_modules/cacache/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/cacache/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/cacache/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/LICENSE b/node_modules/cacache/node_modules/minimatch/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/LICENSE
rename to node_modules/cacache/node_modules/minimatch/LICENSE
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/ast.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/ast.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/brace-expressions.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/brace-expressions.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/escape.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/escape.js
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/index.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/minimatch/dist/commonjs/index.js
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/index.js
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/package.json b/node_modules/cacache/node_modules/minimatch/dist/commonjs/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/minimatch/dist/commonjs/package.json
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/package.json
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/unescape.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js
rename to node_modules/cacache/node_modules/minimatch/dist/commonjs/unescape.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/cacache/node_modules/minimatch/dist/esm/assert-valid-pattern.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js
rename to node_modules/cacache/node_modules/minimatch/dist/esm/assert-valid-pattern.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js b/node_modules/cacache/node_modules/minimatch/dist/esm/ast.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js
rename to node_modules/cacache/node_modules/minimatch/dist/esm/ast.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/cacache/node_modules/minimatch/dist/esm/brace-expressions.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js
rename to node_modules/cacache/node_modules/minimatch/dist/esm/brace-expressions.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js b/node_modules/cacache/node_modules/minimatch/dist/esm/escape.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js
rename to node_modules/cacache/node_modules/minimatch/dist/esm/escape.js
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/index.js b/node_modules/cacache/node_modules/minimatch/dist/esm/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/minimatch/dist/esm/index.js
rename to node_modules/cacache/node_modules/minimatch/dist/esm/index.js
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/package.json b/node_modules/cacache/node_modules/minimatch/dist/esm/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/minimatch/dist/esm/package.json
rename to node_modules/cacache/node_modules/minimatch/dist/esm/package.json
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js b/node_modules/cacache/node_modules/minimatch/dist/esm/unescape.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js
rename to node_modules/cacache/node_modules/minimatch/dist/esm/unescape.js
diff --git a/node_modules/pacote/node_modules/minimatch/package.json b/node_modules/cacache/node_modules/minimatch/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/minimatch/package.json
rename to node_modules/cacache/node_modules/minimatch/package.json
diff --git a/node_modules/pacote/node_modules/path-scurry/LICENSE.md b/node_modules/cacache/node_modules/path-scurry/LICENSE.md
similarity index 100%
rename from node_modules/pacote/node_modules/path-scurry/LICENSE.md
rename to node_modules/cacache/node_modules/path-scurry/LICENSE.md
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/cacache/node_modules/path-scurry/dist/commonjs/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/path-scurry/dist/commonjs/index.js
rename to node_modules/cacache/node_modules/path-scurry/dist/commonjs/index.js
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/cacache/node_modules/path-scurry/dist/commonjs/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/path-scurry/dist/commonjs/package.json
rename to node_modules/cacache/node_modules/path-scurry/dist/commonjs/package.json
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/esm/index.js b/node_modules/cacache/node_modules/path-scurry/dist/esm/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/path-scurry/dist/esm/index.js
rename to node_modules/cacache/node_modules/path-scurry/dist/esm/index.js
diff --git a/node_modules/pacote/node_modules/path-scurry/dist/esm/package.json b/node_modules/cacache/node_modules/path-scurry/dist/esm/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/path-scurry/dist/esm/package.json
rename to node_modules/cacache/node_modules/path-scurry/dist/esm/package.json
diff --git a/node_modules/pacote/node_modules/path-scurry/package.json b/node_modules/cacache/node_modules/path-scurry/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/path-scurry/package.json
rename to node_modules/cacache/node_modules/path-scurry/package.json
diff --git a/node_modules/cacache/package.json b/node_modules/cacache/package.json
index ebb0f3f8ed410..6eec0a8375e5c 100644
--- a/node_modules/cacache/package.json
+++ b/node_modules/cacache/package.json
@@ -1,6 +1,6 @@
 {
   "name": "cacache",
-  "version": "19.0.1",
+  "version": "20.0.1",
   "cache-version": {
     "content": "2",
     "index": "5"
@@ -48,29 +48,28 @@
   "dependencies": {
     "@npmcli/fs": "^4.0.0",
     "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^10.0.1",
+    "glob": "^11.0.3",
+    "lru-cache": "^11.1.0",
     "minipass": "^7.0.3",
     "minipass-collect": "^2.0.1",
     "minipass-flush": "^1.0.5",
     "minipass-pipeline": "^1.2.4",
     "p-map": "^7.0.2",
     "ssri": "^12.0.0",
-    "tar": "^7.4.3",
     "unique-filename": "^4.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.25.0",
     "tap": "^16.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "windowsCI": false,
-    "version": "4.23.3",
+    "version": "4.25.0",
     "publish": "true"
   },
   "author": "GitHub Inc.",
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
new file mode 100644
index 0000000000000..8d28acf866d93
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
new file mode 100644
index 0000000000000..ad5a76a4f73f2
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
@@ -0,0 +1,29 @@
+'use strict'
+
+const contentVer = require('../../package.json')['cache-version'].content
+const hashToSegments = require('../util/hash-to-segments')
+const path = require('path')
+const ssri = require('ssri')
+
+// Current format of content file path:
+//
+// sha512-BaSE64Hex= ->
+// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
+//
+module.exports = contentPath
+
+function contentPath (cache, integrity) {
+  const sri = ssri.parse(integrity, { single: true })
+  // contentPath is the *strongest* algo given
+  return path.join(
+    contentDir(cache),
+    sri.algorithm,
+    ...hashToSegments(sri.hexDigest())
+  )
+}
+
+module.exports.contentDir = contentDir
+
+function contentDir (cache) {
+  return path.join(cache, `content-v${contentVer}`)
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
new file mode 100644
index 0000000000000..5f6192c3cec56
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
@@ -0,0 +1,165 @@
+'use strict'
+
+const fs = require('fs/promises')
+const fsm = require('fs-minipass')
+const ssri = require('ssri')
+const contentPath = require('./path')
+const Pipeline = require('minipass-pipeline')
+
+module.exports = read
+
+const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
+async function read (cache, integrity, opts = {}) {
+  const { size } = opts
+  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+    // get size
+    const stat = size ? { size } : await fs.stat(cpath)
+    return { stat, cpath, sri }
+  })
+
+  if (stat.size > MAX_SINGLE_READ_SIZE) {
+    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
+  }
+
+  const data = await fs.readFile(cpath, { encoding: null })
+
+  if (stat.size !== data.length) {
+    throw sizeError(stat.size, data.length)
+  }
+
+  if (!ssri.checkData(data, sri)) {
+    throw integrityError(sri, cpath)
+  }
+
+  return data
+}
+
+const readPipeline = (cpath, size, sri, stream) => {
+  stream.push(
+    new fsm.ReadStream(cpath, {
+      size,
+      readSize: MAX_SINGLE_READ_SIZE,
+    }),
+    ssri.integrityStream({
+      integrity: sri,
+      size,
+    })
+  )
+  return stream
+}
+
+module.exports.stream = readStream
+module.exports.readStream = readStream
+
+function readStream (cache, integrity, opts = {}) {
+  const { size } = opts
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
+      // get size
+      const stat = size ? { size } : await fs.stat(cpath)
+      return { stat, cpath, sri }
+    })
+
+    return readPipeline(cpath, stat.size, sri, stream)
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.copy = copy
+
+function copy (cache, integrity, dest) {
+  return withContentSri(cache, integrity, (cpath) => {
+    return fs.copyFile(cpath, dest)
+  })
+}
+
+module.exports.hasContent = hasContent
+
+async function hasContent (cache, integrity) {
+  if (!integrity) {
+    return false
+  }
+
+  try {
+    return await withContentSri(cache, integrity, async (cpath, sri) => {
+      const stat = await fs.stat(cpath)
+      return { size: stat.size, sri, stat }
+    })
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return false
+    }
+
+    if (err.code === 'EPERM') {
+      /* istanbul ignore else */
+      if (process.platform !== 'win32') {
+        throw err
+      } else {
+        return false
+      }
+    }
+  }
+}
+
+async function withContentSri (cache, integrity, fn) {
+  const sri = ssri.parse(integrity)
+  // If `integrity` has multiple entries, pick the first digest
+  // with available local data.
+  const algo = sri.pickAlgorithm()
+  const digests = sri[algo]
+
+  if (digests.length <= 1) {
+    const cpath = contentPath(cache, digests[0])
+    return fn(cpath, digests[0])
+  } else {
+    // Can't use race here because a generic error can happen before
+    // a ENOENT error, and can happen before a valid result
+    const results = await Promise.all(digests.map(async (meta) => {
+      try {
+        return await withContentSri(cache, meta, fn)
+      } catch (err) {
+        if (err.code === 'ENOENT') {
+          return Object.assign(
+            new Error('No matching content found for ' + sri.toString()),
+            { code: 'ENOENT' }
+          )
+        }
+        return err
+      }
+    }))
+    // Return the first non error if it is found
+    const result = results.find((r) => !(r instanceof Error))
+    if (result) {
+      return result
+    }
+
+    // Throw the No matching content found error
+    const enoentError = results.find((r) => r.code === 'ENOENT')
+    if (enoentError) {
+      throw enoentError
+    }
+
+    // Throw generic error
+    throw results.find((r) => r instanceof Error)
+  }
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function integrityError (sri, path) {
+  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
+  err.code = 'EINTEGRITY'
+  err.sri = sri
+  err.path = path
+  return err
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
new file mode 100644
index 0000000000000..ce58d679e4cb2
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
@@ -0,0 +1,18 @@
+'use strict'
+
+const fs = require('fs/promises')
+const contentPath = require('./path')
+const { hasContent } = require('./read')
+
+module.exports = rm
+
+async function rm (cache, integrity) {
+  const content = await hasContent(cache, integrity)
+  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
+  if (content && content.sri) {
+    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
+    return true
+  } else {
+    return false
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
new file mode 100644
index 0000000000000..e7187abca8788
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
@@ -0,0 +1,206 @@
+'use strict'
+
+const events = require('events')
+
+const contentPath = require('./path')
+const fs = require('fs/promises')
+const { moveFile } = require('@npmcli/fs')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+const Flush = require('minipass-flush')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+const fsm = require('fs-minipass')
+
+module.exports = write
+
+// Cache of move operations in process so we don't duplicate
+const moveOperations = new Map()
+
+async function write (cache, data, opts = {}) {
+  const { algorithms, size, integrity } = opts
+
+  if (typeof size === 'number' && data.length !== size) {
+    throw sizeError(size, data.length)
+  }
+
+  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
+  if (integrity && !ssri.checkData(data, integrity, opts)) {
+    throw checksumError(integrity, sri)
+  }
+
+  for (const algo in sri) {
+    const tmp = await makeTmp(cache, opts)
+    const hash = sri[algo].toString()
+    try {
+      await fs.writeFile(tmp.target, data, { flag: 'wx' })
+      await moveToDestination(tmp, cache, hash, opts)
+    } finally {
+      if (!tmp.moved) {
+        await fs.rm(tmp.target, { recursive: true, force: true })
+      }
+    }
+  }
+  return { integrity: sri, size: data.length }
+}
+
+module.exports.stream = writeStream
+
+// writes proxied to the 'inputStream' that is passed to the Promise
+// 'end' is deferred until content is handled.
+class CacacheWriteStream extends Flush {
+  constructor (cache, opts) {
+    super()
+    this.opts = opts
+    this.cache = cache
+    this.inputStream = new Minipass()
+    this.inputStream.on('error', er => this.emit('error', er))
+    this.inputStream.on('drain', () => this.emit('drain'))
+    this.handleContentP = null
+  }
+
+  write (chunk, encoding, cb) {
+    if (!this.handleContentP) {
+      this.handleContentP = handleContent(
+        this.inputStream,
+        this.cache,
+        this.opts
+      )
+      this.handleContentP.catch(error => this.emit('error', error))
+    }
+    return this.inputStream.write(chunk, encoding, cb)
+  }
+
+  flush (cb) {
+    this.inputStream.end(() => {
+      if (!this.handleContentP) {
+        const e = new Error('Cache input stream was empty')
+        e.code = 'ENODATA'
+        // empty streams are probably emitting end right away.
+        // defer this one tick by rejecting a promise on it.
+        return Promise.reject(e).catch(cb)
+      }
+      // eslint-disable-next-line promise/catch-or-return
+      this.handleContentP.then(
+        (res) => {
+          res.integrity && this.emit('integrity', res.integrity)
+          // eslint-disable-next-line promise/always-return
+          res.size !== null && this.emit('size', res.size)
+          cb()
+        },
+        (er) => cb(er)
+      )
+    })
+  }
+}
+
+function writeStream (cache, opts = {}) {
+  return new CacacheWriteStream(cache, opts)
+}
+
+async function handleContent (inputStream, cache, opts) {
+  const tmp = await makeTmp(cache, opts)
+  try {
+    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
+    await moveToDestination(
+      tmp,
+      cache,
+      res.integrity,
+      opts
+    )
+    return res
+  } finally {
+    if (!tmp.moved) {
+      await fs.rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+}
+
+async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
+  const outStream = new fsm.WriteStream(tmpTarget, {
+    flags: 'wx',
+  })
+
+  if (opts.integrityEmitter) {
+    // we need to create these all simultaneously since they can fire in any order
+    const [integrity, size] = await Promise.all([
+      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
+      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
+      new Pipeline(inputStream, outStream).promise(),
+    ])
+    return { integrity, size }
+  }
+
+  let integrity
+  let size
+  const hashStream = ssri.integrityStream({
+    integrity: opts.integrity,
+    algorithms: opts.algorithms,
+    size: opts.size,
+  })
+  hashStream.on('integrity', i => {
+    integrity = i
+  })
+  hashStream.on('size', s => {
+    size = s
+  })
+
+  const pipeline = new Pipeline(inputStream, hashStream, outStream)
+  await pipeline.promise()
+  return { integrity, size }
+}
+
+async function makeTmp (cache, opts) {
+  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
+  return {
+    target: tmpTarget,
+    moved: false,
+  }
+}
+
+async function moveToDestination (tmp, cache, sri) {
+  const destination = contentPath(cache, sri)
+  const destDir = path.dirname(destination)
+  if (moveOperations.has(destination)) {
+    return moveOperations.get(destination)
+  }
+  moveOperations.set(
+    destination,
+    fs.mkdir(destDir, { recursive: true })
+      .then(async () => {
+        await moveFile(tmp.target, destination, { overwrite: false })
+        tmp.moved = true
+        return tmp.moved
+      })
+      .catch(err => {
+        if (!err.message.startsWith('The destination file exists')) {
+          throw Object.assign(err, { code: 'EEXIST' })
+        }
+      }).finally(() => {
+        moveOperations.delete(destination)
+      })
+
+  )
+  return moveOperations.get(destination)
+}
+
+function sizeError (expected, found) {
+  /* eslint-disable-next-line max-len */
+  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
+  err.expected = expected
+  err.found = found
+  err.code = 'EBADSIZE'
+  return err
+}
+
+function checksumError (expected, found) {
+  const err = new Error(`Integrity check failed:
+  Wanted: ${expected}
+   Found: ${found}`)
+  err.code = 'EINTEGRITY'
+  err.expected = expected
+  err.found = found
+  return err
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
new file mode 100644
index 0000000000000..0e09b10818d09
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
@@ -0,0 +1,336 @@
+'use strict'
+
+const crypto = require('crypto')
+const {
+  appendFile,
+  mkdir,
+  readFile,
+  readdir,
+  rm,
+  writeFile,
+} = require('fs/promises')
+const { Minipass } = require('minipass')
+const path = require('path')
+const ssri = require('ssri')
+const uniqueFilename = require('unique-filename')
+
+const contentPath = require('./content/path')
+const hashToSegments = require('./util/hash-to-segments')
+const indexV = require('../package.json')['cache-version'].index
+const { moveFile } = require('@npmcli/fs')
+
+const lsStreamConcurrency = 5
+
+module.exports.NotFoundError = class NotFoundError extends Error {
+  constructor (cache, key) {
+    super(`No cache entry for ${key} found in ${cache}`)
+    this.code = 'ENOENT'
+    this.cache = cache
+    this.key = key
+  }
+}
+
+module.exports.compact = compact
+
+async function compact (cache, key, matchFn, opts = {}) {
+  const bucket = bucketPath(cache, key)
+  const entries = await bucketEntries(bucket)
+  const newEntries = []
+  // we loop backwards because the bottom-most result is the newest
+  // since we add new entries with appendFile
+  for (let i = entries.length - 1; i >= 0; --i) {
+    const entry = entries[i]
+    // a null integrity could mean either a delete was appended
+    // or the user has simply stored an index that does not map
+    // to any content. we determine if the user wants to keep the
+    // null integrity based on the validateEntry function passed in options.
+    // if the integrity is null and no validateEntry is provided, we break
+    // as we consider the null integrity to be a deletion of everything
+    // that came before it.
+    if (entry.integrity === null && !opts.validateEntry) {
+      break
+    }
+
+    // if this entry is valid, and it is either the first entry or
+    // the newEntries array doesn't already include an entry that
+    // matches this one based on the provided matchFn, then we add
+    // it to the beginning of our list
+    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
+      (newEntries.length === 0 ||
+        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
+      newEntries.unshift(entry)
+    }
+  }
+
+  const newIndex = '\n' + newEntries.map((entry) => {
+    const stringified = JSON.stringify(entry)
+    const hash = hashEntry(stringified)
+    return `${hash}\t${stringified}`
+  }).join('\n')
+
+  const setup = async () => {
+    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
+    await mkdir(path.dirname(target), { recursive: true })
+    return {
+      target,
+      moved: false,
+    }
+  }
+
+  const teardown = async (tmp) => {
+    if (!tmp.moved) {
+      return rm(tmp.target, { recursive: true, force: true })
+    }
+  }
+
+  const write = async (tmp) => {
+    await writeFile(tmp.target, newIndex, { flag: 'wx' })
+    await mkdir(path.dirname(bucket), { recursive: true })
+    // we use @npmcli/move-file directly here because we
+    // want to overwrite the existing file
+    await moveFile(tmp.target, bucket)
+    tmp.moved = true
+  }
+
+  // write the file atomically
+  const tmp = await setup()
+  try {
+    await write(tmp)
+  } finally {
+    await teardown(tmp)
+  }
+
+  // we reverse the list we generated such that the newest
+  // entries come first in order to make looping through them easier
+  // the true passed to formatEntry tells it to keep null
+  // integrity values, if they made it this far it's because
+  // validateEntry returned true, and as such we should return it
+  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
+}
+
+module.exports.insert = insert
+
+async function insert (cache, key, integrity, opts = {}) {
+  const { metadata, size, time } = opts
+  const bucket = bucketPath(cache, key)
+  const entry = {
+    key,
+    integrity: integrity && ssri.stringify(integrity),
+    time: time || Date.now(),
+    size,
+    metadata,
+  }
+  try {
+    await mkdir(path.dirname(bucket), { recursive: true })
+    const stringified = JSON.stringify(entry)
+    // NOTE - Cleverness ahoy!
+    //
+    // This works because it's tremendously unlikely for an entry to corrupt
+    // another while still preserving the string length of the JSON in
+    // question. So, we just slap the length in there and verify it on read.
+    //
+    // Thanks to @isaacs for the whiteboarding session that ended up with
+    // this.
+    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return undefined
+    }
+
+    throw err
+  }
+  return formatEntry(cache, entry)
+}
+
+module.exports.find = find
+
+async function find (cache, key) {
+  const bucket = bucketPath(cache, key)
+  try {
+    const entries = await bucketEntries(bucket)
+    return entries.reduce((latest, next) => {
+      if (next && next.key === key) {
+        return formatEntry(cache, next)
+      } else {
+        return latest
+      }
+    }, null)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return null
+    } else {
+      throw err
+    }
+  }
+}
+
+module.exports.delete = del
+
+function del (cache, key, opts = {}) {
+  if (!opts.removeFully) {
+    return insert(cache, key, null, opts)
+  }
+
+  const bucket = bucketPath(cache, key)
+  return rm(bucket, { recursive: true, force: true })
+}
+
+module.exports.lsStream = lsStream
+
+function lsStream (cache) {
+  const indexDir = bucketDir(cache)
+  const stream = new Minipass({ objectMode: true })
+
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const { default: pMap } = await import('p-map')
+    const buckets = await readdirOrEmpty(indexDir)
+    await pMap(buckets, async (bucket) => {
+      const bucketPath = path.join(indexDir, bucket)
+      const subbuckets = await readdirOrEmpty(bucketPath)
+      await pMap(subbuckets, async (subbucket) => {
+        const subbucketPath = path.join(bucketPath, subbucket)
+
+        // "/cachename//./*"
+        const subbucketEntries = await readdirOrEmpty(subbucketPath)
+        await pMap(subbucketEntries, async (entry) => {
+          const entryPath = path.join(subbucketPath, entry)
+          try {
+            const entries = await bucketEntries(entryPath)
+            // using a Map here prevents duplicate keys from showing up
+            // twice, I guess?
+            const reduced = entries.reduce((acc, entry) => {
+              acc.set(entry.key, entry)
+              return acc
+            }, new Map())
+            // reduced is a map of key => entry
+            for (const entry of reduced.values()) {
+              const formatted = formatEntry(cache, entry)
+              if (formatted) {
+                stream.write(formatted)
+              }
+            }
+          } catch (err) {
+            if (err.code === 'ENOENT') {
+              return undefined
+            }
+            throw err
+          }
+        },
+        { concurrency: lsStreamConcurrency })
+      },
+      { concurrency: lsStreamConcurrency })
+    },
+    { concurrency: lsStreamConcurrency })
+    stream.end()
+    return stream
+  }).catch(err => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.ls = ls
+
+async function ls (cache) {
+  const entries = await lsStream(cache).collect()
+  return entries.reduce((acc, xs) => {
+    acc[xs.key] = xs
+    return acc
+  }, {})
+}
+
+module.exports.bucketEntries = bucketEntries
+
+async function bucketEntries (bucket, filter) {
+  const data = await readFile(bucket, 'utf8')
+  return _bucketEntries(data, filter)
+}
+
+function _bucketEntries (data) {
+  const entries = []
+  data.split('\n').forEach((entry) => {
+    if (!entry) {
+      return
+    }
+
+    const pieces = entry.split('\t')
+    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
+      // Hash is no good! Corruption or malice? Doesn't matter!
+      // EJECT EJECT
+      return
+    }
+    let obj
+    try {
+      obj = JSON.parse(pieces[1])
+    } catch (_) {
+      // eslint-ignore-next-line no-empty-block
+    }
+    // coverage disabled here, no need to test with an entry that parses to something falsey
+    // istanbul ignore else
+    if (obj) {
+      entries.push(obj)
+    }
+  })
+  return entries
+}
+
+module.exports.bucketDir = bucketDir
+
+function bucketDir (cache) {
+  return path.join(cache, `index-v${indexV}`)
+}
+
+module.exports.bucketPath = bucketPath
+
+function bucketPath (cache, key) {
+  const hashed = hashKey(key)
+  return path.join.apply(
+    path,
+    [bucketDir(cache)].concat(hashToSegments(hashed))
+  )
+}
+
+module.exports.hashKey = hashKey
+
+function hashKey (key) {
+  return hash(key, 'sha256')
+}
+
+module.exports.hashEntry = hashEntry
+
+function hashEntry (str) {
+  return hash(str, 'sha1')
+}
+
+function hash (str, digest) {
+  return crypto
+    .createHash(digest)
+    .update(str)
+    .digest('hex')
+}
+
+function formatEntry (cache, entry, keepAll) {
+  // Treat null digests as deletions. They'll shadow any previous entries.
+  if (!entry.integrity && !keepAll) {
+    return null
+  }
+
+  return {
+    key: entry.key,
+    integrity: entry.integrity,
+    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
+    size: entry.size,
+    time: entry.time,
+    metadata: entry.metadata,
+  }
+}
+
+function readdirOrEmpty (dir) {
+  return readdir(dir).catch((err) => {
+    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
+      return []
+    }
+
+    throw err
+  })
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
new file mode 100644
index 0000000000000..80ec206c7ecaa
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
@@ -0,0 +1,170 @@
+'use strict'
+
+const Collect = require('minipass-collect')
+const { Minipass } = require('minipass')
+const Pipeline = require('minipass-pipeline')
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const read = require('./content/read')
+
+async function getData (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return {
+      metadata: memoized.entry.metadata,
+      data: memoized.data,
+      integrity: memoized.entry.integrity,
+      size: memoized.entry.size,
+    }
+  }
+
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  const data = await read(cache, entry.integrity, { integrity, size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return {
+    data,
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+module.exports = getData
+
+async function getDataByDigest (cache, key, opts = {}) {
+  const { integrity, memoize, size } = opts
+  const memoized = memo.get.byDigest(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return memoized
+  }
+
+  const res = await read(cache, key, { integrity, size })
+  if (memoize) {
+    memo.put.byDigest(cache, key, res, opts)
+  }
+  return res
+}
+module.exports.byDigest = getDataByDigest
+
+const getMemoizedStream = (memoized) => {
+  const stream = new Minipass()
+  stream.on('newListener', function (ev, cb) {
+    ev === 'metadata' && cb(memoized.entry.metadata)
+    ev === 'integrity' && cb(memoized.entry.integrity)
+    ev === 'size' && cb(memoized.entry.size)
+  })
+  stream.end(memoized.data)
+  return stream
+}
+
+function getStream (cache, key, opts = {}) {
+  const { memoize, size } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return getMemoizedStream(memoized)
+  }
+
+  const stream = new Pipeline()
+  // Set all this up to run on the stream and then just return the stream
+  Promise.resolve().then(async () => {
+    const entry = await index.find(cache, key)
+    if (!entry) {
+      throw new index.NotFoundError(cache, key)
+    }
+
+    stream.emit('metadata', entry.metadata)
+    stream.emit('integrity', entry.integrity)
+    stream.emit('size', entry.size)
+    stream.on('newListener', function (ev, cb) {
+      ev === 'metadata' && cb(entry.metadata)
+      ev === 'integrity' && cb(entry.integrity)
+      ev === 'size' && cb(entry.size)
+    })
+
+    const src = read.readStream(
+      cache,
+      entry.integrity,
+      { ...opts, size: typeof size !== 'number' ? entry.size : size }
+    )
+
+    if (memoize) {
+      const memoStream = new Collect.PassThrough()
+      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
+      stream.unshift(memoStream)
+    }
+    stream.unshift(src)
+    return stream
+  }).catch((err) => stream.emit('error', err))
+
+  return stream
+}
+
+module.exports.stream = getStream
+
+function getStreamDigest (cache, integrity, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get.byDigest(cache, integrity, opts)
+  if (memoized && memoize !== false) {
+    const stream = new Minipass()
+    stream.end(memoized)
+    return stream
+  } else {
+    const stream = read.readStream(cache, integrity, opts)
+    if (!memoize) {
+      return stream
+    }
+
+    const memoStream = new Collect.PassThrough()
+    memoStream.on('collect', data => memo.put.byDigest(
+      cache,
+      integrity,
+      data,
+      opts
+    ))
+    return new Pipeline(stream, memoStream)
+  }
+}
+
+module.exports.stream.byDigest = getStreamDigest
+
+function info (cache, key, opts = {}) {
+  const { memoize } = opts
+  const memoized = memo.get(cache, key, opts)
+  if (memoized && memoize !== false) {
+    return Promise.resolve(memoized.entry)
+  } else {
+    return index.find(cache, key)
+  }
+}
+module.exports.info = info
+
+async function copy (cache, key, dest, opts = {}) {
+  const entry = await index.find(cache, key, opts)
+  if (!entry) {
+    throw new index.NotFoundError(cache, key)
+  }
+  await read.copy(cache, entry.integrity, dest, opts)
+  return {
+    metadata: entry.metadata,
+    size: entry.size,
+    integrity: entry.integrity,
+  }
+}
+
+module.exports.copy = copy
+
+async function copyByDigest (cache, key, dest, opts = {}) {
+  await read.copy(cache, key, dest, opts)
+  return key
+}
+
+module.exports.copy.byDigest = copyByDigest
+
+module.exports.hasContent = read.hasContent
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
new file mode 100644
index 0000000000000..c9b0da5f3a271
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
@@ -0,0 +1,42 @@
+'use strict'
+
+const get = require('./get.js')
+const put = require('./put.js')
+const rm = require('./rm.js')
+const verify = require('./verify.js')
+const { clearMemoized } = require('./memoization.js')
+const tmp = require('./util/tmp.js')
+const index = require('./entry-index.js')
+
+module.exports.index = {}
+module.exports.index.compact = index.compact
+module.exports.index.insert = index.insert
+
+module.exports.ls = index.ls
+module.exports.ls.stream = index.lsStream
+
+module.exports.get = get
+module.exports.get.byDigest = get.byDigest
+module.exports.get.stream = get.stream
+module.exports.get.stream.byDigest = get.stream.byDigest
+module.exports.get.copy = get.copy
+module.exports.get.copy.byDigest = get.copy.byDigest
+module.exports.get.info = get.info
+module.exports.get.hasContent = get.hasContent
+
+module.exports.put = put
+module.exports.put.stream = put.stream
+
+module.exports.rm = rm.entry
+module.exports.rm.all = rm.all
+module.exports.rm.entry = module.exports.rm
+module.exports.rm.content = rm.content
+
+module.exports.clearMemoized = clearMemoized
+
+module.exports.tmp = {}
+module.exports.tmp.mkdir = tmp.mkdir
+module.exports.tmp.withTmp = tmp.withTmp
+
+module.exports.verify = verify
+module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
new file mode 100644
index 0000000000000..2ecc60912e456
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
@@ -0,0 +1,72 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+
+const MEMOIZED = new LRUCache({
+  max: 500,
+  maxSize: 50 * 1024 * 1024, // 50MB
+  ttl: 3 * 60 * 1000, // 3 minutes
+  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
+})
+
+module.exports.clearMemoized = clearMemoized
+
+function clearMemoized () {
+  const old = {}
+  MEMOIZED.forEach((v, k) => {
+    old[k] = v
+  })
+  MEMOIZED.clear()
+  return old
+}
+
+module.exports.put = put
+
+function put (cache, entry, data, opts) {
+  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
+  putDigest(cache, entry.integrity, data, opts)
+}
+
+module.exports.put.byDigest = putDigest
+
+function putDigest (cache, integrity, data, opts) {
+  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
+}
+
+module.exports.get = get
+
+function get (cache, key, opts) {
+  return pickMem(opts).get(`key:${cache}:${key}`)
+}
+
+module.exports.get.byDigest = getDigest
+
+function getDigest (cache, integrity, opts) {
+  return pickMem(opts).get(`digest:${cache}:${integrity}`)
+}
+
+class ObjProxy {
+  constructor (obj) {
+    this.obj = obj
+  }
+
+  get (key) {
+    return this.obj[key]
+  }
+
+  set (key, val) {
+    this.obj[key] = val
+  }
+}
+
+function pickMem (opts) {
+  if (!opts || !opts.memoize) {
+    return MEMOIZED
+  } else if (opts.memoize.get && opts.memoize.set) {
+    return opts.memoize
+  } else if (typeof opts.memoize === 'object') {
+    return new ObjProxy(opts.memoize)
+  } else {
+    return MEMOIZED
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
new file mode 100644
index 0000000000000..9fc932d5f6dec
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const index = require('./entry-index')
+const memo = require('./memoization')
+const write = require('./content/write')
+const Flush = require('minipass-flush')
+const { PassThrough } = require('minipass-collect')
+const Pipeline = require('minipass-pipeline')
+
+const putOpts = (opts) => ({
+  algorithms: ['sha512'],
+  ...opts,
+})
+
+module.exports = putData
+
+async function putData (cache, key, data, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  const res = await write(cache, data, opts)
+  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
+  if (memoize) {
+    memo.put(cache, entry, data, opts)
+  }
+
+  return res.integrity
+}
+
+module.exports.stream = putStream
+
+function putStream (cache, key, opts = {}) {
+  const { memoize } = opts
+  opts = putOpts(opts)
+  let integrity
+  let size
+  let error
+
+  let memoData
+  const pipeline = new Pipeline()
+  // first item in the pipeline is the memoizer, because we need
+  // that to end first and get the collected data.
+  if (memoize) {
+    const memoizer = new PassThrough().on('collect', data => {
+      memoData = data
+    })
+    pipeline.push(memoizer)
+  }
+
+  // contentStream is a write-only, not a passthrough
+  // no data comes out of it.
+  const contentStream = write.stream(cache, opts)
+    .on('integrity', (int) => {
+      integrity = int
+    })
+    .on('size', (s) => {
+      size = s
+    })
+    .on('error', (err) => {
+      error = err
+    })
+
+  pipeline.push(contentStream)
+
+  // last but not least, we write the index and emit hash and size,
+  // and memoize if we're doing that
+  pipeline.push(new Flush({
+    async flush () {
+      if (!error) {
+        const entry = await index.insert(cache, key, integrity, { ...opts, size })
+        if (memoize && memoData) {
+          memo.put(cache, entry, memoData, opts)
+        }
+        pipeline.emit('integrity', integrity)
+        pipeline.emit('size', size)
+      }
+    },
+  }))
+
+  return pipeline
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
new file mode 100644
index 0000000000000..a94760c7cf243
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
@@ -0,0 +1,31 @@
+'use strict'
+
+const { rm } = require('fs/promises')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const memo = require('./memoization')
+const path = require('path')
+const rmContent = require('./content/rm')
+
+module.exports = entry
+module.exports.entry = entry
+
+function entry (cache, key, opts) {
+  memo.clearMemoized()
+  return index.delete(cache, key, opts)
+}
+
+module.exports.content = content
+
+function content (cache, integrity) {
+  memo.clearMemoized()
+  return rmContent(cache, integrity)
+}
+
+module.exports.all = all
+
+async function all (cache) {
+  memo.clearMemoized()
+  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
+  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js
new file mode 100644
index 0000000000000..8500c1c16a429
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js
@@ -0,0 +1,7 @@
+'use strict'
+
+const { glob } = require('glob')
+const path = require('path')
+
+const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
+module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
new file mode 100644
index 0000000000000..445599b503808
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
@@ -0,0 +1,7 @@
+'use strict'
+
+module.exports = hashToSegments
+
+function hashToSegments (hash) {
+  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
new file mode 100644
index 0000000000000..0bf5302136ebe
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const { withTempDir } = require('@npmcli/fs')
+const fs = require('fs/promises')
+const path = require('path')
+
+module.exports.mkdir = mktmpdir
+
+async function mktmpdir (cache, opts = {}) {
+  const { tmpPrefix } = opts
+  const tmpDir = path.join(cache, 'tmp')
+  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
+  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
+  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
+  return fs.mkdtemp(target, { owner: 'inherit' })
+}
+
+module.exports.withTmp = withTmp
+
+function withTmp (cache, opts, cb) {
+  if (!cb) {
+    cb = opts
+    opts = {}
+  }
+  return withTempDir(path.join(cache, 'tmp'), cb, opts)
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
new file mode 100644
index 0000000000000..dcff3aa73f317
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
@@ -0,0 +1,258 @@
+'use strict'
+
+const {
+  mkdir,
+  readFile,
+  rm,
+  stat,
+  truncate,
+  writeFile,
+} = require('fs/promises')
+const contentPath = require('./content/path')
+const fsm = require('fs-minipass')
+const glob = require('./util/glob.js')
+const index = require('./entry-index')
+const path = require('path')
+const ssri = require('ssri')
+
+const hasOwnProperty = (obj, key) =>
+  Object.prototype.hasOwnProperty.call(obj, key)
+
+const verifyOpts = (opts) => ({
+  concurrency: 20,
+  log: { silly () {} },
+  ...opts,
+})
+
+module.exports = verify
+
+async function verify (cache, opts) {
+  opts = verifyOpts(opts)
+  opts.log.silly('verify', 'verifying cache at', cache)
+
+  const steps = [
+    markStartTime,
+    fixPerms,
+    garbageCollect,
+    rebuildIndex,
+    cleanTmp,
+    writeVerifile,
+    markEndTime,
+  ]
+
+  const stats = {}
+  for (const step of steps) {
+    const label = step.name
+    const start = new Date()
+    const s = await step(cache, opts)
+    if (s) {
+      Object.keys(s).forEach((k) => {
+        stats[k] = s[k]
+      })
+    }
+    const end = new Date()
+    if (!stats.runTime) {
+      stats.runTime = {}
+    }
+    stats.runTime[label] = end - start
+  }
+  stats.runTime.total = stats.endTime - stats.startTime
+  opts.log.silly(
+    'verify',
+    'verification finished for',
+    cache,
+    'in',
+    `${stats.runTime.total}ms`
+  )
+  return stats
+}
+
+async function markStartTime () {
+  return { startTime: new Date() }
+}
+
+async function markEndTime () {
+  return { endTime: new Date() }
+}
+
+async function fixPerms (cache, opts) {
+  opts.log.silly('verify', 'fixing cache permissions')
+  await mkdir(cache, { recursive: true })
+  return null
+}
+
+// Implements a naive mark-and-sweep tracing garbage collector.
+//
+// The algorithm is basically as follows:
+// 1. Read (and filter) all index entries ("pointers")
+// 2. Mark each integrity value as "live"
+// 3. Read entire filesystem tree in `content-vX/` dir
+// 4. If content is live, verify its checksum and delete it if it fails
+// 5. If content is not marked as live, rm it.
+//
+async function garbageCollect (cache, opts) {
+  opts.log.silly('verify', 'garbage collecting content')
+  const { default: pMap } = await import('p-map')
+  const indexStream = index.lsStream(cache)
+  const liveContent = new Set()
+  indexStream.on('data', (entry) => {
+    if (opts.filter && !opts.filter(entry)) {
+      return
+    }
+
+    // integrity is stringified, re-parse it so we can get each hash
+    const integrity = ssri.parse(entry.integrity)
+    for (const algo in integrity) {
+      liveContent.add(integrity[algo].toString())
+    }
+  })
+  await new Promise((resolve, reject) => {
+    indexStream.on('end', resolve).on('error', reject)
+  })
+  const contentDir = contentPath.contentDir(cache)
+  const files = await glob(path.join(contentDir, '**'), {
+    follow: false,
+    nodir: true,
+    nosort: true,
+  })
+  const stats = {
+    verifiedContent: 0,
+    reclaimedCount: 0,
+    reclaimedSize: 0,
+    badContentCount: 0,
+    keptSize: 0,
+  }
+  await pMap(
+    files,
+    async (f) => {
+      const split = f.split(/[/\\]/)
+      const digest = split.slice(split.length - 3).join('')
+      const algo = split[split.length - 4]
+      const integrity = ssri.fromHex(digest, algo)
+      if (liveContent.has(integrity.toString())) {
+        const info = await verifyContent(f, integrity)
+        if (!info.valid) {
+          stats.reclaimedCount++
+          stats.badContentCount++
+          stats.reclaimedSize += info.size
+        } else {
+          stats.verifiedContent++
+          stats.keptSize += info.size
+        }
+      } else {
+        // No entries refer to this content. We can delete.
+        stats.reclaimedCount++
+        const s = await stat(f)
+        await rm(f, { recursive: true, force: true })
+        stats.reclaimedSize += s.size
+      }
+      return stats
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function verifyContent (filepath, sri) {
+  const contentInfo = {}
+  try {
+    const { size } = await stat(filepath)
+    contentInfo.size = size
+    contentInfo.valid = true
+    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
+  } catch (err) {
+    if (err.code === 'ENOENT') {
+      return { size: 0, valid: false }
+    }
+    if (err.code !== 'EINTEGRITY') {
+      throw err
+    }
+
+    await rm(filepath, { recursive: true, force: true })
+    contentInfo.valid = false
+  }
+  return contentInfo
+}
+
+async function rebuildIndex (cache, opts) {
+  opts.log.silly('verify', 'rebuilding index')
+  const { default: pMap } = await import('p-map')
+  const entries = await index.ls(cache)
+  const stats = {
+    missingContent: 0,
+    rejectedEntries: 0,
+    totalEntries: 0,
+  }
+  const buckets = {}
+  for (const k in entries) {
+    /* istanbul ignore else */
+    if (hasOwnProperty(entries, k)) {
+      const hashed = index.hashKey(k)
+      const entry = entries[k]
+      const excluded = opts.filter && !opts.filter(entry)
+      excluded && stats.rejectedEntries++
+      if (buckets[hashed] && !excluded) {
+        buckets[hashed].push(entry)
+      } else if (buckets[hashed] && excluded) {
+        // skip
+      } else if (excluded) {
+        buckets[hashed] = []
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      } else {
+        buckets[hashed] = [entry]
+        buckets[hashed]._path = index.bucketPath(cache, k)
+      }
+    }
+  }
+  await pMap(
+    Object.keys(buckets),
+    (key) => {
+      return rebuildBucket(cache, buckets[key], stats, opts)
+    },
+    { concurrency: opts.concurrency }
+  )
+  return stats
+}
+
+async function rebuildBucket (cache, bucket, stats) {
+  await truncate(bucket._path)
+  // This needs to be serialized because cacache explicitly
+  // lets very racy bucket conflicts clobber each other.
+  for (const entry of bucket) {
+    const content = contentPath(cache, entry.integrity)
+    try {
+      await stat(content)
+      await index.insert(cache, entry.key, entry.integrity, {
+        metadata: entry.metadata,
+        size: entry.size,
+        time: entry.time,
+      })
+      stats.totalEntries++
+    } catch (err) {
+      if (err.code === 'ENOENT') {
+        stats.rejectedEntries++
+        stats.missingContent++
+      } else {
+        throw err
+      }
+    }
+  }
+}
+
+function cleanTmp (cache, opts) {
+  opts.log.silly('verify', 'cleaning tmp directory')
+  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
+}
+
+async function writeVerifile (cache, opts) {
+  const verifile = path.join(cache, '_lastverified')
+  opts.log.silly('verify', 'writing verifile to ' + verifile)
+  return writeFile(verifile, `${Date.now()}`)
+}
+
+module.exports.lastRun = lastRun
+
+async function lastRun (cache) {
+  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
+  return new Date(+data)
+}
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/make-fetch-happen/node_modules/cacache/package.json
new file mode 100644
index 0000000000000..ebb0f3f8ed410
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/cacache/package.json
@@ -0,0 +1,83 @@
+{
+  "name": "cacache",
+  "version": "19.0.1",
+  "cache-version": {
+    "content": "2",
+    "index": "5"
+  },
+  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "coverage": "tap",
+    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
+    "lint": "npm run eslint",
+    "npmclilint": "npmcli-lint",
+    "lintfix": "npm run eslint -- --fix",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "posttest": "npm run lint",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/cacache.git"
+  },
+  "keywords": [
+    "cache",
+    "caching",
+    "content-addressable",
+    "sri",
+    "sri hash",
+    "subresource integrity",
+    "cache",
+    "storage",
+    "store",
+    "file store",
+    "filesystem",
+    "disk cache",
+    "disk storage"
+  ],
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/fs": "^4.0.0",
+    "fs-minipass": "^3.0.0",
+    "glob": "^10.2.2",
+    "lru-cache": "^10.0.1",
+    "minipass": "^7.0.3",
+    "minipass-collect": "^2.0.1",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "p-map": "^7.0.2",
+    "ssri": "^12.0.0",
+    "tar": "^7.4.3",
+    "unique-filename": "^4.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.3",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "windowsCI": false,
+    "version": "4.23.3",
+    "publish": "true"
+  },
+  "author": "GitHub Inc.",
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md b/node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md
new file mode 100644
index 0000000000000..881248b6d7f0c
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md
@@ -0,0 +1,63 @@
+All packages under `src/` are licensed according to the terms in
+their respective `LICENSE` or `LICENSE.md` files.
+
+The remainder of this project is licensed under the Blue Oak
+Model License, as follows:
+
+-----
+
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js
new file mode 100644
index 0000000000000..6a7b68d5eac26
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js
@@ -0,0 +1,93 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.chownrSync = exports.chownr = void 0;
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const lchownSync = (path, uid, gid) => {
+    try {
+        return node_fs_1.default.lchownSync(path, uid, gid);
+    }
+    catch (er) {
+        if (er?.code !== 'ENOENT')
+            throw er;
+    }
+};
+const chown = (cpath, uid, gid, cb) => {
+    node_fs_1.default.lchown(cpath, uid, gid, er => {
+        // Skip ENOENT error
+        cb(er && er?.code !== 'ENOENT' ? er : null);
+    });
+};
+const chownrKid = (p, child, uid, gid, cb) => {
+    if (child.isDirectory()) {
+        (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => {
+            if (er)
+                return cb(er);
+            const cpath = node_path_1.default.resolve(p, child.name);
+            chown(cpath, uid, gid, cb);
+        });
+    }
+    else {
+        const cpath = node_path_1.default.resolve(p, child.name);
+        chown(cpath, uid, gid, cb);
+    }
+};
+const chownr = (p, uid, gid, cb) => {
+    node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => {
+        // any error other than ENOTDIR or ENOTSUP means it's not readable,
+        // or doesn't exist.  give up.
+        if (er) {
+            if (er.code === 'ENOENT')
+                return cb();
+            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
+                return cb(er);
+        }
+        if (er || !children.length)
+            return chown(p, uid, gid, cb);
+        let len = children.length;
+        let errState = null;
+        const then = (er) => {
+            /* c8 ignore start */
+            if (errState)
+                return;
+            /* c8 ignore stop */
+            if (er)
+                return cb((errState = er));
+            if (--len === 0)
+                return chown(p, uid, gid, cb);
+        };
+        for (const child of children) {
+            chownrKid(p, child, uid, gid, then);
+        }
+    });
+};
+exports.chownr = chownr;
+const chownrKidSync = (p, child, uid, gid) => {
+    if (child.isDirectory())
+        (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid);
+    lchownSync(node_path_1.default.resolve(p, child.name), uid, gid);
+};
+const chownrSync = (p, uid, gid) => {
+    let children;
+    try {
+        children = node_fs_1.default.readdirSync(p, { withFileTypes: true });
+    }
+    catch (er) {
+        const e = er;
+        if (e?.code === 'ENOENT')
+            return;
+        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
+            return lchownSync(p, uid, gid);
+        else
+            throw e;
+    }
+    for (const child of children) {
+        chownrKidSync(p, child, uid, gid);
+    }
+    return lchownSync(p, uid, gid);
+};
+exports.chownrSync = chownrSync;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js
new file mode 100644
index 0000000000000..5c2815297a67c
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js
@@ -0,0 +1,85 @@
+import fs from 'node:fs';
+import path from 'node:path';
+const lchownSync = (path, uid, gid) => {
+    try {
+        return fs.lchownSync(path, uid, gid);
+    }
+    catch (er) {
+        if (er?.code !== 'ENOENT')
+            throw er;
+    }
+};
+const chown = (cpath, uid, gid, cb) => {
+    fs.lchown(cpath, uid, gid, er => {
+        // Skip ENOENT error
+        cb(er && er?.code !== 'ENOENT' ? er : null);
+    });
+};
+const chownrKid = (p, child, uid, gid, cb) => {
+    if (child.isDirectory()) {
+        chownr(path.resolve(p, child.name), uid, gid, (er) => {
+            if (er)
+                return cb(er);
+            const cpath = path.resolve(p, child.name);
+            chown(cpath, uid, gid, cb);
+        });
+    }
+    else {
+        const cpath = path.resolve(p, child.name);
+        chown(cpath, uid, gid, cb);
+    }
+};
+export const chownr = (p, uid, gid, cb) => {
+    fs.readdir(p, { withFileTypes: true }, (er, children) => {
+        // any error other than ENOTDIR or ENOTSUP means it's not readable,
+        // or doesn't exist.  give up.
+        if (er) {
+            if (er.code === 'ENOENT')
+                return cb();
+            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
+                return cb(er);
+        }
+        if (er || !children.length)
+            return chown(p, uid, gid, cb);
+        let len = children.length;
+        let errState = null;
+        const then = (er) => {
+            /* c8 ignore start */
+            if (errState)
+                return;
+            /* c8 ignore stop */
+            if (er)
+                return cb((errState = er));
+            if (--len === 0)
+                return chown(p, uid, gid, cb);
+        };
+        for (const child of children) {
+            chownrKid(p, child, uid, gid, then);
+        }
+    });
+};
+const chownrKidSync = (p, child, uid, gid) => {
+    if (child.isDirectory())
+        chownrSync(path.resolve(p, child.name), uid, gid);
+    lchownSync(path.resolve(p, child.name), uid, gid);
+};
+export const chownrSync = (p, uid, gid) => {
+    let children;
+    try {
+        children = fs.readdirSync(p, { withFileTypes: true });
+    }
+    catch (er) {
+        const e = er;
+        if (e?.code === 'ENOENT')
+            return;
+        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
+            return lchownSync(p, uid, gid);
+        else
+            throw e;
+    }
+    for (const child of children) {
+        chownrKidSync(p, child, uid, gid);
+    }
+    return lchownSync(p, uid, gid);
+};
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/package.json b/node_modules/make-fetch-happen/node_modules/chownr/package.json
new file mode 100644
index 0000000000000..09aa6b2e2e576
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/chownr/package.json
@@ -0,0 +1,69 @@
+{
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "name": "chownr",
+  "description": "like `chown -R`",
+  "version": "3.0.0",
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/chownr.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "devDependencies": {
+    "@types/node": "^20.12.5",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.12"
+  },
+  "scripts": {
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "license": "BlueOak-1.0.0",
+  "engines": {
+    "node": ">=18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/LICENSE b/node_modules/make-fetch-happen/node_modules/minizlib/LICENSE
new file mode 100644
index 0000000000000..49f7efe431c9e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
+Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
+Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js
new file mode 100644
index 0000000000000..dfc2c1957bfc9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js
@@ -0,0 +1,123 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.constants = void 0;
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+const zlib_1 = __importDefault(require("zlib"));
+/* c8 ignore start */
+const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
+/* c8 ignore stop */
+exports.constants = Object.freeze(Object.assign(Object.create(null), {
+    Z_NO_FLUSH: 0,
+    Z_PARTIAL_FLUSH: 1,
+    Z_SYNC_FLUSH: 2,
+    Z_FULL_FLUSH: 3,
+    Z_FINISH: 4,
+    Z_BLOCK: 5,
+    Z_OK: 0,
+    Z_STREAM_END: 1,
+    Z_NEED_DICT: 2,
+    Z_ERRNO: -1,
+    Z_STREAM_ERROR: -2,
+    Z_DATA_ERROR: -3,
+    Z_MEM_ERROR: -4,
+    Z_BUF_ERROR: -5,
+    Z_VERSION_ERROR: -6,
+    Z_NO_COMPRESSION: 0,
+    Z_BEST_SPEED: 1,
+    Z_BEST_COMPRESSION: 9,
+    Z_DEFAULT_COMPRESSION: -1,
+    Z_FILTERED: 1,
+    Z_HUFFMAN_ONLY: 2,
+    Z_RLE: 3,
+    Z_FIXED: 4,
+    Z_DEFAULT_STRATEGY: 0,
+    DEFLATE: 1,
+    INFLATE: 2,
+    GZIP: 3,
+    GUNZIP: 4,
+    DEFLATERAW: 5,
+    INFLATERAW: 6,
+    UNZIP: 7,
+    BROTLI_DECODE: 8,
+    BROTLI_ENCODE: 9,
+    Z_MIN_WINDOWBITS: 8,
+    Z_MAX_WINDOWBITS: 15,
+    Z_DEFAULT_WINDOWBITS: 15,
+    Z_MIN_CHUNK: 64,
+    Z_MAX_CHUNK: Infinity,
+    Z_DEFAULT_CHUNK: 16384,
+    Z_MIN_MEMLEVEL: 1,
+    Z_MAX_MEMLEVEL: 9,
+    Z_DEFAULT_MEMLEVEL: 8,
+    Z_MIN_LEVEL: -1,
+    Z_MAX_LEVEL: 9,
+    Z_DEFAULT_LEVEL: -1,
+    BROTLI_OPERATION_PROCESS: 0,
+    BROTLI_OPERATION_FLUSH: 1,
+    BROTLI_OPERATION_FINISH: 2,
+    BROTLI_OPERATION_EMIT_METADATA: 3,
+    BROTLI_MODE_GENERIC: 0,
+    BROTLI_MODE_TEXT: 1,
+    BROTLI_MODE_FONT: 2,
+    BROTLI_DEFAULT_MODE: 0,
+    BROTLI_MIN_QUALITY: 0,
+    BROTLI_MAX_QUALITY: 11,
+    BROTLI_DEFAULT_QUALITY: 11,
+    BROTLI_MIN_WINDOW_BITS: 10,
+    BROTLI_MAX_WINDOW_BITS: 24,
+    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+    BROTLI_DEFAULT_WINDOW: 22,
+    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+    BROTLI_PARAM_MODE: 0,
+    BROTLI_PARAM_QUALITY: 1,
+    BROTLI_PARAM_LGWIN: 2,
+    BROTLI_PARAM_LGBLOCK: 3,
+    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+    BROTLI_PARAM_SIZE_HINT: 5,
+    BROTLI_PARAM_LARGE_WINDOW: 6,
+    BROTLI_PARAM_NPOSTFIX: 7,
+    BROTLI_PARAM_NDIRECT: 8,
+    BROTLI_DECODER_RESULT_ERROR: 0,
+    BROTLI_DECODER_RESULT_SUCCESS: 1,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+    BROTLI_DECODER_NO_ERROR: 0,
+    BROTLI_DECODER_SUCCESS: 1,
+    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants));
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js
new file mode 100644
index 0000000000000..b4906d2783372
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js
@@ -0,0 +1,392 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
+const assert_1 = __importDefault(require("assert"));
+const buffer_1 = require("buffer");
+const minipass_1 = require("minipass");
+const realZlib = __importStar(require("zlib"));
+const constants_js_1 = require("./constants.js");
+var constants_js_2 = require("./constants.js");
+Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
+const OriginalBufferConcat = buffer_1.Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+    ? (makeNoOp) => {
+        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+    }
+    : (_) => { };
+const _superWrite = Symbol('_superWrite');
+class ZlibError extends Error {
+    code;
+    errno;
+    constructor(err) {
+        super('zlib: ' + err.message);
+        this.code = err.code;
+        this.errno = err.errno;
+        /* c8 ignore next */
+        if (!this.code)
+            this.code = 'ZLIB_ERROR';
+        this.message = 'zlib: ' + err.message;
+        Error.captureStackTrace(this, this.constructor);
+    }
+    get name() {
+        return 'ZlibError';
+    }
+}
+exports.ZlibError = ZlibError;
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _flushFlag = Symbol('flushFlag');
+class ZlibBase extends minipass_1.Minipass {
+    #sawError = false;
+    #ended = false;
+    #flushFlag;
+    #finishFlushFlag;
+    #fullFlushFlag;
+    #handle;
+    #onError;
+    get sawError() {
+        return this.#sawError;
+    }
+    get handle() {
+        return this.#handle;
+    }
+    /* c8 ignore start */
+    get flushFlag() {
+        return this.#flushFlag;
+    }
+    /* c8 ignore stop */
+    constructor(opts, mode) {
+        if (!opts || typeof opts !== 'object')
+            throw new TypeError('invalid options for ZlibBase constructor');
+        //@ts-ignore
+        super(opts);
+        /* c8 ignore start */
+        this.#flushFlag = opts.flush ?? 0;
+        this.#finishFlushFlag = opts.finishFlush ?? 0;
+        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
+        /* c8 ignore stop */
+        // this will throw if any options are invalid for the class selected
+        try {
+            // @types/node doesn't know that it exports the classes, but they're there
+            //@ts-ignore
+            this.#handle = new realZlib[mode](opts);
+        }
+        catch (er) {
+            // make sure that all errors get decorated properly
+            throw new ZlibError(er);
+        }
+        this.#onError = err => {
+            // no sense raising multiple errors, since we abort on the first one.
+            if (this.#sawError)
+                return;
+            this.#sawError = true;
+            // there is no way to cleanly recover.
+            // continuing only obscures problems.
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close);
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            (0, assert_1.default)(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = buffer_1.Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        (0, assert_1.default)(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle
+            ._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        passthroughBufferConcat(true);
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            passthroughBufferConcat(false);
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            passthroughBufferConcat(false);
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                ;
+                this.#handle._handle =
+                    nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+                this.#handle.removeAllListeners('error');
+                // make sure OUR error listener is still attached tho
+            }
+        }
+        if (this.#handle)
+            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+        let writeReturn;
+        if (result) {
+            if (Array.isArray(result) && result.length > 0) {
+                const r = result[0];
+                // The first buffer is always `handle._outBuffer`, which would be
+                // re-used for later invocations; so, we always have to copy that one.
+                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
+                for (let i = 1; i < result.length; i++) {
+                    writeReturn = this[_superWrite](result[i]);
+                }
+            }
+            else {
+                // either a single Buffer or an empty array
+                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
+            }
+        }
+        if (cb)
+            cb();
+        return writeReturn;
+    }
+}
+class Zlib extends ZlibBase {
+    #level;
+    #strategy;
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
+        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
+        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
+        super(opts, mode);
+        this.#level = opts.level;
+        this.#strategy = opts.strategy;
+    }
+    params(level, strategy) {
+        if (this.sawError)
+            return;
+        if (!this.handle)
+            throw new Error('cannot switch params when binding is closed');
+        // no way to test this without also not supporting params at all
+        /* c8 ignore start */
+        if (!this.handle.params)
+            throw new Error('not supported in this implementation');
+        /* c8 ignore stop */
+        if (this.#level !== level || this.#strategy !== strategy) {
+            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
+            (0, assert_1.default)(this.handle, 'zlib binding closed');
+            // .params() calls .flush(), but the latter is always async in the
+            // core zlib. We override .flush() temporarily to intercept that and
+            // flush synchronously.
+            const origFlush = this.handle.flush;
+            this.handle.flush = (flushFlag, cb) => {
+                /* c8 ignore start */
+                if (typeof flushFlag === 'function') {
+                    cb = flushFlag;
+                    flushFlag = this.flushFlag;
+                }
+                /* c8 ignore stop */
+                this.flush(flushFlag);
+                cb?.();
+            };
+            try {
+                ;
+                this.handle.params(level, strategy);
+            }
+            finally {
+                this.handle.flush = origFlush;
+            }
+            /* c8 ignore start */
+            if (this.handle) {
+                this.#level = level;
+                this.#strategy = strategy;
+            }
+            /* c8 ignore stop */
+        }
+    }
+}
+exports.Zlib = Zlib;
+// minimal 2-byte header
+class Deflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Deflate');
+    }
+}
+exports.Deflate = Deflate;
+class Inflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Inflate');
+    }
+}
+exports.Inflate = Inflate;
+class Gzip extends Zlib {
+    #portable;
+    constructor(opts) {
+        super(opts, 'Gzip');
+        this.#portable = opts && !!opts.portable;
+    }
+    [_superWrite](data) {
+        if (!this.#portable)
+            return super[_superWrite](data);
+        // we'll always get the header emitted in one first chunk
+        // overwrite the OS indicator byte with 0xFF
+        this.#portable = false;
+        data[9] = 255;
+        return super[_superWrite](data);
+    }
+}
+exports.Gzip = Gzip;
+class Gunzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Gunzip');
+    }
+}
+exports.Gunzip = Gunzip;
+// raw - no header
+class DeflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'DeflateRaw');
+    }
+}
+exports.DeflateRaw = DeflateRaw;
+class InflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'InflateRaw');
+    }
+}
+exports.InflateRaw = InflateRaw;
+// auto-detect header.
+class Unzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Unzip');
+    }
+}
+exports.Unzip = Unzip;
+class Brotli extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
+        opts.finishFlush =
+            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
+        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
+        super(opts, mode);
+    }
+}
+exports.Brotli = Brotli;
+class BrotliCompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliCompress');
+    }
+}
+exports.BrotliCompress = BrotliCompress;
+class BrotliDecompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliDecompress');
+    }
+}
+exports.BrotliDecompress = BrotliDecompress;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js
new file mode 100644
index 0000000000000..7faf40be5068d
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js
@@ -0,0 +1,117 @@
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+import realZlib from 'zlib';
+/* c8 ignore start */
+const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
+/* c8 ignore stop */
+export const constants = Object.freeze(Object.assign(Object.create(null), {
+    Z_NO_FLUSH: 0,
+    Z_PARTIAL_FLUSH: 1,
+    Z_SYNC_FLUSH: 2,
+    Z_FULL_FLUSH: 3,
+    Z_FINISH: 4,
+    Z_BLOCK: 5,
+    Z_OK: 0,
+    Z_STREAM_END: 1,
+    Z_NEED_DICT: 2,
+    Z_ERRNO: -1,
+    Z_STREAM_ERROR: -2,
+    Z_DATA_ERROR: -3,
+    Z_MEM_ERROR: -4,
+    Z_BUF_ERROR: -5,
+    Z_VERSION_ERROR: -6,
+    Z_NO_COMPRESSION: 0,
+    Z_BEST_SPEED: 1,
+    Z_BEST_COMPRESSION: 9,
+    Z_DEFAULT_COMPRESSION: -1,
+    Z_FILTERED: 1,
+    Z_HUFFMAN_ONLY: 2,
+    Z_RLE: 3,
+    Z_FIXED: 4,
+    Z_DEFAULT_STRATEGY: 0,
+    DEFLATE: 1,
+    INFLATE: 2,
+    GZIP: 3,
+    GUNZIP: 4,
+    DEFLATERAW: 5,
+    INFLATERAW: 6,
+    UNZIP: 7,
+    BROTLI_DECODE: 8,
+    BROTLI_ENCODE: 9,
+    Z_MIN_WINDOWBITS: 8,
+    Z_MAX_WINDOWBITS: 15,
+    Z_DEFAULT_WINDOWBITS: 15,
+    Z_MIN_CHUNK: 64,
+    Z_MAX_CHUNK: Infinity,
+    Z_DEFAULT_CHUNK: 16384,
+    Z_MIN_MEMLEVEL: 1,
+    Z_MAX_MEMLEVEL: 9,
+    Z_DEFAULT_MEMLEVEL: 8,
+    Z_MIN_LEVEL: -1,
+    Z_MAX_LEVEL: 9,
+    Z_DEFAULT_LEVEL: -1,
+    BROTLI_OPERATION_PROCESS: 0,
+    BROTLI_OPERATION_FLUSH: 1,
+    BROTLI_OPERATION_FINISH: 2,
+    BROTLI_OPERATION_EMIT_METADATA: 3,
+    BROTLI_MODE_GENERIC: 0,
+    BROTLI_MODE_TEXT: 1,
+    BROTLI_MODE_FONT: 2,
+    BROTLI_DEFAULT_MODE: 0,
+    BROTLI_MIN_QUALITY: 0,
+    BROTLI_MAX_QUALITY: 11,
+    BROTLI_DEFAULT_QUALITY: 11,
+    BROTLI_MIN_WINDOW_BITS: 10,
+    BROTLI_MAX_WINDOW_BITS: 24,
+    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+    BROTLI_DEFAULT_WINDOW: 22,
+    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+    BROTLI_PARAM_MODE: 0,
+    BROTLI_PARAM_QUALITY: 1,
+    BROTLI_PARAM_LGWIN: 2,
+    BROTLI_PARAM_LGBLOCK: 3,
+    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+    BROTLI_PARAM_SIZE_HINT: 5,
+    BROTLI_PARAM_LARGE_WINDOW: 6,
+    BROTLI_PARAM_NPOSTFIX: 7,
+    BROTLI_PARAM_NDIRECT: 8,
+    BROTLI_DECODER_RESULT_ERROR: 0,
+    BROTLI_DECODER_RESULT_SUCCESS: 1,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+    BROTLI_DECODER_NO_ERROR: 0,
+    BROTLI_DECODER_SUCCESS: 1,
+    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants));
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js
new file mode 100644
index 0000000000000..f33586a8ab0ec
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js
@@ -0,0 +1,340 @@
+import assert from 'assert';
+import { Buffer } from 'buffer';
+import { Minipass } from 'minipass';
+import * as realZlib from 'zlib';
+import { constants } from './constants.js';
+export { constants } from './constants.js';
+const OriginalBufferConcat = Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+    ? (makeNoOp) => {
+        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+    }
+    : (_) => { };
+const _superWrite = Symbol('_superWrite');
+export class ZlibError extends Error {
+    code;
+    errno;
+    constructor(err) {
+        super('zlib: ' + err.message);
+        this.code = err.code;
+        this.errno = err.errno;
+        /* c8 ignore next */
+        if (!this.code)
+            this.code = 'ZLIB_ERROR';
+        this.message = 'zlib: ' + err.message;
+        Error.captureStackTrace(this, this.constructor);
+    }
+    get name() {
+        return 'ZlibError';
+    }
+}
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _flushFlag = Symbol('flushFlag');
+class ZlibBase extends Minipass {
+    #sawError = false;
+    #ended = false;
+    #flushFlag;
+    #finishFlushFlag;
+    #fullFlushFlag;
+    #handle;
+    #onError;
+    get sawError() {
+        return this.#sawError;
+    }
+    get handle() {
+        return this.#handle;
+    }
+    /* c8 ignore start */
+    get flushFlag() {
+        return this.#flushFlag;
+    }
+    /* c8 ignore stop */
+    constructor(opts, mode) {
+        if (!opts || typeof opts !== 'object')
+            throw new TypeError('invalid options for ZlibBase constructor');
+        //@ts-ignore
+        super(opts);
+        /* c8 ignore start */
+        this.#flushFlag = opts.flush ?? 0;
+        this.#finishFlushFlag = opts.finishFlush ?? 0;
+        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
+        /* c8 ignore stop */
+        // this will throw if any options are invalid for the class selected
+        try {
+            // @types/node doesn't know that it exports the classes, but they're there
+            //@ts-ignore
+            this.#handle = new realZlib[mode](opts);
+        }
+        catch (er) {
+            // make sure that all errors get decorated properly
+            throw new ZlibError(er);
+        }
+        this.#onError = err => {
+            // no sense raising multiple errors, since we abort on the first one.
+            if (this.#sawError)
+                return;
+            this.#sawError = true;
+            // there is no way to cleanly recover.
+            // continuing only obscures problems.
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close);
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            assert(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        assert(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle
+            ._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        passthroughBufferConcat(true);
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            passthroughBufferConcat(false);
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            passthroughBufferConcat(false);
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                ;
+                this.#handle._handle =
+                    nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+                this.#handle.removeAllListeners('error');
+                // make sure OUR error listener is still attached tho
+            }
+        }
+        if (this.#handle)
+            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+        let writeReturn;
+        if (result) {
+            if (Array.isArray(result) && result.length > 0) {
+                const r = result[0];
+                // The first buffer is always `handle._outBuffer`, which would be
+                // re-used for later invocations; so, we always have to copy that one.
+                writeReturn = this[_superWrite](Buffer.from(r));
+                for (let i = 1; i < result.length; i++) {
+                    writeReturn = this[_superWrite](result[i]);
+                }
+            }
+            else {
+                // either a single Buffer or an empty array
+                writeReturn = this[_superWrite](Buffer.from(result));
+            }
+        }
+        if (cb)
+            cb();
+        return writeReturn;
+    }
+}
+export class Zlib extends ZlibBase {
+    #level;
+    #strategy;
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.Z_NO_FLUSH;
+        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
+        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
+        super(opts, mode);
+        this.#level = opts.level;
+        this.#strategy = opts.strategy;
+    }
+    params(level, strategy) {
+        if (this.sawError)
+            return;
+        if (!this.handle)
+            throw new Error('cannot switch params when binding is closed');
+        // no way to test this without also not supporting params at all
+        /* c8 ignore start */
+        if (!this.handle.params)
+            throw new Error('not supported in this implementation');
+        /* c8 ignore stop */
+        if (this.#level !== level || this.#strategy !== strategy) {
+            this.flush(constants.Z_SYNC_FLUSH);
+            assert(this.handle, 'zlib binding closed');
+            // .params() calls .flush(), but the latter is always async in the
+            // core zlib. We override .flush() temporarily to intercept that and
+            // flush synchronously.
+            const origFlush = this.handle.flush;
+            this.handle.flush = (flushFlag, cb) => {
+                /* c8 ignore start */
+                if (typeof flushFlag === 'function') {
+                    cb = flushFlag;
+                    flushFlag = this.flushFlag;
+                }
+                /* c8 ignore stop */
+                this.flush(flushFlag);
+                cb?.();
+            };
+            try {
+                ;
+                this.handle.params(level, strategy);
+            }
+            finally {
+                this.handle.flush = origFlush;
+            }
+            /* c8 ignore start */
+            if (this.handle) {
+                this.#level = level;
+                this.#strategy = strategy;
+            }
+            /* c8 ignore stop */
+        }
+    }
+}
+// minimal 2-byte header
+export class Deflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Deflate');
+    }
+}
+export class Inflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Inflate');
+    }
+}
+export class Gzip extends Zlib {
+    #portable;
+    constructor(opts) {
+        super(opts, 'Gzip');
+        this.#portable = opts && !!opts.portable;
+    }
+    [_superWrite](data) {
+        if (!this.#portable)
+            return super[_superWrite](data);
+        // we'll always get the header emitted in one first chunk
+        // overwrite the OS indicator byte with 0xFF
+        this.#portable = false;
+        data[9] = 255;
+        return super[_superWrite](data);
+    }
+}
+export class Gunzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Gunzip');
+    }
+}
+// raw - no header
+export class DeflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'DeflateRaw');
+    }
+}
+export class InflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'InflateRaw');
+    }
+}
+// auto-detect header.
+export class Unzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Unzip');
+    }
+}
+export class Brotli extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
+        opts.finishFlush =
+            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
+        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
+        super(opts, mode);
+    }
+}
+export class BrotliCompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliCompress');
+    }
+}
+export class BrotliDecompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliDecompress');
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json b/node_modules/make-fetch-happen/node_modules/minizlib/package.json
similarity index 56%
rename from node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json
rename to node_modules/make-fetch-happen/node_modules/minizlib/package.json
index 01fc48ecfd6a9..43cb855e15a5d 100644
--- a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/package.json
+++ b/node_modules/make-fetch-happen/node_modules/minizlib/package.json
@@ -1,14 +1,55 @@
 {
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
-  "name": "minimatch",
-  "description": "a glob matcher in javascript",
-  "version": "9.0.5",
+  "name": "minizlib",
+  "version": "3.0.2",
+  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+  "main": "./dist/commonjs/index.js",
+  "dependencies": {
+    "minipass": "^7.1.2"
+  },
+  "scripts": {
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
   "repository": {
     "type": "git",
-    "url": "git://github.com/isaacs/minimatch.git"
+    "url": "git+https://github.com/isaacs/minizlib.git"
+  },
+  "keywords": [
+    "zlib",
+    "gzip",
+    "gunzip",
+    "deflate",
+    "inflate",
+    "compression",
+    "zip",
+    "unzip"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "MIT",
+  "devDependencies": {
+    "@types/node": "^22.13.14",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.1"
+  },
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": ">= 18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
   },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
   "exports": {
     "./package.json": "./package.json",
     ".": {
@@ -22,25 +63,11 @@
       }
     }
   },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
   "prettier": {
     "semi": false,
-    "printWidth": 80,
+    "printWidth": 75,
     "tabWidth": 2,
     "useTabs": false,
     "singleQuote": true,
@@ -49,34 +76,5 @@
     "arrowParens": "avoid",
     "endOfLine": "lf"
   },
-  "engines": {
-    "node": ">=16 || 14 >=14.17"
-  },
-  "dependencies": {
-    "brace-expansion": "^2.0.1"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.15.11",
-    "@types/tap": "^15.0.8",
-    "eslint-config-prettier": "^8.6.0",
-    "mkdirp": "1",
-    "prettier": "^2.8.2",
-    "tap": "^18.7.2",
-    "ts-node": "^10.9.1",
-    "tshy": "^1.12.0",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "license": "ISC",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "type": "module"
+  "module": "./dist/esm/index.js"
 }
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE b/node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE
new file mode 100644
index 0000000000000..0a034db7a73b5
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE
@@ -0,0 +1,21 @@
+Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
+
+This project is free software released under the MIT license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json
new file mode 100644
index 0000000000000..9d04a66e16cd9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json
@@ -0,0 +1,91 @@
+{
+    "name": "mkdirp",
+    "description": "Recursively mkdir, like `mkdir -p`",
+    "version": "3.0.1",
+    "keywords": [
+        "mkdir",
+        "directory",
+        "make dir",
+        "make",
+        "dir",
+        "recursive",
+        "native"
+    ],
+    "bin": "./dist/cjs/src/bin.js",
+    "main": "./dist/cjs/src/index.js",
+    "module": "./dist/mjs/index.js",
+    "types": "./dist/mjs/index.d.ts",
+    "exports": {
+        ".": {
+            "import": {
+                "types": "./dist/mjs/index.d.ts",
+                "default": "./dist/mjs/index.js"
+            },
+            "require": {
+                "types": "./dist/cjs/src/index.d.ts",
+                "default": "./dist/cjs/src/index.js"
+            }
+        }
+    },
+    "files": [
+        "dist"
+    ],
+    "scripts": {
+        "preversion": "npm test",
+        "postversion": "npm publish",
+        "prepublishOnly": "git push origin --follow-tags",
+        "preprepare": "rm -rf dist",
+        "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+        "postprepare": "bash fixup.sh",
+        "pretest": "npm run prepare",
+        "presnap": "npm run prepare",
+        "test": "c8 tap",
+        "snap": "c8 tap",
+        "format": "prettier --write . --loglevel warn",
+        "benchmark": "node benchmark/index.js",
+        "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+    },
+    "prettier": {
+        "semi": false,
+        "printWidth": 80,
+        "tabWidth": 2,
+        "useTabs": false,
+        "singleQuote": true,
+        "jsxSingleQuote": false,
+        "bracketSameLine": true,
+        "arrowParens": "avoid",
+        "endOfLine": "lf"
+    },
+    "devDependencies": {
+        "@types/brace-expansion": "^1.1.0",
+        "@types/node": "^18.11.9",
+        "@types/tap": "^15.0.7",
+        "c8": "^7.12.0",
+        "eslint-config-prettier": "^8.6.0",
+        "prettier": "^2.8.2",
+        "tap": "^16.3.3",
+        "ts-node": "^10.9.1",
+        "typedoc": "^0.23.21",
+        "typescript": "^4.9.3"
+    },
+    "tap": {
+        "coverage": false,
+        "node-arg": [
+            "--no-warnings",
+            "--loader",
+            "ts-node/esm"
+        ],
+        "ts": false
+    },
+    "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+    },
+    "repository": {
+        "type": "git",
+        "url": "https://github.com/isaacs/node-mkdirp.git"
+    },
+    "license": "MIT",
+    "engines": {
+        "node": ">=10"
+    }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js
new file mode 100755
index 0000000000000..757aae1fd96cb
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js
@@ -0,0 +1,80 @@
+#!/usr/bin/env node
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+const package_json_1 = require("../package.json");
+const usage = () => `
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+  Create each supplied directory including any necessary parent directories
+  that don't yet exist.
+
+  If the directory already exists, do nothing.
+
+OPTIONS are:
+
+  -m       If a directory needs to be created, set the mode as an octal
+  --mode=  permission string.
+
+  -v --version   Print the mkdirp version number
+
+  -h --help      Print this helpful banner
+
+  -p --print     Print the first directories created for each path provided
+
+  --manual       Use manual implementation, even if native is available
+`;
+const dirs = [];
+const opts = {};
+let doPrint = false;
+let dashdash = false;
+let manual = false;
+for (const arg of process.argv.slice(2)) {
+    if (dashdash)
+        dirs.push(arg);
+    else if (arg === '--')
+        dashdash = true;
+    else if (arg === '--manual')
+        manual = true;
+    else if (/^-h/.test(arg) || /^--help/.test(arg)) {
+        console.log(usage());
+        process.exit(0);
+    }
+    else if (arg === '-v' || arg === '--version') {
+        console.log(package_json_1.version);
+        process.exit(0);
+    }
+    else if (arg === '-p' || arg === '--print') {
+        doPrint = true;
+    }
+    else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
+        // these don't get covered in CI, but work locally
+        // weird because the tests below show as passing in the output.
+        /* c8 ignore start */
+        const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
+        if (isNaN(mode)) {
+            console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
+            process.exit(1);
+        }
+        /* c8 ignore stop */
+        opts.mode = mode;
+    }
+    else
+        dirs.push(arg);
+}
+const index_js_1 = require("./index.js");
+const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
+if (dirs.length === 0) {
+    console.error(usage());
+}
+// these don't get covered in CI, but work locally
+/* c8 ignore start */
+Promise.all(dirs.map(dir => impl(dir, opts)))
+    .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
+    .catch(er => {
+    console.error(er.message);
+    if (er.code)
+        console.error('  code: ' + er.code);
+    process.exit(1);
+});
+/* c8 ignore stop */
+//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js
new file mode 100644
index 0000000000000..e831ef27cadc1
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js
@@ -0,0 +1,35 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.findMadeSync = exports.findMade = void 0;
+const path_1 = require("path");
+const findMade = async (opts, parent, path) => {
+    // we never want the 'made' return value to be a root directory
+    if (path === parent) {
+        return;
+    }
+    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
+    // will fail later
+    er => {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
+            : undefined;
+    });
+};
+exports.findMade = findMade;
+const findMadeSync = (opts, parent, path) => {
+    if (path === parent) {
+        return undefined;
+    }
+    try {
+        return opts.statSync(parent).isDirectory() ? path : undefined;
+    }
+    catch (er) {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
+            : undefined;
+    }
+};
+exports.findMadeSync = findMadeSync;
+//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js
new file mode 100644
index 0000000000000..ab9dc62cddda3
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js
@@ -0,0 +1,53 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
+const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
+const mkdirp_native_js_1 = require("./mkdirp-native.js");
+const opts_arg_js_1 = require("./opts-arg.js");
+const path_arg_js_1 = require("./path-arg.js");
+const use_native_js_1 = require("./use-native.js");
+/* c8 ignore start */
+var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
+Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
+Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
+var mkdirp_native_js_2 = require("./mkdirp-native.js");
+Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
+Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
+var use_native_js_2 = require("./use-native.js");
+Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
+Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
+/* c8 ignore stop */
+const mkdirpSync = (path, opts) => {
+    path = (0, path_arg_js_1.pathArg)(path);
+    const resolved = (0, opts_arg_js_1.optsArg)(opts);
+    return (0, use_native_js_1.useNativeSync)(resolved)
+        ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
+        : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
+};
+exports.mkdirpSync = mkdirpSync;
+exports.sync = exports.mkdirpSync;
+exports.manual = mkdirp_manual_js_1.mkdirpManual;
+exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
+exports.native = mkdirp_native_js_1.mkdirpNative;
+exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
+exports.mkdirp = Object.assign(async (path, opts) => {
+    path = (0, path_arg_js_1.pathArg)(path);
+    const resolved = (0, opts_arg_js_1.optsArg)(opts);
+    return (0, use_native_js_1.useNative)(resolved)
+        ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
+        : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
+}, {
+    mkdirpSync: exports.mkdirpSync,
+    mkdirpNative: mkdirp_native_js_1.mkdirpNative,
+    mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
+    mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
+    mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
+    sync: exports.mkdirpSync,
+    native: mkdirp_native_js_1.mkdirpNative,
+    nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
+    manual: mkdirp_manual_js_1.mkdirpManual,
+    manualSync: mkdirp_manual_js_1.mkdirpManualSync,
+    useNative: use_native_js_1.useNative,
+    useNativeSync: use_native_js_1.useNativeSync,
+});
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
new file mode 100644
index 0000000000000..d9bd1d8bb5a49
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
@@ -0,0 +1,79 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirpManual = exports.mkdirpManualSync = void 0;
+const path_1 = require("path");
+const opts_arg_js_1 = require("./opts-arg.js");
+const mkdirpManualSync = (path, options, made) => {
+    const parent = (0, path_1.dirname)(path);
+    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
+    if (parent === path) {
+        try {
+            return opts.mkdirSync(path, opts);
+        }
+        catch (er) {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+            return;
+        }
+    }
+    try {
+        opts.mkdirSync(path, opts);
+        return made || path;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
+            throw er;
+        }
+        try {
+            if (!opts.statSync(path).isDirectory())
+                throw er;
+        }
+        catch (_) {
+            throw er;
+        }
+    }
+};
+exports.mkdirpManualSync = mkdirpManualSync;
+exports.mkdirpManual = Object.assign(async (path, options, made) => {
+    const opts = (0, opts_arg_js_1.optsArg)(options);
+    opts.recursive = false;
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return opts.mkdirAsync(path, opts).catch(er => {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+        });
+    }
+    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
+            throw er;
+        }
+        return opts.statAsync(path).then(st => {
+            if (st.isDirectory()) {
+                return made;
+            }
+            else {
+                throw er;
+            }
+        }, () => {
+            throw er;
+        });
+    });
+}, { sync: exports.mkdirpManualSync });
+//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
new file mode 100644
index 0000000000000..9f00567d7cc20
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
+const path_1 = require("path");
+const find_made_js_1 = require("./find-made.js");
+const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
+const opts_arg_js_1 = require("./opts-arg.js");
+const mkdirpNativeSync = (path, options) => {
+    const opts = (0, opts_arg_js_1.optsArg)(options);
+    opts.recursive = true;
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return opts.mkdirSync(path, opts);
+    }
+    const made = (0, find_made_js_1.findMadeSync)(opts, path);
+    try {
+        opts.mkdirSync(path, opts);
+        return made;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }
+};
+exports.mkdirpNativeSync = mkdirpNativeSync;
+exports.mkdirpNative = Object.assign(async (path, options) => {
+    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return await opts.mkdirAsync(path, opts);
+    }
+    return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
+        .mkdirAsync(path, opts)
+        .then(m => made || m)
+        .catch(er => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }));
+}, { sync: exports.mkdirpNativeSync });
+//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js
new file mode 100644
index 0000000000000..e8f486c090595
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js
@@ -0,0 +1,38 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.optsArg = void 0;
+const fs_1 = require("fs");
+const optsArg = (opts) => {
+    if (!opts) {
+        opts = { mode: 0o777 };
+    }
+    else if (typeof opts === 'object') {
+        opts = { mode: 0o777, ...opts };
+    }
+    else if (typeof opts === 'number') {
+        opts = { mode: opts };
+    }
+    else if (typeof opts === 'string') {
+        opts = { mode: parseInt(opts, 8) };
+    }
+    else {
+        throw new TypeError('invalid options argument');
+    }
+    const resolved = opts;
+    const optsFs = opts.fs || {};
+    opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
+    opts.mkdirAsync = opts.mkdirAsync
+        ? opts.mkdirAsync
+        : async (path, options) => {
+            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
+        };
+    opts.stat = opts.stat || optsFs.stat || fs_1.stat;
+    opts.statAsync = opts.statAsync
+        ? opts.statAsync
+        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
+    opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
+    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
+    return resolved;
+};
+exports.optsArg = optsArg;
+//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js
new file mode 100644
index 0000000000000..a6b457f6e23d5
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js
@@ -0,0 +1,28 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.pathArg = void 0;
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
+const path_1 = require("path");
+const pathArg = (path) => {
+    if (/\0/.test(path)) {
+        // simulate same failure that node raises
+        throw Object.assign(new TypeError('path must be a string without null bytes'), {
+            path,
+            code: 'ERR_INVALID_ARG_VALUE',
+        });
+    }
+    path = (0, path_1.resolve)(path);
+    if (platform === 'win32') {
+        const badWinChars = /[*|"<>?:]/;
+        const { root } = (0, path_1.parse)(path);
+        if (badWinChars.test(path.substring(root.length))) {
+            throw Object.assign(new Error('Illegal characters in path.'), {
+                path,
+                code: 'EINVAL',
+            });
+        }
+    }
+    return path;
+};
+exports.pathArg = pathArg;
+//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js
new file mode 100644
index 0000000000000..550b3452688ee
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.useNative = exports.useNativeSync = void 0;
+const fs_1 = require("fs");
+const opts_arg_js_1 = require("./opts-arg.js");
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
+const versArr = version.replace(/^v/, '').split('.');
+const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
+exports.useNativeSync = !hasNative
+    ? () => false
+    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
+exports.useNative = Object.assign(!hasNative
+    ? () => false
+    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
+    sync: exports.useNativeSync,
+});
+//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js
new file mode 100644
index 0000000000000..3e72fd59a2c1f
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js
@@ -0,0 +1,30 @@
+import { dirname } from 'path';
+export const findMade = async (opts, parent, path) => {
+    // we never want the 'made' return value to be a root directory
+    if (path === parent) {
+        return;
+    }
+    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
+    er => {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? findMade(opts, dirname(parent), parent)
+            : undefined;
+    });
+};
+export const findMadeSync = (opts, parent, path) => {
+    if (path === parent) {
+        return undefined;
+    }
+    try {
+        return opts.statSync(parent).isDirectory() ? path : undefined;
+    }
+    catch (er) {
+        const fer = er;
+        return fer && fer.code === 'ENOENT'
+            ? findMadeSync(opts, dirname(parent), parent)
+            : undefined;
+    }
+};
+//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js
new file mode 100644
index 0000000000000..0217ecc8cdd83
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js
@@ -0,0 +1,43 @@
+import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
+import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
+import { optsArg } from './opts-arg.js';
+import { pathArg } from './path-arg.js';
+import { useNative, useNativeSync } from './use-native.js';
+/* c8 ignore start */
+export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
+export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
+export { useNative, useNativeSync } from './use-native.js';
+/* c8 ignore stop */
+export const mkdirpSync = (path, opts) => {
+    path = pathArg(path);
+    const resolved = optsArg(opts);
+    return useNativeSync(resolved)
+        ? mkdirpNativeSync(path, resolved)
+        : mkdirpManualSync(path, resolved);
+};
+export const sync = mkdirpSync;
+export const manual = mkdirpManual;
+export const manualSync = mkdirpManualSync;
+export const native = mkdirpNative;
+export const nativeSync = mkdirpNativeSync;
+export const mkdirp = Object.assign(async (path, opts) => {
+    path = pathArg(path);
+    const resolved = optsArg(opts);
+    return useNative(resolved)
+        ? mkdirpNative(path, resolved)
+        : mkdirpManual(path, resolved);
+}, {
+    mkdirpSync,
+    mkdirpNative,
+    mkdirpNativeSync,
+    mkdirpManual,
+    mkdirpManualSync,
+    sync: mkdirpSync,
+    native: mkdirpNative,
+    nativeSync: mkdirpNativeSync,
+    manual: mkdirpManual,
+    manualSync: mkdirpManualSync,
+    useNative,
+    useNativeSync,
+});
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
new file mode 100644
index 0000000000000..a4d044e02d3bf
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
@@ -0,0 +1,75 @@
+import { dirname } from 'path';
+import { optsArg } from './opts-arg.js';
+export const mkdirpManualSync = (path, options, made) => {
+    const parent = dirname(path);
+    const opts = { ...optsArg(options), recursive: false };
+    if (parent === path) {
+        try {
+            return opts.mkdirSync(path, opts);
+        }
+        catch (er) {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+            return;
+        }
+    }
+    try {
+        opts.mkdirSync(path, opts);
+        return made || path;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
+            throw er;
+        }
+        try {
+            if (!opts.statSync(path).isDirectory())
+                throw er;
+        }
+        catch (_) {
+            throw er;
+        }
+    }
+};
+export const mkdirpManual = Object.assign(async (path, options, made) => {
+    const opts = optsArg(options);
+    opts.recursive = false;
+    const parent = dirname(path);
+    if (parent === path) {
+        return opts.mkdirAsync(path, opts).catch(er => {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+        });
+    }
+    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
+            throw er;
+        }
+        return opts.statAsync(path).then(st => {
+            if (st.isDirectory()) {
+                return made;
+            }
+            else {
+                throw er;
+            }
+        }, () => {
+            throw er;
+        });
+    });
+}, { sync: mkdirpManualSync });
+//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js
new file mode 100644
index 0000000000000..99d10a5425dad
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js
@@ -0,0 +1,46 @@
+import { dirname } from 'path';
+import { findMade, findMadeSync } from './find-made.js';
+import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
+import { optsArg } from './opts-arg.js';
+export const mkdirpNativeSync = (path, options) => {
+    const opts = optsArg(options);
+    opts.recursive = true;
+    const parent = dirname(path);
+    if (parent === path) {
+        return opts.mkdirSync(path, opts);
+    }
+    const made = findMadeSync(opts, path);
+    try {
+        opts.mkdirSync(path, opts);
+        return made;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManualSync(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }
+};
+export const mkdirpNative = Object.assign(async (path, options) => {
+    const opts = { ...optsArg(options), recursive: true };
+    const parent = dirname(path);
+    if (parent === path) {
+        return await opts.mkdirAsync(path, opts);
+    }
+    return findMade(opts, path).then((made) => opts
+        .mkdirAsync(path, opts)
+        .then(m => made || m)
+        .catch(er => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return mkdirpManual(path, opts);
+        }
+        else {
+            throw er;
+        }
+    }));
+}, { sync: mkdirpNativeSync });
+//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js
new file mode 100644
index 0000000000000..d47e2927fee4c
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js
@@ -0,0 +1,34 @@
+import { mkdir, mkdirSync, stat, statSync, } from 'fs';
+export const optsArg = (opts) => {
+    if (!opts) {
+        opts = { mode: 0o777 };
+    }
+    else if (typeof opts === 'object') {
+        opts = { mode: 0o777, ...opts };
+    }
+    else if (typeof opts === 'number') {
+        opts = { mode: opts };
+    }
+    else if (typeof opts === 'string') {
+        opts = { mode: parseInt(opts, 8) };
+    }
+    else {
+        throw new TypeError('invalid options argument');
+    }
+    const resolved = opts;
+    const optsFs = opts.fs || {};
+    opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
+    opts.mkdirAsync = opts.mkdirAsync
+        ? opts.mkdirAsync
+        : async (path, options) => {
+            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
+        };
+    opts.stat = opts.stat || optsFs.stat || stat;
+    opts.statAsync = opts.statAsync
+        ? opts.statAsync
+        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
+    opts.statSync = opts.statSync || optsFs.statSync || statSync;
+    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
+    return resolved;
+};
+//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js
new file mode 100644
index 0000000000000..03539cc5a94f9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js
@@ -0,0 +1,24 @@
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
+import { parse, resolve } from 'path';
+export const pathArg = (path) => {
+    if (/\0/.test(path)) {
+        // simulate same failure that node raises
+        throw Object.assign(new TypeError('path must be a string without null bytes'), {
+            path,
+            code: 'ERR_INVALID_ARG_VALUE',
+        });
+    }
+    path = resolve(path);
+    if (platform === 'win32') {
+        const badWinChars = /[*|"<>?:]/;
+        const { root } = parse(path);
+        if (badWinChars.test(path.substring(root.length))) {
+            throw Object.assign(new Error('Illegal characters in path.'), {
+                path,
+                code: 'EINVAL',
+            });
+        }
+    }
+    return path;
+};
+//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js
new file mode 100644
index 0000000000000..ad2093867eb74
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js
@@ -0,0 +1,14 @@
+import { mkdir, mkdirSync } from 'fs';
+import { optsArg } from './opts-arg.js';
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
+const versArr = version.replace(/^v/, '').split('.');
+const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
+export const useNativeSync = !hasNative
+    ? () => false
+    : (opts) => optsArg(opts).mkdirSync === mkdirSync;
+export const useNative = Object.assign(!hasNative
+    ? () => false
+    : (opts) => optsArg(opts).mkdir === mkdir, {
+    sync: useNativeSync,
+});
+//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/package.json b/node_modules/make-fetch-happen/node_modules/mkdirp/package.json
new file mode 100644
index 0000000000000..f31ac3314d6f6
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/mkdirp/package.json
@@ -0,0 +1,91 @@
+{
+  "name": "mkdirp",
+  "description": "Recursively mkdir, like `mkdir -p`",
+  "version": "3.0.1",
+  "keywords": [
+    "mkdir",
+    "directory",
+    "make dir",
+    "make",
+    "dir",
+    "recursive",
+    "native"
+  ],
+  "bin": "./dist/cjs/src/bin.js",
+  "main": "./dist/cjs/src/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/mjs/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/src/index.d.ts",
+        "default": "./dist/cjs/src/index.js"
+      }
+    }
+  },
+  "files": [
+    "dist"
+  ],
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "devDependencies": {
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.11.9",
+    "@types/tap": "^15.0.7",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
+    "prettier": "^2.8.2",
+    "tap": "^16.3.3",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/isaacs/node-mkdirp.git"
+  },
+  "license": "MIT",
+  "engines": {
+    "node": ">=10"
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/LICENSE b/node_modules/make-fetch-happen/node_modules/tar/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js
new file mode 100644
index 0000000000000..3190afc48318f
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js
@@ -0,0 +1,83 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.create = void 0;
+const fs_minipass_1 = require("@isaacs/fs-minipass");
+const node_path_1 = __importDefault(require("node:path"));
+const list_js_1 = require("./list.js");
+const make_command_js_1 = require("./make-command.js");
+const pack_js_1 = require("./pack.js");
+const createFileSync = (opt, files) => {
+    const p = new pack_js_1.PackSync(opt);
+    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const createFile = (opt, files) => {
+    const p = new pack_js_1.Pack(opt);
+    const stream = new fs_minipass_1.WriteStream(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    const promise = new Promise((res, rej) => {
+        stream.on('error', rej);
+        stream.on('close', res);
+        p.on('error', rej);
+    });
+    addFilesAsync(p, files);
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            (0, list_js_1.list)({
+                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await (0, list_js_1.list)({
+                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => {
+                    p.add(entry);
+                },
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+const createSync = (opt, files) => {
+    const p = new pack_js_1.PackSync(opt);
+    addFilesSync(p, files);
+    return p;
+};
+const createAsync = (opt, files) => {
+    const p = new pack_js_1.Pack(opt);
+    addFilesAsync(p, files);
+    return p;
+};
+exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
+    if (!files?.length) {
+        throw new TypeError('no paths specified to add to archive');
+    }
+});
+//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js
new file mode 100644
index 0000000000000..d703a7772be3a
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CwdError = void 0;
+class CwdError extends Error {
+    path;
+    code;
+    syscall = 'chdir';
+    constructor(path, code) {
+        super(`${code}: Cannot cd into '${path}'`);
+        this.path = path;
+        this.code = code;
+    }
+    get name() {
+        return 'CwdError';
+    }
+}
+exports.CwdError = CwdError;
+//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js
new file mode 100644
index 0000000000000..f848cbcbf779e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js
@@ -0,0 +1,78 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.extract = void 0;
+// tar -x
+const fsm = __importStar(require("@isaacs/fs-minipass"));
+const node_fs_1 = __importDefault(require("node:fs"));
+const list_js_1 = require("./list.js");
+const make_command_js_1 = require("./make-command.js");
+const unpack_js_1 = require("./unpack.js");
+const extractFileSync = (opt) => {
+    const u = new unpack_js_1.UnpackSync(opt);
+    const file = opt.file;
+    const stat = node_fs_1.default.statSync(file);
+    // This trades a zero-byte read() syscall for a stat
+    // However, it will usually result in less memory allocation
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const stream = new fsm.ReadStreamSync(file, {
+        readSize: readSize,
+        size: stat.size,
+    });
+    stream.pipe(u);
+};
+const extractFile = (opt, _) => {
+    const u = new unpack_js_1.Unpack(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        u.on('error', reject);
+        u.on('close', resolve);
+        // This trades a zero-byte read() syscall for a stat
+        // However, it will usually result in less memory allocation
+        node_fs_1.default.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(u);
+            }
+        });
+    });
+    return p;
+};
+exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
+    if (files?.length)
+        (0, list_js_1.filesFilter)(opt, files);
+});
+//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js
new file mode 100644
index 0000000000000..94add8f6b2231
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js
@@ -0,0 +1,29 @@
+"use strict";
+// Get the appropriate flag to use for creating files
+// We use fmap on Windows platforms for files less than
+// 512kb.  This is a fairly low limit, but avoids making
+// things slower in some cases.  Since most of what this
+// library is used for is extracting tarballs of many
+// relatively small files in npm packages and the like,
+// it can be a big boost on Windows platforms.
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getWriteFlag = void 0;
+const fs_1 = __importDefault(require("fs"));
+const platform = process.env.__FAKE_PLATFORM__ || process.platform;
+const isWindows = platform === 'win32';
+/* c8 ignore start */
+const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
+const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
+    fs_1.default.constants.UV_FS_O_FILEMAP ||
+    0;
+/* c8 ignore stop */
+const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
+const fMapLimit = 512 * 1024;
+const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
+exports.getWriteFlag = !fMapEnabled ?
+    () => 'w'
+    : (size) => (size < fMapLimit ? fMapFlag : 'w');
+//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js
new file mode 100644
index 0000000000000..b3a48037b849a
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js
@@ -0,0 +1,306 @@
+"use strict";
+// parse a 512-byte header block to a data object, or vice-versa
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Header = void 0;
+const node_path_1 = require("node:path");
+const large = __importStar(require("./large-numbers.js"));
+const types = __importStar(require("./types.js"));
+class Header {
+    cksumValid = false;
+    needPax = false;
+    nullBlock = false;
+    block;
+    path;
+    mode;
+    uid;
+    gid;
+    size;
+    cksum;
+    #type = 'Unsupported';
+    linkpath;
+    uname;
+    gname;
+    devmaj = 0;
+    devmin = 0;
+    atime;
+    ctime;
+    mtime;
+    charset;
+    comment;
+    constructor(data, off = 0, ex, gex) {
+        if (Buffer.isBuffer(data)) {
+            this.decode(data, off || 0, ex, gex);
+        }
+        else if (data) {
+            this.#slurp(data);
+        }
+    }
+    decode(buf, off, ex, gex) {
+        if (!off) {
+            off = 0;
+        }
+        if (!buf || !(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        this.path = decString(buf, off, 100);
+        this.mode = decNumber(buf, off + 100, 8);
+        this.uid = decNumber(buf, off + 108, 8);
+        this.gid = decNumber(buf, off + 116, 8);
+        this.size = decNumber(buf, off + 124, 12);
+        this.mtime = decDate(buf, off + 136, 12);
+        this.cksum = decNumber(buf, off + 148, 12);
+        // if we have extended or global extended headers, apply them now
+        // See https://github.com/npm/node-tar/pull/187
+        // Apply global before local, so it overrides
+        if (gex)
+            this.#slurp(gex, true);
+        if (ex)
+            this.#slurp(ex);
+        // old tar versions marked dirs as a file with a trailing /
+        const t = decString(buf, off + 156, 1);
+        if (types.isCode(t)) {
+            this.#type = t || '0';
+        }
+        if (this.#type === '0' && this.path.slice(-1) === '/') {
+            this.#type = '5';
+        }
+        // tar implementations sometimes incorrectly put the stat(dir).size
+        // as the size in the tarball, even though Directory entries are
+        // not able to have any body at all.  In the very rare chance that
+        // it actually DOES have a body, we weren't going to do anything with
+        // it anyway, and it'll just be a warning about an invalid header.
+        if (this.#type === '5') {
+            this.size = 0;
+        }
+        this.linkpath = decString(buf, off + 157, 100);
+        if (buf.subarray(off + 257, off + 265).toString() ===
+            'ustar\u000000') {
+            this.uname = decString(buf, off + 265, 32);
+            this.gname = decString(buf, off + 297, 32);
+            /* c8 ignore start */
+            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
+            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+            /* c8 ignore stop */
+            if (buf[off + 475] !== 0) {
+                // definitely a prefix, definitely >130 chars.
+                const prefix = decString(buf, off + 345, 155);
+                this.path = prefix + '/' + this.path;
+            }
+            else {
+                const prefix = decString(buf, off + 345, 130);
+                if (prefix) {
+                    this.path = prefix + '/' + this.path;
+                }
+                this.atime = decDate(buf, off + 476, 12);
+                this.ctime = decDate(buf, off + 488, 12);
+            }
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksumValid = sum === this.cksum;
+        if (this.cksum === undefined && sum === 8 * 0x20) {
+            this.nullBlock = true;
+        }
+    }
+    #slurp(ex, gex = false) {
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex) ||
+                (k === 'linkpath' && gex) ||
+                k === 'global');
+        })));
+    }
+    encode(buf, off = 0) {
+        if (!buf) {
+            buf = this.block = Buffer.alloc(512);
+        }
+        if (this.#type === 'Unsupported') {
+            this.#type = '0';
+        }
+        if (!(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        const prefixSize = this.ctime || this.atime ? 130 : 155;
+        const split = splitPrefix(this.path || '', prefixSize);
+        const path = split[0];
+        const prefix = split[1];
+        this.needPax = !!split[2];
+        this.needPax = encString(buf, off, 100, path) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 124, 12, this.size) || this.needPax;
+        this.needPax =
+            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
+        buf[off + 156] = this.#type.charCodeAt(0);
+        this.needPax =
+            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
+        buf.write('ustar\u000000', off + 257, 8);
+        this.needPax =
+            encString(buf, off + 265, 32, this.uname) || this.needPax;
+        this.needPax =
+            encString(buf, off + 297, 32, this.gname) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
+        this.needPax =
+            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
+        if (buf[off + 475] !== 0) {
+            this.needPax =
+                encString(buf, off + 345, 155, prefix) || this.needPax;
+        }
+        else {
+            this.needPax =
+                encString(buf, off + 345, 130, prefix) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 476, 12, this.atime) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksum = sum;
+        encNumber(buf, off + 148, 8, this.cksum);
+        this.cksumValid = true;
+        return this.needPax;
+    }
+    get type() {
+        return (this.#type === 'Unsupported' ?
+            this.#type
+            : types.name.get(this.#type));
+    }
+    get typeKey() {
+        return this.#type;
+    }
+    set type(type) {
+        const c = String(types.code.get(type));
+        if (types.isCode(c) || c === 'Unsupported') {
+            this.#type = c;
+        }
+        else if (types.isCode(type)) {
+            this.#type = type;
+        }
+        else {
+            throw new TypeError('invalid entry type: ' + type);
+        }
+    }
+}
+exports.Header = Header;
+const splitPrefix = (p, prefixSize) => {
+    const pathSize = 100;
+    let pp = p;
+    let prefix = '';
+    let ret = undefined;
+    const root = node_path_1.posix.parse(p).root || '.';
+    if (Buffer.byteLength(pp) < pathSize) {
+        ret = [pp, prefix, false];
+    }
+    else {
+        // first set prefix to the dir, and path to the base
+        prefix = node_path_1.posix.dirname(pp);
+        pp = node_path_1.posix.basename(pp);
+        do {
+            if (Buffer.byteLength(pp) <= pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // both fit!
+                ret = [pp, prefix, false];
+            }
+            else if (Buffer.byteLength(pp) > pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // prefix fits in prefix, but path doesn't fit in path
+                ret = [pp.slice(0, pathSize - 1), prefix, true];
+            }
+            else {
+                // make path take a bit from prefix
+                pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp);
+                prefix = node_path_1.posix.dirname(prefix);
+            }
+        } while (prefix !== root && ret === undefined);
+        // at this point, found no resolution, just truncate
+        if (!ret) {
+            ret = [p.slice(0, pathSize - 1), '', true];
+        }
+    }
+    return ret;
+};
+const decString = (buf, off, size) => buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*/, '');
+const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
+const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
+const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
+    large.parse(buf.subarray(off, off + size))
+    : decSmallNumber(buf, off, size);
+const nanUndef = (value) => (isNaN(value) ? undefined : value);
+const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*$/, '')
+    .trim(), 8));
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+    12: 0o77777777777,
+    8: 0o7777777,
+};
+const encNumber = (buf, off, size, num) => num === undefined ? false
+    : num > MAXNUM[size] || num < 0 ?
+        (large.encode(num, buf.subarray(off, off + size)), true)
+        : (encSmallNumber(buf, off, size, num), false);
+const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
+const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
+const padOctal = (str, size) => (str.length === size - 1 ?
+    str
+    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
+const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0');
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
+    str.length !== Buffer.byteLength(str) || str.length > size));
+//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js
new file mode 100644
index 0000000000000..e93ed5ad54aa6
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js
@@ -0,0 +1,54 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
+__exportStar(require("./create.js"), exports);
+var create_js_1 = require("./create.js");
+Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } });
+__exportStar(require("./extract.js"), exports);
+var extract_js_1 = require("./extract.js");
+Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } });
+__exportStar(require("./header.js"), exports);
+__exportStar(require("./list.js"), exports);
+var list_js_1 = require("./list.js");
+Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } });
+// classes
+__exportStar(require("./pack.js"), exports);
+__exportStar(require("./parse.js"), exports);
+__exportStar(require("./pax.js"), exports);
+__exportStar(require("./read-entry.js"), exports);
+__exportStar(require("./replace.js"), exports);
+var replace_js_1 = require("./replace.js");
+Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } });
+exports.types = __importStar(require("./types.js"));
+__exportStar(require("./unpack.js"), exports);
+__exportStar(require("./update.js"), exports);
+var update_js_1 = require("./update.js");
+Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } });
+__exportStar(require("./write-entry.js"), exports);
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js
new file mode 100644
index 0000000000000..5b07aa7f71b48
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js
@@ -0,0 +1,99 @@
+"use strict";
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parse = exports.encode = void 0;
+const encode = (num, buf) => {
+    if (!Number.isSafeInteger(num)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('cannot encode number outside of javascript safe integer range');
+    }
+    else if (num < 0) {
+        encodeNegative(num, buf);
+    }
+    else {
+        encodePositive(num, buf);
+    }
+    return buf;
+};
+exports.encode = encode;
+const encodePositive = (num, buf) => {
+    buf[0] = 0x80;
+    for (var i = buf.length; i > 1; i--) {
+        buf[i - 1] = num & 0xff;
+        num = Math.floor(num / 0x100);
+    }
+};
+const encodeNegative = (num, buf) => {
+    buf[0] = 0xff;
+    var flipped = false;
+    num = num * -1;
+    for (var i = buf.length; i > 1; i--) {
+        var byte = num & 0xff;
+        num = Math.floor(num / 0x100);
+        if (flipped) {
+            buf[i - 1] = onesComp(byte);
+        }
+        else if (byte === 0) {
+            buf[i - 1] = 0;
+        }
+        else {
+            flipped = true;
+            buf[i - 1] = twosComp(byte);
+        }
+    }
+};
+const parse = (buf) => {
+    const pre = buf[0];
+    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
+        : pre === 0xff ? twos(buf)
+            : null;
+    if (value === null) {
+        throw Error('invalid base256 encoding');
+    }
+    if (!Number.isSafeInteger(value)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('parsed number outside of javascript safe integer range');
+    }
+    return value;
+};
+exports.parse = parse;
+const twos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    var flipped = false;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        var f;
+        if (flipped) {
+            f = onesComp(byte);
+        }
+        else if (byte === 0) {
+            f = byte;
+        }
+        else {
+            flipped = true;
+            f = twosComp(byte);
+        }
+        if (f !== 0) {
+            sum -= f * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const pos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        if (byte !== 0) {
+            sum += byte * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const onesComp = (byte) => (0xff ^ byte) & 0xff;
+const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
+//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js
new file mode 100644
index 0000000000000..3cd34bb4bad48
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js
@@ -0,0 +1,136 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.list = exports.filesFilter = void 0;
+// tar -t
+const fsm = __importStar(require("@isaacs/fs-minipass"));
+const node_fs_1 = __importDefault(require("node:fs"));
+const path_1 = require("path");
+const make_command_js_1 = require("./make-command.js");
+const parse_js_1 = require("./parse.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const onReadEntryFunction = (opt) => {
+    const onReadEntry = opt.onReadEntry;
+    opt.onReadEntry =
+        onReadEntry ?
+            e => {
+                onReadEntry(e);
+                e.resume();
+            }
+            : e => e.resume();
+};
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+const filesFilter = (opt, files) => {
+    const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true]));
+    const filter = opt.filter;
+    const mapHas = (file, r = '') => {
+        const root = r || (0, path_1.parse)(file).root || '.';
+        let ret;
+        if (file === root)
+            ret = false;
+        else {
+            const m = map.get(file);
+            if (m !== undefined) {
+                ret = m;
+            }
+            else {
+                ret = mapHas((0, path_1.dirname)(file), root);
+            }
+        }
+        map.set(file, ret);
+        return ret;
+    };
+    opt.filter =
+        filter ?
+            (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file))
+            : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file));
+};
+exports.filesFilter = filesFilter;
+const listFileSync = (opt) => {
+    const p = new parse_js_1.Parser(opt);
+    const file = opt.file;
+    let fd;
+    try {
+        const stat = node_fs_1.default.statSync(file);
+        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+        if (stat.size < readSize) {
+            p.end(node_fs_1.default.readFileSync(file));
+        }
+        else {
+            let pos = 0;
+            const buf = Buffer.allocUnsafe(readSize);
+            fd = node_fs_1.default.openSync(file, 'r');
+            while (pos < stat.size) {
+                const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
+                pos += bytesRead;
+                p.write(buf.subarray(0, bytesRead));
+            }
+            p.end();
+        }
+    }
+    finally {
+        if (typeof fd === 'number') {
+            try {
+                node_fs_1.default.closeSync(fd);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+    }
+};
+const listFile = (opt, _files) => {
+    const parse = new parse_js_1.Parser(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        parse.on('error', reject);
+        parse.on('end', resolve);
+        node_fs_1.default.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(parse);
+            }
+        });
+    });
+    return p;
+};
+exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => {
+    if (files?.length)
+        (0, exports.filesFilter)(opt, files);
+    if (!opt.noResume)
+        onReadEntryFunction(opt);
+});
+//# sourceMappingURL=list.js.map
\ No newline at end of file
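A hedged usage sketch of the list command this file implements, going through the package's public entry point rather than this vendored path (the archive name is illustrative):

    const tar = require('tar')
    tar.list({
      file: 'archive.tgz',
      onReadEntry: entry => console.log(entry.path), // entries are resumed automatically unless noResume is set
    }).then(() => console.log('listing complete'))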
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js
new file mode 100644
index 0000000000000..1814319e78bc6
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js
@@ -0,0 +1,61 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.makeCommand = void 0;
+const options_js_1 = require("./options.js");
+const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
+    return Object.assign((opt_ = [], entries, cb) => {
+        if (Array.isArray(opt_)) {
+            entries = opt_;
+            opt_ = {};
+        }
+        if (typeof entries === 'function') {
+            cb = entries;
+            entries = undefined;
+        }
+        if (!entries) {
+            entries = [];
+        }
+        else {
+            entries = Array.from(entries);
+        }
+        const opt = (0, options_js_1.dealias)(opt_);
+        validate?.(opt, entries);
+        if ((0, options_js_1.isSyncFile)(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncFile(opt, entries);
+        }
+        else if ((0, options_js_1.isAsyncFile)(opt)) {
+            const p = asyncFile(opt, entries);
+            // weirdness to make TS happy
+            const c = cb ? cb : undefined;
+            return c ? p.then(() => c(), c) : p;
+        }
+        else if ((0, options_js_1.isSyncNoFile)(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncNoFile(opt, entries);
+        }
+        else if ((0, options_js_1.isAsyncNoFile)(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback only supported with file option');
+            }
+            return asyncNoFile(opt, entries);
+            /* c8 ignore start */
+        }
+        else {
+            throw new Error('impossible options??');
+        }
+        /* c8 ignore stop */
+    }, {
+        syncFile,
+        asyncFile,
+        syncNoFile,
+        asyncNoFile,
+        validate,
+    });
+};
+exports.makeCommand = makeCommand;
+//# sourceMappingURL=make-command.js.map
\ No newline at end of file
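make-command.js is the dispatcher behind each top-level tar verb: the sync and file options select one of the four handlers. A minimal sketch with placeholder handlers, just to show the dispatch shape (not how tar itself wires them up):

    const { makeCommand } = require('./make-command.js')
    const demo = makeCommand(
      opt => 'syncFile',              // sync: true with a file: runs to completion
      async opt => 'asyncFile',       // file without sync: returns a Promise (or calls cb)
      opt => 'syncNoFile',            // sync: true, no file: e.g. a synchronous stream
      opt => 'asyncNoFile',           // neither: e.g. an async stream
      (opt, entries) => {},           // optional validate hook, run before dispatch
    )
    console.log(demo({ sync: true, file: 'a.tar' }))  // 'syncFile'
    demo({ file: 'a.tar' }).then(console.log)         // 'asyncFile'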
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js
new file mode 100644
index 0000000000000..2b13ecbab6723
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js
@@ -0,0 +1,209 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.mkdirSync = exports.mkdir = void 0;
+const chownr_1 = require("chownr");
+const fs_1 = __importDefault(require("fs"));
+const mkdirp_1 = require("mkdirp");
+const node_path_1 = __importDefault(require("node:path"));
+const cwd_error_js_1 = require("./cwd-error.js");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+const symlink_error_js_1 = require("./symlink-error.js");
+const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
+const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
+const checkCwd = (dir, cb) => {
+    fs_1.default.stat(dir, (er, st) => {
+        if (er || !st.isDirectory()) {
+            er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
+        }
+        cb(er);
+    });
+};
+/**
+ * Wrapper around mkdirp for tar's needs.
+ *
+ * The main purpose is to avoid creating directories if we know that
+ * they already exist (and track which ones exist for this purpose),
+ * and prevent entries from being extracted into symlinked folders,
+ * if `preservePaths` is not set.
+ */
+const mkdir = (dir, opt, cb) => {
+    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o0700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
+    const done = (er, created) => {
+        if (er) {
+            cb(er);
+        }
+        else {
+            cSet(cache, dir, true);
+            if (created && doChown) {
+                (0, chownr_1.chownr)(created, uid, gid, er => done(er));
+            }
+            else if (needChmod) {
+                fs_1.default.chmod(dir, mode, cb);
+            }
+            else {
+                cb();
+            }
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        return checkCwd(dir, done);
+    }
+    if (preserve) {
+        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
+        done);
+    }
+    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
+    const parts = sub.split('/');
+    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
+};
+exports.mkdir = mkdir;
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+    if (!parts.length) {
+        return cb(null, created);
+    }
+    const p = parts.shift();
+    const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
+    if (cGet(cache, part)) {
+        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+};
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
+    if (er) {
+        fs_1.default.lstat(part, (statEr, st) => {
+            if (statEr) {
+                statEr.path =
+                    statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
+                cb(statEr);
+            }
+            else if (st.isDirectory()) {
+                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+            }
+            else if (unlink) {
+                fs_1.default.unlink(part, er => {
+                    if (er) {
+                        return cb(er);
+                    }
+                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+                });
+            }
+            else if (st.isSymbolicLink()) {
+                return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
+            }
+            else {
+                cb(er);
+            }
+        });
+    }
+    else {
+        created = created || part;
+        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+};
+const checkCwdSync = (dir) => {
+    let ok = false;
+    let code = undefined;
+    try {
+        ok = fs_1.default.statSync(dir).isDirectory();
+    }
+    catch (er) {
+        code = er?.code;
+    }
+    finally {
+        if (!ok) {
+            throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
+        }
+    }
+};
+const mkdirSync = (dir, opt) => {
+    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
+    const done = (created) => {
+        cSet(cache, dir, true);
+        if (created && doChown) {
+            (0, chownr_1.chownrSync)(created, uid, gid);
+        }
+        if (needChmod) {
+            fs_1.default.chmodSync(dir, mode);
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        checkCwdSync(cwd);
+        return done();
+    }
+    if (preserve) {
+        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
+    }
+    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
+    const parts = sub.split('/');
+    let created = undefined;
+    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
+        part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
+        if (cGet(cache, part)) {
+            continue;
+        }
+        try {
+            fs_1.default.mkdirSync(part, mode);
+            created = created || part;
+            cSet(cache, part, true);
+        }
+        catch (er) {
+            const st = fs_1.default.lstatSync(part);
+            if (st.isDirectory()) {
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (unlink) {
+                fs_1.default.unlinkSync(part);
+                fs_1.default.mkdirSync(part, mode);
+                created = created || part;
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (st.isSymbolicLink()) {
+                return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
+            }
+        }
+    }
+    return done(created);
+};
+exports.mkdirSync = mkdirSync;
+//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
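A sketch of the mkdir contract described in the doc comment above, using the sync form (assumes /tmp/extract already exists; the cwd, cache, and paths are illustrative):

    const { mkdirSync } = require('./mkdir.js')
    mkdirSync('/tmp/extract/a/b/c', {
      cwd: '/tmp/extract',   // extraction root; subdirectories are created relative to it
      mode: 0o755,
      cache: new Map(),      // per-extraction memo of dirs already known to exist
      umask: 0o22,
    })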
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js
new file mode 100644
index 0000000000000..49dd727961d29
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.modeFix = void 0;
+const modeFix = (mode, isDir, portable) => {
+    mode &= 0o7777;
+    // in portable mode, use the minimum reasonable umask
+    // if this system creates files with 0o664 by default
+    // (as some linux distros do), then we'll write the
+    // archive with 0o644 instead.  Also, don't ever create
+    // a file that is not readable/writable by the owner.
+    if (portable) {
+        mode = (mode | 0o600) & ~0o22;
+    }
+    // if dirs are readable, then they should be listable
+    if (isDir) {
+        if (mode & 0o400) {
+            mode |= 0o100;
+        }
+        if (mode & 0o40) {
+            mode |= 0o10;
+        }
+        if (mode & 0o4) {
+            mode |= 0o1;
+        }
+    }
+    return mode;
+};
+exports.modeFix = modeFix;
+//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
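Two concrete values for the mode normalization above, easy to verify against the branches in modeFix:

    const { modeFix } = require('./mode-fix.js')
    console.log(modeFix(0o666, true, false).toString(8))  // '777' - readable dir bits gain the matching execute bit
    console.log(modeFix(0o664, false, true).toString(8))  // '644' - portable mode masks out group/other write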
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js
new file mode 100644
index 0000000000000..2f08ce46d98c4
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizeUnicode = void 0;
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+const normalizeCache = Object.create(null);
+const { hasOwnProperty } = Object.prototype;
+const normalizeUnicode = (s) => {
+    if (!hasOwnProperty.call(normalizeCache, s)) {
+        normalizeCache[s] = s.normalize('NFD');
+    }
+    return normalizeCache[s];
+};
+exports.normalizeUnicode = normalizeUnicode;
+//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js
new file mode 100644
index 0000000000000..b0c7aaa9f2d17
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js
@@ -0,0 +1,12 @@
+"use strict";
+// on windows, either \ or / are valid directory separators.
+// on unix, \ is a valid character in filenames.
+// so, on windows, and only on windows, we replace all \ chars with /,
+// so that we can use / as our one and only directory separator char.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.normalizeWindowsPath = void 0;
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+exports.normalizeWindowsPath = platform !== 'win32' ?
+    (p) => p
+    : (p) => p && p.replace(/\\/g, '/');
+//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js
new file mode 100644
index 0000000000000..4cd06505bc72b
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js
@@ -0,0 +1,66 @@
+"use strict";
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
+const argmap = new Map([
+    ['C', 'cwd'],
+    ['f', 'file'],
+    ['z', 'gzip'],
+    ['P', 'preservePaths'],
+    ['U', 'unlink'],
+    ['strip-components', 'strip'],
+    ['stripComponents', 'strip'],
+    ['keep-newer', 'newer'],
+    ['keepNewer', 'newer'],
+    ['keep-newer-files', 'newer'],
+    ['keepNewerFiles', 'newer'],
+    ['k', 'keep'],
+    ['keep-existing', 'keep'],
+    ['keepExisting', 'keep'],
+    ['m', 'noMtime'],
+    ['no-mtime', 'noMtime'],
+    ['p', 'preserveOwner'],
+    ['L', 'follow'],
+    ['h', 'follow'],
+    ['onentry', 'onReadEntry'],
+]);
+const isSyncFile = (o) => !!o.sync && !!o.file;
+exports.isSyncFile = isSyncFile;
+const isAsyncFile = (o) => !o.sync && !!o.file;
+exports.isAsyncFile = isAsyncFile;
+const isSyncNoFile = (o) => !!o.sync && !o.file;
+exports.isSyncNoFile = isSyncNoFile;
+const isAsyncNoFile = (o) => !o.sync && !o.file;
+exports.isAsyncNoFile = isAsyncNoFile;
+const isSync = (o) => !!o.sync;
+exports.isSync = isSync;
+const isAsync = (o) => !o.sync;
+exports.isAsync = isAsync;
+const isFile = (o) => !!o.file;
+exports.isFile = isFile;
+const isNoFile = (o) => !o.file;
+exports.isNoFile = isNoFile;
+const dealiasKey = (k) => {
+    const d = argmap.get(k);
+    if (d)
+        return d;
+    return k;
+};
+const dealias = (opt = {}) => {
+    if (!opt)
+        return {};
+    const result = {};
+    for (const [key, v] of Object.entries(opt)) {
+        // TS doesn't know that aliases are going to always be the same type
+        const k = dealiasKey(key);
+        result[k] = v;
+    }
+    // affordance for deprecated noChmod -> chmod
+    if (result.chmod === undefined && result.noChmod === false) {
+        result.chmod = true;
+    }
+    delete result.noChmod;
+    return result;
+};
+exports.dealias = dealias;
+//# sourceMappingURL=options.js.map
\ No newline at end of file
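A small sketch of the dealiasing performed above (keys follow the tar(1)-style argmap; the values are illustrative):

    const { dealias } = require('./options.js')
    console.log(dealias({ C: '/tmp', f: 'out.tgz', z: true, onentry: e => e }))
    // -> { cwd: '/tmp', file: 'out.tgz', gzip: true, onReadEntry: [Function] }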
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js
new file mode 100644
index 0000000000000..303e93063c2db
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js
@@ -0,0 +1,477 @@
+"use strict";
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() returns `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)`
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PackSync = exports.Pack = exports.PackJob = void 0;
+const fs_1 = __importDefault(require("fs"));
+const write_entry_js_1 = require("./write-entry.js");
+class PackJob {
+    path;
+    absolute;
+    entry;
+    stat;
+    readdir;
+    pending = false;
+    ignore = false;
+    piped = false;
+    constructor(path, absolute) {
+        this.path = path || './';
+        this.absolute = absolute;
+    }
+}
+exports.PackJob = PackJob;
+const minipass_1 = require("minipass");
+const zlib = __importStar(require("minizlib"));
+const yallist_1 = require("yallist");
+const read_entry_js_1 = require("./read-entry.js");
+const warn_method_js_1 = require("./warn-method.js");
+const EOF = Buffer.alloc(1024);
+const ONSTAT = Symbol('onStat');
+const ENDED = Symbol('ended');
+const QUEUE = Symbol('queue');
+const CURRENT = Symbol('current');
+const PROCESS = Symbol('process');
+const PROCESSING = Symbol('processing');
+const PROCESSJOB = Symbol('processJob');
+const JOBS = Symbol('jobs');
+const JOBDONE = Symbol('jobDone');
+const ADDFSENTRY = Symbol('addFSEntry');
+const ADDTARENTRY = Symbol('addTarEntry');
+const STAT = Symbol('stat');
+const READDIR = Symbol('readdir');
+const ONREADDIR = Symbol('onreaddir');
+const PIPE = Symbol('pipe');
+const ENTRY = Symbol('entry');
+const ENTRYOPT = Symbol('entryOpt');
+const WRITEENTRYCLASS = Symbol('writeEntryClass');
+const WRITE = Symbol('write');
+const ONDRAIN = Symbol('ondrain');
+const path_1 = __importDefault(require("path"));
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+class Pack extends minipass_1.Minipass {
+    opt;
+    cwd;
+    maxReadSize;
+    preservePaths;
+    strict;
+    noPax;
+    prefix;
+    linkCache;
+    statCache;
+    file;
+    portable;
+    zip;
+    readdirCache;
+    noDirRecurse;
+    follow;
+    noMtime;
+    mtime;
+    filter;
+    jobs;
+    [WRITEENTRYCLASS];
+    onWriteEntry;
+    [QUEUE];
+    [JOBS] = 0;
+    [PROCESSING] = false;
+    [ENDED] = false;
+    constructor(opt = {}) {
+        //@ts-ignore
+        super();
+        this.opt = opt;
+        this.file = opt.file || '';
+        this.cwd = opt.cwd || process.cwd();
+        this.maxReadSize = opt.maxReadSize;
+        this.preservePaths = !!opt.preservePaths;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.readdirCache = opt.readdirCache || new Map();
+        this.onWriteEntry = opt.onWriteEntry;
+        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        this.portable = !!opt.portable;
+        if (opt.gzip || opt.brotli) {
+            if (opt.gzip && opt.brotli) {
+                throw new TypeError('gzip and brotli are mutually exclusive');
+            }
+            if (opt.gzip) {
+                if (typeof opt.gzip !== 'object') {
+                    opt.gzip = {};
+                }
+                if (this.portable) {
+                    opt.gzip.portable = true;
+                }
+                this.zip = new zlib.Gzip(opt.gzip);
+            }
+            if (opt.brotli) {
+                if (typeof opt.brotli !== 'object') {
+                    opt.brotli = {};
+                }
+                this.zip = new zlib.BrotliCompress(opt.brotli);
+            }
+            /* c8 ignore next */
+            if (!this.zip)
+                throw new Error('impossible');
+            const zip = this.zip;
+            zip.on('data', chunk => super.write(chunk));
+            zip.on('end', () => super.end());
+            zip.on('drain', () => this[ONDRAIN]());
+            this.on('resume', () => zip.resume());
+        }
+        else {
+            this.on('drain', this[ONDRAIN]);
+        }
+        this.noDirRecurse = !!opt.noDirRecurse;
+        this.follow = !!opt.follow;
+        this.noMtime = !!opt.noMtime;
+        if (opt.mtime)
+            this.mtime = opt.mtime;
+        this.filter =
+            typeof opt.filter === 'function' ? opt.filter : () => true;
+        this[QUEUE] = new yallist_1.Yallist();
+        this[JOBS] = 0;
+        this.jobs = Number(opt.jobs) || 4;
+        this[PROCESSING] = false;
+        this[ENDED] = false;
+    }
+    [WRITE](chunk) {
+        return super.write(chunk);
+    }
+    add(path) {
+        this.write(path);
+        return this;
+    }
+    end(path, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof path === 'function') {
+            cb = path;
+            path = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (path) {
+            this.add(path);
+        }
+        this[ENDED] = true;
+        this[PROCESS]();
+        /* c8 ignore next */
+        if (cb)
+            cb();
+        return this;
+    }
+    write(path) {
+        if (this[ENDED]) {
+            throw new Error('write after end');
+        }
+        if (path instanceof read_entry_js_1.ReadEntry) {
+            this[ADDTARENTRY](path);
+        }
+        else {
+            this[ADDFSENTRY](path);
+        }
+        return this.flowing;
+    }
+    [ADDTARENTRY](p) {
+        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
+        // in this case, we don't have to wait for the stat
+        if (!this.filter(p.path, p)) {
+            p.resume();
+        }
+        else {
+            const job = new PackJob(p.path, absolute);
+            job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
+            job.entry.on('end', () => this[JOBDONE](job));
+            this[JOBS] += 1;
+            this[QUEUE].push(job);
+        }
+        this[PROCESS]();
+    }
+    [ADDFSENTRY](p) {
+        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
+        this[QUEUE].push(new PackJob(p, absolute));
+        this[PROCESS]();
+    }
+    [STAT](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        const stat = this.follow ? 'stat' : 'lstat';
+        fs_1.default[stat](job.absolute, (er, stat) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                this.emit('error', er);
+            }
+            else {
+                this[ONSTAT](job, stat);
+            }
+        });
+    }
+    [ONSTAT](job, stat) {
+        this.statCache.set(job.absolute, stat);
+        job.stat = stat;
+        // now we have the stat, we can filter it.
+        if (!this.filter(job.path, stat)) {
+            job.ignore = true;
+        }
+        this[PROCESS]();
+    }
+    [READDIR](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        fs_1.default.readdir(job.absolute, (er, entries) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADDIR](job, entries);
+        });
+    }
+    [ONREADDIR](job, entries) {
+        this.readdirCache.set(job.absolute, entries);
+        job.readdir = entries;
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        if (this[PROCESSING]) {
+            return;
+        }
+        this[PROCESSING] = true;
+        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
+            this[PROCESSJOB](w.value);
+            if (w.value.ignore) {
+                const p = w.next;
+                this[QUEUE].removeNode(w);
+                w.next = p;
+            }
+        }
+        this[PROCESSING] = false;
+        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+            if (this.zip) {
+                this.zip.end(EOF);
+            }
+            else {
+                super.write(EOF);
+                super.end();
+            }
+        }
+    }
+    get [CURRENT]() {
+        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
+    }
+    [JOBDONE](_job) {
+        this[QUEUE].shift();
+        this[JOBS] -= 1;
+        this[PROCESS]();
+    }
+    [PROCESSJOB](job) {
+        if (job.pending) {
+            return;
+        }
+        if (job.entry) {
+            if (job === this[CURRENT] && !job.piped) {
+                this[PIPE](job);
+            }
+            return;
+        }
+        if (!job.stat) {
+            const sc = this.statCache.get(job.absolute);
+            if (sc) {
+                this[ONSTAT](job, sc);
+            }
+            else {
+                this[STAT](job);
+            }
+        }
+        if (!job.stat) {
+            return;
+        }
+        // filtered out!
+        if (job.ignore) {
+            return;
+        }
+        if (!this.noDirRecurse &&
+            job.stat.isDirectory() &&
+            !job.readdir) {
+            const rc = this.readdirCache.get(job.absolute);
+            if (rc) {
+                this[ONREADDIR](job, rc);
+            }
+            else {
+                this[READDIR](job);
+            }
+            if (!job.readdir) {
+                return;
+            }
+        }
+        // we know it doesn't have an entry, because that got checked above
+        job.entry = this[ENTRY](job);
+        if (!job.entry) {
+            job.ignore = true;
+            return;
+        }
+        if (job === this[CURRENT] && !job.piped) {
+            this[PIPE](job);
+        }
+    }
+    [ENTRYOPT](job) {
+        return {
+            onwarn: (code, msg, data) => this.warn(code, msg, data),
+            noPax: this.noPax,
+            cwd: this.cwd,
+            absolute: job.absolute,
+            preservePaths: this.preservePaths,
+            maxReadSize: this.maxReadSize,
+            strict: this.strict,
+            portable: this.portable,
+            linkCache: this.linkCache,
+            statCache: this.statCache,
+            noMtime: this.noMtime,
+            mtime: this.mtime,
+            prefix: this.prefix,
+            onWriteEntry: this.onWriteEntry,
+        };
+    }
+    [ENTRY](job) {
+        this[JOBS] += 1;
+        try {
+            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
+            return e
+                .on('end', () => this[JOBDONE](job))
+                .on('error', er => this.emit('error', er));
+        }
+        catch (er) {
+            this.emit('error', er);
+        }
+    }
+    [ONDRAIN]() {
+        if (this[CURRENT] && this[CURRENT].entry) {
+            this[CURRENT].entry.resume();
+        }
+    }
+    // like .pipe() but using super, because our write() is special
+    [PIPE](job) {
+        job.piped = true;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        const source = job.entry;
+        const zip = this.zip;
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                if (!zip.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                if (!super.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+    }
+    pause() {
+        if (this.zip) {
+            this.zip.pause();
+        }
+        return super.pause();
+    }
+    warn(code, message, data = {}) {
+        (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+}
+exports.Pack = Pack;
+class PackSync extends Pack {
+    sync = true;
+    constructor(opt) {
+        super(opt);
+        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
+    }
+    // pause/resume are no-ops in sync streams.
+    pause() { }
+    resume() { }
+    [STAT](job) {
+        const stat = this.follow ? 'statSync' : 'lstatSync';
+        this[ONSTAT](job, fs_1.default[stat](job.absolute));
+    }
+    [READDIR](job) {
+        this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
+    }
+    // gotta get it all in this tick
+    [PIPE](job) {
+        const source = job.entry;
+        const zip = this.zip;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('Cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                zip.write(chunk);
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                super[WRITE](chunk);
+            });
+        }
+    }
+}
+exports.PackSync = PackSync;
+//# sourceMappingURL=pack.js.map
\ No newline at end of file
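A usage sketch matching the header comment in pack.js (paths and the output name are illustrative):

    const fs = require('fs')
    const { Pack } = require('./pack.js')
    new Pack({ cwd: process.cwd(), gzip: true })
      .add('lib')              // queue a directory; its children are added as it is read
      .add('package.json')
      .end()                   // no more paths; EOF blocks are written once pending jobs drain
      .pipe(fs.createWriteStream('out.tgz'))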
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js
new file mode 100644
index 0000000000000..9746a25899e6e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js
@@ -0,0 +1,599 @@
+"use strict";
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in.  We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet this can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry.  The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Parser = void 0;
+const events_1 = require("events");
+const minizlib_1 = require("minizlib");
+const yallist_1 = require("yallist");
+const header_js_1 = require("./header.js");
+const pax_js_1 = require("./pax.js");
+const read_entry_js_1 = require("./read-entry.js");
+const warn_method_js_1 = require("./warn-method.js");
+const maxMetaEntrySize = 1024 * 1024;
+const gzipHeader = Buffer.from([0x1f, 0x8b]);
+const STATE = Symbol('state');
+const WRITEENTRY = Symbol('writeEntry');
+const READENTRY = Symbol('readEntry');
+const NEXTENTRY = Symbol('nextEntry');
+const PROCESSENTRY = Symbol('processEntry');
+const EX = Symbol('extendedHeader');
+const GEX = Symbol('globalExtendedHeader');
+const META = Symbol('meta');
+const EMITMETA = Symbol('emitMeta');
+const BUFFER = Symbol('buffer');
+const QUEUE = Symbol('queue');
+const ENDED = Symbol('ended');
+const EMITTEDEND = Symbol('emittedEnd');
+const EMIT = Symbol('emit');
+const UNZIP = Symbol('unzip');
+const CONSUMECHUNK = Symbol('consumeChunk');
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
+const CONSUMEBODY = Symbol('consumeBody');
+const CONSUMEMETA = Symbol('consumeMeta');
+const CONSUMEHEADER = Symbol('consumeHeader');
+const CONSUMING = Symbol('consuming');
+const BUFFERCONCAT = Symbol('bufferConcat');
+const MAYBEEND = Symbol('maybeEnd');
+const WRITING = Symbol('writing');
+const ABORTED = Symbol('aborted');
+const DONE = Symbol('onDone');
+const SAW_VALID_ENTRY = Symbol('sawValidEntry');
+const SAW_NULL_BLOCK = Symbol('sawNullBlock');
+const SAW_EOF = Symbol('sawEOF');
+const CLOSESTREAM = Symbol('closeStream');
+const noop = () => true;
+class Parser extends events_1.EventEmitter {
+    file;
+    strict;
+    maxMetaEntrySize;
+    filter;
+    brotli;
+    writable = true;
+    readable = false;
+    [QUEUE] = new yallist_1.Yallist();
+    [BUFFER];
+    [READENTRY];
+    [WRITEENTRY];
+    [STATE] = 'begin';
+    [META] = '';
+    [EX];
+    [GEX];
+    [ENDED] = false;
+    [UNZIP];
+    [ABORTED] = false;
+    [SAW_VALID_ENTRY];
+    [SAW_NULL_BLOCK] = false;
+    [SAW_EOF] = false;
+    [WRITING] = false;
+    [CONSUMING] = false;
+    [EMITTEDEND] = false;
+    constructor(opt = {}) {
+        super();
+        this.file = opt.file || '';
+        // these BADARCHIVE errors can't be detected early. listen on DONE.
+        this.on(DONE, () => {
+            if (this[STATE] === 'begin' ||
+                this[SAW_VALID_ENTRY] === false) {
+                // either less than 1 block of data, or all entries were invalid.
+                // Either way, probably not even a tarball.
+                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
+            }
+        });
+        if (opt.ondone) {
+            this.on(DONE, opt.ondone);
+        }
+        else {
+            this.on(DONE, () => {
+                this.emit('prefinish');
+                this.emit('finish');
+                this.emit('end');
+            });
+        }
+        this.strict = !!opt.strict;
+        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
+        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
+        // Unlike gzip, brotli doesn't have any magic bytes to identify it
+        // Users need to explicitly tell us they're extracting a brotli file
+        // Or we infer from the file extension
+        const isTBR = opt.file &&
+            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
+        // if it's a tbr file it MIGHT be brotli, but we don't know until
+        // we look at it and verify it's not a valid tar file.
+        this.brotli =
+            !opt.gzip && opt.brotli !== undefined ? opt.brotli
+                : isTBR ? undefined
+                    : false;
+        // have to set this so that streams are ok piping into it
+        this.on('end', () => this[CLOSESTREAM]());
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        if (typeof opt.onReadEntry === 'function') {
+            this.on('entry', opt.onReadEntry);
+        }
+    }
+    warn(code, message, data = {}) {
+        (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+    [CONSUMEHEADER](chunk, position) {
+        if (this[SAW_VALID_ENTRY] === undefined) {
+            this[SAW_VALID_ENTRY] = false;
+        }
+        let header;
+        try {
+            header = new header_js_1.Header(chunk, position, this[EX], this[GEX]);
+        }
+        catch (er) {
+            return this.warn('TAR_ENTRY_INVALID', er);
+        }
+        if (header.nullBlock) {
+            if (this[SAW_NULL_BLOCK]) {
+                this[SAW_EOF] = true;
+                // ending an archive with no entries.  pointless, but legal.
+                if (this[STATE] === 'begin') {
+                    this[STATE] = 'header';
+                }
+                this[EMIT]('eof');
+            }
+            else {
+                this[SAW_NULL_BLOCK] = true;
+                this[EMIT]('nullBlock');
+            }
+        }
+        else {
+            this[SAW_NULL_BLOCK] = false;
+            if (!header.cksumValid) {
+                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
+            }
+            else if (!header.path) {
+                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
+            }
+            else {
+                const type = header.type;
+                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
+                        header,
+                    });
+                }
+                else if (!/^(Symbolic)?Link$/.test(type) &&
+                    !/^(Global)?ExtendedHeader$/.test(type) &&
+                    header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
+                        header,
+                    });
+                }
+                else {
+                    const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX]));
+                    // we do this for meta & ignored entries as well, because they
+                    // are still valid tar, or else we wouldn't know to ignore them
+                    if (!this[SAW_VALID_ENTRY]) {
+                        if (entry.remain) {
+                            // this might be the one!
+                            const onend = () => {
+                                if (!entry.invalid) {
+                                    this[SAW_VALID_ENTRY] = true;
+                                }
+                            };
+                            entry.on('end', onend);
+                        }
+                        else {
+                            this[SAW_VALID_ENTRY] = true;
+                        }
+                    }
+                    if (entry.meta) {
+                        if (entry.size > this.maxMetaEntrySize) {
+                            entry.ignore = true;
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = 'ignore';
+                            entry.resume();
+                        }
+                        else if (entry.size > 0) {
+                            this[META] = '';
+                            entry.on('data', c => (this[META] += c));
+                            this[STATE] = 'meta';
+                        }
+                    }
+                    else {
+                        this[EX] = undefined;
+                        entry.ignore =
+                            entry.ignore || !this.filter(entry.path, entry);
+                        if (entry.ignore) {
+                            // probably valid, just not something we care about
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = entry.remain ? 'ignore' : 'header';
+                            entry.resume();
+                        }
+                        else {
+                            if (entry.remain) {
+                                this[STATE] = 'body';
+                            }
+                            else {
+                                this[STATE] = 'header';
+                                entry.end();
+                            }
+                            if (!this[READENTRY]) {
+                                this[QUEUE].push(entry);
+                                this[NEXTENTRY]();
+                            }
+                            else {
+                                this[QUEUE].push(entry);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    [CLOSESTREAM]() {
+        queueMicrotask(() => this.emit('close'));
+    }
+    [PROCESSENTRY](entry) {
+        let go = true;
+        if (!entry) {
+            this[READENTRY] = undefined;
+            go = false;
+        }
+        else if (Array.isArray(entry)) {
+            const [ev, ...args] = entry;
+            this.emit(ev, ...args);
+        }
+        else {
+            this[READENTRY] = entry;
+            this.emit('entry', entry);
+            if (!entry.emittedEnd) {
+                entry.on('end', () => this[NEXTENTRY]());
+                go = false;
+            }
+        }
+        return go;
+    }
+    [NEXTENTRY]() {
+        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
+        if (!this[QUEUE].length) {
+            // At this point, there's nothing in the queue, but we may have an
+            // entry which is being consumed (readEntry).
+            // If we don't, then we definitely can handle more data.
+            // If we do, and either it's flowing, or it has never had any data
+            // written to it, then it needs more.
+            // The only other possibility is that it has returned false from a
+            // write() call, so we wait for the next drain to continue.
+            const re = this[READENTRY];
+            const drainNow = !re || re.flowing || re.size === re.remain;
+            if (drainNow) {
+                if (!this[WRITING]) {
+                    this.emit('drain');
+                }
+            }
+            else {
+                re.once('drain', () => this.emit('drain'));
+            }
+        }
+    }
+    [CONSUMEBODY](chunk, position) {
+        // write up to but no more than writeEntry.blockRemain
+        const entry = this[WRITEENTRY];
+        /* c8 ignore start */
+        if (!entry) {
+            throw new Error('attempt to consume body without entry??');
+        }
+        const br = entry.blockRemain ?? 0;
+        /* c8 ignore stop */
+        const c = br >= chunk.length && position === 0 ?
+            chunk
+            : chunk.subarray(position, position + br);
+        entry.write(c);
+        if (!entry.blockRemain) {
+            this[STATE] = 'header';
+            this[WRITEENTRY] = undefined;
+            entry.end();
+        }
+        return c.length;
+    }
+    [CONSUMEMETA](chunk, position) {
+        const entry = this[WRITEENTRY];
+        const ret = this[CONSUMEBODY](chunk, position);
+        // if we finished, then the entry is reset
+        if (!this[WRITEENTRY] && entry) {
+            this[EMITMETA](entry);
+        }
+        return ret;
+    }
+    [EMIT](ev, data, extra) {
+        if (!this[QUEUE].length && !this[READENTRY]) {
+            this.emit(ev, data, extra);
+        }
+        else {
+            this[QUEUE].push([ev, data, extra]);
+        }
+    }
+    [EMITMETA](entry) {
+        this[EMIT]('meta', this[META]);
+        switch (entry.type) {
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false);
+                break;
+            case 'GlobalExtendedHeader':
+                this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true);
+                break;
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath': {
+                const ex = this[EX] ?? Object.create(null);
+                this[EX] = ex;
+                ex.path = this[META].replace(/\0.*/, '');
+                break;
+            }
+            case 'NextFileHasLongLinkpath': {
+                const ex = this[EX] || Object.create(null);
+                this[EX] = ex;
+                ex.linkpath = this[META].replace(/\0.*/, '');
+                break;
+            }
+            /* c8 ignore start */
+            default:
+                throw new Error('unknown meta: ' + entry.type);
+            /* c8 ignore stop */
+        }
+    }
+    abort(error) {
+        this[ABORTED] = true;
+        this.emit('abort', error);
+        // always throws, even in non-strict mode
+        this.warn('TAR_ABORT', error, { recoverable: false });
+    }
+    write(chunk, encoding, cb) {
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, 
+            /* c8 ignore next */
+            typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        if (this[ABORTED]) {
+            /* c8 ignore next */
+            cb?.();
+            return false;
+        }
+        // first write, might be gzipped
+        const needSniff = this[UNZIP] === undefined ||
+            (this.brotli === undefined && this[UNZIP] === false);
+        if (needSniff && chunk) {
+            if (this[BUFFER]) {
+                chunk = Buffer.concat([this[BUFFER], chunk]);
+                this[BUFFER] = undefined;
+            }
+            if (chunk.length < gzipHeader.length) {
+                this[BUFFER] = chunk;
+                /* c8 ignore next */
+                cb?.();
+                return true;
+            }
+            // look for gzip header
+            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
+                if (chunk[i] !== gzipHeader[i]) {
+                    this[UNZIP] = false;
+                }
+            }
+            const maybeBrotli = this.brotli === undefined;
+            if (this[UNZIP] === false && maybeBrotli) {
+                // read the first header to see if it's a valid tar file. If so,
+                // we can safely assume that it's not actually brotli, despite the
+                // .tbr or .tar.br file extension.
+                // if we ended before getting a full chunk, yes, def brotli
+                if (chunk.length < 512) {
+                    if (this[ENDED]) {
+                        this.brotli = true;
+                    }
+                    else {
+                        this[BUFFER] = chunk;
+                        /* c8 ignore next */
+                        cb?.();
+                        return true;
+                    }
+                }
+                else {
+                    // if it's tar, it's pretty reliably not brotli, chances of
+                    // that happening are astronomical.
+                    try {
+                        new header_js_1.Header(chunk.subarray(0, 512));
+                        this.brotli = false;
+                    }
+                    catch (_) {
+                        this.brotli = true;
+                    }
+                }
+            }
+            if (this[UNZIP] === undefined ||
+                (this[UNZIP] === false && this.brotli)) {
+                const ended = this[ENDED];
+                this[ENDED] = false;
+                this[UNZIP] =
+                    this[UNZIP] === undefined ?
+                        new minizlib_1.Unzip({})
+                        : new minizlib_1.BrotliDecompress({});
+                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
+                this[UNZIP].on('error', er => this.abort(er));
+                this[UNZIP].on('end', () => {
+                    this[ENDED] = true;
+                    this[CONSUMECHUNK]();
+                });
+                this[WRITING] = true;
+                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
+                this[WRITING] = false;
+                cb?.();
+                return ret;
+            }
+        }
+        this[WRITING] = true;
+        if (this[UNZIP]) {
+            this[UNZIP].write(chunk);
+        }
+        else {
+            this[CONSUMECHUNK](chunk);
+        }
+        this[WRITING] = false;
+        // return false if there's a queue, or if the current entry isn't flowing
+        const ret = this[QUEUE].length ? false
+            : this[READENTRY] ? this[READENTRY].flowing
+                : true;
+        // if we have no queue, then that means a clogged READENTRY
+        if (!ret && !this[QUEUE].length) {
+            this[READENTRY]?.once('drain', () => this.emit('drain'));
+        }
+        /* c8 ignore next */
+        cb?.();
+        return ret;
+    }
+    [BUFFERCONCAT](c) {
+        if (c && !this[ABORTED]) {
+            this[BUFFER] =
+                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
+        }
+    }
+    [MAYBEEND]() {
+        if (this[ENDED] &&
+            !this[EMITTEDEND] &&
+            !this[ABORTED] &&
+            !this[CONSUMING]) {
+            this[EMITTEDEND] = true;
+            const entry = this[WRITEENTRY];
+            if (entry && entry.blockRemain) {
+                // truncated, likely a damaged file
+                const have = this[BUFFER] ? this[BUFFER].length : 0;
+                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
+                if (this[BUFFER]) {
+                    entry.write(this[BUFFER]);
+                }
+                entry.end();
+            }
+            this[EMIT](DONE);
+        }
+    }
+    [CONSUMECHUNK](chunk) {
+        if (this[CONSUMING] && chunk) {
+            this[BUFFERCONCAT](chunk);
+        }
+        else if (!chunk && !this[BUFFER]) {
+            this[MAYBEEND]();
+        }
+        else if (chunk) {
+            this[CONSUMING] = true;
+            if (this[BUFFER]) {
+                this[BUFFERCONCAT](chunk);
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            else {
+                this[CONSUMECHUNKSUB](chunk);
+            }
+            while (this[BUFFER] &&
+                this[BUFFER]?.length >= 512 &&
+                !this[ABORTED] &&
+                !this[SAW_EOF]) {
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            this[CONSUMING] = false;
+        }
+        if (!this[BUFFER] || this[ENDED]) {
+            this[MAYBEEND]();
+        }
+    }
+    [CONSUMECHUNKSUB](chunk) {
+        // we know that we are in CONSUMING mode, so anything written goes into
+        // the buffer.  Advance the position and put any remainder in the buffer.
+        let position = 0;
+        const length = chunk.length;
+        while (position + 512 <= length &&
+            !this[ABORTED] &&
+            !this[SAW_EOF]) {
+            switch (this[STATE]) {
+                case 'begin':
+                case 'header':
+                    this[CONSUMEHEADER](chunk, position);
+                    position += 512;
+                    break;
+                case 'ignore':
+                case 'body':
+                    position += this[CONSUMEBODY](chunk, position);
+                    break;
+                case 'meta':
+                    position += this[CONSUMEMETA](chunk, position);
+                    break;
+                /* c8 ignore start */
+                default:
+                    throw new Error('invalid state: ' + this[STATE]);
+                /* c8 ignore stop */
+            }
+        }
+        if (position < length) {
+            if (this[BUFFER]) {
+                this[BUFFER] = Buffer.concat([
+                    chunk.subarray(position),
+                    this[BUFFER],
+                ]);
+            }
+            else {
+                this[BUFFER] = chunk.subarray(position);
+            }
+        }
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (cb)
+            this.once('finish', cb);
+        if (!this[ABORTED]) {
+            if (this[UNZIP]) {
+                /* c8 ignore start */
+                if (chunk)
+                    this[UNZIP].write(chunk);
+                /* c8 ignore stop */
+                this[UNZIP].end();
+            }
+            else {
+                this[ENDED] = true;
+                if (this.brotli === undefined)
+                    chunk = chunk || Buffer.alloc(0);
+                if (chunk)
+                    this.write(chunk);
+                this[MAYBEEND]();
+            }
+        }
+        return this;
+    }
+}
+exports.Parser = Parser;
+//# sourceMappingURL=parse.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js
new file mode 100644
index 0000000000000..9ff391c44092c
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js
@@ -0,0 +1,170 @@
+"use strict";
+// A path exclusive reservation system
+// reserve([list, of, paths], fn)
+// When the fn is first in line for all its paths, it
+// is called with a cb that clears the reservation.
+//
+// Used by async unpack to avoid clobbering paths in use,
+// while still allowing maximal safe parallelization.
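+//
+// Illustrative usage sketch (not part of the upstream tar source; names
+// below are just an example):
+//
+//   const r = new PathReservations()
+//   r.reserve(['a/b/file.txt'], done => {
+//     // safe to operate on a/b/file.txt (and its parent dirs) here
+//     done() // release the reservation so the next waiting fn can run
+//   })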
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.PathReservations = void 0;
+const node_path_1 = require("node:path");
+const normalize_unicode_js_1 = require("./normalize-unicode.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+// return a set of parent dirs for a given path
+// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
+const getDirs = (path) => {
+    const dirs = path
+        .split('/')
+        .slice(0, -1)
+        .reduce((set, path) => {
+        const s = set[set.length - 1];
+        if (s !== undefined) {
+            path = (0, node_path_1.join)(s, path);
+        }
+        set.push(path || '/');
+        return set;
+    }, []);
+    return dirs;
+};
+class PathReservations {
+    // path => [function or Set]
+    // A Set object means a directory reservation
+    // A fn is a direct reservation on that path
+    #queues = new Map();
+    // fn => {paths:[path,...], dirs:[path, ...]}
+    #reservations = new Map();
+    // functions currently running
+    #running = new Set();
+    reserve(paths, fn) {
+        paths =
+            isWindows ?
+                ['win32 parallelization disabled']
+                : paths.map(p => {
+                    // don't need normPath, because we skip this entirely for windows
+                    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase();
+                });
+        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
+        this.#reservations.set(fn, { dirs, paths });
+        for (const p of paths) {
+            const q = this.#queues.get(p);
+            if (!q) {
+                this.#queues.set(p, [fn]);
+            }
+            else {
+                q.push(fn);
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            if (!q) {
+                this.#queues.set(dir, [new Set([fn])]);
+            }
+            else {
+                const l = q[q.length - 1];
+                if (l instanceof Set) {
+                    l.add(fn);
+                }
+                else {
+                    q.push(new Set([fn]));
+                }
+            }
+        }
+        return this.#run(fn);
+    }
+    // return the queues for each path the function cares about
+    // fn => {paths, dirs}
+    #getQueues(fn) {
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('function does not have any path reservations');
+        }
+        /* c8 ignore stop */
+        return {
+            paths: res.paths.map((path) => this.#queues.get(path)),
+            dirs: [...res.dirs].map(path => this.#queues.get(path)),
+        };
+    }
+    // check if fn is first in line for all its paths, and is
+    // included in the first set for all its dir queues
+    check(fn) {
+        const { paths, dirs } = this.#getQueues(fn);
+        return (paths.every(q => q && q[0] === fn) &&
+            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
+    }
+    // run the function if it's first in line and not already running
+    #run(fn) {
+        if (this.#running.has(fn) || !this.check(fn)) {
+            return false;
+        }
+        this.#running.add(fn);
+        fn(() => this.#clear(fn));
+        return true;
+    }
+    #clear(fn) {
+        if (!this.#running.has(fn)) {
+            return false;
+        }
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('invalid reservation');
+        }
+        /* c8 ignore stop */
+        const { paths, dirs } = res;
+        const next = new Set();
+        for (const path of paths) {
+            const q = this.#queues.get(path);
+            /* c8 ignore start */
+            if (!q || q?.[0] !== fn) {
+                continue;
+            }
+            /* c8 ignore stop */
+            const q0 = q[1];
+            if (!q0) {
+                this.#queues.delete(path);
+                continue;
+            }
+            q.shift();
+            if (typeof q0 === 'function') {
+                next.add(q0);
+            }
+            else {
+                for (const f of q0) {
+                    next.add(f);
+                }
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            const q0 = q?.[0];
+            /* c8 ignore next - type safety only */
+            if (!q || !(q0 instanceof Set))
+                continue;
+            if (q0.size === 1 && q.length === 1) {
+                this.#queues.delete(dir);
+                continue;
+            }
+            else if (q0.size === 1) {
+                q.shift();
+                // next one must be a function,
+                // or else the Set would've been reused
+                const n = q[0];
+                if (typeof n === 'function') {
+                    next.add(n);
+                }
+            }
+            else {
+                q0.delete(fn);
+            }
+        }
+        this.#running.delete(fn);
+        next.forEach(fn => this.#run(fn));
+        return true;
+    }
+}
+exports.PathReservations = PathReservations;
+//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js
new file mode 100644
index 0000000000000..d30c0f3efbe9e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js
@@ -0,0 +1,158 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Pax = void 0;
+const node_path_1 = require("node:path");
+const header_js_1 = require("./header.js");
+class Pax {
+    atime;
+    mtime;
+    ctime;
+    charset;
+    comment;
+    gid;
+    uid;
+    gname;
+    uname;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    path;
+    size;
+    mode;
+    global;
+    constructor(obj, global = false) {
+        this.atime = obj.atime;
+        this.charset = obj.charset;
+        this.comment = obj.comment;
+        this.ctime = obj.ctime;
+        this.dev = obj.dev;
+        this.gid = obj.gid;
+        this.global = global;
+        this.gname = obj.gname;
+        this.ino = obj.ino;
+        this.linkpath = obj.linkpath;
+        this.mtime = obj.mtime;
+        this.nlink = obj.nlink;
+        this.path = obj.path;
+        this.size = obj.size;
+        this.uid = obj.uid;
+        this.uname = obj.uname;
+    }
+    encode() {
+        const body = this.encodeBody();
+        if (body === '') {
+            return Buffer.allocUnsafe(0);
+        }
+        const bodyLen = Buffer.byteLength(body);
+        // round up to 512 bytes
+        // add 512 for header
+        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
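+        // e.g. a 100-byte body gives 512 * Math.ceil(1 + 100 / 512) = 1024:
+        // one 512-byte header block plus one 512-byte body block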
+        const buf = Buffer.allocUnsafe(bufLen);
+        // 0-fill the header section, it might not hit every field
+        for (let i = 0; i < 512; i++) {
+            buf[i] = 0;
+        }
+        new header_js_1.Header({
+            // XXX split the path
+            // then the path should be PaxHeader + basename, but less than 99,
+            // prepend with the dirname
+            /* c8 ignore start */
+            path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
+            /* c8 ignore stop */
+            mode: this.mode || 0o644,
+            uid: this.uid,
+            gid: this.gid,
+            size: bodyLen,
+            mtime: this.mtime,
+            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+            linkpath: '',
+            uname: this.uname || '',
+            gname: this.gname || '',
+            devmaj: 0,
+            devmin: 0,
+            atime: this.atime,
+            ctime: this.ctime,
+        }).encode(buf);
+        buf.write(body, 512, bodyLen, 'utf8');
+        // null pad after the body
+        for (let i = bodyLen + 512; i < buf.length; i++) {
+            buf[i] = 0;
+        }
+        return buf;
+    }
+    encodeBody() {
+        return (this.encodeField('path') +
+            this.encodeField('ctime') +
+            this.encodeField('atime') +
+            this.encodeField('dev') +
+            this.encodeField('ino') +
+            this.encodeField('nlink') +
+            this.encodeField('charset') +
+            this.encodeField('comment') +
+            this.encodeField('gid') +
+            this.encodeField('gname') +
+            this.encodeField('linkpath') +
+            this.encodeField('mtime') +
+            this.encodeField('size') +
+            this.encodeField('uid') +
+            this.encodeField('uname'));
+    }
+    encodeField(field) {
+        if (this[field] === undefined) {
+            return '';
+        }
+        const r = this[field];
+        const v = r instanceof Date ? r.getTime() / 1000 : r;
+        const s = ' ' +
+            (field === 'dev' || field === 'ino' || field === 'nlink' ?
+                'SCHILY.'
+                : '') +
+            field +
+            '=' +
+            v +
+            '\n';
+        const byteLen = Buffer.byteLength(s);
+        // the digits includes the length of the digits in ascii base-10
+        // so if it's 9 characters, then adding 1 for the 9 makes it 10
+        // which makes it 11 chars.
+        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
+        if (byteLen + digits >= Math.pow(10, digits)) {
+            digits += 1;
+        }
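+        // e.g. ' path=foo\n' is 10 bytes, so digits = 2 and 10 + 2 < 100,
+        // and the record is emitted as '12 path=foo\n' (12 bytes total)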
+        const len = digits + byteLen;
+        return len + s;
+    }
+    static parse(str, ex, g = false) {
+        return new Pax(merge(parseKV(str), ex), g);
+    }
+}
+exports.Pax = Pax;
+const merge = (a, b) => b ? Object.assign({}, b, a) : a;
+const parseKV = (str) => str
+    .replace(/\n$/, '')
+    .split('\n')
+    .reduce(parseKVLine, Object.create(null));
+const parseKVLine = (set, line) => {
+    const n = parseInt(line, 10);
+    // XXX Values with \n in them will fail this.
+    // Refactor to not be a naive line-by-line parse.
+    if (n !== Buffer.byteLength(line) + 1) {
+        return set;
+    }
+    line = line.slice((n + ' ').length);
+    const kv = line.split('=');
+    const r = kv.shift();
+    if (!r) {
+        return set;
+    }
+    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
+    const v = kv.join('=');
+    set[k] =
+        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
+            new Date(Number(v) * 1000)
+            : /^[0-9]+$/.test(v) ? +v
+                : v;
+    return set;
+};
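+// e.g. parseKV('12 path=foo\n') yields { path: 'foo' } (illustrative only)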
+//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js
new file mode 100644
index 0000000000000..15e2d55c938a4
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js
@@ -0,0 +1,140 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ReadEntry = void 0;
+const minipass_1 = require("minipass");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+class ReadEntry extends minipass_1.Minipass {
+    extended;
+    globalExtended;
+    header;
+    startBlockSize;
+    blockRemain;
+    remain;
+    type;
+    meta = false;
+    ignore = false;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    size = 0;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    invalid = false;
+    absolute;
+    unsupported = false;
+    constructor(header, ex, gex) {
+        super({});
+        // read entries always start life paused.  this is to avoid the
+        // situation where Minipass's auto-ending empty streams results
+        // in an entry ending before we're ready for it.
+        this.pause();
+        this.extended = ex;
+        this.globalExtended = gex;
+        this.header = header;
+        /* c8 ignore start */
+        this.remain = header.size ?? 0;
+        /* c8 ignore stop */
+        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
+        this.blockRemain = this.startBlockSize;
+        this.type = header.type;
+        switch (this.type) {
+            case 'File':
+            case 'OldFile':
+            case 'Link':
+            case 'SymbolicLink':
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'Directory':
+            case 'FIFO':
+            case 'ContiguousFile':
+            case 'GNUDumpDir':
+                break;
+            case 'NextFileHasLongLinkpath':
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath':
+            case 'GlobalExtendedHeader':
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this.meta = true;
+                break;
+            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+            // it may be worth doing the same, but with a warning.
+            default:
+                this.ignore = true;
+        }
+        /* c8 ignore start */
+        if (!header.path) {
+            throw new Error('no path provided for tar.ReadEntry');
+        }
+        /* c8 ignore stop */
+        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
+        this.mode = header.mode;
+        if (this.mode) {
+            this.mode = this.mode & 0o7777;
+        }
+        this.uid = header.uid;
+        this.gid = header.gid;
+        this.uname = header.uname;
+        this.gname = header.gname;
+        this.size = this.remain;
+        this.mtime = header.mtime;
+        this.atime = header.atime;
+        this.ctime = header.ctime;
+        /* c8 ignore start */
+        this.linkpath =
+            header.linkpath ?
+                (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
+                : undefined;
+        /* c8 ignore stop */
+        this.uname = header.uname;
+        this.gname = header.gname;
+        if (ex) {
+            this.#slurp(ex);
+        }
+        if (gex) {
+            this.#slurp(gex, true);
+        }
+    }
+    write(data) {
+        const writeLen = data.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        const r = this.remain;
+        const br = this.blockRemain;
+        this.remain = Math.max(0, r - writeLen);
+        this.blockRemain = Math.max(0, br - writeLen);
+        if (this.ignore) {
+            return true;
+        }
+        if (r >= writeLen) {
+            return super.write(data);
+        }
+        // r < writeLen
+        return super.write(data.subarray(0, r));
+    }
+    #slurp(ex, gex = false) {
+        if (ex.path)
+            ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
+        if (ex.linkpath)
+            ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex));
+        })));
+    }
+}
+exports.ReadEntry = ReadEntry;
+//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js
new file mode 100644
index 0000000000000..262deecd12f9f
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js
@@ -0,0 +1,231 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.replace = void 0;
+// tar -r
+const fs_minipass_1 = require("@isaacs/fs-minipass");
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const header_js_1 = require("./header.js");
+const list_js_1 = require("./list.js");
+const make_command_js_1 = require("./make-command.js");
+const options_js_1 = require("./options.js");
+const pack_js_1 = require("./pack.js");
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is, jump forward by the specified size (round up to 512)
+// and try again.
+// Write the new Pack stream starting there.
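+// e.g. a valid header for a 100-byte entry advances the scan position by
+// 512 (header) + 512 (body rounded up to a full block) = 1024 bytes
+// (illustrative note, derived from the block arithmetic below)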
+const replaceSync = (opt, files) => {
+    const p = new pack_js_1.PackSync(opt);
+    let threw = true;
+    let fd;
+    let position;
+    try {
+        try {
+            fd = node_fs_1.default.openSync(opt.file, 'r+');
+        }
+        catch (er) {
+            if (er?.code === 'ENOENT') {
+                fd = node_fs_1.default.openSync(opt.file, 'w+');
+            }
+            else {
+                throw er;
+            }
+        }
+        const st = node_fs_1.default.fstatSync(fd);
+        const headBuf = Buffer.alloc(512);
+        POSITION: for (position = 0; position < st.size; position += 512) {
+            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
+                if (position === 0 &&
+                    headBuf[0] === 0x1f &&
+                    headBuf[1] === 0x8b) {
+                    throw new Error('cannot append to compressed archives');
+                }
+                if (!bytes) {
+                    break POSITION;
+                }
+            }
+            const h = new header_js_1.Header(headBuf);
+            if (!h.cksumValid) {
+                break;
+            }
+            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
+            if (position + entryBlockSize + 512 > st.size) {
+                break;
+            }
+            // the 512 for the header we just parsed will be added as well
+            // also jump ahead all the blocks for the body
+            position += entryBlockSize;
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+        }
+        threw = false;
+        streamSync(opt, p, position, fd, files);
+    }
+    finally {
+        if (threw) {
+            try {
+                node_fs_1.default.closeSync(fd);
+            }
+            catch (er) { }
+        }
+    }
+};
+const streamSync = (opt, p, position, fd, files) => {
+    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
+        fd: fd,
+        start: position,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const replaceAsync = (opt, files) => {
+    files = Array.from(files);
+    const p = new pack_js_1.Pack(opt);
+    const getPos = (fd, size, cb_) => {
+        const cb = (er, pos) => {
+            if (er) {
+                node_fs_1.default.close(fd, _ => cb_(er));
+            }
+            else {
+                cb_(null, pos);
+            }
+        };
+        let position = 0;
+        if (size === 0) {
+            return cb(null, 0);
+        }
+        let bufPos = 0;
+        const headBuf = Buffer.alloc(512);
+        const onread = (er, bytes) => {
+            if (er || typeof bytes === 'undefined') {
+                return cb(er);
+            }
+            bufPos += bytes;
+            if (bufPos < 512 && bytes) {
+                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
+            }
+            if (position === 0 &&
+                headBuf[0] === 0x1f &&
+                headBuf[1] === 0x8b) {
+                return cb(new Error('cannot append to compressed archives'));
+            }
+            // truncated header
+            if (bufPos < 512) {
+                return cb(null, position);
+            }
+            const h = new header_js_1.Header(headBuf);
+            if (!h.cksumValid) {
+                return cb(null, position);
+            }
+            /* c8 ignore next */
+            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
+            if (position + entryBlockSize + 512 > size) {
+                return cb(null, position);
+            }
+            position += entryBlockSize + 512;
+            if (position >= size) {
+                return cb(null, position);
+            }
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+            bufPos = 0;
+            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
+        };
+        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
+    };
+    const promise = new Promise((resolve, reject) => {
+        p.on('error', reject);
+        let flag = 'r+';
+        const onopen = (er, fd) => {
+            if (er && er.code === 'ENOENT' && flag === 'r+') {
+                flag = 'w+';
+                return node_fs_1.default.open(opt.file, flag, onopen);
+            }
+            if (er || !fd) {
+                return reject(er);
+            }
+            node_fs_1.default.fstat(fd, (er, st) => {
+                if (er) {
+                    return node_fs_1.default.close(fd, () => reject(er));
+                }
+                getPos(fd, st.size, (er, position) => {
+                    if (er) {
+                        return reject(er);
+                    }
+                    const stream = new fs_minipass_1.WriteStream(opt.file, {
+                        fd: fd,
+                        start: position,
+                    });
+                    p.pipe(stream);
+                    stream.on('error', reject);
+                    stream.on('close', resolve);
+                    addFilesAsync(p, files);
+                });
+            });
+        };
+        node_fs_1.default.open(opt.file, flag, onopen);
+    });
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            (0, list_js_1.list)({
+                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await (0, list_js_1.list)({
+                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, 
+/* c8 ignore start */
+() => {
+    throw new TypeError('file is required');
+}, () => {
+    throw new TypeError('file is required');
+}, 
+/* c8 ignore stop */
+(opt, entries) => {
+    if (!(0, options_js_1.isFile)(opt)) {
+        throw new TypeError('file is required');
+    }
+    if (opt.gzip ||
+        opt.brotli ||
+        opt.file.endsWith('.br') ||
+        opt.file.endsWith('.tbr')) {
+        throw new TypeError('cannot append to compressed archives');
+    }
+    if (!entries?.length) {
+        throw new TypeError('no paths specified to add/replace');
+    }
+});
+//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js
new file mode 100644
index 0000000000000..bb7639c35a110
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js
@@ -0,0 +1,29 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.stripAbsolutePath = void 0;
+// unix absolute paths are also absolute on win32, so we use this for both
+const node_path_1 = require("node:path");
+const { isAbsolute, parse } = node_path_1.win32;
+// returns [root, stripped]
+// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
+// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
+// explicitly if it's the first character.
+// drive-specific relative paths on Windows get their root stripped off even
+// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
+const stripAbsolutePath = (path) => {
+    let r = '';
+    let parsed = parse(path);
+    while (isAbsolute(path) || parsed.root) {
+        // windows will think that //x/y/z has a "root" of //x/y/
+        // but strip the //?/C:/ off of //?/C:/path
+        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
+            '/'
+            : parsed.root;
+        path = path.slice(root.length);
+        r += root;
+        parsed = parse(path);
+    }
+    return [r, path];
+};
+exports.stripAbsolutePath = stripAbsolutePath;
+//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
new file mode 100644
index 0000000000000..6fa74ad6a4ac9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
@@ -0,0 +1,18 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.stripTrailingSlashes = void 0;
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
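+// e.g. stripTrailingSlashes('a/b///') === 'a/b' (illustrative)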
+const stripTrailingSlashes = (str) => {
+    let i = str.length - 1;
+    let slashesStart = -1;
+    while (i > -1 && str.charAt(i) === '/') {
+        slashesStart = i;
+        i--;
+    }
+    return slashesStart === -1 ? str : str.slice(0, slashesStart);
+};
+exports.stripTrailingSlashes = stripTrailingSlashes;
+//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js
new file mode 100644
index 0000000000000..cc19ac1a2e3c6
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js
@@ -0,0 +1,19 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.SymlinkError = void 0;
+class SymlinkError extends Error {
+    path;
+    symlink;
+    syscall = 'symlink';
+    code = 'TAR_SYMLINK_ERROR';
+    constructor(symlink, path) {
+        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
+        this.symlink = symlink;
+        this.path = path;
+    }
+    get name() {
+        return 'SymlinkError';
+    }
+}
+exports.SymlinkError = SymlinkError;
+//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js
new file mode 100644
index 0000000000000..cb9b684e843b7
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.code = exports.name = exports.isName = exports.isCode = void 0;
+const isCode = (c) => exports.name.has(c);
+exports.isCode = isCode;
+const isName = (c) => exports.code.has(c);
+exports.isName = isName;
+// map types from key to human-friendly name
+exports.name = new Map([
+    ['0', 'File'],
+    // same as File
+    ['', 'OldFile'],
+    ['1', 'Link'],
+    ['2', 'SymbolicLink'],
+    // Devices and FIFOs aren't fully supported
+    // they are parsed, but skipped when unpacking
+    ['3', 'CharacterDevice'],
+    ['4', 'BlockDevice'],
+    ['5', 'Directory'],
+    ['6', 'FIFO'],
+    // same as File
+    ['7', 'ContiguousFile'],
+    // pax headers
+    ['g', 'GlobalExtendedHeader'],
+    ['x', 'ExtendedHeader'],
+    // vendor-specific stuff
+    // skip
+    ['A', 'SolarisACL'],
+    // like 5, but with data, which should be skipped
+    ['D', 'GNUDumpDir'],
+    // metadata only, skip
+    ['I', 'Inode'],
+    // data = link path of next file
+    ['K', 'NextFileHasLongLinkpath'],
+    // data = path of next file
+    ['L', 'NextFileHasLongPath'],
+    // skip
+    ['M', 'ContinuationFile'],
+    // like L
+    ['N', 'OldGnuLongPath'],
+    // skip
+    ['S', 'SparseFile'],
+    // skip
+    ['V', 'TapeVolumeHeader'],
+    // like x
+    ['X', 'OldExtendedHeader'],
+]);
+// map the other direction
+exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
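+// e.g. exports.name.get('5') === 'Directory' and
+// exports.code.get('Directory') === '5' (illustrative)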
+//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js
new file mode 100644
index 0000000000000..edf8acbb18c40
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js
@@ -0,0 +1,919 @@
+"use strict";
+// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
+// but the path reservations are required to avoid race conditions where
+// parallelized unpack ops may mess with one another, due to dependencies
+// (like a Link depending on its target) or destructive operations (like
+// clobbering an fs object to create one of a different type.)
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.UnpackSync = exports.Unpack = void 0;
+const fsm = __importStar(require("@isaacs/fs-minipass"));
+const node_assert_1 = __importDefault(require("node:assert"));
+const node_crypto_1 = require("node:crypto");
+const node_fs_1 = __importDefault(require("node:fs"));
+const node_path_1 = __importDefault(require("node:path"));
+const get_write_flag_js_1 = require("./get-write-flag.js");
+const mkdir_js_1 = require("./mkdir.js");
+const normalize_unicode_js_1 = require("./normalize-unicode.js");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+const parse_js_1 = require("./parse.js");
+const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const wc = __importStar(require("./winchars.js"));
+const path_reservations_js_1 = require("./path-reservations.js");
+const ONENTRY = Symbol('onEntry');
+const CHECKFS = Symbol('checkFs');
+const CHECKFS2 = Symbol('checkFs2');
+const PRUNECACHE = Symbol('pruneCache');
+const ISREUSABLE = Symbol('isReusable');
+const MAKEFS = Symbol('makeFs');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const LINK = Symbol('link');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const UNSUPPORTED = Symbol('unsupported');
+const CHECKPATH = Symbol('checkPath');
+const MKDIR = Symbol('mkdir');
+const ONERROR = Symbol('onError');
+const PENDING = Symbol('pending');
+const PEND = Symbol('pend');
+const UNPEND = Symbol('unpend');
+const ENDED = Symbol('ended');
+const MAYBECLOSE = Symbol('maybeClose');
+const SKIP = Symbol('skip');
+const DOCHOWN = Symbol('doChown');
+const UID = Symbol('uid');
+const GID = Symbol('gid');
+const CHECKED_CWD = Symbol('checkedCwd');
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+const DEFAULT_MAX_DEPTH = 1024;
+// Unlinks on Windows are not atomic.
+//
+// This means that if you have a file entry, followed by another
+// file entry with an identical name, and you cannot re-use the file
+// (because it's a hardlink, or because unlink:true is set, or it's
+// Windows, which does not have useful nlink values), then the unlink
+// will be committed to the disk AFTER the new file has been written
+// over the old one, deleting the new file.
+//
+// To work around this, on Windows systems, we rename the file and then
+// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
+// know of a better way to do this, given windows' non-atomic unlink
+// semantics.
+//
+// See: https://github.com/npm/node-tar/issues/183
+/* c8 ignore start */
+const unlinkFile = (path, cb) => {
+    if (!isWindows) {
+        return node_fs_1.default.unlink(path, cb);
+    }
+    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
+    node_fs_1.default.rename(path, name, er => {
+        if (er) {
+            return cb(er);
+        }
+        node_fs_1.default.unlink(name, cb);
+    });
+};
+/* c8 ignore stop */
+/* c8 ignore start */
+const unlinkFileSync = (path) => {
+    if (!isWindows) {
+        return node_fs_1.default.unlinkSync(path);
+    }
+    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
+    node_fs_1.default.renameSync(path, name);
+    node_fs_1.default.unlinkSync(name);
+};
+/* c8 ignore stop */
+// this.gid, entry.gid, this.processUid
+const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
+    : b !== undefined && b === b >>> 0 ? b
+        : c;
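+// e.g. uint32(undefined, 1000, 501) === 1000 and uint32(-1, undefined, 501) === 501
+// (illustrative: the first argument that is a valid unsigned 32-bit value wins)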
+// clear the cache if it's a case-insensitive unicode-squashing match.
+// we can't know if the current file system is case-sensitive or supports
+// unicode fully, so we check for similarity on the maximally compatible
+// representation.  Err on the side of pruning, since all it's doing is
+// preventing lstats, and it's not the end of the world if we get a false
+// positive.
+// Note that on windows, we always drop the entire cache whenever a
+// symbolic link is encountered, because 8.3 filenames are impossible
+// to reason about, and collisions are hazards rather than just failures.
+const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
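+// e.g. cacheKeyNormalize('Foo/Bar/') === 'foo/bar', so 'FOO/bar' and
+// 'foo/Bar/' intentionally map to the same cache key (illustrative)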
+// remove all cache entries matching ${abs}/**
+const pruneCache = (cache, abs) => {
+    abs = cacheKeyNormalize(abs);
+    for (const path of cache.keys()) {
+        const pnorm = cacheKeyNormalize(path);
+        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
+            cache.delete(path);
+        }
+    }
+};
+const dropCache = (cache) => {
+    for (const key of cache.keys()) {
+        cache.delete(key);
+    }
+};
+class Unpack extends parse_js_1.Parser {
+    [ENDED] = false;
+    [CHECKED_CWD] = false;
+    [PENDING] = 0;
+    reservations = new path_reservations_js_1.PathReservations();
+    transform;
+    writable = true;
+    readable = false;
+    dirCache;
+    uid;
+    gid;
+    setOwner;
+    preserveOwner;
+    processGid;
+    processUid;
+    maxDepth;
+    forceChown;
+    win32;
+    newer;
+    keep;
+    noMtime;
+    preservePaths;
+    unlink;
+    cwd;
+    strip;
+    processUmask;
+    umask;
+    dmode;
+    fmode;
+    chmod;
+    constructor(opt = {}) {
+        opt.ondone = () => {
+            this[ENDED] = true;
+            this[MAYBECLOSE]();
+        };
+        super(opt);
+        this.transform = opt.transform;
+        this.dirCache = opt.dirCache || new Map();
+        this.chmod = !!opt.chmod;
+        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+            // need both or neither
+            if (typeof opt.uid !== 'number' ||
+                typeof opt.gid !== 'number') {
+                throw new TypeError('cannot set owner without number uid and gid');
+            }
+            if (opt.preserveOwner) {
+                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
+            }
+            this.uid = opt.uid;
+            this.gid = opt.gid;
+            this.setOwner = true;
+        }
+        else {
+            this.uid = undefined;
+            this.gid = undefined;
+            this.setOwner = false;
+        }
+        // default true for root
+        if (opt.preserveOwner === undefined &&
+            typeof opt.uid !== 'number') {
+            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
+        }
+        else {
+            this.preserveOwner = !!opt.preserveOwner;
+        }
+        this.processUid =
+            (this.preserveOwner || this.setOwner) && process.getuid ?
+                process.getuid()
+                : undefined;
+        this.processGid =
+            (this.preserveOwner || this.setOwner) && process.getgid ?
+                process.getgid()
+                : undefined;
+        // prevent excessively deep nesting of subfolders
+        // set to `Infinity` to remove this restriction
+        this.maxDepth =
+            typeof opt.maxDepth === 'number' ?
+                opt.maxDepth
+                : DEFAULT_MAX_DEPTH;
+        // mostly just for testing, but useful in some cases.
+        // Forcibly trigger a chown on every entry, no matter what
+        this.forceChown = opt.forceChown === true;
+        // turn >| in filenames into 0xf000-higher encoded forms
+        this.win32 = !!opt.win32 || isWindows;
+        // do not unpack over files that are newer than what's in the archive
+        this.newer = !!opt.newer;
+        // do not unpack over ANY files
+        this.keep = !!opt.keep;
+        // do not set mtime/atime of extracted entries
+        this.noMtime = !!opt.noMtime;
+        // allow .., absolute path entries, and writing through symlinks
+        this.preservePaths = !!opt.preservePaths;
+        // unlink files and links before writing. This breaks existing hard
+        // links, and removes symlink directories rather than erroring
+        this.unlink = !!opt.unlink;
+        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(opt.cwd || process.cwd()));
+        this.strip = Number(opt.strip) || 0;
+        // if we're not chmodding, then we don't need the process umask
+        this.processUmask = !this.chmod ? 0 : process.umask();
+        this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask;
+        // default mode for dirs created as parents
+        this.dmode = opt.dmode || 0o0777 & ~this.umask;
+        this.fmode = opt.fmode || 0o0666 & ~this.umask;
+        this.on('entry', entry => this[ONENTRY](entry));
+    }
+    // a bad or damaged archive is a warning for Parser, but an error
+    // when extracting.  Mark those errors as unrecoverable, because
+    // the Unpack contract cannot be met.
+    warn(code, msg, data = {}) {
+        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
+            data.recoverable = false;
+        }
+        return super.warn(code, msg, data);
+    }
+    [MAYBECLOSE]() {
+        if (this[ENDED] && this[PENDING] === 0) {
+            this.emit('prefinish');
+            this.emit('finish');
+            this.emit('end');
+        }
+    }
+    [CHECKPATH](entry) {
+        const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
+        const parts = p.split('/');
+        if (this.strip) {
+            if (parts.length < this.strip) {
+                return false;
+            }
+            if (entry.type === 'Link') {
+                const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
+                if (linkparts.length >= this.strip) {
+                    entry.linkpath = linkparts.slice(this.strip).join('/');
+                }
+                else {
+                    return false;
+                }
+            }
+            parts.splice(0, this.strip);
+            entry.path = parts.join('/');
+        }
+        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
+            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
+                entry,
+                path: p,
+                depth: parts.length,
+                maxDepth: this.maxDepth,
+            });
+            return false;
+        }
+        if (!this.preservePaths) {
+            if (parts.includes('..') ||
+                /* c8 ignore next */
+                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
+                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
+                    entry,
+                    path: p,
+                });
+                return false;
+            }
+            // strip off the root
+            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
+            if (root) {
+                entry.path = String(stripped);
+                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
+                    entry,
+                    path: p,
+                });
+            }
+        }
+        if (node_path_1.default.isAbsolute(entry.path)) {
+            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
+        }
+        else {
+            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
+        }
+        // if we somehow ended up with a path that escapes the cwd, and we are
+        // not in preservePaths mode, then something is fishy!  This should have
+        // been prevented above, so ignore this for coverage.
+        /* c8 ignore start - defense in depth */
+        if (!this.preservePaths &&
+            typeof entry.absolute === 'string' &&
+            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
+            entry.absolute !== this.cwd) {
+            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
+                entry,
+                path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
+                resolvedPath: entry.absolute,
+                cwd: this.cwd,
+            });
+            return false;
+        }
+        /* c8 ignore stop */
+        // an archive can set properties on the extraction directory, but it
+        // may not replace the cwd with a different kind of thing entirely.
+        if (entry.absolute === this.cwd &&
+            entry.type !== 'Directory' &&
+            entry.type !== 'GNUDumpDir') {
+            return false;
+        }
+        // only encode : chars that aren't drive letter indicators
+        if (this.win32) {
+            const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
+            entry.absolute =
+                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
+            const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
+            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
+        }
+        return true;
+    }
+    [ONENTRY](entry) {
+        if (!this[CHECKPATH](entry)) {
+            return entry.resume();
+        }
+        node_assert_1.default.equal(typeof entry.absolute, 'string');
+        switch (entry.type) {
+            case 'Directory':
+            case 'GNUDumpDir':
+                if (entry.mode) {
+                    entry.mode = entry.mode | 0o700;
+                }
+            // eslint-disable-next-line no-fallthrough
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+            case 'Link':
+            case 'SymbolicLink':
+                return this[CHECKFS](entry);
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'FIFO':
+            default:
+                return this[UNSUPPORTED](entry);
+        }
+    }
+    [ONERROR](er, entry) {
+        // Cwd has to exist, or else nothing works. That's serious.
+        // Other errors are warnings, which raise the error in strict
+        // mode, but otherwise continue on.
+        if (er.name === 'CwdError') {
+            this.emit('error', er);
+        }
+        else {
+            this.warn('TAR_ENTRY_ERROR', er, { entry });
+            this[UNPEND]();
+            entry.resume();
+        }
+    }
+    [MKDIR](dir, mode, cb) {
+        (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
+            uid: this.uid,
+            gid: this.gid,
+            processUid: this.processUid,
+            processGid: this.processGid,
+            umask: this.processUmask,
+            preserve: this.preservePaths,
+            unlink: this.unlink,
+            cache: this.dirCache,
+            cwd: this.cwd,
+            mode: mode,
+        }, cb);
+    }
+    [DOCHOWN](entry) {
+        // in preserve owner mode, chown if the entry doesn't match process
+        // in set owner mode, chown if setting doesn't match process
+        return (this.forceChown ||
+            (this.preserveOwner &&
+                ((typeof entry.uid === 'number' &&
+                    entry.uid !== this.processUid) ||
+                    (typeof entry.gid === 'number' &&
+                        entry.gid !== this.processGid))) ||
+            (typeof this.uid === 'number' &&
+                this.uid !== this.processUid) ||
+            (typeof this.gid === 'number' && this.gid !== this.processGid));
+    }
+    [UID](entry) {
+        return uint32(this.uid, entry.uid, this.processUid);
+    }
+    [GID](entry) {
+        return uint32(this.gid, entry.gid, this.processGid);
+    }
+    [FILE](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const stream = new fsm.WriteStream(String(entry.absolute), {
+            // slight lie, but it can be numeric flags
+            flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
+            mode: mode,
+            autoClose: false,
+        });
+        stream.on('error', (er) => {
+            if (stream.fd) {
+                node_fs_1.default.close(stream.fd, () => { });
+            }
+            // flush all the data out so that we aren't left hanging
+            // if the error wasn't actually fatal.  otherwise the parse
+            // is blocked, and we never proceed.
+            stream.write = () => true;
+            this[ONERROR](er, entry);
+            fullyDone();
+        });
+        let actions = 1;
+        const done = (er) => {
+            if (er) {
+                /* c8 ignore start - we should always have a fd by now */
+                if (stream.fd) {
+                    node_fs_1.default.close(stream.fd, () => { });
+                }
+                /* c8 ignore stop */
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            if (--actions === 0) {
+                if (stream.fd !== undefined) {
+                    node_fs_1.default.close(stream.fd, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                        }
+                        else {
+                            this[UNPEND]();
+                        }
+                        fullyDone();
+                    });
+                }
+            }
+        };
+        stream.on('finish', () => {
+            // if futimes fails, try utimes
+            // if utimes fails, fail with the original error
+            // same for fchown/chown
+            const abs = String(entry.absolute);
+            const fd = stream.fd;
+            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
+                actions++;
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                node_fs_1.default.futimes(fd, atime, mtime, er => er ?
+                    node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
+                    : done());
+            }
+            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
+                actions++;
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                if (typeof uid === 'number' && typeof gid === 'number') {
+                    node_fs_1.default.fchown(fd, uid, gid, er => er ?
+                        node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
+                        : done());
+                }
+            }
+            done();
+        });
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => {
+                this[ONERROR](er, entry);
+                fullyDone();
+            });
+            entry.pipe(tx);
+        }
+        tx.pipe(stream);
+    }
+    [DIRECTORY](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        this[MKDIR](String(entry.absolute), mode, er => {
+            if (er) {
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            let actions = 1;
+            const done = () => {
+                if (--actions === 0) {
+                    fullyDone();
+                    this[UNPEND]();
+                    entry.resume();
+                }
+            };
+            if (entry.mtime && !this.noMtime) {
+                actions++;
+                node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
+            }
+            if (this[DOCHOWN](entry)) {
+                actions++;
+                node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
+            }
+            done();
+        });
+    }
+    [UNSUPPORTED](entry) {
+        entry.unsupported = true;
+        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
+        entry.resume();
+    }
+    [SYMLINK](entry, done) {
+        this[LINK](entry, String(entry.linkpath), 'symlink', done);
+    }
+    [HARDLINK](entry, done) {
+        const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
+        this[LINK](entry, linkpath, 'link', done);
+    }
+    [PEND]() {
+        this[PENDING]++;
+    }
+    [UNPEND]() {
+        this[PENDING]--;
+        this[MAYBECLOSE]();
+    }
+    [SKIP](entry) {
+        this[UNPEND]();
+        entry.resume();
+    }
+    // Check if we can reuse an existing filesystem entry safely and
+    // overwrite it, rather than unlinking and recreating
+    // Windows doesn't report a useful nlink, so we just never reuse entries
+    [ISREUSABLE](entry, st) {
+        return (entry.type === 'File' &&
+            !this.unlink &&
+            st.isFile() &&
+            st.nlink <= 1 &&
+            !isWindows);
+    }
+    // check if a thing is there, and if so, try to clobber it
+    [CHECKFS](entry) {
+        this[PEND]();
+        const paths = [entry.path];
+        if (entry.linkpath) {
+            paths.push(entry.linkpath);
+        }
+        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
+    }
+    [PRUNECACHE](entry) {
+        // if we are not creating a directory, and the path is in the dirCache,
+        // then that means we are about to delete the directory we created
+        // previously, and it is no longer going to be a directory, and neither
+        // is any of its children.
+        // If a symbolic link is encountered, all bets are off.  There is no
+        // reasonable way to sanitize the cache in such a way we will be able to
+        // avoid having filesystem collisions.  If this happens with a non-symlink
+        // entry, it'll just fail to unpack, but a symlink to a directory, using an
+        // 8.3 shortname or certain unicode attacks, can evade detection and lead
+        // to arbitrary writes to anywhere on the system.
+        if (entry.type === 'SymbolicLink') {
+            dropCache(this.dirCache);
+        }
+        else if (entry.type !== 'Directory') {
+            pruneCache(this.dirCache, String(entry.absolute));
+        }
+    }
+    [CHECKFS2](entry, fullyDone) {
+        this[PRUNECACHE](entry);
+        const done = (er) => {
+            this[PRUNECACHE](entry);
+            fullyDone(er);
+        };
+        const checkCwd = () => {
+            this[MKDIR](this.cwd, this.dmode, er => {
+                if (er) {
+                    this[ONERROR](er, entry);
+                    done();
+                    return;
+                }
+                this[CHECKED_CWD] = true;
+                start();
+            });
+        };
+        const start = () => {
+            if (entry.absolute !== this.cwd) {
+                const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
+                if (parent !== this.cwd) {
+                    return this[MKDIR](parent, this.dmode, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                            done();
+                            return;
+                        }
+                        afterMakeParent();
+                    });
+                }
+            }
+            afterMakeParent();
+        };
+        const afterMakeParent = () => {
+            node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
+                if (st &&
+                    (this.keep ||
+                        /* c8 ignore next */
+                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+                    this[SKIP](entry);
+                    done();
+                    return;
+                }
+                if (lstatEr || this[ISREUSABLE](entry, st)) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                if (st.isDirectory()) {
+                    if (entry.type === 'Directory') {
+                        const needChmod = this.chmod &&
+                            entry.mode &&
+                            (st.mode & 0o7777) !== entry.mode;
+                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
+                        if (!needChmod) {
+                            return afterChmod();
+                        }
+                        return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
+                    }
+                    // Not a dir entry, have to remove it.
+                    // NB: the only way to end up with an entry that is the cwd
+                    // itself, in such a way that == does not detect, is a
+                    // tricky windows absolute path with UNC or 8.3 parts (and
+                    // preservePaths:true, or else it will have been stripped).
+                    // In that case, the user has opted out of path protections
+                    // explicitly, so if they blow away the cwd, c'est la vie.
+                    if (entry.absolute !== this.cwd) {
+                        return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
+                    }
+                }
+                // not a dir, and not reusable
+                // don't remove if the cwd, we want that error
+                if (entry.absolute === this.cwd) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
+            });
+        };
+        if (this[CHECKED_CWD]) {
+            start();
+        }
+        else {
+            checkCwd();
+        }
+    }
+    [MAKEFS](er, entry, done) {
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        switch (entry.type) {
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+                return this[FILE](entry, done);
+            case 'Link':
+                return this[HARDLINK](entry, done);
+            case 'SymbolicLink':
+                return this[SYMLINK](entry, done);
+            case 'Directory':
+            case 'GNUDumpDir':
+                return this[DIRECTORY](entry, done);
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        // XXX: get the type ('symlink' or 'junction') for windows
+        node_fs_1.default[link](linkpath, String(entry.absolute), er => {
+            if (er) {
+                this[ONERROR](er, entry);
+            }
+            else {
+                this[UNPEND]();
+                entry.resume();
+            }
+            done();
+        });
+    }
+}
+exports.Unpack = Unpack;
+const callSync = (fn) => {
+    try {
+        return [null, fn()];
+    }
+    catch (er) {
+        return [er, null];
+    }
+};
+class UnpackSync extends Unpack {
+    sync = true;
+    [MAKEFS](er, entry) {
+        return super[MAKEFS](er, entry, () => { });
+    }
+    [CHECKFS](entry) {
+        this[PRUNECACHE](entry);
+        if (!this[CHECKED_CWD]) {
+            const er = this[MKDIR](this.cwd, this.dmode);
+            if (er) {
+                return this[ONERROR](er, entry);
+            }
+            this[CHECKED_CWD] = true;
+        }
+        // don't bother to make the parent if the current entry is the cwd,
+        // we've already checked it.
+        if (entry.absolute !== this.cwd) {
+            const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
+            if (parent !== this.cwd) {
+                const mkParent = this[MKDIR](parent, this.dmode);
+                if (mkParent) {
+                    return this[ONERROR](mkParent, entry);
+                }
+            }
+        }
+        const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
+        if (st &&
+            (this.keep ||
+                /* c8 ignore next */
+                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+            return this[SKIP](entry);
+        }
+        if (lstatEr || this[ISREUSABLE](entry, st)) {
+            return this[MAKEFS](null, entry);
+        }
+        if (st.isDirectory()) {
+            if (entry.type === 'Directory') {
+                const needChmod = this.chmod &&
+                    entry.mode &&
+                    (st.mode & 0o7777) !== entry.mode;
+                const [er] = needChmod ?
+                    callSync(() => {
+                        node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
+                    })
+                    : [];
+                return this[MAKEFS](er, entry);
+            }
+            // not a dir entry, have to remove it
+            const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
+            this[MAKEFS](er, entry);
+        }
+        // not a dir, and not reusable.
+        // don't remove if it's the cwd, since we want that error.
+        const [er] = entry.absolute === this.cwd ?
+            []
+            : callSync(() => unlinkFileSync(String(entry.absolute)));
+        this[MAKEFS](er, entry);
+    }
+    [FILE](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const oner = (er) => {
+            let closeError;
+            try {
+                node_fs_1.default.closeSync(fd);
+            }
+            catch (e) {
+                closeError = e;
+            }
+            if (er || closeError) {
+                this[ONERROR](er || closeError, entry);
+            }
+            done();
+        };
+        let fd;
+        try {
+            fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
+        }
+        catch (er) {
+            return oner(er);
+        }
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => this[ONERROR](er, entry));
+            entry.pipe(tx);
+        }
+        tx.on('data', (chunk) => {
+            try {
+                node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
+            }
+            catch (er) {
+                oner(er);
+            }
+        });
+        tx.on('end', () => {
+            let er = null;
+            // try both, falling futimes back to utimes
+            // if either fails, handle the first error
+            if (entry.mtime && !this.noMtime) {
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                try {
+                    node_fs_1.default.futimesSync(fd, atime, mtime);
+                }
+                catch (futimeser) {
+                    try {
+                        node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
+                    }
+                    catch (utimeser) {
+                        er = futimeser;
+                    }
+                }
+            }
+            if (this[DOCHOWN](entry)) {
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                try {
+                    node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
+                }
+                catch (fchowner) {
+                    try {
+                        node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
+                    }
+                    catch (chowner) {
+                        er = er || fchowner;
+                    }
+                }
+            }
+            oner(er);
+        });
+    }
+    [DIRECTORY](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        const er = this[MKDIR](String(entry.absolute), mode);
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        if (entry.mtime && !this.noMtime) {
+            try {
+                node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+        if (this[DOCHOWN](entry)) {
+            try {
+                node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
+            }
+            catch (er) { }
+        }
+        done();
+        entry.resume();
+    }
+    [MKDIR](dir, mode) {
+        try {
+            return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
+                uid: this.uid,
+                gid: this.gid,
+                processUid: this.processUid,
+                processGid: this.processGid,
+                umask: this.processUmask,
+                preserve: this.preservePaths,
+                unlink: this.unlink,
+                cache: this.dirCache,
+                cwd: this.cwd,
+                mode: mode,
+            });
+        }
+        catch (er) {
+            return er;
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        const ls = `${link}Sync`;
+        try {
+            node_fs_1.default[ls](linkpath, String(entry.absolute));
+            done();
+            entry.resume();
+        }
+        catch (er) {
+            return this[ONERROR](er, entry);
+        }
+    }
+}
+exports.UnpackSync = UnpackSync;
+//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js
new file mode 100644
index 0000000000000..7687896f4bfee
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js
@@ -0,0 +1,33 @@
+"use strict";
+// tar -u
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.update = void 0;
+const make_command_js_1 = require("./make-command.js");
+const replace_js_1 = require("./replace.js");
+// just call tar.r with the filter and mtimeCache
+exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => {
+    replace_js_1.replace.validate?.(opt, entries);
+    mtimeFilter(opt);
+});
+const mtimeFilter = (opt) => {
+    const filter = opt.filter;
+    if (!opt.mtimeCache) {
+        opt.mtimeCache = new Map();
+    }
+    opt.filter =
+        filter ?
+            (path, stat) => filter(path, stat) &&
+                !(
+                /* c8 ignore start */
+                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                    (stat.mtime ?? 0))
+                /* c8 ignore stop */
+                )
+            : (path, stat) => !(
+            /* c8 ignore start */
+            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                (stat.mtime ?? 0))
+            /* c8 ignore stop */
+            );
+};
+//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js
new file mode 100644
index 0000000000000..f25502776e36a
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js
@@ -0,0 +1,31 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.warnMethod = void 0;
+const warnMethod = (self, code, message, data = {}) => {
+    if (self.file) {
+        data.file = self.file;
+    }
+    if (self.cwd) {
+        data.cwd = self.cwd;
+    }
+    data.code =
+        (message instanceof Error &&
+            message.code) ||
+            code;
+    data.tarCode = code;
+    if (!self.strict && data.recoverable !== false) {
+        if (message instanceof Error) {
+            data = Object.assign(message, data);
+            message = message.message;
+        }
+        self.emit('warn', code, message, data);
+    }
+    else if (message instanceof Error) {
+        self.emit('error', Object.assign(message, data));
+    }
+    else {
+        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
+    }
+};
+exports.warnMethod = warnMethod;
+//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js
new file mode 100644
index 0000000000000..c0a4405812929
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js
@@ -0,0 +1,14 @@
+"use strict";
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.decode = exports.encode = void 0;
+const raw = ['|', '<', '>', '?', ':'];
+const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
+const toWin = new Map(raw.map((char, i) => [char, win[i]]));
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
+const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
+exports.encode = encode;
+const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
+exports.decode = decode;
+//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js
new file mode 100644
index 0000000000000..45b7efeb79502
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js
@@ -0,0 +1,689 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0;
+const fs_1 = __importDefault(require("fs"));
+const minipass_1 = require("minipass");
+const path_1 = __importDefault(require("path"));
+const header_js_1 = require("./header.js");
+const mode_fix_js_1 = require("./mode-fix.js");
+const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
+const options_js_1 = require("./options.js");
+const pax_js_1 = require("./pax.js");
+const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
+const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
+const warn_method_js_1 = require("./warn-method.js");
+const winchars = __importStar(require("./winchars.js"));
+const prefixPath = (path, prefix) => {
+    if (!prefix) {
+        return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path);
+    }
+    path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, '');
+    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path;
+};
+const maxReadSize = 16 * 1024 * 1024;
+const PROCESS = Symbol('process');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const HEADER = Symbol('header');
+const READ = Symbol('read');
+const LSTAT = Symbol('lstat');
+const ONLSTAT = Symbol('onlstat');
+const ONREAD = Symbol('onread');
+const ONREADLINK = Symbol('onreadlink');
+const OPENFILE = Symbol('openfile');
+const ONOPENFILE = Symbol('onopenfile');
+const CLOSE = Symbol('close');
+const MODE = Symbol('mode');
+const AWAITDRAIN = Symbol('awaitDrain');
+const ONDRAIN = Symbol('ondrain');
+const PREFIX = Symbol('prefix');
+class WriteEntry extends minipass_1.Minipass {
+    path;
+    portable;
+    myuid = (process.getuid && process.getuid()) || 0;
+    // until node has builtin pwnam functions, this'll have to do
+    myuser = process.env.USER || '';
+    maxReadSize;
+    linkCache;
+    statCache;
+    preservePaths;
+    cwd;
+    strict;
+    mtime;
+    noPax;
+    noMtime;
+    prefix;
+    fd;
+    blockLen = 0;
+    blockRemain = 0;
+    buf;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    offset = 0;
+    win32;
+    absolute;
+    header;
+    type;
+    linkpath;
+    stat;
+    onWriteEntry;
+    #hadError = false;
+    constructor(p, opt_ = {}) {
+        const opt = (0, options_js_1.dealias)(opt_);
+        super();
+        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p);
+        // suppress atime, ctime, uid, gid, uname, gname
+        this.portable = !!opt.portable;
+        this.maxReadSize = opt.maxReadSize || maxReadSize;
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.preservePaths = !!opt.preservePaths;
+        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd());
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.mtime = opt.mtime;
+        this.prefix =
+            opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined;
+        this.onWriteEntry = opt.onWriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.win32 = !!opt.win32 || process.platform === 'win32';
+        if (this.win32) {
+            // force the \ to / normalization, since we might not *actually*
+            // be on windows, but want \ to be considered a path separator.
+            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
+            p = p.replace(/\\/g, '/');
+        }
+        this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p));
+        if (this.path === '') {
+            this.path = './';
+        }
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        const cs = this.statCache.get(this.absolute);
+        if (cs) {
+            this[ONLSTAT](cs);
+        }
+        else {
+            this[LSTAT]();
+        }
+    }
+    warn(code, message, data = {}) {
+        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+    emit(ev, ...data) {
+        if (ev === 'error') {
+            this.#hadError = true;
+        }
+        return super.emit(ev, ...data);
+    }
+    [LSTAT]() {
+        fs_1.default.lstat(this.absolute, (er, stat) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONLSTAT](stat);
+        });
+    }
+    [ONLSTAT](stat) {
+        this.statCache.set(this.absolute, stat);
+        this.stat = stat;
+        if (!stat.isFile()) {
+            stat.size = 0;
+        }
+        this.type = getType(stat);
+        this.emit('stat', stat);
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        switch (this.type) {
+            case 'File':
+                return this[FILE]();
+            case 'Directory':
+                return this[DIRECTORY]();
+            case 'SymbolicLink':
+                return this[SYMLINK]();
+            // unsupported types are ignored.
+            default:
+                return this.end();
+        }
+    }
+    [MODE](mode) {
+        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [HEADER]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot write header before stat');
+        }
+        /* c8 ignore stop */
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.onWriteEntry?.(this);
+        this.header = new header_js_1.Header({
+            path: this[PREFIX](this.path),
+            // only apply the prefix to hard links.
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this[MODE](this.stat.mode),
+            uid: this.portable ? undefined : this.stat.uid,
+            gid: this.portable ? undefined : this.stat.gid,
+            size: this.stat.size,
+            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
+            /* c8 ignore next */
+            type: this.type === 'Unsupported' ? undefined : this.type,
+            uname: this.portable ? undefined
+                : this.stat.uid === this.myuid ? this.myuser
+                    : '',
+            atime: this.portable ? undefined : this.stat.atime,
+            ctime: this.portable ? undefined : this.stat.ctime,
+        });
+        if (this.header.encode() && !this.noPax) {
+            super.write(new pax_js_1.Pax({
+                atime: this.portable ? undefined : this.header.atime,
+                ctime: this.portable ? undefined : this.header.ctime,
+                gid: this.portable ? undefined : this.header.gid,
+                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.header.size,
+                uid: this.portable ? undefined : this.header.uid,
+                uname: this.portable ? undefined : this.header.uname,
+                dev: this.portable ? undefined : this.stat.dev,
+                ino: this.portable ? undefined : this.stat.ino,
+                nlink: this.portable ? undefined : this.stat.nlink,
+            }).encode());
+        }
+        const block = this.header?.block;
+        /* c8 ignore start */
+        if (!block) {
+            throw new Error('failed to encode header');
+        }
+        /* c8 ignore stop */
+        super.write(block);
+    }
+    [DIRECTORY]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create directory entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.path.slice(-1) !== '/') {
+            this.path += '/';
+        }
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [SYMLINK]() {
+        fs_1.default.readlink(this.absolute, (er, linkpath) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADLINK](linkpath);
+        });
+    }
+    [ONREADLINK](linkpath) {
+        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath);
+        this[HEADER]();
+        this.end();
+    }
+    [HARDLINK](linkpath) {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create link entry without stat');
+        }
+        /* c8 ignore stop */
+        this.type = 'Link';
+        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath));
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [FILE]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create file entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.stat.nlink > 1) {
+            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
+            const linkpath = this.linkCache.get(linkKey);
+            if (linkpath?.indexOf(this.cwd) === 0) {
+                return this[HARDLINK](linkpath);
+            }
+            this.linkCache.set(linkKey, this.absolute);
+        }
+        this[HEADER]();
+        if (this.stat.size === 0) {
+            return this.end();
+        }
+        this[OPENFILE]();
+    }
+    [OPENFILE]() {
+        fs_1.default.open(this.absolute, 'r', (er, fd) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONOPENFILE](fd);
+        });
+    }
+    [ONOPENFILE](fd) {
+        this.fd = fd;
+        if (this.#hadError) {
+            return this[CLOSE]();
+        }
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('should stat before calling onopenfile');
+        }
+        /* c8 ignore start */
+        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
+        this.blockRemain = this.blockLen;
+        const bufLen = Math.min(this.blockLen, this.maxReadSize);
+        this.buf = Buffer.allocUnsafe(bufLen);
+        this.offset = 0;
+        this.pos = 0;
+        this.remain = this.stat.size;
+        this.length = this.buf.length;
+        this[READ]();
+    }
+    [READ]() {
+        const { fd, buf, offset, length, pos } = this;
+        if (fd === undefined || buf === undefined) {
+            throw new Error('cannot read file without first opening');
+        }
+        fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+            if (er) {
+                // ignoring the error from close(2) is a bad practice, but at
+                // this point we already have an error, don't need another one
+                return this[CLOSE](() => this.emit('error', er));
+            }
+            this[ONREAD](bytesRead);
+        });
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs_1.default.close(this.fd, cb);
+    }
+    [ONREAD](bytesRead) {
+        if (bytesRead <= 0 && this.remain > 0) {
+            const er = Object.assign(new Error('encountered unexpected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        if (bytesRead > this.remain) {
+            const er = Object.assign(new Error('did not encounter expected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('should have created buffer prior to reading');
+        }
+        /* c8 ignore stop */
+        // null out the rest of the buffer, if we could fit the block padding
+        // at the end of this loop, we've incremented bytesRead and this.remain
+        // to be incremented up to the blockRemain level, as if we had expected
+        // to get a null-padded file, and read it until the end.  then we will
+        // decrement both remain and blockRemain by bytesRead, and know that we
+        // reached the expected EOF, without any null buffer to append.
+        if (bytesRead === this.remain) {
+            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
+                this.buf[i + this.offset] = 0;
+                bytesRead++;
+                this.remain++;
+            }
+        }
+        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
+            this.buf
+            : this.buf.subarray(this.offset, this.offset + bytesRead);
+        const flushed = this.write(chunk);
+        if (!flushed) {
+            this[AWAITDRAIN](() => this[ONDRAIN]());
+        }
+        else {
+            this[ONDRAIN]();
+        }
+    }
+    [AWAITDRAIN](cb) {
+        this.once('drain', cb);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        if (this.blockRemain < chunk.length) {
+            const er = Object.assign(new Error('writing more data than expected'), {
+                path: this.absolute,
+            });
+            return this.emit('error', er);
+        }
+        this.remain -= chunk.length;
+        this.blockRemain -= chunk.length;
+        this.pos += chunk.length;
+        this.offset += chunk.length;
+        return super.write(chunk, null, cb);
+    }
+    [ONDRAIN]() {
+        if (!this.remain) {
+            if (this.blockRemain) {
+                super.write(Buffer.alloc(this.blockRemain));
+            }
+            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('buffer lost somehow in ONDRAIN');
+        }
+        /* c8 ignore stop */
+        if (this.offset >= this.length) {
+            // if we only have a smaller bit left to read, alloc a smaller buffer
+            // otherwise, keep it the same length it was before.
+            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
+            this.offset = 0;
+        }
+        this.length = this.buf.length - this.offset;
+        this[READ]();
+    }
+}
+exports.WriteEntry = WriteEntry;
+class WriteEntrySync extends WriteEntry {
+    sync = true;
+    [LSTAT]() {
+        this[ONLSTAT](fs_1.default.lstatSync(this.absolute));
+    }
+    [SYMLINK]() {
+        this[ONREADLINK](fs_1.default.readlinkSync(this.absolute));
+    }
+    [OPENFILE]() {
+        this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r'));
+    }
+    [READ]() {
+        let threw = true;
+        try {
+            const { fd, buf, offset, length, pos } = this;
+            /* c8 ignore start */
+            if (fd === undefined || buf === undefined) {
+                throw new Error('fd and buf must be set in READ method');
+            }
+            /* c8 ignore stop */
+            const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos);
+            this[ONREAD](bytesRead);
+            threw = false;
+        }
+        finally {
+            // ignoring the error from close(2) is a bad practice, but at
+            // this point we already have an error, don't need another one
+            if (threw) {
+                try {
+                    this[CLOSE](() => { });
+                }
+                catch (er) { }
+            }
+        }
+    }
+    [AWAITDRAIN](cb) {
+        cb();
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs_1.default.closeSync(this.fd);
+        cb();
+    }
+}
+exports.WriteEntrySync = WriteEntrySync;
+class WriteEntryTar extends minipass_1.Minipass {
+    blockLen = 0;
+    blockRemain = 0;
+    buf = 0;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    preservePaths;
+    portable;
+    strict;
+    noPax;
+    noMtime;
+    readEntry;
+    type;
+    prefix;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    header;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    size;
+    onWriteEntry;
+    warn(code, message, data = {}) {
+        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
+    }
+    constructor(readEntry, opt_ = {}) {
+        const opt = (0, options_js_1.dealias)(opt_);
+        super();
+        this.preservePaths = !!opt.preservePaths;
+        this.portable = !!opt.portable;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.onWriteEntry = opt.onWriteEntry;
+        this.readEntry = readEntry;
+        const { type } = readEntry;
+        /* c8 ignore start */
+        if (type === 'Unsupported') {
+            throw new Error('writing entry that should be ignored');
+        }
+        /* c8 ignore stop */
+        this.type = type;
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.prefix = opt.prefix;
+        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path);
+        this.mode =
+            readEntry.mode !== undefined ?
+                this[MODE](readEntry.mode)
+                : undefined;
+        this.uid = this.portable ? undefined : readEntry.uid;
+        this.gid = this.portable ? undefined : readEntry.gid;
+        this.uname = this.portable ? undefined : readEntry.uname;
+        this.gname = this.portable ? undefined : readEntry.gname;
+        this.size = readEntry.size;
+        this.mtime =
+            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
+        this.atime = this.portable ? undefined : readEntry.atime;
+        this.ctime = this.portable ? undefined : readEntry.ctime;
+        this.linkpath =
+            readEntry.linkpath !== undefined ?
+                (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath)
+                : undefined;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.remain = readEntry.size;
+        this.blockRemain = readEntry.startBlockSize;
+        this.onWriteEntry?.(this);
+        this.header = new header_js_1.Header({
+            path: this[PREFIX](this.path),
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this.mode,
+            uid: this.portable ? undefined : this.uid,
+            gid: this.portable ? undefined : this.gid,
+            size: this.size,
+            mtime: this.noMtime ? undefined : this.mtime,
+            type: this.type,
+            uname: this.portable ? undefined : this.uname,
+            atime: this.portable ? undefined : this.atime,
+            ctime: this.portable ? undefined : this.ctime,
+        });
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        if (this.header.encode() && !this.noPax) {
+            super.write(new pax_js_1.Pax({
+                atime: this.portable ? undefined : this.atime,
+                ctime: this.portable ? undefined : this.ctime,
+                gid: this.portable ? undefined : this.gid,
+                mtime: this.noMtime ? undefined : this.mtime,
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.size,
+                uid: this.portable ? undefined : this.uid,
+                uname: this.portable ? undefined : this.uname,
+                dev: this.portable ? undefined : this.readEntry.dev,
+                ino: this.portable ? undefined : this.readEntry.ino,
+                nlink: this.portable ? undefined : this.readEntry.nlink,
+            }).encode());
+        }
+        const b = this.header?.block;
+        /* c8 ignore start */
+        if (!b)
+            throw new Error('failed to encode header');
+        /* c8 ignore stop */
+        super.write(b);
+        readEntry.pipe(this);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [MODE](mode) {
+        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        const writeLen = chunk.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        this.blockRemain -= writeLen;
+        return super.write(chunk, cb);
+    }
+    end(chunk, encoding, cb) {
+        if (this.blockRemain) {
+            super.write(Buffer.alloc(this.blockRemain));
+        }
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding ?? 'utf8');
+        }
+        if (cb)
+            this.once('finish', cb);
+        chunk ? super.end(chunk, cb) : super.end(cb);
+        /* c8 ignore stop */
+        return this;
+    }
+}
+exports.WriteEntryTar = WriteEntryTar;
+const getType = (stat) => stat.isFile() ? 'File'
+    : stat.isDirectory() ? 'Directory'
+        : stat.isSymbolicLink() ? 'SymbolicLink'
+            : 'Unsupported';
+//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js
new file mode 100644
index 0000000000000..512a9911d70d5
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js
@@ -0,0 +1,77 @@
+import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
+import path from 'node:path';
+import { list } from './list.js';
+import { makeCommand } from './make-command.js';
+import { Pack, PackSync } from './pack.js';
+const createFileSync = (opt, files) => {
+    const p = new PackSync(opt);
+    const stream = new WriteStreamSync(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const createFile = (opt, files) => {
+    const p = new Pack(opt);
+    const stream = new WriteStream(opt.file, {
+        mode: opt.mode || 0o666,
+    });
+    p.pipe(stream);
+    const promise = new Promise((res, rej) => {
+        stream.on('error', rej);
+        stream.on('close', res);
+        p.on('error', rej);
+    });
+    addFilesAsync(p, files);
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            list({
+                file: path.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await list({
+                file: path.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => {
+                    p.add(entry);
+                },
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+const createSync = (opt, files) => {
+    const p = new PackSync(opt);
+    addFilesSync(p, files);
+    return p;
+};
+const createAsync = (opt, files) => {
+    const p = new Pack(opt);
+    addFilesAsync(p, files);
+    return p;
+};
+export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
+    if (!files?.length) {
+        throw new TypeError('no paths specified to add to archive');
+    }
+});
+//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js
new file mode 100644
index 0000000000000..289a066b8e031
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js
@@ -0,0 +1,14 @@
+export class CwdError extends Error {
+    path;
+    code;
+    syscall = 'chdir';
+    constructor(path, code) {
+        super(`${code}: Cannot cd into '${path}'`);
+        this.path = path;
+        this.code = code;
+    }
+    get name() {
+        return 'CwdError';
+    }
+}
+//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js
new file mode 100644
index 0000000000000..2274feef26e78
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js
@@ -0,0 +1,49 @@
+// tar -x
+import * as fsm from '@isaacs/fs-minipass';
+import fs from 'node:fs';
+import { filesFilter } from './list.js';
+import { makeCommand } from './make-command.js';
+import { Unpack, UnpackSync } from './unpack.js';
+const extractFileSync = (opt) => {
+    const u = new UnpackSync(opt);
+    const file = opt.file;
+    const stat = fs.statSync(file);
+    // This trades a zero-byte read() syscall for a stat
+    // However, it will usually result in less memory allocation
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const stream = new fsm.ReadStreamSync(file, {
+        readSize: readSize,
+        size: stat.size,
+    });
+    stream.pipe(u);
+};
+const extractFile = (opt, _) => {
+    const u = new Unpack(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        u.on('error', reject);
+        u.on('close', resolve);
+        // This trades a zero-byte read() syscall for a stat
+        // However, it will usually result in less memory allocation
+        fs.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(u);
+            }
+        });
+    });
+    return p;
+};
+export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => {
+    if (files?.length)
+        filesFilter(opt, files);
+});
+//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js
new file mode 100644
index 0000000000000..2c7f3e8b28fda
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js
@@ -0,0 +1,23 @@
+// Get the appropriate flag to use for creating files
+// We use fmap on Windows platforms for files less than
+// 512kb.  This is a fairly low limit, but avoids making
+// things slower in some cases.  Since most of what this
+// library is used for is extracting tarballs of many
+// relatively small files in npm packages and the like,
+// it can be a big boost on Windows platforms.
+import fs from 'fs';
+const platform = process.env.__FAKE_PLATFORM__ || process.platform;
+const isWindows = platform === 'win32';
+/* c8 ignore start */
+const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants;
+const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
+    fs.constants.UV_FS_O_FILEMAP ||
+    0;
+/* c8 ignore stop */
+const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
+const fMapLimit = 512 * 1024;
+const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
+export const getWriteFlag = !fMapEnabled ?
+    () => 'w'
+    : (size) => (size < fMapLimit ? fMapFlag : 'w');
+//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js
new file mode 100644
index 0000000000000..e15192b14b16e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js
@@ -0,0 +1,279 @@
+// parse a 512-byte header block to a data object, or vice-versa
+// encode returns `true` if a pax extended header is needed, because
+// the data could not be faithfully encoded in a simple header.
+// (Also, check header.needPax to see if it needs a pax header.)
+import { posix as pathModule } from 'node:path';
+import * as large from './large-numbers.js';
+import * as types from './types.js';
+export class Header {
+    cksumValid = false;
+    needPax = false;
+    nullBlock = false;
+    block;
+    path;
+    mode;
+    uid;
+    gid;
+    size;
+    cksum;
+    #type = 'Unsupported';
+    linkpath;
+    uname;
+    gname;
+    devmaj = 0;
+    devmin = 0;
+    atime;
+    ctime;
+    mtime;
+    charset;
+    comment;
+    constructor(data, off = 0, ex, gex) {
+        if (Buffer.isBuffer(data)) {
+            this.decode(data, off || 0, ex, gex);
+        }
+        else if (data) {
+            this.#slurp(data);
+        }
+    }
+    decode(buf, off, ex, gex) {
+        if (!off) {
+            off = 0;
+        }
+        if (!buf || !(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        this.path = decString(buf, off, 100);
+        this.mode = decNumber(buf, off + 100, 8);
+        this.uid = decNumber(buf, off + 108, 8);
+        this.gid = decNumber(buf, off + 116, 8);
+        this.size = decNumber(buf, off + 124, 12);
+        this.mtime = decDate(buf, off + 136, 12);
+        this.cksum = decNumber(buf, off + 148, 12);
+        // if we have extended or global extended headers, apply them now
+        // See https://github.com/npm/node-tar/pull/187
+        // Apply global before local, so it overrides
+        if (gex)
+            this.#slurp(gex, true);
+        if (ex)
+            this.#slurp(ex);
+        // old tar versions marked dirs as a file with a trailing /
+        const t = decString(buf, off + 156, 1);
+        if (types.isCode(t)) {
+            this.#type = t || '0';
+        }
+        if (this.#type === '0' && this.path.slice(-1) === '/') {
+            this.#type = '5';
+        }
+        // tar implementations sometimes incorrectly put the stat(dir).size
+        // as the size in the tarball, even though Directory entries are
+        // not able to have any body at all.  In the very rare chance that
+        // it actually DOES have a body, we weren't going to do anything with
+        // it anyway, and it'll just be a warning about an invalid header.
+        if (this.#type === '5') {
+            this.size = 0;
+        }
+        this.linkpath = decString(buf, off + 157, 100);
+        if (buf.subarray(off + 257, off + 265).toString() ===
+            'ustar\u000000') {
+            this.uname = decString(buf, off + 265, 32);
+            this.gname = decString(buf, off + 297, 32);
+            /* c8 ignore start */
+            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
+            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
+            /* c8 ignore stop */
+            if (buf[off + 475] !== 0) {
+                // definitely a prefix, definitely >130 chars.
+                const prefix = decString(buf, off + 345, 155);
+                this.path = prefix + '/' + this.path;
+            }
+            else {
+                const prefix = decString(buf, off + 345, 130);
+                if (prefix) {
+                    this.path = prefix + '/' + this.path;
+                }
+                this.atime = decDate(buf, off + 476, 12);
+                this.ctime = decDate(buf, off + 488, 12);
+            }
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksumValid = sum === this.cksum;
+        if (this.cksum === undefined && sum === 8 * 0x20) {
+            this.nullBlock = true;
+        }
+    }
+    #slurp(ex, gex = false) {
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex) ||
+                (k === 'linkpath' && gex) ||
+                k === 'global');
+        })));
+    }
+    encode(buf, off = 0) {
+        if (!buf) {
+            buf = this.block = Buffer.alloc(512);
+        }
+        if (this.#type === 'Unsupported') {
+            this.#type = '0';
+        }
+        if (!(buf.length >= off + 512)) {
+            throw new Error('need 512 bytes for header');
+        }
+        const prefixSize = this.ctime || this.atime ? 130 : 155;
+        const split = splitPrefix(this.path || '', prefixSize);
+        const path = split[0];
+        const prefix = split[1];
+        this.needPax = !!split[2];
+        this.needPax = encString(buf, off, 100, path) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 124, 12, this.size) || this.needPax;
+        this.needPax =
+            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
+        buf[off + 156] = this.#type.charCodeAt(0);
+        this.needPax =
+            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
+        buf.write('ustar\u000000', off + 257, 8);
+        this.needPax =
+            encString(buf, off + 265, 32, this.uname) || this.needPax;
+        this.needPax =
+            encString(buf, off + 297, 32, this.gname) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
+        this.needPax =
+            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
+        this.needPax =
+            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
+        if (buf[off + 475] !== 0) {
+            this.needPax =
+                encString(buf, off + 345, 155, prefix) || this.needPax;
+        }
+        else {
+            this.needPax =
+                encString(buf, off + 345, 130, prefix) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 476, 12, this.atime) || this.needPax;
+            this.needPax =
+                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
+        }
+        let sum = 8 * 0x20;
+        for (let i = off; i < off + 148; i++) {
+            sum += buf[i];
+        }
+        for (let i = off + 156; i < off + 512; i++) {
+            sum += buf[i];
+        }
+        this.cksum = sum;
+        encNumber(buf, off + 148, 8, this.cksum);
+        this.cksumValid = true;
+        return this.needPax;
+    }
+    get type() {
+        return (this.#type === 'Unsupported' ?
+            this.#type
+            : types.name.get(this.#type));
+    }
+    get typeKey() {
+        return this.#type;
+    }
+    set type(type) {
+        const c = String(types.code.get(type));
+        if (types.isCode(c) || c === 'Unsupported') {
+            this.#type = c;
+        }
+        else if (types.isCode(type)) {
+            this.#type = type;
+        }
+        else {
+            throw new TypeError('invalid entry type: ' + type);
+        }
+    }
+}
+const splitPrefix = (p, prefixSize) => {
+    const pathSize = 100;
+    let pp = p;
+    let prefix = '';
+    let ret = undefined;
+    const root = pathModule.parse(p).root || '.';
+    if (Buffer.byteLength(pp) < pathSize) {
+        ret = [pp, prefix, false];
+    }
+    else {
+        // first set prefix to the dir, and path to the base
+        prefix = pathModule.dirname(pp);
+        pp = pathModule.basename(pp);
+        do {
+            if (Buffer.byteLength(pp) <= pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // both fit!
+                ret = [pp, prefix, false];
+            }
+            else if (Buffer.byteLength(pp) > pathSize &&
+                Buffer.byteLength(prefix) <= prefixSize) {
+                // prefix fits in prefix, but path doesn't fit in path
+                ret = [pp.slice(0, pathSize - 1), prefix, true];
+            }
+            else {
+                // make path take a bit from prefix
+                pp = pathModule.join(pathModule.basename(prefix), pp);
+                prefix = pathModule.dirname(prefix);
+            }
+        } while (prefix !== root && ret === undefined);
+        // at this point, found no resolution, just truncate
+        if (!ret) {
+            ret = [p.slice(0, pathSize - 1), '', true];
+        }
+    }
+    return ret;
+};
+const decString = (buf, off, size) => buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*/, '');
+const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
+const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
+const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
+    large.parse(buf.subarray(off, off + size))
+    : decSmallNumber(buf, off, size);
+const nanUndef = (value) => (isNaN(value) ? undefined : value);
+const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
+    .subarray(off, off + size)
+    .toString('utf8')
+    .replace(/\0.*$/, '')
+    .trim(), 8));
+// the maximum encodable as a null-terminated octal, by field size
+const MAXNUM = {
+    12: 0o77777777777,
+    8: 0o7777777,
+};
+const encNumber = (buf, off, size, num) => num === undefined ? false
+    : num > MAXNUM[size] || num < 0 ?
+        (large.encode(num, buf.subarray(off, off + size)), true)
+        : (encSmallNumber(buf, off, size, num), false);
+const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
+const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
+const padOctal = (str, size) => (str.length === size - 1 ?
+    str
+    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
+const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
+// enough to fill the longest string we've got
+const NULLS = new Array(156).join('\0');
+// pad with nulls, return true if it's longer or non-ascii
+const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
+    str.length !== Buffer.byteLength(str) || str.length > size));
+//# sourceMappingURL=header.js.map
\ No newline at end of file
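
For orientation, a minimal round-trip sketch using the Header class added above (it relies on the public `tar` export shown in the index.js section that follows; all values are illustrative):

import { Header } from 'tar'

const h = new Header({
  path: 'hello.txt',
  mode: 0o644,
  size: 5,
  type: 'File',
  mtime: new Date(0),
})
const block = Buffer.alloc(512)
h.encode(block, 0)                 // fills the 512-byte ustar block; returns needPax
const parsed = new Header(block)   // decodes the same block
console.log(parsed.path, parsed.size, parsed.cksumValid) // 'hello.txt' 5 true
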
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js
new file mode 100644
index 0000000000000..1bac6415c8d73
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js
@@ -0,0 +1,20 @@
+export * from './create.js';
+export { create as c } from './create.js';
+export * from './extract.js';
+export { extract as x } from './extract.js';
+export * from './header.js';
+export * from './list.js';
+export { list as t } from './list.js';
+// classes
+export * from './pack.js';
+export * from './parse.js';
+export * from './pax.js';
+export * from './read-entry.js';
+export * from './replace.js';
+export { replace as r } from './replace.js';
+export * as types from './types.js';
+export * from './unpack.js';
+export * from './update.js';
+export { update as u } from './update.js';
+export * from './write-entry.js';
+//# sourceMappingURL=index.js.map
\ No newline at end of file
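
The single-letter exports mirror the tar(1) verbs. A quick usage sketch (paths are hypothetical, and the extract target directory is assumed to exist):

import * as tar from 'tar'

await tar.c({ file: 'out.tar', cwd: 'src' }, ['.'])                      // create
await tar.t({ file: 'out.tar', onReadEntry: e => console.log(e.path) })  // list
await tar.x({ file: 'out.tar', cwd: 'dest' })                            // extract
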
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js
new file mode 100644
index 0000000000000..4f2f7e5f14fc1
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js
@@ -0,0 +1,94 @@
+// Tar can encode large and negative numbers using a leading byte of
+// 0xff for negative, and 0x80 for positive.
+export const encode = (num, buf) => {
+    if (!Number.isSafeInteger(num)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('cannot encode number outside of javascript safe integer range');
+    }
+    else if (num < 0) {
+        encodeNegative(num, buf);
+    }
+    else {
+        encodePositive(num, buf);
+    }
+    return buf;
+};
+const encodePositive = (num, buf) => {
+    buf[0] = 0x80;
+    for (var i = buf.length; i > 1; i--) {
+        buf[i - 1] = num & 0xff;
+        num = Math.floor(num / 0x100);
+    }
+};
+const encodeNegative = (num, buf) => {
+    buf[0] = 0xff;
+    var flipped = false;
+    num = num * -1;
+    for (var i = buf.length; i > 1; i--) {
+        var byte = num & 0xff;
+        num = Math.floor(num / 0x100);
+        if (flipped) {
+            buf[i - 1] = onesComp(byte);
+        }
+        else if (byte === 0) {
+            buf[i - 1] = 0;
+        }
+        else {
+            flipped = true;
+            buf[i - 1] = twosComp(byte);
+        }
+    }
+};
+export const parse = (buf) => {
+    const pre = buf[0];
+    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
+        : pre === 0xff ? twos(buf)
+            : null;
+    if (value === null) {
+        throw Error('invalid base256 encoding');
+    }
+    if (!Number.isSafeInteger(value)) {
+        // The number is so large that javascript cannot represent it with integer
+        // precision.
+        throw Error('parsed number outside of javascript safe integer range');
+    }
+    return value;
+};
+const twos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    var flipped = false;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        var f;
+        if (flipped) {
+            f = onesComp(byte);
+        }
+        else if (byte === 0) {
+            f = byte;
+        }
+        else {
+            flipped = true;
+            f = twosComp(byte);
+        }
+        if (f !== 0) {
+            sum -= f * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const pos = (buf) => {
+    var len = buf.length;
+    var sum = 0;
+    for (var i = len - 1; i > -1; i--) {
+        var byte = Number(buf[i]);
+        if (byte !== 0) {
+            sum += byte * Math.pow(256, len - i - 1);
+        }
+    }
+    return sum;
+};
+const onesComp = (byte) => (0xff ^ byte) & 0xff;
+const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
+//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
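
A worked example of the base-256 scheme above: a size too large for the 12-byte octal field (max 0o77777777777, roughly 8 GiB) gets a 0x80 lead byte followed by a big-endian binary payload. Sketch using a relative sibling import, as these modules do internally:

import { encode, parse } from './large-numbers.js'

const buf = Buffer.alloc(12)
encode(8 * 1024 ** 4, buf)          // 8 TiB, beyond the octal limit
console.log(buf[0].toString(16))    // '80' -> positive base-256 marker
console.log(parse(buf))             // 8796093022208
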
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js
new file mode 100644
index 0000000000000..f49068400b6c9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js
@@ -0,0 +1,106 @@
+// tar -t
+import * as fsm from '@isaacs/fs-minipass';
+import fs from 'node:fs';
+import { dirname, parse } from 'path';
+import { makeCommand } from './make-command.js';
+import { Parser } from './parse.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+const onReadEntryFunction = (opt) => {
+    const onReadEntry = opt.onReadEntry;
+    opt.onReadEntry =
+        onReadEntry ?
+            e => {
+                onReadEntry(e);
+                e.resume();
+            }
+            : e => e.resume();
+};
+// construct a filter that limits the file entries listed
+// include child entries if a dir is included
+export const filesFilter = (opt, files) => {
+    const map = new Map(files.map(f => [stripTrailingSlashes(f), true]));
+    const filter = opt.filter;
+    const mapHas = (file, r = '') => {
+        const root = r || parse(file).root || '.';
+        let ret;
+        if (file === root)
+            ret = false;
+        else {
+            const m = map.get(file);
+            if (m !== undefined) {
+                ret = m;
+            }
+            else {
+                ret = mapHas(dirname(file), root);
+            }
+        }
+        map.set(file, ret);
+        return ret;
+    };
+    opt.filter =
+        filter ?
+            (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file))
+            : file => mapHas(stripTrailingSlashes(file));
+};
+const listFileSync = (opt) => {
+    const p = new Parser(opt);
+    const file = opt.file;
+    let fd;
+    try {
+        const stat = fs.statSync(file);
+        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+        if (stat.size < readSize) {
+            p.end(fs.readFileSync(file));
+        }
+        else {
+            let pos = 0;
+            const buf = Buffer.allocUnsafe(readSize);
+            fd = fs.openSync(file, 'r');
+            while (pos < stat.size) {
+                const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
+                pos += bytesRead;
+                p.write(buf.subarray(0, bytesRead));
+            }
+            p.end();
+        }
+    }
+    finally {
+        if (typeof fd === 'number') {
+            try {
+                fs.closeSync(fd);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+    }
+};
+const listFile = (opt, _files) => {
+    const parse = new Parser(opt);
+    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
+    const file = opt.file;
+    const p = new Promise((resolve, reject) => {
+        parse.on('error', reject);
+        parse.on('end', resolve);
+        fs.stat(file, (er, stat) => {
+            if (er) {
+                reject(er);
+            }
+            else {
+                const stream = new fsm.ReadStream(file, {
+                    readSize: readSize,
+                    size: stat.size,
+                });
+                stream.on('error', reject);
+                stream.pipe(parse);
+            }
+        });
+    });
+    return p;
+};
+export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => {
+    if (files?.length)
+        filesFilter(opt, files);
+    if (!opt.noResume)
+        onReadEntryFunction(opt);
+});
+//# sourceMappingURL=list.js.map
\ No newline at end of file
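
Usage sketch for the list command defined above (the archive name is hypothetical). onReadEntry receives each ReadEntry, and entries are resumed automatically unless noResume is set:

import { list } from 'tar'

await list({
  file: 'archive.tar',
  onReadEntry: entry => console.log(entry.path, entry.size),
})
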
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js
new file mode 100644
index 0000000000000..f2f737bca78fd
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js
@@ -0,0 +1,57 @@
+import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js';
+export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
+    return Object.assign((opt_ = [], entries, cb) => {
+        if (Array.isArray(opt_)) {
+            entries = opt_;
+            opt_ = {};
+        }
+        if (typeof entries === 'function') {
+            cb = entries;
+            entries = undefined;
+        }
+        if (!entries) {
+            entries = [];
+        }
+        else {
+            entries = Array.from(entries);
+        }
+        const opt = dealias(opt_);
+        validate?.(opt, entries);
+        if (isSyncFile(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncFile(opt, entries);
+        }
+        else if (isAsyncFile(opt)) {
+            const p = asyncFile(opt, entries);
+            // weirdness to make TS happy
+            const c = cb ? cb : undefined;
+            return c ? p.then(() => c(), c) : p;
+        }
+        else if (isSyncNoFile(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback not supported for sync tar functions');
+            }
+            return syncNoFile(opt, entries);
+        }
+        else if (isAsyncNoFile(opt)) {
+            if (typeof cb === 'function') {
+                throw new TypeError('callback only supported with file option');
+            }
+            return asyncNoFile(opt, entries);
+            /* c8 ignore start */
+        }
+        else {
+            throw new Error('impossible options??');
+        }
+        /* c8 ignore stop */
+    }, {
+        syncFile,
+        asyncFile,
+        syncNoFile,
+        asyncNoFile,
+        validate,
+    });
+};
+//# sourceMappingURL=make-command.js.map
\ No newline at end of file
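
The four variants are selected purely from { sync, file } on the dealiased options. For example, with the list command from the previous section (file names hypothetical):

import { list } from 'tar'

list({ sync: true, file: 'a.tar' })   // isSyncFile  -> listFileSync, returns undefined
list({ file: 'a.tar' })               // isAsyncFile -> listFile, returns a Promise
const p = list({})                    // isAsyncNoFile -> new Parser(opt); pipe a stream into it
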
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js
new file mode 100644
index 0000000000000..13498ef0082f0
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js
@@ -0,0 +1,201 @@
+import { chownr, chownrSync } from 'chownr';
+import fs from 'fs';
+import { mkdirp, mkdirpSync } from 'mkdirp';
+import path from 'node:path';
+import { CwdError } from './cwd-error.js';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+import { SymlinkError } from './symlink-error.js';
+const cGet = (cache, key) => cache.get(normalizeWindowsPath(key));
+const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val);
+const checkCwd = (dir, cb) => {
+    fs.stat(dir, (er, st) => {
+        if (er || !st.isDirectory()) {
+            er = new CwdError(dir, er?.code || 'ENOTDIR');
+        }
+        cb(er);
+    });
+};
+/**
+ * Wrapper around mkdirp for tar's needs.
+ *
+ * The main purpose is to avoid creating directories if we know that
+ * they already exist (and track which ones exist for this purpose),
+ * and prevent entries from being extracted into symlinked folders,
+ * if `preservePaths` is not set.
+ */
+export const mkdir = (dir, opt, cb) => {
+    dir = normalizeWindowsPath(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o0700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = normalizeWindowsPath(opt.cwd);
+    const done = (er, created) => {
+        if (er) {
+            cb(er);
+        }
+        else {
+            cSet(cache, dir, true);
+            if (created && doChown) {
+                chownr(created, uid, gid, er => done(er));
+            }
+            else if (needChmod) {
+                fs.chmod(dir, mode, cb);
+            }
+            else {
+                cb();
+            }
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        return checkCwd(dir, done);
+    }
+    if (preserve) {
+        return mkdirp(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
+        done);
+    }
+    const sub = normalizeWindowsPath(path.relative(cwd, dir));
+    const parts = sub.split('/');
+    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
+};
+const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+    if (!parts.length) {
+        return cb(null, created);
+    }
+    const p = parts.shift();
+    const part = normalizeWindowsPath(path.resolve(base + '/' + p));
+    if (cGet(cache, part)) {
+        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+};
+const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
+    if (er) {
+        fs.lstat(part, (statEr, st) => {
+            if (statEr) {
+                statEr.path =
+                    statEr.path && normalizeWindowsPath(statEr.path);
+                cb(statEr);
+            }
+            else if (st.isDirectory()) {
+                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+            }
+            else if (unlink) {
+                fs.unlink(part, er => {
+                    if (er) {
+                        return cb(er);
+                    }
+                    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+                });
+            }
+            else if (st.isSymbolicLink()) {
+                return cb(new SymlinkError(part, part + '/' + parts.join('/')));
+            }
+            else {
+                cb(er);
+            }
+        });
+    }
+    else {
+        created = created || part;
+        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+    }
+};
+const checkCwdSync = (dir) => {
+    let ok = false;
+    let code = undefined;
+    try {
+        ok = fs.statSync(dir).isDirectory();
+    }
+    catch (er) {
+        code = er?.code;
+    }
+    finally {
+        if (!ok) {
+            throw new CwdError(dir, code ?? 'ENOTDIR');
+        }
+    }
+};
+export const mkdirSync = (dir, opt) => {
+    dir = normalizeWindowsPath(dir);
+    // if there's any overlap between mask and mode,
+    // then we'll need an explicit chmod
+    /* c8 ignore next */
+    const umask = opt.umask ?? 0o22;
+    const mode = opt.mode | 0o700;
+    const needChmod = (mode & umask) !== 0;
+    const uid = opt.uid;
+    const gid = opt.gid;
+    const doChown = typeof uid === 'number' &&
+        typeof gid === 'number' &&
+        (uid !== opt.processUid || gid !== opt.processGid);
+    const preserve = opt.preserve;
+    const unlink = opt.unlink;
+    const cache = opt.cache;
+    const cwd = normalizeWindowsPath(opt.cwd);
+    const done = (created) => {
+        cSet(cache, dir, true);
+        if (created && doChown) {
+            chownrSync(created, uid, gid);
+        }
+        if (needChmod) {
+            fs.chmodSync(dir, mode);
+        }
+    };
+    if (cache && cGet(cache, dir) === true) {
+        return done();
+    }
+    if (dir === cwd) {
+        checkCwdSync(cwd);
+        return done();
+    }
+    if (preserve) {
+        return done(mkdirpSync(dir, mode) ?? undefined);
+    }
+    const sub = normalizeWindowsPath(path.relative(cwd, dir));
+    const parts = sub.split('/');
+    let created = undefined;
+    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
+        part = normalizeWindowsPath(path.resolve(part));
+        if (cGet(cache, part)) {
+            continue;
+        }
+        try {
+            fs.mkdirSync(part, mode);
+            created = created || part;
+            cSet(cache, part, true);
+        }
+        catch (er) {
+            const st = fs.lstatSync(part);
+            if (st.isDirectory()) {
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (unlink) {
+                fs.unlinkSync(part);
+                fs.mkdirSync(part, mode);
+                created = created || part;
+                cSet(cache, part, true);
+                continue;
+            }
+            else if (st.isSymbolicLink()) {
+                return new SymlinkError(part, part + '/' + parts.join('/'));
+            }
+        }
+    }
+    return done(created);
+};
+//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js
new file mode 100644
index 0000000000000..5fd3bb88c1cb2
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js
@@ -0,0 +1,25 @@
+export const modeFix = (mode, isDir, portable) => {
+    mode &= 0o7777;
+    // in portable mode, use the minimum reasonable umask
+    // if this system creates files with 0o664 by default
+    // (as some linux distros do), then we'll write the
+    // archive with 0o644 instead.  Also, don't ever create
+    // a file that is not readable/writable by the owner.
+    if (portable) {
+        mode = (mode | 0o600) & ~0o22;
+    }
+    // if dirs are readable, then they should be listable
+    if (isDir) {
+        if (mode & 0o400) {
+            mode |= 0o100;
+        }
+        if (mode & 0o40) {
+            mode |= 0o10;
+        }
+        if (mode & 0o4) {
+            mode |= 0o1;
+        }
+    }
+    return mode;
+};
+//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
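
Worked values for the rules above (a sketch with a relative sibling import):

import { modeFix } from './mode-fix.js'

modeFix(0o664, false, true)   // 0o644: portable mode clears group/other write bits
modeFix(0o640, true, false)   // 0o750: readable directory bits gain matching execute bits
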
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js
new file mode 100644
index 0000000000000..94e5095476d6e
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js
@@ -0,0 +1,13 @@
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+const normalizeCache = Object.create(null);
+const { hasOwnProperty } = Object.prototype;
+export const normalizeUnicode = (s) => {
+    if (!hasOwnProperty.call(normalizeCache, s)) {
+        normalizeCache[s] = s.normalize('NFD');
+    }
+    return normalizeCache[s];
+};
+//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js
new file mode 100644
index 0000000000000..2d97d2b884e62
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js
@@ -0,0 +1,9 @@
+// on windows, either \ or / are valid directory separators.
+// on unix, \ is a valid character in filenames.
+// so, on windows, and only on windows, we replace all \ chars with /,
+// so that we can use / as our one and only directory separator char.
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+export const normalizeWindowsPath = platform !== 'win32' ?
+    (p) => p
+    : (p) => p && p.replace(/\\/g, '/');
+//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js
new file mode 100644
index 0000000000000..a006d36c23c92
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js
@@ -0,0 +1,54 @@
+// turn tar(1) style args like `C` into the more verbose things like `cwd`
+const argmap = new Map([
+    ['C', 'cwd'],
+    ['f', 'file'],
+    ['z', 'gzip'],
+    ['P', 'preservePaths'],
+    ['U', 'unlink'],
+    ['strip-components', 'strip'],
+    ['stripComponents', 'strip'],
+    ['keep-newer', 'newer'],
+    ['keepNewer', 'newer'],
+    ['keep-newer-files', 'newer'],
+    ['keepNewerFiles', 'newer'],
+    ['k', 'keep'],
+    ['keep-existing', 'keep'],
+    ['keepExisting', 'keep'],
+    ['m', 'noMtime'],
+    ['no-mtime', 'noMtime'],
+    ['p', 'preserveOwner'],
+    ['L', 'follow'],
+    ['h', 'follow'],
+    ['onentry', 'onReadEntry'],
+]);
+export const isSyncFile = (o) => !!o.sync && !!o.file;
+export const isAsyncFile = (o) => !o.sync && !!o.file;
+export const isSyncNoFile = (o) => !!o.sync && !o.file;
+export const isAsyncNoFile = (o) => !o.sync && !o.file;
+export const isSync = (o) => !!o.sync;
+export const isAsync = (o) => !o.sync;
+export const isFile = (o) => !!o.file;
+export const isNoFile = (o) => !o.file;
+const dealiasKey = (k) => {
+    const d = argmap.get(k);
+    if (d)
+        return d;
+    return k;
+};
+export const dealias = (opt = {}) => {
+    if (!opt)
+        return {};
+    const result = {};
+    for (const [key, v] of Object.entries(opt)) {
+        // TS doesn't know that aliases are going to always be the same type
+        const k = dealiasKey(key);
+        result[k] = v;
+    }
+    // affordance for deprecated noChmod -> chmod
+    if (result.chmod === undefined && result.noChmod === false) {
+        result.chmod = true;
+    }
+    delete result.noChmod;
+    return result;
+};
+//# sourceMappingURL=options.js.map
\ No newline at end of file
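
A dealias sketch showing the tar(1)-style aliases from the argmap above being expanded (values illustrative):

import { dealias } from './options.js'

dealias({ C: '/tmp', f: 'out.tar', z: true, onentry: e => e.resume() })
// => { cwd: '/tmp', file: 'out.tar', gzip: true, onReadEntry: [Function] }
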
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js
new file mode 100644
index 0000000000000..f59f32f94201f
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js
@@ -0,0 +1,445 @@
+// A readable tar stream creator
+// Technically, this is a transform stream that you write paths into,
+// and tar format comes out of.
+// The `add()` method is like `write()` but returns this,
+// and end() return `this` as well, so you can
+// do `new Pack(opt).add('files').add('dir').end().pipe(output)
+// You could also do something like:
+// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
+import fs from 'fs';
+import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js';
+export class PackJob {
+    path;
+    absolute;
+    entry;
+    stat;
+    readdir;
+    pending = false;
+    ignore = false;
+    piped = false;
+    constructor(path, absolute) {
+        this.path = path || './';
+        this.absolute = absolute;
+    }
+}
+import { Minipass } from 'minipass';
+import * as zlib from 'minizlib';
+import { Yallist } from 'yallist';
+import { ReadEntry } from './read-entry.js';
+import { warnMethod, } from './warn-method.js';
+const EOF = Buffer.alloc(1024);
+const ONSTAT = Symbol('onStat');
+const ENDED = Symbol('ended');
+const QUEUE = Symbol('queue');
+const CURRENT = Symbol('current');
+const PROCESS = Symbol('process');
+const PROCESSING = Symbol('processing');
+const PROCESSJOB = Symbol('processJob');
+const JOBS = Symbol('jobs');
+const JOBDONE = Symbol('jobDone');
+const ADDFSENTRY = Symbol('addFSEntry');
+const ADDTARENTRY = Symbol('addTarEntry');
+const STAT = Symbol('stat');
+const READDIR = Symbol('readdir');
+const ONREADDIR = Symbol('onreaddir');
+const PIPE = Symbol('pipe');
+const ENTRY = Symbol('entry');
+const ENTRYOPT = Symbol('entryOpt');
+const WRITEENTRYCLASS = Symbol('writeEntryClass');
+const WRITE = Symbol('write');
+const ONDRAIN = Symbol('ondrain');
+import path from 'path';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+export class Pack extends Minipass {
+    opt;
+    cwd;
+    maxReadSize;
+    preservePaths;
+    strict;
+    noPax;
+    prefix;
+    linkCache;
+    statCache;
+    file;
+    portable;
+    zip;
+    readdirCache;
+    noDirRecurse;
+    follow;
+    noMtime;
+    mtime;
+    filter;
+    jobs;
+    [WRITEENTRYCLASS];
+    onWriteEntry;
+    [QUEUE];
+    [JOBS] = 0;
+    [PROCESSING] = false;
+    [ENDED] = false;
+    constructor(opt = {}) {
+        //@ts-ignore
+        super();
+        this.opt = opt;
+        this.file = opt.file || '';
+        this.cwd = opt.cwd || process.cwd();
+        this.maxReadSize = opt.maxReadSize;
+        this.preservePaths = !!opt.preservePaths;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.prefix = normalizeWindowsPath(opt.prefix || '');
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.readdirCache = opt.readdirCache || new Map();
+        this.onWriteEntry = opt.onWriteEntry;
+        this[WRITEENTRYCLASS] = WriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        this.portable = !!opt.portable;
+        if (opt.gzip || opt.brotli) {
+            if (opt.gzip && opt.brotli) {
+                throw new TypeError('gzip and brotli are mutually exclusive');
+            }
+            if (opt.gzip) {
+                if (typeof opt.gzip !== 'object') {
+                    opt.gzip = {};
+                }
+                if (this.portable) {
+                    opt.gzip.portable = true;
+                }
+                this.zip = new zlib.Gzip(opt.gzip);
+            }
+            if (opt.brotli) {
+                if (typeof opt.brotli !== 'object') {
+                    opt.brotli = {};
+                }
+                this.zip = new zlib.BrotliCompress(opt.brotli);
+            }
+            /* c8 ignore next */
+            if (!this.zip)
+                throw new Error('impossible');
+            const zip = this.zip;
+            zip.on('data', chunk => super.write(chunk));
+            zip.on('end', () => super.end());
+            zip.on('drain', () => this[ONDRAIN]());
+            this.on('resume', () => zip.resume());
+        }
+        else {
+            this.on('drain', this[ONDRAIN]);
+        }
+        this.noDirRecurse = !!opt.noDirRecurse;
+        this.follow = !!opt.follow;
+        this.noMtime = !!opt.noMtime;
+        if (opt.mtime)
+            this.mtime = opt.mtime;
+        this.filter =
+            typeof opt.filter === 'function' ? opt.filter : () => true;
+        this[QUEUE] = new Yallist();
+        this[JOBS] = 0;
+        this.jobs = Number(opt.jobs) || 4;
+        this[PROCESSING] = false;
+        this[ENDED] = false;
+    }
+    [WRITE](chunk) {
+        return super.write(chunk);
+    }
+    add(path) {
+        this.write(path);
+        return this;
+    }
+    end(path, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof path === 'function') {
+            cb = path;
+            path = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (path) {
+            this.add(path);
+        }
+        this[ENDED] = true;
+        this[PROCESS]();
+        /* c8 ignore next */
+        if (cb)
+            cb();
+        return this;
+    }
+    write(path) {
+        if (this[ENDED]) {
+            throw new Error('write after end');
+        }
+        if (path instanceof ReadEntry) {
+            this[ADDTARENTRY](path);
+        }
+        else {
+            this[ADDFSENTRY](path);
+        }
+        return this.flowing;
+    }
+    [ADDTARENTRY](p) {
+        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path));
+        // in this case, we don't have to wait for the stat
+        if (!this.filter(p.path, p)) {
+            p.resume();
+        }
+        else {
+            const job = new PackJob(p.path, absolute);
+            job.entry = new WriteEntryTar(p, this[ENTRYOPT](job));
+            job.entry.on('end', () => this[JOBDONE](job));
+            this[JOBS] += 1;
+            this[QUEUE].push(job);
+        }
+        this[PROCESS]();
+    }
+    [ADDFSENTRY](p) {
+        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p));
+        this[QUEUE].push(new PackJob(p, absolute));
+        this[PROCESS]();
+    }
+    [STAT](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        const stat = this.follow ? 'stat' : 'lstat';
+        fs[stat](job.absolute, (er, stat) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                this.emit('error', er);
+            }
+            else {
+                this[ONSTAT](job, stat);
+            }
+        });
+    }
+    [ONSTAT](job, stat) {
+        this.statCache.set(job.absolute, stat);
+        job.stat = stat;
+        // now we have the stat, we can filter it.
+        if (!this.filter(job.path, stat)) {
+            job.ignore = true;
+        }
+        this[PROCESS]();
+    }
+    [READDIR](job) {
+        job.pending = true;
+        this[JOBS] += 1;
+        fs.readdir(job.absolute, (er, entries) => {
+            job.pending = false;
+            this[JOBS] -= 1;
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADDIR](job, entries);
+        });
+    }
+    [ONREADDIR](job, entries) {
+        this.readdirCache.set(job.absolute, entries);
+        job.readdir = entries;
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        if (this[PROCESSING]) {
+            return;
+        }
+        this[PROCESSING] = true;
+        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
+            this[PROCESSJOB](w.value);
+            if (w.value.ignore) {
+                const p = w.next;
+                this[QUEUE].removeNode(w);
+                w.next = p;
+            }
+        }
+        this[PROCESSING] = false;
+        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
+            if (this.zip) {
+                this.zip.end(EOF);
+            }
+            else {
+                super.write(EOF);
+                super.end();
+            }
+        }
+    }
+    get [CURRENT]() {
+        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
+    }
+    [JOBDONE](_job) {
+        this[QUEUE].shift();
+        this[JOBS] -= 1;
+        this[PROCESS]();
+    }
+    [PROCESSJOB](job) {
+        if (job.pending) {
+            return;
+        }
+        if (job.entry) {
+            if (job === this[CURRENT] && !job.piped) {
+                this[PIPE](job);
+            }
+            return;
+        }
+        if (!job.stat) {
+            const sc = this.statCache.get(job.absolute);
+            if (sc) {
+                this[ONSTAT](job, sc);
+            }
+            else {
+                this[STAT](job);
+            }
+        }
+        if (!job.stat) {
+            return;
+        }
+        // filtered out!
+        if (job.ignore) {
+            return;
+        }
+        if (!this.noDirRecurse &&
+            job.stat.isDirectory() &&
+            !job.readdir) {
+            const rc = this.readdirCache.get(job.absolute);
+            if (rc) {
+                this[ONREADDIR](job, rc);
+            }
+            else {
+                this[READDIR](job);
+            }
+            if (!job.readdir) {
+                return;
+            }
+        }
+        // we know it doesn't have an entry, because that got checked above
+        job.entry = this[ENTRY](job);
+        if (!job.entry) {
+            job.ignore = true;
+            return;
+        }
+        if (job === this[CURRENT] && !job.piped) {
+            this[PIPE](job);
+        }
+    }
+    [ENTRYOPT](job) {
+        return {
+            onwarn: (code, msg, data) => this.warn(code, msg, data),
+            noPax: this.noPax,
+            cwd: this.cwd,
+            absolute: job.absolute,
+            preservePaths: this.preservePaths,
+            maxReadSize: this.maxReadSize,
+            strict: this.strict,
+            portable: this.portable,
+            linkCache: this.linkCache,
+            statCache: this.statCache,
+            noMtime: this.noMtime,
+            mtime: this.mtime,
+            prefix: this.prefix,
+            onWriteEntry: this.onWriteEntry,
+        };
+    }
+    [ENTRY](job) {
+        this[JOBS] += 1;
+        try {
+            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
+            return e
+                .on('end', () => this[JOBDONE](job))
+                .on('error', er => this.emit('error', er));
+        }
+        catch (er) {
+            this.emit('error', er);
+        }
+    }
+    [ONDRAIN]() {
+        if (this[CURRENT] && this[CURRENT].entry) {
+            this[CURRENT].entry.resume();
+        }
+    }
+    // like .pipe() but using super, because our write() is special
+    [PIPE](job) {
+        job.piped = true;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        const source = job.entry;
+        const zip = this.zip;
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                if (!zip.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                if (!super.write(chunk)) {
+                    source.pause();
+                }
+            });
+        }
+    }
+    pause() {
+        if (this.zip) {
+            this.zip.pause();
+        }
+        return super.pause();
+    }
+    warn(code, message, data = {}) {
+        warnMethod(this, code, message, data);
+    }
+}
+export class PackSync extends Pack {
+    sync = true;
+    constructor(opt) {
+        super(opt);
+        this[WRITEENTRYCLASS] = WriteEntrySync;
+    }
+    // pause/resume are no-ops in sync streams.
+    pause() { }
+    resume() { }
+    [STAT](job) {
+        const stat = this.follow ? 'statSync' : 'lstatSync';
+        this[ONSTAT](job, fs[stat](job.absolute));
+    }
+    [READDIR](job) {
+        this[ONREADDIR](job, fs.readdirSync(job.absolute));
+    }
+    // gotta get it all in this tick
+    [PIPE](job) {
+        const source = job.entry;
+        const zip = this.zip;
+        if (job.readdir) {
+            job.readdir.forEach(entry => {
+                const p = job.path;
+                const base = p === './' ? '' : p.replace(/\/*$/, '/');
+                this[ADDFSENTRY](base + entry);
+            });
+        }
+        /* c8 ignore start */
+        if (!source)
+            throw new Error('Cannot pipe without source');
+        /* c8 ignore stop */
+        if (zip) {
+            source.on('data', chunk => {
+                zip.write(chunk);
+            });
+        }
+        else {
+            source.on('data', chunk => {
+                super[WRITE](chunk);
+            });
+        }
+    }
+}
+//# sourceMappingURL=pack.js.map
\ No newline at end of file
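
A minimal Pack sketch matching the header comment above (input and output paths are illustrative and assumed to exist under cwd):

import fs from 'node:fs'
import { Pack } from 'tar'

const pack = new Pack({ cwd: process.cwd(), gzip: true })
pack.add('src').add('package.json').end()
pack.pipe(fs.createWriteStream('out.tgz'))
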
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js
new file mode 100644
index 0000000000000..cce430479cd0c
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js
@@ -0,0 +1,595 @@
+// this[BUFFER] is the remainder of a chunk if we're waiting for
+// the full 512 bytes of a header to come in.  We will Buffer.concat()
+// it to the next write(), which is a mem copy, but a small one.
+//
+// this[QUEUE] is a Yallist of entries that haven't been emitted
+// yet this can only get filled up if the user keeps write()ing after
+// a write() returns false, or does a write() with more than one entry
+//
+// We don't buffer chunks, we always parse them and either create an
+// entry, or push it into the active entry.  The ReadEntry class knows
+// to throw data away if .ignore=true
+//
+// Shift entry off the buffer when it emits 'end', and emit 'entry' for
+// the next one in the list.
+//
+// At any time, we're pushing body chunks into the entry at WRITEENTRY,
+// and waiting for 'end' on the entry at READENTRY
+//
+// ignored entries get .resume() called on them straight away
+import { EventEmitter as EE } from 'events';
+import { BrotliDecompress, Unzip } from 'minizlib';
+import { Yallist } from 'yallist';
+import { Header } from './header.js';
+import { Pax } from './pax.js';
+import { ReadEntry } from './read-entry.js';
+import { warnMethod, } from './warn-method.js';
+const maxMetaEntrySize = 1024 * 1024;
+const gzipHeader = Buffer.from([0x1f, 0x8b]);
+const STATE = Symbol('state');
+const WRITEENTRY = Symbol('writeEntry');
+const READENTRY = Symbol('readEntry');
+const NEXTENTRY = Symbol('nextEntry');
+const PROCESSENTRY = Symbol('processEntry');
+const EX = Symbol('extendedHeader');
+const GEX = Symbol('globalExtendedHeader');
+const META = Symbol('meta');
+const EMITMETA = Symbol('emitMeta');
+const BUFFER = Symbol('buffer');
+const QUEUE = Symbol('queue');
+const ENDED = Symbol('ended');
+const EMITTEDEND = Symbol('emittedEnd');
+const EMIT = Symbol('emit');
+const UNZIP = Symbol('unzip');
+const CONSUMECHUNK = Symbol('consumeChunk');
+const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
+const CONSUMEBODY = Symbol('consumeBody');
+const CONSUMEMETA = Symbol('consumeMeta');
+const CONSUMEHEADER = Symbol('consumeHeader');
+const CONSUMING = Symbol('consuming');
+const BUFFERCONCAT = Symbol('bufferConcat');
+const MAYBEEND = Symbol('maybeEnd');
+const WRITING = Symbol('writing');
+const ABORTED = Symbol('aborted');
+const DONE = Symbol('onDone');
+const SAW_VALID_ENTRY = Symbol('sawValidEntry');
+const SAW_NULL_BLOCK = Symbol('sawNullBlock');
+const SAW_EOF = Symbol('sawEOF');
+const CLOSESTREAM = Symbol('closeStream');
+const noop = () => true;
+export class Parser extends EE {
+    file;
+    strict;
+    maxMetaEntrySize;
+    filter;
+    brotli;
+    writable = true;
+    readable = false;
+    [QUEUE] = new Yallist();
+    [BUFFER];
+    [READENTRY];
+    [WRITEENTRY];
+    [STATE] = 'begin';
+    [META] = '';
+    [EX];
+    [GEX];
+    [ENDED] = false;
+    [UNZIP];
+    [ABORTED] = false;
+    [SAW_VALID_ENTRY];
+    [SAW_NULL_BLOCK] = false;
+    [SAW_EOF] = false;
+    [WRITING] = false;
+    [CONSUMING] = false;
+    [EMITTEDEND] = false;
+    constructor(opt = {}) {
+        super();
+        this.file = opt.file || '';
+        // these BADARCHIVE errors can't be detected early. listen on DONE.
+        this.on(DONE, () => {
+            if (this[STATE] === 'begin' ||
+                this[SAW_VALID_ENTRY] === false) {
+                // either less than 1 block of data, or all entries were invalid.
+                // Either way, probably not even a tarball.
+                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
+            }
+        });
+        if (opt.ondone) {
+            this.on(DONE, opt.ondone);
+        }
+        else {
+            this.on(DONE, () => {
+                this.emit('prefinish');
+                this.emit('finish');
+                this.emit('end');
+            });
+        }
+        this.strict = !!opt.strict;
+        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
+        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
+        // Unlike gzip, brotli doesn't have any magic bytes to identify it
+        // Users need to explicitly tell us they're extracting a brotli file
+        // Or we infer from the file extension
+        const isTBR = opt.file &&
+            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
+        // if it's a tbr file it MIGHT be brotli, but we don't know until
+        // we look at it and verify it's not a valid tar file.
+        this.brotli =
+            !opt.gzip && opt.brotli !== undefined ? opt.brotli
+                : isTBR ? undefined
+                    : false;
+        // have to set this so that streams are ok piping into it
+        this.on('end', () => this[CLOSESTREAM]());
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        if (typeof opt.onReadEntry === 'function') {
+            this.on('entry', opt.onReadEntry);
+        }
+    }
+    warn(code, message, data = {}) {
+        warnMethod(this, code, message, data);
+    }
+    [CONSUMEHEADER](chunk, position) {
+        if (this[SAW_VALID_ENTRY] === undefined) {
+            this[SAW_VALID_ENTRY] = false;
+        }
+        let header;
+        try {
+            header = new Header(chunk, position, this[EX], this[GEX]);
+        }
+        catch (er) {
+            return this.warn('TAR_ENTRY_INVALID', er);
+        }
+        if (header.nullBlock) {
+            if (this[SAW_NULL_BLOCK]) {
+                this[SAW_EOF] = true;
+                // ending an archive with no entries.  pointless, but legal.
+                if (this[STATE] === 'begin') {
+                    this[STATE] = 'header';
+                }
+                this[EMIT]('eof');
+            }
+            else {
+                this[SAW_NULL_BLOCK] = true;
+                this[EMIT]('nullBlock');
+            }
+        }
+        else {
+            this[SAW_NULL_BLOCK] = false;
+            if (!header.cksumValid) {
+                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
+            }
+            else if (!header.path) {
+                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
+            }
+            else {
+                const type = header.type;
+                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
+                        header,
+                    });
+                }
+                else if (!/^(Symbolic)?Link$/.test(type) &&
+                    !/^(Global)?ExtendedHeader$/.test(type) &&
+                    header.linkpath) {
+                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
+                        header,
+                    });
+                }
+                else {
+                    const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX]));
+                    // we do this for meta & ignored entries as well, because they
+                    // are still valid tar, or else we wouldn't know to ignore them
+                    if (!this[SAW_VALID_ENTRY]) {
+                        if (entry.remain) {
+                            // this might be the one!
+                            const onend = () => {
+                                if (!entry.invalid) {
+                                    this[SAW_VALID_ENTRY] = true;
+                                }
+                            };
+                            entry.on('end', onend);
+                        }
+                        else {
+                            this[SAW_VALID_ENTRY] = true;
+                        }
+                    }
+                    if (entry.meta) {
+                        if (entry.size > this.maxMetaEntrySize) {
+                            entry.ignore = true;
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = 'ignore';
+                            entry.resume();
+                        }
+                        else if (entry.size > 0) {
+                            this[META] = '';
+                            entry.on('data', c => (this[META] += c));
+                            this[STATE] = 'meta';
+                        }
+                    }
+                    else {
+                        this[EX] = undefined;
+                        entry.ignore =
+                            entry.ignore || !this.filter(entry.path, entry);
+                        if (entry.ignore) {
+                            // probably valid, just not something we care about
+                            this[EMIT]('ignoredEntry', entry);
+                            this[STATE] = entry.remain ? 'ignore' : 'header';
+                            entry.resume();
+                        }
+                        else {
+                            if (entry.remain) {
+                                this[STATE] = 'body';
+                            }
+                            else {
+                                this[STATE] = 'header';
+                                entry.end();
+                            }
+                            if (!this[READENTRY]) {
+                                this[QUEUE].push(entry);
+                                this[NEXTENTRY]();
+                            }
+                            else {
+                                this[QUEUE].push(entry);
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }
+    [CLOSESTREAM]() {
+        queueMicrotask(() => this.emit('close'));
+    }
+    [PROCESSENTRY](entry) {
+        let go = true;
+        if (!entry) {
+            this[READENTRY] = undefined;
+            go = false;
+        }
+        else if (Array.isArray(entry)) {
+            const [ev, ...args] = entry;
+            this.emit(ev, ...args);
+        }
+        else {
+            this[READENTRY] = entry;
+            this.emit('entry', entry);
+            if (!entry.emittedEnd) {
+                entry.on('end', () => this[NEXTENTRY]());
+                go = false;
+            }
+        }
+        return go;
+    }
+    [NEXTENTRY]() {
+        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
+        if (!this[QUEUE].length) {
+            // At this point, there's nothing in the queue, but we may have an
+            // entry which is being consumed (readEntry).
+            // If we don't, then we definitely can handle more data.
+            // If we do, and either it's flowing, or it has never had any data
+            // written to it, then it needs more.
+            // The only other possibility is that it has returned false from a
+            // write() call, so we wait for the next drain to continue.
+            const re = this[READENTRY];
+            const drainNow = !re || re.flowing || re.size === re.remain;
+            if (drainNow) {
+                if (!this[WRITING]) {
+                    this.emit('drain');
+                }
+            }
+            else {
+                re.once('drain', () => this.emit('drain'));
+            }
+        }
+    }
+    [CONSUMEBODY](chunk, position) {
+        // write up to but no more than writeEntry.blockRemain
+        const entry = this[WRITEENTRY];
+        /* c8 ignore start */
+        if (!entry) {
+            throw new Error('attempt to consume body without entry??');
+        }
+        const br = entry.blockRemain ?? 0;
+        /* c8 ignore stop */
+        const c = br >= chunk.length && position === 0 ?
+            chunk
+            : chunk.subarray(position, position + br);
+        entry.write(c);
+        if (!entry.blockRemain) {
+            this[STATE] = 'header';
+            this[WRITEENTRY] = undefined;
+            entry.end();
+        }
+        return c.length;
+    }
+    [CONSUMEMETA](chunk, position) {
+        const entry = this[WRITEENTRY];
+        const ret = this[CONSUMEBODY](chunk, position);
+        // if we finished, then the entry is reset
+        if (!this[WRITEENTRY] && entry) {
+            this[EMITMETA](entry);
+        }
+        return ret;
+    }
+    [EMIT](ev, data, extra) {
+        if (!this[QUEUE].length && !this[READENTRY]) {
+            this.emit(ev, data, extra);
+        }
+        else {
+            this[QUEUE].push([ev, data, extra]);
+        }
+    }
+    [EMITMETA](entry) {
+        this[EMIT]('meta', this[META]);
+        switch (entry.type) {
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this[EX] = Pax.parse(this[META], this[EX], false);
+                break;
+            case 'GlobalExtendedHeader':
+                this[GEX] = Pax.parse(this[META], this[GEX], true);
+                break;
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath': {
+                const ex = this[EX] ?? Object.create(null);
+                this[EX] = ex;
+                ex.path = this[META].replace(/\0.*/, '');
+                break;
+            }
+            case 'NextFileHasLongLinkpath': {
+                const ex = this[EX] || Object.create(null);
+                this[EX] = ex;
+                ex.linkpath = this[META].replace(/\0.*/, '');
+                break;
+            }
+            /* c8 ignore start */
+            default:
+                throw new Error('unknown meta: ' + entry.type);
+            /* c8 ignore stop */
+        }
+    }
+    abort(error) {
+        this[ABORTED] = true;
+        this.emit('abort', error);
+        // always throws, even in non-strict mode
+        this.warn('TAR_ABORT', error, { recoverable: false });
+    }
+    write(chunk, encoding, cb) {
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, 
+            /* c8 ignore next */
+            typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        if (this[ABORTED]) {
+            /* c8 ignore next */
+            cb?.();
+            return false;
+        }
+        // first write, might be gzipped
+        const needSniff = this[UNZIP] === undefined ||
+            (this.brotli === undefined && this[UNZIP] === false);
+        if (needSniff && chunk) {
+            if (this[BUFFER]) {
+                chunk = Buffer.concat([this[BUFFER], chunk]);
+                this[BUFFER] = undefined;
+            }
+            if (chunk.length < gzipHeader.length) {
+                this[BUFFER] = chunk;
+                /* c8 ignore next */
+                cb?.();
+                return true;
+            }
+            // look for gzip header
+            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
+                if (chunk[i] !== gzipHeader[i]) {
+                    this[UNZIP] = false;
+                }
+            }
+            const maybeBrotli = this.brotli === undefined;
+            if (this[UNZIP] === false && maybeBrotli) {
+                // read the first header to see if it's a valid tar file. If so,
+                // we can safely assume that it's not actually brotli, despite the
+                // .tbr or .tar.br file extension.
+                // if we ended before getting a full chunk, yes, def brotli
+                if (chunk.length < 512) {
+                    if (this[ENDED]) {
+                        this.brotli = true;
+                    }
+                    else {
+                        this[BUFFER] = chunk;
+                        /* c8 ignore next */
+                        cb?.();
+                        return true;
+                    }
+                }
+                else {
+                    // if it's tar, it's pretty reliably not brotli, chances of
+                    // that happening are astronomical.
+                    try {
+                        new Header(chunk.subarray(0, 512));
+                        this.brotli = false;
+                    }
+                    catch (_) {
+                        this.brotli = true;
+                    }
+                }
+            }
+            if (this[UNZIP] === undefined ||
+                (this[UNZIP] === false && this.brotli)) {
+                const ended = this[ENDED];
+                this[ENDED] = false;
+                this[UNZIP] =
+                    this[UNZIP] === undefined ?
+                        new Unzip({})
+                        : new BrotliDecompress({});
+                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
+                this[UNZIP].on('error', er => this.abort(er));
+                this[UNZIP].on('end', () => {
+                    this[ENDED] = true;
+                    this[CONSUMECHUNK]();
+                });
+                this[WRITING] = true;
+                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
+                this[WRITING] = false;
+                cb?.();
+                return ret;
+            }
+        }
+        this[WRITING] = true;
+        if (this[UNZIP]) {
+            this[UNZIP].write(chunk);
+        }
+        else {
+            this[CONSUMECHUNK](chunk);
+        }
+        this[WRITING] = false;
+        // return false if there's a queue, or if the current entry isn't flowing
+        const ret = this[QUEUE].length ? false
+            : this[READENTRY] ? this[READENTRY].flowing
+                : true;
+        // if we have no queue, then that means a clogged READENTRY
+        if (!ret && !this[QUEUE].length) {
+            this[READENTRY]?.once('drain', () => this.emit('drain'));
+        }
+        /* c8 ignore next */
+        cb?.();
+        return ret;
+    }
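+    // Sniffing note (illustrative comment only, not part of the upstream
+    // source): a chunk that starts with the gzip magic bytes 0x1f 0x8b is
+    // routed through Unzip; when that check fails and a .br/.tbr extension
+    // left brotli undecided, the first 512 bytes are parsed as a tar Header.
+    // A successful parse means plain tar, a failed parse means the stream is
+    // fed through BrotliDecompress instead.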
+    [BUFFERCONCAT](c) {
+        if (c && !this[ABORTED]) {
+            this[BUFFER] =
+                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
+        }
+    }
+    [MAYBEEND]() {
+        if (this[ENDED] &&
+            !this[EMITTEDEND] &&
+            !this[ABORTED] &&
+            !this[CONSUMING]) {
+            this[EMITTEDEND] = true;
+            const entry = this[WRITEENTRY];
+            if (entry && entry.blockRemain) {
+                // truncated, likely a damaged file
+                const have = this[BUFFER] ? this[BUFFER].length : 0;
+                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
+                if (this[BUFFER]) {
+                    entry.write(this[BUFFER]);
+                }
+                entry.end();
+            }
+            this[EMIT](DONE);
+        }
+    }
+    [CONSUMECHUNK](chunk) {
+        if (this[CONSUMING] && chunk) {
+            this[BUFFERCONCAT](chunk);
+        }
+        else if (!chunk && !this[BUFFER]) {
+            this[MAYBEEND]();
+        }
+        else if (chunk) {
+            this[CONSUMING] = true;
+            if (this[BUFFER]) {
+                this[BUFFERCONCAT](chunk);
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            else {
+                this[CONSUMECHUNKSUB](chunk);
+            }
+            while (this[BUFFER] &&
+                this[BUFFER]?.length >= 512 &&
+                !this[ABORTED] &&
+                !this[SAW_EOF]) {
+                const c = this[BUFFER];
+                this[BUFFER] = undefined;
+                this[CONSUMECHUNKSUB](c);
+            }
+            this[CONSUMING] = false;
+        }
+        if (!this[BUFFER] || this[ENDED]) {
+            this[MAYBEEND]();
+        }
+    }
+    [CONSUMECHUNKSUB](chunk) {
+        // we know that we are in CONSUMING mode, so anything written goes into
+        // the buffer.  Advance the position and put any remainder in the buffer.
+        let position = 0;
+        const length = chunk.length;
+        while (position + 512 <= length &&
+            !this[ABORTED] &&
+            !this[SAW_EOF]) {
+            switch (this[STATE]) {
+                case 'begin':
+                case 'header':
+                    this[CONSUMEHEADER](chunk, position);
+                    position += 512;
+                    break;
+                case 'ignore':
+                case 'body':
+                    position += this[CONSUMEBODY](chunk, position);
+                    break;
+                case 'meta':
+                    position += this[CONSUMEMETA](chunk, position);
+                    break;
+                /* c8 ignore start */
+                default:
+                    throw new Error('invalid state: ' + this[STATE]);
+                /* c8 ignore stop */
+            }
+        }
+        if (position < length) {
+            if (this[BUFFER]) {
+                this[BUFFER] = Buffer.concat([
+                    chunk.subarray(position),
+                    this[BUFFER],
+                ]);
+            }
+            else {
+                this[BUFFER] = chunk.subarray(position);
+            }
+        }
+    }
+    end(chunk, encoding, cb) {
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding);
+        }
+        if (cb)
+            this.once('finish', cb);
+        if (!this[ABORTED]) {
+            if (this[UNZIP]) {
+                /* c8 ignore start */
+                if (chunk)
+                    this[UNZIP].write(chunk);
+                /* c8 ignore stop */
+                this[UNZIP].end();
+            }
+            else {
+                this[ENDED] = true;
+                if (this.brotli === undefined)
+                    chunk = chunk || Buffer.alloc(0);
+                if (chunk)
+                    this.write(chunk);
+                this[MAYBEEND]();
+            }
+        }
+        return this;
+    }
+}
+//# sourceMappingURL=parse.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js
new file mode 100644
index 0000000000000..e63b9c91e9a80
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js
@@ -0,0 +1,166 @@
+// A path exclusive reservation system
+// reserve([list, of, paths], fn)
+// When the fn is first in line for all its paths, it
+// is called with a cb that clears the reservation.
+//
+// Used by async unpack to avoid clobbering paths in use,
+// while still allowing maximal safe parallelization.
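+// A minimal usage sketch (illustrative only; the variable names below are
+// hypothetical, only reserve() and its done callback come from this module):
+//
+//   const reservations = new PathReservations()
+//   reservations.reserve(['a/b', 'a/b/c'], done => {
+//     // safe to operate on a/b and a/b/c here; no other reserved fn runs on them
+//     done() // clears the reservation and lets the next waiter run
+//   })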
+import { join } from 'node:path';
+import { normalizeUnicode } from './normalize-unicode.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+// return a set of parent dirs for a given path
+// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
+const getDirs = (path) => {
+    const dirs = path
+        .split('/')
+        .slice(0, -1)
+        .reduce((set, path) => {
+        const s = set[set.length - 1];
+        if (s !== undefined) {
+            path = join(s, path);
+        }
+        set.push(path || '/');
+        return set;
+    }, []);
+    return dirs;
+};
+export class PathReservations {
+    // path => [function or Set]
+    // A Set object means a directory reservation
+    // A fn is a direct reservation on that path
+    #queues = new Map();
+    // fn => {paths:[path,...], dirs:[path, ...]}
+    #reservations = new Map();
+    // functions currently running
+    #running = new Set();
+    reserve(paths, fn) {
+        paths =
+            isWindows ?
+                ['win32 parallelization disabled']
+                : paths.map(p => {
+                    // don't need normPath, because we skip this entirely for windows
+                    return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase();
+                });
+        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
+        this.#reservations.set(fn, { dirs, paths });
+        for (const p of paths) {
+            const q = this.#queues.get(p);
+            if (!q) {
+                this.#queues.set(p, [fn]);
+            }
+            else {
+                q.push(fn);
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            if (!q) {
+                this.#queues.set(dir, [new Set([fn])]);
+            }
+            else {
+                const l = q[q.length - 1];
+                if (l instanceof Set) {
+                    l.add(fn);
+                }
+                else {
+                    q.push(new Set([fn]));
+                }
+            }
+        }
+        return this.#run(fn);
+    }
+    // return the queues for each path the function cares about
+    // fn => {paths, dirs}
+    #getQueues(fn) {
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('function does not have any path reservations');
+        }
+        /* c8 ignore stop */
+        return {
+            paths: res.paths.map((path) => this.#queues.get(path)),
+            dirs: [...res.dirs].map(path => this.#queues.get(path)),
+        };
+    }
+    // check if fn is first in line for all its paths, and is
+    // included in the first set for all its dir queues
+    check(fn) {
+        const { paths, dirs } = this.#getQueues(fn);
+        return (paths.every(q => q && q[0] === fn) &&
+            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
+    }
+    // run the function if it's first in line and not already running
+    #run(fn) {
+        if (this.#running.has(fn) || !this.check(fn)) {
+            return false;
+        }
+        this.#running.add(fn);
+        fn(() => this.#clear(fn));
+        return true;
+    }
+    #clear(fn) {
+        if (!this.#running.has(fn)) {
+            return false;
+        }
+        const res = this.#reservations.get(fn);
+        /* c8 ignore start */
+        if (!res) {
+            throw new Error('invalid reservation');
+        }
+        /* c8 ignore stop */
+        const { paths, dirs } = res;
+        const next = new Set();
+        for (const path of paths) {
+            const q = this.#queues.get(path);
+            /* c8 ignore start */
+            if (!q || q?.[0] !== fn) {
+                continue;
+            }
+            /* c8 ignore stop */
+            const q0 = q[1];
+            if (!q0) {
+                this.#queues.delete(path);
+                continue;
+            }
+            q.shift();
+            if (typeof q0 === 'function') {
+                next.add(q0);
+            }
+            else {
+                for (const f of q0) {
+                    next.add(f);
+                }
+            }
+        }
+        for (const dir of dirs) {
+            const q = this.#queues.get(dir);
+            const q0 = q?.[0];
+            /* c8 ignore next - type safety only */
+            if (!q || !(q0 instanceof Set))
+                continue;
+            if (q0.size === 1 && q.length === 1) {
+                this.#queues.delete(dir);
+                continue;
+            }
+            else if (q0.size === 1) {
+                q.shift();
+                // next one must be a function,
+                // or else the Set would've been reused
+                const n = q[0];
+                if (typeof n === 'function') {
+                    next.add(n);
+                }
+            }
+            else {
+                q0.delete(fn);
+            }
+        }
+        this.#running.delete(fn);
+        next.forEach(fn => this.#run(fn));
+        return true;
+    }
+}
+//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js
new file mode 100644
index 0000000000000..832808f344da5
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js
@@ -0,0 +1,154 @@
+import { basename } from 'node:path';
+import { Header } from './header.js';
+export class Pax {
+    atime;
+    mtime;
+    ctime;
+    charset;
+    comment;
+    gid;
+    uid;
+    gname;
+    uname;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    path;
+    size;
+    mode;
+    global;
+    constructor(obj, global = false) {
+        this.atime = obj.atime;
+        this.charset = obj.charset;
+        this.comment = obj.comment;
+        this.ctime = obj.ctime;
+        this.dev = obj.dev;
+        this.gid = obj.gid;
+        this.global = global;
+        this.gname = obj.gname;
+        this.ino = obj.ino;
+        this.linkpath = obj.linkpath;
+        this.mtime = obj.mtime;
+        this.nlink = obj.nlink;
+        this.path = obj.path;
+        this.size = obj.size;
+        this.uid = obj.uid;
+        this.uname = obj.uname;
+    }
+    encode() {
+        const body = this.encodeBody();
+        if (body === '') {
+            return Buffer.allocUnsafe(0);
+        }
+        const bodyLen = Buffer.byteLength(body);
+        // round up to 512 bytes
+        // add 512 for header
+        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
+        const buf = Buffer.allocUnsafe(bufLen);
+        // 0-fill the header section, it might not hit every field
+        for (let i = 0; i < 512; i++) {
+            buf[i] = 0;
+        }
+        new Header({
+            // XXX split the path
+            // then the path should be PaxHeader + basename, but less than 99,
+            // prepend with the dirname
+            /* c8 ignore start */
+            path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99),
+            /* c8 ignore stop */
+            mode: this.mode || 0o644,
+            uid: this.uid,
+            gid: this.gid,
+            size: bodyLen,
+            mtime: this.mtime,
+            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
+            linkpath: '',
+            uname: this.uname || '',
+            gname: this.gname || '',
+            devmaj: 0,
+            devmin: 0,
+            atime: this.atime,
+            ctime: this.ctime,
+        }).encode(buf);
+        buf.write(body, 512, bodyLen, 'utf8');
+        // null pad after the body
+        for (let i = bodyLen + 512; i < buf.length; i++) {
+            buf[i] = 0;
+        }
+        return buf;
+    }
+    encodeBody() {
+        return (this.encodeField('path') +
+            this.encodeField('ctime') +
+            this.encodeField('atime') +
+            this.encodeField('dev') +
+            this.encodeField('ino') +
+            this.encodeField('nlink') +
+            this.encodeField('charset') +
+            this.encodeField('comment') +
+            this.encodeField('gid') +
+            this.encodeField('gname') +
+            this.encodeField('linkpath') +
+            this.encodeField('mtime') +
+            this.encodeField('size') +
+            this.encodeField('uid') +
+            this.encodeField('uname'));
+    }
+    encodeField(field) {
+        if (this[field] === undefined) {
+            return '';
+        }
+        const r = this[field];
+        const v = r instanceof Date ? r.getTime() / 1000 : r;
+        const s = ' ' +
+            (field === 'dev' || field === 'ino' || field === 'nlink' ?
+                'SCHILY.'
+                : '') +
+            field +
+            '=' +
+            v +
+            '\n';
+        const byteLen = Buffer.byteLength(s);
+        // the digits includes the length of the digits in ascii base-10
+        // so if it's 9 characters, then adding 1 for the 9 makes it 10
+        // which makes it 11 chars.
+        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
+        if (byteLen + digits >= Math.pow(10, digits)) {
+            digits += 1;
+        }
+        const len = digits + byteLen;
+        return len + s;
+    }
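+    // Worked example of the length prefix above (illustrative comment only):
+    // for path = 'x', s = ' path=x\n' is 8 bytes; one digit keeps the total
+    // under 10, so the record is '9 path=x\n' (9 bytes). For path = 'ab',
+    // s = ' path=ab\n' is 9 bytes; 9 plus a one-digit prefix reaches 10, which
+    // needs two digits, so the record becomes '11 path=ab\n' (11 bytes).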
+    static parse(str, ex, g = false) {
+        return new Pax(merge(parseKV(str), ex), g);
+    }
+}
+const merge = (a, b) => b ? Object.assign({}, b, a) : a;
+const parseKV = (str) => str
+    .replace(/\n$/, '')
+    .split('\n')
+    .reduce(parseKVLine, Object.create(null));
+const parseKVLine = (set, line) => {
+    const n = parseInt(line, 10);
+    // XXX Values with \n in them will fail this.
+    // Refactor to not be a naive line-by-line parse.
+    if (n !== Buffer.byteLength(line) + 1) {
+        return set;
+    }
+    line = line.slice((n + ' ').length);
+    const kv = line.split('=');
+    const r = kv.shift();
+    if (!r) {
+        return set;
+    }
+    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
+    const v = kv.join('=');
+    set[k] =
+        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
+            new Date(Number(v) * 1000)
+            : /^[0-9]+$/.test(v) ? +v
+                : v;
+    return set;
+};
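+// Worked example for parseKVLine (illustrative comment only): given the pax
+// record '9 path=x\n', parseKV sees the line '9 path=x' (8 bytes); 8 plus 1
+// for the stripped newline equals the leading 9, so the line is accepted and
+// yields { path: 'x' }. A line whose byte length does not match its numeric
+// prefix is ignored.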
+//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js
new file mode 100644
index 0000000000000..23cc673e61087
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js
@@ -0,0 +1,136 @@
+import { Minipass } from 'minipass';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+export class ReadEntry extends Minipass {
+    extended;
+    globalExtended;
+    header;
+    startBlockSize;
+    blockRemain;
+    remain;
+    type;
+    meta = false;
+    ignore = false;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    size = 0;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    dev;
+    ino;
+    nlink;
+    invalid = false;
+    absolute;
+    unsupported = false;
+    constructor(header, ex, gex) {
+        super({});
+        // read entries always start life paused.  this is to avoid the
+        // situation where Minipass's auto-ending empty streams results
+        // in an entry ending before we're ready for it.
+        this.pause();
+        this.extended = ex;
+        this.globalExtended = gex;
+        this.header = header;
+        /* c8 ignore start */
+        this.remain = header.size ?? 0;
+        /* c8 ignore stop */
+        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
+        this.blockRemain = this.startBlockSize;
+        this.type = header.type;
+        switch (this.type) {
+            case 'File':
+            case 'OldFile':
+            case 'Link':
+            case 'SymbolicLink':
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'Directory':
+            case 'FIFO':
+            case 'ContiguousFile':
+            case 'GNUDumpDir':
+                break;
+            case 'NextFileHasLongLinkpath':
+            case 'NextFileHasLongPath':
+            case 'OldGnuLongPath':
+            case 'GlobalExtendedHeader':
+            case 'ExtendedHeader':
+            case 'OldExtendedHeader':
+                this.meta = true;
+                break;
+            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
+            // it may be worth doing the same, but with a warning.
+            default:
+                this.ignore = true;
+        }
+        /* c8 ignore start */
+        if (!header.path) {
+            throw new Error('no path provided for tar.ReadEntry');
+        }
+        /* c8 ignore stop */
+        this.path = normalizeWindowsPath(header.path);
+        this.mode = header.mode;
+        if (this.mode) {
+            this.mode = this.mode & 0o7777;
+        }
+        this.uid = header.uid;
+        this.gid = header.gid;
+        this.uname = header.uname;
+        this.gname = header.gname;
+        this.size = this.remain;
+        this.mtime = header.mtime;
+        this.atime = header.atime;
+        this.ctime = header.ctime;
+        /* c8 ignore start */
+        this.linkpath =
+            header.linkpath ?
+                normalizeWindowsPath(header.linkpath)
+                : undefined;
+        /* c8 ignore stop */
+        this.uname = header.uname;
+        this.gname = header.gname;
+        if (ex) {
+            this.#slurp(ex);
+        }
+        if (gex) {
+            this.#slurp(gex, true);
+        }
+    }
+    write(data) {
+        const writeLen = data.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        const r = this.remain;
+        const br = this.blockRemain;
+        this.remain = Math.max(0, r - writeLen);
+        this.blockRemain = Math.max(0, br - writeLen);
+        if (this.ignore) {
+            return true;
+        }
+        if (r >= writeLen) {
+            return super.write(data);
+        }
+        // r < writeLen
+        return super.write(data.subarray(0, r));
+    }
+    #slurp(ex, gex = false) {
+        if (ex.path)
+            ex.path = normalizeWindowsPath(ex.path);
+        if (ex.linkpath)
+            ex.linkpath = normalizeWindowsPath(ex.linkpath);
+        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
+            // we slurp in everything except for the path attribute in
+            // a global extended header, because that's weird. Also, any
+            // null/undefined values are ignored.
+            return !(v === null ||
+                v === undefined ||
+                (k === 'path' && gex));
+        })));
+    }
+}
+//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js
new file mode 100644
index 0000000000000..bab622bfdf1f1
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js
@@ -0,0 +1,225 @@
+// tar -r
+import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
+import fs from 'node:fs';
+import path from 'node:path';
+import { Header } from './header.js';
+import { list } from './list.js';
+import { makeCommand } from './make-command.js';
+import { isFile, } from './options.js';
+import { Pack, PackSync } from './pack.js';
+// starting at the head of the file, read a Header
+// If the checksum is invalid, that's our position to start writing
+// If it is, jump forward by the specified size (round up to 512)
+// and try again.
+// Write the new Pack stream starting there.
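+// Illustrative example (comment only): appending to an archive holding one
+// 100-byte file. The header at offset 0 has a valid checksum and size 100, so
+// the scan skips 512 bytes of rounded-up body plus the 512-byte header and
+// lands at offset 1024, where the zeroed end-of-archive blocks fail the
+// checksum; the new Pack stream is written starting at 1024, replacing that
+// end marker.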
+const replaceSync = (opt, files) => {
+    const p = new PackSync(opt);
+    let threw = true;
+    let fd;
+    let position;
+    try {
+        try {
+            fd = fs.openSync(opt.file, 'r+');
+        }
+        catch (er) {
+            if (er?.code === 'ENOENT') {
+                fd = fs.openSync(opt.file, 'w+');
+            }
+            else {
+                throw er;
+            }
+        }
+        const st = fs.fstatSync(fd);
+        const headBuf = Buffer.alloc(512);
+        POSITION: for (position = 0; position < st.size; position += 512) {
+            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
+                bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
+                if (position === 0 &&
+                    headBuf[0] === 0x1f &&
+                    headBuf[1] === 0x8b) {
+                    throw new Error('cannot append to compressed archives');
+                }
+                if (!bytes) {
+                    break POSITION;
+                }
+            }
+            const h = new Header(headBuf);
+            if (!h.cksumValid) {
+                break;
+            }
+            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
+            if (position + entryBlockSize + 512 > st.size) {
+                break;
+            }
+            // the 512 for the header we just parsed will be added as well
+            // also jump ahead all the blocks for the body
+            position += entryBlockSize;
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+        }
+        threw = false;
+        streamSync(opt, p, position, fd, files);
+    }
+    finally {
+        if (threw) {
+            try {
+                fs.closeSync(fd);
+            }
+            catch (er) { }
+        }
+    }
+};
+const streamSync = (opt, p, position, fd, files) => {
+    const stream = new WriteStreamSync(opt.file, {
+        fd: fd,
+        start: position,
+    });
+    p.pipe(stream);
+    addFilesSync(p, files);
+};
+const replaceAsync = (opt, files) => {
+    files = Array.from(files);
+    const p = new Pack(opt);
+    const getPos = (fd, size, cb_) => {
+        const cb = (er, pos) => {
+            if (er) {
+                fs.close(fd, _ => cb_(er));
+            }
+            else {
+                cb_(null, pos);
+            }
+        };
+        let position = 0;
+        if (size === 0) {
+            return cb(null, 0);
+        }
+        let bufPos = 0;
+        const headBuf = Buffer.alloc(512);
+        const onread = (er, bytes) => {
+            if (er || typeof bytes === 'undefined') {
+                return cb(er);
+            }
+            bufPos += bytes;
+            if (bufPos < 512 && bytes) {
+                return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
+            }
+            if (position === 0 &&
+                headBuf[0] === 0x1f &&
+                headBuf[1] === 0x8b) {
+                return cb(new Error('cannot append to compressed archives'));
+            }
+            // truncated header
+            if (bufPos < 512) {
+                return cb(null, position);
+            }
+            const h = new Header(headBuf);
+            if (!h.cksumValid) {
+                return cb(null, position);
+            }
+            /* c8 ignore next */
+            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
+            if (position + entryBlockSize + 512 > size) {
+                return cb(null, position);
+            }
+            position += entryBlockSize + 512;
+            if (position >= size) {
+                return cb(null, position);
+            }
+            if (opt.mtimeCache && h.mtime) {
+                opt.mtimeCache.set(String(h.path), h.mtime);
+            }
+            bufPos = 0;
+            fs.read(fd, headBuf, 0, 512, position, onread);
+        };
+        fs.read(fd, headBuf, 0, 512, position, onread);
+    };
+    const promise = new Promise((resolve, reject) => {
+        p.on('error', reject);
+        let flag = 'r+';
+        const onopen = (er, fd) => {
+            if (er && er.code === 'ENOENT' && flag === 'r+') {
+                flag = 'w+';
+                return fs.open(opt.file, flag, onopen);
+            }
+            if (er || !fd) {
+                return reject(er);
+            }
+            fs.fstat(fd, (er, st) => {
+                if (er) {
+                    return fs.close(fd, () => reject(er));
+                }
+                getPos(fd, st.size, (er, position) => {
+                    if (er) {
+                        return reject(er);
+                    }
+                    const stream = new WriteStream(opt.file, {
+                        fd: fd,
+                        start: position,
+                    });
+                    p.pipe(stream);
+                    stream.on('error', reject);
+                    stream.on('close', resolve);
+                    addFilesAsync(p, files);
+                });
+            });
+        };
+        fs.open(opt.file, flag, onopen);
+    });
+    return promise;
+};
+const addFilesSync = (p, files) => {
+    files.forEach(file => {
+        if (file.charAt(0) === '@') {
+            list({
+                file: path.resolve(p.cwd, file.slice(1)),
+                sync: true,
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    });
+    p.end();
+};
+const addFilesAsync = async (p, files) => {
+    for (let i = 0; i < files.length; i++) {
+        const file = String(files[i]);
+        if (file.charAt(0) === '@') {
+            await list({
+                file: path.resolve(String(p.cwd), file.slice(1)),
+                noResume: true,
+                onReadEntry: entry => p.add(entry),
+            });
+        }
+        else {
+            p.add(file);
+        }
+    }
+    p.end();
+};
+export const replace = makeCommand(replaceSync, replaceAsync, 
+/* c8 ignore start */
+() => {
+    throw new TypeError('file is required');
+}, () => {
+    throw new TypeError('file is required');
+}, 
+/* c8 ignore stop */
+(opt, entries) => {
+    if (!isFile(opt)) {
+        throw new TypeError('file is required');
+    }
+    if (opt.gzip ||
+        opt.brotli ||
+        opt.file.endsWith('.br') ||
+        opt.file.endsWith('.tbr')) {
+        throw new TypeError('cannot append to compressed archives');
+    }
+    if (!entries?.length) {
+        throw new TypeError('no paths specified to add/replace');
+    }
+});
+//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js
new file mode 100644
index 0000000000000..cce5ff80b00db
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js
@@ -0,0 +1,25 @@
+// unix absolute paths are also absolute on win32, so we use this for both
+import { win32 } from 'node:path';
+const { isAbsolute, parse } = win32;
+// returns [root, stripped]
+// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
+// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
+// explicitly if it's the first character.
+// drive-specific relative paths on Windows get their root stripped off even
+// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
+export const stripAbsolutePath = (path) => {
+    let r = '';
+    let parsed = parse(path);
+    while (isAbsolute(path) || parsed.root) {
+        // windows will think that //x/y/z has a "root" of //x/y/
+        // but strip the //?/C:/ off of //?/C:/path
+        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
+            '/'
+            : parsed.root;
+        path = path.slice(root.length);
+        r += root;
+        parsed = parse(path);
+    }
+    return [r, path];
+};
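+// Illustrative examples (comment only), matching the notes above:
+//   stripAbsolutePath('/foo/bar')  -> ['/', 'foo/bar']
+//   stripAbsolutePath('c:../foo')  -> ['c:', '../foo']
+//   stripAbsolutePath('C:/x/y')    -> ['C:/', 'x/y']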
+//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js
new file mode 100644
index 0000000000000..ace4218a7547b
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js
@@ -0,0 +1,14 @@
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+export const stripTrailingSlashes = (str) => {
+    let i = str.length - 1;
+    let slashesStart = -1;
+    while (i > -1 && str.charAt(i) === '/') {
+        slashesStart = i;
+        i--;
+    }
+    return slashesStart === -1 ? str : str.slice(0, slashesStart);
+};
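+// e.g. (illustrative): stripTrailingSlashes('a/b///') -> 'a/b', while a string
+// with no trailing slash is returned unchanged.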
+//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js
new file mode 100644
index 0000000000000..d31766e2e0afa
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js
@@ -0,0 +1,15 @@
+export class SymlinkError extends Error {
+    path;
+    symlink;
+    syscall = 'symlink';
+    code = 'TAR_SYMLINK_ERROR';
+    constructor(symlink, path) {
+        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
+        this.symlink = symlink;
+        this.path = path;
+    }
+    get name() {
+        return 'SymlinkError';
+    }
+}
+//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js
new file mode 100644
index 0000000000000..27b982ae1e092
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js
@@ -0,0 +1,45 @@
+export const isCode = (c) => name.has(c);
+export const isName = (c) => code.has(c);
+// map types from key to human-friendly name
+export const name = new Map([
+    ['0', 'File'],
+    // same as File
+    ['', 'OldFile'],
+    ['1', 'Link'],
+    ['2', 'SymbolicLink'],
+    // Devices and FIFOs aren't fully supported
+    // they are parsed, but skipped when unpacking
+    ['3', 'CharacterDevice'],
+    ['4', 'BlockDevice'],
+    ['5', 'Directory'],
+    ['6', 'FIFO'],
+    // same as File
+    ['7', 'ContiguousFile'],
+    // pax headers
+    ['g', 'GlobalExtendedHeader'],
+    ['x', 'ExtendedHeader'],
+    // vendor-specific stuff
+    // skip
+    ['A', 'SolarisACL'],
+    // like 5, but with data, which should be skipped
+    ['D', 'GNUDumpDir'],
+    // metadata only, skip
+    ['I', 'Inode'],
+    // data = link path of next file
+    ['K', 'NextFileHasLongLinkpath'],
+    // data = path of next file
+    ['L', 'NextFileHasLongPath'],
+    // skip
+    ['M', 'ContinuationFile'],
+    // like L
+    ['N', 'OldGnuLongPath'],
+    // skip
+    ['S', 'SparseFile'],
+    // skip
+    ['V', 'TapeVolumeHeader'],
+    // like x
+    ['X', 'OldExtendedHeader'],
+]);
+// map the other direction
+export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]]));
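+// Illustrative lookups (comment only):
+//   name.get('5') === 'Directory', code.get('Directory') === '5'
+//   isCode('x') === true, isName('ExtendedHeader') === true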
+//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js
new file mode 100644
index 0000000000000..6e744cfc1a6f9
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js
@@ -0,0 +1,888 @@
+// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
+// but the path reservations are required to avoid race conditions where
+// parallelized unpack ops may mess with one another, due to dependencies
+// (like a Link depending on its target) or destructive operations (like
+// clobbering an fs object to create one of a different type.)
+import * as fsm from '@isaacs/fs-minipass';
+import assert from 'node:assert';
+import { randomBytes } from 'node:crypto';
+import fs from 'node:fs';
+import path from 'node:path';
+import { getWriteFlag } from './get-write-flag.js';
+import { mkdir, mkdirSync } from './mkdir.js';
+import { normalizeUnicode } from './normalize-unicode.js';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+import { Parser } from './parse.js';
+import { stripAbsolutePath } from './strip-absolute-path.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+import * as wc from './winchars.js';
+import { PathReservations } from './path-reservations.js';
+const ONENTRY = Symbol('onEntry');
+const CHECKFS = Symbol('checkFs');
+const CHECKFS2 = Symbol('checkFs2');
+const PRUNECACHE = Symbol('pruneCache');
+const ISREUSABLE = Symbol('isReusable');
+const MAKEFS = Symbol('makeFs');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const LINK = Symbol('link');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const UNSUPPORTED = Symbol('unsupported');
+const CHECKPATH = Symbol('checkPath');
+const MKDIR = Symbol('mkdir');
+const ONERROR = Symbol('onError');
+const PENDING = Symbol('pending');
+const PEND = Symbol('pend');
+const UNPEND = Symbol('unpend');
+const ENDED = Symbol('ended');
+const MAYBECLOSE = Symbol('maybeClose');
+const SKIP = Symbol('skip');
+const DOCHOWN = Symbol('doChown');
+const UID = Symbol('uid');
+const GID = Symbol('gid');
+const CHECKED_CWD = Symbol('checkedCwd');
+const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
+const isWindows = platform === 'win32';
+const DEFAULT_MAX_DEPTH = 1024;
+// Unlinks on Windows are not atomic.
+//
+// This means that if you have a file entry, followed by another
+// file entry with an identical name, and you cannot re-use the file
+// (because it's a hardlink, or because unlink:true is set, or it's
+// Windows, which does not have useful nlink values), then the unlink
+// will be committed to the disk AFTER the new file has been written
+// over the old one, deleting the new file.
+//
+// To work around this, on Windows systems, we rename the file and then
+// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
+// know of a better way to do this, given windows' non-atomic unlink
+// semantics.
+//
+// See: https://github.com/npm/node-tar/issues/183
+/* c8 ignore start */
+const unlinkFile = (path, cb) => {
+    if (!isWindows) {
+        return fs.unlink(path, cb);
+    }
+    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
+    fs.rename(path, name, er => {
+        if (er) {
+            return cb(er);
+        }
+        fs.unlink(name, cb);
+    });
+};
+/* c8 ignore stop */
+/* c8 ignore start */
+const unlinkFileSync = (path) => {
+    if (!isWindows) {
+        return fs.unlinkSync(path);
+    }
+    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
+    fs.renameSync(path, name);
+    fs.unlinkSync(name);
+};
+/* c8 ignore stop */
+// this.gid, entry.gid, this.processUid
+const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
+    : b !== undefined && b === b >>> 0 ? b
+        : c;
+// clear the cache if it's a case-insensitive unicode-squashing match.
+// we can't know if the current file system is case-sensitive or supports
+// unicode fully, so we check for similarity on the maximally compatible
+// representation.  Err on the side of pruning, since all it's doing is
+// preventing lstats, and it's not the end of the world if we get a false
+// positive.
+// Note that on windows, we always drop the entire cache whenever a
+// symbolic link is encountered, because 8.3 filenames are impossible
+// to reason about, and collisions are hazards rather than just failures.
+const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
+// remove all cache entries matching ${abs}/**
+const pruneCache = (cache, abs) => {
+    abs = cacheKeyNormalize(abs);
+    for (const path of cache.keys()) {
+        const pnorm = cacheKeyNormalize(path);
+        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
+            cache.delete(path);
+        }
+    }
+};
+const dropCache = (cache) => {
+    for (const key of cache.keys()) {
+        cache.delete(key);
+    }
+};
+export class Unpack extends Parser {
+    [ENDED] = false;
+    [CHECKED_CWD] = false;
+    [PENDING] = 0;
+    reservations = new PathReservations();
+    transform;
+    writable = true;
+    readable = false;
+    dirCache;
+    uid;
+    gid;
+    setOwner;
+    preserveOwner;
+    processGid;
+    processUid;
+    maxDepth;
+    forceChown;
+    win32;
+    newer;
+    keep;
+    noMtime;
+    preservePaths;
+    unlink;
+    cwd;
+    strip;
+    processUmask;
+    umask;
+    dmode;
+    fmode;
+    chmod;
+    constructor(opt = {}) {
+        opt.ondone = () => {
+            this[ENDED] = true;
+            this[MAYBECLOSE]();
+        };
+        super(opt);
+        this.transform = opt.transform;
+        this.dirCache = opt.dirCache || new Map();
+        this.chmod = !!opt.chmod;
+        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
+            // need both or neither
+            if (typeof opt.uid !== 'number' ||
+                typeof opt.gid !== 'number') {
+                throw new TypeError('cannot set owner without number uid and gid');
+            }
+            if (opt.preserveOwner) {
+                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
+            }
+            this.uid = opt.uid;
+            this.gid = opt.gid;
+            this.setOwner = true;
+        }
+        else {
+            this.uid = undefined;
+            this.gid = undefined;
+            this.setOwner = false;
+        }
+        // default true for root
+        if (opt.preserveOwner === undefined &&
+            typeof opt.uid !== 'number') {
+            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
+        }
+        else {
+            this.preserveOwner = !!opt.preserveOwner;
+        }
+        this.processUid =
+            (this.preserveOwner || this.setOwner) && process.getuid ?
+                process.getuid()
+                : undefined;
+        this.processGid =
+            (this.preserveOwner || this.setOwner) && process.getgid ?
+                process.getgid()
+                : undefined;
+        // prevent excessively deep nesting of subfolders
+        // set to `Infinity` to remove this restriction
+        this.maxDepth =
+            typeof opt.maxDepth === 'number' ?
+                opt.maxDepth
+                : DEFAULT_MAX_DEPTH;
+        // mostly just for testing, but useful in some cases.
+        // Forcibly trigger a chown on every entry, no matter what
+        this.forceChown = opt.forceChown === true;
+        // turn ><?| in filenames into 0xf000-higher encoded versions
+        this.win32 = !!opt.win32 || isWindows;
+        // do not unpack over files that are newer than what's in the archive
+        this.newer = !!opt.newer;
+        // do not unpack over ANY files
+        this.keep = !!opt.keep;
+        // do not set mtime/atime of extracted entries
+        this.noMtime = !!opt.noMtime;
+        // allow .., absolute path entries, and writing through symbolic links
+        this.preservePaths = !!opt.preservePaths;
+        // unlink files and links before writing. This breaks existing hard
+        // links, and removes symlink directories rather than erroring
+        this.unlink = !!opt.unlink;
+        this.cwd = normalizeWindowsPath(path.resolve(opt.cwd || process.cwd()));
+        this.strip = Number(opt.strip) || 0;
+        // if we're not chmodding, then we don't need the process umask
+        this.processUmask =
+            !this.chmod ? 0
+                : typeof opt.processUmask === 'number' ? opt.processUmask
+                    : process.umask();
+        this.umask =
+            typeof opt.umask === 'number' ? opt.umask : this.processUmask;
+        // default mode for dirs created as parents
+        this.dmode = opt.dmode || 0o0777 & ~this.umask;
+        this.fmode = opt.fmode || 0o0666 & ~this.umask;
+        this.on('entry', entry => this[ONENTRY](entry));
+    }
+    // a bad or damaged archive is a warning for Parser, but an error
+    // when extracting.  Mark those errors as unrecoverable, because
+    // the Unpack contract cannot be met.
+    warn(code, msg, data = {}) {
+        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
+            data.recoverable = false;
+        }
+        return super.warn(code, msg, data);
+    }
+    [MAYBECLOSE]() {
+        if (this[ENDED] && this[PENDING] === 0) {
+            this.emit('prefinish');
+            this.emit('finish');
+            this.emit('end');
+        }
+    }
+    [CHECKPATH](entry) {
+        const p = normalizeWindowsPath(entry.path);
+        const parts = p.split('/');
+        if (this.strip) {
+            if (parts.length < this.strip) {
+                return false;
+            }
+            if (entry.type === 'Link') {
+                const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/');
+                if (linkparts.length >= this.strip) {
+                    entry.linkpath = linkparts.slice(this.strip).join('/');
+                }
+                else {
+                    return false;
+                }
+            }
+            parts.splice(0, this.strip);
+            entry.path = parts.join('/');
+        }
+        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
+            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
+                entry,
+                path: p,
+                depth: parts.length,
+                maxDepth: this.maxDepth,
+            });
+            return false;
+        }
+        if (!this.preservePaths) {
+            if (parts.includes('..') ||
+                /* c8 ignore next */
+                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
+                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
+                    entry,
+                    path: p,
+                });
+                return false;
+            }
+            // strip off the root
+            const [root, stripped] = stripAbsolutePath(p);
+            if (root) {
+                entry.path = String(stripped);
+                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
+                    entry,
+                    path: p,
+                });
+            }
+        }
+        if (path.isAbsolute(entry.path)) {
+            entry.absolute = normalizeWindowsPath(path.resolve(entry.path));
+        }
+        else {
+            entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path));
+        }
+        // if we somehow ended up with a path that escapes the cwd, and we are
+        // not in preservePaths mode, then something is fishy!  This should have
+        // been prevented above, so ignore this for coverage.
+        /* c8 ignore start - defense in depth */
+        if (!this.preservePaths &&
+            typeof entry.absolute === 'string' &&
+            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
+            entry.absolute !== this.cwd) {
+            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
+                entry,
+                path: normalizeWindowsPath(entry.path),
+                resolvedPath: entry.absolute,
+                cwd: this.cwd,
+            });
+            return false;
+        }
+        /* c8 ignore stop */
+        // an archive can set properties on the extraction directory, but it
+        // may not replace the cwd with a different kind of thing entirely.
+        if (entry.absolute === this.cwd &&
+            entry.type !== 'Directory' &&
+            entry.type !== 'GNUDumpDir') {
+            return false;
+        }
+        // only encode : chars that aren't drive letter indicators
+        if (this.win32) {
+            const { root: aRoot } = path.win32.parse(String(entry.absolute));
+            entry.absolute =
+                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
+            const { root: pRoot } = path.win32.parse(entry.path);
+            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
+        }
+        return true;
+    }
+    [ONENTRY](entry) {
+        if (!this[CHECKPATH](entry)) {
+            return entry.resume();
+        }
+        assert.equal(typeof entry.absolute, 'string');
+        switch (entry.type) {
+            case 'Directory':
+            case 'GNUDumpDir':
+                if (entry.mode) {
+                    entry.mode = entry.mode | 0o700;
+                }
+            // eslint-disable-next-line no-fallthrough
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+            case 'Link':
+            case 'SymbolicLink':
+                return this[CHECKFS](entry);
+            case 'CharacterDevice':
+            case 'BlockDevice':
+            case 'FIFO':
+            default:
+                return this[UNSUPPORTED](entry);
+        }
+    }
+    [ONERROR](er, entry) {
+        // Cwd has to exist, or else nothing works. That's serious.
+        // Other errors are warnings, which raise the error in strict
+        // mode, but otherwise continue on.
+        if (er.name === 'CwdError') {
+            this.emit('error', er);
+        }
+        else {
+            this.warn('TAR_ENTRY_ERROR', er, { entry });
+            this[UNPEND]();
+            entry.resume();
+        }
+    }
+    [MKDIR](dir, mode, cb) {
+        mkdir(normalizeWindowsPath(dir), {
+            uid: this.uid,
+            gid: this.gid,
+            processUid: this.processUid,
+            processGid: this.processGid,
+            umask: this.processUmask,
+            preserve: this.preservePaths,
+            unlink: this.unlink,
+            cache: this.dirCache,
+            cwd: this.cwd,
+            mode: mode,
+        }, cb);
+    }
+    [DOCHOWN](entry) {
+        // in preserve owner mode, chown if the entry doesn't match process
+        // in set owner mode, chown if setting doesn't match process
+        return (this.forceChown ||
+            (this.preserveOwner &&
+                ((typeof entry.uid === 'number' &&
+                    entry.uid !== this.processUid) ||
+                    (typeof entry.gid === 'number' &&
+                        entry.gid !== this.processGid))) ||
+            (typeof this.uid === 'number' &&
+                this.uid !== this.processUid) ||
+            (typeof this.gid === 'number' && this.gid !== this.processGid));
+    }
+    [UID](entry) {
+        return uint32(this.uid, entry.uid, this.processUid);
+    }
+    [GID](entry) {
+        return uint32(this.gid, entry.gid, this.processGid);
+    }
+    [FILE](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
+        const stream = new fsm.WriteStream(String(entry.absolute), {
+            // slight lie, but it can be numeric flags
+            flags: getWriteFlag(entry.size),
+            mode: mode,
+            autoClose: false,
+        });
+        stream.on('error', (er) => {
+            if (stream.fd) {
+                fs.close(stream.fd, () => { });
+            }
+            // flush all the data out so that we aren't left hanging
+            // if the error wasn't actually fatal.  otherwise the parse
+            // is blocked, and we never proceed.
+            stream.write = () => true;
+            this[ONERROR](er, entry);
+            fullyDone();
+        });
+        let actions = 1;
+        const done = (er) => {
+            if (er) {
+                /* c8 ignore start - we should always have a fd by now */
+                if (stream.fd) {
+                    fs.close(stream.fd, () => { });
+                }
+                /* c8 ignore stop */
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            if (--actions === 0) {
+                if (stream.fd !== undefined) {
+                    fs.close(stream.fd, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                        }
+                        else {
+                            this[UNPEND]();
+                        }
+                        fullyDone();
+                    });
+                }
+            }
+        };
+        stream.on('finish', () => {
+            // if futimes fails, try utimes
+            // if utimes fails, fail with the original error
+            // same for fchown/chown
+            const abs = String(entry.absolute);
+            const fd = stream.fd;
+            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
+                actions++;
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                fs.futimes(fd, atime, mtime, er => er ?
+                    fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
+                    : done());
+            }
+            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
+                actions++;
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                if (typeof uid === 'number' && typeof gid === 'number') {
+                    fs.fchown(fd, uid, gid, er => er ?
+                        fs.chown(abs, uid, gid, er2 => done(er2 && er))
+                        : done());
+                }
+            }
+            done();
+        });
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => {
+                this[ONERROR](er, entry);
+                fullyDone();
+            });
+            entry.pipe(tx);
+        }
+        tx.pipe(stream);
+    }
+    [DIRECTORY](entry, fullyDone) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        this[MKDIR](String(entry.absolute), mode, er => {
+            if (er) {
+                this[ONERROR](er, entry);
+                fullyDone();
+                return;
+            }
+            let actions = 1;
+            const done = () => {
+                if (--actions === 0) {
+                    fullyDone();
+                    this[UNPEND]();
+                    entry.resume();
+                }
+            };
+            if (entry.mtime && !this.noMtime) {
+                actions++;
+                fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
+            }
+            if (this[DOCHOWN](entry)) {
+                actions++;
+                fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
+            }
+            done();
+        });
+    }
+    [UNSUPPORTED](entry) {
+        entry.unsupported = true;
+        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
+        entry.resume();
+    }
+    [SYMLINK](entry, done) {
+        this[LINK](entry, String(entry.linkpath), 'symlink', done);
+    }
+    [HARDLINK](entry, done) {
+        const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath)));
+        this[LINK](entry, linkpath, 'link', done);
+    }
+    [PEND]() {
+        this[PENDING]++;
+    }
+    [UNPEND]() {
+        this[PENDING]--;
+        this[MAYBECLOSE]();
+    }
+    [SKIP](entry) {
+        this[UNPEND]();
+        entry.resume();
+    }
+    // Check if we can reuse an existing filesystem entry safely and
+    // overwrite it, rather than unlinking and recreating
+    // Windows doesn't report a useful nlink, so we just never reuse entries
+    [ISREUSABLE](entry, st) {
+        return (entry.type === 'File' &&
+            !this.unlink &&
+            st.isFile() &&
+            st.nlink <= 1 &&
+            !isWindows);
+    }
+    // check if a thing is there, and if so, try to clobber it
+    [CHECKFS](entry) {
+        this[PEND]();
+        const paths = [entry.path];
+        if (entry.linkpath) {
+            paths.push(entry.linkpath);
+        }
+        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
+    }
+    [PRUNECACHE](entry) {
+        // if we are not creating a directory, and the path is in the dirCache,
+        // then that means we are about to delete the directory we created
+        // previously, and it is no longer going to be a directory, and neither
+        // is any of its children.
+        // If a symbolic link is encountered, all bets are off.  There is no
+        // reasonable way to sanitize the cache in such a way we will be able to
+        // avoid having filesystem collisions.  If this happens with a non-symlink
+        // entry, it'll just fail to unpack, but a symlink to a directory, using an
+        // 8.3 shortname or certain unicode attacks, can evade detection and lead
+        // to arbitrary writes to anywhere on the system.
+        if (entry.type === 'SymbolicLink') {
+            dropCache(this.dirCache);
+        }
+        else if (entry.type !== 'Directory') {
+            pruneCache(this.dirCache, String(entry.absolute));
+        }
+    }
+    [CHECKFS2](entry, fullyDone) {
+        this[PRUNECACHE](entry);
+        const done = (er) => {
+            this[PRUNECACHE](entry);
+            fullyDone(er);
+        };
+        const checkCwd = () => {
+            this[MKDIR](this.cwd, this.dmode, er => {
+                if (er) {
+                    this[ONERROR](er, entry);
+                    done();
+                    return;
+                }
+                this[CHECKED_CWD] = true;
+                start();
+            });
+        };
+        const start = () => {
+            if (entry.absolute !== this.cwd) {
+                const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
+                if (parent !== this.cwd) {
+                    return this[MKDIR](parent, this.dmode, er => {
+                        if (er) {
+                            this[ONERROR](er, entry);
+                            done();
+                            return;
+                        }
+                        afterMakeParent();
+                    });
+                }
+            }
+            afterMakeParent();
+        };
+        const afterMakeParent = () => {
+            fs.lstat(String(entry.absolute), (lstatEr, st) => {
+                if (st &&
+                    (this.keep ||
+                        /* c8 ignore next */
+                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+                    this[SKIP](entry);
+                    done();
+                    return;
+                }
+                if (lstatEr || this[ISREUSABLE](entry, st)) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                if (st.isDirectory()) {
+                    if (entry.type === 'Directory') {
+                        const needChmod = this.chmod &&
+                            entry.mode &&
+                            (st.mode & 0o7777) !== entry.mode;
+                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
+                        if (!needChmod) {
+                            return afterChmod();
+                        }
+                        return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
+                    }
+                    // Not a dir entry, have to remove it.
+                    // NB: the only way to end up with an entry that is the cwd
+                    // itself, in such a way that == does not detect, is a
+                    // tricky windows absolute path with UNC or 8.3 parts (and
+                    // preservePaths:true, or else it will have been stripped).
+                    // In that case, the user has opted out of path protections
+                    // explicitly, so if they blow away the cwd, c'est la vie.
+                    if (entry.absolute !== this.cwd) {
+                        return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
+                    }
+                }
+                // not a dir, and not reusable
+                // don't remove if the cwd, we want that error
+                if (entry.absolute === this.cwd) {
+                    return this[MAKEFS](null, entry, done);
+                }
+                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
+            });
+        };
+        if (this[CHECKED_CWD]) {
+            start();
+        }
+        else {
+            checkCwd();
+        }
+    }
+    [MAKEFS](er, entry, done) {
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        switch (entry.type) {
+            case 'File':
+            case 'OldFile':
+            case 'ContiguousFile':
+                return this[FILE](entry, done);
+            case 'Link':
+                return this[HARDLINK](entry, done);
+            case 'SymbolicLink':
+                return this[SYMLINK](entry, done);
+            case 'Directory':
+            case 'GNUDumpDir':
+                return this[DIRECTORY](entry, done);
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        // XXX: get the type ('symlink' or 'junction') for windows
+        fs[link](linkpath, String(entry.absolute), er => {
+            if (er) {
+                this[ONERROR](er, entry);
+            }
+            else {
+                this[UNPEND]();
+                entry.resume();
+            }
+            done();
+        });
+    }
+}
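+// run fn synchronously, returning an [error, result] tuple instead of throwing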
+const callSync = (fn) => {
+    try {
+        return [null, fn()];
+    }
+    catch (er) {
+        return [er, null];
+    }
+};
+export class UnpackSync extends Unpack {
+    sync = true;
+    [MAKEFS](er, entry) {
+        return super[MAKEFS](er, entry, () => { });
+    }
+    [CHECKFS](entry) {
+        this[PRUNECACHE](entry);
+        if (!this[CHECKED_CWD]) {
+            const er = this[MKDIR](this.cwd, this.dmode);
+            if (er) {
+                return this[ONERROR](er, entry);
+            }
+            this[CHECKED_CWD] = true;
+        }
+        // don't bother to make the parent if the current entry is the cwd,
+        // we've already checked it.
+        if (entry.absolute !== this.cwd) {
+            const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
+            if (parent !== this.cwd) {
+                const mkParent = this[MKDIR](parent, this.dmode);
+                if (mkParent) {
+                    return this[ONERROR](mkParent, entry);
+                }
+            }
+        }
+        const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute)));
+        if (st &&
+            (this.keep ||
+                /* c8 ignore next */
+                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
+            return this[SKIP](entry);
+        }
+        if (lstatEr || this[ISREUSABLE](entry, st)) {
+            return this[MAKEFS](null, entry);
+        }
+        if (st.isDirectory()) {
+            if (entry.type === 'Directory') {
+                const needChmod = this.chmod &&
+                    entry.mode &&
+                    (st.mode & 0o7777) !== entry.mode;
+                const [er] = needChmod ?
+                    callSync(() => {
+                        fs.chmodSync(String(entry.absolute), Number(entry.mode));
+                    })
+                    : [];
+                return this[MAKEFS](er, entry);
+            }
+            // not a dir entry, have to remove it
+            const [er] = callSync(() => fs.rmdirSync(String(entry.absolute)));
+            this[MAKEFS](er, entry);
+        }
+        // not a dir, and not reusable.
+        // don't remove if it's the cwd, since we want that error.
+        const [er] = entry.absolute === this.cwd ?
+            []
+            : callSync(() => unlinkFileSync(String(entry.absolute)));
+        this[MAKEFS](er, entry);
+    }
+    [FILE](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.fmode;
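+        // oner: close the fd, report the first error (from the write or the close), then signal done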
+        const oner = (er) => {
+            let closeError;
+            try {
+                fs.closeSync(fd);
+            }
+            catch (e) {
+                closeError = e;
+            }
+            if (er || closeError) {
+                this[ONERROR](er || closeError, entry);
+            }
+            done();
+        };
+        let fd;
+        try {
+            fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
+        }
+        catch (er) {
+            return oner(er);
+        }
+        const tx = this.transform ? this.transform(entry) || entry : entry;
+        if (tx !== entry) {
+            tx.on('error', (er) => this[ONERROR](er, entry));
+            entry.pipe(tx);
+        }
+        tx.on('data', (chunk) => {
+            try {
+                fs.writeSync(fd, chunk, 0, chunk.length);
+            }
+            catch (er) {
+                oner(er);
+            }
+        });
+        tx.on('end', () => {
+            let er = null;
+            // try both, falling futimes back to utimes
+            // if either fails, handle the first error
+            if (entry.mtime && !this.noMtime) {
+                const atime = entry.atime || new Date();
+                const mtime = entry.mtime;
+                try {
+                    fs.futimesSync(fd, atime, mtime);
+                }
+                catch (futimeser) {
+                    try {
+                        fs.utimesSync(String(entry.absolute), atime, mtime);
+                    }
+                    catch (utimeser) {
+                        er = futimeser;
+                    }
+                }
+            }
+            if (this[DOCHOWN](entry)) {
+                const uid = this[UID](entry);
+                const gid = this[GID](entry);
+                try {
+                    fs.fchownSync(fd, Number(uid), Number(gid));
+                }
+                catch (fchowner) {
+                    try {
+                        fs.chownSync(String(entry.absolute), Number(uid), Number(gid));
+                    }
+                    catch (chowner) {
+                        er = er || fchowner;
+                    }
+                }
+            }
+            oner(er);
+        });
+    }
+    [DIRECTORY](entry, done) {
+        const mode = typeof entry.mode === 'number' ?
+            entry.mode & 0o7777
+            : this.dmode;
+        const er = this[MKDIR](String(entry.absolute), mode);
+        if (er) {
+            this[ONERROR](er, entry);
+            done();
+            return;
+        }
+        if (entry.mtime && !this.noMtime) {
+            try {
+                fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
+                /* c8 ignore next */
+            }
+            catch (er) { }
+        }
+        if (this[DOCHOWN](entry)) {
+            try {
+                fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
+            }
+            catch (er) { }
+        }
+        done();
+        entry.resume();
+    }
+    [MKDIR](dir, mode) {
+        try {
+            return mkdirSync(normalizeWindowsPath(dir), {
+                uid: this.uid,
+                gid: this.gid,
+                processUid: this.processUid,
+                processGid: this.processGid,
+                umask: this.processUmask,
+                preserve: this.preservePaths,
+                unlink: this.unlink,
+                cache: this.dirCache,
+                cwd: this.cwd,
+                mode: mode,
+            });
+        }
+        catch (er) {
+            return er;
+        }
+    }
+    [LINK](entry, linkpath, link, done) {
+        const ls = `${link}Sync`;
+        try {
+            fs[ls](linkpath, String(entry.absolute));
+            done();
+            entry.resume();
+        }
+        catch (er) {
+            return this[ONERROR](er, entry);
+        }
+    }
+}
+//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js
new file mode 100644
index 0000000000000..21398e9766663
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js
@@ -0,0 +1,30 @@
+// tar -u
+import { makeCommand } from './make-command.js';
+import { replace as r } from './replace.js';
+// just call tar.r with the filter and mtimeCache
+export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => {
+    r.validate?.(opt, entries);
+    mtimeFilter(opt);
+});
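+// skip files whose on-disk mtime is older than the mtime already recorded
+// for that path in the archive (tracked via mtimeCache)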
+const mtimeFilter = (opt) => {
+    const filter = opt.filter;
+    if (!opt.mtimeCache) {
+        opt.mtimeCache = new Map();
+    }
+    opt.filter =
+        filter ?
+            (path, stat) => filter(path, stat) &&
+                !(
+                /* c8 ignore start */
+                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                    (stat.mtime ?? 0))
+                /* c8 ignore stop */
+                )
+            : (path, stat) => !(
+            /* c8 ignore start */
+            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
+                (stat.mtime ?? 0))
+            /* c8 ignore stop */
+            );
+};
+//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js
new file mode 100644
index 0000000000000..13e798afefc85
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js
@@ -0,0 +1,27 @@
+export const warnMethod = (self, code, message, data = {}) => {
+    if (self.file) {
+        data.file = self.file;
+    }
+    if (self.cwd) {
+        data.cwd = self.cwd;
+    }
+    data.code =
+        (message instanceof Error &&
+            message.code) ||
+            code;
+    data.tarCode = code;
+    if (!self.strict && data.recoverable !== false) {
+        if (message instanceof Error) {
+            data = Object.assign(message, data);
+            message = message.message;
+        }
+        self.emit('warn', code, message, data);
+    }
+    else if (message instanceof Error) {
+        self.emit('error', Object.assign(message, data));
+    }
+    else {
+        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
+    }
+};
+//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js
new file mode 100644
index 0000000000000..c41eb86d69a4b
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js
@@ -0,0 +1,9 @@
+// When writing files on Windows, translate the characters to their
+// 0xf000 higher-encoded versions.
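+// e.g. ':' (0x3a) becomes '\uf03a'; decode() reverses the mapping.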
+const raw = ['|', '<', '>', '?', ':'];
+const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
+const toWin = new Map(raw.map((char, i) => [char, win[i]]));
+const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
+export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
+export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
+//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js
new file mode 100644
index 0000000000000..9028cd676b4cd
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js
@@ -0,0 +1,657 @@
+import fs from 'fs';
+import { Minipass } from 'minipass';
+import path from 'path';
+import { Header } from './header.js';
+import { modeFix } from './mode-fix.js';
+import { normalizeWindowsPath } from './normalize-windows-path.js';
+import { dealias, } from './options.js';
+import { Pax } from './pax.js';
+import { stripAbsolutePath } from './strip-absolute-path.js';
+import { stripTrailingSlashes } from './strip-trailing-slashes.js';
+import { warnMethod, } from './warn-method.js';
+import * as winchars from './winchars.js';
+const prefixPath = (path, prefix) => {
+    if (!prefix) {
+        return normalizeWindowsPath(path);
+    }
+    path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '');
+    return stripTrailingSlashes(prefix) + '/' + path;
+};
+const maxReadSize = 16 * 1024 * 1024;
+const PROCESS = Symbol('process');
+const FILE = Symbol('file');
+const DIRECTORY = Symbol('directory');
+const SYMLINK = Symbol('symlink');
+const HARDLINK = Symbol('hardlink');
+const HEADER = Symbol('header');
+const READ = Symbol('read');
+const LSTAT = Symbol('lstat');
+const ONLSTAT = Symbol('onlstat');
+const ONREAD = Symbol('onread');
+const ONREADLINK = Symbol('onreadlink');
+const OPENFILE = Symbol('openfile');
+const ONOPENFILE = Symbol('onopenfile');
+const CLOSE = Symbol('close');
+const MODE = Symbol('mode');
+const AWAITDRAIN = Symbol('awaitDrain');
+const ONDRAIN = Symbol('ondrain');
+const PREFIX = Symbol('prefix');
+export class WriteEntry extends Minipass {
+    path;
+    portable;
+    myuid = (process.getuid && process.getuid()) || 0;
+    // until node has builtin pwnam functions, this'll have to do
+    myuser = process.env.USER || '';
+    maxReadSize;
+    linkCache;
+    statCache;
+    preservePaths;
+    cwd;
+    strict;
+    mtime;
+    noPax;
+    noMtime;
+    prefix;
+    fd;
+    blockLen = 0;
+    blockRemain = 0;
+    buf;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    offset = 0;
+    win32;
+    absolute;
+    header;
+    type;
+    linkpath;
+    stat;
+    onWriteEntry;
+    #hadError = false;
+    constructor(p, opt_ = {}) {
+        const opt = dealias(opt_);
+        super();
+        this.path = normalizeWindowsPath(p);
+        // suppress atime, ctime, uid, gid, uname, gname
+        this.portable = !!opt.portable;
+        this.maxReadSize = opt.maxReadSize || maxReadSize;
+        this.linkCache = opt.linkCache || new Map();
+        this.statCache = opt.statCache || new Map();
+        this.preservePaths = !!opt.preservePaths;
+        this.cwd = normalizeWindowsPath(opt.cwd || process.cwd());
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.mtime = opt.mtime;
+        this.prefix =
+            opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined;
+        this.onWriteEntry = opt.onWriteEntry;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = stripAbsolutePath(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.win32 = !!opt.win32 || process.platform === 'win32';
+        if (this.win32) {
+            // force the \ to / normalization, since we might not *actually*
+            // be on windows, but want \ to be considered a path separator.
+            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
+            p = p.replace(/\\/g, '/');
+        }
+        this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p));
+        if (this.path === '') {
+            this.path = './';
+        }
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        const cs = this.statCache.get(this.absolute);
+        if (cs) {
+            this[ONLSTAT](cs);
+        }
+        else {
+            this[LSTAT]();
+        }
+    }
+    warn(code, message, data = {}) {
+        return warnMethod(this, code, message, data);
+    }
+    emit(ev, ...data) {
+        if (ev === 'error') {
+            this.#hadError = true;
+        }
+        return super.emit(ev, ...data);
+    }
+    [LSTAT]() {
+        fs.lstat(this.absolute, (er, stat) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONLSTAT](stat);
+        });
+    }
+    [ONLSTAT](stat) {
+        this.statCache.set(this.absolute, stat);
+        this.stat = stat;
+        if (!stat.isFile()) {
+            stat.size = 0;
+        }
+        this.type = getType(stat);
+        this.emit('stat', stat);
+        this[PROCESS]();
+    }
+    [PROCESS]() {
+        switch (this.type) {
+            case 'File':
+                return this[FILE]();
+            case 'Directory':
+                return this[DIRECTORY]();
+            case 'SymbolicLink':
+                return this[SYMLINK]();
+            // unsupported types are ignored.
+            default:
+                return this.end();
+        }
+    }
+    [MODE](mode) {
+        return modeFix(mode, this.type === 'Directory', this.portable);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [HEADER]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot write header before stat');
+        }
+        /* c8 ignore stop */
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.onWriteEntry?.(this);
+        this.header = new Header({
+            path: this[PREFIX](this.path),
+            // only apply the prefix to hard links.
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this[MODE](this.stat.mode),
+            uid: this.portable ? undefined : this.stat.uid,
+            gid: this.portable ? undefined : this.stat.gid,
+            size: this.stat.size,
+            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
+            /* c8 ignore next */
+            type: this.type === 'Unsupported' ? undefined : this.type,
+            uname: this.portable ? undefined
+                : this.stat.uid === this.myuid ? this.myuser
+                    : '',
+            atime: this.portable ? undefined : this.stat.atime,
+            ctime: this.portable ? undefined : this.stat.ctime,
+        });
+        if (this.header.encode() && !this.noPax) {
+            super.write(new Pax({
+                atime: this.portable ? undefined : this.header.atime,
+                ctime: this.portable ? undefined : this.header.ctime,
+                gid: this.portable ? undefined : this.header.gid,
+                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.header.size,
+                uid: this.portable ? undefined : this.header.uid,
+                uname: this.portable ? undefined : this.header.uname,
+                dev: this.portable ? undefined : this.stat.dev,
+                ino: this.portable ? undefined : this.stat.ino,
+                nlink: this.portable ? undefined : this.stat.nlink,
+            }).encode());
+        }
+        const block = this.header?.block;
+        /* c8 ignore start */
+        if (!block) {
+            throw new Error('failed to encode header');
+        }
+        /* c8 ignore stop */
+        super.write(block);
+    }
+    [DIRECTORY]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create directory entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.path.slice(-1) !== '/') {
+            this.path += '/';
+        }
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [SYMLINK]() {
+        fs.readlink(this.absolute, (er, linkpath) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONREADLINK](linkpath);
+        });
+    }
+    [ONREADLINK](linkpath) {
+        this.linkpath = normalizeWindowsPath(linkpath);
+        this[HEADER]();
+        this.end();
+    }
+    [HARDLINK](linkpath) {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create link entry without stat');
+        }
+        /* c8 ignore stop */
+        this.type = 'Link';
+        this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
+        this.stat.size = 0;
+        this[HEADER]();
+        this.end();
+    }
+    [FILE]() {
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('cannot create file entry without stat');
+        }
+        /* c8 ignore stop */
+        if (this.stat.nlink > 1) {
+            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
+            const linkpath = this.linkCache.get(linkKey);
+            if (linkpath?.indexOf(this.cwd) === 0) {
+                return this[HARDLINK](linkpath);
+            }
+            this.linkCache.set(linkKey, this.absolute);
+        }
+        this[HEADER]();
+        if (this.stat.size === 0) {
+            return this.end();
+        }
+        this[OPENFILE]();
+    }
+    [OPENFILE]() {
+        fs.open(this.absolute, 'r', (er, fd) => {
+            if (er) {
+                return this.emit('error', er);
+            }
+            this[ONOPENFILE](fd);
+        });
+    }
+    [ONOPENFILE](fd) {
+        this.fd = fd;
+        if (this.#hadError) {
+            return this[CLOSE]();
+        }
+        /* c8 ignore start */
+        if (!this.stat) {
+            throw new Error('should stat before calling onopenfile');
+        }
+        /* c8 ignore stop */
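+        // tar writes entry data in 512-byte blocks; round the size up to a whole block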
+        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
+        this.blockRemain = this.blockLen;
+        const bufLen = Math.min(this.blockLen, this.maxReadSize);
+        this.buf = Buffer.allocUnsafe(bufLen);
+        this.offset = 0;
+        this.pos = 0;
+        this.remain = this.stat.size;
+        this.length = this.buf.length;
+        this[READ]();
+    }
+    [READ]() {
+        const { fd, buf, offset, length, pos } = this;
+        if (fd === undefined || buf === undefined) {
+            throw new Error('cannot read file without first opening');
+        }
+        fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
+            if (er) {
+                // ignoring the error from close(2) is a bad practice, but at
+                // this point we already have an error, don't need another one
+                return this[CLOSE](() => this.emit('error', er));
+            }
+            this[ONREAD](bytesRead);
+        });
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs.close(this.fd, cb);
+    }
+    [ONREAD](bytesRead) {
+        if (bytesRead <= 0 && this.remain > 0) {
+            const er = Object.assign(new Error('encountered unexpected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        if (bytesRead > this.remain) {
+            const er = Object.assign(new Error('did not encounter expected EOF'), {
+                path: this.absolute,
+                syscall: 'read',
+                code: 'EOF',
+            });
+            return this[CLOSE](() => this.emit('error', er));
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('should have created buffer prior to reading');
+        }
+        /* c8 ignore stop */
+        // null out the rest of the buffer, if we could fit the block padding
+        // at the end of this loop, we've incremented bytesRead and this.remain
+        // to be incremented up to the blockRemain level, as if we had expected
+        // to get a null-padded file, and read it until the end.  then we will
+        // decrement both remain and blockRemain by bytesRead, and know that we
+        // reached the expected EOF, without any null buffer to append.
+        if (bytesRead === this.remain) {
+            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
+                this.buf[i + this.offset] = 0;
+                bytesRead++;
+                this.remain++;
+            }
+        }
+        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
+            this.buf
+            : this.buf.subarray(this.offset, this.offset + bytesRead);
+        const flushed = this.write(chunk);
+        if (!flushed) {
+            this[AWAITDRAIN](() => this[ONDRAIN]());
+        }
+        else {
+            this[ONDRAIN]();
+        }
+    }
+    [AWAITDRAIN](cb) {
+        this.once('drain', cb);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        if (this.blockRemain < chunk.length) {
+            const er = Object.assign(new Error('writing more data than expected'), {
+                path: this.absolute,
+            });
+            return this.emit('error', er);
+        }
+        this.remain -= chunk.length;
+        this.blockRemain -= chunk.length;
+        this.pos += chunk.length;
+        this.offset += chunk.length;
+        return super.write(chunk, null, cb);
+    }
+    [ONDRAIN]() {
+        if (!this.remain) {
+            if (this.blockRemain) {
+                super.write(Buffer.alloc(this.blockRemain));
+            }
+            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
+        }
+        /* c8 ignore start */
+        if (!this.buf) {
+            throw new Error('buffer lost somehow in ONDRAIN');
+        }
+        /* c8 ignore stop */
+        if (this.offset >= this.length) {
+            // if we only have a smaller bit left to read, alloc a smaller buffer
+            // otherwise, keep it the same length it was before.
+            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
+            this.offset = 0;
+        }
+        this.length = this.buf.length - this.offset;
+        this[READ]();
+    }
+}
+export class WriteEntrySync extends WriteEntry {
+    sync = true;
+    [LSTAT]() {
+        this[ONLSTAT](fs.lstatSync(this.absolute));
+    }
+    [SYMLINK]() {
+        this[ONREADLINK](fs.readlinkSync(this.absolute));
+    }
+    [OPENFILE]() {
+        this[ONOPENFILE](fs.openSync(this.absolute, 'r'));
+    }
+    [READ]() {
+        let threw = true;
+        try {
+            const { fd, buf, offset, length, pos } = this;
+            /* c8 ignore start */
+            if (fd === undefined || buf === undefined) {
+                throw new Error('fd and buf must be set in READ method');
+            }
+            /* c8 ignore stop */
+            const bytesRead = fs.readSync(fd, buf, offset, length, pos);
+            this[ONREAD](bytesRead);
+            threw = false;
+        }
+        finally {
+            // ignoring the error from close(2) is a bad practice, but at
+            // this point we already have an error, don't need another one
+            if (threw) {
+                try {
+                    this[CLOSE](() => { });
+                }
+                catch (er) { }
+            }
+        }
+    }
+    [AWAITDRAIN](cb) {
+        cb();
+    }
+    /* c8 ignore start */
+    [CLOSE](cb = () => { }) {
+        /* c8 ignore stop */
+        if (this.fd !== undefined)
+            fs.closeSync(this.fd);
+        cb();
+    }
+}
+export class WriteEntryTar extends Minipass {
+    blockLen = 0;
+    blockRemain = 0;
+    buf = 0;
+    pos = 0;
+    remain = 0;
+    length = 0;
+    preservePaths;
+    portable;
+    strict;
+    noPax;
+    noMtime;
+    readEntry;
+    type;
+    prefix;
+    path;
+    mode;
+    uid;
+    gid;
+    uname;
+    gname;
+    header;
+    mtime;
+    atime;
+    ctime;
+    linkpath;
+    size;
+    onWriteEntry;
+    warn(code, message, data = {}) {
+        return warnMethod(this, code, message, data);
+    }
+    constructor(readEntry, opt_ = {}) {
+        const opt = dealias(opt_);
+        super();
+        this.preservePaths = !!opt.preservePaths;
+        this.portable = !!opt.portable;
+        this.strict = !!opt.strict;
+        this.noPax = !!opt.noPax;
+        this.noMtime = !!opt.noMtime;
+        this.onWriteEntry = opt.onWriteEntry;
+        this.readEntry = readEntry;
+        const { type } = readEntry;
+        /* c8 ignore start */
+        if (type === 'Unsupported') {
+            throw new Error('writing entry that should be ignored');
+        }
+        /* c8 ignore stop */
+        this.type = type;
+        if (this.type === 'Directory' && this.portable) {
+            this.noMtime = true;
+        }
+        this.prefix = opt.prefix;
+        this.path = normalizeWindowsPath(readEntry.path);
+        this.mode =
+            readEntry.mode !== undefined ?
+                this[MODE](readEntry.mode)
+                : undefined;
+        this.uid = this.portable ? undefined : readEntry.uid;
+        this.gid = this.portable ? undefined : readEntry.gid;
+        this.uname = this.portable ? undefined : readEntry.uname;
+        this.gname = this.portable ? undefined : readEntry.gname;
+        this.size = readEntry.size;
+        this.mtime =
+            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
+        this.atime = this.portable ? undefined : readEntry.atime;
+        this.ctime = this.portable ? undefined : readEntry.ctime;
+        this.linkpath =
+            readEntry.linkpath !== undefined ?
+                normalizeWindowsPath(readEntry.linkpath)
+                : undefined;
+        if (typeof opt.onwarn === 'function') {
+            this.on('warn', opt.onwarn);
+        }
+        let pathWarn = false;
+        if (!this.preservePaths) {
+            const [root, stripped] = stripAbsolutePath(this.path);
+            if (root && typeof stripped === 'string') {
+                this.path = stripped;
+                pathWarn = root;
+            }
+        }
+        this.remain = readEntry.size;
+        this.blockRemain = readEntry.startBlockSize;
+        this.onWriteEntry?.(this);
+        this.header = new Header({
+            path: this[PREFIX](this.path),
+            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                this[PREFIX](this.linkpath)
+                : this.linkpath,
+            // only the permissions and setuid/setgid/sticky bitflags
+            // not the higher-order bits that specify file type
+            mode: this.mode,
+            uid: this.portable ? undefined : this.uid,
+            gid: this.portable ? undefined : this.gid,
+            size: this.size,
+            mtime: this.noMtime ? undefined : this.mtime,
+            type: this.type,
+            uname: this.portable ? undefined : this.uname,
+            atime: this.portable ? undefined : this.atime,
+            ctime: this.portable ? undefined : this.ctime,
+        });
+        if (pathWarn) {
+            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
+                entry: this,
+                path: pathWarn + this.path,
+            });
+        }
+        if (this.header.encode() && !this.noPax) {
+            super.write(new Pax({
+                atime: this.portable ? undefined : this.atime,
+                ctime: this.portable ? undefined : this.ctime,
+                gid: this.portable ? undefined : this.gid,
+                mtime: this.noMtime ? undefined : this.mtime,
+                path: this[PREFIX](this.path),
+                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
+                    this[PREFIX](this.linkpath)
+                    : this.linkpath,
+                size: this.size,
+                uid: this.portable ? undefined : this.uid,
+                uname: this.portable ? undefined : this.uname,
+                dev: this.portable ? undefined : this.readEntry.dev,
+                ino: this.portable ? undefined : this.readEntry.ino,
+                nlink: this.portable ? undefined : this.readEntry.nlink,
+            }).encode());
+        }
+        const b = this.header?.block;
+        /* c8 ignore start */
+        if (!b)
+            throw new Error('failed to encode header');
+        /* c8 ignore stop */
+        super.write(b);
+        readEntry.pipe(this);
+    }
+    [PREFIX](path) {
+        return prefixPath(path, this.prefix);
+    }
+    [MODE](mode) {
+        return modeFix(mode, this.type === 'Directory', this.portable);
+    }
+    write(chunk, encoding, cb) {
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
+        }
+        /* c8 ignore stop */
+        const writeLen = chunk.length;
+        if (writeLen > this.blockRemain) {
+            throw new Error('writing more to entry than is appropriate');
+        }
+        this.blockRemain -= writeLen;
+        return super.write(chunk, cb);
+    }
+    end(chunk, encoding, cb) {
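+        // zero-pad the final partial block so the entry body ends on a 512-byte boundary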
+        if (this.blockRemain) {
+            super.write(Buffer.alloc(this.blockRemain));
+        }
+        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        if (typeof chunk === 'string') {
+            chunk = Buffer.from(chunk, encoding ?? 'utf8');
+        }
+        if (cb)
+            this.once('finish', cb);
+        chunk ? super.end(chunk, cb) : super.end(cb);
+        /* c8 ignore stop */
+        return this;
+    }
+}
+const getType = (stat) => stat.isFile() ? 'File'
+    : stat.isDirectory() ? 'Directory'
+        : stat.isSymbolicLink() ? 'SymbolicLink'
+            : 'Unsupported';
+//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/package.json b/node_modules/make-fetch-happen/node_modules/tar/package.json
new file mode 100644
index 0000000000000..0283103ee9eaf
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/tar/package.json
@@ -0,0 +1,325 @@
+{
+  "author": "Isaac Z. Schlueter",
+  "name": "tar",
+  "description": "tar for node",
+  "version": "7.4.3",
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/isaacs/node-tar.git"
+  },
+  "scripts": {
+    "genparse": "node scripts/generate-parse-fixtures.js",
+    "snap": "tap",
+    "test": "tap",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "tshy",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "dependencies": {
+    "@isaacs/fs-minipass": "^4.0.0",
+    "chownr": "^3.0.0",
+    "minipass": "^7.1.2",
+    "minizlib": "^3.0.1",
+    "mkdirp": "^3.0.1",
+    "yallist": "^5.0.0"
+  },
+  "devDependencies": {
+    "chmodr": "^1.2.0",
+    "end-of-stream": "^1.4.3",
+    "events-to-array": "^2.0.3",
+    "mutate-fs": "^2.1.1",
+    "nock": "^13.5.4",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.13"
+  },
+  "license": "ISC",
+  "engines": {
+    "node": ">=18"
+  },
+  "files": [
+    "dist"
+  ],
+  "tap": {
+    "coverage-map": "map.js",
+    "timeout": 0,
+    "typecheck": true
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts",
+      "./c": "./src/create.ts",
+      "./create": "./src/create.ts",
+      "./replace": "./src/create.ts",
+      "./r": "./src/create.ts",
+      "./list": "./src/list.ts",
+      "./t": "./src/list.ts",
+      "./update": "./src/update.ts",
+      "./u": "./src/update.ts",
+      "./extract": "./src/extract.ts",
+      "./x": "./src/extract.ts",
+      "./pack": "./src/pack.ts",
+      "./unpack": "./src/unpack.ts",
+      "./parse": "./src/parse.ts",
+      "./read-entry": "./src/read-entry.ts",
+      "./write-entry": "./src/write-entry.ts",
+      "./header": "./src/header.ts",
+      "./pax": "./src/pax.ts",
+      "./types": "./src/types.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "source": "./src/index.ts",
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "source": "./src/index.ts",
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./c": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./create": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./replace": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./r": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./list": {
+      "import": {
+        "source": "./src/list.ts",
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "source": "./src/list.ts",
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
+    "./t": {
+      "import": {
+        "source": "./src/list.ts",
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "source": "./src/list.ts",
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
+    "./update": {
+      "import": {
+        "source": "./src/update.ts",
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "source": "./src/update.ts",
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
+    "./u": {
+      "import": {
+        "source": "./src/update.ts",
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "source": "./src/update.ts",
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
+    "./extract": {
+      "import": {
+        "source": "./src/extract.ts",
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "source": "./src/extract.ts",
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
+    "./x": {
+      "import": {
+        "source": "./src/extract.ts",
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "source": "./src/extract.ts",
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
+    "./pack": {
+      "import": {
+        "source": "./src/pack.ts",
+        "types": "./dist/esm/pack.d.ts",
+        "default": "./dist/esm/pack.js"
+      },
+      "require": {
+        "source": "./src/pack.ts",
+        "types": "./dist/commonjs/pack.d.ts",
+        "default": "./dist/commonjs/pack.js"
+      }
+    },
+    "./unpack": {
+      "import": {
+        "source": "./src/unpack.ts",
+        "types": "./dist/esm/unpack.d.ts",
+        "default": "./dist/esm/unpack.js"
+      },
+      "require": {
+        "source": "./src/unpack.ts",
+        "types": "./dist/commonjs/unpack.d.ts",
+        "default": "./dist/commonjs/unpack.js"
+      }
+    },
+    "./parse": {
+      "import": {
+        "source": "./src/parse.ts",
+        "types": "./dist/esm/parse.d.ts",
+        "default": "./dist/esm/parse.js"
+      },
+      "require": {
+        "source": "./src/parse.ts",
+        "types": "./dist/commonjs/parse.d.ts",
+        "default": "./dist/commonjs/parse.js"
+      }
+    },
+    "./read-entry": {
+      "import": {
+        "source": "./src/read-entry.ts",
+        "types": "./dist/esm/read-entry.d.ts",
+        "default": "./dist/esm/read-entry.js"
+      },
+      "require": {
+        "source": "./src/read-entry.ts",
+        "types": "./dist/commonjs/read-entry.d.ts",
+        "default": "./dist/commonjs/read-entry.js"
+      }
+    },
+    "./write-entry": {
+      "import": {
+        "source": "./src/write-entry.ts",
+        "types": "./dist/esm/write-entry.d.ts",
+        "default": "./dist/esm/write-entry.js"
+      },
+      "require": {
+        "source": "./src/write-entry.ts",
+        "types": "./dist/commonjs/write-entry.d.ts",
+        "default": "./dist/commonjs/write-entry.js"
+      }
+    },
+    "./header": {
+      "import": {
+        "source": "./src/header.ts",
+        "types": "./dist/esm/header.d.ts",
+        "default": "./dist/esm/header.js"
+      },
+      "require": {
+        "source": "./src/header.ts",
+        "types": "./dist/commonjs/header.d.ts",
+        "default": "./dist/commonjs/header.js"
+      }
+    },
+    "./pax": {
+      "import": {
+        "source": "./src/pax.ts",
+        "types": "./dist/esm/pax.d.ts",
+        "default": "./dist/esm/pax.js"
+      },
+      "require": {
+        "source": "./src/pax.ts",
+        "types": "./dist/commonjs/pax.d.ts",
+        "default": "./dist/commonjs/pax.js"
+      }
+    },
+    "./types": {
+      "import": {
+        "source": "./src/types.ts",
+        "types": "./dist/esm/types.d.ts",
+        "default": "./dist/esm/types.js"
+      },
+      "require": {
+        "source": "./src/types.ts",
+        "types": "./dist/commonjs/types.d.ts",
+        "default": "./dist/commonjs/types.js"
+      }
+    }
+  },
+  "type": "module",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md b/node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md
new file mode 100644
index 0000000000000..881248b6d7f0c
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md
@@ -0,0 +1,63 @@
+All packages under `src/` are licensed according to the terms in
+their respective `LICENSE` or `LICENSE.md` files.
+
+The remainder of this project is licensed under the Blue Oak
+Model License, as follows:
+
+-----
+
+# Blue Oak Model License
+
+Version 1.0.0
+
+## Purpose
+
+This license gives everyone as much permission to work with
+this software as possible, while protecting contributors
+from liability.
+
+## Acceptance
+
+In order to receive this license, you must agree to its
+rules.  The rules of this license are both obligations
+under that agreement and conditions to your license.
+You must not do anything with this software that triggers
+a rule that you cannot or will not follow.
+
+## Copyright
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe that contributor's
+copyright in it.
+
+## Notices
+
+You must ensure that everyone who gets a copy of
+any part of this software from you, with or without
+changes, also gets the text of this license or a link to
+<https://blueoakcouncil.org/license/1.0.0>.
+
+## Excuse
+
+If anyone notifies you in writing that you have not
+complied with [Notices](#notices), you can keep your
+license by taking all practical steps to comply within 30
+days after the notice.  If you do not do so, your license
+ends immediately.
+
+## Patent
+
+Each contributor licenses you to do everything with this
+software that would otherwise infringe any patent claims
+they can license or become able to license.
+
+## Reliability
+
+No contributor can revoke this license.
+
+## No Liability
+
+***As far as the law allows, this software comes as is,
+without any warranty or condition, and no contributor
+will be liable to anyone for any damages related to this
+software or this license, under any kind of legal claim.***
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js
new file mode 100644
index 0000000000000..c1e1e4741689d
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js
@@ -0,0 +1,384 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Node = exports.Yallist = void 0;
+class Yallist {
+    tail;
+    head;
+    length = 0;
+    static create(list = []) {
+        return new Yallist(list);
+    }
+    constructor(list = []) {
+        for (const item of list) {
+            this.push(item);
+        }
+    }
+    *[Symbol.iterator]() {
+        for (let walker = this.head; walker; walker = walker.next) {
+            yield walker.value;
+        }
+    }
+    removeNode(node) {
+        if (node.list !== this) {
+            throw new Error('removing node which does not belong to this list');
+        }
+        const next = node.next;
+        const prev = node.prev;
+        if (next) {
+            next.prev = prev;
+        }
+        if (prev) {
+            prev.next = next;
+        }
+        if (node === this.head) {
+            this.head = next;
+        }
+        if (node === this.tail) {
+            this.tail = prev;
+        }
+        this.length--;
+        node.next = undefined;
+        node.prev = undefined;
+        node.list = undefined;
+        return next;
+    }
+    unshiftNode(node) {
+        if (node === this.head) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const head = this.head;
+        node.list = this;
+        node.next = head;
+        if (head) {
+            head.prev = node;
+        }
+        this.head = node;
+        if (!this.tail) {
+            this.tail = node;
+        }
+        this.length++;
+    }
+    pushNode(node) {
+        if (node === this.tail) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const tail = this.tail;
+        node.list = this;
+        node.prev = tail;
+        if (tail) {
+            tail.next = node;
+        }
+        this.tail = node;
+        if (!this.head) {
+            this.head = node;
+        }
+        this.length++;
+    }
+    push(...args) {
+        for (let i = 0, l = args.length; i < l; i++) {
+            push(this, args[i]);
+        }
+        return this.length;
+    }
+    unshift(...args) {
+        for (var i = 0, l = args.length; i < l; i++) {
+            unshift(this, args[i]);
+        }
+        return this.length;
+    }
+    pop() {
+        if (!this.tail) {
+            return undefined;
+        }
+        const res = this.tail.value;
+        const t = this.tail;
+        this.tail = this.tail.prev;
+        if (this.tail) {
+            this.tail.next = undefined;
+        }
+        else {
+            this.head = undefined;
+        }
+        t.list = undefined;
+        this.length--;
+        return res;
+    }
+    shift() {
+        if (!this.head) {
+            return undefined;
+        }
+        const res = this.head.value;
+        const h = this.head;
+        this.head = this.head.next;
+        if (this.head) {
+            this.head.prev = undefined;
+        }
+        else {
+            this.tail = undefined;
+        }
+        h.list = undefined;
+        this.length--;
+        return res;
+    }
+    forEach(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.head, i = 0; !!walker; i++) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.next;
+        }
+    }
+    forEachReverse(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.prev;
+        }
+    }
+    get(n) {
+        let i = 0;
+        let walker = this.head;
+        for (; !!walker && i < n; i++) {
+            walker = walker.next;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    getReverse(n) {
+        let i = 0;
+        let walker = this.tail;
+        for (; !!walker && i < n; i++) {
+            // abort out of the list early if we hit a cycle
+            walker = walker.prev;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    map(fn, thisp) {
+        thisp = thisp || this;
+        const res = new Yallist();
+        for (let walker = this.head; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.next;
+        }
+        return res;
+    }
+    mapReverse(fn, thisp) {
+        thisp = thisp || this;
+        var res = new Yallist();
+        for (let walker = this.tail; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.prev;
+        }
+        return res;
+    }
+    reduce(fn, initial) {
+        let acc;
+        let walker = this.head;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.head) {
+            walker = this.head.next;
+            acc = this.head.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (var i = 0; !!walker; i++) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.next;
+        }
+        return acc;
+    }
+    reduceReverse(fn, initial) {
+        let acc;
+        let walker = this.tail;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.tail) {
+            walker = this.tail.prev;
+            acc = this.tail.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (let i = this.length - 1; !!walker; i--) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.prev;
+        }
+        return acc;
+    }
+    toArray() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.head; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.next;
+        }
+        return arr;
+    }
+    toArrayReverse() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.tail; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.prev;
+        }
+        return arr;
+    }
+    slice(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let walker = this.head;
+        let i = 0;
+        for (i = 0; !!walker && i < from; i++) {
+            walker = walker.next;
+        }
+        for (; !!walker && i < to; i++, walker = walker.next) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    sliceReverse(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let i = this.length;
+        let walker = this.tail;
+        for (; !!walker && i > to; i--) {
+            walker = walker.prev;
+        }
+        for (; !!walker && i > from; i--, walker = walker.prev) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    splice(start, deleteCount = 0, ...nodes) {
+        if (start > this.length) {
+            start = this.length - 1;
+        }
+        if (start < 0) {
+            start = this.length + start;
+        }
+        let walker = this.head;
+        for (let i = 0; !!walker && i < start; i++) {
+            walker = walker.next;
+        }
+        const ret = [];
+        for (let i = 0; !!walker && i < deleteCount; i++) {
+            ret.push(walker.value);
+            walker = this.removeNode(walker);
+        }
+        if (!walker) {
+            walker = this.tail;
+        }
+        else if (walker !== this.tail) {
+            walker = walker.prev;
+        }
+        for (const v of nodes) {
+            walker = insertAfter(this, walker, v);
+        }
+        return ret;
+    }
+    reverse() {
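+        // swap each node's next/prev pointers, then swap head and tail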
+        const head = this.head;
+        const tail = this.tail;
+        for (let walker = head; !!walker; walker = walker.prev) {
+            const p = walker.prev;
+            walker.prev = walker.next;
+            walker.next = p;
+        }
+        this.head = tail;
+        this.tail = head;
+        return this;
+    }
+}
+exports.Yallist = Yallist;
+// insertAfter undefined means "make the node the new head of list"
+function insertAfter(self, node, value) {
+    const prev = node;
+    const next = node ? node.next : self.head;
+    const inserted = new Node(value, prev, next, self);
+    if (inserted.next === undefined) {
+        self.tail = inserted;
+    }
+    if (inserted.prev === undefined) {
+        self.head = inserted;
+    }
+    self.length++;
+    return inserted;
+}
+function push(self, item) {
+    self.tail = new Node(item, self.tail, undefined, self);
+    if (!self.head) {
+        self.head = self.tail;
+    }
+    self.length++;
+}
+function unshift(self, item) {
+    self.head = new Node(item, undefined, self.head, self);
+    if (!self.tail) {
+        self.tail = self.head;
+    }
+    self.length++;
+}
+class Node {
+    list;
+    next;
+    prev;
+    value;
+    constructor(value, prev, next, list) {
+        this.list = list;
+        this.value = value;
+        if (prev) {
+            prev.next = this;
+            this.prev = prev;
+        }
+        else {
+            this.prev = undefined;
+        }
+        if (next) {
+            next.prev = this;
+            this.next = next;
+        }
+        else {
+            this.next = undefined;
+        }
+    }
+}
+exports.Node = Node;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js
new file mode 100644
index 0000000000000..3d81c5113b93a
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js
@@ -0,0 +1,379 @@
+export class Yallist {
+    tail;
+    head;
+    length = 0;
+    static create(list = []) {
+        return new Yallist(list);
+    }
+    constructor(list = []) {
+        for (const item of list) {
+            this.push(item);
+        }
+    }
+    *[Symbol.iterator]() {
+        for (let walker = this.head; walker; walker = walker.next) {
+            yield walker.value;
+        }
+    }
+    removeNode(node) {
+        if (node.list !== this) {
+            throw new Error('removing node which does not belong to this list');
+        }
+        const next = node.next;
+        const prev = node.prev;
+        if (next) {
+            next.prev = prev;
+        }
+        if (prev) {
+            prev.next = next;
+        }
+        if (node === this.head) {
+            this.head = next;
+        }
+        if (node === this.tail) {
+            this.tail = prev;
+        }
+        this.length--;
+        node.next = undefined;
+        node.prev = undefined;
+        node.list = undefined;
+        return next;
+    }
+    unshiftNode(node) {
+        if (node === this.head) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const head = this.head;
+        node.list = this;
+        node.next = head;
+        if (head) {
+            head.prev = node;
+        }
+        this.head = node;
+        if (!this.tail) {
+            this.tail = node;
+        }
+        this.length++;
+    }
+    pushNode(node) {
+        if (node === this.tail) {
+            return;
+        }
+        if (node.list) {
+            node.list.removeNode(node);
+        }
+        const tail = this.tail;
+        node.list = this;
+        node.prev = tail;
+        if (tail) {
+            tail.next = node;
+        }
+        this.tail = node;
+        if (!this.head) {
+            this.head = node;
+        }
+        this.length++;
+    }
+    push(...args) {
+        for (let i = 0, l = args.length; i < l; i++) {
+            push(this, args[i]);
+        }
+        return this.length;
+    }
+    unshift(...args) {
+        for (var i = 0, l = args.length; i < l; i++) {
+            unshift(this, args[i]);
+        }
+        return this.length;
+    }
+    pop() {
+        if (!this.tail) {
+            return undefined;
+        }
+        const res = this.tail.value;
+        const t = this.tail;
+        this.tail = this.tail.prev;
+        if (this.tail) {
+            this.tail.next = undefined;
+        }
+        else {
+            this.head = undefined;
+        }
+        t.list = undefined;
+        this.length--;
+        return res;
+    }
+    shift() {
+        if (!this.head) {
+            return undefined;
+        }
+        const res = this.head.value;
+        const h = this.head;
+        this.head = this.head.next;
+        if (this.head) {
+            this.head.prev = undefined;
+        }
+        else {
+            this.tail = undefined;
+        }
+        h.list = undefined;
+        this.length--;
+        return res;
+    }
+    forEach(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.head, i = 0; !!walker; i++) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.next;
+        }
+    }
+    forEachReverse(fn, thisp) {
+        thisp = thisp || this;
+        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
+            fn.call(thisp, walker.value, i, this);
+            walker = walker.prev;
+        }
+    }
+    get(n) {
+        let i = 0;
+        let walker = this.head;
+        for (; !!walker && i < n; i++) {
+            walker = walker.next;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    getReverse(n) {
+        let i = 0;
+        let walker = this.tail;
+        for (; !!walker && i < n; i++) {
+            // abort out of the list early if we hit a cycle
+            walker = walker.prev;
+        }
+        if (i === n && !!walker) {
+            return walker.value;
+        }
+    }
+    map(fn, thisp) {
+        thisp = thisp || this;
+        const res = new Yallist();
+        for (let walker = this.head; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.next;
+        }
+        return res;
+    }
+    mapReverse(fn, thisp) {
+        thisp = thisp || this;
+        var res = new Yallist();
+        for (let walker = this.tail; !!walker;) {
+            res.push(fn.call(thisp, walker.value, this));
+            walker = walker.prev;
+        }
+        return res;
+    }
+    reduce(fn, initial) {
+        let acc;
+        let walker = this.head;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.head) {
+            walker = this.head.next;
+            acc = this.head.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (var i = 0; !!walker; i++) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.next;
+        }
+        return acc;
+    }
+    reduceReverse(fn, initial) {
+        let acc;
+        let walker = this.tail;
+        if (arguments.length > 1) {
+            acc = initial;
+        }
+        else if (this.tail) {
+            walker = this.tail.prev;
+            acc = this.tail.value;
+        }
+        else {
+            throw new TypeError('Reduce of empty list with no initial value');
+        }
+        for (let i = this.length - 1; !!walker; i--) {
+            acc = fn(acc, walker.value, i);
+            walker = walker.prev;
+        }
+        return acc;
+    }
+    toArray() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.head; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.next;
+        }
+        return arr;
+    }
+    toArrayReverse() {
+        const arr = new Array(this.length);
+        for (let i = 0, walker = this.tail; !!walker; i++) {
+            arr[i] = walker.value;
+            walker = walker.prev;
+        }
+        return arr;
+    }
+    slice(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let walker = this.head;
+        let i = 0;
+        for (i = 0; !!walker && i < from; i++) {
+            walker = walker.next;
+        }
+        for (; !!walker && i < to; i++, walker = walker.next) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    sliceReverse(from = 0, to = this.length) {
+        if (to < 0) {
+            to += this.length;
+        }
+        if (from < 0) {
+            from += this.length;
+        }
+        const ret = new Yallist();
+        if (to < from || to < 0) {
+            return ret;
+        }
+        if (from < 0) {
+            from = 0;
+        }
+        if (to > this.length) {
+            to = this.length;
+        }
+        let i = this.length;
+        let walker = this.tail;
+        for (; !!walker && i > to; i--) {
+            walker = walker.prev;
+        }
+        for (; !!walker && i > from; i--, walker = walker.prev) {
+            ret.push(walker.value);
+        }
+        return ret;
+    }
+    splice(start, deleteCount = 0, ...nodes) {
+        if (start > this.length) {
+            start = this.length - 1;
+        }
+        if (start < 0) {
+            start = this.length + start;
+        }
+        let walker = this.head;
+        for (let i = 0; !!walker && i < start; i++) {
+            walker = walker.next;
+        }
+        const ret = [];
+        for (let i = 0; !!walker && i < deleteCount; i++) {
+            ret.push(walker.value);
+            walker = this.removeNode(walker);
+        }
+        if (!walker) {
+            walker = this.tail;
+        }
+        else if (walker !== this.tail) {
+            walker = walker.prev;
+        }
+        for (const v of nodes) {
+            walker = insertAfter(this, walker, v);
+        }
+        return ret;
+    }
+    reverse() {
+        const head = this.head;
+        const tail = this.tail;
+        for (let walker = head; !!walker; walker = walker.prev) {
+            const p = walker.prev;
+            walker.prev = walker.next;
+            walker.next = p;
+        }
+        this.head = tail;
+        this.tail = head;
+        return this;
+    }
+}
+// insertAfter undefined means "make the node the new head of list"
+function insertAfter(self, node, value) {
+    const prev = node;
+    const next = node ? node.next : self.head;
+    const inserted = new Node(value, prev, next, self);
+    if (inserted.next === undefined) {
+        self.tail = inserted;
+    }
+    if (inserted.prev === undefined) {
+        self.head = inserted;
+    }
+    self.length++;
+    return inserted;
+}
+function push(self, item) {
+    self.tail = new Node(item, self.tail, undefined, self);
+    if (!self.head) {
+        self.head = self.tail;
+    }
+    self.length++;
+}
+function unshift(self, item) {
+    self.head = new Node(item, undefined, self.head, self);
+    if (!self.tail) {
+        self.tail = self.head;
+    }
+    self.length++;
+}
+export class Node {
+    list;
+    next;
+    prev;
+    value;
+    constructor(value, prev, next, list) {
+        this.list = list;
+        this.value = value;
+        if (prev) {
+            prev.next = this;
+            this.prev = prev;
+        }
+        else {
+            this.prev = undefined;
+        }
+        if (next) {
+            next.prev = this;
+            this.next = next;
+        }
+        else {
+            this.next = undefined;
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/package.json b/node_modules/make-fetch-happen/node_modules/yallist/package.json
new file mode 100644
index 0000000000000..2f5247808bbea
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/yallist/package.json
@@ -0,0 +1,68 @@
+{
+  "name": "yallist",
+  "version": "5.0.0",
+  "description": "Yet Another Linked List",
+  "files": [
+    "dist"
+  ],
+  "devDependencies": {
+    "prettier": "^3.2.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.13"
+  },
+  "scripts": {
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
+    "typedoc": "typedoc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/yallist.git"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "BlueOak-1.0.0",
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "engines": {
+    "node": ">=18"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
deleted file mode 100644
index 64a0f1f833222..0000000000000
--- a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
+++ /dev/null
@@ -1,1017 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
-const brace_expansion_1 = __importDefault(require("brace-expansion"));
-const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
-const ast_js_1 = require("./ast.js");
-const escape_js_1 = require("./escape.js");
-const unescape_js_1 = require("./unescape.js");
-const minimatch = (p, pattern, options = {}) => {
-    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-exports.minimatch = minimatch;
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-exports.minimatch.sep = exports.sep;
-exports.GLOBSTAR = Symbol('globstar **');
-exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
-exports.filter = filter;
-exports.minimatch.filter = exports.filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return exports.minimatch;
-    }
-    const orig = exports.minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: exports.GLOBSTAR,
-    });
-};
-exports.defaults = defaults;
-exports.minimatch.defaults = exports.defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-const braceExpand = (pattern, options = {}) => {
-    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-    // Thanks to Yeting Li <https://github.com/yetingli> for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return (0, brace_expansion_1.default)(pattern);
-};
-exports.braceExpand = braceExpand;
-exports.minimatch.braceExpand = exports.braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-exports.makeRe = makeRe;
-exports.minimatch.makeRe = exports.makeRe;
-const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-exports.match = match;
-exports.minimatch.match = exports.match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === exports.GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return (0, exports.braceExpand)(this.pattern, this.options);
-    }
-    parse(pattern) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return exports.GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === exports.GLOBSTAR
-                        ? exports.GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== exports.GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== exports.GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = exports.GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return exports.minimatch.defaults(def).Minimatch;
-    }
-}
-exports.Minimatch = Minimatch;
-/* c8 ignore start */
-var ast_js_2 = require("./ast.js");
-Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
-var escape_js_2 = require("./escape.js");
-Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
-var unescape_js_2 = require("./unescape.js");
-Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
-/* c8 ignore stop */
-exports.minimatch.AST = ast_js_1.AST;
-exports.minimatch.Minimatch = Minimatch;
-exports.minimatch.escape = escape_js_1.escape;
-exports.minimatch.unescape = unescape_js_1.unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js b/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
deleted file mode 100644
index 84b577b0472cb..0000000000000
--- a/node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
+++ /dev/null
@@ -1,1001 +0,0 @@
-import expand from 'brace-expansion';
-import { assertValidPattern } from './assert-valid-pattern.js';
-import { AST } from './ast.js';
-import { escape } from './escape.js';
-import { unescape } from './unescape.js';
-export const minimatch = (p, pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep;
-export const GLOBSTAR = Symbol('globstar **');
-minimatch.GLOBSTAR = GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-export const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return minimatch;
-    }
-    const orig = minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: GLOBSTAR,
-    });
-};
-minimatch.defaults = defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-export const braceExpand = (pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // Thanks to Yeting Li  for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return expand(pattern);
-};
-minimatch.braceExpand = braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-export const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-minimatch.match = match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-export class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        assertValidPattern(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/*/ -> */**/ <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // 
/**/../

/

/ -> {

/../

/

/,

/**/

/

-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return braceExpand(this.pattern, this.options);
-    }
-    parse(pattern) {
-        assertValidPattern(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === GLOBSTAR
-                        ? GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== GLOBSTAR || prev === GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return minimatch.defaults(def).Minimatch;
-    }
-}
-/* c8 ignore start */
-export { AST } from './ast.js';
-export { escape } from './escape.js';
-export { unescape } from './unescape.js';
-/* c8 ignore stop */
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
deleted file mode 100644
index 5fc86bbd0116c..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertValidPattern = void 0;
-const MAX_PATTERN_LENGTH = 1024 * 64;
-const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-exports.assertValidPattern = assertValidPattern;
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js
deleted file mode 100644
index 7b2109625eaeb..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/commonjs/ast.js
+++ /dev/null
@@ -1,592 +0,0 @@
-"use strict";
-// parse a single path portion
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.AST = void 0;
-const brace_expressions_js_1 = require("./brace-expressions.js");
-const unescape_js_1 = require("./unescape.js");
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everthing that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                (0, unescape_js_1.unescape)(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something,but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            (0, unescape_js_1.unescape)(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
-    }
-}
-exports.AST = AST;
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js
deleted file mode 100644
index 0e13eefc4cfee..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/commonjs/brace-expressions.js
+++ /dev/null
@@ -1,152 +0,0 @@
-"use strict";
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseClass = void 0;
-// { : [, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-exports.parseClass = parseClass;
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js
deleted file mode 100644
index 02a4f8a8e0a58..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/commonjs/escape.js
+++ /dev/null
@@ -1,22 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.escape = void 0;
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-exports.escape = escape;
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js
deleted file mode 100644
index 47c36bcee5a02..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/commonjs/unescape.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = void 0;
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-exports.unescape = unescape;
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js
deleted file mode 100644
index 7b534fc30200b..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/esm/assert-valid-pattern.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const MAX_PATTERN_LENGTH = 1024 * 64;
-export const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/ast.js b/node_modules/pacote/node_modules/minimatch/dist/esm/ast.js
deleted file mode 100644
index 2d2bced6533de..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/esm/ast.js
+++ /dev/null
@@ -1,588 +0,0 @@
-// parse a single path portion
-import { parseClass } from './brace-expressions.js';
-import { unescape } from './unescape.js';
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-export class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everthing that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                unescape(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, unescape(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something,but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            unescape(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = parseClass(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, unescape(glob), !!hasMagic, uflag];
-    }
-}
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js
deleted file mode 100644
index c629d6ae816e2..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/esm/brace-expressions.js
+++ /dev/null
@@ -1,148 +0,0 @@
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-// { : [, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-export const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/escape.js b/node_modules/pacote/node_modules/minimatch/dist/esm/escape.js
deleted file mode 100644
index 16f7c8c7bdc64..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/esm/escape.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js b/node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js
deleted file mode 100644
index 0faf9a2b7306f..0000000000000
--- a/node_modules/pacote/node_modules/minimatch/dist/esm/unescape.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 9b7ab8ca534b5..bc2c637083dbd 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -96,7 +96,7 @@
         "@sigstore/tuf": "^3.1.1",
         "abbrev": "^3.0.1",
         "archy": "~1.0.0",
-        "cacache": "^19.0.1",
+        "cacache": "^20.0.1",
         "chalk": "^5.4.1",
         "ci-info": "^4.3.0",
         "cli-columns": "^4.0.0",
@@ -3570,6 +3570,91 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": {
+      "version": "19.0.1",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
+      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/fs": "^4.0.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^10.0.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^2.0.1",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^7.0.2",
+        "ssri": "^12.0.0",
+        "tar": "^7.4.3",
+        "unique-filename": "^4.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/chownr": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
+      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
+      "license": "BlueOak-1.0.0",
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/minizlib": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
+      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
+      "license": "MIT",
+      "dependencies": {
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
+      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
+      "license": "MIT",
+      "bin": {
+        "mkdirp": "dist/cjs/src/bin.js"
+      },
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/tar": {
+      "version": "7.4.3",
+      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
+      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/fs-minipass": "^4.0.0",
+        "chownr": "^3.0.0",
+        "minipass": "^7.1.2",
+        "minizlib": "^3.0.1",
+        "mkdirp": "^3.0.1",
+        "yallist": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/@npmcli/metavuln-calculator/node_modules/yallist": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
+      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
+      "license": "BlueOak-1.0.0",
+      "engines": {
+        "node": ">=18"
+      }
+    },
     "node_modules/@npmcli/mock-globals": {
       "resolved": "mock-globals",
       "link": true
@@ -6098,94 +6183,109 @@
       "license": "MIT"
     },
     "node_modules/cacache": {
-      "version": "19.0.1",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
-      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
+      "version": "20.0.1",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-20.0.1.tgz",
+      "integrity": "sha512-+7LYcYGBYoNqTp1Rv7Ny1YjUo5E0/ftkQtraH3vkfAGgVHc+ouWdC8okAwQgQR7EVIdW6JTzTmhKFwzb+4okAQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/fs": "^4.0.0",
         "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^10.0.1",
+        "glob": "^11.0.3",
+        "lru-cache": "^11.1.0",
         "minipass": "^7.0.3",
         "minipass-collect": "^2.0.1",
         "minipass-flush": "^1.0.5",
         "minipass-pipeline": "^1.2.4",
         "p-map": "^7.0.2",
         "ssri": "^12.0.0",
-        "tar": "^7.4.3",
         "unique-filename": "^4.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/cacache/node_modules/chownr": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
-      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/cacache/node_modules/minizlib": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
+    "node_modules/cacache/node_modules/glob": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
+      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
-      "license": "MIT",
+      "license": "ISC",
       "dependencies": {
-        "minipass": "^7.1.2"
+        "foreground-child": "^3.3.1",
+        "jackspeak": "^4.1.1",
+        "minimatch": "^10.0.3",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^2.0.0"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
       },
       "engines": {
-        "node": ">= 18"
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/cacache/node_modules/mkdirp": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
-      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
+    "node_modules/cacache/node_modules/jackspeak": {
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "inBundle": true,
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "dist/cjs/src/bin.js"
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
       },
       "engines": {
-        "node": ">=10"
+        "node": "20 || >=22"
       },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/cacache/node_modules/tar": {
-      "version": "7.4.3",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
-      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
+    "node_modules/cacache/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
+    "node_modules/cacache/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@isaacs/fs-minipass": "^4.0.0",
-        "chownr": "^3.0.0",
-        "minipass": "^7.1.2",
-        "minizlib": "^3.0.1",
-        "mkdirp": "^3.0.1",
-        "yallist": "^5.0.0"
+        "@isaacs/brace-expansion": "^5.0.0"
       },
       "engines": {
-        "node": ">=18"
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/cacache/node_modules/yallist": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
-      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
+    "node_modules/cacache/node_modules/path-scurry": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
+      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^11.0.0",
+        "minipass": "^7.1.2"
+      },
       "engines": {
-        "node": ">=18"
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
     "node_modules/caching-transform": {
@@ -11178,6 +11278,69 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/make-fetch-happen/node_modules/cacache": {
+      "version": "19.0.1",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
+      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/fs": "^4.0.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^10.0.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^2.0.1",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^7.0.2",
+        "ssri": "^12.0.0",
+        "tar": "^7.4.3",
+        "unique-filename": "^4.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/make-fetch-happen/node_modules/chownr": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
+      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/make-fetch-happen/node_modules/minizlib": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
+      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/make-fetch-happen/node_modules/mkdirp": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
+      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
+      "inBundle": true,
+      "license": "MIT",
+      "bin": {
+        "mkdirp": "dist/cjs/src/bin.js"
+      },
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/make-fetch-happen/node_modules/negotiator": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
@@ -11188,6 +11351,34 @@
         "node": ">= 0.6"
       }
     },
+    "node_modules/make-fetch-happen/node_modules/tar": {
+      "version": "7.4.3",
+      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
+      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/fs-minipass": "^4.0.0",
+        "chownr": "^3.0.0",
+        "minipass": "^7.1.2",
+        "minizlib": "^3.0.1",
+        "mkdirp": "^3.0.1",
+        "yallist": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/make-fetch-happen/node_modules/yallist": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
+      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "engines": {
+        "node": ">=18"
+      }
+    },
     "node_modules/map-obj": {
       "version": "4.3.0",
       "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
@@ -13527,45 +13718,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/@tufjs/models/node_modules/minimatch": {
-      "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "brace-expansion": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/pacote/node_modules/cacache": {
-      "version": "20.0.1",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-20.0.1.tgz",
-      "integrity": "sha512-+7LYcYGBYoNqTp1Rv7Ny1YjUo5E0/ftkQtraH3vkfAGgVHc+ouWdC8okAwQgQR7EVIdW6JTzTmhKFwzb+4okAQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/fs": "^4.0.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^11.0.3",
-        "lru-cache": "^11.1.0",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^2.0.1",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^7.0.2",
-        "ssri": "^12.0.0",
-        "unique-filename": "^4.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/chownr": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
@@ -13576,30 +13728,6 @@
         "node": ">=18"
       }
     },
-    "node_modules/pacote/node_modules/glob": {
-      "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
-      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.3.1",
-        "jackspeak": "^4.1.1",
-        "minimatch": "^10.0.3",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^2.0.0"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/pacote/node_modules/hosted-git-info": {
       "version": "9.0.0",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
@@ -13613,22 +13741,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/jackspeak": {
-      "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
-      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/pacote/node_modules/lru-cache": {
       "version": "11.2.1",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
@@ -13662,22 +13774,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/minimatch": {
-      "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/pacote/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -13769,23 +13865,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/path-scurry": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
-      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^11.0.0",
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/pacote/node_modules/sigstore": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.0.0.tgz",
@@ -19627,7 +19706,7 @@
         "@npmcli/redact": "^3.0.0",
         "@npmcli/run-script": "^10.0.0",
         "bin-links": "^5.0.0",
-        "cacache": "^19.0.1",
+        "cacache": "^20.0.1",
         "common-ancestor-path": "^1.0.1",
         "hosted-git-info": "^8.0.0",
         "json-stringify-nice": "^1.1.4",
diff --git a/package.json b/package.json
index dc712e13a6022..e42eafe859b1f 100644
--- a/package.json
+++ b/package.json
@@ -63,7 +63,7 @@
     "@sigstore/tuf": "^3.1.1",
     "abbrev": "^3.0.1",
     "archy": "~1.0.0",
-    "cacache": "^19.0.1",
+    "cacache": "^20.0.1",
     "chalk": "^5.4.1",
     "ci-info": "^4.3.0",
     "cli-columns": "^4.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index d59dc679f162f..993898149542b 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -15,7 +15,7 @@
     "@npmcli/redact": "^3.0.0",
     "@npmcli/run-script": "^10.0.0",
     "bin-links": "^5.0.0",
-    "cacache": "^19.0.1",
+    "cacache": "^20.0.1",
     "common-ancestor-path": "^1.0.1",
     "hosted-git-info": "^8.0.0",
     "json-stringify-nice": "^1.1.4",

From 6221e277b4b841df09225b4d72f9eda70db1f15a Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:34:52 -0700
Subject: [PATCH 11/63] deps: @npmcli/metavuln-calculator@9.0.2

---
 node_modules/.gitignore                       |   8 -
 .../node_modules/cacache/LICENSE.md           |  16 -
 .../node_modules/cacache/lib/content/path.js  |  29 -
 .../node_modules/cacache/lib/content/read.js  | 165 ----
 .../node_modules/cacache/lib/content/rm.js    |  18 -
 .../node_modules/cacache/lib/content/write.js | 206 ----
 .../node_modules/cacache/lib/entry-index.js   | 336 -------
 .../node_modules/cacache/lib/get.js           | 170 ----
 .../node_modules/cacache/lib/index.js         |  42 -
 .../node_modules/cacache/lib/memoization.js   |  72 --
 .../node_modules/cacache/lib/put.js           |  80 --
 .../node_modules/cacache/lib/rm.js            |  31 -
 .../node_modules/cacache/lib/util/glob.js     |   7 -
 .../cacache/lib/util/hash-to-segments.js      |   7 -
 .../node_modules/cacache/lib/util/tmp.js      |  26 -
 .../node_modules/cacache/lib/verify.js        | 258 -----
 .../node_modules/cacache/package.json         |  83 --
 .../node_modules/chownr/LICENSE.md            |  63 --
 .../chownr/dist/commonjs/index.js             |  93 --
 .../chownr/dist/commonjs/package.json         |   3 -
 .../node_modules/chownr/dist/esm/index.js     |  85 --
 .../node_modules/chownr/dist/esm/package.json |   3 -
 .../node_modules/chownr/package.json          |  69 --
 .../node_modules/minizlib/LICENSE             |  26 -
 .../minizlib/dist/commonjs/constants.js       | 123 ---
 .../minizlib/dist/commonjs/index.js           | 392 --------
 .../minizlib/dist/commonjs/package.json       |   3 -
 .../minizlib/dist/esm/constants.js            | 117 ---
 .../node_modules/minizlib/dist/esm/index.js   | 340 -------
 .../minizlib/dist/esm/package.json            |   3 -
 .../node_modules/minizlib/package.json        |  80 --
 .../node_modules/mkdirp/LICENSE               |  21 -
 .../node_modules/mkdirp/dist/cjs/package.json |  91 --
 .../node_modules/mkdirp/dist/cjs/src/bin.js   |  80 --
 .../mkdirp/dist/cjs/src/find-made.js          |  35 -
 .../node_modules/mkdirp/dist/cjs/src/index.js |  53 -
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |  79 --
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |  50 -
 .../mkdirp/dist/cjs/src/opts-arg.js           |  38 -
 .../mkdirp/dist/cjs/src/path-arg.js           |  28 -
 .../mkdirp/dist/cjs/src/use-native.js         |  17 -
 .../node_modules/mkdirp/dist/mjs/find-made.js |  30 -
 .../node_modules/mkdirp/dist/mjs/index.js     |  43 -
 .../mkdirp/dist/mjs/mkdirp-manual.js          |  75 --
 .../mkdirp/dist/mjs/mkdirp-native.js          |  46 -
 .../node_modules/mkdirp/dist/mjs/opts-arg.js  |  34 -
 .../node_modules/mkdirp/dist/mjs/package.json |   3 -
 .../node_modules/mkdirp/dist/mjs/path-arg.js  |  24 -
 .../mkdirp/dist/mjs/use-native.js             |  14 -
 .../node_modules/mkdirp/package.json          |  91 --
 .../node_modules/tar/LICENSE                  |  15 -
 .../node_modules/tar/dist/commonjs/create.js  |  83 --
 .../tar/dist/commonjs/cwd-error.js            |  18 -
 .../node_modules/tar/dist/commonjs/extract.js |  78 --
 .../tar/dist/commonjs/get-write-flag.js       |  29 -
 .../node_modules/tar/dist/commonjs/header.js  | 306 ------
 .../node_modules/tar/dist/commonjs/index.js   |  54 -
 .../tar/dist/commonjs/large-numbers.js        |  99 --
 .../node_modules/tar/dist/commonjs/list.js    | 136 ---
 .../tar/dist/commonjs/make-command.js         |  61 --
 .../node_modules/tar/dist/commonjs/mkdir.js   | 209 ----
 .../tar/dist/commonjs/mode-fix.js             |  29 -
 .../tar/dist/commonjs/normalize-unicode.js    |  17 -
 .../dist/commonjs/normalize-windows-path.js   |  12 -
 .../node_modules/tar/dist/commonjs/options.js |  66 --
 .../node_modules/tar/dist/commonjs/pack.js    | 477 ---------
 .../tar/dist/commonjs/package.json            |   3 -
 .../node_modules/tar/dist/commonjs/parse.js   | 599 ------------
 .../tar/dist/commonjs/path-reservations.js    | 170 ----
 .../node_modules/tar/dist/commonjs/pax.js     | 158 ---
 .../tar/dist/commonjs/read-entry.js           | 140 ---
 .../node_modules/tar/dist/commonjs/replace.js | 231 -----
 .../tar/dist/commonjs/strip-absolute-path.js  |  29 -
 .../dist/commonjs/strip-trailing-slashes.js   |  18 -
 .../tar/dist/commonjs/symlink-error.js        |  19 -
 .../node_modules/tar/dist/commonjs/types.js   |  50 -
 .../node_modules/tar/dist/commonjs/unpack.js  | 919 ------------------
 .../node_modules/tar/dist/commonjs/update.js  |  33 -
 .../tar/dist/commonjs/warn-method.js          |  31 -
 .../tar/dist/commonjs/winchars.js             |  14 -
 .../tar/dist/commonjs/write-entry.js          | 689 -------------
 .../node_modules/tar/dist/esm/create.js       |  77 --
 .../node_modules/tar/dist/esm/cwd-error.js    |  14 -
 .../node_modules/tar/dist/esm/extract.js      |  49 -
 .../tar/dist/esm/get-write-flag.js            |  23 -
 .../node_modules/tar/dist/esm/header.js       | 279 ------
 .../node_modules/tar/dist/esm/index.js        |  20 -
 .../tar/dist/esm/large-numbers.js             |  94 --
 .../node_modules/tar/dist/esm/list.js         | 106 --
 .../node_modules/tar/dist/esm/make-command.js |  57 --
 .../node_modules/tar/dist/esm/mkdir.js        | 201 ----
 .../node_modules/tar/dist/esm/mode-fix.js     |  25 -
 .../tar/dist/esm/normalize-unicode.js         |  13 -
 .../tar/dist/esm/normalize-windows-path.js    |   9 -
 .../node_modules/tar/dist/esm/options.js      |  54 -
 .../node_modules/tar/dist/esm/pack.js         | 445 ---------
 .../node_modules/tar/dist/esm/package.json    |   3 -
 .../node_modules/tar/dist/esm/parse.js        | 595 ------------
 .../tar/dist/esm/path-reservations.js         | 166 ----
 .../node_modules/tar/dist/esm/pax.js          | 154 ---
 .../node_modules/tar/dist/esm/read-entry.js   | 136 ---
 .../node_modules/tar/dist/esm/replace.js      | 225 -----
 .../tar/dist/esm/strip-absolute-path.js       |  25 -
 .../tar/dist/esm/strip-trailing-slashes.js    |  14 -
 .../tar/dist/esm/symlink-error.js             |  15 -
 .../node_modules/tar/dist/esm/types.js        |  45 -
 .../node_modules/tar/dist/esm/unpack.js       | 888 -----------------
 .../node_modules/tar/dist/esm/update.js       |  30 -
 .../node_modules/tar/dist/esm/warn-method.js  |  27 -
 .../node_modules/tar/dist/esm/winchars.js     |   9 -
 .../node_modules/tar/dist/esm/write-entry.js  | 657 -------------
 .../node_modules/tar/package.json             | 325 -------
 .../node_modules/yallist/LICENSE.md           |  63 --
 .../yallist/dist/commonjs/index.js            | 384 --------
 .../yallist/dist/commonjs/package.json        |   3 -
 .../node_modules/yallist/dist/esm/index.js    | 379 --------
 .../yallist/dist/esm/package.json             |   3 -
 .../node_modules/yallist/package.json         |  68 --
 .../@npmcli/metavuln-calculator/package.json  |   4 +-
 package-lock.json                             |  95 +-
 workspaces/arborist/package.json              |   2 +-
 121 files changed, 8 insertions(+), 14371 deletions(-)
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/chownr/LICENSE.md
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/chownr/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/constants.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/constants.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/package.json
 delete mode 100755 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/bin.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/find-made.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/opts-arg.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/path-arg.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/use-native.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/find-made.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-native.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/opts-arg.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/path-arg.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/use-native.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/LICENSE
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/create.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/cwd-error.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/extract.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/get-write-flag.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/header.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/large-numbers.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/list.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/make-command.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mkdir.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mode-fix.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-unicode.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-windows-path.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/options.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pack.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/parse.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/path-reservations.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pax.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/read-entry.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/replace.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-absolute-path.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/symlink-error.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/types.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/unpack.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/update.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/warn-method.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/winchars.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/write-entry.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/create.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/cwd-error.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/extract.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/get-write-flag.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/header.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/large-numbers.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/list.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/make-command.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mkdir.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mode-fix.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-unicode.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-windows-path.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/options.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pack.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/parse.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/path-reservations.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pax.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/read-entry.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/replace.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-absolute-path.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-trailing-slashes.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/symlink-error.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/types.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/unpack.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/update.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/warn-method.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/winchars.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/write-entry.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/tar/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/yallist/LICENSE.md
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/metavuln-calculator/node_modules/yallist/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 7fedfe7f3b4bc..8815394a1bbc1 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -31,14 +31,6 @@
 !/@npmcli/map-workspaces/node_modules/minimatch
 !/@npmcli/map-workspaces/node_modules/path-scurry
 !/@npmcli/metavuln-calculator
-!/@npmcli/metavuln-calculator/node_modules/
-/@npmcli/metavuln-calculator/node_modules/*
-!/@npmcli/metavuln-calculator/node_modules/cacache
-!/@npmcli/metavuln-calculator/node_modules/chownr
-!/@npmcli/metavuln-calculator/node_modules/minizlib
-!/@npmcli/metavuln-calculator/node_modules/mkdirp
-!/@npmcli/metavuln-calculator/node_modules/tar
-!/@npmcli/metavuln-calculator/node_modules/yallist
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
deleted file mode 100644
index ad5a76a4f73f2..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-const contentVer = require('../../package.json')['cache-version'].content
-const hashToSegments = require('../util/hash-to-segments')
-const path = require('path')
-const ssri = require('ssri')
-
-// Current format of content file path:
-//
-// sha512-BaSE64Hex= ->
-// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee
-//
-module.exports = contentPath
-
-function contentPath (cache, integrity) {
-  const sri = ssri.parse(integrity, { single: true })
-  // contentPath is the *strongest* algo given
-  return path.join(
-    contentDir(cache),
-    sri.algorithm,
-    ...hashToSegments(sri.hexDigest())
-  )
-}
-
-module.exports.contentDir = contentDir
-
-function contentDir (cache) {
-  return path.join(cache, `content-v${contentVer}`)
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
deleted file mode 100644
index 5f6192c3cec56..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/read.js
+++ /dev/null
@@ -1,165 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const fsm = require('fs-minipass')
-const ssri = require('ssri')
-const contentPath = require('./path')
-const Pipeline = require('minipass-pipeline')
-
-module.exports = read
-
-const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024
-async function read (cache, integrity, opts = {}) {
-  const { size } = opts
-  const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-    // get size
-    const stat = size ? { size } : await fs.stat(cpath)
-    return { stat, cpath, sri }
-  })
-
-  if (stat.size > MAX_SINGLE_READ_SIZE) {
-    return readPipeline(cpath, stat.size, sri, new Pipeline()).concat()
-  }
-
-  const data = await fs.readFile(cpath, { encoding: null })
-
-  if (stat.size !== data.length) {
-    throw sizeError(stat.size, data.length)
-  }
-
-  if (!ssri.checkData(data, sri)) {
-    throw integrityError(sri, cpath)
-  }
-
-  return data
-}
-
-const readPipeline = (cpath, size, sri, stream) => {
-  stream.push(
-    new fsm.ReadStream(cpath, {
-      size,
-      readSize: MAX_SINGLE_READ_SIZE,
-    }),
-    ssri.integrityStream({
-      integrity: sri,
-      size,
-    })
-  )
-  return stream
-}
-
-module.exports.stream = readStream
-module.exports.readStream = readStream
-
-function readStream (cache, integrity, opts = {}) {
-  const { size } = opts
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => {
-      // get size
-      const stat = size ? { size } : await fs.stat(cpath)
-      return { stat, cpath, sri }
-    })
-
-    return readPipeline(cpath, stat.size, sri, stream)
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.copy = copy
-
-function copy (cache, integrity, dest) {
-  return withContentSri(cache, integrity, (cpath) => {
-    return fs.copyFile(cpath, dest)
-  })
-}
-
-module.exports.hasContent = hasContent
-
-async function hasContent (cache, integrity) {
-  if (!integrity) {
-    return false
-  }
-
-  try {
-    return await withContentSri(cache, integrity, async (cpath, sri) => {
-      const stat = await fs.stat(cpath)
-      return { size: stat.size, sri, stat }
-    })
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return false
-    }
-
-    if (err.code === 'EPERM') {
-      /* istanbul ignore else */
-      if (process.platform !== 'win32') {
-        throw err
-      } else {
-        return false
-      }
-    }
-  }
-}
-
-async function withContentSri (cache, integrity, fn) {
-  const sri = ssri.parse(integrity)
-  // If `integrity` has multiple entries, pick the first digest
-  // with available local data.
-  const algo = sri.pickAlgorithm()
-  const digests = sri[algo]
-
-  if (digests.length <= 1) {
-    const cpath = contentPath(cache, digests[0])
-    return fn(cpath, digests[0])
-  } else {
-    // Can't use race here because a generic error can happen before
-    // a ENOENT error, and can happen before a valid result
-    const results = await Promise.all(digests.map(async (meta) => {
-      try {
-        return await withContentSri(cache, meta, fn)
-      } catch (err) {
-        if (err.code === 'ENOENT') {
-          return Object.assign(
-            new Error('No matching content found for ' + sri.toString()),
-            { code: 'ENOENT' }
-          )
-        }
-        return err
-      }
-    }))
-    // Return the first non error if it is found
-    const result = results.find((r) => !(r instanceof Error))
-    if (result) {
-      return result
-    }
-
-    // Throw the No matching content found error
-    const enoentError = results.find((r) => r.code === 'ENOENT')
-    if (enoentError) {
-      throw enoentError
-    }
-
-    // Throw generic error
-    throw results.find((r) => r instanceof Error)
-  }
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function integrityError (sri, path) {
-  const err = new Error(`Integrity verification failed for ${sri} (${path})`)
-  err.code = 'EINTEGRITY'
-  err.sri = sri
-  err.path = path
-  return err
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
deleted file mode 100644
index ce58d679e4cb2..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/rm.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-const fs = require('fs/promises')
-const contentPath = require('./path')
-const { hasContent } = require('./read')
-
-module.exports = rm
-
-async function rm (cache, integrity) {
-  const content = await hasContent(cache, integrity)
-  // ~pretty~ sure we can't end up with a content lacking sri, but be safe
-  if (content && content.sri) {
-    await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true })
-    return true
-  } else {
-    return false
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
deleted file mode 100644
index e7187abca8788..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/content/write.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const events = require('events')
-
-const contentPath = require('./path')
-const fs = require('fs/promises')
-const { moveFile } = require('@npmcli/fs')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-const Flush = require('minipass-flush')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-const fsm = require('fs-minipass')
-
-module.exports = write
-
-// Cache of move operations in process so we don't duplicate
-const moveOperations = new Map()
-
-async function write (cache, data, opts = {}) {
-  const { algorithms, size, integrity } = opts
-
-  if (typeof size === 'number' && data.length !== size) {
-    throw sizeError(size, data.length)
-  }
-
-  const sri = ssri.fromData(data, algorithms ? { algorithms } : {})
-  if (integrity && !ssri.checkData(data, integrity, opts)) {
-    throw checksumError(integrity, sri)
-  }
-
-  for (const algo in sri) {
-    const tmp = await makeTmp(cache, opts)
-    const hash = sri[algo].toString()
-    try {
-      await fs.writeFile(tmp.target, data, { flag: 'wx' })
-      await moveToDestination(tmp, cache, hash, opts)
-    } finally {
-      if (!tmp.moved) {
-        await fs.rm(tmp.target, { recursive: true, force: true })
-      }
-    }
-  }
-  return { integrity: sri, size: data.length }
-}
-
-module.exports.stream = writeStream
-
-// writes proxied to the 'inputStream' that is passed to the Promise
-// 'end' is deferred until content is handled.
-class CacacheWriteStream extends Flush {
-  constructor (cache, opts) {
-    super()
-    this.opts = opts
-    this.cache = cache
-    this.inputStream = new Minipass()
-    this.inputStream.on('error', er => this.emit('error', er))
-    this.inputStream.on('drain', () => this.emit('drain'))
-    this.handleContentP = null
-  }
-
-  write (chunk, encoding, cb) {
-    if (!this.handleContentP) {
-      this.handleContentP = handleContent(
-        this.inputStream,
-        this.cache,
-        this.opts
-      )
-      this.handleContentP.catch(error => this.emit('error', error))
-    }
-    return this.inputStream.write(chunk, encoding, cb)
-  }
-
-  flush (cb) {
-    this.inputStream.end(() => {
-      if (!this.handleContentP) {
-        const e = new Error('Cache input stream was empty')
-        e.code = 'ENODATA'
-        // empty streams are probably emitting end right away.
-        // defer this one tick by rejecting a promise on it.
-        return Promise.reject(e).catch(cb)
-      }
-      // eslint-disable-next-line promise/catch-or-return
-      this.handleContentP.then(
-        (res) => {
-          res.integrity && this.emit('integrity', res.integrity)
-          // eslint-disable-next-line promise/always-return
-          res.size !== null && this.emit('size', res.size)
-          cb()
-        },
-        (er) => cb(er)
-      )
-    })
-  }
-}
-
-function writeStream (cache, opts = {}) {
-  return new CacacheWriteStream(cache, opts)
-}
-
-async function handleContent (inputStream, cache, opts) {
-  const tmp = await makeTmp(cache, opts)
-  try {
-    const res = await pipeToTmp(inputStream, cache, tmp.target, opts)
-    await moveToDestination(
-      tmp,
-      cache,
-      res.integrity,
-      opts
-    )
-    return res
-  } finally {
-    if (!tmp.moved) {
-      await fs.rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-}
-
-async function pipeToTmp (inputStream, cache, tmpTarget, opts) {
-  const outStream = new fsm.WriteStream(tmpTarget, {
-    flags: 'wx',
-  })
-
-  if (opts.integrityEmitter) {
-    // we need to create these all simultaneously since they can fire in any order
-    const [integrity, size] = await Promise.all([
-      events.once(opts.integrityEmitter, 'integrity').then(res => res[0]),
-      events.once(opts.integrityEmitter, 'size').then(res => res[0]),
-      new Pipeline(inputStream, outStream).promise(),
-    ])
-    return { integrity, size }
-  }
-
-  let integrity
-  let size
-  const hashStream = ssri.integrityStream({
-    integrity: opts.integrity,
-    algorithms: opts.algorithms,
-    size: opts.size,
-  })
-  hashStream.on('integrity', i => {
-    integrity = i
-  })
-  hashStream.on('size', s => {
-    size = s
-  })
-
-  const pipeline = new Pipeline(inputStream, hashStream, outStream)
-  await pipeline.promise()
-  return { integrity, size }
-}
-
-async function makeTmp (cache, opts) {
-  const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-  await fs.mkdir(path.dirname(tmpTarget), { recursive: true })
-  return {
-    target: tmpTarget,
-    moved: false,
-  }
-}
-
-async function moveToDestination (tmp, cache, sri) {
-  const destination = contentPath(cache, sri)
-  const destDir = path.dirname(destination)
-  if (moveOperations.has(destination)) {
-    return moveOperations.get(destination)
-  }
-  moveOperations.set(
-    destination,
-    fs.mkdir(destDir, { recursive: true })
-      .then(async () => {
-        await moveFile(tmp.target, destination, { overwrite: false })
-        tmp.moved = true
-        return tmp.moved
-      })
-      .catch(err => {
-        if (!err.message.startsWith('The destination file exists')) {
-          throw Object.assign(err, { code: 'EEXIST' })
-        }
-      }).finally(() => {
-        moveOperations.delete(destination)
-      })
-
-  )
-  return moveOperations.get(destination)
-}
-
-function sizeError (expected, found) {
-  /* eslint-disable-next-line max-len */
-  const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`)
-  err.expected = expected
-  err.found = found
-  err.code = 'EBADSIZE'
-  return err
-}
-
-function checksumError (expected, found) {
-  const err = new Error(`Integrity check failed:
-  Wanted: ${expected}
-   Found: ${found}`)
-  err.code = 'EINTEGRITY'
-  err.expected = expected
-  err.found = found
-  return err
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
deleted file mode 100644
index 0e09b10818d09..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/entry-index.js
+++ /dev/null
@@ -1,336 +0,0 @@
-'use strict'
-
-const crypto = require('crypto')
-const {
-  appendFile,
-  mkdir,
-  readFile,
-  readdir,
-  rm,
-  writeFile,
-} = require('fs/promises')
-const { Minipass } = require('minipass')
-const path = require('path')
-const ssri = require('ssri')
-const uniqueFilename = require('unique-filename')
-
-const contentPath = require('./content/path')
-const hashToSegments = require('./util/hash-to-segments')
-const indexV = require('../package.json')['cache-version'].index
-const { moveFile } = require('@npmcli/fs')
-
-const lsStreamConcurrency = 5
-
-module.exports.NotFoundError = class NotFoundError extends Error {
-  constructor (cache, key) {
-    super(`No cache entry for ${key} found in ${cache}`)
-    this.code = 'ENOENT'
-    this.cache = cache
-    this.key = key
-  }
-}
-
-module.exports.compact = compact
-
-async function compact (cache, key, matchFn, opts = {}) {
-  const bucket = bucketPath(cache, key)
-  const entries = await bucketEntries(bucket)
-  const newEntries = []
-  // we loop backwards because the bottom-most result is the newest
-  // since we add new entries with appendFile
-  for (let i = entries.length - 1; i >= 0; --i) {
-    const entry = entries[i]
-    // a null integrity could mean either a delete was appended
-    // or the user has simply stored an index that does not map
-    // to any content. we determine if the user wants to keep the
-    // null integrity based on the validateEntry function passed in options.
-    // if the integrity is null and no validateEntry is provided, we break
-    // as we consider the null integrity to be a deletion of everything
-    // that came before it.
-    if (entry.integrity === null && !opts.validateEntry) {
-      break
-    }
-
-    // if this entry is valid, and it is either the first entry or
-    // the newEntries array doesn't already include an entry that
-    // matches this one based on the provided matchFn, then we add
-    // it to the beginning of our list
-    if ((!opts.validateEntry || opts.validateEntry(entry) === true) &&
-      (newEntries.length === 0 ||
-        !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) {
-      newEntries.unshift(entry)
-    }
-  }
-
-  const newIndex = '\n' + newEntries.map((entry) => {
-    const stringified = JSON.stringify(entry)
-    const hash = hashEntry(stringified)
-    return `${hash}\t${stringified}`
-  }).join('\n')
-
-  const setup = async () => {
-    const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix)
-    await mkdir(path.dirname(target), { recursive: true })
-    return {
-      target,
-      moved: false,
-    }
-  }
-
-  const teardown = async (tmp) => {
-    if (!tmp.moved) {
-      return rm(tmp.target, { recursive: true, force: true })
-    }
-  }
-
-  const write = async (tmp) => {
-    await writeFile(tmp.target, newIndex, { flag: 'wx' })
-    await mkdir(path.dirname(bucket), { recursive: true })
-    // we use @npmcli/move-file directly here because we
-    // want to overwrite the existing file
-    await moveFile(tmp.target, bucket)
-    tmp.moved = true
-  }
-
-  // write the file atomically
-  const tmp = await setup()
-  try {
-    await write(tmp)
-  } finally {
-    await teardown(tmp)
-  }
-
-  // we reverse the list we generated such that the newest
-  // entries come first in order to make looping through them easier
-  // the true passed to formatEntry tells it to keep null
-  // integrity values, if they made it this far it's because
-  // validateEntry returned true, and as such we should return it
-  return newEntries.reverse().map((entry) => formatEntry(cache, entry, true))
-}
-
-module.exports.insert = insert
-
-async function insert (cache, key, integrity, opts = {}) {
-  const { metadata, size, time } = opts
-  const bucket = bucketPath(cache, key)
-  const entry = {
-    key,
-    integrity: integrity && ssri.stringify(integrity),
-    time: time || Date.now(),
-    size,
-    metadata,
-  }
-  try {
-    await mkdir(path.dirname(bucket), { recursive: true })
-    const stringified = JSON.stringify(entry)
-    // NOTE - Cleverness ahoy!
-    //
-    // This works because it's tremendously unlikely for an entry to corrupt
-    // another while still preserving the string length of the JSON in
-    // question. So, we just slap the length in there and verify it on read.
-    //
-    // Thanks to @isaacs for the whiteboarding session that ended up with
-    // this.
-    await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return undefined
-    }
-
-    throw err
-  }
-  return formatEntry(cache, entry)
-}
-
-module.exports.find = find
-
-async function find (cache, key) {
-  const bucket = bucketPath(cache, key)
-  try {
-    const entries = await bucketEntries(bucket)
-    return entries.reduce((latest, next) => {
-      if (next && next.key === key) {
-        return formatEntry(cache, next)
-      } else {
-        return latest
-      }
-    }, null)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return null
-    } else {
-      throw err
-    }
-  }
-}
-
-module.exports.delete = del
-
-function del (cache, key, opts = {}) {
-  if (!opts.removeFully) {
-    return insert(cache, key, null, opts)
-  }
-
-  const bucket = bucketPath(cache, key)
-  return rm(bucket, { recursive: true, force: true })
-}
-
-module.exports.lsStream = lsStream
-
-function lsStream (cache) {
-  const indexDir = bucketDir(cache)
-  const stream = new Minipass({ objectMode: true })
-
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const { default: pMap } = await import('p-map')
-    const buckets = await readdirOrEmpty(indexDir)
-    await pMap(buckets, async (bucket) => {
-      const bucketPath = path.join(indexDir, bucket)
-      const subbuckets = await readdirOrEmpty(bucketPath)
-      await pMap(subbuckets, async (subbucket) => {
-        const subbucketPath = path.join(bucketPath, subbucket)
-
-        // "/cachename//./*"
-        const subbucketEntries = await readdirOrEmpty(subbucketPath)
-        await pMap(subbucketEntries, async (entry) => {
-          const entryPath = path.join(subbucketPath, entry)
-          try {
-            const entries = await bucketEntries(entryPath)
-            // using a Map here prevents duplicate keys from showing up
-            // twice, I guess?
-            const reduced = entries.reduce((acc, entry) => {
-              acc.set(entry.key, entry)
-              return acc
-            }, new Map())
-            // reduced is a map of key => entry
-            for (const entry of reduced.values()) {
-              const formatted = formatEntry(cache, entry)
-              if (formatted) {
-                stream.write(formatted)
-              }
-            }
-          } catch (err) {
-            if (err.code === 'ENOENT') {
-              return undefined
-            }
-            throw err
-          }
-        },
-        { concurrency: lsStreamConcurrency })
-      },
-      { concurrency: lsStreamConcurrency })
-    },
-    { concurrency: lsStreamConcurrency })
-    stream.end()
-    return stream
-  }).catch(err => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.ls = ls
-
-async function ls (cache) {
-  const entries = await lsStream(cache).collect()
-  return entries.reduce((acc, xs) => {
-    acc[xs.key] = xs
-    return acc
-  }, {})
-}
-
-module.exports.bucketEntries = bucketEntries
-
-async function bucketEntries (bucket, filter) {
-  const data = await readFile(bucket, 'utf8')
-  return _bucketEntries(data, filter)
-}
-
-function _bucketEntries (data) {
-  const entries = []
-  data.split('\n').forEach((entry) => {
-    if (!entry) {
-      return
-    }
-
-    const pieces = entry.split('\t')
-    if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) {
-      // Hash is no good! Corruption or malice? Doesn't matter!
-      // EJECT EJECT
-      return
-    }
-    let obj
-    try {
-      obj = JSON.parse(pieces[1])
-    } catch (_) {
-      // eslint-ignore-next-line no-empty-block
-    }
-    // coverage disabled here, no need to test with an entry that parses to something falsey
-    // istanbul ignore else
-    if (obj) {
-      entries.push(obj)
-    }
-  })
-  return entries
-}
-
-module.exports.bucketDir = bucketDir
-
-function bucketDir (cache) {
-  return path.join(cache, `index-v${indexV}`)
-}
-
-module.exports.bucketPath = bucketPath
-
-function bucketPath (cache, key) {
-  const hashed = hashKey(key)
-  return path.join.apply(
-    path,
-    [bucketDir(cache)].concat(hashToSegments(hashed))
-  )
-}
-
-module.exports.hashKey = hashKey
-
-function hashKey (key) {
-  return hash(key, 'sha256')
-}
-
-module.exports.hashEntry = hashEntry
-
-function hashEntry (str) {
-  return hash(str, 'sha1')
-}
-
-function hash (str, digest) {
-  return crypto
-    .createHash(digest)
-    .update(str)
-    .digest('hex')
-}
-
-function formatEntry (cache, entry, keepAll) {
-  // Treat null digests as deletions. They'll shadow any previous entries.
-  if (!entry.integrity && !keepAll) {
-    return null
-  }
-
-  return {
-    key: entry.key,
-    integrity: entry.integrity,
-    path: entry.integrity ? contentPath(cache, entry.integrity) : undefined,
-    size: entry.size,
-    time: entry.time,
-    metadata: entry.metadata,
-  }
-}
-
-function readdirOrEmpty (dir) {
-  return readdir(dir).catch((err) => {
-    if (err.code === 'ENOENT' || err.code === 'ENOTDIR') {
-      return []
-    }
-
-    throw err
-  })
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
deleted file mode 100644
index 80ec206c7ecaa..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/get.js
+++ /dev/null
@@ -1,170 +0,0 @@
-'use strict'
-
-const Collect = require('minipass-collect')
-const { Minipass } = require('minipass')
-const Pipeline = require('minipass-pipeline')
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const read = require('./content/read')
-
-async function getData (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return {
-      metadata: memoized.entry.metadata,
-      data: memoized.data,
-      integrity: memoized.entry.integrity,
-      size: memoized.entry.size,
-    }
-  }
-
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  const data = await read(cache, entry.integrity, { integrity, size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return {
-    data,
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-module.exports = getData
-
-async function getDataByDigest (cache, key, opts = {}) {
-  const { integrity, memoize, size } = opts
-  const memoized = memo.get.byDigest(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return memoized
-  }
-
-  const res = await read(cache, key, { integrity, size })
-  if (memoize) {
-    memo.put.byDigest(cache, key, res, opts)
-  }
-  return res
-}
-module.exports.byDigest = getDataByDigest
-
-const getMemoizedStream = (memoized) => {
-  const stream = new Minipass()
-  stream.on('newListener', function (ev, cb) {
-    ev === 'metadata' && cb(memoized.entry.metadata)
-    ev === 'integrity' && cb(memoized.entry.integrity)
-    ev === 'size' && cb(memoized.entry.size)
-  })
-  stream.end(memoized.data)
-  return stream
-}
-
-function getStream (cache, key, opts = {}) {
-  const { memoize, size } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return getMemoizedStream(memoized)
-  }
-
-  const stream = new Pipeline()
-  // Set all this up to run on the stream and then just return the stream
-  Promise.resolve().then(async () => {
-    const entry = await index.find(cache, key)
-    if (!entry) {
-      throw new index.NotFoundError(cache, key)
-    }
-
-    stream.emit('metadata', entry.metadata)
-    stream.emit('integrity', entry.integrity)
-    stream.emit('size', entry.size)
-    stream.on('newListener', function (ev, cb) {
-      ev === 'metadata' && cb(entry.metadata)
-      ev === 'integrity' && cb(entry.integrity)
-      ev === 'size' && cb(entry.size)
-    })
-
-    const src = read.readStream(
-      cache,
-      entry.integrity,
-      { ...opts, size: typeof size !== 'number' ? entry.size : size }
-    )
-
-    if (memoize) {
-      const memoStream = new Collect.PassThrough()
-      memoStream.on('collect', data => memo.put(cache, entry, data, opts))
-      stream.unshift(memoStream)
-    }
-    stream.unshift(src)
-    return stream
-  }).catch((err) => stream.emit('error', err))
-
-  return stream
-}
-
-module.exports.stream = getStream
-
-function getStreamDigest (cache, integrity, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get.byDigest(cache, integrity, opts)
-  if (memoized && memoize !== false) {
-    const stream = new Minipass()
-    stream.end(memoized)
-    return stream
-  } else {
-    const stream = read.readStream(cache, integrity, opts)
-    if (!memoize) {
-      return stream
-    }
-
-    const memoStream = new Collect.PassThrough()
-    memoStream.on('collect', data => memo.put.byDigest(
-      cache,
-      integrity,
-      data,
-      opts
-    ))
-    return new Pipeline(stream, memoStream)
-  }
-}
-
-module.exports.stream.byDigest = getStreamDigest
-
-function info (cache, key, opts = {}) {
-  const { memoize } = opts
-  const memoized = memo.get(cache, key, opts)
-  if (memoized && memoize !== false) {
-    return Promise.resolve(memoized.entry)
-  } else {
-    return index.find(cache, key)
-  }
-}
-module.exports.info = info
-
-async function copy (cache, key, dest, opts = {}) {
-  const entry = await index.find(cache, key, opts)
-  if (!entry) {
-    throw new index.NotFoundError(cache, key)
-  }
-  await read.copy(cache, entry.integrity, dest, opts)
-  return {
-    metadata: entry.metadata,
-    size: entry.size,
-    integrity: entry.integrity,
-  }
-}
-
-module.exports.copy = copy
-
-async function copyByDigest (cache, key, dest, opts = {}) {
-  await read.copy(cache, key, dest, opts)
-  return key
-}
-
-module.exports.copy.byDigest = copyByDigest
-
-module.exports.hasContent = read.hasContent
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
deleted file mode 100644
index c9b0da5f3a271..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/index.js
+++ /dev/null
@@ -1,42 +0,0 @@
-'use strict'
-
-const get = require('./get.js')
-const put = require('./put.js')
-const rm = require('./rm.js')
-const verify = require('./verify.js')
-const { clearMemoized } = require('./memoization.js')
-const tmp = require('./util/tmp.js')
-const index = require('./entry-index.js')
-
-module.exports.index = {}
-module.exports.index.compact = index.compact
-module.exports.index.insert = index.insert
-
-module.exports.ls = index.ls
-module.exports.ls.stream = index.lsStream
-
-module.exports.get = get
-module.exports.get.byDigest = get.byDigest
-module.exports.get.stream = get.stream
-module.exports.get.stream.byDigest = get.stream.byDigest
-module.exports.get.copy = get.copy
-module.exports.get.copy.byDigest = get.copy.byDigest
-module.exports.get.info = get.info
-module.exports.get.hasContent = get.hasContent
-
-module.exports.put = put
-module.exports.put.stream = put.stream
-
-module.exports.rm = rm.entry
-module.exports.rm.all = rm.all
-module.exports.rm.entry = module.exports.rm
-module.exports.rm.content = rm.content
-
-module.exports.clearMemoized = clearMemoized
-
-module.exports.tmp = {}
-module.exports.tmp.mkdir = tmp.mkdir
-module.exports.tmp.withTmp = tmp.withTmp
-
-module.exports.verify = verify
-module.exports.verify.lastRun = verify.lastRun
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
deleted file mode 100644
index 2ecc60912e456..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/memoization.js
+++ /dev/null
@@ -1,72 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-
-const MEMOIZED = new LRUCache({
-  max: 500,
-  maxSize: 50 * 1024 * 1024, // 50MB
-  ttl: 3 * 60 * 1000, // 3 minutes
-  sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length,
-})
-
-module.exports.clearMemoized = clearMemoized
-
-function clearMemoized () {
-  const old = {}
-  MEMOIZED.forEach((v, k) => {
-    old[k] = v
-  })
-  MEMOIZED.clear()
-  return old
-}
-
-module.exports.put = put
-
-function put (cache, entry, data, opts) {
-  pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data })
-  putDigest(cache, entry.integrity, data, opts)
-}
-
-module.exports.put.byDigest = putDigest
-
-function putDigest (cache, integrity, data, opts) {
-  pickMem(opts).set(`digest:${cache}:${integrity}`, data)
-}
-
-module.exports.get = get
-
-function get (cache, key, opts) {
-  return pickMem(opts).get(`key:${cache}:${key}`)
-}
-
-module.exports.get.byDigest = getDigest
-
-function getDigest (cache, integrity, opts) {
-  return pickMem(opts).get(`digest:${cache}:${integrity}`)
-}
-
-class ObjProxy {
-  constructor (obj) {
-    this.obj = obj
-  }
-
-  get (key) {
-    return this.obj[key]
-  }
-
-  set (key, val) {
-    this.obj[key] = val
-  }
-}
-
-function pickMem (opts) {
-  if (!opts || !opts.memoize) {
-    return MEMOIZED
-  } else if (opts.memoize.get && opts.memoize.set) {
-    return opts.memoize
-  } else if (typeof opts.memoize === 'object') {
-    return new ObjProxy(opts.memoize)
-  } else {
-    return MEMOIZED
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
deleted file mode 100644
index 9fc932d5f6dec..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/put.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const index = require('./entry-index')
-const memo = require('./memoization')
-const write = require('./content/write')
-const Flush = require('minipass-flush')
-const { PassThrough } = require('minipass-collect')
-const Pipeline = require('minipass-pipeline')
-
-const putOpts = (opts) => ({
-  algorithms: ['sha512'],
-  ...opts,
-})
-
-module.exports = putData
-
-async function putData (cache, key, data, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  const res = await write(cache, data, opts)
-  const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size })
-  if (memoize) {
-    memo.put(cache, entry, data, opts)
-  }
-
-  return res.integrity
-}
-
-module.exports.stream = putStream
-
-function putStream (cache, key, opts = {}) {
-  const { memoize } = opts
-  opts = putOpts(opts)
-  let integrity
-  let size
-  let error
-
-  let memoData
-  const pipeline = new Pipeline()
-  // first item in the pipeline is the memoizer, because we need
-  // that to end first and get the collected data.
-  if (memoize) {
-    const memoizer = new PassThrough().on('collect', data => {
-      memoData = data
-    })
-    pipeline.push(memoizer)
-  }
-
-  // contentStream is a write-only, not a passthrough
-  // no data comes out of it.
-  const contentStream = write.stream(cache, opts)
-    .on('integrity', (int) => {
-      integrity = int
-    })
-    .on('size', (s) => {
-      size = s
-    })
-    .on('error', (err) => {
-      error = err
-    })
-
-  pipeline.push(contentStream)
-
-  // last but not least, we write the index and emit hash and size,
-  // and memoize if we're doing that
-  pipeline.push(new Flush({
-    async flush () {
-      if (!error) {
-        const entry = await index.insert(cache, key, integrity, { ...opts, size })
-        if (memoize && memoData) {
-          memo.put(cache, entry, memoData, opts)
-        }
-        pipeline.emit('integrity', integrity)
-        pipeline.emit('size', size)
-      }
-    },
-  }))
-
-  return pipeline
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
deleted file mode 100644
index a94760c7cf243..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/rm.js
+++ /dev/null
@@ -1,31 +0,0 @@
-'use strict'
-
-const { rm } = require('fs/promises')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const memo = require('./memoization')
-const path = require('path')
-const rmContent = require('./content/rm')
-
-module.exports = entry
-module.exports.entry = entry
-
-function entry (cache, key, opts) {
-  memo.clearMemoized()
-  return index.delete(cache, key, opts)
-}
-
-module.exports.content = content
-
-function content (cache, integrity) {
-  memo.clearMemoized()
-  return rmContent(cache, integrity)
-}
-
-module.exports.all = all
-
-async function all (cache) {
-  memo.clearMemoized()
-  const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true })
-  return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true })))
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
deleted file mode 100644
index 8500c1c16a429..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/glob.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-const { glob } = require('glob')
-const path = require('path')
-
-const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep)
-module.exports = (path, options) => glob(globify(path), options)
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
deleted file mode 100644
index 445599b503808..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/hash-to-segments.js
+++ /dev/null
@@ -1,7 +0,0 @@
-'use strict'
-
-module.exports = hashToSegments
-
-function hashToSegments (hash) {
-  return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)]
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
deleted file mode 100644
index 0bf5302136ebe..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/util/tmp.js
+++ /dev/null
@@ -1,26 +0,0 @@
-'use strict'
-
-const { withTempDir } = require('@npmcli/fs')
-const fs = require('fs/promises')
-const path = require('path')
-
-module.exports.mkdir = mktmpdir
-
-async function mktmpdir (cache, opts = {}) {
-  const { tmpPrefix } = opts
-  const tmpDir = path.join(cache, 'tmp')
-  await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' })
-  // do not use path.join(), it drops the trailing / if tmpPrefix is unset
-  const target = `${tmpDir}${path.sep}${tmpPrefix || ''}`
-  return fs.mkdtemp(target, { owner: 'inherit' })
-}
-
-module.exports.withTmp = withTmp
-
-function withTmp (cache, opts, cb) {
-  if (!cb) {
-    cb = opts
-    opts = {}
-  }
-  return withTempDir(path.join(cache, 'tmp'), cb, opts)
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
deleted file mode 100644
index dcff3aa73f317..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/lib/verify.js
+++ /dev/null
@@ -1,258 +0,0 @@
-'use strict'
-
-const {
-  mkdir,
-  readFile,
-  rm,
-  stat,
-  truncate,
-  writeFile,
-} = require('fs/promises')
-const contentPath = require('./content/path')
-const fsm = require('fs-minipass')
-const glob = require('./util/glob.js')
-const index = require('./entry-index')
-const path = require('path')
-const ssri = require('ssri')
-
-const hasOwnProperty = (obj, key) =>
-  Object.prototype.hasOwnProperty.call(obj, key)
-
-const verifyOpts = (opts) => ({
-  concurrency: 20,
-  log: { silly () {} },
-  ...opts,
-})
-
-module.exports = verify
-
-async function verify (cache, opts) {
-  opts = verifyOpts(opts)
-  opts.log.silly('verify', 'verifying cache at', cache)
-
-  const steps = [
-    markStartTime,
-    fixPerms,
-    garbageCollect,
-    rebuildIndex,
-    cleanTmp,
-    writeVerifile,
-    markEndTime,
-  ]
-
-  const stats = {}
-  for (const step of steps) {
-    const label = step.name
-    const start = new Date()
-    const s = await step(cache, opts)
-    if (s) {
-      Object.keys(s).forEach((k) => {
-        stats[k] = s[k]
-      })
-    }
-    const end = new Date()
-    if (!stats.runTime) {
-      stats.runTime = {}
-    }
-    stats.runTime[label] = end - start
-  }
-  stats.runTime.total = stats.endTime - stats.startTime
-  opts.log.silly(
-    'verify',
-    'verification finished for',
-    cache,
-    'in',
-    `${stats.runTime.total}ms`
-  )
-  return stats
-}
-
-async function markStartTime () {
-  return { startTime: new Date() }
-}
-
-async function markEndTime () {
-  return { endTime: new Date() }
-}
-
-async function fixPerms (cache, opts) {
-  opts.log.silly('verify', 'fixing cache permissions')
-  await mkdir(cache, { recursive: true })
-  return null
-}
-
-// Implements a naive mark-and-sweep tracing garbage collector.
-//
-// The algorithm is basically as follows:
-// 1. Read (and filter) all index entries ("pointers")
-// 2. Mark each integrity value as "live"
-// 3. Read entire filesystem tree in `content-vX/` dir
-// 4. If content is live, verify its checksum and delete it if it fails
-// 5. If content is not marked as live, rm it.
-//
-async function garbageCollect (cache, opts) {
-  opts.log.silly('verify', 'garbage collecting content')
-  const { default: pMap } = await import('p-map')
-  const indexStream = index.lsStream(cache)
-  const liveContent = new Set()
-  indexStream.on('data', (entry) => {
-    if (opts.filter && !opts.filter(entry)) {
-      return
-    }
-
-    // integrity is stringified, re-parse it so we can get each hash
-    const integrity = ssri.parse(entry.integrity)
-    for (const algo in integrity) {
-      liveContent.add(integrity[algo].toString())
-    }
-  })
-  await new Promise((resolve, reject) => {
-    indexStream.on('end', resolve).on('error', reject)
-  })
-  const contentDir = contentPath.contentDir(cache)
-  const files = await glob(path.join(contentDir, '**'), {
-    follow: false,
-    nodir: true,
-    nosort: true,
-  })
-  const stats = {
-    verifiedContent: 0,
-    reclaimedCount: 0,
-    reclaimedSize: 0,
-    badContentCount: 0,
-    keptSize: 0,
-  }
-  await pMap(
-    files,
-    async (f) => {
-      const split = f.split(/[/\\]/)
-      const digest = split.slice(split.length - 3).join('')
-      const algo = split[split.length - 4]
-      const integrity = ssri.fromHex(digest, algo)
-      if (liveContent.has(integrity.toString())) {
-        const info = await verifyContent(f, integrity)
-        if (!info.valid) {
-          stats.reclaimedCount++
-          stats.badContentCount++
-          stats.reclaimedSize += info.size
-        } else {
-          stats.verifiedContent++
-          stats.keptSize += info.size
-        }
-      } else {
-        // No entries refer to this content. We can delete.
-        stats.reclaimedCount++
-        const s = await stat(f)
-        await rm(f, { recursive: true, force: true })
-        stats.reclaimedSize += s.size
-      }
-      return stats
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function verifyContent (filepath, sri) {
-  const contentInfo = {}
-  try {
-    const { size } = await stat(filepath)
-    contentInfo.size = size
-    contentInfo.valid = true
-    await ssri.checkStream(new fsm.ReadStream(filepath), sri)
-  } catch (err) {
-    if (err.code === 'ENOENT') {
-      return { size: 0, valid: false }
-    }
-    if (err.code !== 'EINTEGRITY') {
-      throw err
-    }
-
-    await rm(filepath, { recursive: true, force: true })
-    contentInfo.valid = false
-  }
-  return contentInfo
-}
-
-async function rebuildIndex (cache, opts) {
-  opts.log.silly('verify', 'rebuilding index')
-  const { default: pMap } = await import('p-map')
-  const entries = await index.ls(cache)
-  const stats = {
-    missingContent: 0,
-    rejectedEntries: 0,
-    totalEntries: 0,
-  }
-  const buckets = {}
-  for (const k in entries) {
-    /* istanbul ignore else */
-    if (hasOwnProperty(entries, k)) {
-      const hashed = index.hashKey(k)
-      const entry = entries[k]
-      const excluded = opts.filter && !opts.filter(entry)
-      excluded && stats.rejectedEntries++
-      if (buckets[hashed] && !excluded) {
-        buckets[hashed].push(entry)
-      } else if (buckets[hashed] && excluded) {
-        // skip
-      } else if (excluded) {
-        buckets[hashed] = []
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      } else {
-        buckets[hashed] = [entry]
-        buckets[hashed]._path = index.bucketPath(cache, k)
-      }
-    }
-  }
-  await pMap(
-    Object.keys(buckets),
-    (key) => {
-      return rebuildBucket(cache, buckets[key], stats, opts)
-    },
-    { concurrency: opts.concurrency }
-  )
-  return stats
-}
-
-async function rebuildBucket (cache, bucket, stats) {
-  await truncate(bucket._path)
-  // This needs to be serialized because cacache explicitly
-  // lets very racy bucket conflicts clobber each other.
-  for (const entry of bucket) {
-    const content = contentPath(cache, entry.integrity)
-    try {
-      await stat(content)
-      await index.insert(cache, entry.key, entry.integrity, {
-        metadata: entry.metadata,
-        size: entry.size,
-        time: entry.time,
-      })
-      stats.totalEntries++
-    } catch (err) {
-      if (err.code === 'ENOENT') {
-        stats.rejectedEntries++
-        stats.missingContent++
-      } else {
-        throw err
-      }
-    }
-  }
-}
-
-function cleanTmp (cache, opts) {
-  opts.log.silly('verify', 'cleaning tmp directory')
-  return rm(path.join(cache, 'tmp'), { recursive: true, force: true })
-}
-
-async function writeVerifile (cache, opts) {
-  const verifile = path.join(cache, '_lastverified')
-  opts.log.silly('verify', 'writing verifile to ' + verifile)
-  return writeFile(verifile, `${Date.now()}`)
-}
-
-module.exports.lastRun = lastRun
-
-async function lastRun (cache) {
-  const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' })
-  return new Date(+data)
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
deleted file mode 100644
index ebb0f3f8ed410..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/cacache/package.json
+++ /dev/null
@@ -1,83 +0,0 @@
-{
-  "name": "cacache",
-  "version": "19.0.1",
-  "cache-version": {
-    "content": "2",
-    "index": "5"
-  },
-  "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "coverage": "tap",
-    "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test",
-    "lint": "npm run eslint",
-    "npmclilint": "npmcli-lint",
-    "lintfix": "npm run eslint -- --fix",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "posttest": "npm run lint",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/cacache.git"
-  },
-  "keywords": [
-    "cache",
-    "caching",
-    "content-addressable",
-    "sri",
-    "sri hash",
-    "subresource integrity",
-    "cache",
-    "storage",
-    "store",
-    "file store",
-    "filesystem",
-    "disk cache",
-    "disk storage"
-  ],
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/fs": "^4.0.0",
-    "fs-minipass": "^3.0.0",
-    "glob": "^10.2.2",
-    "lru-cache": "^10.0.1",
-    "minipass": "^7.0.3",
-    "minipass-collect": "^2.0.1",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "p-map": "^7.0.2",
-    "ssri": "^12.0.0",
-    "tar": "^7.4.3",
-    "unique-filename": "^4.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "windowsCI": false,
-    "version": "4.23.3",
-    "publish": "true"
-  },
-  "author": "GitHub Inc.",
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/LICENSE.md
deleted file mode 100644
index 881248b6d7f0c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/LICENSE.md
+++ /dev/null
@@ -1,63 +0,0 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/index.js
deleted file mode 100644
index 6a7b68d5eac26..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/index.js
+++ /dev/null
@@ -1,93 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.chownrSync = exports.chownr = void 0;
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const lchownSync = (path, uid, gid) => {
-    try {
-        return node_fs_1.default.lchownSync(path, uid, gid);
-    }
-    catch (er) {
-        if (er?.code !== 'ENOENT')
-            throw er;
-    }
-};
-const chown = (cpath, uid, gid, cb) => {
-    node_fs_1.default.lchown(cpath, uid, gid, er => {
-        // Skip ENOENT error
-        cb(er && er?.code !== 'ENOENT' ? er : null);
-    });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
-    if (child.isDirectory()) {
-        (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => {
-            if (er)
-                return cb(er);
-            const cpath = node_path_1.default.resolve(p, child.name);
-            chown(cpath, uid, gid, cb);
-        });
-    }
-    else {
-        const cpath = node_path_1.default.resolve(p, child.name);
-        chown(cpath, uid, gid, cb);
-    }
-};
-const chownr = (p, uid, gid, cb) => {
-    node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => {
-        // any error other than ENOTDIR or ENOTSUP means it's not readable,
-        // or doesn't exist.  give up.
-        if (er) {
-            if (er.code === 'ENOENT')
-                return cb();
-            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-                return cb(er);
-        }
-        if (er || !children.length)
-            return chown(p, uid, gid, cb);
-        let len = children.length;
-        let errState = null;
-        const then = (er) => {
-            /* c8 ignore start */
-            if (errState)
-                return;
-            /* c8 ignore stop */
-            if (er)
-                return cb((errState = er));
-            if (--len === 0)
-                return chown(p, uid, gid, cb);
-        };
-        for (const child of children) {
-            chownrKid(p, child, uid, gid, then);
-        }
-    });
-};
-exports.chownr = chownr;
-const chownrKidSync = (p, child, uid, gid) => {
-    if (child.isDirectory())
-        (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid);
-    lchownSync(node_path_1.default.resolve(p, child.name), uid, gid);
-};
-const chownrSync = (p, uid, gid) => {
-    let children;
-    try {
-        children = node_fs_1.default.readdirSync(p, { withFileTypes: true });
-    }
-    catch (er) {
-        const e = er;
-        if (e?.code === 'ENOENT')
-            return;
-        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
-            return lchownSync(p, uid, gid);
-        else
-            throw e;
-    }
-    for (const child of children) {
-        chownrKidSync(p, child, uid, gid);
-    }
-    return lchownSync(p, uid, gid);
-};
-exports.chownrSync = chownrSync;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/index.js
deleted file mode 100644
index 5c2815297a67c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/index.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import fs from 'node:fs';
-import path from 'node:path';
-const lchownSync = (path, uid, gid) => {
-    try {
-        return fs.lchownSync(path, uid, gid);
-    }
-    catch (er) {
-        if (er?.code !== 'ENOENT')
-            throw er;
-    }
-};
-const chown = (cpath, uid, gid, cb) => {
-    fs.lchown(cpath, uid, gid, er => {
-        // Skip ENOENT error
-        cb(er && er?.code !== 'ENOENT' ? er : null);
-    });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
-    if (child.isDirectory()) {
-        chownr(path.resolve(p, child.name), uid, gid, (er) => {
-            if (er)
-                return cb(er);
-            const cpath = path.resolve(p, child.name);
-            chown(cpath, uid, gid, cb);
-        });
-    }
-    else {
-        const cpath = path.resolve(p, child.name);
-        chown(cpath, uid, gid, cb);
-    }
-};
-export const chownr = (p, uid, gid, cb) => {
-    fs.readdir(p, { withFileTypes: true }, (er, children) => {
-        // any error other than ENOTDIR or ENOTSUP means it's not readable,
-        // or doesn't exist.  give up.
-        if (er) {
-            if (er.code === 'ENOENT')
-                return cb();
-            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-                return cb(er);
-        }
-        if (er || !children.length)
-            return chown(p, uid, gid, cb);
-        let len = children.length;
-        let errState = null;
-        const then = (er) => {
-            /* c8 ignore start */
-            if (errState)
-                return;
-            /* c8 ignore stop */
-            if (er)
-                return cb((errState = er));
-            if (--len === 0)
-                return chown(p, uid, gid, cb);
-        };
-        for (const child of children) {
-            chownrKid(p, child, uid, gid, then);
-        }
-    });
-};
-const chownrKidSync = (p, child, uid, gid) => {
-    if (child.isDirectory())
-        chownrSync(path.resolve(p, child.name), uid, gid);
-    lchownSync(path.resolve(p, child.name), uid, gid);
-};
-export const chownrSync = (p, uid, gid) => {
-    let children;
-    try {
-        children = fs.readdirSync(p, { withFileTypes: true });
-    }
-    catch (er) {
-        const e = er;
-        if (e?.code === 'ENOENT')
-            return;
-        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
-            return lchownSync(p, uid, gid);
-        else
-            throw e;
-    }
-    for (const child of children) {
-        chownrKidSync(p, child, uid, gid);
-    }
-    return lchownSync(p, uid, gid);
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/package.json
deleted file mode 100644
index 09aa6b2e2e576..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/chownr/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "name": "chownr",
-  "description": "like `chown -R`",
-  "version": "3.0.0",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/chownr.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "@types/node": "^20.12.5",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.12"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "license": "BlueOak-1.0.0",
-  "engines": {
-    "node": ">=18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index b4906d2783372..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,392 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const realZlib = __importStar(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            (0, assert_1.default)(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = buffer_1.Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        (0, assert_1.default)(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
-            (0, assert_1.default)(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/constants.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index f33586a8ab0ec..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,340 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import * as realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            assert(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        assert(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-export class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
-        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants.Z_SYNC_FLUSH);
-            assert(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-export class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-export class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-export class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-export class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-export class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-export class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-export class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 0a034db7a73b5..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/package.json
deleted file mode 100644
index 9d04a66e16cd9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-    "name": "mkdirp",
-    "description": "Recursively mkdir, like `mkdir -p`",
-    "version": "3.0.1",
-    "keywords": [
-        "mkdir",
-        "directory",
-        "make dir",
-        "make",
-        "dir",
-        "recursive",
-        "native"
-    ],
-    "bin": "./dist/cjs/src/bin.js",
-    "main": "./dist/cjs/src/index.js",
-    "module": "./dist/mjs/index.js",
-    "types": "./dist/mjs/index.d.ts",
-    "exports": {
-        ".": {
-            "import": {
-                "types": "./dist/mjs/index.d.ts",
-                "default": "./dist/mjs/index.js"
-            },
-            "require": {
-                "types": "./dist/cjs/src/index.d.ts",
-                "default": "./dist/cjs/src/index.js"
-            }
-        }
-    },
-    "files": [
-        "dist"
-    ],
-    "scripts": {
-        "preversion": "npm test",
-        "postversion": "npm publish",
-        "prepublishOnly": "git push origin --follow-tags",
-        "preprepare": "rm -rf dist",
-        "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-        "postprepare": "bash fixup.sh",
-        "pretest": "npm run prepare",
-        "presnap": "npm run prepare",
-        "test": "c8 tap",
-        "snap": "c8 tap",
-        "format": "prettier --write . --loglevel warn",
-        "benchmark": "node benchmark/index.js",
-        "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-    },
-    "prettier": {
-        "semi": false,
-        "printWidth": 80,
-        "tabWidth": 2,
-        "useTabs": false,
-        "singleQuote": true,
-        "jsxSingleQuote": false,
-        "bracketSameLine": true,
-        "arrowParens": "avoid",
-        "endOfLine": "lf"
-    },
-    "devDependencies": {
-        "@types/brace-expansion": "^1.1.0",
-        "@types/node": "^18.11.9",
-        "@types/tap": "^15.0.7",
-        "c8": "^7.12.0",
-        "eslint-config-prettier": "^8.6.0",
-        "prettier": "^2.8.2",
-        "tap": "^16.3.3",
-        "ts-node": "^10.9.1",
-        "typedoc": "^0.23.21",
-        "typescript": "^4.9.3"
-    },
-    "tap": {
-        "coverage": false,
-        "node-arg": [
-            "--no-warnings",
-            "--loader",
-            "ts-node/esm"
-        ],
-        "ts": false
-    },
-    "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-    },
-    "repository": {
-        "type": "git",
-        "url": "https://github.com/isaacs/node-mkdirp.git"
-    },
-    "license": "MIT",
-    "engines": {
-        "node": ">=10"
-    }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/bin.js
deleted file mode 100755
index 757aae1fd96cb..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/bin.js
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env node
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const package_json_1 = require("../package.json");
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
-  Create each supplied directory including any necessary parent directories
-  that don't yet exist.
-
-  If the directory already exists, do nothing.
-
-OPTIONS are:
-
-  -m       If a directory needs to be created, set the mode as an octal
-  --mode=  permission string.
-
-  -v --version   Print the mkdirp version number
-
-  -h --help      Print this helpful banner
-
-  -p --print     Print the first directories created for each path provided
-
-  --manual       Use manual implementation, even if native is available
-`;
-const dirs = [];
-const opts = {};
-let doPrint = false;
-let dashdash = false;
-let manual = false;
-for (const arg of process.argv.slice(2)) {
-    if (dashdash)
-        dirs.push(arg);
-    else if (arg === '--')
-        dashdash = true;
-    else if (arg === '--manual')
-        manual = true;
-    else if (/^-h/.test(arg) || /^--help/.test(arg)) {
-        console.log(usage());
-        process.exit(0);
-    }
-    else if (arg === '-v' || arg === '--version') {
-        console.log(package_json_1.version);
-        process.exit(0);
-    }
-    else if (arg === '-p' || arg === '--print') {
-        doPrint = true;
-    }
-    else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
-        // these don't get covered in CI, but work locally
-        // weird because the tests below show as passing in the output.
-        /* c8 ignore start */
-        const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
-        if (isNaN(mode)) {
-            console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
-            process.exit(1);
-        }
-        /* c8 ignore stop */
-        opts.mode = mode;
-    }
-    else
-        dirs.push(arg);
-}
-const index_js_1 = require("./index.js");
-const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
-if (dirs.length === 0) {
-    console.error(usage());
-}
-// these don't get covered in CI, but work locally
-/* c8 ignore start */
-Promise.all(dirs.map(dir => impl(dir, opts)))
-    .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
-    .catch(er => {
-    console.error(er.message);
-    if (er.code)
-        console.error('  code: ' + er.code);
-    process.exit(1);
-});
-/* c8 ignore stop */
-//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/find-made.js
deleted file mode 100644
index e831ef27cadc1..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/find-made.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.findMadeSync = exports.findMade = void 0;
-const path_1 = require("path");
-const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    });
-};
-exports.findMade = findMade;
-const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    }
-};
-exports.findMadeSync = findMadeSync;
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/index.js
deleted file mode 100644
index ab9dc62cddda3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/index.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const mkdirp_native_js_1 = require("./mkdirp-native.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const path_arg_js_1 = require("./path-arg.js");
-const use_native_js_1 = require("./use-native.js");
-/* c8 ignore start */
-var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
-Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
-Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
-var mkdirp_native_js_2 = require("./mkdirp-native.js");
-Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
-Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
-var use_native_js_2 = require("./use-native.js");
-Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
-Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
-/* c8 ignore stop */
-const mkdirpSync = (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNativeSync)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
-};
-exports.mkdirpSync = mkdirpSync;
-exports.sync = exports.mkdirpSync;
-exports.manual = mkdirp_manual_js_1.mkdirpManual;
-exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
-exports.native = mkdirp_native_js_1.mkdirpNative;
-exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
-exports.mkdirp = Object.assign(async (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNative)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
-}, {
-    mkdirpSync: exports.mkdirpSync,
-    mkdirpNative: mkdirp_native_js_1.mkdirpNative,
-    mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
-    mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    sync: exports.mkdirpSync,
-    native: mkdirp_native_js_1.mkdirpNative,
-    nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    manual: mkdirp_manual_js_1.mkdirpManual,
-    manualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    useNative: use_native_js_1.useNative,
-    useNativeSync: use_native_js_1.useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
deleted file mode 100644
index d9bd1d8bb5a49..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
+++ /dev/null
@@ -1,79 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpManual = exports.mkdirpManualSync = void 0;
-const path_1 = require("path");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpManualSync = (path, options, made) => {
-    const parent = (0, path_1.dirname)(path);
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-exports.mkdirpManualSync = mkdirpManualSync;
-exports.mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = false;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: exports.mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
deleted file mode 100644
index 9f00567d7cc20..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
-const path_1 = require("path");
-const find_made_js_1 = require("./find-made.js");
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpNativeSync = (path, options) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = true;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = (0, find_made_js_1.findMadeSync)(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-exports.mkdirpNativeSync = mkdirpNativeSync;
-exports.mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: exports.mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/opts-arg.js
deleted file mode 100644
index e8f486c090595..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/opts-arg.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.optsArg = void 0;
-const fs_1 = require("fs");
-const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || fs_1.stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
-    return resolved;
-};
-exports.optsArg = optsArg;
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/path-arg.js
deleted file mode 100644
index a6b457f6e23d5..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/path-arg.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.pathArg = void 0;
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-const path_1 = require("path");
-const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = (0, path_1.resolve)(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = (0, path_1.parse)(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-exports.pathArg = pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/use-native.js
deleted file mode 100644
index 550b3452688ee..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/cjs/src/use-native.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.useNative = exports.useNativeSync = void 0;
-const fs_1 = require("fs");
-const opts_arg_js_1 = require("./opts-arg.js");
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-exports.useNativeSync = !hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
-exports.useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
-    sync: exports.useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/find-made.js
deleted file mode 100644
index 3e72fd59a2c1f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/find-made.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { dirname } from 'path';
-export const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMade(opts, dirname(parent), parent)
-            : undefined;
-    });
-};
-export const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMadeSync(opts, dirname(parent), parent)
-            : undefined;
-    }
-};
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/index.js
deleted file mode 100644
index 0217ecc8cdd83..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-import { optsArg } from './opts-arg.js';
-import { pathArg } from './path-arg.js';
-import { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore start */
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore stop */
-export const mkdirpSync = (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNativeSync(resolved)
-        ? mkdirpNativeSync(path, resolved)
-        : mkdirpManualSync(path, resolved);
-};
-export const sync = mkdirpSync;
-export const manual = mkdirpManual;
-export const manualSync = mkdirpManualSync;
-export const native = mkdirpNative;
-export const nativeSync = mkdirpNativeSync;
-export const mkdirp = Object.assign(async (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNative(resolved)
-        ? mkdirpNative(path, resolved)
-        : mkdirpManual(path, resolved);
-}, {
-    mkdirpSync,
-    mkdirpNative,
-    mkdirpNativeSync,
-    mkdirpManual,
-    mkdirpManualSync,
-    sync: mkdirpSync,
-    native: mkdirpNative,
-    nativeSync: mkdirpNativeSync,
-    manual: mkdirpManual,
-    manualSync: mkdirpManualSync,
-    useNative,
-    useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
deleted file mode 100644
index a4d044e02d3bf..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { dirname } from 'path';
-import { optsArg } from './opts-arg.js';
-export const mkdirpManualSync = (path, options, made) => {
-    const parent = dirname(path);
-    const opts = { ...optsArg(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-export const mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = optsArg(options);
-    opts.recursive = false;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-native.js
deleted file mode 100644
index 99d10a5425dad..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/mkdirp-native.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import { dirname } from 'path';
-import { findMade, findMadeSync } from './find-made.js';
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { optsArg } from './opts-arg.js';
-export const mkdirpNativeSync = (path, options) => {
-    const opts = optsArg(options);
-    opts.recursive = true;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = findMadeSync(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-export const mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...optsArg(options), recursive: true };
-    const parent = dirname(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return findMade(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/opts-arg.js
deleted file mode 100644
index d47e2927fee4c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/opts-arg.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { mkdir, mkdirSync, stat, statSync, } from 'fs';
-export const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
-    return resolved;
-};
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/path-arg.js
deleted file mode 100644
index 03539cc5a94f9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/path-arg.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-import { parse, resolve } from 'path';
-export const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = resolve(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = parse(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/use-native.js
deleted file mode 100644
index ad2093867eb74..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/dist/mjs/use-native.js
+++ /dev/null
@@ -1,14 +0,0 @@
-import { mkdir, mkdirSync } from 'fs';
-import { optsArg } from './opts-arg.js';
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-export const useNativeSync = !hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdirSync === mkdirSync;
-export const useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdir === mkdir, {
-    sync: useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/package.json
deleted file mode 100644
index f31ac3314d6f6..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-  "name": "mkdirp",
-  "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "3.0.1",
-  "keywords": [
-    "mkdir",
-    "directory",
-    "make dir",
-    "make",
-    "dir",
-    "recursive",
-    "native"
-  ],
-  "bin": "./dist/cjs/src/bin.js",
-  "main": "./dist/cjs/src/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/mjs/index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/src/index.d.ts",
-        "default": "./dist/cjs/src/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.11.9",
-    "@types/tap": "^15.0.7",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
-    "prettier": "^2.8.2",
-    "tap": "^16.3.3",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
-  },
-  "license": "MIT",
-  "engines": {
-    "node": ">=10"
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/LICENSE b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/create.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/create.js
deleted file mode 100644
index 3190afc48318f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/create.js
+++ /dev/null
@@ -1,83 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.create = void 0;
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_path_1 = __importDefault(require("node:path"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const pack_js_1 = require("./pack.js");
-const createFileSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
-    const p = new pack_js_1.Pack(opt);
-    const stream = new fs_minipass_1.WriteStream(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    const promise = new Promise((res, rej) => {
-        stream.on('error', rej);
-        stream.on('close', res);
-        p.on('error', rej);
-    });
-    addFilesAsync(p, files);
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            (0, list_js_1.list)({
-                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await (0, list_js_1.list)({
-                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => {
-                    p.add(entry);
-                },
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-const createSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    addFilesSync(p, files);
-    return p;
-};
-const createAsync = (opt, files) => {
-    const p = new pack_js_1.Pack(opt);
-    addFilesAsync(p, files);
-    return p;
-};
-exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
-    if (!files?.length) {
-        throw new TypeError('no paths specified to add to archive');
-    }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/cwd-error.js
deleted file mode 100644
index d703a7772be3a..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/cwd-error.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CwdError = void 0;
-class CwdError extends Error {
-    path;
-    code;
-    syscall = 'chdir';
-    constructor(path, code) {
-        super(`${code}: Cannot cd into '${path}'`);
-        this.path = path;
-        this.code = code;
-    }
-    get name() {
-        return 'CwdError';
-    }
-}
-exports.CwdError = CwdError;
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/extract.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/extract.js
deleted file mode 100644
index f848cbcbf779e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/extract.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extract = void 0;
-// tar -x
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const unpack_js_1 = require("./unpack.js");
-const extractFileSync = (opt) => {
-    const u = new unpack_js_1.UnpackSync(opt);
-    const file = opt.file;
-    const stat = node_fs_1.default.statSync(file);
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const stream = new fsm.ReadStreamSync(file, {
-        readSize: readSize,
-        size: stat.size,
-    });
-    stream.pipe(u);
-};
-const extractFile = (opt, _) => {
-    const u = new unpack_js_1.Unpack(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        u.on('error', reject);
-        u.on('close', resolve);
-        // This trades a zero-byte read() syscall for a stat
-        // However, it will usually result in less memory allocation
-        node_fs_1.default.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(u);
-            }
-        });
-    });
-    return p;
-};
-exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
-    if (files?.length)
-        (0, list_js_1.filesFilter)(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/get-write-flag.js
deleted file mode 100644
index 94add8f6b2231..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/get-write-flag.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getWriteFlag = void 0;
-const fs_1 = __importDefault(require("fs"));
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
-    fs_1.default.constants.UV_FS_O_FILEMAP ||
-    0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-exports.getWriteFlag = !fMapEnabled ?
-    () => 'w'
-    : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/header.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/header.js
deleted file mode 100644
index b3a48037b849a..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/header.js
+++ /dev/null
@@ -1,306 +0,0 @@
-"use strict";
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Header = void 0;
-const node_path_1 = require("node:path");
-const large = __importStar(require("./large-numbers.js"));
-const types = __importStar(require("./types.js"));
-class Header {
-    cksumValid = false;
-    needPax = false;
-    nullBlock = false;
-    block;
-    path;
-    mode;
-    uid;
-    gid;
-    size;
-    cksum;
-    #type = 'Unsupported';
-    linkpath;
-    uname;
-    gname;
-    devmaj = 0;
-    devmin = 0;
-    atime;
-    ctime;
-    mtime;
-    charset;
-    comment;
-    constructor(data, off = 0, ex, gex) {
-        if (Buffer.isBuffer(data)) {
-            this.decode(data, off || 0, ex, gex);
-        }
-        else if (data) {
-            this.#slurp(data);
-        }
-    }
-    decode(buf, off, ex, gex) {
-        if (!off) {
-            off = 0;
-        }
-        if (!buf || !(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
-        this.cksum = decNumber(buf, off + 148, 12);
-        // if we have extended or global extended headers, apply them now
-        // See https://github.com/npm/node-tar/pull/187
-        // Apply global before local, so it overrides
-        if (gex)
-            this.#slurp(gex, true);
-        if (ex)
-            this.#slurp(ex);
-        // old tar versions marked dirs as a file with a trailing /
-        const t = decString(buf, off + 156, 1);
-        if (types.isCode(t)) {
-            this.#type = t || '0';
-        }
-        if (this.#type === '0' && this.path.slice(-1) === '/') {
-            this.#type = '5';
-        }
-        // tar implementations sometimes incorrectly put the stat(dir).size
-        // as the size in the tarball, even though Directory entries are
-        // not able to have any body at all.  In the very rare chance that
-        // it actually DOES have a body, we weren't going to do anything with
-        // it anyway, and it'll just be a warning about an invalid header.
-        if (this.#type === '5') {
-            this.size = 0;
-        }
-        this.linkpath = decString(buf, off + 157, 100);
-        if (buf.subarray(off + 257, off + 265).toString() ===
-            'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
-            /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
-            /* c8 ignore stop */
-            if (buf[off + 475] !== 0) {
-                // definitely a prefix, definitely >130 chars.
-                const prefix = decString(buf, off + 345, 155);
-                this.path = prefix + '/' + this.path;
-            }
-            else {
-                const prefix = decString(buf, off + 345, 130);
-                if (prefix) {
-                    this.path = prefix + '/' + this.path;
-                }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
-            }
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksumValid = sum === this.cksum;
-        if (this.cksum === undefined && sum === 8 * 0x20) {
-            this.nullBlock = true;
-        }
-    }
-    #slurp(ex, gex = false) {
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex) ||
-                (k === 'linkpath' && gex) ||
-                k === 'global');
-        })));
-    }
-    encode(buf, off = 0) {
-        if (!buf) {
-            buf = this.block = Buffer.alloc(512);
-        }
-        if (this.#type === 'Unsupported') {
-            this.#type = '0';
-        }
-        if (!(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        const prefixSize = this.ctime || this.atime ? 130 : 155;
-        const split = splitPrefix(this.path || '', prefixSize);
-        const path = split[0];
-        const prefix = split[1];
-        this.needPax = !!split[2];
-        this.needPax = encString(buf, off, 100, path) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 124, 12, this.size) || this.needPax;
-        this.needPax =
-            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
-        buf[off + 156] = this.#type.charCodeAt(0);
-        this.needPax =
-            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
-        buf.write('ustar\u000000', off + 257, 8);
-        this.needPax =
-            encString(buf, off + 265, 32, this.uname) || this.needPax;
-        this.needPax =
-            encString(buf, off + 297, 32, this.gname) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
-        this.needPax =
-            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
-        if (buf[off + 475] !== 0) {
-            this.needPax =
-                encString(buf, off + 345, 155, prefix) || this.needPax;
-        }
-        else {
-            this.needPax =
-                encString(buf, off + 345, 130, prefix) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 476, 12, this.atime) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksum = sum;
-        encNumber(buf, off + 148, 8, this.cksum);
-        this.cksumValid = true;
-        return this.needPax;
-    }
-    get type() {
-        return (this.#type === 'Unsupported' ?
-            this.#type
-            : types.name.get(this.#type));
-    }
-    get typeKey() {
-        return this.#type;
-    }
-    set type(type) {
-        const c = String(types.code.get(type));
-        if (types.isCode(c) || c === 'Unsupported') {
-            this.#type = c;
-        }
-        else if (types.isCode(type)) {
-            this.#type = type;
-        }
-        else {
-            throw new TypeError('invalid entry type: ' + type);
-        }
-    }
-}
-exports.Header = Header;
-const splitPrefix = (p, prefixSize) => {
-    const pathSize = 100;
-    let pp = p;
-    let prefix = '';
-    let ret = undefined;
-    const root = node_path_1.posix.parse(p).root || '.';
-    if (Buffer.byteLength(pp) < pathSize) {
-        ret = [pp, prefix, false];
-    }
-    else {
-        // first set prefix to the dir, and path to the base
-        prefix = node_path_1.posix.dirname(pp);
-        pp = node_path_1.posix.basename(pp);
-        do {
-            if (Buffer.byteLength(pp) <= pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // both fit!
-                ret = [pp, prefix, false];
-            }
-            else if (Buffer.byteLength(pp) > pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // prefix fits in prefix, but path doesn't fit in path
-                ret = [pp.slice(0, pathSize - 1), prefix, true];
-            }
-            else {
-                // make path take a bit from prefix
-                pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp);
-                prefix = node_path_1.posix.dirname(prefix);
-            }
-        } while (prefix !== root && ret === undefined);
-        // at this point, found no resolution, just truncate
-        if (!ret) {
-            ret = [p.slice(0, pathSize - 1), '', true];
-        }
-    }
-    return ret;
-};
-const decString = (buf, off, size) => buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
-    large.parse(buf.subarray(off, off + size))
-    : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*$/, '')
-    .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-    12: 0o77777777777,
-    8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
-    : num > MAXNUM[size] || num < 0 ?
-        (large.encode(num, buf.subarray(off, off + size)), true)
-        : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
-    str
-    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
-    str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/index.js
deleted file mode 100644
index e93ed5ad54aa6..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/index.js
+++ /dev/null
@@ -1,54 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
-__exportStar(require("./create.js"), exports);
-var create_js_1 = require("./create.js");
-Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } });
-__exportStar(require("./extract.js"), exports);
-var extract_js_1 = require("./extract.js");
-Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } });
-__exportStar(require("./header.js"), exports);
-__exportStar(require("./list.js"), exports);
-var list_js_1 = require("./list.js");
-Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } });
-// classes
-__exportStar(require("./pack.js"), exports);
-__exportStar(require("./parse.js"), exports);
-__exportStar(require("./pax.js"), exports);
-__exportStar(require("./read-entry.js"), exports);
-__exportStar(require("./replace.js"), exports);
-var replace_js_1 = require("./replace.js");
-Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } });
-exports.types = __importStar(require("./types.js"));
-__exportStar(require("./unpack.js"), exports);
-__exportStar(require("./update.js"), exports);
-var update_js_1 = require("./update.js");
-Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } });
-__exportStar(require("./write-entry.js"), exports);
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/large-numbers.js
deleted file mode 100644
index 5b07aa7f71b48..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/large-numbers.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parse = exports.encode = void 0;
-const encode = (num, buf) => {
-    if (!Number.isSafeInteger(num)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('cannot encode number outside of javascript safe integer range');
-    }
-    else if (num < 0) {
-        encodeNegative(num, buf);
-    }
-    else {
-        encodePositive(num, buf);
-    }
-    return buf;
-};
-exports.encode = encode;
-const encodePositive = (num, buf) => {
-    buf[0] = 0x80;
-    for (var i = buf.length; i > 1; i--) {
-        buf[i - 1] = num & 0xff;
-        num = Math.floor(num / 0x100);
-    }
-};
-const encodeNegative = (num, buf) => {
-    buf[0] = 0xff;
-    var flipped = false;
-    num = num * -1;
-    for (var i = buf.length; i > 1; i--) {
-        var byte = num & 0xff;
-        num = Math.floor(num / 0x100);
-        if (flipped) {
-            buf[i - 1] = onesComp(byte);
-        }
-        else if (byte === 0) {
-            buf[i - 1] = 0;
-        }
-        else {
-            flipped = true;
-            buf[i - 1] = twosComp(byte);
-        }
-    }
-};
-const parse = (buf) => {
-    const pre = buf[0];
-    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
-        : pre === 0xff ? twos(buf)
-            : null;
-    if (value === null) {
-        throw Error('invalid base256 encoding');
-    }
-    if (!Number.isSafeInteger(value)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('parsed number outside of javascript safe integer range');
-    }
-    return value;
-};
-exports.parse = parse;
-const twos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    var flipped = false;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        var f;
-        if (flipped) {
-            f = onesComp(byte);
-        }
-        else if (byte === 0) {
-            f = byte;
-        }
-        else {
-            flipped = true;
-            f = twosComp(byte);
-        }
-        if (f !== 0) {
-            sum -= f * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const pos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        if (byte !== 0) {
-            sum += byte * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/list.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/list.js
deleted file mode 100644
index 3cd34bb4bad48..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/list.js
+++ /dev/null
@@ -1,136 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.list = exports.filesFilter = void 0;
-// tar -t
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const path_1 = require("path");
-const make_command_js_1 = require("./make-command.js");
-const parse_js_1 = require("./parse.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const onReadEntryFunction = (opt) => {
-    const onReadEntry = opt.onReadEntry;
-    opt.onReadEntry =
-        onReadEntry ?
-            e => {
-                onReadEntry(e);
-                e.resume();
-            }
-            : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
-    const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true]));
-    const filter = opt.filter;
-    const mapHas = (file, r = '') => {
-        const root = r || (0, path_1.parse)(file).root || '.';
-        let ret;
-        if (file === root)
-            ret = false;
-        else {
-            const m = map.get(file);
-            if (m !== undefined) {
-                ret = m;
-            }
-            else {
-                ret = mapHas((0, path_1.dirname)(file), root);
-            }
-        }
-        map.set(file, ret);
-        return ret;
-    };
-    opt.filter =
-        filter ?
-            (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file))
-            : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file));
-};
-exports.filesFilter = filesFilter;
-const listFileSync = (opt) => {
-    const p = new parse_js_1.Parser(opt);
-    const file = opt.file;
-    let fd;
-    try {
-        const stat = node_fs_1.default.statSync(file);
-        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-        if (stat.size < readSize) {
-            p.end(node_fs_1.default.readFileSync(file));
-        }
-        else {
-            let pos = 0;
-            const buf = Buffer.allocUnsafe(readSize);
-            fd = node_fs_1.default.openSync(file, 'r');
-            while (pos < stat.size) {
-                const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
-                pos += bytesRead;
-                p.write(buf.subarray(0, bytesRead));
-            }
-            p.end();
-        }
-    }
-    finally {
-        if (typeof fd === 'number') {
-            try {
-                node_fs_1.default.closeSync(fd);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-    }
-};
-const listFile = (opt, _files) => {
-    const parse = new parse_js_1.Parser(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        parse.on('error', reject);
-        parse.on('end', resolve);
-        node_fs_1.default.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(parse);
-            }
-        });
-    });
-    return p;
-};
-exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => {
-    if (files?.length)
-        (0, exports.filesFilter)(opt, files);
-    if (!opt.noResume)
-        onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/make-command.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/make-command.js
deleted file mode 100644
index 1814319e78bc6..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/make-command.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeCommand = void 0;
-const options_js_1 = require("./options.js");
-const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
-    return Object.assign((opt_ = [], entries, cb) => {
-        if (Array.isArray(opt_)) {
-            entries = opt_;
-            opt_ = {};
-        }
-        if (typeof entries === 'function') {
-            cb = entries;
-            entries = undefined;
-        }
-        if (!entries) {
-            entries = [];
-        }
-        else {
-            entries = Array.from(entries);
-        }
-        const opt = (0, options_js_1.dealias)(opt_);
-        validate?.(opt, entries);
-        if ((0, options_js_1.isSyncFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncFile(opt, entries);
-        }
-        else if ((0, options_js_1.isAsyncFile)(opt)) {
-            const p = asyncFile(opt, entries);
-            // weirdness to make TS happy
-            const c = cb ? cb : undefined;
-            return c ? p.then(() => c(), c) : p;
-        }
-        else if ((0, options_js_1.isSyncNoFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncNoFile(opt, entries);
-        }
-        else if ((0, options_js_1.isAsyncNoFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback only supported with file option');
-            }
-            return asyncNoFile(opt, entries);
-            /* c8 ignore start */
-        }
-        else {
-            throw new Error('impossible options??');
-        }
-        /* c8 ignore stop */
-    }, {
-        syncFile,
-        asyncFile,
-        syncNoFile,
-        asyncNoFile,
-        validate,
-    });
-};
-exports.makeCommand = makeCommand;
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mkdir.js
deleted file mode 100644
index 2b13ecbab6723..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mkdir.js
+++ /dev/null
@@ -1,209 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirSync = exports.mkdir = void 0;
-const chownr_1 = require("chownr");
-const fs_1 = __importDefault(require("fs"));
-const mkdirp_1 = require("mkdirp");
-const node_path_1 = __importDefault(require("node:path"));
-const cwd_error_js_1 = require("./cwd-error.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const symlink_error_js_1 = require("./symlink-error.js");
-const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
-const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
-const checkCwd = (dir, cb) => {
-    fs_1.default.stat(dir, (er, st) => {
-        if (er || !st.isDirectory()) {
-            er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
-        }
-        cb(er);
-    });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-const mkdir = (dir, opt, cb) => {
-    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o0700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
-    const done = (er, created) => {
-        if (er) {
-            cb(er);
-        }
-        else {
-            cSet(cache, dir, true);
-            if (created && doChown) {
-                (0, chownr_1.chownr)(created, uid, gid, er => done(er));
-            }
-            else if (needChmod) {
-                fs_1.default.chmod(dir, mode, cb);
-            }
-            else {
-                cb();
-            }
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        return checkCwd(dir, done);
-    }
-    if (preserve) {
-        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
-        done);
-    }
-    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
-    const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-exports.mkdir = mkdir;
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-    if (!parts.length) {
-        return cb(null, created);
-    }
-    const p = parts.shift();
-    const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
-    if (er) {
-        fs_1.default.lstat(part, (statEr, st) => {
-            if (statEr) {
-                statEr.path =
-                    statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
-                cb(statEr);
-            }
-            else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-            }
-            else if (unlink) {
-                fs_1.default.unlink(part, er => {
-                    if (er) {
-                        return cb(er);
-                    }
-                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-                });
-            }
-            else if (st.isSymbolicLink()) {
-                return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
-            }
-            else {
-                cb(er);
-            }
-        });
-    }
-    else {
-        created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-};
-const checkCwdSync = (dir) => {
-    let ok = false;
-    let code = undefined;
-    try {
-        ok = fs_1.default.statSync(dir).isDirectory();
-    }
-    catch (er) {
-        code = er?.code;
-    }
-    finally {
-        if (!ok) {
-            throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
-        }
-    }
-};
-const mkdirSync = (dir, opt) => {
-    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
-    const done = (created) => {
-        cSet(cache, dir, true);
-        if (created && doChown) {
-            (0, chownr_1.chownrSync)(created, uid, gid);
-        }
-        if (needChmod) {
-            fs_1.default.chmodSync(dir, mode);
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        checkCwdSync(cwd);
-        return done();
-    }
-    if (preserve) {
-        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
-    }
-    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
-    const parts = sub.split('/');
-    let created = undefined;
-    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
-        part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
-        try {
-            fs_1.default.mkdirSync(part, mode);
-            created = created || part;
-            cSet(cache, part, true);
-        }
-        catch (er) {
-            const st = fs_1.default.lstatSync(part);
-            if (st.isDirectory()) {
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (unlink) {
-                fs_1.default.unlinkSync(part);
-                fs_1.default.mkdirSync(part, mode);
-                created = created || part;
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (st.isSymbolicLink()) {
-                return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
-            }
-        }
-    }
-    return done(created);
-};
-exports.mkdirSync = mkdirSync;
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mode-fix.js
deleted file mode 100644
index 49dd727961d29..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/mode-fix.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.modeFix = void 0;
-const modeFix = (mode, isDir, portable) => {
-    mode &= 0o7777;
-    // in portable mode, use the minimum reasonable umask
-    // if this system creates files with 0o664 by default
-    // (as some linux distros do), then we'll write the
-    // archive with 0o644 instead.  Also, don't ever create
-    // a file that is not readable/writable by the owner.
-    if (portable) {
-        mode = (mode | 0o600) & ~0o22;
-    }
-    // if dirs are readable, then they should be listable
-    if (isDir) {
-        if (mode & 0o400) {
-            mode |= 0o100;
-        }
-        if (mode & 0o40) {
-            mode |= 0o10;
-        }
-        if (mode & 0o4) {
-            mode |= 0o1;
-        }
-    }
-    return mode;
-};
-exports.modeFix = modeFix;
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-unicode.js
deleted file mode 100644
index 2f08ce46d98c4..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-unicode.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeUnicode = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-exports.normalizeUnicode = normalizeUnicode;
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-windows-path.js
deleted file mode 100644
index b0c7aaa9f2d17..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/normalize-windows-path.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeWindowsPath = void 0;
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-exports.normalizeWindowsPath = platform !== 'win32' ?
-    (p) => p
-    : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/options.js
deleted file mode 100644
index 4cd06505bc72b..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/options.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
-const argmap = new Map([
-    ['C', 'cwd'],
-    ['f', 'file'],
-    ['z', 'gzip'],
-    ['P', 'preservePaths'],
-    ['U', 'unlink'],
-    ['strip-components', 'strip'],
-    ['stripComponents', 'strip'],
-    ['keep-newer', 'newer'],
-    ['keepNewer', 'newer'],
-    ['keep-newer-files', 'newer'],
-    ['keepNewerFiles', 'newer'],
-    ['k', 'keep'],
-    ['keep-existing', 'keep'],
-    ['keepExisting', 'keep'],
-    ['m', 'noMtime'],
-    ['no-mtime', 'noMtime'],
-    ['p', 'preserveOwner'],
-    ['L', 'follow'],
-    ['h', 'follow'],
-    ['onentry', 'onReadEntry'],
-]);
-const isSyncFile = (o) => !!o.sync && !!o.file;
-exports.isSyncFile = isSyncFile;
-const isAsyncFile = (o) => !o.sync && !!o.file;
-exports.isAsyncFile = isAsyncFile;
-const isSyncNoFile = (o) => !!o.sync && !o.file;
-exports.isSyncNoFile = isSyncNoFile;
-const isAsyncNoFile = (o) => !o.sync && !o.file;
-exports.isAsyncNoFile = isAsyncNoFile;
-const isSync = (o) => !!o.sync;
-exports.isSync = isSync;
-const isAsync = (o) => !o.sync;
-exports.isAsync = isAsync;
-const isFile = (o) => !!o.file;
-exports.isFile = isFile;
-const isNoFile = (o) => !o.file;
-exports.isNoFile = isNoFile;
-const dealiasKey = (k) => {
-    const d = argmap.get(k);
-    if (d)
-        return d;
-    return k;
-};
-const dealias = (opt = {}) => {
-    if (!opt)
-        return {};
-    const result = {};
-    for (const [key, v] of Object.entries(opt)) {
-        // TS doesn't know that aliases are going to always be the same type
-        const k = dealiasKey(key);
-        result[k] = v;
-    }
-    // affordance for deprecated noChmod -> chmod
-    if (result.chmod === undefined && result.noChmod === false) {
-        result.chmod = true;
-    }
-    delete result.noChmod;
-    return result;
-};
-exports.dealias = dealias;
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pack.js
deleted file mode 100644
index 303e93063c2db..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pack.js
+++ /dev/null
@@ -1,477 +0,0 @@
-"use strict";
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PackSync = exports.Pack = exports.PackJob = void 0;
-const fs_1 = __importDefault(require("fs"));
-const write_entry_js_1 = require("./write-entry.js");
-class PackJob {
-    path;
-    absolute;
-    entry;
-    stat;
-    readdir;
-    pending = false;
-    ignore = false;
-    piped = false;
-    constructor(path, absolute) {
-        this.path = path || './';
-        this.absolute = absolute;
-    }
-}
-exports.PackJob = PackJob;
-const minipass_1 = require("minipass");
-const zlib = __importStar(require("minizlib"));
-const yallist_1 = require("yallist");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-const path_1 = __importDefault(require("path"));
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class Pack extends minipass_1.Minipass {
-    opt;
-    cwd;
-    maxReadSize;
-    preservePaths;
-    strict;
-    noPax;
-    prefix;
-    linkCache;
-    statCache;
-    file;
-    portable;
-    zip;
-    readdirCache;
-    noDirRecurse;
-    follow;
-    noMtime;
-    mtime;
-    filter;
-    jobs;
-    [WRITEENTRYCLASS];
-    onWriteEntry;
-    [QUEUE];
-    [JOBS] = 0;
-    [PROCESSING] = false;
-    [ENDED] = false;
-    constructor(opt = {}) {
-        //@ts-ignore
-        super();
-        this.opt = opt;
-        this.file = opt.file || '';
-        this.cwd = opt.cwd || process.cwd();
-        this.maxReadSize = opt.maxReadSize;
-        this.preservePaths = !!opt.preservePaths;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.readdirCache = opt.readdirCache || new Map();
-        this.onWriteEntry = opt.onWriteEntry;
-        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
-            }
-            if (opt.gzip) {
-                if (typeof opt.gzip !== 'object') {
-                    opt.gzip = {};
-                }
-                if (this.portable) {
-                    opt.gzip.portable = true;
-                }
-                this.zip = new zlib.Gzip(opt.gzip);
-            }
-            if (opt.brotli) {
-                if (typeof opt.brotli !== 'object') {
-                    opt.brotli = {};
-                }
-                this.zip = new zlib.BrotliCompress(opt.brotli);
-            }
-            /* c8 ignore next */
-            if (!this.zip)
-                throw new Error('impossible');
-            const zip = this.zip;
-            zip.on('data', chunk => super.write(chunk));
-            zip.on('end', () => super.end());
-            zip.on('drain', () => this[ONDRAIN]());
-            this.on('resume', () => zip.resume());
-        }
-        else {
-            this.on('drain', this[ONDRAIN]);
-        }
-        this.noDirRecurse = !!opt.noDirRecurse;
-        this.follow = !!opt.follow;
-        this.noMtime = !!opt.noMtime;
-        if (opt.mtime)
-            this.mtime = opt.mtime;
-        this.filter =
-            typeof opt.filter === 'function' ? opt.filter : () => true;
-        this[QUEUE] = new yallist_1.Yallist();
-        this[JOBS] = 0;
-        this.jobs = Number(opt.jobs) || 4;
-        this[PROCESSING] = false;
-        this[ENDED] = false;
-    }
-    [WRITE](chunk) {
-        return super.write(chunk);
-    }
-    add(path) {
-        this.write(path);
-        return this;
-    }
-    end(path, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof path === 'function') {
-            cb = path;
-            path = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (path) {
-            this.add(path);
-        }
-        this[ENDED] = true;
-        this[PROCESS]();
-        /* c8 ignore next */
-        if (cb)
-            cb();
-        return this;
-    }
-    write(path) {
-        if (this[ENDED]) {
-            throw new Error('write after end');
-        }
-        if (path instanceof read_entry_js_1.ReadEntry) {
-            this[ADDTARENTRY](path);
-        }
-        else {
-            this[ADDFSENTRY](path);
-        }
-        return this.flowing;
-    }
-    [ADDTARENTRY](p) {
-        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
-        // in this case, we don't have to wait for the stat
-        if (!this.filter(p.path, p)) {
-            p.resume();
-        }
-        else {
-            const job = new PackJob(p.path, absolute);
-            job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
-            job.entry.on('end', () => this[JOBDONE](job));
-            this[JOBS] += 1;
-            this[QUEUE].push(job);
-        }
-        this[PROCESS]();
-    }
-    [ADDFSENTRY](p) {
-        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
-        this[QUEUE].push(new PackJob(p, absolute));
-        this[PROCESS]();
-    }
-    [STAT](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        const stat = this.follow ? 'stat' : 'lstat';
-        fs_1.default[stat](job.absolute, (er, stat) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                this.emit('error', er);
-            }
-            else {
-                this[ONSTAT](job, stat);
-            }
-        });
-    }
-    [ONSTAT](job, stat) {
-        this.statCache.set(job.absolute, stat);
-        job.stat = stat;
-        // now we have the stat, we can filter it.
-        if (!this.filter(job.path, stat)) {
-            job.ignore = true;
-        }
-        this[PROCESS]();
-    }
-    [READDIR](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        fs_1.default.readdir(job.absolute, (er, entries) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADDIR](job, entries);
-        });
-    }
-    [ONREADDIR](job, entries) {
-        this.readdirCache.set(job.absolute, entries);
-        job.readdir = entries;
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        if (this[PROCESSING]) {
-            return;
-        }
-        this[PROCESSING] = true;
-        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
-            this[PROCESSJOB](w.value);
-            if (w.value.ignore) {
-                const p = w.next;
-                this[QUEUE].removeNode(w);
-                w.next = p;
-            }
-        }
-        this[PROCESSING] = false;
-        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-            if (this.zip) {
-                this.zip.end(EOF);
-            }
-            else {
-                super.write(EOF);
-                super.end();
-            }
-        }
-    }
-    get [CURRENT]() {
-        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
-    }
-    [JOBDONE](_job) {
-        this[QUEUE].shift();
-        this[JOBS] -= 1;
-        this[PROCESS]();
-    }
-    [PROCESSJOB](job) {
-        if (job.pending) {
-            return;
-        }
-        if (job.entry) {
-            if (job === this[CURRENT] && !job.piped) {
-                this[PIPE](job);
-            }
-            return;
-        }
-        if (!job.stat) {
-            const sc = this.statCache.get(job.absolute);
-            if (sc) {
-                this[ONSTAT](job, sc);
-            }
-            else {
-                this[STAT](job);
-            }
-        }
-        if (!job.stat) {
-            return;
-        }
-        // filtered out!
-        if (job.ignore) {
-            return;
-        }
-        if (!this.noDirRecurse &&
-            job.stat.isDirectory() &&
-            !job.readdir) {
-            const rc = this.readdirCache.get(job.absolute);
-            if (rc) {
-                this[ONREADDIR](job, rc);
-            }
-            else {
-                this[READDIR](job);
-            }
-            if (!job.readdir) {
-                return;
-            }
-        }
-        // we know it doesn't have an entry, because that got checked above
-        job.entry = this[ENTRY](job);
-        if (!job.entry) {
-            job.ignore = true;
-            return;
-        }
-        if (job === this[CURRENT] && !job.piped) {
-            this[PIPE](job);
-        }
-    }
-    [ENTRYOPT](job) {
-        return {
-            onwarn: (code, msg, data) => this.warn(code, msg, data),
-            noPax: this.noPax,
-            cwd: this.cwd,
-            absolute: job.absolute,
-            preservePaths: this.preservePaths,
-            maxReadSize: this.maxReadSize,
-            strict: this.strict,
-            portable: this.portable,
-            linkCache: this.linkCache,
-            statCache: this.statCache,
-            noMtime: this.noMtime,
-            mtime: this.mtime,
-            prefix: this.prefix,
-            onWriteEntry: this.onWriteEntry,
-        };
-    }
-    [ENTRY](job) {
-        this[JOBS] += 1;
-        try {
-            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
-            return e
-                .on('end', () => this[JOBDONE](job))
-                .on('error', er => this.emit('error', er));
-        }
-        catch (er) {
-            this.emit('error', er);
-        }
-    }
-    [ONDRAIN]() {
-        if (this[CURRENT] && this[CURRENT].entry) {
-            this[CURRENT].entry.resume();
-        }
-    }
-    // like .pipe() but using super, because our write() is special
-    [PIPE](job) {
-        job.piped = true;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        const source = job.entry;
-        const zip = this.zip;
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                if (!zip.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                if (!super.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-    }
-    pause() {
-        if (this.zip) {
-            this.zip.pause();
-        }
-        return super.pause();
-    }
-    warn(code, message, data = {}) {
-        (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-}
-exports.Pack = Pack;
-class PackSync extends Pack {
-    sync = true;
-    constructor(opt) {
-        super(opt);
-        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
-    }
-    // pause/resume are no-ops in sync streams.
-    pause() { }
-    resume() { }
-    [STAT](job) {
-        const stat = this.follow ? 'statSync' : 'lstatSync';
-        this[ONSTAT](job, fs_1.default[stat](job.absolute));
-    }
-    [READDIR](job) {
-        this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
-    }
-    // gotta get it all in this tick
-    [PIPE](job) {
-        const source = job.entry;
-        const zip = this.zip;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('Cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                zip.write(chunk);
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                super[WRITE](chunk);
-            });
-        }
-    }
-}
-exports.PackSync = PackSync;
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/parse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/parse.js
deleted file mode 100644
index 9746a25899e6e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/parse.js
+++ /dev/null
@@ -1,599 +0,0 @@
-"use strict";
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Parser = void 0;
-const events_1 = require("events");
-const minizlib_1 = require("minizlib");
-const yallist_1 = require("yallist");
-const header_js_1 = require("./header.js");
-const pax_js_1 = require("./pax.js");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-class Parser extends events_1.EventEmitter {
-    file;
-    strict;
-    maxMetaEntrySize;
-    filter;
-    brotli;
-    writable = true;
-    readable = false;
-    [QUEUE] = new yallist_1.Yallist();
-    [BUFFER];
-    [READENTRY];
-    [WRITEENTRY];
-    [STATE] = 'begin';
-    [META] = '';
-    [EX];
-    [GEX];
-    [ENDED] = false;
-    [UNZIP];
-    [ABORTED] = false;
-    [SAW_VALID_ENTRY];
-    [SAW_NULL_BLOCK] = false;
-    [SAW_EOF] = false;
-    [WRITING] = false;
-    [CONSUMING] = false;
-    [EMITTEDEND] = false;
-    constructor(opt = {}) {
-        super();
-        this.file = opt.file || '';
-        // these BADARCHIVE errors can't be detected early. listen on DONE.
-        this.on(DONE, () => {
-            if (this[STATE] === 'begin' ||
-                this[SAW_VALID_ENTRY] === false) {
-                // either less than 1 block of data, or all entries were invalid.
-                // Either way, probably not even a tarball.
-                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
-            }
-        });
-        if (opt.ondone) {
-            this.on(DONE, opt.ondone);
-        }
-        else {
-            this.on(DONE, () => {
-                this.emit('prefinish');
-                this.emit('finish');
-                this.emit('end');
-            });
-        }
-        this.strict = !!opt.strict;
-        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
-        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
-        // Unlike gzip, brotli doesn't have any magic bytes to identify it
-        // Users need to explicitly tell us they're extracting a brotli file
-        // Or we infer from the file extension
-        const isTBR = opt.file &&
-            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
-        // if it's a tbr file it MIGHT be brotli, but we don't know until
-        // we look at it and verify it's not a valid tar file.
-        this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
-                : isTBR ? undefined
-                    : false;
-        // have to set this so that streams are ok piping into it
-        this.on('end', () => this[CLOSESTREAM]());
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        if (typeof opt.onReadEntry === 'function') {
-            this.on('entry', opt.onReadEntry);
-        }
-    }
-    warn(code, message, data = {}) {
-        (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    [CONSUMEHEADER](chunk, position) {
-        if (this[SAW_VALID_ENTRY] === undefined) {
-            this[SAW_VALID_ENTRY] = false;
-        }
-        let header;
-        try {
-            header = new header_js_1.Header(chunk, position, this[EX], this[GEX]);
-        }
-        catch (er) {
-            return this.warn('TAR_ENTRY_INVALID', er);
-        }
-        if (header.nullBlock) {
-            if (this[SAW_NULL_BLOCK]) {
-                this[SAW_EOF] = true;
-                // ending an archive with no entries.  pointless, but legal.
-                if (this[STATE] === 'begin') {
-                    this[STATE] = 'header';
-                }
-                this[EMIT]('eof');
-            }
-            else {
-                this[SAW_NULL_BLOCK] = true;
-                this[EMIT]('nullBlock');
-            }
-        }
-        else {
-            this[SAW_NULL_BLOCK] = false;
-            if (!header.cksumValid) {
-                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
-            }
-            else if (!header.path) {
-                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
-            }
-            else {
-                const type = header.type;
-                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
-                        header,
-                    });
-                }
-                else if (!/^(Symbolic)?Link$/.test(type) &&
-                    !/^(Global)?ExtendedHeader$/.test(type) &&
-                    header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
-                        header,
-                    });
-                }
-                else {
-                    const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX]));
-                    // we do this for meta & ignored entries as well, because they
-                    // are still valid tar, or else we wouldn't know to ignore them
-                    if (!this[SAW_VALID_ENTRY]) {
-                        if (entry.remain) {
-                            // this might be the one!
-                            const onend = () => {
-                                if (!entry.invalid) {
-                                    this[SAW_VALID_ENTRY] = true;
-                                }
-                            };
-                            entry.on('end', onend);
-                        }
-                        else {
-                            this[SAW_VALID_ENTRY] = true;
-                        }
-                    }
-                    if (entry.meta) {
-                        if (entry.size > this.maxMetaEntrySize) {
-                            entry.ignore = true;
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = 'ignore';
-                            entry.resume();
-                        }
-                        else if (entry.size > 0) {
-                            this[META] = '';
-                            entry.on('data', c => (this[META] += c));
-                            this[STATE] = 'meta';
-                        }
-                    }
-                    else {
-                        this[EX] = undefined;
-                        entry.ignore =
-                            entry.ignore || !this.filter(entry.path, entry);
-                        if (entry.ignore) {
-                            // probably valid, just not something we care about
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = entry.remain ? 'ignore' : 'header';
-                            entry.resume();
-                        }
-                        else {
-                            if (entry.remain) {
-                                this[STATE] = 'body';
-                            }
-                            else {
-                                this[STATE] = 'header';
-                                entry.end();
-                            }
-                            if (!this[READENTRY]) {
-                                this[QUEUE].push(entry);
-                                this[NEXTENTRY]();
-                            }
-                            else {
-                                this[QUEUE].push(entry);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    [CLOSESTREAM]() {
-        queueMicrotask(() => this.emit('close'));
-    }
-    [PROCESSENTRY](entry) {
-        let go = true;
-        if (!entry) {
-            this[READENTRY] = undefined;
-            go = false;
-        }
-        else if (Array.isArray(entry)) {
-            const [ev, ...args] = entry;
-            this.emit(ev, ...args);
-        }
-        else {
-            this[READENTRY] = entry;
-            this.emit('entry', entry);
-            if (!entry.emittedEnd) {
-                entry.on('end', () => this[NEXTENTRY]());
-                go = false;
-            }
-        }
-        return go;
-    }
-    [NEXTENTRY]() {
-        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
-        if (!this[QUEUE].length) {
-            // At this point, there's nothing in the queue, but we may have an
-            // entry which is being consumed (readEntry).
-            // If we don't, then we definitely can handle more data.
-            // If we do, and either it's flowing, or it has never had any data
-            // written to it, then it needs more.
-            // The only other possibility is that it has returned false from a
-            // write() call, so we wait for the next drain to continue.
-            const re = this[READENTRY];
-            const drainNow = !re || re.flowing || re.size === re.remain;
-            if (drainNow) {
-                if (!this[WRITING]) {
-                    this.emit('drain');
-                }
-            }
-            else {
-                re.once('drain', () => this.emit('drain'));
-            }
-        }
-    }
-    [CONSUMEBODY](chunk, position) {
-        // write up to but no  more than writeEntry.blockRemain
-        const entry = this[WRITEENTRY];
-        /* c8 ignore start */
-        if (!entry) {
-            throw new Error('attempt to consume body without entry??');
-        }
-        const br = entry.blockRemain ?? 0;
-        /* c8 ignore stop */
-        const c = br >= chunk.length && position === 0 ?
-            chunk
-            : chunk.subarray(position, position + br);
-        entry.write(c);
-        if (!entry.blockRemain) {
-            this[STATE] = 'header';
-            this[WRITEENTRY] = undefined;
-            entry.end();
-        }
-        return c.length;
-    }
-    [CONSUMEMETA](chunk, position) {
-        const entry = this[WRITEENTRY];
-        const ret = this[CONSUMEBODY](chunk, position);
-        // if we finished, then the entry is reset
-        if (!this[WRITEENTRY] && entry) {
-            this[EMITMETA](entry);
-        }
-        return ret;
-    }
-    [EMIT](ev, data, extra) {
-        if (!this[QUEUE].length && !this[READENTRY]) {
-            this.emit(ev, data, extra);
-        }
-        else {
-            this[QUEUE].push([ev, data, extra]);
-        }
-    }
-    [EMITMETA](entry) {
-        this[EMIT]('meta', this[META]);
-        switch (entry.type) {
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false);
-                break;
-            case 'GlobalExtendedHeader':
-                this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true);
-                break;
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath': {
-                const ex = this[EX] ?? Object.create(null);
-                this[EX] = ex;
-                ex.path = this[META].replace(/\0.*/, '');
-                break;
-            }
-            case 'NextFileHasLongLinkpath': {
-                const ex = this[EX] || Object.create(null);
-                this[EX] = ex;
-                ex.linkpath = this[META].replace(/\0.*/, '');
-                break;
-            }
-            /* c8 ignore start */
-            default:
-                throw new Error('unknown meta: ' + entry.type);
-            /* c8 ignore stop */
-        }
-    }
-    abort(error) {
-        this[ABORTED] = true;
-        this.emit('abort', error);
-        // always throws, even in non-strict mode
-        this.warn('TAR_ABORT', error, { recoverable: false });
-    }
-    write(chunk, encoding, cb) {
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, 
-            /* c8 ignore next */
-            typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        if (this[ABORTED]) {
-            /* c8 ignore next */
-            cb?.();
-            return false;
-        }
-        // first write, might be gzipped
-        const needSniff = this[UNZIP] === undefined ||
-            (this.brotli === undefined && this[UNZIP] === false);
-        if (needSniff && chunk) {
-            if (this[BUFFER]) {
-                chunk = Buffer.concat([this[BUFFER], chunk]);
-                this[BUFFER] = undefined;
-            }
-            if (chunk.length < gzipHeader.length) {
-                this[BUFFER] = chunk;
-                /* c8 ignore next */
-                cb?.();
-                return true;
-            }
-            // look for gzip header
-            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
-                if (chunk[i] !== gzipHeader[i]) {
-                    this[UNZIP] = false;
-                }
-            }
-            const maybeBrotli = this.brotli === undefined;
-            if (this[UNZIP] === false && maybeBrotli) {
-                // read the first header to see if it's a valid tar file. If so,
-                // we can safely assume that it's not actually brotli, despite the
-                // .tbr or .tar.br file extension.
-                // if we ended before getting a full chunk, yes, def brotli
-                if (chunk.length < 512) {
-                    if (this[ENDED]) {
-                        this.brotli = true;
-                    }
-                    else {
-                        this[BUFFER] = chunk;
-                        /* c8 ignore next */
-                        cb?.();
-                        return true;
-                    }
-                }
-                else {
-                    // if it's tar, it's pretty reliably not brotli, chances of
-                    // that happening are astronomical.
-                    try {
-                        new header_js_1.Header(chunk.subarray(0, 512));
-                        this.brotli = false;
-                    }
-                    catch (_) {
-                        this.brotli = true;
-                    }
-                }
-            }
-            if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
-                const ended = this[ENDED];
-                this[ENDED] = false;
-                this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new minizlib_1.Unzip({})
-                        : new minizlib_1.BrotliDecompress({});
-                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
-                this[UNZIP].on('error', er => this.abort(er));
-                this[UNZIP].on('end', () => {
-                    this[ENDED] = true;
-                    this[CONSUMECHUNK]();
-                });
-                this[WRITING] = true;
-                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
-                this[WRITING] = false;
-                cb?.();
-                return ret;
-            }
-        }
-        this[WRITING] = true;
-        if (this[UNZIP]) {
-            this[UNZIP].write(chunk);
-        }
-        else {
-            this[CONSUMECHUNK](chunk);
-        }
-        this[WRITING] = false;
-        // return false if there's a queue, or if the current entry isn't flowing
-        const ret = this[QUEUE].length ? false
-            : this[READENTRY] ? this[READENTRY].flowing
-                : true;
-        // if we have no queue, then that means a clogged READENTRY
-        if (!ret && !this[QUEUE].length) {
-            this[READENTRY]?.once('drain', () => this.emit('drain'));
-        }
-        /* c8 ignore next */
-        cb?.();
-        return ret;
-    }
-    [BUFFERCONCAT](c) {
-        if (c && !this[ABORTED]) {
-            this[BUFFER] =
-                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
-        }
-    }
-    [MAYBEEND]() {
-        if (this[ENDED] &&
-            !this[EMITTEDEND] &&
-            !this[ABORTED] &&
-            !this[CONSUMING]) {
-            this[EMITTEDEND] = true;
-            const entry = this[WRITEENTRY];
-            if (entry && entry.blockRemain) {
-                // truncated, likely a damaged file
-                const have = this[BUFFER] ? this[BUFFER].length : 0;
-                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
-                if (this[BUFFER]) {
-                    entry.write(this[BUFFER]);
-                }
-                entry.end();
-            }
-            this[EMIT](DONE);
-        }
-    }
-    [CONSUMECHUNK](chunk) {
-        if (this[CONSUMING] && chunk) {
-            this[BUFFERCONCAT](chunk);
-        }
-        else if (!chunk && !this[BUFFER]) {
-            this[MAYBEEND]();
-        }
-        else if (chunk) {
-            this[CONSUMING] = true;
-            if (this[BUFFER]) {
-                this[BUFFERCONCAT](chunk);
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            else {
-                this[CONSUMECHUNKSUB](chunk);
-            }
-            while (this[BUFFER] &&
-                this[BUFFER]?.length >= 512 &&
-                !this[ABORTED] &&
-                !this[SAW_EOF]) {
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            this[CONSUMING] = false;
-        }
-        if (!this[BUFFER] || this[ENDED]) {
-            this[MAYBEEND]();
-        }
-    }
-    [CONSUMECHUNKSUB](chunk) {
-        // we know that we are in CONSUMING mode, so anything written goes into
-        // the buffer.  Advance the position and put any remainder in the buffer.
-        let position = 0;
-        const length = chunk.length;
-        while (position + 512 <= length &&
-            !this[ABORTED] &&
-            !this[SAW_EOF]) {
-            switch (this[STATE]) {
-                case 'begin':
-                case 'header':
-                    this[CONSUMEHEADER](chunk, position);
-                    position += 512;
-                    break;
-                case 'ignore':
-                case 'body':
-                    position += this[CONSUMEBODY](chunk, position);
-                    break;
-                case 'meta':
-                    position += this[CONSUMEMETA](chunk, position);
-                    break;
-                /* c8 ignore start */
-                default:
-                    throw new Error('invalid state: ' + this[STATE]);
-                /* c8 ignore stop */
-            }
-        }
-        if (position < length) {
-            if (this[BUFFER]) {
-                this[BUFFER] = Buffer.concat([
-                    chunk.subarray(position),
-                    this[BUFFER],
-                ]);
-            }
-            else {
-                this[BUFFER] = chunk.subarray(position);
-            }
-        }
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (cb)
-            this.once('finish', cb);
-        if (!this[ABORTED]) {
-            if (this[UNZIP]) {
-                /* c8 ignore start */
-                if (chunk)
-                    this[UNZIP].write(chunk);
-                /* c8 ignore stop */
-                this[UNZIP].end();
-            }
-            else {
-                this[ENDED] = true;
-                if (this.brotli === undefined)
-                    chunk = chunk || Buffer.alloc(0);
-                if (chunk)
-                    this.write(chunk);
-                this[MAYBEEND]();
-            }
-        }
-        return this;
-    }
-}
-exports.Parser = Parser;
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/path-reservations.js
deleted file mode 100644
index 9ff391c44092c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/path-reservations.js
+++ /dev/null
@@ -1,170 +0,0 @@
-"use strict";
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PathReservations = void 0;
-const node_path_1 = require("node:path");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-const getDirs = (path) => {
-    const dirs = path
-        .split('/')
-        .slice(0, -1)
-        .reduce((set, path) => {
-        const s = set[set.length - 1];
-        if (s !== undefined) {
-            path = (0, node_path_1.join)(s, path);
-        }
-        set.push(path || '/');
-        return set;
-    }, []);
-    return dirs;
-};
-class PathReservations {
-    // path => [function or Set]
-    // A Set object means a directory reservation
-    // A fn is a direct reservation on that path
-    #queues = new Map();
-    // fn => {paths:[path,...], dirs:[path, ...]}
-    #reservations = new Map();
-    // functions currently running
-    #running = new Set();
-    reserve(paths, fn) {
-        paths =
-            isWindows ?
-                ['win32 parallelization disabled']
-                : paths.map(p => {
-                    // don't need normPath, because we skip this entirely for windows
-                    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase();
-                });
-        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
-        this.#reservations.set(fn, { dirs, paths });
-        for (const p of paths) {
-            const q = this.#queues.get(p);
-            if (!q) {
-                this.#queues.set(p, [fn]);
-            }
-            else {
-                q.push(fn);
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            if (!q) {
-                this.#queues.set(dir, [new Set([fn])]);
-            }
-            else {
-                const l = q[q.length - 1];
-                if (l instanceof Set) {
-                    l.add(fn);
-                }
-                else {
-                    q.push(new Set([fn]));
-                }
-            }
-        }
-        return this.#run(fn);
-    }
-    // return the queues for each path the function cares about
-    // fn => {paths, dirs}
-    #getQueues(fn) {
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('function does not have any path reservations');
-        }
-        /* c8 ignore stop */
-        return {
-            paths: res.paths.map((path) => this.#queues.get(path)),
-            dirs: [...res.dirs].map(path => this.#queues.get(path)),
-        };
-    }
-    // check if fn is first in line for all its paths, and is
-    // included in the first set for all its dir queues
-    check(fn) {
-        const { paths, dirs } = this.#getQueues(fn);
-        return (paths.every(q => q && q[0] === fn) &&
-            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
-    }
-    // run the function if it's first in line and not already running
-    #run(fn) {
-        if (this.#running.has(fn) || !this.check(fn)) {
-            return false;
-        }
-        this.#running.add(fn);
-        fn(() => this.#clear(fn));
-        return true;
-    }
-    #clear(fn) {
-        if (!this.#running.has(fn)) {
-            return false;
-        }
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('invalid reservation');
-        }
-        /* c8 ignore stop */
-        const { paths, dirs } = res;
-        const next = new Set();
-        for (const path of paths) {
-            const q = this.#queues.get(path);
-            /* c8 ignore start */
-            if (!q || q?.[0] !== fn) {
-                continue;
-            }
-            /* c8 ignore stop */
-            const q0 = q[1];
-            if (!q0) {
-                this.#queues.delete(path);
-                continue;
-            }
-            q.shift();
-            if (typeof q0 === 'function') {
-                next.add(q0);
-            }
-            else {
-                for (const f of q0) {
-                    next.add(f);
-                }
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            const q0 = q?.[0];
-            /* c8 ignore next - type safety only */
-            if (!q || !(q0 instanceof Set))
-                continue;
-            if (q0.size === 1 && q.length === 1) {
-                this.#queues.delete(dir);
-                continue;
-            }
-            else if (q0.size === 1) {
-                q.shift();
-                // next one must be a function,
-                // or else the Set would've been reused
-                const n = q[0];
-                if (typeof n === 'function') {
-                    next.add(n);
-                }
-            }
-            else {
-                q0.delete(fn);
-            }
-        }
-        this.#running.delete(fn);
-        next.forEach(fn => this.#run(fn));
-        return true;
-    }
-}
-exports.PathReservations = PathReservations;
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pax.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pax.js
deleted file mode 100644
index d30c0f3efbe9e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/pax.js
+++ /dev/null
@@ -1,158 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Pax = void 0;
-const node_path_1 = require("node:path");
-const header_js_1 = require("./header.js");
-class Pax {
-    atime;
-    mtime;
-    ctime;
-    charset;
-    comment;
-    gid;
-    uid;
-    gname;
-    uname;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    path;
-    size;
-    mode;
-    global;
-    constructor(obj, global = false) {
-        this.atime = obj.atime;
-        this.charset = obj.charset;
-        this.comment = obj.comment;
-        this.ctime = obj.ctime;
-        this.dev = obj.dev;
-        this.gid = obj.gid;
-        this.global = global;
-        this.gname = obj.gname;
-        this.ino = obj.ino;
-        this.linkpath = obj.linkpath;
-        this.mtime = obj.mtime;
-        this.nlink = obj.nlink;
-        this.path = obj.path;
-        this.size = obj.size;
-        this.uid = obj.uid;
-        this.uname = obj.uname;
-    }
-    encode() {
-        const body = this.encodeBody();
-        if (body === '') {
-            return Buffer.allocUnsafe(0);
-        }
-        const bodyLen = Buffer.byteLength(body);
-        // round up to 512 bytes
-        // add 512 for header
-        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
-        const buf = Buffer.allocUnsafe(bufLen);
-        // 0-fill the header section, it might not hit every field
-        for (let i = 0; i < 512; i++) {
-            buf[i] = 0;
-        }
-        new header_js_1.Header({
-            // XXX split the path
-            // then the path should be PaxHeader + basename, but less than 99,
-            // prepend with the dirname
-            /* c8 ignore start */
-            path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
-            /* c8 ignore stop */
-            mode: this.mode || 0o644,
-            uid: this.uid,
-            gid: this.gid,
-            size: bodyLen,
-            mtime: this.mtime,
-            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-            linkpath: '',
-            uname: this.uname || '',
-            gname: this.gname || '',
-            devmaj: 0,
-            devmin: 0,
-            atime: this.atime,
-            ctime: this.ctime,
-        }).encode(buf);
-        buf.write(body, 512, bodyLen, 'utf8');
-        // null pad after the body
-        for (let i = bodyLen + 512; i < buf.length; i++) {
-            buf[i] = 0;
-        }
-        return buf;
-    }
-    encodeBody() {
-        return (this.encodeField('path') +
-            this.encodeField('ctime') +
-            this.encodeField('atime') +
-            this.encodeField('dev') +
-            this.encodeField('ino') +
-            this.encodeField('nlink') +
-            this.encodeField('charset') +
-            this.encodeField('comment') +
-            this.encodeField('gid') +
-            this.encodeField('gname') +
-            this.encodeField('linkpath') +
-            this.encodeField('mtime') +
-            this.encodeField('size') +
-            this.encodeField('uid') +
-            this.encodeField('uname'));
-    }
-    encodeField(field) {
-        if (this[field] === undefined) {
-            return '';
-        }
-        const r = this[field];
-        const v = r instanceof Date ? r.getTime() / 1000 : r;
-        const s = ' ' +
-            (field === 'dev' || field === 'ino' || field === 'nlink' ?
-                'SCHILY.'
-                : '') +
-            field +
-            '=' +
-            v +
-            '\n';
-        const byteLen = Buffer.byteLength(s);
-        // the digits includes the length of the digits in ascii base-10
-        // so if it's 9 characters, then adding 1 for the 9 makes it 10
-        // which makes it 11 chars.
-        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
-        if (byteLen + digits >= Math.pow(10, digits)) {
-            digits += 1;
-        }
-        const len = digits + byteLen;
-        return len + s;
-    }
-    static parse(str, ex, g = false) {
-        return new Pax(merge(parseKV(str), ex), g);
-    }
-}
-exports.Pax = Pax;
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
-    const n = parseInt(line, 10);
-    // XXX Values with \n in them will fail this.
-    // Refactor to not be a naive line-by-line parse.
-    if (n !== Buffer.byteLength(line) + 1) {
-        return set;
-    }
-    line = line.slice((n + ' ').length);
-    const kv = line.split('=');
-    const r = kv.shift();
-    if (!r) {
-        return set;
-    }
-    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
-    const v = kv.join('=');
-    set[k] =
-        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
-            new Date(Number(v) * 1000)
-            : /^[0-9]+$/.test(v) ? +v
-                : v;
-    return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/read-entry.js
deleted file mode 100644
index 15e2d55c938a4..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/read-entry.js
+++ /dev/null
@@ -1,140 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ReadEntry = void 0;
-const minipass_1 = require("minipass");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class ReadEntry extends minipass_1.Minipass {
-    extended;
-    globalExtended;
-    header;
-    startBlockSize;
-    blockRemain;
-    remain;
-    type;
-    meta = false;
-    ignore = false;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    size = 0;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    invalid = false;
-    absolute;
-    unsupported = false;
-    constructor(header, ex, gex) {
-        super({});
-        // read entries always start life paused.  this is to avoid the
-        // situation where Minipass's auto-ending empty streams results
-        // in an entry ending before we're ready for it.
-        this.pause();
-        this.extended = ex;
-        this.globalExtended = gex;
-        this.header = header;
-        /* c8 ignore start */
-        this.remain = header.size ?? 0;
-        /* c8 ignore stop */
-        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
-        this.blockRemain = this.startBlockSize;
-        this.type = header.type;
-        switch (this.type) {
-            case 'File':
-            case 'OldFile':
-            case 'Link':
-            case 'SymbolicLink':
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'Directory':
-            case 'FIFO':
-            case 'ContiguousFile':
-            case 'GNUDumpDir':
-                break;
-            case 'NextFileHasLongLinkpath':
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath':
-            case 'GlobalExtendedHeader':
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this.meta = true;
-                break;
-            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-            // it may be worth doing the same, but with a warning.
-            default:
-                this.ignore = true;
-        }
-        /* c8 ignore start */
-        if (!header.path) {
-            throw new Error('no path provided for tar.ReadEntry');
-        }
-        /* c8 ignore stop */
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
-        this.mode = header.mode;
-        if (this.mode) {
-            this.mode = this.mode & 0o7777;
-        }
-        this.uid = header.uid;
-        this.gid = header.gid;
-        this.uname = header.uname;
-        this.gname = header.gname;
-        this.size = this.remain;
-        this.mtime = header.mtime;
-        this.atime = header.atime;
-        this.ctime = header.ctime;
-        /* c8 ignore start */
-        this.linkpath =
-            header.linkpath ?
-                (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
-                : undefined;
-        /* c8 ignore stop */
-        this.uname = header.uname;
-        this.gname = header.gname;
-        if (ex) {
-            this.#slurp(ex);
-        }
-        if (gex) {
-            this.#slurp(gex, true);
-        }
-    }
-    write(data) {
-        const writeLen = data.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        const r = this.remain;
-        const br = this.blockRemain;
-        this.remain = Math.max(0, r - writeLen);
-        this.blockRemain = Math.max(0, br - writeLen);
-        if (this.ignore) {
-            return true;
-        }
-        if (r >= writeLen) {
-            return super.write(data);
-        }
-        // r < writeLen
-        return super.write(data.subarray(0, r));
-    }
-    #slurp(ex, gex = false) {
-        if (ex.path)
-            ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
-        if (ex.linkpath)
-            ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex));
-        })));
-    }
-}
-exports.ReadEntry = ReadEntry;
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/replace.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/replace.js
deleted file mode 100644
index 262deecd12f9f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/replace.js
+++ /dev/null
@@ -1,231 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.replace = void 0;
-// tar -r
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const header_js_1 = require("./header.js");
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const options_js_1 = require("./options.js");
-const pack_js_1 = require("./pack.js");
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    let threw = true;
-    let fd;
-    let position;
-    try {
-        try {
-            fd = node_fs_1.default.openSync(opt.file, 'r+');
-        }
-        catch (er) {
-            if (er?.code === 'ENOENT') {
-                fd = node_fs_1.default.openSync(opt.file, 'w+');
-            }
-            else {
-                throw er;
-            }
-        }
-        const st = node_fs_1.default.fstatSync(fd);
-        const headBuf = Buffer.alloc(512);
-        POSITION: for (position = 0; position < st.size; position += 512) {
-            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
-                if (position === 0 &&
-                    headBuf[0] === 0x1f &&
-                    headBuf[1] === 0x8b) {
-                    throw new Error('cannot append to compressed archives');
-                }
-                if (!bytes) {
-                    break POSITION;
-                }
-            }
-            const h = new header_js_1.Header(headBuf);
-            if (!h.cksumValid) {
-                break;
-            }
-            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
-            if (position + entryBlockSize + 512 > st.size) {
-                break;
-            }
-            // the 512 for the header we just parsed will be added as well
-            // also jump ahead all the blocks for the body
-            position += entryBlockSize;
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-        }
-        threw = false;
-        streamSync(opt, p, position, fd, files);
-    }
-    finally {
-        if (threw) {
-            try {
-                node_fs_1.default.closeSync(fd);
-            }
-            catch (er) { }
-        }
-    }
-};
-const streamSync = (opt, p, position, fd, files) => {
-    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
-        fd: fd,
-        start: position,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
-    files = Array.from(files);
-    const p = new pack_js_1.Pack(opt);
-    const getPos = (fd, size, cb_) => {
-        const cb = (er, pos) => {
-            if (er) {
-                node_fs_1.default.close(fd, _ => cb_(er));
-            }
-            else {
-                cb_(null, pos);
-            }
-        };
-        let position = 0;
-        if (size === 0) {
-            return cb(null, 0);
-        }
-        let bufPos = 0;
-        const headBuf = Buffer.alloc(512);
-        const onread = (er, bytes) => {
-            if (er || typeof bytes === 'undefined') {
-                return cb(er);
-            }
-            bufPos += bytes;
-            if (bufPos < 512 && bytes) {
-                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
-            }
-            if (position === 0 &&
-                headBuf[0] === 0x1f &&
-                headBuf[1] === 0x8b) {
-                return cb(new Error('cannot append to compressed archives'));
-            }
-            // truncated header
-            if (bufPos < 512) {
-                return cb(null, position);
-            }
-            const h = new header_js_1.Header(headBuf);
-            if (!h.cksumValid) {
-                return cb(null, position);
-            }
-            /* c8 ignore next */
-            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
-            if (position + entryBlockSize + 512 > size) {
-                return cb(null, position);
-            }
-            position += entryBlockSize + 512;
-            if (position >= size) {
-                return cb(null, position);
-            }
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-            bufPos = 0;
-            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
-        };
-        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
-    };
-    const promise = new Promise((resolve, reject) => {
-        p.on('error', reject);
-        let flag = 'r+';
-        const onopen = (er, fd) => {
-            if (er && er.code === 'ENOENT' && flag === 'r+') {
-                flag = 'w+';
-                return node_fs_1.default.open(opt.file, flag, onopen);
-            }
-            if (er || !fd) {
-                return reject(er);
-            }
-            node_fs_1.default.fstat(fd, (er, st) => {
-                if (er) {
-                    return node_fs_1.default.close(fd, () => reject(er));
-                }
-                getPos(fd, st.size, (er, position) => {
-                    if (er) {
-                        return reject(er);
-                    }
-                    const stream = new fs_minipass_1.WriteStream(opt.file, {
-                        fd: fd,
-                        start: position,
-                    });
-                    p.pipe(stream);
-                    stream.on('error', reject);
-                    stream.on('close', resolve);
-                    addFilesAsync(p, files);
-                });
-            });
-        };
-        node_fs_1.default.open(opt.file, flag, onopen);
-    });
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            (0, list_js_1.list)({
-                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await (0, list_js_1.list)({
-                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, 
-/* c8 ignore start */
-() => {
-    throw new TypeError('file is required');
-}, () => {
-    throw new TypeError('file is required');
-}, 
-/* c8 ignore stop */
-(opt, entries) => {
-    if (!(0, options_js_1.isFile)(opt)) {
-        throw new TypeError('file is required');
-    }
-    if (opt.gzip ||
-        opt.brotli ||
-        opt.file.endsWith('.br') ||
-        opt.file.endsWith('.tbr')) {
-        throw new TypeError('cannot append to compressed archives');
-    }
-    if (!entries?.length) {
-        throw new TypeError('no paths specified to add/replace');
-    }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-absolute-path.js
deleted file mode 100644
index bb7639c35a110..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-absolute-path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripAbsolutePath = void 0;
-// unix absolute paths are also absolute on win32, so we use this for both
-const node_path_1 = require("node:path");
-const { isAbsolute, parse } = node_path_1.win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-const stripAbsolutePath = (path) => {
-    let r = '';
-    let parsed = parse(path);
-    while (isAbsolute(path) || parsed.root) {
-        // windows will think that //x/y/z has a "root" of //x/y/
-        // but strip the //?/C:/ off of //?/C:/path
-        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
-            '/'
-            : parsed.root;
-        path = path.slice(root.length);
-        r += root;
-        parsed = parse(path);
-    }
-    return [r, path];
-};
-exports.stripAbsolutePath = stripAbsolutePath;
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
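A short usage sketch of the removed stripAbsolutePath helper; the deep dist import path is shown for illustration only, and the expected outputs follow the comments above (roots are peeled off one at a time until the remainder is relative):

    const { stripAbsolutePath } = require('tar/dist/commonjs/strip-absolute-path.js')

    console.log(stripAbsolutePath('/usr/local/bin'))  // [ '/', 'usr/local/bin' ]
    console.log(stripAbsolutePath('c:../foo'))        // [ 'c:', '../foo' ]  (drive-relative root stripped)
    console.log(stripAbsolutePath('//x/y/z/a'))       // [ '//', 'x/y/z/a' ] (strips slashes, not //x/y)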
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
deleted file mode 100644
index 6fa74ad6a4ac9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripTrailingSlashes = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const stripTrailingSlashes = (str) => {
-    let i = str.length - 1;
-    let slashesStart = -1;
-    while (i > -1 && str.charAt(i) === '/') {
-        slashesStart = i;
-        i--;
-    }
-    return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-exports.stripTrailingSlashes = stripTrailingSlashes;
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
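For clarity, the removed stripTrailingSlashes is equivalent to the one-line regex below; the hand-rolled loop is kept in the package because, as its comment notes, it sits on a hot path during npm install and the loop benchmarks faster than a regex. This is an illustrative re-statement, not the shipped code:

    // strip any run of trailing '/' characters; leave everything else untouched
    const stripTrailingSlashes = (str) => str.replace(/\/+$/, '')

    console.log(stripTrailingSlashes('pkg/sub///'))  // 'pkg/sub'
    console.log(stripTrailingSlashes('no-slash'))    // 'no-slash' (unchanged)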
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/symlink-error.js
deleted file mode 100644
index cc19ac1a2e3c6..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/symlink-error.js
+++ /dev/null
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SymlinkError = void 0;
-class SymlinkError extends Error {
-    path;
-    symlink;
-    syscall = 'symlink';
-    code = 'TAR_SYMLINK_ERROR';
-    constructor(symlink, path) {
-        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
-        this.symlink = symlink;
-        this.path = path;
-    }
-    get name() {
-        return 'SymlinkError';
-    }
-}
-exports.SymlinkError = SymlinkError;
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
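The removed SymlinkError is what unpack raises when an entry would be written through a symbolic link. A small sketch of the fields it carries; the two constructor arguments are the symlink that was hit and the entry path being refused, and the values below are made up (deep import path for illustration only):

    const { SymlinkError } = require('tar/dist/commonjs/symlink-error.js')

    const er = new SymlinkError('node_modules/link-target', 'node_modules/link-target/index.js')
    console.log(er.code)     // 'TAR_SYMLINK_ERROR'
    console.log(er.syscall)  // 'symlink'
    console.log(er.name)     // 'SymlinkError'
    console.log(er.message)  // 'TAR_SYMLINK_ERROR: Cannot extract through symbolic link'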
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/types.js
deleted file mode 100644
index cb9b684e843b7..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/types.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.code = exports.name = exports.isName = exports.isCode = void 0;
-const isCode = (c) => exports.name.has(c);
-exports.isCode = isCode;
-const isName = (c) => exports.code.has(c);
-exports.isName = isName;
-// map types from key to human-friendly name
-exports.name = new Map([
-    ['0', 'File'],
-    // same as File
-    ['', 'OldFile'],
-    ['1', 'Link'],
-    ['2', 'SymbolicLink'],
-    // Devices and FIFOs aren't fully supported
-    // they are parsed, but skipped when unpacking
-    ['3', 'CharacterDevice'],
-    ['4', 'BlockDevice'],
-    ['5', 'Directory'],
-    ['6', 'FIFO'],
-    // same as File
-    ['7', 'ContiguousFile'],
-    // pax headers
-    ['g', 'GlobalExtendedHeader'],
-    ['x', 'ExtendedHeader'],
-    // vendor-specific stuff
-    // skip
-    ['A', 'SolarisACL'],
-    // like 5, but with data, which should be skipped
-    ['D', 'GNUDumpDir'],
-    // metadata only, skip
-    ['I', 'Inode'],
-    // data = link path of next file
-    ['K', 'NextFileHasLongLinkpath'],
-    // data = path of next file
-    ['L', 'NextFileHasLongPath'],
-    // skip
-    ['M', 'ContinuationFile'],
-    // like L
-    ['N', 'OldGnuLongPath'],
-    // skip
-    ['S', 'SparseFile'],
-    // skip
-    ['V', 'TapeVolumeHeader'],
-    // like x
-    ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
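The removed types.js is a two-way lookup between single-character entry type codes and human-readable names, plus the matching membership checks. A quick sketch of both directions (deep import path for illustration only):

    const types = require('tar/dist/commonjs/types.js')

    console.log(types.name.get('5'))             // 'Directory'
    console.log(types.code.get('SymbolicLink'))  // '2'
    console.log(types.isCode('x'))               // true  ('x' is the ExtendedHeader code)
    console.log(types.isName('File'))            // true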
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/unpack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/unpack.js
deleted file mode 100644
index edf8acbb18c40..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/unpack.js
+++ /dev/null
@@ -1,919 +0,0 @@
-"use strict";
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UnpackSync = exports.Unpack = void 0;
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_assert_1 = __importDefault(require("node:assert"));
-const node_crypto_1 = require("node:crypto");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const get_write_flag_js_1 = require("./get-write-flag.js");
-const mkdir_js_1 = require("./mkdir.js");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const parse_js_1 = require("./parse.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const wc = __importStar(require("./winchars.js"));
-const path_reservations_js_1 = require("./path-reservations.js");
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
-    if (!isWindows) {
-        return node_fs_1.default.unlink(path, cb);
-    }
-    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
-    node_fs_1.default.rename(path, name, er => {
-        if (er) {
-            return cb(er);
-        }
-        node_fs_1.default.unlink(name, cb);
-    });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
-    if (!isWindows) {
-        return node_fs_1.default.unlinkSync(path);
-    }
-    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
-    node_fs_1.default.renameSync(path, name);
-    node_fs_1.default.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
-    : b !== undefined && b === b >>> 0 ? b
-        : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
-class Unpack extends parse_js_1.Parser {
-    [ENDED] = false;
-    [CHECKED_CWD] = false;
-    [PENDING] = 0;
-    reservations = new path_reservations_js_1.PathReservations();
-    transform;
-    writable = true;
-    readable = false;
-    dirCache;
-    uid;
-    gid;
-    setOwner;
-    preserveOwner;
-    processGid;
-    processUid;
-    maxDepth;
-    forceChown;
-    win32;
-    newer;
-    keep;
-    noMtime;
-    preservePaths;
-    unlink;
-    cwd;
-    strip;
-    processUmask;
-    umask;
-    dmode;
-    fmode;
-    chmod;
-    constructor(opt = {}) {
-        opt.ondone = () => {
-            this[ENDED] = true;
-            this[MAYBECLOSE]();
-        };
-        super(opt);
-        this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
-        this.chmod = !!opt.chmod;
-        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-            // need both or neither
-            if (typeof opt.uid !== 'number' ||
-                typeof opt.gid !== 'number') {
-                throw new TypeError('cannot set owner without number uid and gid');
-            }
-            if (opt.preserveOwner) {
-                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
-            }
-            this.uid = opt.uid;
-            this.gid = opt.gid;
-            this.setOwner = true;
-        }
-        else {
-            this.uid = undefined;
-            this.gid = undefined;
-            this.setOwner = false;
-        }
-        // default true for root
-        if (opt.preserveOwner === undefined &&
-            typeof opt.uid !== 'number') {
-            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
-        }
-        else {
-            this.preserveOwner = !!opt.preserveOwner;
-        }
-        this.processUid =
-            (this.preserveOwner || this.setOwner) && process.getuid ?
-                process.getuid()
-                : undefined;
-        this.processGid =
-            (this.preserveOwner || this.setOwner) && process.getgid ?
-                process.getgid()
-                : undefined;
-        // prevent excessively deep nesting of subfolders
-        // set to `Infinity` to remove this restriction
-        this.maxDepth =
-            typeof opt.maxDepth === 'number' ?
-                opt.maxDepth
-                : DEFAULT_MAX_DEPTH;
-        // mostly just for testing, but useful in some cases.
-        // Forcibly trigger a chown on every entry, no matter what
-        this.forceChown = opt.forceChown === true;
-        this.on('entry', entry => this[ONENTRY](entry));
-    }
-    // a bad or damaged archive is a warning for Parser, but an error
-    // when extracting.  Mark those errors as unrecoverable, because
-    // the Unpack contract cannot be met.
-    warn(code, msg, data = {}) {
-        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-            data.recoverable = false;
-        }
-        return super.warn(code, msg, data);
-    }
-    [MAYBECLOSE]() {
-        if (this[ENDED] && this[PENDING] === 0) {
-            this.emit('prefinish');
-            this.emit('finish');
-            this.emit('end');
-        }
-    }
-    [CHECKPATH](entry) {
-        const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
-        const parts = p.split('/');
-        if (this.strip) {
-            if (parts.length < this.strip) {
-                return false;
-            }
-            if (entry.type === 'Link') {
-                const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
-                if (linkparts.length >= this.strip) {
-                    entry.linkpath = linkparts.slice(this.strip).join('/');
-                }
-                else {
-                    return false;
-                }
-            }
-            parts.splice(0, this.strip);
-            entry.path = parts.join('/');
-        }
-        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-                entry,
-                path: p,
-                depth: parts.length,
-                maxDepth: this.maxDepth,
-            });
-            return false;
-        }
-        if (!this.preservePaths) {
-            if (parts.includes('..') ||
-                /* c8 ignore next */
-                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
-                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-                    entry,
-                    path: p,
-                });
-                return false;
-            }
-            // strip off the root
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
-            if (root) {
-                entry.path = String(stripped);
-                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-                    entry,
-                    path: p,
-                });
-            }
-        }
-        if (node_path_1.default.isAbsolute(entry.path)) {
-            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
-        }
-        else {
-            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
-        }
-        // if we somehow ended up with a path that escapes the cwd, and we are
-        // not in preservePaths mode, then something is fishy!  This should have
-        // been prevented above, so ignore this for coverage.
-        /* c8 ignore start - defense in depth */
-        if (!this.preservePaths &&
-            typeof entry.absolute === 'string' &&
-            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-            entry.absolute !== this.cwd) {
-            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-                entry,
-                path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
-                resolvedPath: entry.absolute,
-                cwd: this.cwd,
-            });
-            return false;
-        }
-        /* c8 ignore stop */
-        // an archive can set properties on the extraction directory, but it
-        // may not replace the cwd with a different kind of thing entirely.
-        if (entry.absolute === this.cwd &&
-            entry.type !== 'Directory' &&
-            entry.type !== 'GNUDumpDir') {
-            return false;
-        }
-        // only encode : chars that aren't drive letter indicators
-        if (this.win32) {
-            const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
-            entry.absolute =
-                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
-            const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
-            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
-        }
-        return true;
-    }
-    [ONENTRY](entry) {
-        if (!this[CHECKPATH](entry)) {
-            return entry.resume();
-        }
-        node_assert_1.default.equal(typeof entry.absolute, 'string');
-        switch (entry.type) {
-            case 'Directory':
-            case 'GNUDumpDir':
-                if (entry.mode) {
-                    entry.mode = entry.mode | 0o700;
-                }
-            // eslint-disable-next-line no-fallthrough
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-            case 'Link':
-            case 'SymbolicLink':
-                return this[CHECKFS](entry);
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'FIFO':
-            default:
-                return this[UNSUPPORTED](entry);
-        }
-    }
-    [ONERROR](er, entry) {
-        // Cwd has to exist, or else nothing works. That's serious.
-        // Other errors are warnings, which raise the error in strict
-        // mode, but otherwise continue on.
-        if (er.name === 'CwdError') {
-            this.emit('error', er);
-        }
-        else {
-            this.warn('TAR_ENTRY_ERROR', er, { entry });
-            this[UNPEND]();
-            entry.resume();
-        }
-    }
-    [MKDIR](dir, mode, cb) {
-        (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
-            uid: this.uid,
-            gid: this.gid,
-            processUid: this.processUid,
-            processGid: this.processGid,
-            umask: this.processUmask,
-            preserve: this.preservePaths,
-            unlink: this.unlink,
-            cache: this.dirCache,
-            cwd: this.cwd,
-            mode: mode,
-        }, cb);
-    }
-    [DOCHOWN](entry) {
-        // in preserve owner mode, chown if the entry doesn't match process
-        // in set owner mode, chown if setting doesn't match process
-        return (this.forceChown ||
-            (this.preserveOwner &&
-                ((typeof entry.uid === 'number' &&
-                    entry.uid !== this.processUid) ||
-                    (typeof entry.gid === 'number' &&
-                        entry.gid !== this.processGid))) ||
-            (typeof this.uid === 'number' &&
-                this.uid !== this.processUid) ||
-            (typeof this.gid === 'number' && this.gid !== this.processGid));
-    }
-    [UID](entry) {
-        return uint32(this.uid, entry.uid, this.processUid);
-    }
-    [GID](entry) {
-        return uint32(this.gid, entry.gid, this.processGid);
-    }
-    [FILE](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const stream = new fsm.WriteStream(String(entry.absolute), {
-            // slight lie, but it can be numeric flags
-            flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
-            mode: mode,
-            autoClose: false,
-        });
-        stream.on('error', (er) => {
-            if (stream.fd) {
-                node_fs_1.default.close(stream.fd, () => { });
-            }
-            // flush all the data out so that we aren't left hanging
-            // if the error wasn't actually fatal.  otherwise the parse
-            // is blocked, and we never proceed.
-            stream.write = () => true;
-            this[ONERROR](er, entry);
-            fullyDone();
-        });
-        let actions = 1;
-        const done = (er) => {
-            if (er) {
-                /* c8 ignore start - we should always have a fd by now */
-                if (stream.fd) {
-                    node_fs_1.default.close(stream.fd, () => { });
-                }
-                /* c8 ignore stop */
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            if (--actions === 0) {
-                if (stream.fd !== undefined) {
-                    node_fs_1.default.close(stream.fd, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                        }
-                        else {
-                            this[UNPEND]();
-                        }
-                        fullyDone();
-                    });
-                }
-            }
-        };
-        stream.on('finish', () => {
-            // if futimes fails, try utimes
-            // if utimes fails, fail with the original error
-            // same for fchown/chown
-            const abs = String(entry.absolute);
-            const fd = stream.fd;
-            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
-                actions++;
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                node_fs_1.default.futimes(fd, atime, mtime, er => er ?
-                    node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
-                    : done());
-            }
-            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
-                actions++;
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                if (typeof uid === 'number' && typeof gid === 'number') {
-                    node_fs_1.default.fchown(fd, uid, gid, er => er ?
-                        node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
-                        : done());
-                }
-            }
-            done();
-        });
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => {
-                this[ONERROR](er, entry);
-                fullyDone();
-            });
-            entry.pipe(tx);
-        }
-        tx.pipe(stream);
-    }
-    [DIRECTORY](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        this[MKDIR](String(entry.absolute), mode, er => {
-            if (er) {
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            let actions = 1;
-            const done = () => {
-                if (--actions === 0) {
-                    fullyDone();
-                    this[UNPEND]();
-                    entry.resume();
-                }
-            };
-            if (entry.mtime && !this.noMtime) {
-                actions++;
-                node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
-            }
-            if (this[DOCHOWN](entry)) {
-                actions++;
-                node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
-            }
-            done();
-        });
-    }
-    [UNSUPPORTED](entry) {
-        entry.unsupported = true;
-        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
-        entry.resume();
-    }
-    [SYMLINK](entry, done) {
-        this[LINK](entry, String(entry.linkpath), 'symlink', done);
-    }
-    [HARDLINK](entry, done) {
-        const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
-        this[LINK](entry, linkpath, 'link', done);
-    }
-    [PEND]() {
-        this[PENDING]++;
-    }
-    [UNPEND]() {
-        this[PENDING]--;
-        this[MAYBECLOSE]();
-    }
-    [SKIP](entry) {
-        this[UNPEND]();
-        entry.resume();
-    }
-    // Check if we can reuse an existing filesystem entry safely and
-    // overwrite it, rather than unlinking and recreating
-    // Windows doesn't report a useful nlink, so we just never reuse entries
-    [ISREUSABLE](entry, st) {
-        return (entry.type === 'File' &&
-            !this.unlink &&
-            st.isFile() &&
-            st.nlink <= 1 &&
-            !isWindows);
-    }
-    // check if a thing is there, and if so, try to clobber it
-    [CHECKFS](entry) {
-        this[PEND]();
-        const paths = [entry.path];
-        if (entry.linkpath) {
-            paths.push(entry.linkpath);
-        }
-        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
-    }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
-    [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
-        const done = (er) => {
-            this[PRUNECACHE](entry);
-            fullyDone(er);
-        };
-        const checkCwd = () => {
-            this[MKDIR](this.cwd, this.dmode, er => {
-                if (er) {
-                    this[ONERROR](er, entry);
-                    done();
-                    return;
-                }
-                this[CHECKED_CWD] = true;
-                start();
-            });
-        };
-        const start = () => {
-            if (entry.absolute !== this.cwd) {
-                const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
-                if (parent !== this.cwd) {
-                    return this[MKDIR](parent, this.dmode, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                            done();
-                            return;
-                        }
-                        afterMakeParent();
-                    });
-                }
-            }
-            afterMakeParent();
-        };
-        const afterMakeParent = () => {
-            node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
-                if (st &&
-                    (this.keep ||
-                        /* c8 ignore next */
-                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-                    this[SKIP](entry);
-                    done();
-                    return;
-                }
-                if (lstatEr || this[ISREUSABLE](entry, st)) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                if (st.isDirectory()) {
-                    if (entry.type === 'Directory') {
-                        const needChmod = this.chmod &&
-                            entry.mode &&
-                            (st.mode & 0o7777) !== entry.mode;
-                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
-                        if (!needChmod) {
-                            return afterChmod();
-                        }
-                        return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
-                    }
-                    // Not a dir entry, have to remove it.
-                    // NB: the only way to end up with an entry that is the cwd
-                    // itself, in such a way that == does not detect, is a
-                    // tricky windows absolute path with UNC or 8.3 parts (and
-                    // preservePaths:true, or else it will have been stripped).
-                    // In that case, the user has opted out of path protections
-                    // explicitly, so if they blow away the cwd, c'est la vie.
-                    if (entry.absolute !== this.cwd) {
-                        return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
-                    }
-                }
-                // not a dir, and not reusable
-                // don't remove if the cwd, we want that error
-                if (entry.absolute === this.cwd) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
-            });
-        };
-        if (this[CHECKED_CWD]) {
-            start();
-        }
-        else {
-            checkCwd();
-        }
-    }
-    [MAKEFS](er, entry, done) {
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        switch (entry.type) {
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-                return this[FILE](entry, done);
-            case 'Link':
-                return this[HARDLINK](entry, done);
-            case 'SymbolicLink':
-                return this[SYMLINK](entry, done);
-            case 'Directory':
-            case 'GNUDumpDir':
-                return this[DIRECTORY](entry, done);
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        // XXX: get the type ('symlink' or 'junction') for windows
-        node_fs_1.default[link](linkpath, String(entry.absolute), er => {
-            if (er) {
-                this[ONERROR](er, entry);
-            }
-            else {
-                this[UNPEND]();
-                entry.resume();
-            }
-            done();
-        });
-    }
-}
-exports.Unpack = Unpack;
-const callSync = (fn) => {
-    try {
-        return [null, fn()];
-    }
-    catch (er) {
-        return [er, null];
-    }
-};
-class UnpackSync extends Unpack {
-    sync = true;
-    [MAKEFS](er, entry) {
-        return super[MAKEFS](er, entry, () => { });
-    }
-    [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
-        if (!this[CHECKED_CWD]) {
-            const er = this[MKDIR](this.cwd, this.dmode);
-            if (er) {
-                return this[ONERROR](er, entry);
-            }
-            this[CHECKED_CWD] = true;
-        }
-        // don't bother to make the parent if the current entry is the cwd,
-        // we've already checked it.
-        if (entry.absolute !== this.cwd) {
-            const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
-            if (parent !== this.cwd) {
-                const mkParent = this[MKDIR](parent, this.dmode);
-                if (mkParent) {
-                    return this[ONERROR](mkParent, entry);
-                }
-            }
-        }
-        const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
-        if (st &&
-            (this.keep ||
-                /* c8 ignore next */
-                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-            return this[SKIP](entry);
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-            return this[MAKEFS](null, entry);
-        }
-        if (st.isDirectory()) {
-            if (entry.type === 'Directory') {
-                const needChmod = this.chmod &&
-                    entry.mode &&
-                    (st.mode & 0o7777) !== entry.mode;
-                const [er] = needChmod ?
-                    callSync(() => {
-                        node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
-                    })
-                    : [];
-                return this[MAKEFS](er, entry);
-            }
-            // not a dir entry, have to remove it
-            const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
-            this[MAKEFS](er, entry);
-        }
-        // not a dir, and not reusable.
-        // don't remove if it's the cwd, since we want that error.
-        const [er] = entry.absolute === this.cwd ?
-            []
-            : callSync(() => unlinkFileSync(String(entry.absolute)));
-        this[MAKEFS](er, entry);
-    }
-    [FILE](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const oner = (er) => {
-            let closeError;
-            try {
-                node_fs_1.default.closeSync(fd);
-            }
-            catch (e) {
-                closeError = e;
-            }
-            if (er || closeError) {
-                this[ONERROR](er || closeError, entry);
-            }
-            done();
-        };
-        let fd;
-        try {
-            fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
-        }
-        catch (er) {
-            return oner(er);
-        }
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => this[ONERROR](er, entry));
-            entry.pipe(tx);
-        }
-        tx.on('data', (chunk) => {
-            try {
-                node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
-            }
-            catch (er) {
-                oner(er);
-            }
-        });
-        tx.on('end', () => {
-            let er = null;
-            // try both, falling futimes back to utimes
-            // if either fails, handle the first error
-            if (entry.mtime && !this.noMtime) {
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                try {
-                    node_fs_1.default.futimesSync(fd, atime, mtime);
-                }
-                catch (futimeser) {
-                    try {
-                        node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
-                    }
-                    catch (utimeser) {
-                        er = futimeser;
-                    }
-                }
-            }
-            if (this[DOCHOWN](entry)) {
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                try {
-                    node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
-                }
-                catch (fchowner) {
-                    try {
-                        node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
-                    }
-                    catch (chowner) {
-                        er = er || fchowner;
-                    }
-                }
-            }
-            oner(er);
-        });
-    }
-    [DIRECTORY](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        const er = this[MKDIR](String(entry.absolute), mode);
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        if (entry.mtime && !this.noMtime) {
-            try {
-                node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-        if (this[DOCHOWN](entry)) {
-            try {
-                node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
-            }
-            catch (er) { }
-        }
-        done();
-        entry.resume();
-    }
-    [MKDIR](dir, mode) {
-        try {
-            return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
-                uid: this.uid,
-                gid: this.gid,
-                processUid: this.processUid,
-                processGid: this.processGid,
-                umask: this.processUmask,
-                preserve: this.preservePaths,
-                unlink: this.unlink,
-                cache: this.dirCache,
-                cwd: this.cwd,
-                mode: mode,
-            });
-        }
-        catch (er) {
-            return er;
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        const ls = `${link}Sync`;
-        try {
-            node_fs_1.default[ls](linkpath, String(entry.absolute));
-            done();
-            entry.resume();
-        }
-        catch (er) {
-            return this[ONERROR](er, entry);
-        }
-    }
-}
-exports.UnpackSync = UnpackSync;
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
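Of the logic deleted with unpack.js, the Windows unlink workaround is the least obvious: because unlink is not atomic on Windows, the file is first renamed to a throwaway `.DELETE.<random>` name and the renamed file is what gets unlinked, so a late-committing unlink can never clobber the replacement written to the original path. A standalone sketch of just that piece, same shape as the unlinkFile helper above:

    const fs = require('node:fs')
    const { randomBytes } = require('node:crypto')

    const unlinkFile = (path, cb) => {
      if (process.platform !== 'win32') {
        return fs.unlink(path, cb)
      }
      // rename first, then unlink the renamed file, so the original path is
      // immediately free for the entry that is about to be written there
      const name = path + '.DELETE.' + randomBytes(16).toString('hex')
      fs.rename(path, name, er => er ? cb(er) : fs.unlink(name, cb))
    }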
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/update.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/update.js
deleted file mode 100644
index 7687896f4bfee..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/update.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-// tar -u
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.update = void 0;
-const make_command_js_1 = require("./make-command.js");
-const replace_js_1 = require("./replace.js");
-// just call tar.r with the filter and mtimeCache
-exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => {
-    replace_js_1.replace.validate?.(opt, entries);
-    mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
-    const filter = opt.filter;
-    if (!opt.mtimeCache) {
-        opt.mtimeCache = new Map();
-    }
-    opt.filter =
-        filter ?
-            (path, stat) => filter(path, stat) &&
-                !(
-                /* c8 ignore start */
-                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                    (stat.mtime ?? 0))
-                /* c8 ignore stop */
-                )
-            : (path, stat) => !(
-            /* c8 ignore start */
-            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                (stat.mtime ?? 0))
-            /* c8 ignore stop */
-            );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
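The removed update.js is just replace with an mtime filter layered on top: a path is skipped when the mtime already recorded for it (in opt.mtimeCache) is newer than its on-disk mtime. A minimal sketch via the public `u` command; archive.tar and src/index.js are illustrative names:

    const tar = require('tar')

    async function updateArchive () {
      // src/index.js is re-added only if it changed since it was last written
      // into archive.tar; otherwise the existing entry is left alone.
      await tar.u({ file: 'archive.tar' }, ['src/index.js'])
    }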
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/warn-method.js
deleted file mode 100644
index f25502776e36a..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/warn-method.js
+++ /dev/null
@@ -1,31 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.warnMethod = void 0;
-const warnMethod = (self, code, message, data = {}) => {
-    if (self.file) {
-        data.file = self.file;
-    }
-    if (self.cwd) {
-        data.cwd = self.cwd;
-    }
-    data.code =
-        (message instanceof Error &&
-            message.code) ||
-            code;
-    data.tarCode = code;
-    if (!self.strict && data.recoverable !== false) {
-        if (message instanceof Error) {
-            data = Object.assign(message, data);
-            message = message.message;
-        }
-        self.emit('warn', code, message, data);
-    }
-    else if (message instanceof Error) {
-        self.emit('error', Object.assign(message, data));
-    }
-    else {
-        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
-    }
-};
-exports.warnMethod = warnMethod;
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
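The removed warnMethod implements the warn-versus-error split used throughout the package: recoverable problems become 'warn' events unless strict mode is on or recoverable is explicitly false, in which case they become 'error'. A rough sketch of consuming that from the public API, assuming the onwarn option and a hypothetical maybe-damaged.tar:

    const tar = require('tar')

    tar.t({
      file: 'maybe-damaged.tar',
      onwarn: (code, message, data) => {
        // data.tarCode is always the tar-level code; data.code may instead be the
        // underlying Error's code, and data.file / data.cwd are filled in when known.
        console.warn(code, message, data.tarCode)
      },
    })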
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/winchars.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/winchars.js
deleted file mode 100644
index c0a4405812929..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/winchars.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.decode = exports.encode = void 0;
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-exports.encode = encode;
-const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-exports.decode = decode;
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
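The removed winchars.js maps the characters that are illegal in Windows file names (| < > ? :) to their 0xf000-shifted Unicode forms on write and back again on read, so such names survive a round trip through a Windows filesystem. An equivalent regex-based sketch of the round trip (illustrative re-statement, not the shipped code):

    // shift reserved characters up into the 0xf000 range, and back down again
    const encode = s => s.replace(/[|<>?:]/g, c => String.fromCharCode(0xf000 + c.charCodeAt(0)))
    const decode = s => s.replace(/[\uf03a\uf03c\uf03e\uf03f\uf07c]/g,
      c => String.fromCharCode(c.charCodeAt(0) - 0xf000))

    console.log(decode(encode('what?.txt')) === 'what?.txt')  // true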
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/write-entry.js
deleted file mode 100644
index 45b7efeb79502..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/commonjs/write-entry.js
+++ /dev/null
@@ -1,689 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0;
-const fs_1 = __importDefault(require("fs"));
-const minipass_1 = require("minipass");
-const path_1 = __importDefault(require("path"));
-const header_js_1 = require("./header.js");
-const mode_fix_js_1 = require("./mode-fix.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const options_js_1 = require("./options.js");
-const pax_js_1 = require("./pax.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const warn_method_js_1 = require("./warn-method.js");
-const winchars = __importStar(require("./winchars.js"));
-const prefixPath = (path, prefix) => {
-    if (!prefix) {
-        return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path);
-    }
-    path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, '');
-    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-class WriteEntry extends minipass_1.Minipass {
-    path;
-    portable;
-    myuid = (process.getuid && process.getuid()) || 0;
-    // until node has builtin pwnam functions, this'll have to do
-    myuser = process.env.USER || '';
-    maxReadSize;
-    linkCache;
-    statCache;
-    preservePaths;
-    cwd;
-    strict;
-    mtime;
-    noPax;
-    noMtime;
-    prefix;
-    fd;
-    blockLen = 0;
-    blockRemain = 0;
-    buf;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    offset = 0;
-    win32;
-    absolute;
-    header;
-    type;
-    linkpath;
-    stat;
-    onWriteEntry;
-    #hadError = false;
-    constructor(p, opt_ = {}) {
-        const opt = (0, options_js_1.dealias)(opt_);
-        super();
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p);
-        // suppress atime, ctime, uid, gid, uname, gname
-        this.portable = !!opt.portable;
-        this.maxReadSize = opt.maxReadSize || maxReadSize;
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.preservePaths = !!opt.preservePaths;
-        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd());
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.mtime = opt.mtime;
-        this.prefix =
-            opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined;
-        this.onWriteEntry = opt.onWriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.win32 = !!opt.win32 || process.platform === 'win32';
-        if (this.win32) {
-            // force the \ to / normalization, since we might not *actually*
-            // be on windows, but want \ to be considered a path separator.
-            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
-            p = p.replace(/\\/g, '/');
-        }
-        this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p));
-        if (this.path === '') {
-            this.path = './';
-        }
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        const cs = this.statCache.get(this.absolute);
-        if (cs) {
-            this[ONLSTAT](cs);
-        }
-        else {
-            this[LSTAT]();
-        }
-    }
-    warn(code, message, data = {}) {
-        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    emit(ev, ...data) {
-        if (ev === 'error') {
-            this.#hadError = true;
-        }
-        return super.emit(ev, ...data);
-    }
-    [LSTAT]() {
-        fs_1.default.lstat(this.absolute, (er, stat) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONLSTAT](stat);
-        });
-    }
-    [ONLSTAT](stat) {
-        this.statCache.set(this.absolute, stat);
-        this.stat = stat;
-        if (!stat.isFile()) {
-            stat.size = 0;
-        }
-        this.type = getType(stat);
-        this.emit('stat', stat);
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        switch (this.type) {
-            case 'File':
-                return this[FILE]();
-            case 'Directory':
-                return this[DIRECTORY]();
-            case 'SymbolicLink':
-                return this[SYMLINK]();
-            // unsupported types are ignored.
-            default:
-                return this.end();
-        }
-    }
-    [MODE](mode) {
-        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [HEADER]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot write header before stat');
-        }
-        /* c8 ignore stop */
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.onWriteEntry?.(this);
-        this.header = new header_js_1.Header({
-            path: this[PREFIX](this.path),
-            // only apply the prefix to hard links.
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this[MODE](this.stat.mode),
-            uid: this.portable ? undefined : this.stat.uid,
-            gid: this.portable ? undefined : this.stat.gid,
-            size: this.stat.size,
-            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
-            /* c8 ignore next */
-            type: this.type === 'Unsupported' ? undefined : this.type,
-            uname: this.portable ? undefined
-                : this.stat.uid === this.myuid ? this.myuser
-                    : '',
-            atime: this.portable ? undefined : this.stat.atime,
-            ctime: this.portable ? undefined : this.stat.ctime,
-        });
-        if (this.header.encode() && !this.noPax) {
-            super.write(new pax_js_1.Pax({
-                atime: this.portable ? undefined : this.header.atime,
-                ctime: this.portable ? undefined : this.header.ctime,
-                gid: this.portable ? undefined : this.header.gid,
-                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.header.size,
-                uid: this.portable ? undefined : this.header.uid,
-                uname: this.portable ? undefined : this.header.uname,
-                dev: this.portable ? undefined : this.stat.dev,
-                ino: this.portable ? undefined : this.stat.ino,
-                nlink: this.portable ? undefined : this.stat.nlink,
-            }).encode());
-        }
-        const block = this.header?.block;
-        /* c8 ignore start */
-        if (!block) {
-            throw new Error('failed to encode header');
-        }
-        /* c8 ignore stop */
-        super.write(block);
-    }
-    [DIRECTORY]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create directory entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.path.slice(-1) !== '/') {
-            this.path += '/';
-        }
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [SYMLINK]() {
-        fs_1.default.readlink(this.absolute, (er, linkpath) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADLINK](linkpath);
-        });
-    }
-    [ONREADLINK](linkpath) {
-        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath);
-        this[HEADER]();
-        this.end();
-    }
-    [HARDLINK](linkpath) {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create link entry without stat');
-        }
-        /* c8 ignore stop */
-        this.type = 'Link';
-        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath));
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [FILE]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create file entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.stat.nlink > 1) {
-            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
-            const linkpath = this.linkCache.get(linkKey);
-            if (linkpath?.indexOf(this.cwd) === 0) {
-                return this[HARDLINK](linkpath);
-            }
-            this.linkCache.set(linkKey, this.absolute);
-        }
-        this[HEADER]();
-        if (this.stat.size === 0) {
-            return this.end();
-        }
-        this[OPENFILE]();
-    }
-    [OPENFILE]() {
-        fs_1.default.open(this.absolute, 'r', (er, fd) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONOPENFILE](fd);
-        });
-    }
-    [ONOPENFILE](fd) {
-        this.fd = fd;
-        if (this.#hadError) {
-            return this[CLOSE]();
-        }
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('should stat before calling onopenfile');
-        }
-        /* c8 ignore start */
-        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
-        this.blockRemain = this.blockLen;
-        const bufLen = Math.min(this.blockLen, this.maxReadSize);
-        this.buf = Buffer.allocUnsafe(bufLen);
-        this.offset = 0;
-        this.pos = 0;
-        this.remain = this.stat.size;
-        this.length = this.buf.length;
-        this[READ]();
-    }
-    [READ]() {
-        const { fd, buf, offset, length, pos } = this;
-        if (fd === undefined || buf === undefined) {
-            throw new Error('cannot read file without first opening');
-        }
-        fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-            if (er) {
-                // ignoring the error from close(2) is a bad practice, but at
-                // this point we already have an error, don't need another one
-                return this[CLOSE](() => this.emit('error', er));
-            }
-            this[ONREAD](bytesRead);
-        });
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs_1.default.close(this.fd, cb);
-    }
-    [ONREAD](bytesRead) {
-        if (bytesRead <= 0 && this.remain > 0) {
-            const er = Object.assign(new Error('encountered unexpected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        if (bytesRead > this.remain) {
-            const er = Object.assign(new Error('did not encounter expected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('should have created buffer prior to reading');
-        }
-        /* c8 ignore stop */
-        // null out the rest of the buffer, if we could fit the block padding
-        // at the end of this loop, we've incremented bytesRead and this.remain
-        // to be incremented up to the blockRemain level, as if we had expected
-        // to get a null-padded file, and read it until the end.  then we will
-        // decrement both remain and blockRemain by bytesRead, and know that we
-        // reached the expected EOF, without any null buffer to append.
-        if (bytesRead === this.remain) {
-            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-                this.buf[i + this.offset] = 0;
-                bytesRead++;
-                this.remain++;
-            }
-        }
-        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
-            this.buf
-            : this.buf.subarray(this.offset, this.offset + bytesRead);
-        const flushed = this.write(chunk);
-        if (!flushed) {
-            this[AWAITDRAIN](() => this[ONDRAIN]());
-        }
-        else {
-            this[ONDRAIN]();
-        }
-    }
-    [AWAITDRAIN](cb) {
-        this.once('drain', cb);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        if (this.blockRemain < chunk.length) {
-            const er = Object.assign(new Error('writing more data than expected'), {
-                path: this.absolute,
-            });
-            return this.emit('error', er);
-        }
-        this.remain -= chunk.length;
-        this.blockRemain -= chunk.length;
-        this.pos += chunk.length;
-        this.offset += chunk.length;
-        return super.write(chunk, null, cb);
-    }
-    [ONDRAIN]() {
-        if (!this.remain) {
-            if (this.blockRemain) {
-                super.write(Buffer.alloc(this.blockRemain));
-            }
-            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('buffer lost somehow in ONDRAIN');
-        }
-        /* c8 ignore stop */
-        if (this.offset >= this.length) {
-            // if we only have a smaller bit left to read, alloc a smaller buffer
-            // otherwise, keep it the same length it was before.
-            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
-            this.offset = 0;
-        }
-        this.length = this.buf.length - this.offset;
-        this[READ]();
-    }
-}
-exports.WriteEntry = WriteEntry;
-class WriteEntrySync extends WriteEntry {
-    sync = true;
-    [LSTAT]() {
-        this[ONLSTAT](fs_1.default.lstatSync(this.absolute));
-    }
-    [SYMLINK]() {
-        this[ONREADLINK](fs_1.default.readlinkSync(this.absolute));
-    }
-    [OPENFILE]() {
-        this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r'));
-    }
-    [READ]() {
-        let threw = true;
-        try {
-            const { fd, buf, offset, length, pos } = this;
-            /* c8 ignore start */
-            if (fd === undefined || buf === undefined) {
-                throw new Error('fd and buf must be set in READ method');
-            }
-            /* c8 ignore stop */
-            const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos);
-            this[ONREAD](bytesRead);
-            threw = false;
-        }
-        finally {
-            // ignoring the error from close(2) is a bad practice, but at
-            // this point we already have an error, don't need another one
-            if (threw) {
-                try {
-                    this[CLOSE](() => { });
-                }
-                catch (er) { }
-            }
-        }
-    }
-    [AWAITDRAIN](cb) {
-        cb();
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs_1.default.closeSync(this.fd);
-        cb();
-    }
-}
-exports.WriteEntrySync = WriteEntrySync;
-class WriteEntryTar extends minipass_1.Minipass {
-    blockLen = 0;
-    blockRemain = 0;
-    buf = 0;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    preservePaths;
-    portable;
-    strict;
-    noPax;
-    noMtime;
-    readEntry;
-    type;
-    prefix;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    header;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    size;
-    onWriteEntry;
-    warn(code, message, data = {}) {
-        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    constructor(readEntry, opt_ = {}) {
-        const opt = (0, options_js_1.dealias)(opt_);
-        super();
-        this.preservePaths = !!opt.preservePaths;
-        this.portable = !!opt.portable;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.onWriteEntry = opt.onWriteEntry;
-        this.readEntry = readEntry;
-        const { type } = readEntry;
-        /* c8 ignore start */
-        if (type === 'Unsupported') {
-            throw new Error('writing entry that should be ignored');
-        }
-        /* c8 ignore stop */
-        this.type = type;
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.prefix = opt.prefix;
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path);
-        this.mode =
-            readEntry.mode !== undefined ?
-                this[MODE](readEntry.mode)
-                : undefined;
-        this.uid = this.portable ? undefined : readEntry.uid;
-        this.gid = this.portable ? undefined : readEntry.gid;
-        this.uname = this.portable ? undefined : readEntry.uname;
-        this.gname = this.portable ? undefined : readEntry.gname;
-        this.size = readEntry.size;
-        this.mtime =
-            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
-        this.atime = this.portable ? undefined : readEntry.atime;
-        this.ctime = this.portable ? undefined : readEntry.ctime;
-        this.linkpath =
-            readEntry.linkpath !== undefined ?
-                (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath)
-                : undefined;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.remain = readEntry.size;
-        this.blockRemain = readEntry.startBlockSize;
-        this.onWriteEntry?.(this);
-        this.header = new header_js_1.Header({
-            path: this[PREFIX](this.path),
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this.mode,
-            uid: this.portable ? undefined : this.uid,
-            gid: this.portable ? undefined : this.gid,
-            size: this.size,
-            mtime: this.noMtime ? undefined : this.mtime,
-            type: this.type,
-            uname: this.portable ? undefined : this.uname,
-            atime: this.portable ? undefined : this.atime,
-            ctime: this.portable ? undefined : this.ctime,
-        });
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        if (this.header.encode() && !this.noPax) {
-            super.write(new pax_js_1.Pax({
-                atime: this.portable ? undefined : this.atime,
-                ctime: this.portable ? undefined : this.ctime,
-                gid: this.portable ? undefined : this.gid,
-                mtime: this.noMtime ? undefined : this.mtime,
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.size,
-                uid: this.portable ? undefined : this.uid,
-                uname: this.portable ? undefined : this.uname,
-                dev: this.portable ? undefined : this.readEntry.dev,
-                ino: this.portable ? undefined : this.readEntry.ino,
-                nlink: this.portable ? undefined : this.readEntry.nlink,
-            }).encode());
-        }
-        const b = this.header?.block;
-        /* c8 ignore start */
-        if (!b)
-            throw new Error('failed to encode header');
-        /* c8 ignore stop */
-        super.write(b);
-        readEntry.pipe(this);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [MODE](mode) {
-        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        const writeLen = chunk.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        this.blockRemain -= writeLen;
-        return super.write(chunk, cb);
-    }
-    end(chunk, encoding, cb) {
-        if (this.blockRemain) {
-            super.write(Buffer.alloc(this.blockRemain));
-        }
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding ?? 'utf8');
-        }
-        if (cb)
-            this.once('finish', cb);
-        chunk ? super.end(chunk, cb) : super.end(cb);
-        /* c8 ignore stop */
-        return this;
-    }
-}
-exports.WriteEntryTar = WriteEntryTar;
-const getType = (stat) => stat.isFile() ? 'File'
-    : stat.isDirectory() ? 'Directory'
-        : stat.isSymbolicLink() ? 'SymbolicLink'
-            : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/create.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/create.js
deleted file mode 100644
index 512a9911d70d5..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/create.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import path from 'node:path';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Pack, PackSync } from './pack.js';
-const createFileSync = (opt, files) => {
-    const p = new PackSync(opt);
-    const stream = new WriteStreamSync(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
-    const p = new Pack(opt);
-    const stream = new WriteStream(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    const promise = new Promise((res, rej) => {
-        stream.on('error', rej);
-        stream.on('close', res);
-        p.on('error', rej);
-    });
-    addFilesAsync(p, files);
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            list({
-                file: path.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await list({
-                file: path.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => {
-                    p.add(entry);
-                },
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-const createSync = (opt, files) => {
-    const p = new PackSync(opt);
-    addFilesSync(p, files);
-    return p;
-};
-const createAsync = (opt, files) => {
-    const p = new Pack(opt);
-    addFilesAsync(p, files);
-    return p;
-};
-export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
-    if (!files?.length) {
-        throw new TypeError('no paths specified to add to archive');
-    }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/cwd-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/cwd-error.js
deleted file mode 100644
index 289a066b8e031..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/cwd-error.js
+++ /dev/null
@@ -1,14 +0,0 @@
-export class CwdError extends Error {
-    path;
-    code;
-    syscall = 'chdir';
-    constructor(path, code) {
-        super(`${code}: Cannot cd into '${path}'`);
-        this.path = path;
-        this.code = code;
-    }
-    get name() {
-        return 'CwdError';
-    }
-}
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/extract.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/extract.js
deleted file mode 100644
index 2274feef26e78..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/extract.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// tar -x
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { filesFilter } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Unpack, UnpackSync } from './unpack.js';
-const extractFileSync = (opt) => {
-    const u = new UnpackSync(opt);
-    const file = opt.file;
-    const stat = fs.statSync(file);
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const stream = new fsm.ReadStreamSync(file, {
-        readSize: readSize,
-        size: stat.size,
-    });
-    stream.pipe(u);
-};
-const extractFile = (opt, _) => {
-    const u = new Unpack(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        u.on('error', reject);
-        u.on('close', resolve);
-        // This trades a zero-byte read() syscall for a stat
-        // However, it will usually result in less memory allocation
-        fs.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(u);
-            }
-        });
-    });
-    return p;
-};
-export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => {
-    if (files?.length)
-        filesFilter(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/get-write-flag.js
deleted file mode 100644
index 2c7f3e8b28fda..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/get-write-flag.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-import fs from 'fs';
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
-    fs.constants.UV_FS_O_FILEMAP ||
-    0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-export const getWriteFlag = !fMapEnabled ?
-    () => 'w'
-    : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/header.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/header.js
deleted file mode 100644
index e15192b14b16e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/header.js
+++ /dev/null
@@ -1,279 +0,0 @@
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-import { posix as pathModule } from 'node:path';
-import * as large from './large-numbers.js';
-import * as types from './types.js';
-export class Header {
-    cksumValid = false;
-    needPax = false;
-    nullBlock = false;
-    block;
-    path;
-    mode;
-    uid;
-    gid;
-    size;
-    cksum;
-    #type = 'Unsupported';
-    linkpath;
-    uname;
-    gname;
-    devmaj = 0;
-    devmin = 0;
-    atime;
-    ctime;
-    mtime;
-    charset;
-    comment;
-    constructor(data, off = 0, ex, gex) {
-        if (Buffer.isBuffer(data)) {
-            this.decode(data, off || 0, ex, gex);
-        }
-        else if (data) {
-            this.#slurp(data);
-        }
-    }
-    decode(buf, off, ex, gex) {
-        if (!off) {
-            off = 0;
-        }
-        if (!buf || !(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
-        this.cksum = decNumber(buf, off + 148, 12);
-        // if we have extended or global extended headers, apply them now
-        // See https://github.com/npm/node-tar/pull/187
-        // Apply global before local, so it overrides
-        if (gex)
-            this.#slurp(gex, true);
-        if (ex)
-            this.#slurp(ex);
-        // old tar versions marked dirs as a file with a trailing /
-        const t = decString(buf, off + 156, 1);
-        if (types.isCode(t)) {
-            this.#type = t || '0';
-        }
-        if (this.#type === '0' && this.path.slice(-1) === '/') {
-            this.#type = '5';
-        }
-        // tar implementations sometimes incorrectly put the stat(dir).size
-        // as the size in the tarball, even though Directory entries are
-        // not able to have any body at all.  In the very rare chance that
-        // it actually DOES have a body, we weren't going to do anything with
-        // it anyway, and it'll just be a warning about an invalid header.
-        if (this.#type === '5') {
-            this.size = 0;
-        }
-        this.linkpath = decString(buf, off + 157, 100);
-        if (buf.subarray(off + 257, off + 265).toString() ===
-            'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
-            /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
-            /* c8 ignore stop */
-            if (buf[off + 475] !== 0) {
-                // definitely a prefix, definitely >130 chars.
-                const prefix = decString(buf, off + 345, 155);
-                this.path = prefix + '/' + this.path;
-            }
-            else {
-                const prefix = decString(buf, off + 345, 130);
-                if (prefix) {
-                    this.path = prefix + '/' + this.path;
-                }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
-            }
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksumValid = sum === this.cksum;
-        if (this.cksum === undefined && sum === 8 * 0x20) {
-            this.nullBlock = true;
-        }
-    }
-    #slurp(ex, gex = false) {
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex) ||
-                (k === 'linkpath' && gex) ||
-                k === 'global');
-        })));
-    }
-    encode(buf, off = 0) {
-        if (!buf) {
-            buf = this.block = Buffer.alloc(512);
-        }
-        if (this.#type === 'Unsupported') {
-            this.#type = '0';
-        }
-        if (!(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        const prefixSize = this.ctime || this.atime ? 130 : 155;
-        const split = splitPrefix(this.path || '', prefixSize);
-        const path = split[0];
-        const prefix = split[1];
-        this.needPax = !!split[2];
-        this.needPax = encString(buf, off, 100, path) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 124, 12, this.size) || this.needPax;
-        this.needPax =
-            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
-        buf[off + 156] = this.#type.charCodeAt(0);
-        this.needPax =
-            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
-        buf.write('ustar\u000000', off + 257, 8);
-        this.needPax =
-            encString(buf, off + 265, 32, this.uname) || this.needPax;
-        this.needPax =
-            encString(buf, off + 297, 32, this.gname) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
-        this.needPax =
-            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
-        if (buf[off + 475] !== 0) {
-            this.needPax =
-                encString(buf, off + 345, 155, prefix) || this.needPax;
-        }
-        else {
-            this.needPax =
-                encString(buf, off + 345, 130, prefix) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 476, 12, this.atime) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksum = sum;
-        encNumber(buf, off + 148, 8, this.cksum);
-        this.cksumValid = true;
-        return this.needPax;
-    }
-    get type() {
-        return (this.#type === 'Unsupported' ?
-            this.#type
-            : types.name.get(this.#type));
-    }
-    get typeKey() {
-        return this.#type;
-    }
-    set type(type) {
-        const c = String(types.code.get(type));
-        if (types.isCode(c) || c === 'Unsupported') {
-            this.#type = c;
-        }
-        else if (types.isCode(type)) {
-            this.#type = type;
-        }
-        else {
-            throw new TypeError('invalid entry type: ' + type);
-        }
-    }
-}
-const splitPrefix = (p, prefixSize) => {
-    const pathSize = 100;
-    let pp = p;
-    let prefix = '';
-    let ret = undefined;
-    const root = pathModule.parse(p).root || '.';
-    if (Buffer.byteLength(pp) < pathSize) {
-        ret = [pp, prefix, false];
-    }
-    else {
-        // first set prefix to the dir, and path to the base
-        prefix = pathModule.dirname(pp);
-        pp = pathModule.basename(pp);
-        do {
-            if (Buffer.byteLength(pp) <= pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // both fit!
-                ret = [pp, prefix, false];
-            }
-            else if (Buffer.byteLength(pp) > pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // prefix fits in prefix, but path doesn't fit in path
-                ret = [pp.slice(0, pathSize - 1), prefix, true];
-            }
-            else {
-                // make path take a bit from prefix
-                pp = pathModule.join(pathModule.basename(prefix), pp);
-                prefix = pathModule.dirname(prefix);
-            }
-        } while (prefix !== root && ret === undefined);
-        // at this point, found no resolution, just truncate
-        if (!ret) {
-            ret = [p.slice(0, pathSize - 1), '', true];
-        }
-    }
-    return ret;
-};
-const decString = (buf, off, size) => buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
-    large.parse(buf.subarray(off, off + size))
-    : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*$/, '')
-    .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-    12: 0o77777777777,
-    8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
-    : num > MAXNUM[size] || num < 0 ?
-        (large.encode(num, buf.subarray(off, off + size)), true)
-        : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
-    str
-    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
-    str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/index.js
deleted file mode 100644
index 1bac6415c8d73..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-export * from './create.js';
-export { create as c } from './create.js';
-export * from './extract.js';
-export { extract as x } from './extract.js';
-export * from './header.js';
-export * from './list.js';
-export { list as t } from './list.js';
-// classes
-export * from './pack.js';
-export * from './parse.js';
-export * from './pax.js';
-export * from './read-entry.js';
-export * from './replace.js';
-export { replace as r } from './replace.js';
-export * as types from './types.js';
-export * from './unpack.js';
-export * from './update.js';
-export { update as u } from './update.js';
-export * from './write-entry.js';
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/large-numbers.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/large-numbers.js
deleted file mode 100644
index 4f2f7e5f14fc1..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/large-numbers.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-export const encode = (num, buf) => {
-    if (!Number.isSafeInteger(num)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('cannot encode number outside of javascript safe integer range');
-    }
-    else if (num < 0) {
-        encodeNegative(num, buf);
-    }
-    else {
-        encodePositive(num, buf);
-    }
-    return buf;
-};
-const encodePositive = (num, buf) => {
-    buf[0] = 0x80;
-    for (var i = buf.length; i > 1; i--) {
-        buf[i - 1] = num & 0xff;
-        num = Math.floor(num / 0x100);
-    }
-};
-const encodeNegative = (num, buf) => {
-    buf[0] = 0xff;
-    var flipped = false;
-    num = num * -1;
-    for (var i = buf.length; i > 1; i--) {
-        var byte = num & 0xff;
-        num = Math.floor(num / 0x100);
-        if (flipped) {
-            buf[i - 1] = onesComp(byte);
-        }
-        else if (byte === 0) {
-            buf[i - 1] = 0;
-        }
-        else {
-            flipped = true;
-            buf[i - 1] = twosComp(byte);
-        }
-    }
-};
-export const parse = (buf) => {
-    const pre = buf[0];
-    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
-        : pre === 0xff ? twos(buf)
-            : null;
-    if (value === null) {
-        throw Error('invalid base256 encoding');
-    }
-    if (!Number.isSafeInteger(value)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('parsed number outside of javascript safe integer range');
-    }
-    return value;
-};
-const twos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    var flipped = false;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        var f;
-        if (flipped) {
-            f = onesComp(byte);
-        }
-        else if (byte === 0) {
-            f = byte;
-        }
-        else {
-            flipped = true;
-            f = twosComp(byte);
-        }
-        if (f !== 0) {
-            sum -= f * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const pos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        if (byte !== 0) {
-            sum += byte * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/list.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/list.js
deleted file mode 100644
index f49068400b6c9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/list.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// tar -t
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { dirname, parse } from 'path';
-import { makeCommand } from './make-command.js';
-import { Parser } from './parse.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const onReadEntryFunction = (opt) => {
-    const onReadEntry = opt.onReadEntry;
-    opt.onReadEntry =
-        onReadEntry ?
-            e => {
-                onReadEntry(e);
-                e.resume();
-            }
-            : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-export const filesFilter = (opt, files) => {
-    const map = new Map(files.map(f => [stripTrailingSlashes(f), true]));
-    const filter = opt.filter;
-    const mapHas = (file, r = '') => {
-        const root = r || parse(file).root || '.';
-        let ret;
-        if (file === root)
-            ret = false;
-        else {
-            const m = map.get(file);
-            if (m !== undefined) {
-                ret = m;
-            }
-            else {
-                ret = mapHas(dirname(file), root);
-            }
-        }
-        map.set(file, ret);
-        return ret;
-    };
-    opt.filter =
-        filter ?
-            (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file))
-            : file => mapHas(stripTrailingSlashes(file));
-};
-const listFileSync = (opt) => {
-    const p = new Parser(opt);
-    const file = opt.file;
-    let fd;
-    try {
-        const stat = fs.statSync(file);
-        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-        if (stat.size < readSize) {
-            p.end(fs.readFileSync(file));
-        }
-        else {
-            let pos = 0;
-            const buf = Buffer.allocUnsafe(readSize);
-            fd = fs.openSync(file, 'r');
-            while (pos < stat.size) {
-                const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
-                pos += bytesRead;
-                p.write(buf.subarray(0, bytesRead));
-            }
-            p.end();
-        }
-    }
-    finally {
-        if (typeof fd === 'number') {
-            try {
-                fs.closeSync(fd);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-    }
-};
-const listFile = (opt, _files) => {
-    const parse = new Parser(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        parse.on('error', reject);
-        parse.on('end', resolve);
-        fs.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(parse);
-            }
-        });
-    });
-    return p;
-};
-export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => {
-    if (files?.length)
-        filesFilter(opt, files);
-    if (!opt.noResume)
-        onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/make-command.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/make-command.js
deleted file mode 100644
index f2f737bca78fd..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/make-command.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js';
-export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
-    return Object.assign((opt_ = [], entries, cb) => {
-        if (Array.isArray(opt_)) {
-            entries = opt_;
-            opt_ = {};
-        }
-        if (typeof entries === 'function') {
-            cb = entries;
-            entries = undefined;
-        }
-        if (!entries) {
-            entries = [];
-        }
-        else {
-            entries = Array.from(entries);
-        }
-        const opt = dealias(opt_);
-        validate?.(opt, entries);
-        if (isSyncFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncFile(opt, entries);
-        }
-        else if (isAsyncFile(opt)) {
-            const p = asyncFile(opt, entries);
-            // weirdness to make TS happy
-            const c = cb ? cb : undefined;
-            return c ? p.then(() => c(), c) : p;
-        }
-        else if (isSyncNoFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncNoFile(opt, entries);
-        }
-        else if (isAsyncNoFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback only supported with file option');
-            }
-            return asyncNoFile(opt, entries);
-            /* c8 ignore start */
-        }
-        else {
-            throw new Error('impossible options??');
-        }
-        /* c8 ignore stop */
-    }, {
-        syncFile,
-        asyncFile,
-        syncNoFile,
-        asyncNoFile,
-        validate,
-    });
-};
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mkdir.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mkdir.js
deleted file mode 100644
index 13498ef0082f0..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mkdir.js
+++ /dev/null
@@ -1,201 +0,0 @@
-import { chownr, chownrSync } from 'chownr';
-import fs from 'fs';
-import { mkdirp, mkdirpSync } from 'mkdirp';
-import path from 'node:path';
-import { CwdError } from './cwd-error.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { SymlinkError } from './symlink-error.js';
-const cGet = (cache, key) => cache.get(normalizeWindowsPath(key));
-const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val);
-const checkCwd = (dir, cb) => {
-    fs.stat(dir, (er, st) => {
-        if (er || !st.isDirectory()) {
-            er = new CwdError(dir, er?.code || 'ENOTDIR');
-        }
-        cb(er);
-    });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-export const mkdir = (dir, opt, cb) => {
-    dir = normalizeWindowsPath(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o0700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = normalizeWindowsPath(opt.cwd);
-    const done = (er, created) => {
-        if (er) {
-            cb(er);
-        }
-        else {
-            cSet(cache, dir, true);
-            if (created && doChown) {
-                chownr(created, uid, gid, er => done(er));
-            }
-            else if (needChmod) {
-                fs.chmod(dir, mode, cb);
-            }
-            else {
-                cb();
-            }
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        return checkCwd(dir, done);
-    }
-    if (preserve) {
-        return mkdirp(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
-        done);
-    }
-    const sub = normalizeWindowsPath(path.relative(cwd, dir));
-    const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-    if (!parts.length) {
-        return cb(null, created);
-    }
-    const p = parts.shift();
-    const part = normalizeWindowsPath(path.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
-    if (er) {
-        fs.lstat(part, (statEr, st) => {
-            if (statEr) {
-                statEr.path =
-                    statEr.path && normalizeWindowsPath(statEr.path);
-                cb(statEr);
-            }
-            else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-            }
-            else if (unlink) {
-                fs.unlink(part, er => {
-                    if (er) {
-                        return cb(er);
-                    }
-                    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-                });
-            }
-            else if (st.isSymbolicLink()) {
-                return cb(new SymlinkError(part, part + '/' + parts.join('/')));
-            }
-            else {
-                cb(er);
-            }
-        });
-    }
-    else {
-        created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-};
-const checkCwdSync = (dir) => {
-    let ok = false;
-    let code = undefined;
-    try {
-        ok = fs.statSync(dir).isDirectory();
-    }
-    catch (er) {
-        code = er?.code;
-    }
-    finally {
-        if (!ok) {
-            throw new CwdError(dir, code ?? 'ENOTDIR');
-        }
-    }
-};
-export const mkdirSync = (dir, opt) => {
-    dir = normalizeWindowsPath(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = normalizeWindowsPath(opt.cwd);
-    const done = (created) => {
-        cSet(cache, dir, true);
-        if (created && doChown) {
-            chownrSync(created, uid, gid);
-        }
-        if (needChmod) {
-            fs.chmodSync(dir, mode);
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        checkCwdSync(cwd);
-        return done();
-    }
-    if (preserve) {
-        return done(mkdirpSync(dir, mode) ?? undefined);
-    }
-    const sub = normalizeWindowsPath(path.relative(cwd, dir));
-    const parts = sub.split('/');
-    let created = undefined;
-    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
-        part = normalizeWindowsPath(path.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
-        try {
-            fs.mkdirSync(part, mode);
-            created = created || part;
-            cSet(cache, part, true);
-        }
-        catch (er) {
-            const st = fs.lstatSync(part);
-            if (st.isDirectory()) {
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (unlink) {
-                fs.unlinkSync(part);
-                fs.mkdirSync(part, mode);
-                created = created || part;
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (st.isSymbolicLink()) {
-                return new SymlinkError(part, part + '/' + parts.join('/'));
-            }
-        }
-    }
-    return done(created);
-};
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mode-fix.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mode-fix.js
deleted file mode 100644
index 5fd3bb88c1cb2..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/mode-fix.js
+++ /dev/null
@@ -1,25 +0,0 @@
-export const modeFix = (mode, isDir, portable) => {
-    mode &= 0o7777;
-    // in portable mode, use the minimum reasonable umask
-    // if this system creates files with 0o664 by default
-    // (as some linux distros do), then we'll write the
-    // archive with 0o644 instead.  Also, don't ever create
-    // a file that is not readable/writable by the owner.
-    if (portable) {
-        mode = (mode | 0o600) & ~0o22;
-    }
-    // if dirs are readable, then they should be listable
-    if (isDir) {
-        if (mode & 0o400) {
-            mode |= 0o100;
-        }
-        if (mode & 0o40) {
-            mode |= 0o10;
-        }
-        if (mode & 0o4) {
-            mode |= 0o1;
-        }
-    }
-    return mode;
-};
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-unicode.js
deleted file mode 100644
index 94e5095476d6e..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-unicode.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-export const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-windows-path.js
deleted file mode 100644
index 2d97d2b884e62..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/normalize-windows-path.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-export const normalizeWindowsPath = platform !== 'win32' ?
-    (p) => p
-    : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/options.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/options.js
deleted file mode 100644
index a006d36c23c92..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-const argmap = new Map([
-    ['C', 'cwd'],
-    ['f', 'file'],
-    ['z', 'gzip'],
-    ['P', 'preservePaths'],
-    ['U', 'unlink'],
-    ['strip-components', 'strip'],
-    ['stripComponents', 'strip'],
-    ['keep-newer', 'newer'],
-    ['keepNewer', 'newer'],
-    ['keep-newer-files', 'newer'],
-    ['keepNewerFiles', 'newer'],
-    ['k', 'keep'],
-    ['keep-existing', 'keep'],
-    ['keepExisting', 'keep'],
-    ['m', 'noMtime'],
-    ['no-mtime', 'noMtime'],
-    ['p', 'preserveOwner'],
-    ['L', 'follow'],
-    ['h', 'follow'],
-    ['onentry', 'onReadEntry'],
-]);
-export const isSyncFile = (o) => !!o.sync && !!o.file;
-export const isAsyncFile = (o) => !o.sync && !!o.file;
-export const isSyncNoFile = (o) => !!o.sync && !o.file;
-export const isAsyncNoFile = (o) => !o.sync && !o.file;
-export const isSync = (o) => !!o.sync;
-export const isAsync = (o) => !o.sync;
-export const isFile = (o) => !!o.file;
-export const isNoFile = (o) => !o.file;
-const dealiasKey = (k) => {
-    const d = argmap.get(k);
-    if (d)
-        return d;
-    return k;
-};
-export const dealias = (opt = {}) => {
-    if (!opt)
-        return {};
-    const result = {};
-    for (const [key, v] of Object.entries(opt)) {
-        // TS doesn't know that aliases are going to always be the same type
-        const k = dealiasKey(key);
-        result[k] = v;
-    }
-    // affordance for deprecated noChmod -> chmod
-    if (result.chmod === undefined && result.noChmod === false) {
-        result.chmod = true;
-    }
-    delete result.noChmod;
-    return result;
-};
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pack.js
deleted file mode 100644
index f59f32f94201f..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pack.js
+++ /dev/null
@@ -1,445 +0,0 @@
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-import fs from 'fs';
-import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js';
-export class PackJob {
-    path;
-    absolute;
-    entry;
-    stat;
-    readdir;
-    pending = false;
-    ignore = false;
-    piped = false;
-    constructor(path, absolute) {
-        this.path = path || './';
-        this.absolute = absolute;
-    }
-}
-import { Minipass } from 'minipass';
-import * as zlib from 'minizlib';
-import { Yallist } from 'yallist';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-import path from 'path';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class Pack extends Minipass {
-    opt;
-    cwd;
-    maxReadSize;
-    preservePaths;
-    strict;
-    noPax;
-    prefix;
-    linkCache;
-    statCache;
-    file;
-    portable;
-    zip;
-    readdirCache;
-    noDirRecurse;
-    follow;
-    noMtime;
-    mtime;
-    filter;
-    jobs;
-    [WRITEENTRYCLASS];
-    onWriteEntry;
-    [QUEUE];
-    [JOBS] = 0;
-    [PROCESSING] = false;
-    [ENDED] = false;
-    constructor(opt = {}) {
-        //@ts-ignore
-        super();
-        this.opt = opt;
-        this.file = opt.file || '';
-        this.cwd = opt.cwd || process.cwd();
-        this.maxReadSize = opt.maxReadSize;
-        this.preservePaths = !!opt.preservePaths;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.prefix = normalizeWindowsPath(opt.prefix || '');
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.readdirCache = opt.readdirCache || new Map();
-        this.onWriteEntry = opt.onWriteEntry;
-        this[WRITEENTRYCLASS] = WriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
-            }
-            if (opt.gzip) {
-                if (typeof opt.gzip !== 'object') {
-                    opt.gzip = {};
-                }
-                if (this.portable) {
-                    opt.gzip.portable = true;
-                }
-                this.zip = new zlib.Gzip(opt.gzip);
-            }
-            if (opt.brotli) {
-                if (typeof opt.brotli !== 'object') {
-                    opt.brotli = {};
-                }
-                this.zip = new zlib.BrotliCompress(opt.brotli);
-            }
-            /* c8 ignore next */
-            if (!this.zip)
-                throw new Error('impossible');
-            const zip = this.zip;
-            zip.on('data', chunk => super.write(chunk));
-            zip.on('end', () => super.end());
-            zip.on('drain', () => this[ONDRAIN]());
-            this.on('resume', () => zip.resume());
-        }
-        else {
-            this.on('drain', this[ONDRAIN]);
-        }
-        this.noDirRecurse = !!opt.noDirRecurse;
-        this.follow = !!opt.follow;
-        this.noMtime = !!opt.noMtime;
-        if (opt.mtime)
-            this.mtime = opt.mtime;
-        this.filter =
-            typeof opt.filter === 'function' ? opt.filter : () => true;
-        this[QUEUE] = new Yallist();
-        this[JOBS] = 0;
-        this.jobs = Number(opt.jobs) || 4;
-        this[PROCESSING] = false;
-        this[ENDED] = false;
-    }
-    [WRITE](chunk) {
-        return super.write(chunk);
-    }
-    add(path) {
-        this.write(path);
-        return this;
-    }
-    end(path, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof path === 'function') {
-            cb = path;
-            path = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (path) {
-            this.add(path);
-        }
-        this[ENDED] = true;
-        this[PROCESS]();
-        /* c8 ignore next */
-        if (cb)
-            cb();
-        return this;
-    }
-    write(path) {
-        if (this[ENDED]) {
-            throw new Error('write after end');
-        }
-        if (path instanceof ReadEntry) {
-            this[ADDTARENTRY](path);
-        }
-        else {
-            this[ADDFSENTRY](path);
-        }
-        return this.flowing;
-    }
-    [ADDTARENTRY](p) {
-        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path));
-        // in this case, we don't have to wait for the stat
-        if (!this.filter(p.path, p)) {
-            p.resume();
-        }
-        else {
-            const job = new PackJob(p.path, absolute);
-            job.entry = new WriteEntryTar(p, this[ENTRYOPT](job));
-            job.entry.on('end', () => this[JOBDONE](job));
-            this[JOBS] += 1;
-            this[QUEUE].push(job);
-        }
-        this[PROCESS]();
-    }
-    [ADDFSENTRY](p) {
-        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p));
-        this[QUEUE].push(new PackJob(p, absolute));
-        this[PROCESS]();
-    }
-    [STAT](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        const stat = this.follow ? 'stat' : 'lstat';
-        fs[stat](job.absolute, (er, stat) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                this.emit('error', er);
-            }
-            else {
-                this[ONSTAT](job, stat);
-            }
-        });
-    }
-    [ONSTAT](job, stat) {
-        this.statCache.set(job.absolute, stat);
-        job.stat = stat;
-        // now we have the stat, we can filter it.
-        if (!this.filter(job.path, stat)) {
-            job.ignore = true;
-        }
-        this[PROCESS]();
-    }
-    [READDIR](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        fs.readdir(job.absolute, (er, entries) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADDIR](job, entries);
-        });
-    }
-    [ONREADDIR](job, entries) {
-        this.readdirCache.set(job.absolute, entries);
-        job.readdir = entries;
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        if (this[PROCESSING]) {
-            return;
-        }
-        this[PROCESSING] = true;
-        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
-            this[PROCESSJOB](w.value);
-            if (w.value.ignore) {
-                const p = w.next;
-                this[QUEUE].removeNode(w);
-                w.next = p;
-            }
-        }
-        this[PROCESSING] = false;
-        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-            if (this.zip) {
-                this.zip.end(EOF);
-            }
-            else {
-                super.write(EOF);
-                super.end();
-            }
-        }
-    }
-    get [CURRENT]() {
-        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
-    }
-    [JOBDONE](_job) {
-        this[QUEUE].shift();
-        this[JOBS] -= 1;
-        this[PROCESS]();
-    }
-    [PROCESSJOB](job) {
-        if (job.pending) {
-            return;
-        }
-        if (job.entry) {
-            if (job === this[CURRENT] && !job.piped) {
-                this[PIPE](job);
-            }
-            return;
-        }
-        if (!job.stat) {
-            const sc = this.statCache.get(job.absolute);
-            if (sc) {
-                this[ONSTAT](job, sc);
-            }
-            else {
-                this[STAT](job);
-            }
-        }
-        if (!job.stat) {
-            return;
-        }
-        // filtered out!
-        if (job.ignore) {
-            return;
-        }
-        if (!this.noDirRecurse &&
-            job.stat.isDirectory() &&
-            !job.readdir) {
-            const rc = this.readdirCache.get(job.absolute);
-            if (rc) {
-                this[ONREADDIR](job, rc);
-            }
-            else {
-                this[READDIR](job);
-            }
-            if (!job.readdir) {
-                return;
-            }
-        }
-        // we know it doesn't have an entry, because that got checked above
-        job.entry = this[ENTRY](job);
-        if (!job.entry) {
-            job.ignore = true;
-            return;
-        }
-        if (job === this[CURRENT] && !job.piped) {
-            this[PIPE](job);
-        }
-    }
-    [ENTRYOPT](job) {
-        return {
-            onwarn: (code, msg, data) => this.warn(code, msg, data),
-            noPax: this.noPax,
-            cwd: this.cwd,
-            absolute: job.absolute,
-            preservePaths: this.preservePaths,
-            maxReadSize: this.maxReadSize,
-            strict: this.strict,
-            portable: this.portable,
-            linkCache: this.linkCache,
-            statCache: this.statCache,
-            noMtime: this.noMtime,
-            mtime: this.mtime,
-            prefix: this.prefix,
-            onWriteEntry: this.onWriteEntry,
-        };
-    }
-    [ENTRY](job) {
-        this[JOBS] += 1;
-        try {
-            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
-            return e
-                .on('end', () => this[JOBDONE](job))
-                .on('error', er => this.emit('error', er));
-        }
-        catch (er) {
-            this.emit('error', er);
-        }
-    }
-    [ONDRAIN]() {
-        if (this[CURRENT] && this[CURRENT].entry) {
-            this[CURRENT].entry.resume();
-        }
-    }
-    // like .pipe() but using super, because our write() is special
-    [PIPE](job) {
-        job.piped = true;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        const source = job.entry;
-        const zip = this.zip;
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                if (!zip.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                if (!super.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-    }
-    pause() {
-        if (this.zip) {
-            this.zip.pause();
-        }
-        return super.pause();
-    }
-    warn(code, message, data = {}) {
-        warnMethod(this, code, message, data);
-    }
-}
-export class PackSync extends Pack {
-    sync = true;
-    constructor(opt) {
-        super(opt);
-        this[WRITEENTRYCLASS] = WriteEntrySync;
-    }
-    // pause/resume are no-ops in sync streams.
-    pause() { }
-    resume() { }
-    [STAT](job) {
-        const stat = this.follow ? 'statSync' : 'lstatSync';
-        this[ONSTAT](job, fs[stat](job.absolute));
-    }
-    [READDIR](job) {
-        this[ONREADDIR](job, fs.readdirSync(job.absolute));
-    }
-    // gotta get it all in this tick
-    [PIPE](job) {
-        const source = job.entry;
-        const zip = this.zip;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('Cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                zip.write(chunk);
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                super[WRITE](chunk);
-            });
-        }
-    }
-}
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
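The header comment of the removed pack.js above describes a chainable write-paths-in, tar-out API. A minimal usage sketch of that pattern, assuming Pack is reachable from the tar package root (as in recent node-tar releases):

import fs from 'node:fs'
import { Pack } from 'tar'

// paths are resolved against opt.cwd; gzip wraps the output in a compressor
new Pack({ cwd: process.cwd(), gzip: true })
  .add('files')   // add() is write() that returns `this`
  .add('dir')
  .end()          // end() also returns `this`
  .pipe(fs.createWriteStream('out.tgz'))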
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/parse.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/parse.js
deleted file mode 100644
index cce430479cd0c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/parse.js
+++ /dev/null
@@ -1,595 +0,0 @@
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-import { EventEmitter as EE } from 'events';
-import { BrotliDecompress, Unzip } from 'minizlib';
-import { Yallist } from 'yallist';
-import { Header } from './header.js';
-import { Pax } from './pax.js';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-export class Parser extends EE {
-    file;
-    strict;
-    maxMetaEntrySize;
-    filter;
-    brotli;
-    writable = true;
-    readable = false;
-    [QUEUE] = new Yallist();
-    [BUFFER];
-    [READENTRY];
-    [WRITEENTRY];
-    [STATE] = 'begin';
-    [META] = '';
-    [EX];
-    [GEX];
-    [ENDED] = false;
-    [UNZIP];
-    [ABORTED] = false;
-    [SAW_VALID_ENTRY];
-    [SAW_NULL_BLOCK] = false;
-    [SAW_EOF] = false;
-    [WRITING] = false;
-    [CONSUMING] = false;
-    [EMITTEDEND] = false;
-    constructor(opt = {}) {
-        super();
-        this.file = opt.file || '';
-        // these BADARCHIVE errors can't be detected early. listen on DONE.
-        this.on(DONE, () => {
-            if (this[STATE] === 'begin' ||
-                this[SAW_VALID_ENTRY] === false) {
-                // either less than 1 block of data, or all entries were invalid.
-                // Either way, probably not even a tarball.
-                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
-            }
-        });
-        if (opt.ondone) {
-            this.on(DONE, opt.ondone);
-        }
-        else {
-            this.on(DONE, () => {
-                this.emit('prefinish');
-                this.emit('finish');
-                this.emit('end');
-            });
-        }
-        this.strict = !!opt.strict;
-        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
-        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
-        // Unlike gzip, brotli doesn't have any magic bytes to identify it
-        // Users need to explicitly tell us they're extracting a brotli file
-        // Or we infer from the file extension
-        const isTBR = opt.file &&
-            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
-        // if it's a tbr file it MIGHT be brotli, but we don't know until
-        // we look at it and verify it's not a valid tar file.
-        this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
-                : isTBR ? undefined
-                    : false;
-        // have to set this so that streams are ok piping into it
-        this.on('end', () => this[CLOSESTREAM]());
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        if (typeof opt.onReadEntry === 'function') {
-            this.on('entry', opt.onReadEntry);
-        }
-    }
-    warn(code, message, data = {}) {
-        warnMethod(this, code, message, data);
-    }
-    [CONSUMEHEADER](chunk, position) {
-        if (this[SAW_VALID_ENTRY] === undefined) {
-            this[SAW_VALID_ENTRY] = false;
-        }
-        let header;
-        try {
-            header = new Header(chunk, position, this[EX], this[GEX]);
-        }
-        catch (er) {
-            return this.warn('TAR_ENTRY_INVALID', er);
-        }
-        if (header.nullBlock) {
-            if (this[SAW_NULL_BLOCK]) {
-                this[SAW_EOF] = true;
-                // ending an archive with no entries.  pointless, but legal.
-                if (this[STATE] === 'begin') {
-                    this[STATE] = 'header';
-                }
-                this[EMIT]('eof');
-            }
-            else {
-                this[SAW_NULL_BLOCK] = true;
-                this[EMIT]('nullBlock');
-            }
-        }
-        else {
-            this[SAW_NULL_BLOCK] = false;
-            if (!header.cksumValid) {
-                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
-            }
-            else if (!header.path) {
-                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
-            }
-            else {
-                const type = header.type;
-                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
-                        header,
-                    });
-                }
-                else if (!/^(Symbolic)?Link$/.test(type) &&
-                    !/^(Global)?ExtendedHeader$/.test(type) &&
-                    header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
-                        header,
-                    });
-                }
-                else {
-                    const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX]));
-                    // we do this for meta & ignored entries as well, because they
-                    // are still valid tar, or else we wouldn't know to ignore them
-                    if (!this[SAW_VALID_ENTRY]) {
-                        if (entry.remain) {
-                            // this might be the one!
-                            const onend = () => {
-                                if (!entry.invalid) {
-                                    this[SAW_VALID_ENTRY] = true;
-                                }
-                            };
-                            entry.on('end', onend);
-                        }
-                        else {
-                            this[SAW_VALID_ENTRY] = true;
-                        }
-                    }
-                    if (entry.meta) {
-                        if (entry.size > this.maxMetaEntrySize) {
-                            entry.ignore = true;
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = 'ignore';
-                            entry.resume();
-                        }
-                        else if (entry.size > 0) {
-                            this[META] = '';
-                            entry.on('data', c => (this[META] += c));
-                            this[STATE] = 'meta';
-                        }
-                    }
-                    else {
-                        this[EX] = undefined;
-                        entry.ignore =
-                            entry.ignore || !this.filter(entry.path, entry);
-                        if (entry.ignore) {
-                            // probably valid, just not something we care about
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = entry.remain ? 'ignore' : 'header';
-                            entry.resume();
-                        }
-                        else {
-                            if (entry.remain) {
-                                this[STATE] = 'body';
-                            }
-                            else {
-                                this[STATE] = 'header';
-                                entry.end();
-                            }
-                            if (!this[READENTRY]) {
-                                this[QUEUE].push(entry);
-                                this[NEXTENTRY]();
-                            }
-                            else {
-                                this[QUEUE].push(entry);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    [CLOSESTREAM]() {
-        queueMicrotask(() => this.emit('close'));
-    }
-    [PROCESSENTRY](entry) {
-        let go = true;
-        if (!entry) {
-            this[READENTRY] = undefined;
-            go = false;
-        }
-        else if (Array.isArray(entry)) {
-            const [ev, ...args] = entry;
-            this.emit(ev, ...args);
-        }
-        else {
-            this[READENTRY] = entry;
-            this.emit('entry', entry);
-            if (!entry.emittedEnd) {
-                entry.on('end', () => this[NEXTENTRY]());
-                go = false;
-            }
-        }
-        return go;
-    }
-    [NEXTENTRY]() {
-        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
-        if (!this[QUEUE].length) {
-            // At this point, there's nothing in the queue, but we may have an
-            // entry which is being consumed (readEntry).
-            // If we don't, then we definitely can handle more data.
-            // If we do, and either it's flowing, or it has never had any data
-            // written to it, then it needs more.
-            // The only other possibility is that it has returned false from a
-            // write() call, so we wait for the next drain to continue.
-            const re = this[READENTRY];
-            const drainNow = !re || re.flowing || re.size === re.remain;
-            if (drainNow) {
-                if (!this[WRITING]) {
-                    this.emit('drain');
-                }
-            }
-            else {
-                re.once('drain', () => this.emit('drain'));
-            }
-        }
-    }
-    [CONSUMEBODY](chunk, position) {
-        // write up to but no more than writeEntry.blockRemain
-        const entry = this[WRITEENTRY];
-        /* c8 ignore start */
-        if (!entry) {
-            throw new Error('attempt to consume body without entry??');
-        }
-        const br = entry.blockRemain ?? 0;
-        /* c8 ignore stop */
-        const c = br >= chunk.length && position === 0 ?
-            chunk
-            : chunk.subarray(position, position + br);
-        entry.write(c);
-        if (!entry.blockRemain) {
-            this[STATE] = 'header';
-            this[WRITEENTRY] = undefined;
-            entry.end();
-        }
-        return c.length;
-    }
-    [CONSUMEMETA](chunk, position) {
-        const entry = this[WRITEENTRY];
-        const ret = this[CONSUMEBODY](chunk, position);
-        // if we finished, then the entry is reset
-        if (!this[WRITEENTRY] && entry) {
-            this[EMITMETA](entry);
-        }
-        return ret;
-    }
-    [EMIT](ev, data, extra) {
-        if (!this[QUEUE].length && !this[READENTRY]) {
-            this.emit(ev, data, extra);
-        }
-        else {
-            this[QUEUE].push([ev, data, extra]);
-        }
-    }
-    [EMITMETA](entry) {
-        this[EMIT]('meta', this[META]);
-        switch (entry.type) {
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this[EX] = Pax.parse(this[META], this[EX], false);
-                break;
-            case 'GlobalExtendedHeader':
-                this[GEX] = Pax.parse(this[META], this[GEX], true);
-                break;
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath': {
-                const ex = this[EX] ?? Object.create(null);
-                this[EX] = ex;
-                ex.path = this[META].replace(/\0.*/, '');
-                break;
-            }
-            case 'NextFileHasLongLinkpath': {
-                const ex = this[EX] || Object.create(null);
-                this[EX] = ex;
-                ex.linkpath = this[META].replace(/\0.*/, '');
-                break;
-            }
-            /* c8 ignore start */
-            default:
-                throw new Error('unknown meta: ' + entry.type);
-            /* c8 ignore stop */
-        }
-    }
-    abort(error) {
-        this[ABORTED] = true;
-        this.emit('abort', error);
-        // always throws, even in non-strict mode
-        this.warn('TAR_ABORT', error, { recoverable: false });
-    }
-    write(chunk, encoding, cb) {
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, 
-            /* c8 ignore next */
-            typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        if (this[ABORTED]) {
-            /* c8 ignore next */
-            cb?.();
-            return false;
-        }
-        // first write, might be gzipped
-        const needSniff = this[UNZIP] === undefined ||
-            (this.brotli === undefined && this[UNZIP] === false);
-        if (needSniff && chunk) {
-            if (this[BUFFER]) {
-                chunk = Buffer.concat([this[BUFFER], chunk]);
-                this[BUFFER] = undefined;
-            }
-            if (chunk.length < gzipHeader.length) {
-                this[BUFFER] = chunk;
-                /* c8 ignore next */
-                cb?.();
-                return true;
-            }
-            // look for gzip header
-            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
-                if (chunk[i] !== gzipHeader[i]) {
-                    this[UNZIP] = false;
-                }
-            }
-            const maybeBrotli = this.brotli === undefined;
-            if (this[UNZIP] === false && maybeBrotli) {
-                // read the first header to see if it's a valid tar file. If so,
-                // we can safely assume that it's not actually brotli, despite the
-                // .tbr or .tar.br file extension.
-                // if we ended before getting a full chunk, yes, def brotli
-                if (chunk.length < 512) {
-                    if (this[ENDED]) {
-                        this.brotli = true;
-                    }
-                    else {
-                        this[BUFFER] = chunk;
-                        /* c8 ignore next */
-                        cb?.();
-                        return true;
-                    }
-                }
-                else {
-                    // if it's tar, it's pretty reliably not brotli, chances of
-                    // that happening are astronomical.
-                    try {
-                        new Header(chunk.subarray(0, 512));
-                        this.brotli = false;
-                    }
-                    catch (_) {
-                        this.brotli = true;
-                    }
-                }
-            }
-            if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
-                const ended = this[ENDED];
-                this[ENDED] = false;
-                this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new Unzip({})
-                        : new BrotliDecompress({});
-                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
-                this[UNZIP].on('error', er => this.abort(er));
-                this[UNZIP].on('end', () => {
-                    this[ENDED] = true;
-                    this[CONSUMECHUNK]();
-                });
-                this[WRITING] = true;
-                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
-                this[WRITING] = false;
-                cb?.();
-                return ret;
-            }
-        }
-        this[WRITING] = true;
-        if (this[UNZIP]) {
-            this[UNZIP].write(chunk);
-        }
-        else {
-            this[CONSUMECHUNK](chunk);
-        }
-        this[WRITING] = false;
-        // return false if there's a queue, or if the current entry isn't flowing
-        const ret = this[QUEUE].length ? false
-            : this[READENTRY] ? this[READENTRY].flowing
-                : true;
-        // if we have no queue, then that means a clogged READENTRY
-        if (!ret && !this[QUEUE].length) {
-            this[READENTRY]?.once('drain', () => this.emit('drain'));
-        }
-        /* c8 ignore next */
-        cb?.();
-        return ret;
-    }
-    [BUFFERCONCAT](c) {
-        if (c && !this[ABORTED]) {
-            this[BUFFER] =
-                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
-        }
-    }
-    [MAYBEEND]() {
-        if (this[ENDED] &&
-            !this[EMITTEDEND] &&
-            !this[ABORTED] &&
-            !this[CONSUMING]) {
-            this[EMITTEDEND] = true;
-            const entry = this[WRITEENTRY];
-            if (entry && entry.blockRemain) {
-                // truncated, likely a damaged file
-                const have = this[BUFFER] ? this[BUFFER].length : 0;
-                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
-                if (this[BUFFER]) {
-                    entry.write(this[BUFFER]);
-                }
-                entry.end();
-            }
-            this[EMIT](DONE);
-        }
-    }
-    [CONSUMECHUNK](chunk) {
-        if (this[CONSUMING] && chunk) {
-            this[BUFFERCONCAT](chunk);
-        }
-        else if (!chunk && !this[BUFFER]) {
-            this[MAYBEEND]();
-        }
-        else if (chunk) {
-            this[CONSUMING] = true;
-            if (this[BUFFER]) {
-                this[BUFFERCONCAT](chunk);
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            else {
-                this[CONSUMECHUNKSUB](chunk);
-            }
-            while (this[BUFFER] &&
-                this[BUFFER]?.length >= 512 &&
-                !this[ABORTED] &&
-                !this[SAW_EOF]) {
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            this[CONSUMING] = false;
-        }
-        if (!this[BUFFER] || this[ENDED]) {
-            this[MAYBEEND]();
-        }
-    }
-    [CONSUMECHUNKSUB](chunk) {
-        // we know that we are in CONSUMING mode, so anything written goes into
-        // the buffer.  Advance the position and put any remainder in the buffer.
-        let position = 0;
-        const length = chunk.length;
-        while (position + 512 <= length &&
-            !this[ABORTED] &&
-            !this[SAW_EOF]) {
-            switch (this[STATE]) {
-                case 'begin':
-                case 'header':
-                    this[CONSUMEHEADER](chunk, position);
-                    position += 512;
-                    break;
-                case 'ignore':
-                case 'body':
-                    position += this[CONSUMEBODY](chunk, position);
-                    break;
-                case 'meta':
-                    position += this[CONSUMEMETA](chunk, position);
-                    break;
-                /* c8 ignore start */
-                default:
-                    throw new Error('invalid state: ' + this[STATE]);
-                /* c8 ignore stop */
-            }
-        }
-        if (position < length) {
-            if (this[BUFFER]) {
-                this[BUFFER] = Buffer.concat([
-                    chunk.subarray(position),
-                    this[BUFFER],
-                ]);
-            }
-            else {
-                this[BUFFER] = chunk.subarray(position);
-            }
-        }
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (cb)
-            this.once('finish', cb);
-        if (!this[ABORTED]) {
-            if (this[UNZIP]) {
-                /* c8 ignore start */
-                if (chunk)
-                    this[UNZIP].write(chunk);
-                /* c8 ignore stop */
-                this[UNZIP].end();
-            }
-            else {
-                this[ENDED] = true;
-                if (this.brotli === undefined)
-                    chunk = chunk || Buffer.alloc(0);
-                if (chunk)
-                    this.write(chunk);
-                this[MAYBEEND]();
-            }
-        }
-        return this;
-    }
-}
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
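The removed parse.js above queues parsed entries and emits them as 'entry' events, and (per read-entry.js further below) each ReadEntry starts life paused. A minimal consumption sketch, assuming Parser is exported from the tar package root:

import fs from 'node:fs'
import { Parser } from 'tar'

const parser = new Parser({
  // onReadEntry is attached as an 'entry' listener by the constructor
  onReadEntry: entry => {
    console.log(entry.type, entry.path)
    entry.resume() // entries start paused; resume to discard the body
  },
})
fs.createReadStream('archive.tar').pipe(parser)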
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/path-reservations.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/path-reservations.js
deleted file mode 100644
index e63b9c91e9a80..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/path-reservations.js
+++ /dev/null
@@ -1,166 +0,0 @@
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-import { join } from 'node:path';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-const getDirs = (path) => {
-    const dirs = path
-        .split('/')
-        .slice(0, -1)
-        .reduce((set, path) => {
-        const s = set[set.length - 1];
-        if (s !== undefined) {
-            path = join(s, path);
-        }
-        set.push(path || '/');
-        return set;
-    }, []);
-    return dirs;
-};
-export class PathReservations {
-    // path => [function or Set]
-    // A Set object means a directory reservation
-    // A fn is a direct reservation on that path
-    #queues = new Map();
-    // fn => {paths:[path,...], dirs:[path, ...]}
-    #reservations = new Map();
-    // functions currently running
-    #running = new Set();
-    reserve(paths, fn) {
-        paths =
-            isWindows ?
-                ['win32 parallelization disabled']
-                : paths.map(p => {
-                    // don't need normPath, because we skip this entirely for windows
-                    return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase();
-                });
-        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
-        this.#reservations.set(fn, { dirs, paths });
-        for (const p of paths) {
-            const q = this.#queues.get(p);
-            if (!q) {
-                this.#queues.set(p, [fn]);
-            }
-            else {
-                q.push(fn);
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            if (!q) {
-                this.#queues.set(dir, [new Set([fn])]);
-            }
-            else {
-                const l = q[q.length - 1];
-                if (l instanceof Set) {
-                    l.add(fn);
-                }
-                else {
-                    q.push(new Set([fn]));
-                }
-            }
-        }
-        return this.#run(fn);
-    }
-    // return the queues for each path the function cares about
-    // fn => {paths, dirs}
-    #getQueues(fn) {
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('function does not have any path reservations');
-        }
-        /* c8 ignore stop */
-        return {
-            paths: res.paths.map((path) => this.#queues.get(path)),
-            dirs: [...res.dirs].map(path => this.#queues.get(path)),
-        };
-    }
-    // check if fn is first in line for all its paths, and is
-    // included in the first set for all its dir queues
-    check(fn) {
-        const { paths, dirs } = this.#getQueues(fn);
-        return (paths.every(q => q && q[0] === fn) &&
-            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
-    }
-    // run the function if it's first in line and not already running
-    #run(fn) {
-        if (this.#running.has(fn) || !this.check(fn)) {
-            return false;
-        }
-        this.#running.add(fn);
-        fn(() => this.#clear(fn));
-        return true;
-    }
-    #clear(fn) {
-        if (!this.#running.has(fn)) {
-            return false;
-        }
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('invalid reservation');
-        }
-        /* c8 ignore stop */
-        const { paths, dirs } = res;
-        const next = new Set();
-        for (const path of paths) {
-            const q = this.#queues.get(path);
-            /* c8 ignore start */
-            if (!q || q?.[0] !== fn) {
-                continue;
-            }
-            /* c8 ignore stop */
-            const q0 = q[1];
-            if (!q0) {
-                this.#queues.delete(path);
-                continue;
-            }
-            q.shift();
-            if (typeof q0 === 'function') {
-                next.add(q0);
-            }
-            else {
-                for (const f of q0) {
-                    next.add(f);
-                }
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            const q0 = q?.[0];
-            /* c8 ignore next - type safety only */
-            if (!q || !(q0 instanceof Set))
-                continue;
-            if (q0.size === 1 && q.length === 1) {
-                this.#queues.delete(dir);
-                continue;
-            }
-            else if (q0.size === 1) {
-                q.shift();
-                // next one must be a function,
-                // or else the Set would've been reused
-                const n = q[0];
-                if (typeof n === 'function') {
-                    next.add(n);
-                }
-            }
-            else {
-                q0.delete(fn);
-            }
-        }
-        this.#running.delete(fn);
-        next.forEach(fn => this.#run(fn));
-        return true;
-    }
-}
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
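The removed path-reservations.js above is a small exclusive-lock queue keyed by paths and their parent directories. Illustrative use only; PathReservations is an internal helper of node-tar's unpack logic and may not be part of the public API:

const reservations = new PathReservations()
reservations.reserve(['out/a/b.txt'], done => {
  // called once this function is first in line for 'out/a/b.txt'
  // and for each of its parent dirs ('out', 'out/a')
  // ... do the exclusive work, e.g. write the file ...
  done() // clears the reservation so queued functions can run
})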
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pax.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pax.js
deleted file mode 100644
index 832808f344da5..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/pax.js
+++ /dev/null
@@ -1,154 +0,0 @@
-import { basename } from 'node:path';
-import { Header } from './header.js';
-export class Pax {
-    atime;
-    mtime;
-    ctime;
-    charset;
-    comment;
-    gid;
-    uid;
-    gname;
-    uname;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    path;
-    size;
-    mode;
-    global;
-    constructor(obj, global = false) {
-        this.atime = obj.atime;
-        this.charset = obj.charset;
-        this.comment = obj.comment;
-        this.ctime = obj.ctime;
-        this.dev = obj.dev;
-        this.gid = obj.gid;
-        this.global = global;
-        this.gname = obj.gname;
-        this.ino = obj.ino;
-        this.linkpath = obj.linkpath;
-        this.mtime = obj.mtime;
-        this.nlink = obj.nlink;
-        this.path = obj.path;
-        this.size = obj.size;
-        this.uid = obj.uid;
-        this.uname = obj.uname;
-    }
-    encode() {
-        const body = this.encodeBody();
-        if (body === '') {
-            return Buffer.allocUnsafe(0);
-        }
-        const bodyLen = Buffer.byteLength(body);
-        // round up to 512 bytes
-        // add 512 for header
-        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
-        const buf = Buffer.allocUnsafe(bufLen);
-        // 0-fill the header section, it might not hit every field
-        for (let i = 0; i < 512; i++) {
-            buf[i] = 0;
-        }
-        new Header({
-            // XXX split the path
-            // then the path should be PaxHeader + basename, but less than 99,
-            // prepend with the dirname
-            /* c8 ignore start */
-            path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99),
-            /* c8 ignore stop */
-            mode: this.mode || 0o644,
-            uid: this.uid,
-            gid: this.gid,
-            size: bodyLen,
-            mtime: this.mtime,
-            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-            linkpath: '',
-            uname: this.uname || '',
-            gname: this.gname || '',
-            devmaj: 0,
-            devmin: 0,
-            atime: this.atime,
-            ctime: this.ctime,
-        }).encode(buf);
-        buf.write(body, 512, bodyLen, 'utf8');
-        // null pad after the body
-        for (let i = bodyLen + 512; i < buf.length; i++) {
-            buf[i] = 0;
-        }
-        return buf;
-    }
-    encodeBody() {
-        return (this.encodeField('path') +
-            this.encodeField('ctime') +
-            this.encodeField('atime') +
-            this.encodeField('dev') +
-            this.encodeField('ino') +
-            this.encodeField('nlink') +
-            this.encodeField('charset') +
-            this.encodeField('comment') +
-            this.encodeField('gid') +
-            this.encodeField('gname') +
-            this.encodeField('linkpath') +
-            this.encodeField('mtime') +
-            this.encodeField('size') +
-            this.encodeField('uid') +
-            this.encodeField('uname'));
-    }
-    encodeField(field) {
-        if (this[field] === undefined) {
-            return '';
-        }
-        const r = this[field];
-        const v = r instanceof Date ? r.getTime() / 1000 : r;
-        const s = ' ' +
-            (field === 'dev' || field === 'ino' || field === 'nlink' ?
-                'SCHILY.'
-                : '') +
-            field +
-            '=' +
-            v +
-            '\n';
-        const byteLen = Buffer.byteLength(s);
-        // the digits includes the length of the digits in ascii base-10
-        // so if it's 9 characters, then adding 1 for the 9 makes it 10
-        // which makes it 11 chars.
-        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
-        if (byteLen + digits >= Math.pow(10, digits)) {
-            digits += 1;
-        }
-        const len = digits + byteLen;
-        return len + s;
-    }
-    static parse(str, ex, g = false) {
-        return new Pax(merge(parseKV(str), ex), g);
-    }
-}
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
-    const n = parseInt(line, 10);
-    // XXX Values with \n in them will fail this.
-    // Refactor to not be a naive line-by-line parse.
-    if (n !== Buffer.byteLength(line) + 1) {
-        return set;
-    }
-    line = line.slice((n + ' ').length);
-    const kv = line.split('=');
-    const r = kv.shift();
-    if (!r) {
-        return set;
-    }
-    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
-    const v = kv.join('=');
-    set[k] =
-        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
-            new Date(Number(v) * 1000)
-            : /^[0-9]+$/.test(v) ? +v
-                : v;
-    return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
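encodeField above produces pax records of the form '<len> <key>=<value>\n', where <len> counts the whole record including its own digits. A worked example for path=foo:

  s = ' path=foo\n'                      -> 10 bytes
  digits = floor(log10(10)) + 1          -> 2
  10 + 2 = 12, still below 10^2, so no extra digit is needed
  record = '12' + s = '12 path=foo\n'    -> exactly 12 bytes

When byteLen + digits crosses a power of ten (for instance a 98-byte s), digits is bumped by one and the record becomes '101' + s, 101 bytes in total.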
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/read-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/read-entry.js
deleted file mode 100644
index 23cc673e61087..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/read-entry.js
+++ /dev/null
@@ -1,136 +0,0 @@
-import { Minipass } from 'minipass';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class ReadEntry extends Minipass {
-    extended;
-    globalExtended;
-    header;
-    startBlockSize;
-    blockRemain;
-    remain;
-    type;
-    meta = false;
-    ignore = false;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    size = 0;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    invalid = false;
-    absolute;
-    unsupported = false;
-    constructor(header, ex, gex) {
-        super({});
-        // read entries always start life paused.  this is to avoid the
-        // situation where Minipass's auto-ending empty streams results
-        // in an entry ending before we're ready for it.
-        this.pause();
-        this.extended = ex;
-        this.globalExtended = gex;
-        this.header = header;
-        /* c8 ignore start */
-        this.remain = header.size ?? 0;
-        /* c8 ignore stop */
-        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
-        this.blockRemain = this.startBlockSize;
-        this.type = header.type;
-        switch (this.type) {
-            case 'File':
-            case 'OldFile':
-            case 'Link':
-            case 'SymbolicLink':
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'Directory':
-            case 'FIFO':
-            case 'ContiguousFile':
-            case 'GNUDumpDir':
-                break;
-            case 'NextFileHasLongLinkpath':
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath':
-            case 'GlobalExtendedHeader':
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this.meta = true;
-                break;
-            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-            // it may be worth doing the same, but with a warning.
-            default:
-                this.ignore = true;
-        }
-        /* c8 ignore start */
-        if (!header.path) {
-            throw new Error('no path provided for tar.ReadEntry');
-        }
-        /* c8 ignore stop */
-        this.path = normalizeWindowsPath(header.path);
-        this.mode = header.mode;
-        if (this.mode) {
-            this.mode = this.mode & 0o7777;
-        }
-        this.uid = header.uid;
-        this.gid = header.gid;
-        this.uname = header.uname;
-        this.gname = header.gname;
-        this.size = this.remain;
-        this.mtime = header.mtime;
-        this.atime = header.atime;
-        this.ctime = header.ctime;
-        /* c8 ignore start */
-        this.linkpath =
-            header.linkpath ?
-                normalizeWindowsPath(header.linkpath)
-                : undefined;
-        /* c8 ignore stop */
-        this.uname = header.uname;
-        this.gname = header.gname;
-        if (ex) {
-            this.#slurp(ex);
-        }
-        if (gex) {
-            this.#slurp(gex, true);
-        }
-    }
-    write(data) {
-        const writeLen = data.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        const r = this.remain;
-        const br = this.blockRemain;
-        this.remain = Math.max(0, r - writeLen);
-        this.blockRemain = Math.max(0, br - writeLen);
-        if (this.ignore) {
-            return true;
-        }
-        if (r >= writeLen) {
-            return super.write(data);
-        }
-        // r < writeLen
-        return super.write(data.subarray(0, r));
-    }
-    #slurp(ex, gex = false) {
-        if (ex.path)
-            ex.path = normalizeWindowsPath(ex.path);
-        if (ex.linkpath)
-            ex.linkpath = normalizeWindowsPath(ex.linkpath);
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex));
-        })));
-    }
-}
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/replace.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/replace.js
deleted file mode 100644
index bab622bfdf1f1..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/replace.js
+++ /dev/null
@@ -1,225 +0,0 @@
-// tar -r
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import path from 'node:path';
-import { Header } from './header.js';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { isFile, } from './options.js';
-import { Pack, PackSync } from './pack.js';
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
-    const p = new PackSync(opt);
-    let threw = true;
-    let fd;
-    let position;
-    try {
-        try {
-            fd = fs.openSync(opt.file, 'r+');
-        }
-        catch (er) {
-            if (er?.code === 'ENOENT') {
-                fd = fs.openSync(opt.file, 'w+');
-            }
-            else {
-                throw er;
-            }
-        }
-        const st = fs.fstatSync(fd);
-        const headBuf = Buffer.alloc(512);
-        POSITION: for (position = 0; position < st.size; position += 512) {
-            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-                bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
-                if (position === 0 &&
-                    headBuf[0] === 0x1f &&
-                    headBuf[1] === 0x8b) {
-                    throw new Error('cannot append to compressed archives');
-                }
-                if (!bytes) {
-                    break POSITION;
-                }
-            }
-            const h = new Header(headBuf);
-            if (!h.cksumValid) {
-                break;
-            }
-            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
-            if (position + entryBlockSize + 512 > st.size) {
-                break;
-            }
-            // the 512 for the header we just parsed will be added as well
-            // also jump ahead all the blocks for the body
-            position += entryBlockSize;
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-        }
-        threw = false;
-        streamSync(opt, p, position, fd, files);
-    }
-    finally {
-        if (threw) {
-            try {
-                fs.closeSync(fd);
-            }
-            catch (er) { }
-        }
-    }
-};
-const streamSync = (opt, p, position, fd, files) => {
-    const stream = new WriteStreamSync(opt.file, {
-        fd: fd,
-        start: position,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
-    files = Array.from(files);
-    const p = new Pack(opt);
-    const getPos = (fd, size, cb_) => {
-        const cb = (er, pos) => {
-            if (er) {
-                fs.close(fd, _ => cb_(er));
-            }
-            else {
-                cb_(null, pos);
-            }
-        };
-        let position = 0;
-        if (size === 0) {
-            return cb(null, 0);
-        }
-        let bufPos = 0;
-        const headBuf = Buffer.alloc(512);
-        const onread = (er, bytes) => {
-            if (er || typeof bytes === 'undefined') {
-                return cb(er);
-            }
-            bufPos += bytes;
-            if (bufPos < 512 && bytes) {
-                return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
-            }
-            if (position === 0 &&
-                headBuf[0] === 0x1f &&
-                headBuf[1] === 0x8b) {
-                return cb(new Error('cannot append to compressed archives'));
-            }
-            // truncated header
-            if (bufPos < 512) {
-                return cb(null, position);
-            }
-            const h = new Header(headBuf);
-            if (!h.cksumValid) {
-                return cb(null, position);
-            }
-            /* c8 ignore next */
-            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
-            if (position + entryBlockSize + 512 > size) {
-                return cb(null, position);
-            }
-            position += entryBlockSize + 512;
-            if (position >= size) {
-                return cb(null, position);
-            }
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-            bufPos = 0;
-            fs.read(fd, headBuf, 0, 512, position, onread);
-        };
-        fs.read(fd, headBuf, 0, 512, position, onread);
-    };
-    const promise = new Promise((resolve, reject) => {
-        p.on('error', reject);
-        let flag = 'r+';
-        const onopen = (er, fd) => {
-            if (er && er.code === 'ENOENT' && flag === 'r+') {
-                flag = 'w+';
-                return fs.open(opt.file, flag, onopen);
-            }
-            if (er || !fd) {
-                return reject(er);
-            }
-            fs.fstat(fd, (er, st) => {
-                if (er) {
-                    return fs.close(fd, () => reject(er));
-                }
-                getPos(fd, st.size, (er, position) => {
-                    if (er) {
-                        return reject(er);
-                    }
-                    const stream = new WriteStream(opt.file, {
-                        fd: fd,
-                        start: position,
-                    });
-                    p.pipe(stream);
-                    stream.on('error', reject);
-                    stream.on('close', resolve);
-                    addFilesAsync(p, files);
-                });
-            });
-        };
-        fs.open(opt.file, flag, onopen);
-    });
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            list({
-                file: path.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await list({
-                file: path.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-export const replace = makeCommand(replaceSync, replaceAsync, 
-/* c8 ignore start */
-() => {
-    throw new TypeError('file is required');
-}, () => {
-    throw new TypeError('file is required');
-}, 
-/* c8 ignore stop */
-(opt, entries) => {
-    if (!isFile(opt)) {
-        throw new TypeError('file is required');
-    }
-    if (opt.gzip ||
-        opt.brotli ||
-        opt.file.endsWith('.br') ||
-        opt.file.endsWith('.tbr')) {
-        throw new TypeError('cannot append to compressed archives');
-    }
-    if (!entries?.length) {
-        throw new TypeError('no paths specified to add/replace');
-    }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-absolute-path.js
deleted file mode 100644
index cce5ff80b00db..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-absolute-path.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// unix absolute paths are also absolute on win32, so we use this for both
-import { win32 } from 'node:path';
-const { isAbsolute, parse } = win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-export const stripAbsolutePath = (path) => {
-    let r = '';
-    let parsed = parse(path);
-    while (isAbsolute(path) || parsed.root) {
-        // windows will think that //x/y/z has a "root" of //x/y/
-        // but strip the //?/C:/ off of //?/C:/path
-        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
-            '/'
-            : parsed.root;
-        path = path.slice(root.length);
-        r += root;
-        parsed = parse(path);
-    }
-    return [r, path];
-};
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-trailing-slashes.js
deleted file mode 100644
index ace4218a7547b..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/strip-trailing-slashes.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-export const stripTrailingSlashes = (str) => {
-    let i = str.length - 1;
-    let slashesStart = -1;
-    while (i > -1 && str.charAt(i) === '/') {
-        slashesStart = i;
-        i--;
-    }
-    return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/symlink-error.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/symlink-error.js
deleted file mode 100644
index d31766e2e0afa..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/symlink-error.js
+++ /dev/null
@@ -1,15 +0,0 @@
-export class SymlinkError extends Error {
-    path;
-    symlink;
-    syscall = 'symlink';
-    code = 'TAR_SYMLINK_ERROR';
-    constructor(symlink, path) {
-        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
-        this.symlink = symlink;
-        this.path = path;
-    }
-    get name() {
-        return 'SymlinkError';
-    }
-}
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/types.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/types.js
deleted file mode 100644
index 27b982ae1e092..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/types.js
+++ /dev/null
@@ -1,45 +0,0 @@
-export const isCode = (c) => name.has(c);
-export const isName = (c) => code.has(c);
-// map types from key to human-friendly name
-export const name = new Map([
-    ['0', 'File'],
-    // same as File
-    ['', 'OldFile'],
-    ['1', 'Link'],
-    ['2', 'SymbolicLink'],
-    // Devices and FIFOs aren't fully supported
-    // they are parsed, but skipped when unpacking
-    ['3', 'CharacterDevice'],
-    ['4', 'BlockDevice'],
-    ['5', 'Directory'],
-    ['6', 'FIFO'],
-    // same as File
-    ['7', 'ContiguousFile'],
-    // pax headers
-    ['g', 'GlobalExtendedHeader'],
-    ['x', 'ExtendedHeader'],
-    // vendor-specific stuff
-    // skip
-    ['A', 'SolarisACL'],
-    // like 5, but with data, which should be skipped
-    ['D', 'GNUDumpDir'],
-    // metadata only, skip
-    ['I', 'Inode'],
-    // data = link path of next file
-    ['K', 'NextFileHasLongLinkpath'],
-    // data = path of next file
-    ['L', 'NextFileHasLongPath'],
-    // skip
-    ['M', 'ContinuationFile'],
-    // like L
-    ['N', 'OldGnuLongPath'],
-    // skip
-    ['S', 'SparseFile'],
-    // skip
-    ['V', 'TapeVolumeHeader'],
-    // like x
-    ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/unpack.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/unpack.js
deleted file mode 100644
index 6e744cfc1a6f9..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/unpack.js
+++ /dev/null
@@ -1,888 +0,0 @@
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-import * as fsm from '@isaacs/fs-minipass';
-import assert from 'node:assert';
-import { randomBytes } from 'node:crypto';
-import fs from 'node:fs';
-import path from 'node:path';
-import { getWriteFlag } from './get-write-flag.js';
-import { mkdir, mkdirSync } from './mkdir.js';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { Parser } from './parse.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import * as wc from './winchars.js';
-import { PathReservations } from './path-reservations.js';
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
-    if (!isWindows) {
-        return fs.unlink(path, cb);
-    }
-    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
-    fs.rename(path, name, er => {
-        if (er) {
-            return cb(er);
-        }
-        fs.unlink(name, cb);
-    });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
-    if (!isWindows) {
-        return fs.unlinkSync(path);
-    }
-    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
-    fs.renameSync(path, name);
-    fs.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
-    : b !== undefined && b === b >>> 0 ? b
-        : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
-export class Unpack extends Parser {
-    [ENDED] = false;
-    [CHECKED_CWD] = false;
-    [PENDING] = 0;
-    reservations = new PathReservations();
-    transform;
-    writable = true;
-    readable = false;
-    dirCache;
-    uid;
-    gid;
-    setOwner;
-    preserveOwner;
-    processGid;
-    processUid;
-    maxDepth;
-    forceChown;
-    win32;
-    newer;
-    keep;
-    noMtime;
-    preservePaths;
-    unlink;
-    cwd;
-    strip;
-    processUmask;
-    umask;
-    dmode;
-    fmode;
-    chmod;
-    constructor(opt = {}) {
-        opt.ondone = () => {
-            this[ENDED] = true;
-            this[MAYBECLOSE]();
-        };
-        super(opt);
-        this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
-        this.chmod = !!opt.chmod;
-        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-            // need both or neither
-            if (typeof opt.uid !== 'number' ||
-                typeof opt.gid !== 'number') {
-                throw new TypeError('cannot set owner without number uid and gid');
-            }
-            if (opt.preserveOwner) {
-                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
-            }
-            this.uid = opt.uid;
-            this.gid = opt.gid;
-            this.setOwner = true;
-        }
-        else {
-            this.uid = undefined;
-            this.gid = undefined;
-            this.setOwner = false;
-        }
-        // default true for root
-        if (opt.preserveOwner === undefined &&
-            typeof opt.uid !== 'number') {
-            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
-        }
-        else {
-            this.preserveOwner = !!opt.preserveOwner;
-        }
-        this.processUid =
-            (this.preserveOwner || this.setOwner) && process.getuid ?
-                process.getuid()
-                : undefined;
-        this.processGid =
-            (this.preserveOwner || this.setOwner) && process.getgid ?
-                process.getgid()
-                : undefined;
-        // prevent excessively deep nesting of subfolders
-        // set to `Infinity` to remove this restriction
-        this.maxDepth =
-            typeof opt.maxDepth === 'number' ?
-                opt.maxDepth
-                : DEFAULT_MAX_DEPTH;
-        // mostly just for testing, but useful in some cases.
-        // Forcibly trigger a chown on every entry, no matter what
-        this.forceChown = opt.forceChown === true;
-        // turn ><?| in filenames into 0xf000-higher encoded forms
-        this.win32 = !!opt.win32 || isWindows;
-        this.on('entry', entry => this[ONENTRY](entry));
-    }
-    // a bad or damaged archive is a warning for Parser, but an error
-    // when extracting.  Mark those errors as unrecoverable, because
-    // the Unpack contract cannot be met.
-    warn(code, msg, data = {}) {
-        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-            data.recoverable = false;
-        }
-        return super.warn(code, msg, data);
-    }
-    [MAYBECLOSE]() {
-        if (this[ENDED] && this[PENDING] === 0) {
-            this.emit('prefinish');
-            this.emit('finish');
-            this.emit('end');
-        }
-    }
-    [CHECKPATH](entry) {
-        const p = normalizeWindowsPath(entry.path);
-        const parts = p.split('/');
-        if (this.strip) {
-            if (parts.length < this.strip) {
-                return false;
-            }
-            if (entry.type === 'Link') {
-                const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/');
-                if (linkparts.length >= this.strip) {
-                    entry.linkpath = linkparts.slice(this.strip).join('/');
-                }
-                else {
-                    return false;
-                }
-            }
-            parts.splice(0, this.strip);
-            entry.path = parts.join('/');
-        }
-        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-                entry,
-                path: p,
-                depth: parts.length,
-                maxDepth: this.maxDepth,
-            });
-            return false;
-        }
-        if (!this.preservePaths) {
-            if (parts.includes('..') ||
-                /* c8 ignore next */
-                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
-                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-                    entry,
-                    path: p,
-                });
-                return false;
-            }
-            // strip off the root
-            const [root, stripped] = stripAbsolutePath(p);
-            if (root) {
-                entry.path = String(stripped);
-                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-                    entry,
-                    path: p,
-                });
-            }
-        }
-        if (path.isAbsolute(entry.path)) {
-            entry.absolute = normalizeWindowsPath(path.resolve(entry.path));
-        }
-        else {
-            entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path));
-        }
-        // if we somehow ended up with a path that escapes the cwd, and we are
-        // not in preservePaths mode, then something is fishy!  This should have
-        // been prevented above, so ignore this for coverage.
-        /* c8 ignore start - defense in depth */
-        if (!this.preservePaths &&
-            typeof entry.absolute === 'string' &&
-            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-            entry.absolute !== this.cwd) {
-            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-                entry,
-                path: normalizeWindowsPath(entry.path),
-                resolvedPath: entry.absolute,
-                cwd: this.cwd,
-            });
-            return false;
-        }
-        /* c8 ignore stop */
-        // an archive can set properties on the extraction directory, but it
-        // may not replace the cwd with a different kind of thing entirely.
-        if (entry.absolute === this.cwd &&
-            entry.type !== 'Directory' &&
-            entry.type !== 'GNUDumpDir') {
-            return false;
-        }
-        // only encode : chars that aren't drive letter indicators
-        if (this.win32) {
-            const { root: aRoot } = path.win32.parse(String(entry.absolute));
-            entry.absolute =
-                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
-            const { root: pRoot } = path.win32.parse(entry.path);
-            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
-        }
-        return true;
-    }
-    [ONENTRY](entry) {
-        if (!this[CHECKPATH](entry)) {
-            return entry.resume();
-        }
-        assert.equal(typeof entry.absolute, 'string');
-        switch (entry.type) {
-            case 'Directory':
-            case 'GNUDumpDir':
-                if (entry.mode) {
-                    entry.mode = entry.mode | 0o700;
-                }
-            // eslint-disable-next-line no-fallthrough
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-            case 'Link':
-            case 'SymbolicLink':
-                return this[CHECKFS](entry);
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'FIFO':
-            default:
-                return this[UNSUPPORTED](entry);
-        }
-    }
-    [ONERROR](er, entry) {
-        // Cwd has to exist, or else nothing works. That's serious.
-        // Other errors are warnings, which raise the error in strict
-        // mode, but otherwise continue on.
-        if (er.name === 'CwdError') {
-            this.emit('error', er);
-        }
-        else {
-            this.warn('TAR_ENTRY_ERROR', er, { entry });
-            this[UNPEND]();
-            entry.resume();
-        }
-    }
-    [MKDIR](dir, mode, cb) {
-        mkdir(normalizeWindowsPath(dir), {
-            uid: this.uid,
-            gid: this.gid,
-            processUid: this.processUid,
-            processGid: this.processGid,
-            umask: this.processUmask,
-            preserve: this.preservePaths,
-            unlink: this.unlink,
-            cache: this.dirCache,
-            cwd: this.cwd,
-            mode: mode,
-        }, cb);
-    }
-    [DOCHOWN](entry) {
-        // in preserve owner mode, chown if the entry doesn't match process
-        // in set owner mode, chown if setting doesn't match process
-        return (this.forceChown ||
-            (this.preserveOwner &&
-                ((typeof entry.uid === 'number' &&
-                    entry.uid !== this.processUid) ||
-                    (typeof entry.gid === 'number' &&
-                        entry.gid !== this.processGid))) ||
-            (typeof this.uid === 'number' &&
-                this.uid !== this.processUid) ||
-            (typeof this.gid === 'number' && this.gid !== this.processGid));
-    }
-    [UID](entry) {
-        return uint32(this.uid, entry.uid, this.processUid);
-    }
-    [GID](entry) {
-        return uint32(this.gid, entry.gid, this.processGid);
-    }
-    [FILE](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const stream = new fsm.WriteStream(String(entry.absolute), {
-            // slight lie, but it can be numeric flags
-            flags: getWriteFlag(entry.size),
-            mode: mode,
-            autoClose: false,
-        });
-        stream.on('error', (er) => {
-            if (stream.fd) {
-                fs.close(stream.fd, () => { });
-            }
-            // flush all the data out so that we aren't left hanging
-            // if the error wasn't actually fatal.  otherwise the parse
-            // is blocked, and we never proceed.
-            stream.write = () => true;
-            this[ONERROR](er, entry);
-            fullyDone();
-        });
-        let actions = 1;
-        const done = (er) => {
-            if (er) {
-                /* c8 ignore start - we should always have a fd by now */
-                if (stream.fd) {
-                    fs.close(stream.fd, () => { });
-                }
-                /* c8 ignore stop */
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            if (--actions === 0) {
-                if (stream.fd !== undefined) {
-                    fs.close(stream.fd, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                        }
-                        else {
-                            this[UNPEND]();
-                        }
-                        fullyDone();
-                    });
-                }
-            }
-        };
-        stream.on('finish', () => {
-            // if futimes fails, try utimes
-            // if utimes fails, fail with the original error
-            // same for fchown/chown
-            const abs = String(entry.absolute);
-            const fd = stream.fd;
-            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
-                actions++;
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                fs.futimes(fd, atime, mtime, er => er ?
-                    fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
-                    : done());
-            }
-            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
-                actions++;
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                if (typeof uid === 'number' && typeof gid === 'number') {
-                    fs.fchown(fd, uid, gid, er => er ?
-                        fs.chown(abs, uid, gid, er2 => done(er2 && er))
-                        : done());
-                }
-            }
-            done();
-        });
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => {
-                this[ONERROR](er, entry);
-                fullyDone();
-            });
-            entry.pipe(tx);
-        }
-        tx.pipe(stream);
-    }
-    [DIRECTORY](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        this[MKDIR](String(entry.absolute), mode, er => {
-            if (er) {
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            let actions = 1;
-            const done = () => {
-                if (--actions === 0) {
-                    fullyDone();
-                    this[UNPEND]();
-                    entry.resume();
-                }
-            };
-            if (entry.mtime && !this.noMtime) {
-                actions++;
-                fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
-            }
-            if (this[DOCHOWN](entry)) {
-                actions++;
-                fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
-            }
-            done();
-        });
-    }
-    [UNSUPPORTED](entry) {
-        entry.unsupported = true;
-        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
-        entry.resume();
-    }
-    [SYMLINK](entry, done) {
-        this[LINK](entry, String(entry.linkpath), 'symlink', done);
-    }
-    [HARDLINK](entry, done) {
-        const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath)));
-        this[LINK](entry, linkpath, 'link', done);
-    }
-    [PEND]() {
-        this[PENDING]++;
-    }
-    [UNPEND]() {
-        this[PENDING]--;
-        this[MAYBECLOSE]();
-    }
-    [SKIP](entry) {
-        this[UNPEND]();
-        entry.resume();
-    }
-    // Check if we can reuse an existing filesystem entry safely and
-    // overwrite it, rather than unlinking and recreating
-    // Windows doesn't report a useful nlink, so we just never reuse entries
-    [ISREUSABLE](entry, st) {
-        return (entry.type === 'File' &&
-            !this.unlink &&
-            st.isFile() &&
-            st.nlink <= 1 &&
-            !isWindows);
-    }
-    // check if a thing is there, and if so, try to clobber it
-    [CHECKFS](entry) {
-        this[PEND]();
-        const paths = [entry.path];
-        if (entry.linkpath) {
-            paths.push(entry.linkpath);
-        }
-        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
-    }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
-    [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
-        const done = (er) => {
-            this[PRUNECACHE](entry);
-            fullyDone(er);
-        };
-        const checkCwd = () => {
-            this[MKDIR](this.cwd, this.dmode, er => {
-                if (er) {
-                    this[ONERROR](er, entry);
-                    done();
-                    return;
-                }
-                this[CHECKED_CWD] = true;
-                start();
-            });
-        };
-        const start = () => {
-            if (entry.absolute !== this.cwd) {
-                const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
-                if (parent !== this.cwd) {
-                    return this[MKDIR](parent, this.dmode, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                            done();
-                            return;
-                        }
-                        afterMakeParent();
-                    });
-                }
-            }
-            afterMakeParent();
-        };
-        const afterMakeParent = () => {
-            fs.lstat(String(entry.absolute), (lstatEr, st) => {
-                if (st &&
-                    (this.keep ||
-                        /* c8 ignore next */
-                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-                    this[SKIP](entry);
-                    done();
-                    return;
-                }
-                if (lstatEr || this[ISREUSABLE](entry, st)) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                if (st.isDirectory()) {
-                    if (entry.type === 'Directory') {
-                        const needChmod = this.chmod &&
-                            entry.mode &&
-                            (st.mode & 0o7777) !== entry.mode;
-                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
-                        if (!needChmod) {
-                            return afterChmod();
-                        }
-                        return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
-                    }
-                    // Not a dir entry, have to remove it.
-                    // NB: the only way to end up with an entry that is the cwd
-                    // itself, in such a way that == does not detect, is a
-                    // tricky windows absolute path with UNC or 8.3 parts (and
-                    // preservePaths:true, or else it will have been stripped).
-                    // In that case, the user has opted out of path protections
-                    // explicitly, so if they blow away the cwd, c'est la vie.
-                    if (entry.absolute !== this.cwd) {
-                        return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
-                    }
-                }
-                // not a dir, and not reusable
-                // don't remove if the cwd, we want that error
-                if (entry.absolute === this.cwd) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
-            });
-        };
-        if (this[CHECKED_CWD]) {
-            start();
-        }
-        else {
-            checkCwd();
-        }
-    }
-    [MAKEFS](er, entry, done) {
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        switch (entry.type) {
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-                return this[FILE](entry, done);
-            case 'Link':
-                return this[HARDLINK](entry, done);
-            case 'SymbolicLink':
-                return this[SYMLINK](entry, done);
-            case 'Directory':
-            case 'GNUDumpDir':
-                return this[DIRECTORY](entry, done);
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        // XXX: get the type ('symlink' or 'junction') for windows
-        fs[link](linkpath, String(entry.absolute), er => {
-            if (er) {
-                this[ONERROR](er, entry);
-            }
-            else {
-                this[UNPEND]();
-                entry.resume();
-            }
-            done();
-        });
-    }
-}
-const callSync = (fn) => {
-    try {
-        return [null, fn()];
-    }
-    catch (er) {
-        return [er, null];
-    }
-};
-export class UnpackSync extends Unpack {
-    sync = true;
-    [MAKEFS](er, entry) {
-        return super[MAKEFS](er, entry, () => { });
-    }
-    [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
-        if (!this[CHECKED_CWD]) {
-            const er = this[MKDIR](this.cwd, this.dmode);
-            if (er) {
-                return this[ONERROR](er, entry);
-            }
-            this[CHECKED_CWD] = true;
-        }
-        // don't bother to make the parent if the current entry is the cwd,
-        // we've already checked it.
-        if (entry.absolute !== this.cwd) {
-            const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
-            if (parent !== this.cwd) {
-                const mkParent = this[MKDIR](parent, this.dmode);
-                if (mkParent) {
-                    return this[ONERROR](mkParent, entry);
-                }
-            }
-        }
-        const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute)));
-        if (st &&
-            (this.keep ||
-                /* c8 ignore next */
-                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-            return this[SKIP](entry);
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-            return this[MAKEFS](null, entry);
-        }
-        if (st.isDirectory()) {
-            if (entry.type === 'Directory') {
-                const needChmod = this.chmod &&
-                    entry.mode &&
-                    (st.mode & 0o7777) !== entry.mode;
-                const [er] = needChmod ?
-                    callSync(() => {
-                        fs.chmodSync(String(entry.absolute), Number(entry.mode));
-                    })
-                    : [];
-                return this[MAKEFS](er, entry);
-            }
-            // not a dir entry, have to remove it
-            const [er] = callSync(() => fs.rmdirSync(String(entry.absolute)));
-            this[MAKEFS](er, entry);
-        }
-        // not a dir, and not reusable.
-        // don't remove if it's the cwd, since we want that error.
-        const [er] = entry.absolute === this.cwd ?
-            []
-            : callSync(() => unlinkFileSync(String(entry.absolute)));
-        this[MAKEFS](er, entry);
-    }
-    [FILE](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const oner = (er) => {
-            let closeError;
-            try {
-                fs.closeSync(fd);
-            }
-            catch (e) {
-                closeError = e;
-            }
-            if (er || closeError) {
-                this[ONERROR](er || closeError, entry);
-            }
-            done();
-        };
-        let fd;
-        try {
-            fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
-        }
-        catch (er) {
-            return oner(er);
-        }
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => this[ONERROR](er, entry));
-            entry.pipe(tx);
-        }
-        tx.on('data', (chunk) => {
-            try {
-                fs.writeSync(fd, chunk, 0, chunk.length);
-            }
-            catch (er) {
-                oner(er);
-            }
-        });
-        tx.on('end', () => {
-            let er = null;
-            // try both, falling futimes back to utimes
-            // if either fails, handle the first error
-            if (entry.mtime && !this.noMtime) {
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                try {
-                    fs.futimesSync(fd, atime, mtime);
-                }
-                catch (futimeser) {
-                    try {
-                        fs.utimesSync(String(entry.absolute), atime, mtime);
-                    }
-                    catch (utimeser) {
-                        er = futimeser;
-                    }
-                }
-            }
-            if (this[DOCHOWN](entry)) {
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                try {
-                    fs.fchownSync(fd, Number(uid), Number(gid));
-                }
-                catch (fchowner) {
-                    try {
-                        fs.chownSync(String(entry.absolute), Number(uid), Number(gid));
-                    }
-                    catch (chowner) {
-                        er = er || fchowner;
-                    }
-                }
-            }
-            oner(er);
-        });
-    }
-    [DIRECTORY](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        const er = this[MKDIR](String(entry.absolute), mode);
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        if (entry.mtime && !this.noMtime) {
-            try {
-                fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-        if (this[DOCHOWN](entry)) {
-            try {
-                fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
-            }
-            catch (er) { }
-        }
-        done();
-        entry.resume();
-    }
-    [MKDIR](dir, mode) {
-        try {
-            return mkdirSync(normalizeWindowsPath(dir), {
-                uid: this.uid,
-                gid: this.gid,
-                processUid: this.processUid,
-                processGid: this.processGid,
-                umask: this.processUmask,
-                preserve: this.preservePaths,
-                unlink: this.unlink,
-                cache: this.dirCache,
-                cwd: this.cwd,
-                mode: mode,
-            });
-        }
-        catch (er) {
-            return er;
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        const ls = `${link}Sync`;
-        try {
-            fs[ls](linkpath, String(entry.absolute));
-            done();
-            entry.resume();
-        }
-        catch (er) {
-            return this[ONERROR](er, entry);
-        }
-    }
-}
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/update.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/update.js
deleted file mode 100644
index 21398e9766663..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/update.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// tar -u
-import { makeCommand } from './make-command.js';
-import { replace as r } from './replace.js';
-// just call tar.r with the filter and mtimeCache
-export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => {
-    r.validate?.(opt, entries);
-    mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
-    const filter = opt.filter;
-    if (!opt.mtimeCache) {
-        opt.mtimeCache = new Map();
-    }
-    opt.filter =
-        filter ?
-            (path, stat) => filter(path, stat) &&
-                !(
-                /* c8 ignore start */
-                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                    (stat.mtime ?? 0))
-                /* c8 ignore stop */
-                )
-            : (path, stat) => !(
-            /* c8 ignore start */
-            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                (stat.mtime ?? 0))
-            /* c8 ignore stop */
-            );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/warn-method.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/warn-method.js
deleted file mode 100644
index 13e798afefc85..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/warn-method.js
+++ /dev/null
@@ -1,27 +0,0 @@
-export const warnMethod = (self, code, message, data = {}) => {
-    if (self.file) {
-        data.file = self.file;
-    }
-    if (self.cwd) {
-        data.cwd = self.cwd;
-    }
-    data.code =
-        (message instanceof Error &&
-            message.code) ||
-            code;
-    data.tarCode = code;
-    if (!self.strict && data.recoverable !== false) {
-        if (message instanceof Error) {
-            data = Object.assign(message, data);
-            message = message.message;
-        }
-        self.emit('warn', code, message, data);
-    }
-    else if (message instanceof Error) {
-        self.emit('error', Object.assign(message, data));
-    }
-    else {
-        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
-    }
-};
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/winchars.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/winchars.js
deleted file mode 100644
index c41eb86d69a4b..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/winchars.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/write-entry.js b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/write-entry.js
deleted file mode 100644
index 9028cd676b4cd..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/dist/esm/write-entry.js
+++ /dev/null
@@ -1,657 +0,0 @@
-import fs from 'fs';
-import { Minipass } from 'minipass';
-import path from 'path';
-import { Header } from './header.js';
-import { modeFix } from './mode-fix.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { dealias, } from './options.js';
-import { Pax } from './pax.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import { warnMethod, } from './warn-method.js';
-import * as winchars from './winchars.js';
-const prefixPath = (path, prefix) => {
-    if (!prefix) {
-        return normalizeWindowsPath(path);
-    }
-    path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '');
-    return stripTrailingSlashes(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-export class WriteEntry extends Minipass {
-    path;
-    portable;
-    myuid = (process.getuid && process.getuid()) || 0;
-    // until node has builtin pwnam functions, this'll have to do
-    myuser = process.env.USER || '';
-    maxReadSize;
-    linkCache;
-    statCache;
-    preservePaths;
-    cwd;
-    strict;
-    mtime;
-    noPax;
-    noMtime;
-    prefix;
-    fd;
-    blockLen = 0;
-    blockRemain = 0;
-    buf;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    offset = 0;
-    win32;
-    absolute;
-    header;
-    type;
-    linkpath;
-    stat;
-    onWriteEntry;
-    #hadError = false;
-    constructor(p, opt_ = {}) {
-        const opt = dealias(opt_);
-        super();
-        this.path = normalizeWindowsPath(p);
-        // suppress atime, ctime, uid, gid, uname, gname
-        this.portable = !!opt.portable;
-        this.maxReadSize = opt.maxReadSize || maxReadSize;
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.preservePaths = !!opt.preservePaths;
-        this.cwd = normalizeWindowsPath(opt.cwd || process.cwd());
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.mtime = opt.mtime;
-        this.prefix =
-            opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined;
-        this.onWriteEntry = opt.onWriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = stripAbsolutePath(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.win32 = !!opt.win32 || process.platform === 'win32';
-        if (this.win32) {
-            // force the \ to / normalization, since we might not *actually*
-            // be on windows, but want \ to be considered a path separator.
-            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
-            p = p.replace(/\\/g, '/');
-        }
-        this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p));
-        if (this.path === '') {
-            this.path = './';
-        }
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        const cs = this.statCache.get(this.absolute);
-        if (cs) {
-            this[ONLSTAT](cs);
-        }
-        else {
-            this[LSTAT]();
-        }
-    }
-    warn(code, message, data = {}) {
-        return warnMethod(this, code, message, data);
-    }
-    emit(ev, ...data) {
-        if (ev === 'error') {
-            this.#hadError = true;
-        }
-        return super.emit(ev, ...data);
-    }
-    [LSTAT]() {
-        fs.lstat(this.absolute, (er, stat) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONLSTAT](stat);
-        });
-    }
-    [ONLSTAT](stat) {
-        this.statCache.set(this.absolute, stat);
-        this.stat = stat;
-        if (!stat.isFile()) {
-            stat.size = 0;
-        }
-        this.type = getType(stat);
-        this.emit('stat', stat);
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        switch (this.type) {
-            case 'File':
-                return this[FILE]();
-            case 'Directory':
-                return this[DIRECTORY]();
-            case 'SymbolicLink':
-                return this[SYMLINK]();
-            // unsupported types are ignored.
-            default:
-                return this.end();
-        }
-    }
-    [MODE](mode) {
-        return modeFix(mode, this.type === 'Directory', this.portable);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [HEADER]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot write header before stat');
-        }
-        /* c8 ignore stop */
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.onWriteEntry?.(this);
-        this.header = new Header({
-            path: this[PREFIX](this.path),
-            // only apply the prefix to hard links.
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this[MODE](this.stat.mode),
-            uid: this.portable ? undefined : this.stat.uid,
-            gid: this.portable ? undefined : this.stat.gid,
-            size: this.stat.size,
-            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
-            /* c8 ignore next */
-            type: this.type === 'Unsupported' ? undefined : this.type,
-            uname: this.portable ? undefined
-                : this.stat.uid === this.myuid ? this.myuser
-                    : '',
-            atime: this.portable ? undefined : this.stat.atime,
-            ctime: this.portable ? undefined : this.stat.ctime,
-        });
-        if (this.header.encode() && !this.noPax) {
-            super.write(new Pax({
-                atime: this.portable ? undefined : this.header.atime,
-                ctime: this.portable ? undefined : this.header.ctime,
-                gid: this.portable ? undefined : this.header.gid,
-                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.header.size,
-                uid: this.portable ? undefined : this.header.uid,
-                uname: this.portable ? undefined : this.header.uname,
-                dev: this.portable ? undefined : this.stat.dev,
-                ino: this.portable ? undefined : this.stat.ino,
-                nlink: this.portable ? undefined : this.stat.nlink,
-            }).encode());
-        }
-        const block = this.header?.block;
-        /* c8 ignore start */
-        if (!block) {
-            throw new Error('failed to encode header');
-        }
-        /* c8 ignore stop */
-        super.write(block);
-    }
-    [DIRECTORY]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create directory entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.path.slice(-1) !== '/') {
-            this.path += '/';
-        }
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [SYMLINK]() {
-        fs.readlink(this.absolute, (er, linkpath) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADLINK](linkpath);
-        });
-    }
-    [ONREADLINK](linkpath) {
-        this.linkpath = normalizeWindowsPath(linkpath);
-        this[HEADER]();
-        this.end();
-    }
-    [HARDLINK](linkpath) {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create link entry without stat');
-        }
-        /* c8 ignore stop */
-        this.type = 'Link';
-        this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [FILE]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create file entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.stat.nlink > 1) {
-            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
-            const linkpath = this.linkCache.get(linkKey);
-            if (linkpath?.indexOf(this.cwd) === 0) {
-                return this[HARDLINK](linkpath);
-            }
-            this.linkCache.set(linkKey, this.absolute);
-        }
-        this[HEADER]();
-        if (this.stat.size === 0) {
-            return this.end();
-        }
-        this[OPENFILE]();
-    }
-    [OPENFILE]() {
-        fs.open(this.absolute, 'r', (er, fd) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONOPENFILE](fd);
-        });
-    }
-    [ONOPENFILE](fd) {
-        this.fd = fd;
-        if (this.#hadError) {
-            return this[CLOSE]();
-        }
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('should stat before calling onopenfile');
-        }
-        /* c8 ignore start */
-        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
-        this.blockRemain = this.blockLen;
-        const bufLen = Math.min(this.blockLen, this.maxReadSize);
-        this.buf = Buffer.allocUnsafe(bufLen);
-        this.offset = 0;
-        this.pos = 0;
-        this.remain = this.stat.size;
-        this.length = this.buf.length;
-        this[READ]();
-    }
-    [READ]() {
-        const { fd, buf, offset, length, pos } = this;
-        if (fd === undefined || buf === undefined) {
-            throw new Error('cannot read file without first opening');
-        }
-        fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-            if (er) {
-                // ignoring the error from close(2) is a bad practice, but at
-                // this point we already have an error, don't need another one
-                return this[CLOSE](() => this.emit('error', er));
-            }
-            this[ONREAD](bytesRead);
-        });
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs.close(this.fd, cb);
-    }
-    [ONREAD](bytesRead) {
-        if (bytesRead <= 0 && this.remain > 0) {
-            const er = Object.assign(new Error('encountered unexpected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        if (bytesRead > this.remain) {
-            const er = Object.assign(new Error('did not encounter expected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('should have created buffer prior to reading');
-        }
-        /* c8 ignore stop */
-        // null out the rest of the buffer, if we could fit the block padding
-        // at the end of this loop, we've incremented bytesRead and this.remain
-        // to be incremented up to the blockRemain level, as if we had expected
-        // to get a null-padded file, and read it until the end.  then we will
-        // decrement both remain and blockRemain by bytesRead, and know that we
-        // reached the expected EOF, without any null buffer to append.
-        if (bytesRead === this.remain) {
-            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-                this.buf[i + this.offset] = 0;
-                bytesRead++;
-                this.remain++;
-            }
-        }
-        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
-            this.buf
-            : this.buf.subarray(this.offset, this.offset + bytesRead);
-        const flushed = this.write(chunk);
-        if (!flushed) {
-            this[AWAITDRAIN](() => this[ONDRAIN]());
-        }
-        else {
-            this[ONDRAIN]();
-        }
-    }
-    [AWAITDRAIN](cb) {
-        this.once('drain', cb);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        if (this.blockRemain < chunk.length) {
-            const er = Object.assign(new Error('writing more data than expected'), {
-                path: this.absolute,
-            });
-            return this.emit('error', er);
-        }
-        this.remain -= chunk.length;
-        this.blockRemain -= chunk.length;
-        this.pos += chunk.length;
-        this.offset += chunk.length;
-        return super.write(chunk, null, cb);
-    }
-    [ONDRAIN]() {
-        if (!this.remain) {
-            if (this.blockRemain) {
-                super.write(Buffer.alloc(this.blockRemain));
-            }
-            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('buffer lost somehow in ONDRAIN');
-        }
-        /* c8 ignore stop */
-        if (this.offset >= this.length) {
-            // if we only have a smaller bit left to read, alloc a smaller buffer
-            // otherwise, keep it the same length it was before.
-            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
-            this.offset = 0;
-        }
-        this.length = this.buf.length - this.offset;
-        this[READ]();
-    }
-}
-export class WriteEntrySync extends WriteEntry {
-    sync = true;
-    [LSTAT]() {
-        this[ONLSTAT](fs.lstatSync(this.absolute));
-    }
-    [SYMLINK]() {
-        this[ONREADLINK](fs.readlinkSync(this.absolute));
-    }
-    [OPENFILE]() {
-        this[ONOPENFILE](fs.openSync(this.absolute, 'r'));
-    }
-    [READ]() {
-        let threw = true;
-        try {
-            const { fd, buf, offset, length, pos } = this;
-            /* c8 ignore start */
-            if (fd === undefined || buf === undefined) {
-                throw new Error('fd and buf must be set in READ method');
-            }
-            /* c8 ignore stop */
-            const bytesRead = fs.readSync(fd, buf, offset, length, pos);
-            this[ONREAD](bytesRead);
-            threw = false;
-        }
-        finally {
-            // ignoring the error from close(2) is a bad practice, but at
-            // this point we already have an error, don't need another one
-            if (threw) {
-                try {
-                    this[CLOSE](() => { });
-                }
-                catch (er) { }
-            }
-        }
-    }
-    [AWAITDRAIN](cb) {
-        cb();
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs.closeSync(this.fd);
-        cb();
-    }
-}
-export class WriteEntryTar extends Minipass {
-    blockLen = 0;
-    blockRemain = 0;
-    buf = 0;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    preservePaths;
-    portable;
-    strict;
-    noPax;
-    noMtime;
-    readEntry;
-    type;
-    prefix;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    header;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    size;
-    onWriteEntry;
-    warn(code, message, data = {}) {
-        return warnMethod(this, code, message, data);
-    }
-    constructor(readEntry, opt_ = {}) {
-        const opt = dealias(opt_);
-        super();
-        this.preservePaths = !!opt.preservePaths;
-        this.portable = !!opt.portable;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.onWriteEntry = opt.onWriteEntry;
-        this.readEntry = readEntry;
-        const { type } = readEntry;
-        /* c8 ignore start */
-        if (type === 'Unsupported') {
-            throw new Error('writing entry that should be ignored');
-        }
-        /* c8 ignore stop */
-        this.type = type;
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.prefix = opt.prefix;
-        this.path = normalizeWindowsPath(readEntry.path);
-        this.mode =
-            readEntry.mode !== undefined ?
-                this[MODE](readEntry.mode)
-                : undefined;
-        this.uid = this.portable ? undefined : readEntry.uid;
-        this.gid = this.portable ? undefined : readEntry.gid;
-        this.uname = this.portable ? undefined : readEntry.uname;
-        this.gname = this.portable ? undefined : readEntry.gname;
-        this.size = readEntry.size;
-        this.mtime =
-            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
-        this.atime = this.portable ? undefined : readEntry.atime;
-        this.ctime = this.portable ? undefined : readEntry.ctime;
-        this.linkpath =
-            readEntry.linkpath !== undefined ?
-                normalizeWindowsPath(readEntry.linkpath)
-                : undefined;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = stripAbsolutePath(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.remain = readEntry.size;
-        this.blockRemain = readEntry.startBlockSize;
-        this.onWriteEntry?.(this);
-        this.header = new Header({
-            path: this[PREFIX](this.path),
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this.mode,
-            uid: this.portable ? undefined : this.uid,
-            gid: this.portable ? undefined : this.gid,
-            size: this.size,
-            mtime: this.noMtime ? undefined : this.mtime,
-            type: this.type,
-            uname: this.portable ? undefined : this.uname,
-            atime: this.portable ? undefined : this.atime,
-            ctime: this.portable ? undefined : this.ctime,
-        });
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        if (this.header.encode() && !this.noPax) {
-            super.write(new Pax({
-                atime: this.portable ? undefined : this.atime,
-                ctime: this.portable ? undefined : this.ctime,
-                gid: this.portable ? undefined : this.gid,
-                mtime: this.noMtime ? undefined : this.mtime,
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.size,
-                uid: this.portable ? undefined : this.uid,
-                uname: this.portable ? undefined : this.uname,
-                dev: this.portable ? undefined : this.readEntry.dev,
-                ino: this.portable ? undefined : this.readEntry.ino,
-                nlink: this.portable ? undefined : this.readEntry.nlink,
-            }).encode());
-        }
-        const b = this.header?.block;
-        /* c8 ignore start */
-        if (!b)
-            throw new Error('failed to encode header');
-        /* c8 ignore stop */
-        super.write(b);
-        readEntry.pipe(this);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [MODE](mode) {
-        return modeFix(mode, this.type === 'Directory', this.portable);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        const writeLen = chunk.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        this.blockRemain -= writeLen;
-        return super.write(chunk, cb);
-    }
-    end(chunk, encoding, cb) {
-        if (this.blockRemain) {
-            super.write(Buffer.alloc(this.blockRemain));
-        }
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding ?? 'utf8');
-        }
-        if (cb)
-            this.once('finish', cb);
-        chunk ? super.end(chunk, cb) : super.end(cb);
-        /* c8 ignore stop */
-        return this;
-    }
-}
-const getType = (stat) => stat.isFile() ? 'File'
-    : stat.isDirectory() ? 'Directory'
-        : stat.isSymbolicLink() ? 'SymbolicLink'
-            : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/tar/package.json
deleted file mode 100644
index 0283103ee9eaf..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/tar/package.json
+++ /dev/null
@@ -1,325 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter",
-  "name": "tar",
-  "description": "tar for node",
-  "version": "7.4.3",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-tar.git"
-  },
-  "scripts": {
-    "genparse": "node scripts/generate-parse-fixtures.js",
-    "snap": "tap",
-    "test": "tap",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "tshy",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "dependencies": {
-    "@isaacs/fs-minipass": "^4.0.0",
-    "chownr": "^3.0.0",
-    "minipass": "^7.1.2",
-    "minizlib": "^3.0.1",
-    "mkdirp": "^3.0.1",
-    "yallist": "^5.0.0"
-  },
-  "devDependencies": {
-    "chmodr": "^1.2.0",
-    "end-of-stream": "^1.4.3",
-    "events-to-array": "^2.0.3",
-    "mutate-fs": "^2.1.1",
-    "nock": "^13.5.4",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "license": "ISC",
-  "engines": {
-    "node": ">=18"
-  },
-  "files": [
-    "dist"
-  ],
-  "tap": {
-    "coverage-map": "map.js",
-    "timeout": 0,
-    "typecheck": true
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts",
-      "./c": "./src/create.ts",
-      "./create": "./src/create.ts",
-      "./replace": "./src/create.ts",
-      "./r": "./src/create.ts",
-      "./list": "./src/list.ts",
-      "./t": "./src/list.ts",
-      "./update": "./src/update.ts",
-      "./u": "./src/update.ts",
-      "./extract": "./src/extract.ts",
-      "./x": "./src/extract.ts",
-      "./pack": "./src/pack.ts",
-      "./unpack": "./src/unpack.ts",
-      "./parse": "./src/parse.ts",
-      "./read-entry": "./src/read-entry.ts",
-      "./write-entry": "./src/write-entry.ts",
-      "./header": "./src/header.ts",
-      "./pax": "./src/pax.ts",
-      "./types": "./src/types.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "source": "./src/index.ts",
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "source": "./src/index.ts",
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./c": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./create": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./replace": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./r": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./list": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./t": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./update": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./u": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./extract": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./x": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./pack": {
-      "import": {
-        "source": "./src/pack.ts",
-        "types": "./dist/esm/pack.d.ts",
-        "default": "./dist/esm/pack.js"
-      },
-      "require": {
-        "source": "./src/pack.ts",
-        "types": "./dist/commonjs/pack.d.ts",
-        "default": "./dist/commonjs/pack.js"
-      }
-    },
-    "./unpack": {
-      "import": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/esm/unpack.d.ts",
-        "default": "./dist/esm/unpack.js"
-      },
-      "require": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/commonjs/unpack.d.ts",
-        "default": "./dist/commonjs/unpack.js"
-      }
-    },
-    "./parse": {
-      "import": {
-        "source": "./src/parse.ts",
-        "types": "./dist/esm/parse.d.ts",
-        "default": "./dist/esm/parse.js"
-      },
-      "require": {
-        "source": "./src/parse.ts",
-        "types": "./dist/commonjs/parse.d.ts",
-        "default": "./dist/commonjs/parse.js"
-      }
-    },
-    "./read-entry": {
-      "import": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/esm/read-entry.d.ts",
-        "default": "./dist/esm/read-entry.js"
-      },
-      "require": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/commonjs/read-entry.d.ts",
-        "default": "./dist/commonjs/read-entry.js"
-      }
-    },
-    "./write-entry": {
-      "import": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/esm/write-entry.d.ts",
-        "default": "./dist/esm/write-entry.js"
-      },
-      "require": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/commonjs/write-entry.d.ts",
-        "default": "./dist/commonjs/write-entry.js"
-      }
-    },
-    "./header": {
-      "import": {
-        "source": "./src/header.ts",
-        "types": "./dist/esm/header.d.ts",
-        "default": "./dist/esm/header.js"
-      },
-      "require": {
-        "source": "./src/header.ts",
-        "types": "./dist/commonjs/header.d.ts",
-        "default": "./dist/commonjs/header.js"
-      }
-    },
-    "./pax": {
-      "import": {
-        "source": "./src/pax.ts",
-        "types": "./dist/esm/pax.d.ts",
-        "default": "./dist/esm/pax.js"
-      },
-      "require": {
-        "source": "./src/pax.ts",
-        "types": "./dist/commonjs/pax.d.ts",
-        "default": "./dist/commonjs/pax.js"
-      }
-    },
-    "./types": {
-      "import": {
-        "source": "./src/types.ts",
-        "types": "./dist/esm/types.d.ts",
-        "default": "./dist/esm/types.js"
-      },
-      "require": {
-        "source": "./src/types.ts",
-        "types": "./dist/commonjs/types.d.ts",
-        "default": "./dist/commonjs/types.js"
-      }
-    }
-  },
-  "type": "module",
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/LICENSE.md b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/LICENSE.md
deleted file mode 100644
index 881248b6d7f0c..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/LICENSE.md
+++ /dev/null
@@ -1,63 +0,0 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/index.js
deleted file mode 100644
index c1e1e4741689d..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/index.js
+++ /dev/null
@@ -1,384 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Node = exports.Yallist = void 0;
-class Yallist {
-    tail;
-    head;
-    length = 0;
-    static create(list = []) {
-        return new Yallist(list);
-    }
-    constructor(list = []) {
-        for (const item of list) {
-            this.push(item);
-        }
-    }
-    *[Symbol.iterator]() {
-        for (let walker = this.head; walker; walker = walker.next) {
-            yield walker.value;
-        }
-    }
-    removeNode(node) {
-        if (node.list !== this) {
-            throw new Error('removing node which does not belong to this list');
-        }
-        const next = node.next;
-        const prev = node.prev;
-        if (next) {
-            next.prev = prev;
-        }
-        if (prev) {
-            prev.next = next;
-        }
-        if (node === this.head) {
-            this.head = next;
-        }
-        if (node === this.tail) {
-            this.tail = prev;
-        }
-        this.length--;
-        node.next = undefined;
-        node.prev = undefined;
-        node.list = undefined;
-        return next;
-    }
-    unshiftNode(node) {
-        if (node === this.head) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const head = this.head;
-        node.list = this;
-        node.next = head;
-        if (head) {
-            head.prev = node;
-        }
-        this.head = node;
-        if (!this.tail) {
-            this.tail = node;
-        }
-        this.length++;
-    }
-    pushNode(node) {
-        if (node === this.tail) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const tail = this.tail;
-        node.list = this;
-        node.prev = tail;
-        if (tail) {
-            tail.next = node;
-        }
-        this.tail = node;
-        if (!this.head) {
-            this.head = node;
-        }
-        this.length++;
-    }
-    push(...args) {
-        for (let i = 0, l = args.length; i < l; i++) {
-            push(this, args[i]);
-        }
-        return this.length;
-    }
-    unshift(...args) {
-        for (var i = 0, l = args.length; i < l; i++) {
-            unshift(this, args[i]);
-        }
-        return this.length;
-    }
-    pop() {
-        if (!this.tail) {
-            return undefined;
-        }
-        const res = this.tail.value;
-        const t = this.tail;
-        this.tail = this.tail.prev;
-        if (this.tail) {
-            this.tail.next = undefined;
-        }
-        else {
-            this.head = undefined;
-        }
-        t.list = undefined;
-        this.length--;
-        return res;
-    }
-    shift() {
-        if (!this.head) {
-            return undefined;
-        }
-        const res = this.head.value;
-        const h = this.head;
-        this.head = this.head.next;
-        if (this.head) {
-            this.head.prev = undefined;
-        }
-        else {
-            this.tail = undefined;
-        }
-        h.list = undefined;
-        this.length--;
-        return res;
-    }
-    forEach(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.head, i = 0; !!walker; i++) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.next;
-        }
-    }
-    forEachReverse(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.prev;
-        }
-    }
-    get(n) {
-        let i = 0;
-        let walker = this.head;
-        for (; !!walker && i < n; i++) {
-            walker = walker.next;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    getReverse(n) {
-        let i = 0;
-        let walker = this.tail;
-        for (; !!walker && i < n; i++) {
-            // abort out of the list early if we hit a cycle
-            walker = walker.prev;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    map(fn, thisp) {
-        thisp = thisp || this;
-        const res = new Yallist();
-        for (let walker = this.head; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.next;
-        }
-        return res;
-    }
-    mapReverse(fn, thisp) {
-        thisp = thisp || this;
-        var res = new Yallist();
-        for (let walker = this.tail; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.prev;
-        }
-        return res;
-    }
-    reduce(fn, initial) {
-        let acc;
-        let walker = this.head;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.head) {
-            walker = this.head.next;
-            acc = this.head.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (var i = 0; !!walker; i++) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.next;
-        }
-        return acc;
-    }
-    reduceReverse(fn, initial) {
-        let acc;
-        let walker = this.tail;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.tail) {
-            walker = this.tail.prev;
-            acc = this.tail.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (let i = this.length - 1; !!walker; i--) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.prev;
-        }
-        return acc;
-    }
-    toArray() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.head; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.next;
-        }
-        return arr;
-    }
-    toArrayReverse() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.tail; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.prev;
-        }
-        return arr;
-    }
-    slice(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let walker = this.head;
-        let i = 0;
-        for (i = 0; !!walker && i < from; i++) {
-            walker = walker.next;
-        }
-        for (; !!walker && i < to; i++, walker = walker.next) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    sliceReverse(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let i = this.length;
-        let walker = this.tail;
-        for (; !!walker && i > to; i--) {
-            walker = walker.prev;
-        }
-        for (; !!walker && i > from; i--, walker = walker.prev) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    splice(start, deleteCount = 0, ...nodes) {
-        if (start > this.length) {
-            start = this.length - 1;
-        }
-        if (start < 0) {
-            start = this.length + start;
-        }
-        let walker = this.head;
-        for (let i = 0; !!walker && i < start; i++) {
-            walker = walker.next;
-        }
-        const ret = [];
-        for (let i = 0; !!walker && i < deleteCount; i++) {
-            ret.push(walker.value);
-            walker = this.removeNode(walker);
-        }
-        if (!walker) {
-            walker = this.tail;
-        }
-        else if (walker !== this.tail) {
-            walker = walker.prev;
-        }
-        for (const v of nodes) {
-            walker = insertAfter(this, walker, v);
-        }
-        return ret;
-    }
-    reverse() {
-        const head = this.head;
-        const tail = this.tail;
-        for (let walker = head; !!walker; walker = walker.prev) {
-            const p = walker.prev;
-            walker.prev = walker.next;
-            walker.next = p;
-        }
-        this.head = tail;
-        this.tail = head;
-        return this;
-    }
-}
-exports.Yallist = Yallist;
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
-    const prev = node;
-    const next = node ? node.next : self.head;
-    const inserted = new Node(value, prev, next, self);
-    if (inserted.next === undefined) {
-        self.tail = inserted;
-    }
-    if (inserted.prev === undefined) {
-        self.head = inserted;
-    }
-    self.length++;
-    return inserted;
-}
-function push(self, item) {
-    self.tail = new Node(item, self.tail, undefined, self);
-    if (!self.head) {
-        self.head = self.tail;
-    }
-    self.length++;
-}
-function unshift(self, item) {
-    self.head = new Node(item, undefined, self.head, self);
-    if (!self.tail) {
-        self.tail = self.head;
-    }
-    self.length++;
-}
-class Node {
-    list;
-    next;
-    prev;
-    value;
-    constructor(value, prev, next, list) {
-        this.list = list;
-        this.value = value;
-        if (prev) {
-            prev.next = this;
-            this.prev = prev;
-        }
-        else {
-            this.prev = undefined;
-        }
-        if (next) {
-            next.prev = this;
-            this.next = next;
-        }
-        else {
-            this.next = undefined;
-        }
-    }
-}
-exports.Node = Node;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/index.js b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/index.js
deleted file mode 100644
index 3d81c5113b93a..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/index.js
+++ /dev/null
@@ -1,379 +0,0 @@
-export class Yallist {
-    tail;
-    head;
-    length = 0;
-    static create(list = []) {
-        return new Yallist(list);
-    }
-    constructor(list = []) {
-        for (const item of list) {
-            this.push(item);
-        }
-    }
-    *[Symbol.iterator]() {
-        for (let walker = this.head; walker; walker = walker.next) {
-            yield walker.value;
-        }
-    }
-    removeNode(node) {
-        if (node.list !== this) {
-            throw new Error('removing node which does not belong to this list');
-        }
-        const next = node.next;
-        const prev = node.prev;
-        if (next) {
-            next.prev = prev;
-        }
-        if (prev) {
-            prev.next = next;
-        }
-        if (node === this.head) {
-            this.head = next;
-        }
-        if (node === this.tail) {
-            this.tail = prev;
-        }
-        this.length--;
-        node.next = undefined;
-        node.prev = undefined;
-        node.list = undefined;
-        return next;
-    }
-    unshiftNode(node) {
-        if (node === this.head) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const head = this.head;
-        node.list = this;
-        node.next = head;
-        if (head) {
-            head.prev = node;
-        }
-        this.head = node;
-        if (!this.tail) {
-            this.tail = node;
-        }
-        this.length++;
-    }
-    pushNode(node) {
-        if (node === this.tail) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const tail = this.tail;
-        node.list = this;
-        node.prev = tail;
-        if (tail) {
-            tail.next = node;
-        }
-        this.tail = node;
-        if (!this.head) {
-            this.head = node;
-        }
-        this.length++;
-    }
-    push(...args) {
-        for (let i = 0, l = args.length; i < l; i++) {
-            push(this, args[i]);
-        }
-        return this.length;
-    }
-    unshift(...args) {
-        for (var i = 0, l = args.length; i < l; i++) {
-            unshift(this, args[i]);
-        }
-        return this.length;
-    }
-    pop() {
-        if (!this.tail) {
-            return undefined;
-        }
-        const res = this.tail.value;
-        const t = this.tail;
-        this.tail = this.tail.prev;
-        if (this.tail) {
-            this.tail.next = undefined;
-        }
-        else {
-            this.head = undefined;
-        }
-        t.list = undefined;
-        this.length--;
-        return res;
-    }
-    shift() {
-        if (!this.head) {
-            return undefined;
-        }
-        const res = this.head.value;
-        const h = this.head;
-        this.head = this.head.next;
-        if (this.head) {
-            this.head.prev = undefined;
-        }
-        else {
-            this.tail = undefined;
-        }
-        h.list = undefined;
-        this.length--;
-        return res;
-    }
-    forEach(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.head, i = 0; !!walker; i++) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.next;
-        }
-    }
-    forEachReverse(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.prev;
-        }
-    }
-    get(n) {
-        let i = 0;
-        let walker = this.head;
-        for (; !!walker && i < n; i++) {
-            walker = walker.next;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    getReverse(n) {
-        let i = 0;
-        let walker = this.tail;
-        for (; !!walker && i < n; i++) {
-            // abort out of the list early if we hit a cycle
-            walker = walker.prev;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    map(fn, thisp) {
-        thisp = thisp || this;
-        const res = new Yallist();
-        for (let walker = this.head; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.next;
-        }
-        return res;
-    }
-    mapReverse(fn, thisp) {
-        thisp = thisp || this;
-        var res = new Yallist();
-        for (let walker = this.tail; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.prev;
-        }
-        return res;
-    }
-    reduce(fn, initial) {
-        let acc;
-        let walker = this.head;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.head) {
-            walker = this.head.next;
-            acc = this.head.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (var i = 0; !!walker; i++) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.next;
-        }
-        return acc;
-    }
-    reduceReverse(fn, initial) {
-        let acc;
-        let walker = this.tail;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.tail) {
-            walker = this.tail.prev;
-            acc = this.tail.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (let i = this.length - 1; !!walker; i--) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.prev;
-        }
-        return acc;
-    }
-    toArray() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.head; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.next;
-        }
-        return arr;
-    }
-    toArrayReverse() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.tail; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.prev;
-        }
-        return arr;
-    }
-    slice(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let walker = this.head;
-        let i = 0;
-        for (i = 0; !!walker && i < from; i++) {
-            walker = walker.next;
-        }
-        for (; !!walker && i < to; i++, walker = walker.next) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    sliceReverse(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let i = this.length;
-        let walker = this.tail;
-        for (; !!walker && i > to; i--) {
-            walker = walker.prev;
-        }
-        for (; !!walker && i > from; i--, walker = walker.prev) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    splice(start, deleteCount = 0, ...nodes) {
-        if (start > this.length) {
-            start = this.length - 1;
-        }
-        if (start < 0) {
-            start = this.length + start;
-        }
-        let walker = this.head;
-        for (let i = 0; !!walker && i < start; i++) {
-            walker = walker.next;
-        }
-        const ret = [];
-        for (let i = 0; !!walker && i < deleteCount; i++) {
-            ret.push(walker.value);
-            walker = this.removeNode(walker);
-        }
-        if (!walker) {
-            walker = this.tail;
-        }
-        else if (walker !== this.tail) {
-            walker = walker.prev;
-        }
-        for (const v of nodes) {
-            walker = insertAfter(this, walker, v);
-        }
-        return ret;
-    }
-    reverse() {
-        const head = this.head;
-        const tail = this.tail;
-        for (let walker = head; !!walker; walker = walker.prev) {
-            const p = walker.prev;
-            walker.prev = walker.next;
-            walker.next = p;
-        }
-        this.head = tail;
-        this.tail = head;
-        return this;
-    }
-}
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
-    const prev = node;
-    const next = node ? node.next : self.head;
-    const inserted = new Node(value, prev, next, self);
-    if (inserted.next === undefined) {
-        self.tail = inserted;
-    }
-    if (inserted.prev === undefined) {
-        self.head = inserted;
-    }
-    self.length++;
-    return inserted;
-}
-function push(self, item) {
-    self.tail = new Node(item, self.tail, undefined, self);
-    if (!self.head) {
-        self.head = self.tail;
-    }
-    self.length++;
-}
-function unshift(self, item) {
-    self.head = new Node(item, undefined, self.head, self);
-    if (!self.tail) {
-        self.tail = self.head;
-    }
-    self.length++;
-}
-export class Node {
-    list;
-    next;
-    prev;
-    value;
-    constructor(value, prev, next, list) {
-        this.list = list;
-        this.value = value;
-        if (prev) {
-            prev.next = this;
-            this.prev = prev;
-        }
-        else {
-            this.prev = undefined;
-        }
-        if (next) {
-            next.prev = this;
-            this.next = next;
-        }
-        else {
-            this.next = undefined;
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/package.json b/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/package.json
deleted file mode 100644
index 2f5247808bbea..0000000000000
--- a/node_modules/@npmcli/metavuln-calculator/node_modules/yallist/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "name": "yallist",
-  "version": "5.0.0",
-  "description": "Yet Another Linked List",
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "prettier": "^3.2.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
-    "typedoc": "typedoc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/yallist.git"
-  },
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "BlueOak-1.0.0",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "engines": {
-    "node": ">=18"
-  }
-}
diff --git a/node_modules/@npmcli/metavuln-calculator/package.json b/node_modules/@npmcli/metavuln-calculator/package.json
index fe39fcdf1fcb7..9d17000653c0e 100644
--- a/node_modules/@npmcli/metavuln-calculator/package.json
+++ b/node_modules/@npmcli/metavuln-calculator/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/metavuln-calculator",
-  "version": "9.0.1",
+  "version": "9.0.2",
   "main": "lib/index.js",
   "files": [
     "bin/",
@@ -39,7 +39,7 @@
     "tap": "^16.0.1"
   },
   "dependencies": {
-    "cacache": "^19.0.0",
+    "cacache": "^20.0.0",
     "json-parse-even-better-errors": "^4.0.0",
     "pacote": "^21.0.0",
     "proc-log": "^5.0.0",
diff --git a/package-lock.json b/package-lock.json
index bc2c637083dbd..26d1e8b77df0c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3555,12 +3555,12 @@
       }
     },
     "node_modules/@npmcli/metavuln-calculator": {
-      "version": "9.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-9.0.1.tgz",
-      "integrity": "sha512-B7ziEnkSmnauecEvFbg9h0d2CVa3uJudd9bTDc9vScfYdRETkQkCriFiYCV3PXE++igd5JRw35WJz902HnGrCg==",
+      "version": "9.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-9.0.2.tgz",
+      "integrity": "sha512-eESzlCRLuD30qYefT2jYZTUepgu9DNJQdXABGGxjkir055x2UtnpNfDZCA6OJxButQNgxNKc9AeTchYxSgbMCw==",
       "license": "ISC",
       "dependencies": {
-        "cacache": "^19.0.0",
+        "cacache": "^20.0.0",
         "json-parse-even-better-errors": "^4.0.0",
         "pacote": "^21.0.0",
         "proc-log": "^5.0.0",
@@ -3570,91 +3570,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/cacache": {
-      "version": "19.0.1",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
-      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/fs": "^4.0.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^10.0.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^2.0.1",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^7.0.2",
-        "ssri": "^12.0.0",
-        "tar": "^7.4.3",
-        "unique-filename": "^4.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/chownr": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
-      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/minizlib": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/mkdirp": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
-      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "dist/cjs/src/bin.js"
-      },
-      "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/tar": {
-      "version": "7.4.3",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
-      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/fs-minipass": "^4.0.0",
-        "chownr": "^3.0.0",
-        "minipass": "^7.1.2",
-        "minizlib": "^3.0.1",
-        "mkdirp": "^3.0.1",
-        "yallist": "^5.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/@npmcli/metavuln-calculator/node_modules/yallist": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
-      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "node_modules/@npmcli/mock-globals": {
       "resolved": "mock-globals",
       "link": true
@@ -19698,7 +19613,7 @@
         "@npmcli/fs": "^4.0.0",
         "@npmcli/installed-package-contents": "^3.0.0",
         "@npmcli/map-workspaces": "^5.0.0",
-        "@npmcli/metavuln-calculator": "^9.0.1",
+        "@npmcli/metavuln-calculator": "^9.0.2",
         "@npmcli/name-from-folder": "^3.0.0",
         "@npmcli/node-gyp": "^4.0.0",
         "@npmcli/package-json": "^7.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 993898149542b..372a983a946bb 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -7,7 +7,7 @@
     "@npmcli/fs": "^4.0.0",
     "@npmcli/installed-package-contents": "^3.0.0",
     "@npmcli/map-workspaces": "^5.0.0",
-    "@npmcli/metavuln-calculator": "^9.0.1",
+    "@npmcli/metavuln-calculator": "^9.0.2",
     "@npmcli/name-from-folder": "^3.0.0",
     "@npmcli/node-gyp": "^4.0.0",
     "@npmcli/package-json": "^7.0.0",

From b5bd5e351061b46d6417210cd73c0f64c39e6819 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:39:20 -0700
Subject: [PATCH 12/63] deps: npm-profile@12.0.0

---
 node_modules/.gitignore                       |    9 +
 .../node_modules/hosted-git-info/LICENSE      |   13 +
 .../hosted-git-info/lib/from-url.js           |  122 ++
 .../node_modules/hosted-git-info/lib/hosts.js |  231 +++
 .../node_modules/hosted-git-info/lib/index.js |  227 +++
 .../hosted-git-info/lib/parse-url.js          |   78 +
 .../node_modules/hosted-git-info/package.json |   61 +
 .../node_modules/lru-cache/LICENSE            |   15 +
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    3 +
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ++++++++++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    3 +
 .../node_modules/lru-cache/package.json       |  113 ++
 .../node_modules/make-fetch-happen/LICENSE    |   16 +
 .../make-fetch-happen/lib/cache/entry.js      |  471 +++++
 .../make-fetch-happen/lib/cache/errors.js     |   11 +
 .../make-fetch-happen/lib/cache/index.js      |   49 +
 .../make-fetch-happen/lib/cache/key.js        |   17 +
 .../make-fetch-happen/lib/cache/policy.js     |  161 ++
 .../make-fetch-happen/lib/fetch.js            |  118 ++
 .../make-fetch-happen/lib/index.js            |   41 +
 .../make-fetch-happen/lib/options.js          |   59 +
 .../make-fetch-happen/lib/pipeline.js         |   41 +
 .../make-fetch-happen/lib/remote.js           |  132 ++
 .../make-fetch-happen/package.json            |   74 +
 .../npm-profile/node_modules/minizlib/LICENSE |   26 +
 .../minizlib/dist/commonjs/constants.js       |  123 ++
 .../minizlib/dist/commonjs/index.js           |  392 +++++
 .../minizlib/dist/commonjs/package.json       |    3 +
 .../minizlib/dist/esm/constants.js            |  117 ++
 .../node_modules/minizlib/dist/esm/index.js   |  340 ++++
 .../minizlib/dist/esm/package.json            |    3 +
 .../node_modules/minizlib/package.json        |   80 +
 .../node_modules/negotiator/HISTORY.md        |  114 ++
 .../node_modules/negotiator/LICENSE           |   24 +
 .../node_modules/negotiator/index.js          |   83 +
 .../node_modules/negotiator/lib/charset.js    |  169 ++
 .../node_modules/negotiator/lib/encoding.js   |  205 +++
 .../node_modules/negotiator/lib/language.js   |  179 ++
 .../node_modules/negotiator/lib/mediaType.js  |  294 ++++
 .../node_modules/negotiator/package.json      |   43 +
 .../node_modules/npm-package-arg/LICENSE      |   15 +
 .../node_modules/npm-package-arg/lib/npa.js   |  481 +++++
 .../node_modules/npm-package-arg/package.json |   61 +
 .../npm-registry-fetch/LICENSE.md             |   20 +
 .../npm-registry-fetch/lib/auth.js            |  181 ++
 .../npm-registry-fetch/lib/check-response.js  |  108 ++
 .../npm-registry-fetch/lib/default-opts.js    |   19 +
 .../npm-registry-fetch/lib/errors.js          |   80 +
 .../npm-registry-fetch/lib/index.js           |  247 +++
 .../npm-registry-fetch/lib/json-stream.js     |  223 +++
 .../npm-registry-fetch/package.json           |   68 +
 node_modules/npm-profile/package.json         |   12 +-
 package-lock.json                             |  117 +-
 package.json                                  |    2 +-
 57 files changed, 9009 insertions(+), 13 deletions(-)
 create mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
 create mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
 create mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
 create mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
 create mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
 create mode 100644 node_modules/npm-profile/node_modules/hosted-git-info/package.json
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/LICENSE
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.min.js
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/package.json
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.min.js
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/dist/esm/package.json
 create mode 100644 node_modules/npm-profile/node_modules/lru-cache/package.json
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
 create mode 100644 node_modules/npm-profile/node_modules/make-fetch-happen/package.json
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/LICENSE
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json
 create mode 100644 node_modules/npm-profile/node_modules/minizlib/package.json
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/HISTORY.md
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/LICENSE
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/index.js
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/lib/charset.js
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/lib/encoding.js
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/lib/language.js
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/lib/mediaType.js
 create mode 100644 node_modules/npm-profile/node_modules/negotiator/package.json
 create mode 100644 node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
 create mode 100644 node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-package-arg/package.json
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js
 create mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 8815394a1bbc1..8d6961c785a5c 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -195,6 +195,15 @@
 !/npm-packlist/node_modules/minimatch
 !/npm-pick-manifest
 !/npm-profile
+!/npm-profile/node_modules/
+/npm-profile/node_modules/*
+!/npm-profile/node_modules/hosted-git-info
+!/npm-profile/node_modules/lru-cache
+!/npm-profile/node_modules/make-fetch-happen
+!/npm-profile/node_modules/minizlib
+!/npm-profile/node_modules/negotiator
+!/npm-profile/node_modules/npm-package-arg
+!/npm-profile/node_modules/npm-registry-fetch
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
 /npm-registry-fetch/node_modules/*
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE b/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
new file mode 100644
index 0000000000000..45055763dc838
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
@@ -0,0 +1,13 @@
+Copyright (c) 2015, Rebecca Turner
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
new file mode 100644
index 0000000000000..efc1247d59d12
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
@@ -0,0 +1,122 @@
+'use strict'
+
+const parseUrl = require('./parse-url')
+
+// look for github shorthand inputs, such as npm/cli
+const isGitHubShorthand = (arg) => {
+  // it cannot contain whitespace before the first #
+  // it cannot start with a / because that's probably an absolute file path
+  // but it must include a slash since repos are username/repository
+  // it cannot start with a . because that's probably a relative file path
+  // it cannot start with an @ because that's a scoped package if it passes the other tests
+  // it cannot contain a : before a # because that tells us that there's a protocol
+  // a second / may not exist before a #
+  const firstHash = arg.indexOf('#')
+  const firstSlash = arg.indexOf('/')
+  const secondSlash = arg.indexOf('/', firstSlash + 1)
+  const firstColon = arg.indexOf(':')
+  const firstSpace = /\s/.exec(arg)
+  const firstAt = arg.indexOf('@')
+
+  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
+  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
+  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
+  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
+  const hasSlash = firstSlash > 0
+  // if a # is found, what we really want to know is that the character
+  // immediately before # is not a /
+  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
+  const doesNotStartWithDot = !arg.startsWith('.')
+
+  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
+    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
+    secondSlashOnlyAfterHash
+}
+
+module.exports = (giturl, opts, { gitHosts, protocols }) => {
+  if (!giturl) {
+    return
+  }
+
+  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
+  const parsed = parseUrl(correctedUrl, protocols)
+  if (!parsed) {
+    return
+  }
+
+  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
+  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
+    ? parsed.hostname.slice(4)
+    : parsed.hostname]
+  const gitHostName = gitHostShortcut || gitHostDomain
+  if (!gitHostName) {
+    return
+  }
+
+  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
+  let auth = null
+  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
+    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
+  }
+
+  let committish = null
+  let user = null
+  let project = null
+  let defaultRepresentation = null
+
+  try {
+    if (gitHostShortcut) {
+      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
+      const firstAt = pathname.indexOf('@')
+      // we ignore auth for shortcuts, so just trim it out
+      if (firstAt > -1) {
+        pathname = pathname.slice(firstAt + 1)
+      }
+
+      const lastSlash = pathname.lastIndexOf('/')
+      if (lastSlash > -1) {
+        user = decodeURIComponent(pathname.slice(0, lastSlash))
+        // we want nulls only, never empty strings
+        if (!user) {
+          user = null
+        }
+        project = decodeURIComponent(pathname.slice(lastSlash + 1))
+      } else {
+        project = decodeURIComponent(pathname)
+      }
+
+      if (project.endsWith('.git')) {
+        project = project.slice(0, -4)
+      }
+
+      if (parsed.hash) {
+        committish = decodeURIComponent(parsed.hash.slice(1))
+      }
+
+      defaultRepresentation = 'shortcut'
+    } else {
+      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
+        return
+      }
+
+      const segments = gitHostInfo.extract(parsed)
+      if (!segments) {
+        return
+      }
+
+      user = segments.user && decodeURIComponent(segments.user)
+      project = decodeURIComponent(segments.project)
+      committish = decodeURIComponent(segments.committish)
+      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
+    }
+  } catch (err) {
+    /* istanbul ignore else */
+    if (err instanceof URIError) {
+      return
+    } else {
+      throw err
+    }
+  }
+
+  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
+}
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
new file mode 100644
index 0000000000000..2a88e95927772
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
@@ -0,0 +1,231 @@
+/* eslint-disable max-len */
+
+'use strict'
+
+const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
+const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
+const formatHashFragment = (f) => f.toLowerCase()
+  .replace(/^\W+/g, '') // strip leading non-characters
+  .replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
+  .replace(/\W+/g, '-') // replace remaining non-characters with '-'
+
+const defaults = {
+  sshtemplate: ({ domain, user, project, committish }) =>
+    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, user, project, committish }) =>
+    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
+  browsetemplate: ({ domain, user, project, committish, treepath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
+    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
+  docstemplate: ({ domain, user, project, treepath, committish }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
+  shortcuttemplate: ({ type, user, project, committish }) =>
+    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ user, project, committish }) =>
+    `${user}/${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, user, project }) =>
+    `https://${domain}/${user}/${project}/issues`,
+  hashformat: formatHashFragment,
+}
+
+const hosts = {}
+hosts.github = {
+  // First two are insecure and generally shouldn't be used any more, but
+  // they are still supported.
+  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'github.com',
+  treepath: 'tree',
+  blobpath: 'blob',
+  editpath: 'edit',
+  filetemplate: ({ auth, user, project, committish, path }) =>
+    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
+  gittemplate: ({ auth, domain, user, project, committish }) =>
+    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, type, committish] = url.pathname.split('/', 5)
+    if (type && type !== 'tree') {
+      return
+    }
+
+    if (!type) {
+      committish = url.hash.slice(1)
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish }
+  },
+}
+
+hosts.bitbucket = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'bitbucket.org',
+  treepath: 'src',
+  blobpath: 'src',
+  editpath: '?mode=edit',
+  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (['get'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gitlab = {
+  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gitlab.com',
+  treepath: 'tree',
+  blobpath: 'tree',
+  editpath: '-/edit',
+  httpstemplate: ({ auth, domain, user, project, committish }) =>
+    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    const path = url.pathname.slice(1)
+    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
+      return
+    }
+
+    const segments = path.split('/')
+    let project = segments.pop()
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    const user = segments.join('/')
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+hosts.gist = {
+  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
+  domain: 'gist.github.com',
+  editpath: 'edit',
+  sshtemplate: ({ domain, project, committish }) =>
+    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
+  sshurltemplate: ({ domain, project, committish }) =>
+    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
+  edittemplate: ({ domain, user, project, committish, editpath }) =>
+    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
+  browsetemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
+  docstemplate: ({ domain, project, committish }) =>
+    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
+  httpstemplate: ({ domain, project, committish }) =>
+    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  filetemplate: ({ user, project, committish, path }) =>
+    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
+  shortcuttemplate: ({ type, project, committish }) =>
+    `${type}:${project}${maybeJoin('#', committish)}`,
+  pathtemplate: ({ project, committish }) =>
+    `${project}${maybeJoin('#', committish)}`,
+  bugstemplate: ({ domain, project }) =>
+    `https://${domain}/${project}`,
+  gittemplate: ({ domain, project, committish }) =>
+    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ project, committish }) =>
+    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+    if (aux === 'raw') {
+      return
+    }
+
+    if (!project) {
+      if (!user) {
+        return
+      }
+
+      project = user
+      user = null
+    }
+
+    if (project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+  hashformat: function (fragment) {
+    return fragment && 'file-' + formatHashFragment(fragment)
+  },
+}
+
+hosts.sourcehut = {
+  protocols: ['git+ssh:', 'https:'],
+  domain: 'git.sr.ht',
+  treepath: 'tree',
+  blobpath: 'tree',
+  filetemplate: ({ domain, user, project, committish, path }) =>
+    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
+  httpstemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
+  tarballtemplate: ({ domain, user, project, committish }) =>
+    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
+  bugstemplate: () => null,
+  extract: (url) => {
+    let [, user, project, aux] = url.pathname.split('/', 4)
+
+    // tarball url
+    if (['archive'].includes(aux)) {
+      return
+    }
+
+    if (project && project.endsWith('.git')) {
+      project = project.slice(0, -4)
+    }
+
+    if (!user || !project) {
+      return
+    }
+
+    return { user, project, committish: url.hash.slice(1) }
+  },
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  hosts[name] = Object.assign({}, defaults, host)
+}
+
+module.exports = hosts
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
new file mode 100644
index 0000000000000..2a7100dcee6e7
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
@@ -0,0 +1,227 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const hosts = require('./hosts.js')
+const fromUrl = require('./from-url.js')
+const parseUrl = require('./parse-url.js')
+
+const cache = new LRUCache({ max: 1000 })
+
+function unknownHostedUrl (url) {
+  try {
+    const {
+      protocol,
+      hostname,
+      pathname,
+    } = new URL(url)
+
+    if (!hostname) {
+      return null
+    }
+
+    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
+    const path = pathname.replace(/\.git$/, '')
+    return `${proto}//${hostname}${path}`
+  } catch {
+    return null
+  }
+}
+
+class GitHost {
+  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
+    Object.assign(this, GitHost.#gitHosts[type], {
+      type,
+      user,
+      auth,
+      project,
+      committish,
+      default: defaultRepresentation,
+      opts,
+    })
+  }
+
+  static #gitHosts = { byShortcut: {}, byDomain: {} }
+  static #protocols = {
+    'git+ssh:': { name: 'sshurl' },
+    'ssh:': { name: 'sshurl' },
+    'git+https:': { name: 'https', auth: true },
+    'git:': { auth: true },
+    'http:': { auth: true },
+    'https:': { auth: true },
+    'git+http:': { auth: true },
+  }
+
+  static addHost (name, host) {
+    GitHost.#gitHosts[name] = host
+    GitHost.#gitHosts.byDomain[host.domain] = name
+    GitHost.#gitHosts.byShortcut[`${name}:`] = name
+    GitHost.#protocols[`${name}:`] = { name }
+  }
+
+  static fromUrl (giturl, opts) {
+    if (typeof giturl !== 'string') {
+      return
+    }
+
+    const key = giturl + JSON.stringify(opts || {})
+
+    if (!cache.has(key)) {
+      const hostArgs = fromUrl(giturl, opts, {
+        gitHosts: GitHost.#gitHosts,
+        protocols: GitHost.#protocols,
+      })
+      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
+    }
+
+    return cache.get(key)
+  }
+
+  static fromManifest (manifest, opts = {}) {
+    if (!manifest || typeof manifest !== 'object') {
+      return
+    }
+
+    const r = manifest.repository
+    // TODO: look into also checking the `bugs`/`homepage` URLs
+
+    const rurl = r && (
+      typeof r === 'string'
+        ? r
+        : typeof r === 'object' && typeof r.url === 'string'
+          ? r.url
+          : null
+    )
+
+    if (!rurl) {
+      throw new Error('no repository')
+    }
+
+    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
+    if (info) {
+      return info
+    }
+    const unk = unknownHostedUrl(rurl)
+    return GitHost.fromUrl(unk, opts) || unk
+  }
+
+  static parseUrl (url) {
+    return parseUrl(url)
+  }
+
+  #fill (template, opts) {
+    if (typeof template !== 'function') {
+      return null
+    }
+
+    const options = { ...this, ...this.opts, ...opts }
+
+    // the path should always be set so we don't end up with 'undefined' in urls
+    if (!options.path) {
+      options.path = ''
+    }
+
+    // template functions will insert the leading slash themselves
+    if (options.path.startsWith('/')) {
+      options.path = options.path.slice(1)
+    }
+
+    if (options.noCommittish) {
+      options.committish = null
+    }
+
+    const result = template(options)
+    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
+  }
+
+  hash () {
+    return this.committish ? `#${this.committish}` : ''
+  }
+
+  ssh (opts) {
+    return this.#fill(this.sshtemplate, opts)
+  }
+
+  sshurl (opts) {
+    return this.#fill(this.sshurltemplate, opts)
+  }
+
+  browse (path, ...args) {
+    // not a string, treat path as opts
+    if (typeof path !== 'string') {
+      return this.#fill(this.browsetemplate, path)
+    }
+
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browsetreetemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  // If the path is known to be a file, then browseFile should be used. For some hosts
+  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
+  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
+  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
+  // does not redirect to a different commit.
+  browseFile (path, ...args) {
+    if (typeof args[0] !== 'string') {
+      return this.#fill(this.browseblobtemplate, { ...args[0], path })
+    }
+
+    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
+  }
+
+  docs (opts) {
+    return this.#fill(this.docstemplate, opts)
+  }
+
+  bugs (opts) {
+    return this.#fill(this.bugstemplate, opts)
+  }
+
+  https (opts) {
+    return this.#fill(this.httpstemplate, opts)
+  }
+
+  git (opts) {
+    return this.#fill(this.gittemplate, opts)
+  }
+
+  shortcut (opts) {
+    return this.#fill(this.shortcuttemplate, opts)
+  }
+
+  path (opts) {
+    return this.#fill(this.pathtemplate, opts)
+  }
+
+  tarball (opts) {
+    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
+  }
+
+  file (path, opts) {
+    return this.#fill(this.filetemplate, { ...opts, path })
+  }
+
+  edit (path, opts) {
+    return this.#fill(this.edittemplate, { ...opts, path })
+  }
+
+  getDefaultRepresentation () {
+    return this.default
+  }
+
+  toString (opts) {
+    if (this.default && typeof this[this.default] === 'function') {
+      return this[this.default](opts)
+    }
+
+    return this.sshurl(opts)
+  }
+}
+
+for (const [name, host] of Object.entries(hosts)) {
+  GitHost.addHost(name, host)
+}
+
+module.exports = GitHost
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
new file mode 100644
index 0000000000000..7d5489c008ab4
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
@@ -0,0 +1,78 @@
+const url = require('url')
+
+const lastIndexOfBefore = (str, char, beforeChar) => {
+  const startPosition = str.indexOf(beforeChar)
+  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
+}
+
+const safeUrl = (u) => {
+  try {
+    return new url.URL(u)
+  } catch {
+    // this fn should never throw
+  }
+}
+
+// accepts input like git:github.com:user/repo and inserts the // after the first :
+const correctProtocol = (arg, protocols) => {
+  const firstColon = arg.indexOf(':')
+  const proto = arg.slice(0, firstColon + 1)
+  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
+    return arg
+  }
+
+  const firstAt = arg.indexOf('@')
+  if (firstAt > -1) {
+    if (firstAt > firstColon) {
+      return `git+ssh://${arg}`
+    } else {
+      return arg
+    }
+  }
+
+  const doubleSlash = arg.indexOf('//')
+  if (doubleSlash === firstColon + 1) {
+    return arg
+  }
+
+  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
+}
+
+// attempt to correct an scp style url so that it will parse with `new URL()`
+const correctUrl = (giturl) => {
+  // ignore @ that come after the first hash since the denotes the start
+  // of a committish which can contain @ characters
+  const firstAt = lastIndexOfBefore(giturl, '@', '#')
+  // ignore colons that come after the hash since that could include colons such as:
+  // git@github.com:user/package-2#semver:^1.0.0
+  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
+
+  if (lastColonBeforeHash > firstAt) {
+    // the last : comes after the first @ (or there is no @)
+    // like it would in:
+    // proto://hostname.com:user/repo
+    // username@hostname.com:user/repo
+    // :password@hostname.com:user/repo
+    // username:password@hostname.com:user/repo
+    // proto://username@hostname.com:user/repo
+    // proto://:password@hostname.com:user/repo
+    // proto://username:password@hostname.com:user/repo
+    // then we replace the last : with a / to create a valid path
+    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
+  }
+
+  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
+    // we have no : at all
+    // as it would be in:
+    // username@hostname.com/user/repo
+    // then we prepend a protocol
+    giturl = `git+ssh://${giturl}`
+  }
+
+  return giturl
+}
+
+module.exports = (giturl, protocols) => {
+  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
+  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
+}
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/package.json b/node_modules/npm-profile/node_modules/hosted-git-info/package.json
new file mode 100644
index 0000000000000..5883a7d308d79
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/hosted-git-info/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "hosted-git-info",
+  "version": "9.0.0",
+  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
+  "main": "./lib/index.js",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/hosted-git-info.git"
+  },
+  "keywords": [
+    "git",
+    "github",
+    "bitbucket",
+    "gitlab"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/hosted-git-info/issues"
+  },
+  "homepage": "https://github.com/npm/hosted-git-info",
+  "scripts": {
+    "posttest": "npm run lint",
+    "snap": "tap",
+    "test": "tap",
+    "test:coverage": "tap --coverage-report=html",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "lintfix": "npm run eslint -- --fix",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "dependencies": {
+    "lru-cache": "^11.1.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "tap": "^16.0.1"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "color": 1,
+    "coverage": true,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/npm-profile/node_modules/lru-cache/LICENSE b/node_modules/npm-profile/node_modules/lru-cache/LICENSE
new file mode 100644
index 0000000000000..f785757cd63f8
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recenty of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
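+    // Usage sketch (illustrative only; the key name is hypothetical): info()
+    // reports a live remaining TTL, so a negative `ttl` means the entry has
+    // already expired.
+    //
+    //   const entry = cache.info('session')
+    //   if (entry?.ttl !== undefined && entry.ttl < 0) {
+    //     // present but expired; decide whether to refresh
+    //   }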
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items from `entries`, in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
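+    // Usage sketch (illustrative only; the fs calls and file name are
+    // assumptions): dump()/load() can round-trip a cache through JSON,
+    // e.g. to persist it across process restarts.
+    //
+    //   const fs = require('node:fs')
+    //   fs.writeFileSync('cache.json', JSON.stringify(cache.dump()))
+    //   // ...later, in a fresh process:
+    //   const restored = new LRUCache({ max: 100, ttl: 60_000 })
+    //   restored.load(JSON.parse(fs.readFileSync('cache.json', 'utf8')))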
+    /**
+     * Add a value to the cache.
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
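+    // Usage sketch (illustrative only; keys and values are hypothetical):
+    // per-call options override the constructor defaults for a single set().
+    //
+    //   const cache = new LRUCache({ max: 500, ttl: 60_000 })
+    //   cache.set('token', 'abc')                    // default ttl
+    //   cache.set('otp', '123456', { ttl: 30_000 })  // shorter ttl for this entry
+    //   cache.set('token', undefined)                // same as cache.delete('token')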
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically
+     * in the cache. The difference can be determined (if it matters) by using
+     * a `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
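+    // Usage sketch (illustrative only): a `status` object distinguishes a
+    // stale entry from a true miss, since has() returns false for both.
+    //
+    //   const status = {}
+    //   cache.has('key', { status })
+    //   // status.has is now 'hit', 'stale', or 'miss'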
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
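+    // Usage sketch (illustrative only; fetchUser is a hypothetical async
+    // function): a fetchMethod plus allowStale gives stale-while-revalidate
+    // behavior, serving the old value while the refresh runs in the
+    // background.
+    //
+    //   const users = new LRUCache({
+    //     max: 100,
+    //     ttl: 30_000,
+    //     allowStale: true,
+    //     fetchMethod: async (id, staleValue, { signal }) => fetchUser(id, { signal }),
+    //   })
+    //   const user = await users.fetch('u1')  // cached, stale, or freshly fetched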
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
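+    // Usage sketch (illustrative only; expensiveRender is a hypothetical
+    // function): memo() is the synchronous counterpart of fetch(), computing
+    // and caching the value on a miss.
+    //
+    //   const rendered = new LRUCache({
+    //     max: 50,
+    //     memoMethod: (key, oldValue, { context }) => expensiveRender(key, context),
+    //   })
+    //   const html = rendered.memo('home', { context: { locale: 'en' } })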
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it
+                // yet; it's not stale, so this isn't a stale-while-fetching
+                // case. If it's not stale, and fetching, AND has a
+                // __staleWhileFetching value, then the user fetched with
+                // {forceRefresh: true}, so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
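+    // Usage sketch (illustrative only; scheduleRefresh is a hypothetical
+    // helper): with allowStale, an expired entry is returned one last time
+    // and flagged via status.returnedStale instead of being a hard miss.
+    //
+    //   const status = {}
+    //   const v = cache.get('key', { allowStale: true, status })
+    //   if (status.returnedStale) scheduleRefresh('key')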
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.js b/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
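For orientation, a minimal usage sketch of the LRUCache class bundled above (option values and keys are illustrative; as the constructor checks show, at least one of max, maxSize, or ttl must be set):

    const { LRUCache } = require('lru-cache')
    // cap the entry count and give entries a TTL so the cache is bounded
    const cache = new LRUCache({ max: 100, ttl: 60_000 })
    cache.set('pkg:abbrev', { version: '1.1.1' })
    console.log(cache.get('pkg:abbrev'))  // { version: '1.1.1' }
    console.log(cache.has('no-such-key')) // false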
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/esm/package.json b/node_modules/npm-profile/node_modules/lru-cache/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/npm-profile/node_modules/lru-cache/package.json b/node_modules/npm-profile/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 0000000000000..1808eb2844231
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright 2017-2022 (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
new file mode 100644
index 0000000000000..bfcfacbcc95e1
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -0,0 +1,471 @@
+const { Request, Response } = require('minipass-fetch')
+const { Minipass } = require('minipass')
+const MinipassFlush = require('minipass-flush')
+const cacache = require('cacache')
+const url = require('url')
+
+const CachingMinipassPipeline = require('../pipeline.js')
+const CachePolicy = require('./policy.js')
+const cacheKey = require('./key.js')
+const remote = require('../remote.js')
+
+const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
+
+// allow list for request headers that will be written to the cache index
+// note: we will also store any request headers
+// that are named in a response's vary header
+const KEEP_REQUEST_HEADERS = [
+  'accept-charset',
+  'accept-encoding',
+  'accept-language',
+  'accept',
+  'cache-control',
+]
+
+// allow list for response headers that will be written to the cache index
+// note: we must not store the real response's age header, or when we load
+// a cache policy based on the metadata it will think the cached response
+// is always stale
+const KEEP_RESPONSE_HEADERS = [
+  'cache-control',
+  'content-encoding',
+  'content-language',
+  'content-type',
+  'date',
+  'etag',
+  'expires',
+  'last-modified',
+  'link',
+  'location',
+  'pragma',
+  'vary',
+]
+
+// return an object containing all metadata to be written to the index
+const getMetadata = (request, response, options) => {
+  const metadata = {
+    time: Date.now(),
+    url: request.url,
+    reqHeaders: {},
+    resHeaders: {},
+
+    // options on which we must match the request and vary the response
+    options: {
+      compress: options.compress != null ? options.compress : request.compress,
+    },
+  }
+
+  // only save the status if it's not a 200 or 304
+  if (response.status !== 200 && response.status !== 304) {
+    metadata.status = response.status
+  }
+
+  for (const name of KEEP_REQUEST_HEADERS) {
+    if (request.headers.has(name)) {
+      metadata.reqHeaders[name] = request.headers.get(name)
+    }
+  }
+
+  // if the request's host header differs from the host in the url
+  // we need to keep it, otherwise it's just noise and we ignore it
+  const host = request.headers.get('host')
+  const parsedUrl = new url.URL(request.url)
+  if (host && parsedUrl.host !== host) {
+    metadata.reqHeaders.host = host
+  }
+
+  // if the response has a vary header, make sure
+  // we store the relevant request headers too
+  if (response.headers.has('vary')) {
+    const vary = response.headers.get('vary')
+    // a vary of "*" means every header causes a different response.
+    // in that scenario, we do not include any additional headers
+    // as the freshness check will always fail anyway and we don't
+    // want to bloat the cache indexes
+    if (vary !== '*') {
+      // copy any other request headers that will vary the response
+      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
+      for (const name of varyHeaders) {
+        if (request.headers.has(name)) {
+          metadata.reqHeaders[name] = request.headers.get(name)
+        }
+      }
+    }
+  }
+
+  for (const name of KEEP_RESPONSE_HEADERS) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  for (const name of options.cacheAdditionalHeaders) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  return metadata
+}
+
+// symbols used to hide objects that may be lazily evaluated in a getter
+const _request = Symbol('request')
+const _response = Symbol('response')
+const _policy = Symbol('policy')
+
+class CacheEntry {
+  constructor ({ entry, request, response, options }) {
+    if (entry) {
+      this.key = entry.key
+      this.entry = entry
+      // previous versions of this module didn't write an explicit timestamp in
+      // the metadata, so fall back to the entry's timestamp. we can't use the
+      // entry timestamp to determine staleness because cacache will update it
+      // when it verifies its data
+      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
+    } else {
+      this.key = cacheKey(request)
+    }
+
+    this.options = options
+
+    // these properties are behind getters that lazily evaluate
+    this[_request] = request
+    this[_response] = response
+    this[_policy] = null
+  }
+
+  // returns a CacheEntry instance that satisfies the given request
+  // or undefined if no existing entry satisfies
+  static async find (request, options) {
+    try {
+      // compacts the index and returns an array of unique entries
+      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
+        const entryA = new CacheEntry({ entry: A, options })
+        const entryB = new CacheEntry({ entry: B, options })
+        return entryA.policy.satisfies(entryB.request)
+      }, {
+        validateEntry: (entry) => {
+          // clean out entries with a buggy content-encoding value
+          if (entry.metadata &&
+              entry.metadata.resHeaders &&
+              entry.metadata.resHeaders['content-encoding'] === null) {
+            return false
+          }
+
+          // if an integrity is null, it needs to have a status specified
+          if (entry.integrity === null) {
+            return !!(entry.metadata && entry.metadata.status)
+          }
+
+          return true
+        },
+      })
+    } catch (err) {
+      // if the compact request fails, ignore the error and return
+      return
+    }
+
+    // a cache mode of 'reload' means to behave as though we have no cache
+    // on the way to the network. return undefined to allow cacheFetch to
+    // create a brand new request no matter what.
+    if (options.cache === 'reload') {
+      return
+    }
+
+    // find the specific entry that satisfies the request
+    let match
+    for (const entry of matches) {
+      const _entry = new CacheEntry({
+        entry,
+        options,
+      })
+
+      if (_entry.policy.satisfies(request)) {
+        match = _entry
+        break
+      }
+    }
+
+    return match
+  }
+
+  // if the user made a PUT/POST/PATCH then we invalidate our
+  // cache for the same url by deleting the index entirely
+  static async invalidate (request, options) {
+    const key = cacheKey(request)
+    try {
+      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
+    } catch (err) {
+      // ignore errors
+    }
+  }
+
+  get request () {
+    if (!this[_request]) {
+      this[_request] = new Request(this.entry.metadata.url, {
+        method: 'GET',
+        headers: this.entry.metadata.reqHeaders,
+        ...this.entry.metadata.options,
+      })
+    }
+
+    return this[_request]
+  }
+
+  get response () {
+    if (!this[_response]) {
+      this[_response] = new Response(null, {
+        url: this.entry.metadata.url,
+        counter: this.options.counter,
+        status: this.entry.metadata.status || 200,
+        headers: {
+          ...this.entry.metadata.resHeaders,
+          'content-length': this.entry.size,
+        },
+      })
+    }
+
+    return this[_response]
+  }
+
+  get policy () {
+    if (!this[_policy]) {
+      this[_policy] = new CachePolicy({
+        entry: this.entry,
+        request: this.request,
+        response: this.response,
+        options: this.options,
+      })
+    }
+
+    return this[_policy]
+  }
+
+  // wraps the response in a pipeline that stores the data
+  // in the cache while the user consumes it
+  async store (status) {
+    // if we got a status other than 200, 301, or 308,
+    // or the CachePolicy forbids storage, append the
+    // cache status header and return it untouched
+    if (
+      this.request.method !== 'GET' ||
+      ![200, 301, 308].includes(this.response.status) ||
+      !this.policy.storable()
+    ) {
+      this.response.headers.set('x-local-cache-status', 'skip')
+      return this.response
+    }
+
+    const size = this.response.headers.get('content-length')
+    const cacheOpts = {
+      algorithms: this.options.algorithms,
+      metadata: getMetadata(this.request, this.response, this.options),
+      size,
+      integrity: this.options.integrity,
+      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
+    }
+
+    let body = null
+    // we only set a body if the status is a 200, redirects are
+    // stored as metadata only
+    if (this.response.status === 200) {
+      let cacheWriteResolve, cacheWriteReject
+      const cacheWritePromise = new Promise((resolve, reject) => {
+        cacheWriteResolve = resolve
+        cacheWriteReject = reject
+      }).catch((err) => {
+        body.emit('error', err)
+      })
+
+      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
+        flush () {
+          return cacheWritePromise
+        },
+      }))
+      // this is always true since if we aren't reusing the one from the remote fetch, we
+      // are using the one from cacache
+      body.hasIntegrityEmitter = true
+
+      const onResume = () => {
+        const tee = new Minipass()
+        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
+        // re-emit the integrity and size events on our new response body so they can be reused
+        cacheStream.on('integrity', i => body.emit('integrity', i))
+        cacheStream.on('size', s => body.emit('size', s))
+        // stick a flag on here so downstream users will know if they can expect integrity events
+        tee.pipe(cacheStream)
+        // TODO if the cache write fails, log a warning but return the response anyway
+        // eslint-disable-next-line promise/catch-or-return
+        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+        body.unshift(tee)
+        body.unshift(this.response.body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+    } else {
+      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+    }
+
+    // note: we do not set the x-local-cache-hash header because we do not know
+    // the hash value until after the write to the cache completes, which doesn't
+    // happen until after the response has been sent and it's too late to write
+    // the header anyway
+    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    this.response.headers.set('x-local-cache-mode', 'stream')
+    this.response.headers.set('x-local-cache-status', status)
+    this.response.headers.set('x-local-cache-time', new Date().toISOString())
+    const newResponse = new Response(body, {
+      url: this.response.url,
+      status: this.response.status,
+      headers: this.response.headers,
+      counter: this.options.counter,
+    })
+    return newResponse
+  }
+
+  // use the cached data to create a response and return it
+  async respond (method, options, status) {
+    let response
+    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
+      // if the request is a HEAD, or the response is a redirect,
+      // then the metadata in the entry already includes everything
+      // we need to build a response
+      response = this.response
+    } else {
+      // we're responding with a full cached response, so create a body
+      // that reads from cacache and attach it to a new Response
+      const body = new Minipass()
+      const headers = { ...this.policy.responseHeaders() }
+
+      const onResume = () => {
+        const cacheStream = cacache.get.stream.byDigest(
+          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+        )
+        cacheStream.on('error', async (err) => {
+          cacheStream.pause()
+          if (err.code === 'EINTEGRITY') {
+            await cacache.rm.content(
+              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+            )
+          }
+          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
+            await CacheEntry.invalidate(this.request, this.options)
+          }
+          body.emit('error', err)
+          cacheStream.resume()
+        })
+        // emit the integrity and size events based on our metadata so we're consistent
+        body.emit('integrity', this.entry.integrity)
+        body.emit('size', Number(headers['content-length']))
+        cacheStream.pipe(body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+      response = new Response(body, {
+        url: this.entry.metadata.url,
+        counter: options.counter,
+        status: 200,
+        headers,
+      })
+    }
+
+    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
+    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    response.headers.set('x-local-cache-mode', 'stream')
+    response.headers.set('x-local-cache-status', status)
+    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
+    return response
+  }
+
+  // use the provided request along with this cache entry to
+  // revalidate the stored response. returns a response, either
+  // from the cache or from the update
+  async revalidate (request, options) {
+    const revalidateRequest = new Request(request, {
+      headers: this.policy.revalidationHeaders(request),
+    })
+
+    try {
+      // NOTE: be sure to remove the headers property from the
+      // user supplied options, since we have already defined
+      // them on the new request object. if they're still in the
+      // options then those will overwrite the ones from the policy
+      var response = await remote(revalidateRequest, {
+        ...options,
+        headers: undefined,
+      })
+    } catch (err) {
+      // if the network fetch fails, return the stale
+      // cached response unless it has a cache-control
+      // of 'must-revalidate'
+      if (!this.policy.mustRevalidate) {
+        return this.respond(request.method, options, 'stale')
+      }
+
+      throw err
+    }
+
+    if (this.policy.revalidated(revalidateRequest, response)) {
+      // we got a 304, write a new index to the cache and respond from cache
+      const metadata = getMetadata(request, response, options)
+      // 304 responses do not include headers that are specific to the response data
+      // since they do not include a body, so we copy values for headers that were
+      // in the old cache entry to the new one, if the new metadata does not already
+      // include that header
+      for (const name of KEEP_RESPONSE_HEADERS) {
+        if (
+          !hasOwnProperty(metadata.resHeaders, name) &&
+          hasOwnProperty(this.entry.metadata.resHeaders, name)
+        ) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+      }
+
+      for (const name of options.cacheAdditionalHeaders) {
+        const inMeta = hasOwnProperty(metadata.resHeaders, name)
+        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
+        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
+
+        // if the header is in the existing entry, but it is not in the metadata
+        // then we need to write it to the metadata as this will refresh the on-disk cache
+        if (!inMeta && inEntry) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+        // if the header is in the metadata, but not in the policy, then we need to set
+        // it in the policy so that it's included in the immediate response. future
+        // responses will load a new cache entry, so we don't need to change that
+        if (!inPolicy && inMeta) {
+          this.policy.response.headers[name] = metadata.resHeaders[name]
+        }
+      }
+
+      try {
+        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
+          size: this.entry.size,
+          metadata,
+        })
+      } catch (err) {
+        // if updating the cache index fails, we ignore it and
+        // respond anyway
+      }
+      return this.respond(request.method, options, 'revalidated')
+    }
+
+    // if we got a modified response, create a new entry based on it
+    const newEntry = new CacheEntry({
+      request,
+      response,
+      options,
+    })
+
+    // respond with the new entry while writing it to the cache
+    return newEntry.store('updated')
+  }
+}
+
+module.exports = CacheEntry
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
new file mode 100644
index 0000000000000..67a66573bebe6
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -0,0 +1,11 @@
+class NotCachedError extends Error {
+  constructor (url) {
+    /* eslint-disable-next-line max-len */
+    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
+    this.code = 'ENOTCACHED'
+  }
+}
+
+module.exports = {
+  NotCachedError,
+}
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
new file mode 100644
index 0000000000000..0de49d23fb933
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
@@ -0,0 +1,49 @@
+const { NotCachedError } = require('./errors.js')
+const CacheEntry = require('./entry.js')
+const remote = require('../remote.js')
+
+// do whatever is necessary to get a Response and return it
+const cacheFetch = async (request, options) => {
+  // try to find a cached entry that satisfies this request
+  const entry = await CacheEntry.find(request, options)
+  if (!entry) {
+    // no cached result, if the cache mode is 'only-if-cached' that's a failure
+    if (options.cache === 'only-if-cached') {
+      throw new NotCachedError(request.url)
+    }
+
+    // otherwise, we make a request, store it and return it
+    const response = await remote(request, options)
+    const newEntry = new CacheEntry({ request, response, options })
+    return newEntry.store('miss')
+  }
+
+  // we have a cached response that satisfies this request, however if the cache
+  // mode is 'no-cache' then we send the revalidation request no matter what
+  if (options.cache === 'no-cache') {
+    return entry.revalidate(request, options)
+  }
+
+  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
+  // 'only-if-cached' we can respond with the cached entry. set the status
+  // based on the result of needsRevalidation and respond
+  const _needsRevalidation = entry.policy.needsRevalidation(request)
+  if (options.cache === 'force-cache' ||
+      options.cache === 'only-if-cached' ||
+      !_needsRevalidation) {
+    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+  }
+
+  // if we got here, the cache entry is stale so revalidate it
+  return entry.revalidate(request, options)
+}
+
+cacheFetch.invalidate = async (request, options) => {
+  if (!options.cachePath) {
+    return
+  }
+
+  return CacheEntry.invalidate(request, options)
+}
+
+module.exports = cacheFetch
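A minimal sketch of how the cache modes handled above surface to a caller, assuming a hypothetical cachePath and registry URL:

    const makeFetchHappen = require('make-fetch-happen')
    const fetch = makeFetchHappen.defaults({ cachePath: '/tmp/demo-cache' })
    // 'only-if-cached' takes the NotCachedError branch above when nothing in the cache matches
    fetch('https://registry.npmjs.org/abbrev', { cache: 'only-if-cached' })
      .catch(err => console.log(err.code)) // 'ENOTCACHED' on a cold cache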
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
new file mode 100644
index 0000000000000..f7684d562b7fa
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
@@ -0,0 +1,17 @@
+const { URL, format } = require('url')
+
+// options passed to url.format() when generating a key
+const formatOptions = {
+  auth: false,
+  fragment: false,
+  search: true,
+  unicode: false,
+}
+
+// returns a string to be used as the cache key for the Request
+const cacheKey = (request) => {
+  const parsed = new URL(request.url)
+  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
+}
+
+module.exports = cacheKey
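A quick sketch of the key format produced above (the URL is a made-up example): auth and fragment are dropped while the query string is kept.

    const cacheKey = require('./key.js') // relative to the lib/cache directory above
    // the helper only reads request.url, so a plain object works for illustration
    console.log(cacheKey({ url: 'https://user:pass@registry.npmjs.org/abbrev?foo=1#frag' }))
    // -> make-fetch-happen:request-cache:https://registry.npmjs.org/abbrev?foo=1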
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
new file mode 100644
index 0000000000000..ada3c8600dae9
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -0,0 +1,161 @@
+const CacheSemantics = require('http-cache-semantics')
+const Negotiator = require('negotiator')
+const ssri = require('ssri')
+
+// options passed to http-cache-semantics constructor
+const policyOptions = {
+  shared: false,
+  ignoreCargoCult: true,
+}
+
+// a fake empty response, used when only testing the
+// request for storability
+const emptyResponse = { status: 200, headers: {} }
+
+// returns a plain object representation of the Request
+const requestObject = (request) => {
+  const _obj = {
+    method: request.method,
+    url: request.url,
+    headers: {},
+    compress: request.compress,
+  }
+
+  request.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+// returns a plain object representation of the Response
+const responseObject = (response) => {
+  const _obj = {
+    status: response.status,
+    headers: {},
+  }
+
+  response.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+class CachePolicy {
+  constructor ({ entry, request, response, options }) {
+    this.entry = entry
+    this.request = requestObject(request)
+    this.response = responseObject(response)
+    this.options = options
+    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
+
+    if (this.entry) {
+      // if we have an entry, copy the timestamp to the _responseTime
+      // this is necessary because the CacheSemantics constructor forces
+      // the value to Date.now() which means a policy created from a
+      // cache entry is likely to always identify itself as stale
+      this.policy._responseTime = this.entry.metadata.time
+    }
+  }
+
+  // static method to quickly determine if a request alone is storable
+  static storable (request, options) {
+    // no cachePath means no caching
+    if (!options.cachePath) {
+      return false
+    }
+
+    // user explicitly asked not to cache
+    if (options.cache === 'no-store') {
+      return false
+    }
+
+    // we only cache GET and HEAD requests
+    if (!['GET', 'HEAD'].includes(request.method)) {
+      return false
+    }
+
+    // otherwise, let http-cache-semantics make the decision
+    // based on the request's headers
+    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
+    return policy.storable()
+  }
+
+  // returns true if the policy satisfies the request
+  satisfies (request) {
+    const _req = requestObject(request)
+    if (this.request.headers.host !== _req.headers.host) {
+      return false
+    }
+
+    if (this.request.compress !== _req.compress) {
+      return false
+    }
+
+    const negotiatorA = new Negotiator(this.request)
+    const negotiatorB = new Negotiator(_req)
+
+    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
+      return false
+    }
+
+    if (this.options.integrity) {
+      return ssri.parse(this.options.integrity).match(this.entry.integrity)
+    }
+
+    return true
+  }
+
+  // returns true if the request and response allow caching
+  storable () {
+    return this.policy.storable()
+  }
+
+  // NOTE: this is a hack to avoid parsing the cache-control
+  // header ourselves, it returns true if the response's
+  // cache-control contains must-revalidate
+  get mustRevalidate () {
+    return !!this.policy._rescc['must-revalidate']
+  }
+
+  // returns true if the cached response requires revalidation
+  // for the given request
+  needsRevalidation (request) {
+    const _req = requestObject(request)
+    // force method to GET because we only cache GETs
+    // but can serve a HEAD from a cached GET
+    _req.method = 'GET'
+    return !this.policy.satisfiesWithoutRevalidation(_req)
+  }
+
+  responseHeaders () {
+    return this.policy.responseHeaders()
+  }
+
+  // returns a new object containing the appropriate headers
+  // to send a revalidation request
+  revalidationHeaders (request) {
+    const _req = requestObject(request)
+    return this.policy.revalidationHeaders(_req)
+  }
+
+  // returns true if the request/response was revalidated
+  // successfully. returns false if a new response was received
+  revalidated (request, response) {
+    const _req = requestObject(request)
+    const _res = responseObject(response)
+    const policy = this.policy.revalidatedPolicy(_req, _res)
+    return !policy.modified
+  }
+}
+
+module.exports = CachePolicy
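A minimal sketch of the static storable() check above, assuming a hypothetical cachePath:

    const { Request } = require('minipass-fetch')
    const CachePolicy = require('./policy.js') // relative to the lib/cache directory above
    const req = new Request('https://registry.npmjs.org/abbrev')
    console.log(CachePolicy.storable(req, {}))                                           // false: no cachePath configured
    console.log(CachePolicy.storable(req, { cachePath: '/tmp/cache', cache: 'no-store' })) // false: caller opted out
    console.log(CachePolicy.storable(req, { cachePath: '/tmp/cache' }))                    // true for a plain GET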
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
new file mode 100644
index 0000000000000..233ba67e16550
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
@@ -0,0 +1,118 @@
+'use strict'
+
+const { FetchError, Request, isRedirect } = require('minipass-fetch')
+const url = require('url')
+
+const CachePolicy = require('./cache/policy.js')
+const cache = require('./cache/index.js')
+const remote = require('./remote.js')
+
+// given a Request, a Response and user options
+// return true if the response is a redirect that
+// can be followed. we throw errors that will result
+// in the fetch being rejected if the redirect is
+// possible but invalid for some reason
+const canFollowRedirect = (request, response, options) => {
+  if (!isRedirect(response.status)) {
+    return false
+  }
+
+  if (options.redirect === 'manual') {
+    return false
+  }
+
+  if (options.redirect === 'error') {
+    throw new FetchError(`redirect mode is set to error: ${request.url}`,
+      'no-redirect', { code: 'ENOREDIRECT' })
+  }
+
+  if (!response.headers.has('location')) {
+    throw new FetchError(`redirect location header missing for: ${request.url}`,
+      'no-location', { code: 'EINVALIDREDIRECT' })
+  }
+
+  if (request.counter >= request.follow) {
+    throw new FetchError(`maximum redirect reached at: ${request.url}`,
+      'max-redirect', { code: 'EMAXREDIRECT' })
+  }
+
+  return true
+}
+
+// given a Request, a Response, and the user's options return an object
+// with a new Request and a new options object that will be used for
+// following the redirect
+const getRedirect = (request, response, options) => {
+  const _opts = { ...options }
+  const location = response.headers.get('location')
+  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
+  // Comment below is used under the following license:
+  /**
+   * @license
+   * Copyright (c) 2010-2012 Mikeal Rogers
+   * Licensed under the Apache License, Version 2.0 (the "License");
+   * you may not use this file except in compliance with the License.
+   * You may obtain a copy of the License at
+   * http://www.apache.org/licenses/LICENSE-2.0
+   * Unless required by applicable law or agreed to in writing,
+   * software distributed under the License is distributed on an "AS
+   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+   * express or implied. See the License for the specific language
+   * governing permissions and limitations under the License.
+   */
+
+  // Remove authorization if changing hostnames (but not if just
+  // changing ports or protocols).  This matches the behavior of request:
+  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
+    request.headers.delete('authorization')
+    request.headers.delete('cookie')
+  }
+
+  // for POST request with 301/302 response, or any request with 303 response,
+  // use GET when following redirect
+  if (
+    response.status === 303 ||
+    (request.method === 'POST' && [301, 302].includes(response.status))
+  ) {
+    _opts.method = 'GET'
+    _opts.body = null
+    request.headers.delete('content-length')
+  }
+
+  _opts.headers = {}
+  request.headers.forEach((value, key) => {
+    _opts.headers[key] = value
+  })
+
+  _opts.counter = ++request.counter
+  const redirectReq = new Request(url.format(redirectUrl), _opts)
+  return {
+    request: redirectReq,
+    options: _opts,
+  }
+}
+
+const fetch = async (request, options) => {
+  const response = CachePolicy.storable(request, options)
+    ? await cache(request, options)
+    : await remote(request, options)
+
+  // if the request wasn't a GET or HEAD, and the response
+  // status is between 200 and 399 inclusive, invalidate the
+  // request url
+  if (!['GET', 'HEAD'].includes(request.method) &&
+      response.status >= 200 &&
+      response.status <= 399) {
+    await cache.invalidate(request, options)
+  }
+
+  if (!canFollowRedirect(request, response, options)) {
+    return response
+  }
+
+  const redirect = getRedirect(request, response, options)
+  return fetch(redirect.request, redirect.options)
+}
+
+module.exports = fetch
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
new file mode 100644
index 0000000000000..2f12e8e1b6113
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
@@ -0,0 +1,41 @@
+const { FetchError, Headers, Request, Response } = require('minipass-fetch')
+
+const configureOptions = require('./options.js')
+const fetch = require('./fetch.js')
+
+const makeFetchHappen = (url, opts) => {
+  const options = configureOptions(opts)
+
+  const request = new Request(url, options)
+  return fetch(request, options)
+}
+
+makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
+  if (typeof defaultUrl === 'object') {
+    defaultOptions = defaultUrl
+    defaultUrl = null
+  }
+
+  const defaultedFetch = (url, options = {}) => {
+    const finalUrl = url || defaultUrl
+    const finalOptions = {
+      ...defaultOptions,
+      ...options,
+      headers: {
+        ...defaultOptions.headers,
+        ...options.headers,
+      },
+    }
+    return wrappedFetch(finalUrl, finalOptions)
+  }
+
+  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
+    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
+  return defaultedFetch
+}
+
+module.exports = makeFetchHappen
+module.exports.FetchError = FetchError
+module.exports.Headers = Headers
+module.exports.Request = Request
+module.exports.Response = Response
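A short sketch of the defaults() wrapper above; per-call headers are merged shallowly on top of the defaulted ones (the cachePath and header values are illustrative):

    const makeFetchHappen = require('make-fetch-happen')
    const fetch = makeFetchHappen.defaults({
      cachePath: '/tmp/demo-cache',
      headers: { 'user-agent': 'demo-client/1.0' },
    })
    // this request carries both the defaulted user-agent and the per-call accept header
    fetch('https://registry.npmjs.org/abbrev', { headers: { accept: 'application/json' } })
      .then(res => console.log(res.status))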
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
new file mode 100644
index 0000000000000..db51cc6324817
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
@@ -0,0 +1,59 @@
+const dns = require('dns')
+
+const conditionalHeaders = [
+  'if-modified-since',
+  'if-none-match',
+  'if-unmodified-since',
+  'if-match',
+  'if-range',
+]
+
+const configureOptions = (opts) => {
+  const { strictSSL, ...options } = { ...opts }
+  options.method = options.method ? options.method.toUpperCase() : 'GET'
+
+  if (strictSSL === undefined || strictSSL === null) {
+    options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
+  } else {
+    options.rejectUnauthorized = strictSSL !== false
+  }
+
+  if (!options.retry) {
+    options.retry = { retries: 0 }
+  } else if (typeof options.retry === 'string') {
+    const retries = parseInt(options.retry, 10)
+    if (isFinite(retries)) {
+      options.retry = { retries }
+    } else {
+      options.retry = { retries: 0 }
+    }
+  } else if (typeof options.retry === 'number') {
+    options.retry = { retries: options.retry }
+  } else {
+    options.retry = { retries: 0, ...options.retry }
+  }
+
+  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
+
+  options.cache = options.cache || 'default'
+  if (options.cache === 'default') {
+    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
+      return conditionalHeaders.includes(name.toLowerCase())
+    })
+    if (hasConditionalHeader) {
+      options.cache = 'no-store'
+    }
+  }
+
+  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
+
+  // cacheManager is deprecated, but if it's set and
+  // cachePath is not we should copy it to the new field
+  if (options.cacheManager && !options.cachePath) {
+    options.cachePath = options.cacheManager
+  }
+
+  return options
+}
+
+module.exports = configureOptions
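A few illustrative calls showing the normalization performed above: retry accepts a number, a numeric string, or an object; the method is upper-cased; and a conditional request header flips the default cache mode to 'no-store'.

    const configureOptions = require('./options.js') // relative to the lib directory above
    console.log(configureOptions({ retry: 2 }).retry)        // { retries: 2 }
    console.log(configureOptions({ retry: 'oops' }).retry)   // { retries: 0 }
    console.log(configureOptions({ method: 'post' }).method) // 'POST'
    console.log(configureOptions({ headers: { 'if-none-match': '"abc"' } }).cache) // 'no-store'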
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
new file mode 100644
index 0000000000000..b1d221b2d0ce3
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const MinipassPipeline = require('minipass-pipeline')
+
+class CachingMinipassPipeline extends MinipassPipeline {
+  #events = []
+  #data = new Map()
+
+  constructor (opts, ...streams) {
+    // CRITICAL: do NOT pass the streams to the call to super(), this will start
+    // the flow of data and potentially cause the events we need to catch to emit
+    // before we've finished our own setup. instead we call super() with no args,
+    // finish our setup, and then push the streams into ourselves to start the
+    // data flow
+    super()
+    this.#events = opts.events
+
+    /* istanbul ignore next - coverage disabled because this is pointless to test here */
+    if (streams.length) {
+      this.push(...streams)
+    }
+  }
+
+  on (event, handler) {
+    if (this.#events.includes(event) && this.#data.has(event)) {
+      return handler(...this.#data.get(event))
+    }
+
+    return super.on(event, handler)
+  }
+
+  emit (event, ...data) {
+    if (this.#events.includes(event)) {
+      this.#data.set(event, data)
+    }
+
+    return super.emit(event, ...data)
+  }
+}
+
+module.exports = CachingMinipassPipeline
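A minimal sketch of the event-caching behavior above: events named in opts.events are replayed to listeners that attach after the event has already fired.

    const CachingMinipassPipeline = require('./pipeline.js') // relative to the lib directory above
    const pipeline = new CachingMinipassPipeline({ events: ['size'] })
    pipeline.emit('size', 123)
    // a listener attached after the event fired still receives the cached payload
    pipeline.on('size', s => console.log('size:', s)) // size: 123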
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
new file mode 100644
index 0000000000000..1d640e5380baa
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
@@ -0,0 +1,132 @@
+const { Minipass } = require('minipass')
+const fetch = require('minipass-fetch')
+const promiseRetry = require('promise-retry')
+const ssri = require('ssri')
+const { log } = require('proc-log')
+
+const CachingMinipassPipeline = require('./pipeline.js')
+const { getAgent } = require('@npmcli/agent')
+const pkg = require('../package.json')
+
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+  'ECONNRESET', // remote socket closed on us
+  'ECONNREFUSED', // remote host refused to open connection
+  'EADDRINUSE', // failed to bind to a local port (proxy?)
+  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+  // from @npmcli/agent
+  'ECONNECTIONTIMEOUT',
+  'EIDLETIMEOUT',
+  'ERESPONSETIMEOUT',
+  'ETRANSFERTIMEOUT',
+  // Known codes we do NOT retry on:
+  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+  // EINVALIDPROXY // invalid protocol from @npmcli/agent
+  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
+]
+
+const RETRY_TYPES = [
+  'request-timeout',
+]
+
+// make a request directly to the remote source,
+// retrying certain classes of errors as well as
+// following redirects (through the cache if necessary)
+// and verifying response integrity
+const remoteFetch = (request, options) => {
+  // options.signal is intended for the fetch itself, not the agent.  Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one.
+  const agent = getAgent(request.url, { ...options, signal: undefined })
+  if (!request.headers.has('connection')) {
+    request.headers.set('connection', agent ? 'keep-alive' : 'close')
+  }
+
+  if (!request.headers.has('user-agent')) {
+    request.headers.set('user-agent', USER_AGENT)
+  }
+
+  // keep our own options since we're overriding the agent
+  // and the redirect mode
+  const _opts = {
+    ...options,
+    agent,
+    redirect: 'manual',
+  }
+
+  return promiseRetry(async (retryHandler, attemptNum) => {
+    const req = new fetch.Request(request, _opts)
+    try {
+      let res = await fetch(req, _opts)
+      if (_opts.integrity && res.status === 200) {
+        // we got a 200 response and the user has specified an expected
+        // integrity value, so wrap the response in an ssri stream to verify it
+        const integrityStream = ssri.integrityStream({
+          algorithms: _opts.algorithms,
+          integrity: _opts.integrity,
+          size: _opts.size,
+        })
+        const pipeline = new CachingMinipassPipeline({
+          events: ['integrity', 'size'],
+        }, res.body, integrityStream)
+        // we also propagate the integrity and size events out to the pipeline so we can use
+        // this new response body as an integrityEmitter for cacache
+        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
+        integrityStream.on('size', s => pipeline.emit('size', s))
+        res = new fetch.Response(pipeline, res)
+        // set an explicit flag so we know if our response body will emit integrity and size
+        res.body.hasIntegrityEmitter = true
+      }
+
+      res.headers.set('x-fetch-attempts', attemptNum)
+
+      // do not retry POST requests, or requests with a streaming body
+      // do retry requests with a 408, 420, 429 or 500+ status in the response
+      const isStream = Minipass.isStream(req.body)
+      const isRetriable = req.method !== 'POST' &&
+          !isStream &&
+          ([408, 420, 429].includes(res.status) || res.status >= 500)
+
+      if (isRetriable) {
+        if (typeof options.onRetry === 'function') {
+          options.onRetry(res)
+        }
+
+        /* eslint-disable-next-line max-len */
+        log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
+        return retryHandler(res)
+      }
+
+      return res
+    } catch (err) {
+      const code = (err.code === 'EPROMISERETRY')
+        ? err.retried.code
+        : err.code
+
+      // err.retried will be the thing that was thrown from above
+      // if it's a response, we just got a bad status code and we
+      // can re-throw to allow the retry
+      const isRetryError = err.retried instanceof fetch.Response ||
+        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
+
+      if (req.method === 'POST' || isRetryError) {
+        throw err
+      }
+
+      if (typeof options.onRetry === 'function') {
+        options.onRetry(err)
+      }
+
+      log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
+      return retryHandler(err)
+    }
+  }, options.retry).catch((err) => {
+    // don't reject for http errors, just return them
+    if (err.status >= 400 && err.type !== 'system') {
+      return err
+    }
+
+    throw err
+  })
+}
+
+module.exports = remoteFetch
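A short sketch of observing the retry loop above through the onRetry hook and the x-fetch-attempts header (the URL and retry count are illustrative):

    const fetch = require('make-fetch-happen')
    fetch('https://registry.npmjs.org/abbrev', {
      retry: { retries: 2 },
      // called with the Response for retriable statuses, or the Error for network failures
      onRetry: (resOrErr) => console.log('retrying after', resOrErr.status || resOrErr.code),
    }).then(res => console.log('attempts:', res.headers.get('x-fetch-attempts')))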
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/package.json b/node_modules/npm-profile/node_modules/make-fetch-happen/package.json
new file mode 100644
index 0000000000000..1e27d4ee8a70e
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,74 @@
+{
+  "name": "make-fetch-happen",
+  "version": "15.0.1",
+  "description": "Opinionated, caching, retrying fetch client",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/make-fetch-happen.git"
+  },
+  "keywords": [
+    "http",
+    "request",
+    "fetch",
+    "mean girls",
+    "caching",
+    "cache",
+    "subresource integrity"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/agent": "^3.0.0",
+    "cacache": "^20.0.1",
+    "http-cache-semantics": "^4.1.1",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^4.0.0",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "negotiator": "^1.0.0",
+    "proc-log": "^5.0.0",
+    "promise-retry": "^2.0.1",
+    "ssri": "^12.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "nock": "^13.2.4",
+    "safe-buffer": "^5.2.1",
+    "standard-version": "^9.3.2",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "color": 1,
+    "files": "test/*.js",
+    "check-coverage": true,
+    "timeout": 60,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/npm-profile/node_modules/minizlib/LICENSE b/node_modules/npm-profile/node_modules/minizlib/LICENSE
new file mode 100644
index 0000000000000..49f7efe431c9e
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
+Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
+Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js
new file mode 100644
index 0000000000000..dfc2c1957bfc9
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js
@@ -0,0 +1,123 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.constants = void 0;
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+const zlib_1 = __importDefault(require("zlib"));
+/* c8 ignore start */
+const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
+/* c8 ignore stop */
+exports.constants = Object.freeze(Object.assign(Object.create(null), {
+    Z_NO_FLUSH: 0,
+    Z_PARTIAL_FLUSH: 1,
+    Z_SYNC_FLUSH: 2,
+    Z_FULL_FLUSH: 3,
+    Z_FINISH: 4,
+    Z_BLOCK: 5,
+    Z_OK: 0,
+    Z_STREAM_END: 1,
+    Z_NEED_DICT: 2,
+    Z_ERRNO: -1,
+    Z_STREAM_ERROR: -2,
+    Z_DATA_ERROR: -3,
+    Z_MEM_ERROR: -4,
+    Z_BUF_ERROR: -5,
+    Z_VERSION_ERROR: -6,
+    Z_NO_COMPRESSION: 0,
+    Z_BEST_SPEED: 1,
+    Z_BEST_COMPRESSION: 9,
+    Z_DEFAULT_COMPRESSION: -1,
+    Z_FILTERED: 1,
+    Z_HUFFMAN_ONLY: 2,
+    Z_RLE: 3,
+    Z_FIXED: 4,
+    Z_DEFAULT_STRATEGY: 0,
+    DEFLATE: 1,
+    INFLATE: 2,
+    GZIP: 3,
+    GUNZIP: 4,
+    DEFLATERAW: 5,
+    INFLATERAW: 6,
+    UNZIP: 7,
+    BROTLI_DECODE: 8,
+    BROTLI_ENCODE: 9,
+    Z_MIN_WINDOWBITS: 8,
+    Z_MAX_WINDOWBITS: 15,
+    Z_DEFAULT_WINDOWBITS: 15,
+    Z_MIN_CHUNK: 64,
+    Z_MAX_CHUNK: Infinity,
+    Z_DEFAULT_CHUNK: 16384,
+    Z_MIN_MEMLEVEL: 1,
+    Z_MAX_MEMLEVEL: 9,
+    Z_DEFAULT_MEMLEVEL: 8,
+    Z_MIN_LEVEL: -1,
+    Z_MAX_LEVEL: 9,
+    Z_DEFAULT_LEVEL: -1,
+    BROTLI_OPERATION_PROCESS: 0,
+    BROTLI_OPERATION_FLUSH: 1,
+    BROTLI_OPERATION_FINISH: 2,
+    BROTLI_OPERATION_EMIT_METADATA: 3,
+    BROTLI_MODE_GENERIC: 0,
+    BROTLI_MODE_TEXT: 1,
+    BROTLI_MODE_FONT: 2,
+    BROTLI_DEFAULT_MODE: 0,
+    BROTLI_MIN_QUALITY: 0,
+    BROTLI_MAX_QUALITY: 11,
+    BROTLI_DEFAULT_QUALITY: 11,
+    BROTLI_MIN_WINDOW_BITS: 10,
+    BROTLI_MAX_WINDOW_BITS: 24,
+    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+    BROTLI_DEFAULT_WINDOW: 22,
+    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+    BROTLI_PARAM_MODE: 0,
+    BROTLI_PARAM_QUALITY: 1,
+    BROTLI_PARAM_LGWIN: 2,
+    BROTLI_PARAM_LGBLOCK: 3,
+    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+    BROTLI_PARAM_SIZE_HINT: 5,
+    BROTLI_PARAM_LARGE_WINDOW: 6,
+    BROTLI_PARAM_NPOSTFIX: 7,
+    BROTLI_PARAM_NDIRECT: 8,
+    BROTLI_DECODER_RESULT_ERROR: 0,
+    BROTLI_DECODER_RESULT_SUCCESS: 1,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+    BROTLI_DECODER_NO_ERROR: 0,
+    BROTLI_DECODER_SUCCESS: 1,
+    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants));
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js
new file mode 100644
index 0000000000000..b4906d2783372
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js
@@ -0,0 +1,392 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
+const assert_1 = __importDefault(require("assert"));
+const buffer_1 = require("buffer");
+const minipass_1 = require("minipass");
+const realZlib = __importStar(require("zlib"));
+const constants_js_1 = require("./constants.js");
+var constants_js_2 = require("./constants.js");
+Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
+const OriginalBufferConcat = buffer_1.Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+    ? (makeNoOp) => {
+        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+    }
+    : (_) => { };
+const _superWrite = Symbol('_superWrite');
+class ZlibError extends Error {
+    code;
+    errno;
+    constructor(err) {
+        super('zlib: ' + err.message);
+        this.code = err.code;
+        this.errno = err.errno;
+        /* c8 ignore next */
+        if (!this.code)
+            this.code = 'ZLIB_ERROR';
+        this.message = 'zlib: ' + err.message;
+        Error.captureStackTrace(this, this.constructor);
+    }
+    get name() {
+        return 'ZlibError';
+    }
+}
+exports.ZlibError = ZlibError;
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _flushFlag = Symbol('flushFlag');
+class ZlibBase extends minipass_1.Minipass {
+    #sawError = false;
+    #ended = false;
+    #flushFlag;
+    #finishFlushFlag;
+    #fullFlushFlag;
+    #handle;
+    #onError;
+    get sawError() {
+        return this.#sawError;
+    }
+    get handle() {
+        return this.#handle;
+    }
+    /* c8 ignore start */
+    get flushFlag() {
+        return this.#flushFlag;
+    }
+    /* c8 ignore stop */
+    constructor(opts, mode) {
+        if (!opts || typeof opts !== 'object')
+            throw new TypeError('invalid options for ZlibBase constructor');
+        //@ts-ignore
+        super(opts);
+        /* c8 ignore start */
+        this.#flushFlag = opts.flush ?? 0;
+        this.#finishFlushFlag = opts.finishFlush ?? 0;
+        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
+        /* c8 ignore stop */
+        // this will throw if any options are invalid for the class selected
+        try {
+            // @types/node doesn't know that it exports the classes, but they're there
+            //@ts-ignore
+            this.#handle = new realZlib[mode](opts);
+        }
+        catch (er) {
+            // make sure that all errors get decorated properly
+            throw new ZlibError(er);
+        }
+        this.#onError = err => {
+            // no sense raising multiple errors, since we abort on the first one.
+            if (this.#sawError)
+                return;
+            this.#sawError = true;
+            // there is no way to cleanly recover.
+            // continuing only obscures problems.
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close);
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            (0, assert_1.default)(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = buffer_1.Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        (0, assert_1.default)(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle
+            ._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        passthroughBufferConcat(true);
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            passthroughBufferConcat(false);
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            passthroughBufferConcat(false);
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                ;
+                this.#handle._handle =
+                    nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+                this.#handle.removeAllListeners('error');
+                // make sure OUR error listener is still attached tho
+            }
+        }
+        if (this.#handle)
+            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+        let writeReturn;
+        if (result) {
+            if (Array.isArray(result) && result.length > 0) {
+                const r = result[0];
+                // The first buffer is always `handle._outBuffer`, which would be
+                // re-used for later invocations; so, we always have to copy that one.
+                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
+                for (let i = 1; i < result.length; i++) {
+                    writeReturn = this[_superWrite](result[i]);
+                }
+            }
+            else {
+                // either a single Buffer or an empty array
+                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
+            }
+        }
+        if (cb)
+            cb();
+        return writeReturn;
+    }
+}
+class Zlib extends ZlibBase {
+    #level;
+    #strategy;
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
+        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
+        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
+        super(opts, mode);
+        this.#level = opts.level;
+        this.#strategy = opts.strategy;
+    }
+    params(level, strategy) {
+        if (this.sawError)
+            return;
+        if (!this.handle)
+            throw new Error('cannot switch params when binding is closed');
+        // no way to test this without also not supporting params at all
+        /* c8 ignore start */
+        if (!this.handle.params)
+            throw new Error('not supported in this implementation');
+        /* c8 ignore stop */
+        if (this.#level !== level || this.#strategy !== strategy) {
+            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
+            (0, assert_1.default)(this.handle, 'zlib binding closed');
+            // .params() calls .flush(), but the latter is always async in the
+            // core zlib. We override .flush() temporarily to intercept that and
+            // flush synchronously.
+            const origFlush = this.handle.flush;
+            this.handle.flush = (flushFlag, cb) => {
+                /* c8 ignore start */
+                if (typeof flushFlag === 'function') {
+                    cb = flushFlag;
+                    flushFlag = this.flushFlag;
+                }
+                /* c8 ignore stop */
+                this.flush(flushFlag);
+                cb?.();
+            };
+            try {
+                ;
+                this.handle.params(level, strategy);
+            }
+            finally {
+                this.handle.flush = origFlush;
+            }
+            /* c8 ignore start */
+            if (this.handle) {
+                this.#level = level;
+                this.#strategy = strategy;
+            }
+            /* c8 ignore stop */
+        }
+    }
+}
+exports.Zlib = Zlib;
+// minimal 2-byte header
+class Deflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Deflate');
+    }
+}
+exports.Deflate = Deflate;
+class Inflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Inflate');
+    }
+}
+exports.Inflate = Inflate;
+class Gzip extends Zlib {
+    #portable;
+    constructor(opts) {
+        super(opts, 'Gzip');
+        this.#portable = opts && !!opts.portable;
+    }
+    [_superWrite](data) {
+        if (!this.#portable)
+            return super[_superWrite](data);
+        // we'll always get the header emitted in one first chunk
+        // overwrite the OS indicator byte with 0xFF
+        this.#portable = false;
+        data[9] = 255;
+        return super[_superWrite](data);
+    }
+}
+exports.Gzip = Gzip;
+class Gunzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Gunzip');
+    }
+}
+exports.Gunzip = Gunzip;
+// raw - no header
+class DeflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'DeflateRaw');
+    }
+}
+exports.DeflateRaw = DeflateRaw;
+class InflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'InflateRaw');
+    }
+}
+exports.InflateRaw = InflateRaw;
+// auto-detect header.
+class Unzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Unzip');
+    }
+}
+exports.Unzip = Unzip;
+class Brotli extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
+        opts.finishFlush =
+            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
+        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
+        super(opts, mode);
+    }
+}
+exports.Brotli = Brotli;
+class BrotliCompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliCompress');
+    }
+}
+exports.BrotliCompress = BrotliCompress;
+class BrotliDecompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliDecompress');
+    }
+}
+exports.BrotliDecompress = BrotliDecompress;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js b/node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js
new file mode 100644
index 0000000000000..7faf40be5068d
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js
@@ -0,0 +1,117 @@
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+import realZlib from 'zlib';
+/* c8 ignore start */
+const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
+/* c8 ignore stop */
+export const constants = Object.freeze(Object.assign(Object.create(null), {
+    Z_NO_FLUSH: 0,
+    Z_PARTIAL_FLUSH: 1,
+    Z_SYNC_FLUSH: 2,
+    Z_FULL_FLUSH: 3,
+    Z_FINISH: 4,
+    Z_BLOCK: 5,
+    Z_OK: 0,
+    Z_STREAM_END: 1,
+    Z_NEED_DICT: 2,
+    Z_ERRNO: -1,
+    Z_STREAM_ERROR: -2,
+    Z_DATA_ERROR: -3,
+    Z_MEM_ERROR: -4,
+    Z_BUF_ERROR: -5,
+    Z_VERSION_ERROR: -6,
+    Z_NO_COMPRESSION: 0,
+    Z_BEST_SPEED: 1,
+    Z_BEST_COMPRESSION: 9,
+    Z_DEFAULT_COMPRESSION: -1,
+    Z_FILTERED: 1,
+    Z_HUFFMAN_ONLY: 2,
+    Z_RLE: 3,
+    Z_FIXED: 4,
+    Z_DEFAULT_STRATEGY: 0,
+    DEFLATE: 1,
+    INFLATE: 2,
+    GZIP: 3,
+    GUNZIP: 4,
+    DEFLATERAW: 5,
+    INFLATERAW: 6,
+    UNZIP: 7,
+    BROTLI_DECODE: 8,
+    BROTLI_ENCODE: 9,
+    Z_MIN_WINDOWBITS: 8,
+    Z_MAX_WINDOWBITS: 15,
+    Z_DEFAULT_WINDOWBITS: 15,
+    Z_MIN_CHUNK: 64,
+    Z_MAX_CHUNK: Infinity,
+    Z_DEFAULT_CHUNK: 16384,
+    Z_MIN_MEMLEVEL: 1,
+    Z_MAX_MEMLEVEL: 9,
+    Z_DEFAULT_MEMLEVEL: 8,
+    Z_MIN_LEVEL: -1,
+    Z_MAX_LEVEL: 9,
+    Z_DEFAULT_LEVEL: -1,
+    BROTLI_OPERATION_PROCESS: 0,
+    BROTLI_OPERATION_FLUSH: 1,
+    BROTLI_OPERATION_FINISH: 2,
+    BROTLI_OPERATION_EMIT_METADATA: 3,
+    BROTLI_MODE_GENERIC: 0,
+    BROTLI_MODE_TEXT: 1,
+    BROTLI_MODE_FONT: 2,
+    BROTLI_DEFAULT_MODE: 0,
+    BROTLI_MIN_QUALITY: 0,
+    BROTLI_MAX_QUALITY: 11,
+    BROTLI_DEFAULT_QUALITY: 11,
+    BROTLI_MIN_WINDOW_BITS: 10,
+    BROTLI_MAX_WINDOW_BITS: 24,
+    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+    BROTLI_DEFAULT_WINDOW: 22,
+    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+    BROTLI_PARAM_MODE: 0,
+    BROTLI_PARAM_QUALITY: 1,
+    BROTLI_PARAM_LGWIN: 2,
+    BROTLI_PARAM_LGBLOCK: 3,
+    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+    BROTLI_PARAM_SIZE_HINT: 5,
+    BROTLI_PARAM_LARGE_WINDOW: 6,
+    BROTLI_PARAM_NPOSTFIX: 7,
+    BROTLI_PARAM_NDIRECT: 8,
+    BROTLI_DECODER_RESULT_ERROR: 0,
+    BROTLI_DECODER_RESULT_SUCCESS: 1,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+    BROTLI_DECODER_NO_ERROR: 0,
+    BROTLI_DECODER_SUCCESS: 1,
+    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants));
+//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js b/node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js
new file mode 100644
index 0000000000000..f33586a8ab0ec
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js
@@ -0,0 +1,340 @@
+import assert from 'assert';
+import { Buffer } from 'buffer';
+import { Minipass } from 'minipass';
+import * as realZlib from 'zlib';
+import { constants } from './constants.js';
+export { constants } from './constants.js';
+const OriginalBufferConcat = Buffer.concat;
+const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
+const noop = (args) => args;
+const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
+    ? (makeNoOp) => {
+        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
+    }
+    : (_) => { };
+const _superWrite = Symbol('_superWrite');
+export class ZlibError extends Error {
+    code;
+    errno;
+    constructor(err) {
+        super('zlib: ' + err.message);
+        this.code = err.code;
+        this.errno = err.errno;
+        /* c8 ignore next */
+        if (!this.code)
+            this.code = 'ZLIB_ERROR';
+        this.message = 'zlib: ' + err.message;
+        Error.captureStackTrace(this, this.constructor);
+    }
+    get name() {
+        return 'ZlibError';
+    }
+}
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _flushFlag = Symbol('flushFlag');
+class ZlibBase extends Minipass {
+    #sawError = false;
+    #ended = false;
+    #flushFlag;
+    #finishFlushFlag;
+    #fullFlushFlag;
+    #handle;
+    #onError;
+    get sawError() {
+        return this.#sawError;
+    }
+    get handle() {
+        return this.#handle;
+    }
+    /* c8 ignore start */
+    get flushFlag() {
+        return this.#flushFlag;
+    }
+    /* c8 ignore stop */
+    constructor(opts, mode) {
+        if (!opts || typeof opts !== 'object')
+            throw new TypeError('invalid options for ZlibBase constructor');
+        //@ts-ignore
+        super(opts);
+        /* c8 ignore start */
+        this.#flushFlag = opts.flush ?? 0;
+        this.#finishFlushFlag = opts.finishFlush ?? 0;
+        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
+        /* c8 ignore stop */
+        // this will throw if any options are invalid for the class selected
+        try {
+            // @types/node doesn't know that it exports the classes, but they're there
+            //@ts-ignore
+            this.#handle = new realZlib[mode](opts);
+        }
+        catch (er) {
+            // make sure that all errors get decorated properly
+            throw new ZlibError(er);
+        }
+        this.#onError = err => {
+            // no sense raising multiple errors, since we abort on the first one.
+            if (this.#sawError)
+                return;
+            this.#sawError = true;
+            // there is no way to cleanly recover.
+            // continuing only obscures problems.
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close);
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            assert(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        assert(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle
+            ._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        passthroughBufferConcat(true);
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            passthroughBufferConcat(false);
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            passthroughBufferConcat(false);
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                ;
+                this.#handle._handle =
+                    nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+                this.#handle.removeAllListeners('error');
+                // make sure OUR error listener is still attached tho
+            }
+        }
+        if (this.#handle)
+            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+        let writeReturn;
+        if (result) {
+            if (Array.isArray(result) && result.length > 0) {
+                const r = result[0];
+                // The first buffer is always `handle._outBuffer`, which would be
+                // re-used for later invocations; so, we always have to copy that one.
+                writeReturn = this[_superWrite](Buffer.from(r));
+                for (let i = 1; i < result.length; i++) {
+                    writeReturn = this[_superWrite](result[i]);
+                }
+            }
+            else {
+                // either a single Buffer or an empty array
+                writeReturn = this[_superWrite](Buffer.from(result));
+            }
+        }
+        if (cb)
+            cb();
+        return writeReturn;
+    }
+}
+export class Zlib extends ZlibBase {
+    #level;
+    #strategy;
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.Z_NO_FLUSH;
+        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
+        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
+        super(opts, mode);
+        this.#level = opts.level;
+        this.#strategy = opts.strategy;
+    }
+    params(level, strategy) {
+        if (this.sawError)
+            return;
+        if (!this.handle)
+            throw new Error('cannot switch params when binding is closed');
+        // no way to test this without also not supporting params at all
+        /* c8 ignore start */
+        if (!this.handle.params)
+            throw new Error('not supported in this implementation');
+        /* c8 ignore stop */
+        if (this.#level !== level || this.#strategy !== strategy) {
+            this.flush(constants.Z_SYNC_FLUSH);
+            assert(this.handle, 'zlib binding closed');
+            // .params() calls .flush(), but the latter is always async in the
+            // core zlib. We override .flush() temporarily to intercept that and
+            // flush synchronously.
+            const origFlush = this.handle.flush;
+            this.handle.flush = (flushFlag, cb) => {
+                /* c8 ignore start */
+                if (typeof flushFlag === 'function') {
+                    cb = flushFlag;
+                    flushFlag = this.flushFlag;
+                }
+                /* c8 ignore stop */
+                this.flush(flushFlag);
+                cb?.();
+            };
+            try {
+                ;
+                this.handle.params(level, strategy);
+            }
+            finally {
+                this.handle.flush = origFlush;
+            }
+            /* c8 ignore start */
+            if (this.handle) {
+                this.#level = level;
+                this.#strategy = strategy;
+            }
+            /* c8 ignore stop */
+        }
+    }
+}
+// minimal 2-byte header
+export class Deflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Deflate');
+    }
+}
+export class Inflate extends Zlib {
+    constructor(opts) {
+        super(opts, 'Inflate');
+    }
+}
+export class Gzip extends Zlib {
+    #portable;
+    constructor(opts) {
+        super(opts, 'Gzip');
+        this.#portable = opts && !!opts.portable;
+    }
+    [_superWrite](data) {
+        if (!this.#portable)
+            return super[_superWrite](data);
+        // we'll always get the header emitted in one first chunk
+        // overwrite the OS indicator byte with 0xFF
+        this.#portable = false;
+        data[9] = 255;
+        return super[_superWrite](data);
+    }
+}
+export class Gunzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Gunzip');
+    }
+}
+// raw - no header
+export class DeflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'DeflateRaw');
+    }
+}
+export class InflateRaw extends Zlib {
+    constructor(opts) {
+        super(opts, 'InflateRaw');
+    }
+}
+// auto-detect header.
+export class Unzip extends Zlib {
+    constructor(opts) {
+        super(opts, 'Unzip');
+    }
+}
+export class Brotli extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
+        opts.finishFlush =
+            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
+        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
+        super(opts, mode);
+    }
+}
+export class BrotliCompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliCompress');
+    }
+}
+export class BrotliDecompress extends Brotli {
+    constructor(opts) {
+        super(opts, 'BrotliDecompress');
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json b/node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/npm-profile/node_modules/minizlib/package.json b/node_modules/npm-profile/node_modules/minizlib/package.json
new file mode 100644
index 0000000000000..43cb855e15a5d
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/minizlib/package.json
@@ -0,0 +1,80 @@
+{
+  "name": "minizlib",
+  "version": "3.0.2",
+  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+  "main": "./dist/commonjs/index.js",
+  "dependencies": {
+    "minipass": "^7.1.2"
+  },
+  "scripts": {
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minizlib.git"
+  },
+  "keywords": [
+    "zlib",
+    "gzip",
+    "gunzip",
+    "deflate",
+    "inflate",
+    "compression",
+    "zip",
+    "unzip"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "MIT",
+  "devDependencies": {
+    "@types/node": "^22.13.14",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.1"
+  },
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": ">= 18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/npm-profile/node_modules/negotiator/HISTORY.md b/node_modules/npm-profile/node_modules/negotiator/HISTORY.md
new file mode 100644
index 0000000000000..63d537d3f6811
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/HISTORY.md
@@ -0,0 +1,114 @@
+1.0.0 / 2024-08-31
+==================
+
+  * Drop support for node <18
+  * Added an option preferred encodings array #59
+
+0.6.3 / 2022-01-22
+==================
+
+  * Revert "Lazy-load modules from main entry point"
+
+0.6.2 / 2019-04-29
+==================
+
+  * Fix sorting charset, encoding, and language with extra parameters
+
+0.6.1 / 2016-05-02
+==================
+
+  * perf: improve `Accept` parsing speed
+  * perf: improve `Accept-Charset` parsing speed
+  * perf: improve `Accept-Encoding` parsing speed
+  * perf: improve `Accept-Language` parsing speed
+
+0.6.0 / 2015-09-29
+==================
+
+  * Fix including type extensions in parameters in `Accept` parsing
+  * Fix parsing `Accept` parameters with quoted equals
+  * Fix parsing `Accept` parameters with quoted semicolons
+  * Lazy-load modules from main entry point
+  * perf: delay type concatenation until needed
+  * perf: enable strict mode
+  * perf: hoist regular expressions
+  * perf: remove closures getting spec properties
+  * perf: remove a closure from media type parsing
+  * perf: remove property delete from media type parsing
+
+0.5.3 / 2015-05-10
+==================
+
+  * Fix media type parameter matching to be case-insensitive
+
+0.5.2 / 2015-05-06
+==================
+
+  * Fix comparing media types with quoted values
+  * Fix splitting media types with quoted commas
+
+0.5.1 / 2015-02-14
+==================
+
+  * Fix preference sorting to be stable for long acceptable lists
+
+0.5.0 / 2014-12-18
+==================
+
+  * Fix list return order when large accepted list
+  * Fix missing identity encoding when q=0 exists
+  * Remove dynamic building of Negotiator class
+
+0.4.9 / 2014-10-14
+==================
+
+  * Fix error when media type has invalid parameter
+
+0.4.8 / 2014-09-28
+==================
+
+  * Fix all negotiations to be case-insensitive
+  * Stable sort preferences of same quality according to client order
+  * Support Node.js 0.6
+
+0.4.7 / 2014-06-24
+==================
+
+  * Handle invalid provided languages
+  * Handle invalid provided media types
+
+0.4.6 / 2014-06-11
+==================
+
+  *  Order by specificity when quality is the same
+
+0.4.5 / 2014-05-29
+==================
+
+  * Fix regression in empty header handling
+
+0.4.4 / 2014-05-29
+==================
+
+  * Fix behaviors when headers are not present
+
+0.4.3 / 2014-04-16
+==================
+
+  * Handle slashes on media params correctly
+
+0.4.2 / 2014-02-28
+==================
+
+  * Fix media type sorting
+  * Handle media types params strictly
+
+0.4.1 / 2014-01-16
+==================
+
+  * Use most specific matches
+
+0.4.0 / 2014-01-09
+==================
+
+  * Remove preferred prefix from methods
diff --git a/node_modules/npm-profile/node_modules/negotiator/LICENSE b/node_modules/npm-profile/node_modules/negotiator/LICENSE
new file mode 100644
index 0000000000000..ea6b9e2e9ac25
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2012-2014 Federico Romero
+Copyright (c) 2012-2014 Isaac Z. Schlueter
+Copyright (c) 2014-2015 Douglas Christopher Wilson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/negotiator/index.js b/node_modules/npm-profile/node_modules/negotiator/index.js
new file mode 100644
index 0000000000000..4f51315d6af4b
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/index.js
@@ -0,0 +1,83 @@
+/*!
+ * negotiator
+ * Copyright(c) 2012 Federico Romero
+ * Copyright(c) 2012-2014 Isaac Z. Schlueter
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+var preferredCharsets = require('./lib/charset')
+var preferredEncodings = require('./lib/encoding')
+var preferredLanguages = require('./lib/language')
+var preferredMediaTypes = require('./lib/mediaType')
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = Negotiator;
+module.exports.Negotiator = Negotiator;
+
+/**
+ * Create a Negotiator instance from a request.
+ * @param {object} request
+ * @public
+ */
+
+function Negotiator(request) {
+  if (!(this instanceof Negotiator)) {
+    return new Negotiator(request);
+  }
+
+  this.request = request;
+}
+
+Negotiator.prototype.charset = function charset(available) {
+  var set = this.charsets(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.charsets = function charsets(available) {
+  return preferredCharsets(this.request.headers['accept-charset'], available);
+};
+
+Negotiator.prototype.encoding = function encoding(available, opts) {
+  var set = this.encodings(available, opts);
+  return set && set[0];
+};
+
+Negotiator.prototype.encodings = function encodings(available, options) {
+  var opts = options || {};
+  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
+};
+
+Negotiator.prototype.language = function language(available) {
+  var set = this.languages(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.languages = function languages(available) {
+  return preferredLanguages(this.request.headers['accept-language'], available);
+};
+
+Negotiator.prototype.mediaType = function mediaType(available) {
+  var set = this.mediaTypes(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.mediaTypes = function mediaTypes(available) {
+  return preferredMediaTypes(this.request.headers.accept, available);
+};
+
+// Backwards compatibility
+Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
+Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
+Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
+Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
+Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
+Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
+Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
+Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/charset.js b/node_modules/npm-profile/node_modules/negotiator/lib/charset.js
new file mode 100644
index 0000000000000..cdd014803474a
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/lib/charset.js
@@ -0,0 +1,169 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredCharsets;
+module.exports.preferredCharsets = preferredCharsets;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Charset header.
+ * @private
+ */
+
+function parseAcceptCharset(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var charset = parseCharset(accepts[i].trim(), i);
+
+    if (charset) {
+      accepts[j++] = charset;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a charset from the Accept-Charset header.
+ * @private
+ */
+
+function parseCharset(str, i) {
+  var match = simpleCharsetRegExp.exec(str);
+  if (!match) return null;
+
+  var charset = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    charset: charset,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a charset.
+ * @private
+ */
+
+function getCharsetPriority(charset, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(charset, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the charset.
+ * @private
+ */
+
+function specify(charset, spec, index) {
+  var s = 0;
+  if(spec.charset.toLowerCase() === charset.toLowerCase()){
+    s |= 1;
+  } else if (spec.charset !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+}
+
+/**
+ * Get the preferred charsets from an Accept-Charset header.
+ * @public
+ */
+
+function preferredCharsets(accept, provided) {
+  // RFC 2616 sec 14.2: no header = *
+  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all charsets
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullCharset);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getCharsetPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted charsets
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full charset string.
+ * @private
+ */
+
+function getFullCharset(spec) {
+  return spec.charset;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/encoding.js b/node_modules/npm-profile/node_modules/negotiator/lib/encoding.js
new file mode 100644
index 0000000000000..9ebb633d67743
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/lib/encoding.js
@@ -0,0 +1,205 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredEncodings;
+module.exports.preferredEncodings = preferredEncodings;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Encoding header.
+ * @private
+ */
+
+function parseAcceptEncoding(accept) {
+  var accepts = accept.split(',');
+  var hasIdentity = false;
+  var minQuality = 1;
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var encoding = parseEncoding(accepts[i].trim(), i);
+
+    if (encoding) {
+      accepts[j++] = encoding;
+      hasIdentity = hasIdentity || specify('identity', encoding);
+      minQuality = Math.min(minQuality, encoding.q || 1);
+    }
+  }
+
+  if (!hasIdentity) {
+    /*
+     * If identity doesn't explicitly appear in the accept-encoding header,
+     * it's added to the list of acceptable encoding with the lowest q
+     */
+    accepts[j++] = {
+      encoding: 'identity',
+      q: minQuality,
+      i: i
+    };
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse an encoding from the Accept-Encoding header.
+ * @private
+ */
+
+function parseEncoding(str, i) {
+  var match = simpleEncodingRegExp.exec(str);
+  if (!match) return null;
+
+  var encoding = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    encoding: encoding,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of an encoding.
+ * @private
+ */
+
+function getEncodingPriority(encoding, accepted, index) {
+  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(encoding, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the encoding.
+ * @private
+ */
+
+function specify(encoding, spec, index) {
+  var s = 0;
+  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
+    s |= 1;
+  } else if (spec.encoding !== '*' ) {
+    return null
+  }
+
+  return {
+    encoding: encoding,
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred encodings from an Accept-Encoding header.
+ * @public
+ */
+
+function preferredEncodings(accept, provided, preferred) {
+  var accepts = parseAcceptEncoding(accept || '');
+
+  var comparator = preferred ? function comparator (a, b) {
+    if (a.q !== b.q) {
+      return b.q - a.q // higher quality first
+    }
+
+    var aPreferred = preferred.indexOf(a.encoding)
+    var bPreferred = preferred.indexOf(b.encoding)
+
+    if (aPreferred === -1 && bPreferred === -1) {
+      // consider the original specifity/order
+      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+    }
+
+    if (aPreferred !== -1 && bPreferred !== -1) {
+      return aPreferred - bPreferred // consider the preferred order
+    }
+
+    return aPreferred === -1 ? 1 : -1 // preferred first
+  } : compareSpecs;
+
+  if (!provided) {
+    // sorted list of all encodings
+    return accepts
+      .filter(isQuality)
+      .sort(comparator)
+      .map(getFullEncoding);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getEncodingPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted encodings
+  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
+}
+
+/**
+ * Get full encoding string.
+ * @private
+ */
+
+function getFullEncoding(spec) {
+  return spec.encoding;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/language.js b/node_modules/npm-profile/node_modules/negotiator/lib/language.js
new file mode 100644
index 0000000000000..a23167252719b
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/lib/language.js
@@ -0,0 +1,179 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredLanguages;
+module.exports.preferredLanguages = preferredLanguages;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Language header.
+ * @private
+ */
+
+function parseAcceptLanguage(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var language = parseLanguage(accepts[i].trim(), i);
+
+    if (language) {
+      accepts[j++] = language;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a language from the Accept-Language header.
+ * @private
+ */
+
+function parseLanguage(str, i) {
+  var match = simpleLanguageRegExp.exec(str);
+  if (!match) return null;
+
+  var prefix = match[1]
+  var suffix = match[2]
+  var full = prefix
+
+  if (suffix) full += "-" + suffix;
+
+  var q = 1;
+  if (match[3]) {
+    var params = match[3].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].split('=');
+      if (p[0] === 'q') q = parseFloat(p[1]);
+    }
+  }
+
+  return {
+    prefix: prefix,
+    suffix: suffix,
+    q: q,
+    i: i,
+    full: full
+  };
+}
+
+/**
+ * Get the priority of a language.
+ * @private
+ */
+
+function getLanguagePriority(language, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(language, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the language.
+ * @private
+ */
+
+function specify(language, spec, index) {
+  var p = parseLanguage(language)
+  if (!p) return null;
+  var s = 0;
+  if(spec.full.toLowerCase() === p.full.toLowerCase()){
+    s |= 4;
+  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
+    s |= 2;
+  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
+    s |= 1;
+  } else if (spec.full !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred languages from an Accept-Language header.
+ * @public
+ */
+
+function preferredLanguages(accept, provided) {
+  // RFC 2616 sec 14.4: no header = *
+  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all languages
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullLanguage);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getLanguagePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted languages
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full language string.
+ * @private
+ */
+
+function getFullLanguage(spec) {
+  return spec.full;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/mediaType.js b/node_modules/npm-profile/node_modules/negotiator/lib/mediaType.js
new file mode 100644
index 0000000000000..8e402ea88394c
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/lib/mediaType.js
@@ -0,0 +1,294 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredMediaTypes;
+module.exports.preferredMediaTypes = preferredMediaTypes;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept header.
+ * @private
+ */
+
+function parseAccept(accept) {
+  var accepts = splitMediaTypes(accept);
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var mediaType = parseMediaType(accepts[i].trim(), i);
+
+    if (mediaType) {
+      accepts[j++] = mediaType;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a media type from the Accept header.
+ * @private
+ */
+
+function parseMediaType(str, i) {
+  var match = simpleMediaTypeRegExp.exec(str);
+  if (!match) return null;
+
+  var params = Object.create(null);
+  var q = 1;
+  var subtype = match[2];
+  var type = match[1];
+
+  if (match[3]) {
+    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
+
+    for (var j = 0; j < kvps.length; j++) {
+      var pair = kvps[j];
+      var key = pair[0].toLowerCase();
+      var val = pair[1];
+
+      // get the value, unwrapping quotes
+      var value = val && val[0] === '"' && val[val.length - 1] === '"'
+        ? val.slice(1, -1)
+        : val;
+
+      if (key === 'q') {
+        q = parseFloat(value);
+        break;
+      }
+
+      // store parameter
+      params[key] = value;
+    }
+  }
+
+  return {
+    type: type,
+    subtype: subtype,
+    params: params,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a media type.
+ * @private
+ */
+
+function getMediaTypePriority(type, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(type, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the media type.
+ * @private
+ */
+
+function specify(type, spec, index) {
+  var p = parseMediaType(type);
+  var s = 0;
+
+  if (!p) {
+    return null;
+  }
+
+  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
+    s |= 4
+  } else if(spec.type != '*') {
+    return null;
+  }
+
+  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
+    s |= 2
+  } else if(spec.subtype != '*') {
+    return null;
+  }
+
+  var keys = Object.keys(spec.params);
+  if (keys.length > 0) {
+    if (keys.every(function (k) {
+      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
+    })) {
+      s |= 1
+    } else {
+      return null
+    }
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s,
+  }
+}
+
+/**
+ * Get the preferred media types from an Accept header.
+ * @public
+ */
+
+function preferredMediaTypes(accept, provided) {
+  // RFC 2616 sec 14.2: no header = */*
+  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all types
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullType);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getMediaTypePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted types
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full type string.
+ * @private
+ */
+
+function getFullType(spec) {
+  return spec.type + '/' + spec.subtype;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
+
+/**
+ * Count the number of quotes in a string.
+ * @private
+ */
+
+function quoteCount(string) {
+  var count = 0;
+  var index = 0;
+
+  while ((index = string.indexOf('"', index)) !== -1) {
+    count++;
+    index++;
+  }
+
+  return count;
+}
+
+/**
+ * Split a key value pair.
+ * @private
+ */
+
+function splitKeyValuePair(str) {
+  var index = str.indexOf('=');
+  var key;
+  var val;
+
+  if (index === -1) {
+    key = str;
+  } else {
+    key = str.slice(0, index);
+    val = str.slice(index + 1);
+  }
+
+  return [key, val];
+}
+
+/**
+ * Split an Accept header into media types.
+ * @private
+ */
+
+function splitMediaTypes(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 1, j = 0; i < accepts.length; i++) {
+    if (quoteCount(accepts[j]) % 2 == 0) {
+      accepts[++j] = accepts[i];
+    } else {
+      accepts[j] += ',' + accepts[i];
+    }
+  }
+
+  // trim accepts
+  accepts.length = j + 1;
+
+  return accepts;
+}
+
+/**
+ * Split a string of parameters.
+ * @private
+ */
+
+function splitParameters(str) {
+  var parameters = str.split(';');
+
+  for (var i = 1, j = 0; i < parameters.length; i++) {
+    if (quoteCount(parameters[j]) % 2 == 0) {
+      parameters[++j] = parameters[i];
+    } else {
+      parameters[j] += ';' + parameters[i];
+    }
+  }
+
+  // trim parameters
+  parameters.length = j + 1;
+
+  for (var i = 0; i < parameters.length; i++) {
+    parameters[i] = parameters[i].trim();
+  }
+
+  return parameters;
+}
diff --git a/node_modules/npm-profile/node_modules/negotiator/package.json b/node_modules/npm-profile/node_modules/negotiator/package.json
new file mode 100644
index 0000000000000..e4bdc1ef4f748
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/negotiator/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "negotiator",
+  "description": "HTTP content negotiation",
+  "version": "1.0.0",
+  "contributors": [
+    "Douglas Christopher Wilson ",
+    "Federico Romero ",
+    "Isaac Z. Schlueter  (http://blog.izs.me/)"
+  ],
+  "license": "MIT",
+  "keywords": [
+    "http",
+    "content negotiation",
+    "accept",
+    "accept-language",
+    "accept-encoding",
+    "accept-charset"
+  ],
+  "repository": "jshttp/negotiator",
+  "devDependencies": {
+    "eslint": "7.32.0",
+    "eslint-plugin-markdown": "2.2.1",
+    "mocha": "9.1.3",
+    "nyc": "15.1.0"
+  },
+  "files": [
+    "lib/",
+    "HISTORY.md",
+    "LICENSE",
+    "index.js",
+    "README.md"
+  ],
+  "engines": {
+    "node": ">= 0.6"
+  },
+  "scripts": {
+    "lint": "eslint .",
+    "test": "mocha --reporter spec --check-leaks --bail test/",
+    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
+    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+    "test-cov": "nyc --reporter=html --reporter=text npm test"
+  }
+}
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE b/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
new file mode 100644
index 0000000000000..19cec97b18468
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
new file mode 100644
index 0000000000000..d409b7f1becfc
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
@@ -0,0 +1,481 @@
+'use strict'
+
+const isWindows = process.platform === 'win32'
+
+const { URL } = require('node:url')
+// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
+const path = isWindows ? require('node:path/win32') : require('node:path')
+const { homedir } = require('node:os')
+const HostedGit = require('hosted-git-info')
+const semver = require('semver')
+const validatePackageName = require('validate-npm-package-name')
+const { log } = require('proc-log')
+
+const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
+const isURL = /^(?:git[+])?[a-z]+:/i
+const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
+const isFileType = /[.](?:tgz|tar.gz|tar)$/i
+const isPortNumber = /:[0-9]+(\/|$)/i
+const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
+const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
+const defaultRegistry = 'https://registry.npmjs.org'
+
+function npa (arg, where) {
+  let name
+  let spec
+  if (typeof arg === 'object') {
+    if (arg instanceof Result && (!where || where === arg.where)) {
+      return arg
+    } else if (arg.name && arg.rawSpec) {
+      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
+    } else {
+      return npa(arg.raw, where || arg.where)
+    }
+  }
+  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
+  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
+  if (isURL.test(arg)) {
+    spec = arg
+  } else if (isGit.test(arg)) {
+    spec = `git+ssh://${arg}`
+  // eslint-disable-next-line max-len
+  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
+    spec = arg
+  } else if (nameEndsAt > 0) {
+    name = namePart
+    spec = arg.slice(nameEndsAt + 1) || '*'
+  } else {
+    const valid = validatePackageName(arg)
+    if (valid.validForOldPackages) {
+      name = arg
+      spec = '*'
+    } else {
+      spec = arg
+    }
+  }
+  return resolve(name, spec, where, arg)
+}
+
+function isFileSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  if (spec.toLowerCase().startsWith('file:')) {
+    return true
+  }
+  if (isWindows) {
+    return isWindowsFile.test(spec)
+  }
+  // We never hit this in windows tests, obviously
+  /* istanbul ignore next */
+  return isPosixFile.test(spec)
+}
+
+function isAliasSpec (spec) {
+  if (!spec) {
+    return false
+  }
+  return spec.toLowerCase().startsWith('npm:')
+}
+
+function resolve (name, spec, where, arg) {
+  const res = new Result({
+    raw: arg,
+    name: name,
+    rawSpec: spec,
+    fromArgument: arg != null,
+  })
+
+  if (name) {
+    res.name = name
+  }
+
+  if (!where) {
+    where = process.cwd()
+  }
+
+  if (isFileSpec(spec)) {
+    return fromFile(res, where)
+  } else if (isAliasSpec(spec)) {
+    return fromAlias(res, where)
+  }
+
+  const hosted = HostedGit.fromUrl(spec, {
+    noGitPlus: true,
+    noCommittish: true,
+  })
+  if (hosted) {
+    return fromHostedGit(res, hosted)
+  } else if (spec && isURL.test(spec)) {
+    return fromURL(res)
+  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
+    return fromFile(res, where)
+  } else {
+    return fromRegistry(res)
+  }
+}
+
+function toPurl (arg, reg = defaultRegistry) {
+  const res = npa(arg)
+
+  if (res.type !== 'version') {
+    throw invalidPurlType(res.type, res.raw)
+  }
+
+  // URI-encode leading @ of scoped packages
+  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
+  if (reg !== defaultRegistry) {
+    purl += '?repository_url=' + reg
+  }
+
+  return purl
+}
+
+function invalidPackageName (name, valid, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
+  err.code = 'EINVALIDPACKAGENAME'
+  return err
+}
+
+function invalidTagName (name, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
+  err.code = 'EINVALIDTAGNAME'
+  return err
+}
+
+function invalidPurlType (type, raw) {
+  // eslint-disable-next-line max-len
+  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
+  err.code = 'EINVALIDPURLTYPE'
+  return err
+}
+
+class Result {
+  constructor (opts) {
+    this.type = opts.type
+    this.registry = opts.registry
+    this.where = opts.where
+    if (opts.raw == null) {
+      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
+    } else {
+      this.raw = opts.raw
+    }
+    this.name = undefined
+    this.escapedName = undefined
+    this.scope = undefined
+    this.rawSpec = opts.rawSpec || ''
+    this.saveSpec = opts.saveSpec
+    this.fetchSpec = opts.fetchSpec
+    if (opts.name) {
+      this.setName(opts.name)
+    }
+    this.gitRange = opts.gitRange
+    this.gitCommittish = opts.gitCommittish
+    this.gitSubdir = opts.gitSubdir
+    this.hosted = opts.hosted
+  }
+
+  // TODO move this to a getter/setter in a semver major
+  setName (name) {
+    const valid = validatePackageName(name)
+    if (!valid.validForOldPackages) {
+      throw invalidPackageName(name, valid, this.raw)
+    }
+
+    this.name = name
+    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
+    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
+    this.escapedName = name.replace('/', '%2f')
+    return this
+  }
+
+  toString () {
+    const full = []
+    if (this.name != null && this.name !== '') {
+      full.push(this.name)
+    }
+    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
+    if (spec != null && spec !== '') {
+      full.push(spec)
+    }
+    return full.length ? full.join('@') : this.raw
+  }
+
+  toJSON () {
+    const result = Object.assign({}, this)
+    delete result.hosted
+    return result
+  }
+}
+
+// sets res.gitCommittish, res.gitRange, and res.gitSubdir
+function setGitAttrs (res, committish) {
+  if (!committish) {
+    res.gitCommittish = null
+    return
+  }
+
+  // for each :: separated item:
+  for (const part of committish.split('::')) {
+    // if the item has no : then it is a commit-ish
+    if (!part.includes(':')) {
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a committish')
+      }
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a second committish')
+      }
+      res.gitCommittish = part
+      continue
+    }
+    // split on name:value
+    const [name, value] = part.split(':')
+    // if name is semver do semver lookup of ref or tag
+    if (name === 'semver') {
+      if (res.gitCommittish) {
+        throw new Error('cannot override existing committish with a semver range')
+      }
+      if (res.gitRange) {
+        throw new Error('cannot override existing semver range with a second semver range')
+      }
+      res.gitRange = decodeURIComponent(value)
+      continue
+    }
+    if (name === 'path') {
+      if (res.gitSubdir) {
+        throw new Error('cannot override existing path with a second path')
+      }
+      res.gitSubdir = `/${value}`
+      continue
+    }
+    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
+  }
+}
+
+// Taken from: EncodePathChars and lookup_table in src/node_url.cc
+// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
+// encodeURI mangles windows paths. We can't use it to encode paths.
+// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
+// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
+const encodedPathChars = new Map([
+  ['\0', '%00'],
+  ['\t', '%09'],
+  ['\n', '%0A'],
+  ['\r', '%0D'],
+  [' ', '%20'],
+  ['"', '%22'],
+  ['#', '%23'],
+  ['%', '%25'],
+  ['?', '%3F'],
+  ['[', '%5B'],
+  ['\\', isWindows ? '/' : '%5C'],
+  [']', '%5D'],
+  ['^', '%5E'],
+  ['|', '%7C'],
+  ['~', '%7E'],
+])
+
+function pathToFileURL (str) {
+  let result = ''
+  for (let i = 0; i < str.length; i++) {
+    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
+  }
+  if (result.startsWith('file:')) {
+    return result
+  }
+  return `file:${result}`
+}
+
+function fromFile (res, where) {
+  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
+  res.where = where
+
+  let rawSpec = pathToFileURL(res.rawSpec)
+
+  if (rawSpec.startsWith('file:/')) {
+    // XXX backwards compatibility lack of compliance with RFC 8089
+
+    // turn file://path into file:/path
+    if (/^file:\/\/[^/]/.test(rawSpec)) {
+      rawSpec = `file:/${rawSpec.slice(5)}`
+    }
+
+    // turn file:/../path into file:../path
+    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
+    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
+      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
+    }
+  }
+
+  let resolvedUrl
+  let specUrl
+  try {
+    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
+    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
+    specUrl = new URL(rawSpec)
+  } catch (originalError) {
+    const er = new Error('Invalid file: URL, must comply with RFC 8089')
+    throw Object.assign(er, {
+      raw: res.rawSpec,
+      spec: res,
+      where,
+      originalError,
+    })
+  }
+
+  // turn /C:/blah into just C:/blah on windows
+  let specPath = decodeURIComponent(specUrl.pathname)
+  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
+  if (isWindows) {
+    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
+    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
+  }
+
+  // replace ~ with homedir, but keep the ~ in the saveSpec
+  // otherwise, make it relative to where param
+  if (/^\/~(\/|$)/.test(specPath)) {
+    res.saveSpec = `file:${specPath.substr(1)}`
+    resolvedPath = path.resolve(homedir(), specPath.substr(3))
+  } else if (!path.isAbsolute(rawSpec.slice(5))) {
+    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
+  } else {
+    res.saveSpec = `file:${path.resolve(resolvedPath)}`
+  }
+
+  res.fetchSpec = path.resolve(where, resolvedPath)
+  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
+  res.saveSpec = res.saveSpec.split('\\').join('/')
+  // Ignoring because this only happens in windows
+  /* istanbul ignore next */
+  if (res.saveSpec.startsWith('file://')) {
+    // normalization of \\win32\root paths can cause a double / which we don't want
+    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
+  }
+  return res
+}
+
+function fromHostedGit (res, hosted) {
+  res.type = 'git'
+  res.hosted = hosted
+  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
+  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
+  setGitAttrs(res, hosted.committish)
+  return res
+}
+
+function unsupportedURLType (protocol, spec) {
+  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
+  err.code = 'EUNSUPPORTEDPROTOCOL'
+  return err
+}
+
+function fromURL (res) {
+  let rawSpec = res.rawSpec
+  res.saveSpec = rawSpec
+  if (rawSpec.startsWith('git+ssh:')) {
+    // git ssh specifiers are overloaded to also use scp-style git
+    // specifiers, so we have to parse those out and treat them special.
+    // They are NOT true URIs, so we can't hand them to URL.
+
+    // This regex looks for things that look like:
+    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
+    // ...and various combinations. The username in the beginning is *required*.
+    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
+    // Filter out all-number "usernames" which are really port numbers
+    // They can either be :1234 :1234/ or :1234/path but not :12abc
+    if (matched && !matched[1].match(isPortNumber)) {
+      res.type = 'git'
+      setGitAttrs(res, matched[2])
+      res.fetchSpec = matched[1]
+      return res
+    }
+  } else if (rawSpec.startsWith('git+file://')) {
+    // URL can't handle windows paths
+    rawSpec = rawSpec.replace(/\\/g, '/')
+  }
+  const parsedUrl = new URL(rawSpec)
+  // check the protocol, and then see if it's git or not
+  switch (parsedUrl.protocol) {
+    case 'git:':
+    case 'git+http:':
+    case 'git+https:':
+    case 'git+rsync:':
+    case 'git+ftp:':
+    case 'git+file:':
+    case 'git+ssh:':
+      res.type = 'git'
+      setGitAttrs(res, parsedUrl.hash.slice(1))
+      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
+        // URL can't handle drive letters on windows file paths, the host can't contain a :
+        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
+      } else {
+        parsedUrl.hash = ''
+        res.fetchSpec = parsedUrl.toString()
+      }
+      if (res.fetchSpec.startsWith('git+')) {
+        res.fetchSpec = res.fetchSpec.slice(4)
+      }
+      break
+    case 'http:':
+    case 'https:':
+      res.type = 'remote'
+      res.fetchSpec = res.saveSpec
+      break
+
+    default:
+      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
+  }
+
+  return res
+}
+
+function fromAlias (res, where) {
+  const subSpec = npa(res.rawSpec.substr(4), where)
+  if (subSpec.type === 'alias') {
+    throw new Error('nested aliases not supported')
+  }
+
+  if (!subSpec.registry) {
+    throw new Error('aliases only work for registry deps')
+  }
+
+  if (!subSpec.name) {
+    throw new Error('aliases must have a name')
+  }
+
+  res.subSpec = subSpec
+  res.registry = true
+  res.type = 'alias'
+  res.saveSpec = null
+  res.fetchSpec = null
+  return res
+}
+
+function fromRegistry (res) {
+  res.registry = true
+  const spec = res.rawSpec.trim()
+  // no save spec for registry components as we save based on the fetched
+  // version, not on the argument so this can't compute that.
+  res.saveSpec = null
+  res.fetchSpec = spec
+  const version = semver.valid(spec, true)
+  const range = semver.validRange(spec, true)
+  if (version) {
+    res.type = 'version'
+  } else if (range) {
+    res.type = 'range'
+  } else {
+    if (encodeURIComponent(spec) !== spec) {
+      throw invalidTagName(spec, res.raw)
+    }
+    res.type = 'tag'
+  }
+  return res
+}
+
+module.exports = npa
+module.exports.resolve = resolve
+module.exports.toPurl = toPurl
+module.exports.Result = Result
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/package.json b/node_modules/npm-profile/node_modules/npm-package-arg/package.json
new file mode 100644
index 0000000000000..db6ce9074cfa2
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-package-arg/package.json
@@ -0,0 +1,61 @@
+{
+  "name": "npm-package-arg",
+  "version": "13.0.0",
+  "description": "Parse the things that can be arguments to `npm install`",
+  "main": "./lib/npa.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "dependencies": {
+    "hosted-git-info": "^9.0.0",
+    "proc-log": "^5.0.0",
+    "semver": "^7.3.5",
+    "validate-npm-package-name": "^6.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.5",
+    "tap": "^16.0.1"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "npmclilint": "npmcli-lint",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "posttest": "npm run lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-package-arg.git"
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/npm-package-arg/issues"
+  },
+  "homepage": "https://github.com/npm/npm-package-arg",
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "branches": 97,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.5",
+    "publish": true
+  }
+}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
new file mode 100644
index 0000000000000..5fc208ff122e0
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
@@ -0,0 +1,20 @@
+
+
+ISC License
+
+Copyright npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this
+software for any purpose with or without fee is hereby
+granted, provided that the above copyright notice and this
+permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
+WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
+EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
+TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
new file mode 100644
index 0000000000000..9270025fa8d90
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
@@ -0,0 +1,181 @@
+'use strict'
+const fs = require('fs')
+const npa = require('npm-package-arg')
+const { URL } = require('url')
+
+// Find the longest registry key that is used for some kind of auth
+// in the options.  Returns the registry key and the auth config.
+const regFromURI = (uri, opts) => {
+  const parsed = new URL(uri)
+  // try to find a config key indicating we have auth for this registry
+  // can be one of :_authToken, :_auth, :_password and :username, or
+  // :certfile and :keyfile
+  // We walk up the "path" until we're left with just //[:],
+  // stopping when we reach '//'.
+  let regKey = `//${parsed.host}${parsed.pathname}`
+  while (regKey.length > '//'.length) {
+    const authKey = hasAuth(regKey, opts)
+    // got some auth for this URI
+    if (authKey) {
+      return { regKey, authKey }
+    }
+
+    // can be either //host/some/path/:_auth or //host/some/path:_auth
+    // walk up by removing EITHER what's after the slash OR the slash itself
+    regKey = regKey.replace(/([^/]+|\/)$/, '')
+  }
+  return { regKey: false, authKey: null }
+}
+
+// Not only do we want to know if there is auth, but if we are calling `npm
+// logout` we want to know what config value specifically provided it.  This is
+// so we can look up where the config came from to delete it (i.e. user vs
+// project)
+const hasAuth = (regKey, opts) => {
+  if (opts[`${regKey}:_authToken`]) {
+    return '_authToken'
+  }
+  if (opts[`${regKey}:_auth`]) {
+    return '_auth'
+  }
+  if (opts[`${regKey}:username`] && opts[`${regKey}:_password`]) {
+    // 'password' can be inferred to also be present
+    return 'username'
+  }
+  if (opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`]) {
+    // 'keyfile' can be inferred to also be present
+    return 'certfile'
+  }
+  return false
+}
+
+const sameHost = (a, b) => {
+  const parsedA = new URL(a)
+  const parsedB = new URL(b)
+  return parsedA.host === parsedB.host
+}
+
+const getRegistry = opts => {
+  const { spec } = opts
+  const { scope: specScope, subSpec } = spec ? npa(spec) : {}
+  const subSpecScope = subSpec && subSpec.scope
+  const scope = subSpec ? subSpecScope : specScope
+  const scopeReg = scope && opts[`${scope}:registry`]
+  return scopeReg || opts.registry
+}
+
+const maybeReadFile = file => {
+  try {
+    return fs.readFileSync(file, 'utf8')
+  } catch (er) {
+    if (er.code !== 'ENOENT') {
+      throw er
+    }
+    return null
+  }
+}
+
+const getAuth = (uri, opts = {}) => {
+  const { forceAuth } = opts
+  if (!uri) {
+    throw new Error('URI is required')
+  }
+  const { regKey, authKey } = regFromURI(uri, forceAuth || opts)
+
+  // we are only allowed to use what's in forceAuth if specified
+  if (forceAuth && !regKey) {
+    return new Auth({
+      // if we force auth we don't want to refer back to anything in config
+      regKey: false,
+      authKey: null,
+      scopeAuthKey: null,
+      token: forceAuth._authToken || forceAuth.token,
+      username: forceAuth.username,
+      password: forceAuth._password || forceAuth.password,
+      auth: forceAuth._auth || forceAuth.auth,
+      certfile: forceAuth.certfile,
+      keyfile: forceAuth.keyfile,
+    })
+  }
+
+  // no auth for this URI, but might have it for the registry
+  if (!regKey) {
+    const registry = getRegistry(opts)
+    if (registry && uri !== registry && sameHost(uri, registry)) {
+      return getAuth(registry, opts)
+    } else if (registry !== opts.registry) {
+      // If making a tarball request to a different base URI than the
+      // registry where we logged in, but the same auth SHOULD be sent
+      // to that artifact host, then we track where it was coming in from,
+      // and warn the user if we get a 4xx error on it.
+      const { regKey: scopeAuthKey, authKey: _authKey } = regFromURI(registry, opts)
+      return new Auth({ scopeAuthKey, regKey: scopeAuthKey, authKey: _authKey })
+    }
+  }
+
+  const {
+    [`${regKey}:_authToken`]: token,
+    [`${regKey}:username`]: username,
+    [`${regKey}:_password`]: password,
+    [`${regKey}:_auth`]: auth,
+    [`${regKey}:certfile`]: certfile,
+    [`${regKey}:keyfile`]: keyfile,
+  } = opts
+
+  return new Auth({
+    scopeAuthKey: null,
+    regKey,
+    authKey,
+    token,
+    auth,
+    username,
+    password,
+    certfile,
+    keyfile,
+  })
+}
+
+class Auth {
+  constructor ({
+    token,
+    auth,
+    username,
+    password,
+    scopeAuthKey,
+    certfile,
+    keyfile,
+    regKey,
+    authKey,
+  }) {
+    // same as regKey but only present for scoped auth. Should have been named scopeRegKey
+    this.scopeAuthKey = scopeAuthKey
+    // `${regKey}:${authKey}` will get you back to the auth config that gave us auth
+    this.regKey = regKey
+    this.authKey = authKey
+    this.token = null
+    this.auth = null
+    this.isBasicAuth = false
+    this.cert = null
+    this.key = null
+    if (token) {
+      this.token = token
+    } else if (auth) {
+      this.auth = auth
+    } else if (username && password) {
+      const p = Buffer.from(password, 'base64').toString('utf8')
+      this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
+      this.isBasicAuth = true
+    }
+    // mTLS may be used in conjunction with another auth method above
+    if (certfile && keyfile) {
+      const cert = maybeReadFile(certfile, 'utf-8')
+      const key = maybeReadFile(keyfile, 'utf-8')
+      if (cert && key) {
+        this.cert = cert
+        this.key = key
+      }
+    }
+  }
+}
+
+module.exports = getAuth
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
new file mode 100644
index 0000000000000..2f183082ab2ce
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
@@ -0,0 +1,108 @@
+'use strict'
+
+const errors = require('./errors.js')
+const { Response } = require('minipass-fetch')
+const defaultOpts = require('./default-opts.js')
+const { log } = require('proc-log')
+const { redact: cleanUrl } = require('@npmcli/redact')
+
+/* eslint-disable-next-line max-len */
+const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
+const checkResponse =
+  async ({ method, uri, res, startTime, auth, opts }) => {
+    opts = { ...defaultOpts, ...opts }
+    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
+      log.notice('', res.headers.get('npm-notice'))
+    }
+
+    if (res.status >= 400) {
+      logRequest(method, res, startTime)
+      if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) {
+      // we didn't have auth for THIS request, but we do have auth for
+      // requests to the registry indicated by the spec's scope value.
+      // Warn the user.
+        log.warn('registry', `No auth for URI, but auth present for scoped registry.
+
+URI: ${uri}
+Scoped Registry Key: ${auth.scopeAuthKey}
+
+More info here: ${moreInfoUrl}`)
+      }
+      return checkErrors(method, res, startTime, opts)
+    } else {
+      res.body.on('end', () => logRequest(method, res, startTime, opts))
+      if (opts.ignoreBody) {
+        res.body.resume()
+        return new Response(null, res)
+      }
+      return res
+    }
+  }
+module.exports = checkResponse
+
+function logRequest (method, res, startTime) {
+  const elapsedTime = Date.now() - startTime
+  const attempt = res.headers.get('x-fetch-attempts')
+  const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
+  const cacheStatus = res.headers.get('x-local-cache-status')
+  const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
+  const urlStr = cleanUrl(res.url)
+
+  // If make-fetch-happen reports a cache hit, then there was no fetch
+  if (cacheStatus === 'hit') {
+    log.http(
+      'cache',
+      `${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+    )
+  } else {
+    log.http(
+      'fetch',
+      `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
+    )
+  }
+}
+
+function checkErrors (method, res, startTime, opts) {
+  return res.buffer()
+    .catch(() => null)
+    .then(body => {
+      let parsed = body
+      try {
+        parsed = JSON.parse(body.toString('utf8'))
+      } catch {
+        // ignore errors
+      }
+      if (res.status === 401 && res.headers.get('www-authenticate')) {
+        const auth = res.headers.get('www-authenticate')
+          .split(/,\s*/)
+          .map(s => s.toLowerCase())
+        if (auth.indexOf('ipaddress') !== -1) {
+          throw new errors.HttpErrorAuthIPAddress(
+            method, res, parsed, opts.spec
+          )
+        } else if (auth.indexOf('otp') !== -1) {
+          throw new errors.HttpErrorAuthOTP(
+            method, res, parsed, opts.spec
+          )
+        } else {
+          throw new errors.HttpErrorAuthUnknown(
+            method, res, parsed, opts.spec
+          )
+        }
+      } else if (
+        res.status === 401 &&
+        body != null &&
+        /one-time pass/.test(body.toString('utf8'))
+      ) {
+        // Heuristic for malformed OTP responses that don't include the
+        // www-authenticate header.
+        throw new errors.HttpErrorAuthOTP(
+          method, res, parsed, opts.spec
+        )
+      } else {
+        throw new errors.HttpErrorGeneral(
+          method, res, parsed, opts.spec
+        )
+      }
+    })
+}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
new file mode 100644
index 0000000000000..f0847f0b507e2
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
@@ -0,0 +1,19 @@
+const pkg = require('../package.json')
+module.exports = {
+  maxSockets: 12,
+  method: 'GET',
+  registry: 'https://registry.npmjs.org/',
+  timeout: 5 * 60 * 1000, // 5 minutes
+  strictSSL: true,
+  noProxy: process.env.NOPROXY,
+  userAgent: `${pkg.name
+    }@${
+      pkg.version
+    }/node@${
+      process.version
+    }+${
+      process.arch
+    } (${
+      process.platform
+    })`,
+}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
new file mode 100644
index 0000000000000..5bf6b012a24ef
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
@@ -0,0 +1,80 @@
+'use strict'
+
+const { URL } = require('node:url')
+
+function packageName (href) {
+  try {
+    let basePath = new URL(href).pathname.slice(1)
+    if (!basePath.match(/^-/)) {
+      basePath = basePath.split('/')
+      var index = basePath.indexOf('_rewrite')
+      if (index === -1) {
+        index = basePath.length - 1
+      } else {
+        index++
+      }
+      return decodeURIComponent(basePath[index])
+    }
+  } catch {
+    // this is ok
+  }
+}
+
+class HttpErrorBase extends Error {
+  constructor (method, res, body, spec) {
+    super()
+    this.name = this.constructor.name
+    this.headers = typeof res.headers?.raw === 'function' ? res.headers.raw() : res.headers
+    this.statusCode = res.status
+    this.code = `E${res.status}`
+    this.method = method
+    this.uri = res.url
+    this.body = body
+    this.pkgid = spec ? spec.toString() : packageName(res.url)
+    Error.captureStackTrace(this, this.constructor)
+  }
+}
+
+class HttpErrorGeneral extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = `${res.status} ${res.statusText} - ${
+      this.method.toUpperCase()
+    } ${
+      this.spec || this.uri
+    }${
+      (body && body.error) ? ' - ' + body.error : ''
+    }`
+  }
+}
+
+class HttpErrorAuthOTP extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'OTP required for authentication'
+    this.code = 'EOTP'
+  }
+}
+
+class HttpErrorAuthIPAddress extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'Login is not allowed from your IP address'
+    this.code = 'EAUTHIP'
+  }
+}
+
+class HttpErrorAuthUnknown extends HttpErrorBase {
+  constructor (method, res, body, spec) {
+    super(method, res, body, spec)
+    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
+  }
+}
+
+module.exports = {
+  HttpErrorBase,
+  HttpErrorGeneral,
+  HttpErrorAuthOTP,
+  HttpErrorAuthIPAddress,
+  HttpErrorAuthUnknown,
+}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
new file mode 100644
index 0000000000000..898c8125bfe0e
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
@@ -0,0 +1,247 @@
+'use strict'
+
+const { HttpErrorAuthOTP } = require('./errors.js')
+const checkResponse = require('./check-response.js')
+const getAuth = require('./auth.js')
+const fetch = require('make-fetch-happen')
+const JSONStream = require('./json-stream')
+const npa = require('npm-package-arg')
+const qs = require('querystring')
+const url = require('url')
+const zlib = require('minizlib')
+const { Minipass } = require('minipass')
+
+const defaultOpts = require('./default-opts.js')
+
+// WhatWG URL throws if it's not fully resolved
+const urlIsValid = u => {
+  try {
+    return !!new url.URL(u)
+  } catch (_) {
+    return false
+  }
+}
+
+module.exports = regFetch
+function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
+  const opts = {
+    ...defaultOpts,
+    ...opts_,
+  }
+
+  // if we did not get a fully qualified URI, then we look at the registry
+  // config or relevant scope to resolve it.
+  const uriValid = urlIsValid(uri)
+  let registry = opts.registry || defaultOpts.registry
+  if (!uriValid) {
+    registry = opts.registry = (
+      (opts.spec && pickRegistry(opts.spec, opts)) ||
+      opts.registry ||
+      registry
+    )
+    uri = `${
+      registry.trim().replace(/\/?$/g, '')
+    }/${
+      uri.trim().replace(/^\//, '')
+    }`
+    // asserts that this is now valid
+    new url.URL(uri)
+  }
+
+  const method = opts.method || 'GET'
+
+  // through that takes into account the scope, the prefix of `uri`, etc
+  const startTime = Date.now()
+  const auth = getAuth(uri, opts)
+  const headers = getHeaders(uri, auth, opts)
+  let body = opts.body
+  const bodyIsStream = Minipass.isStream(body)
+  const bodyIsPromise = body &&
+    typeof body === 'object' &&
+    typeof body.then === 'function'
+
+  if (
+    body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)
+  ) {
+    headers['content-type'] = headers['content-type'] || 'application/json'
+    body = JSON.stringify(body)
+  } else if (body && !headers['content-type']) {
+    headers['content-type'] = 'application/octet-stream'
+  }
+
+  if (opts.gzip) {
+    headers['content-encoding'] = 'gzip'
+    if (bodyIsStream) {
+      const gz = new zlib.Gzip()
+      body.on('error', /* istanbul ignore next: unlikely and hard to test */
+        err => gz.emit('error', err))
+      body = body.pipe(gz)
+    } else if (!bodyIsPromise) {
+      body = new zlib.Gzip().end(body).concat()
+    }
+  }
+
+  const parsed = new url.URL(uri)
+
+  if (opts.query) {
+    const q = typeof opts.query === 'string' ? qs.parse(opts.query)
+      : opts.query
+
+    Object.keys(q).forEach(key => {
+      if (q[key] !== undefined) {
+        parsed.searchParams.set(key, q[key])
+      }
+    })
+    uri = url.format(parsed)
+  }
+
+  if (parsed.searchParams.get('write') === 'true' && method === 'GET') {
+    // do not cache, because this GET is fetching a rev that will be
+    // used for a subsequent PUT or DELETE, so we need to conditionally
+    // update cache.
+    opts.offline = false
+    opts.preferOffline = false
+    opts.preferOnline = true
+  }
+
+  const doFetch = async fetchBody => {
+    const p = fetch(uri, {
+      agent: opts.agent,
+      algorithms: opts.algorithms,
+      body: fetchBody,
+      cache: getCacheMode(opts),
+      cachePath: opts.cache,
+      ca: opts.ca,
+      cert: auth.cert || opts.cert,
+      headers,
+      integrity: opts.integrity,
+      key: auth.key || opts.key,
+      localAddress: opts.localAddress,
+      maxSockets: opts.maxSockets,
+      memoize: opts.memoize,
+      method: method,
+      noProxy: opts.noProxy,
+      proxy: opts.httpsProxy || opts.proxy,
+      retry: opts.retry ? opts.retry : {
+        retries: opts.fetchRetries,
+        factor: opts.fetchRetryFactor,
+        minTimeout: opts.fetchRetryMintimeout,
+        maxTimeout: opts.fetchRetryMaxtimeout,
+      },
+      strictSSL: opts.strictSSL,
+      timeout: opts.timeout || 30 * 1000,
+    }).then(res => checkResponse({
+      method,
+      uri,
+      res,
+      registry,
+      startTime,
+      auth,
+      opts,
+    }))
+
+    if (typeof opts.otpPrompt === 'function') {
+      return p.catch(async er => {
+        if (er instanceof HttpErrorAuthOTP) {
+          let otp
+          // if otp fails to complete, we fail with that failure
+          try {
+            otp = await opts.otpPrompt()
+          } catch (_) {
+            // ignore this error
+          }
+          // if no otp provided, or otpPrompt errored, throw the original HTTP error
+          if (!otp) {
+            throw er
+          }
+          return regFetch(uri, { ...opts, otp })
+        }
+        throw er
+      })
+    } else {
+      return p
+    }
+  }
+
+  return Promise.resolve(body).then(doFetch)
+}
+
+module.exports.getAuth = getAuth
+
+module.exports.json = fetchJSON
+function fetchJSON (uri, opts) {
+  return regFetch(uri, opts).then(res => res.json())
+}
+
+module.exports.json.stream = fetchJSONStream
+function fetchJSONStream (uri, jsonPath,
+  /* istanbul ignore next */ opts_ = {}) {
+  const opts = { ...defaultOpts, ...opts_ }
+  const parser = JSONStream.parse(jsonPath, opts.mapJSON)
+  regFetch(uri, opts).then(res =>
+    res.body.on('error',
+      /* istanbul ignore next: unlikely and difficult to test */
+      er => parser.emit('error', er)).pipe(parser)
+  ).catch(er => parser.emit('error', er))
+  return parser
+}
+
+module.exports.pickRegistry = pickRegistry
+function pickRegistry (spec, opts = {}) {
+  spec = npa(spec)
+  let registry = spec.scope &&
+    opts[spec.scope.replace(/^@?/, '@') + ':registry']
+
+  if (!registry && opts.scope) {
+    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
+  }
+
+  if (!registry) {
+    registry = opts.registry || defaultOpts.registry
+  }
+
+  return registry
+}
+
+function getCacheMode (opts) {
+  return opts.offline ? 'only-if-cached'
+    : opts.preferOffline ? 'force-cache'
+    : opts.preferOnline ? 'no-cache'
+    : 'default'
+}
+
+function getHeaders (uri, auth, opts) {
+  const headers = Object.assign({
+    'user-agent': opts.userAgent,
+  }, opts.headers || {})
+
+  if (opts.authType) {
+    headers['npm-auth-type'] = opts.authType
+  }
+
+  if (opts.scope) {
+    headers['npm-scope'] = opts.scope
+  }
+
+  if (opts.npmSession) {
+    headers['npm-session'] = opts.npmSession
+  }
+
+  if (opts.npmCommand) {
+    headers['npm-command'] = opts.npmCommand
+  }
+
+  // If a tarball is hosted on a different place than the manifest, only send
+  // credentials on `alwaysAuth`
+  if (auth.token) {
+    headers.authorization = `Bearer ${auth.token}`
+  } else if (auth.auth) {
+    headers.authorization = `Basic ${auth.auth}`
+  }
+
+  if (opts.otp) {
+    headers['npm-otp'] = opts.otp
+  }
+
+  return headers
+}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js
new file mode 100644
index 0000000000000..36b05ad4a20b9
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js
@@ -0,0 +1,223 @@
+const Parser = require('jsonparse')
+const { Minipass } = require('minipass')
+
+class JSONStreamError extends Error {
+  constructor (err, caller) {
+    super(err.message)
+    Error.captureStackTrace(this, caller || this.constructor)
+  }
+
+  get name () {
+    return 'JSONStreamError'
+  }
+}
+
+const check = (x, y) =>
+  typeof x === 'string' ? String(y) === x
+  : x && typeof x.test === 'function' ? x.test(y)
+  : typeof x === 'boolean' || typeof x === 'object' ? x
+  : typeof x === 'function' ? x(y)
+  : false
+
+class JSONStream extends Minipass {
+  #count = 0
+  #ending = false
+  #footer = null
+  #header = null
+  #map = null
+  #onTokenOriginal
+  #parser
+  #path = null
+  #root = null
+
+  constructor (opts) {
+    super({
+      ...opts,
+      objectMode: true,
+    })
+
+    const parser = this.#parser = new Parser()
+    parser.onValue = value => this.#onValue(value)
+    this.#onTokenOriginal = parser.onToken
+    parser.onToken = (token, value) => this.#onToken(token, value)
+    parser.onError = er => this.#onError(er)
+
+    this.#path = typeof opts.path === 'string'
+      ? opts.path.split('.').map(e =>
+        e === '$*' ? { emitKey: true }
+        : e === '*' ? true
+        : e === '' ? { recurse: true }
+        : e)
+      : Array.isArray(opts.path) && opts.path.length ? opts.path
+      : null
+
+    if (typeof opts.map === 'function') {
+      this.#map = opts.map
+    }
+  }
+
+  #setHeaderFooter (key, value) {
+    // header has not been emitted yet
+    if (this.#header !== false) {
+      this.#header = this.#header || {}
+      this.#header[key] = value
+    }
+
+    // footer has not been emitted yet but header has
+    if (this.#footer !== false && this.#header === false) {
+      this.#footer = this.#footer || {}
+      this.#footer[key] = value
+    }
+  }
+
+  #onError (er) {
+    // error will always happen during a write() call.
+    const caller = this.#ending ? this.end : this.write
+    this.#ending = false
+    return this.emit('error', new JSONStreamError(er, caller))
+  }
+
+  #onToken (token, value) {
+    const parser = this.#parser
+    this.#onTokenOriginal.call(this.#parser, token, value)
+    if (parser.stack.length === 0) {
+      if (this.#root) {
+        const root = this.#root
+        if (!this.#path) {
+          super.write(root)
+        }
+        this.#root = null
+        this.#count = 0
+      }
+    }
+  }
+
+  #onValue (value) {
+    const parser = this.#parser
+    // the LAST onValue encountered is the root object.
+    // just overwrite it each time.
+    this.#root = value
+
+    if (!this.#path) {
+      return
+    }
+
+    let i = 0 // iterates on path
+    let j = 0 // iterates on stack
+    let emitKey = false
+    while (i < this.#path.length) {
+      const key = this.#path[i]
+      j++
+
+      if (key && !key.recurse) {
+        const c = (j === parser.stack.length) ? parser : parser.stack[j]
+        if (!c) {
+          return
+        }
+        if (!check(key, c.key)) {
+          this.#setHeaderFooter(c.key, value)
+          return
+        }
+        emitKey = !!key.emitKey
+        i++
+      } else {
+        i++
+        if (i >= this.#path.length) {
+          return
+        }
+        const nextKey = this.#path[i]
+        if (!nextKey) {
+          return
+        }
+        while (true) {
+          const c = (j === parser.stack.length) ? parser : parser.stack[j]
+          if (!c) {
+            return
+          }
+          if (check(nextKey, c.key)) {
+            i++
+            if (!Object.isFrozen(parser.stack[j])) {
+              parser.stack[j].value = null
+            }
+            break
+          } else {
+            this.#setHeaderFooter(c.key, value)
+          }
+          j++
+        }
+      }
+    }
+
+    // emit header
+    if (this.#header) {
+      const header = this.#header
+      this.#header = false
+      this.emit('header', header)
+    }
+    if (j !== parser.stack.length) {
+      return
+    }
+
+    this.#count++
+    const actualPath = parser.stack.slice(1)
+      .map(e => e.key).concat([parser.key])
+    if (value !== null && value !== undefined) {
+      const data = this.#map ? this.#map(value, actualPath) : value
+      if (data !== null && data !== undefined) {
+        const emit = emitKey ? { value: data } : data
+        if (emitKey) {
+          emit.key = parser.key
+        }
+        super.write(emit)
+      }
+    }
+
+    if (parser.value) {
+      delete parser.value[parser.key]
+    }
+
+    for (const k of parser.stack) {
+      k.value = null
+    }
+  }
+
+  write (chunk, encoding) {
+    if (typeof chunk === 'string') {
+      chunk = Buffer.from(chunk, encoding)
+    } else if (!Buffer.isBuffer(chunk)) {
+      return this.emit('error', new TypeError(
+        'Can only parse JSON from string or buffer input'))
+    }
+    this.#parser.write(chunk)
+    return this.flowing
+  }
+
+  end (chunk, encoding) {
+    this.#ending = true
+    if (chunk) {
+      this.write(chunk, encoding)
+    }
+
+    const h = this.#header
+    this.#header = null
+    const f = this.#footer
+    this.#footer = null
+    if (h) {
+      this.emit('header', h)
+    }
+    if (f) {
+      this.emit('footer', f)
+    }
+    return super.end()
+  }
+
+  static get JSONStreamError () {
+    return JSONStreamError
+  }
+
+  static parse (path, map) {
+    return new JSONStream({ path, map })
+  }
+}
+
+module.exports = JSONStream
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
new file mode 100644
index 0000000000000..a8e954cdf3c14
--- /dev/null
+++ b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
@@ -0,0 +1,68 @@
+{
+  "name": "npm-registry-fetch",
+  "version": "19.0.0",
+  "description": "Fetch-based http client for use with npm registry APIs",
+  "main": "lib",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "test": "tap",
+    "posttest": "npm run lint",
+    "npmclilint": "npmcli-lint",
+    "postsnap": "npm run lintfix --",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/npm-registry-fetch.git"
+  },
+  "keywords": [
+    "npm",
+    "registry",
+    "fetch"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/redact": "^3.0.0",
+    "jsonparse": "^1.3.1",
+    "make-fetch-happen": "^15.0.0",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^4.0.0",
+    "minizlib": "^3.0.1",
+    "npm-package-arg": "^13.0.0",
+    "proc-log": "^5.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "cacache": "^20.0.0",
+    "nock": "^13.2.4",
+    "require-inject": "^1.4.4",
+    "ssri": "^12.0.0",
+    "tap": "^16.0.1"
+  },
+  "tap": {
+    "check-coverage": true,
+    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/npm-profile/package.json b/node_modules/npm-profile/package.json
index 72a19a08231e2..fb4ce118c9cf2 100644
--- a/node_modules/npm-profile/package.json
+++ b/node_modules/npm-profile/package.json
@@ -1,12 +1,12 @@
 {
   "name": "npm-profile",
-  "version": "11.0.1",
+  "version": "12.0.0",
   "description": "Library for updating an npmjs.com profile",
   "keywords": [],
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "npm-registry-fetch": "^18.0.0",
+    "npm-registry-fetch": "^19.0.0",
     "proc-log": "^5.0.0"
   },
   "main": "./lib/index.js",
@@ -20,8 +20,8 @@
   ],
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
-    "nock": "^13.2.4",
+    "@npmcli/template-oss": "4.25.0",
+    "nock": "^13.5.6",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -42,11 +42,11 @@
     ]
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.25.0",
     "publish": true
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 26d1e8b77df0c..4dd37bc2b6a9c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -131,7 +131,7 @@
         "npm-install-checks": "^7.1.1",
         "npm-package-arg": "^12.0.2",
         "npm-pick-manifest": "^10.0.0",
-        "npm-profile": "^11.0.1",
+        "npm-profile": "^12.0.0",
         "npm-registry-fetch": "^18.0.2",
         "npm-user-validate": "^3.0.0",
         "p-map": "^7.0.3",
@@ -12871,17 +12871,122 @@
       }
     },
     "node_modules/npm-profile": {
-      "version": "11.0.1",
-      "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-11.0.1.tgz",
-      "integrity": "sha512-HP5Cw9WHwFS9vb4fxVlkNAQBUhVL5BmW6rAR+/JWkpwqcFJid7TihKUdYDWqHl0NDfLd0mpucheGySqo8ysyfw==",
+      "version": "12.0.0",
+      "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-12.0.0.tgz",
+      "integrity": "sha512-ZrtDFhNpLCcH7b7kQIpegK4Bt66DpkHojcWdm41/qie+i9dYg2Mc+BenwHVnfjNnw8/bpYuBj8wf+6iI4GoF+g==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "npm-registry-fetch": "^18.0.0",
+        "npm-registry-fetch": "^19.0.0",
         "proc-log": "^5.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-profile/node_modules/hosted-git-info": {
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
+      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^11.1.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-profile/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
+    "node_modules/npm-profile/node_modules/make-fetch-happen": {
+      "version": "15.0.1",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
+      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/agent": "^3.0.0",
+        "cacache": "^20.0.1",
+        "http-cache-semantics": "^4.1.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^1.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "ssri": "^12.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-profile/node_modules/minizlib": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
+      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "minipass": "^7.1.2"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/npm-profile/node_modules/negotiator": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
+    "node_modules/npm-profile/node_modules/npm-package-arg": {
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
+      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^9.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-profile/node_modules/npm-registry-fetch": {
+      "version": "19.0.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.0.0.tgz",
+      "integrity": "sha512-DFxSAemHUwT/POaXAOY4NJmEWBPB0oKbwD6FFDE9hnt1nORkt/FXvgjD4hQjoKoHw9u0Ezws9SPXwV7xE/Gyww==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/redact": "^3.0.0",
+        "jsonparse": "^1.3.1",
+        "make-fetch-happen": "^15.0.0",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minizlib": "^3.0.1",
+        "npm-package-arg": "^13.0.0",
+        "proc-log": "^5.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-registry-fetch": {
diff --git a/package.json b/package.json
index e42eafe859b1f..1b9848de599b1 100644
--- a/package.json
+++ b/package.json
@@ -98,7 +98,7 @@
     "npm-install-checks": "^7.1.1",
     "npm-package-arg": "^12.0.2",
     "npm-pick-manifest": "^10.0.0",
-    "npm-profile": "^11.0.1",
+    "npm-profile": "^12.0.0",
     "npm-registry-fetch": "^18.0.2",
     "npm-user-validate": "^3.0.0",
     "p-map": "^7.0.3",

From 11499711e4c10e4ddb97bf3e1ef1652d151894fb Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:40:53 -0700
Subject: [PATCH 13/63] deps: npm-registry-fetch@19.0.0

---
 node_modules/.gitignore                       |  14 +-
 .../npm-profile/node_modules/minizlib/LICENSE |  26 --
 .../minizlib/dist/commonjs/constants.js       | 123 ------
 .../minizlib/dist/commonjs/index.js           | 392 ------------------
 .../minizlib/dist/commonjs/package.json       |   3 -
 .../minizlib/dist/esm/constants.js            | 117 ------
 .../node_modules/minizlib/dist/esm/index.js   | 340 ---------------
 .../minizlib/dist/esm/package.json            |   3 -
 .../node_modules/minizlib/package.json        |  80 ----
 .../npm-registry-fetch/LICENSE.md             |  20 -
 .../npm-registry-fetch/lib/auth.js            | 181 --------
 .../npm-registry-fetch/lib/check-response.js  | 108 -----
 .../npm-registry-fetch/lib/default-opts.js    |  19 -
 .../npm-registry-fetch/lib/errors.js          |  80 ----
 .../npm-registry-fetch/lib/index.js           | 247 -----------
 .../npm-registry-fetch/lib/json-stream.js     | 223 ----------
 .../npm-registry-fetch/package.json           |  68 ---
 .../node_modules/hosted-git-info/LICENSE      |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   0
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/hosted-git-info/package.json |   0
 .../node_modules/lru-cache/LICENSE            |   0
 .../lru-cache/dist/commonjs/index.js          |   0
 .../lru-cache/dist/commonjs/index.min.js      |   0
 .../lru-cache/dist/commonjs/package.json      |   0
 .../node_modules/lru-cache/dist/esm/index.js  |   0
 .../lru-cache/dist/esm/index.min.js           |   0
 .../lru-cache/dist/esm/package.json           |   0
 .../node_modules/lru-cache/package.json       |   0
 .../node_modules/make-fetch-happen/LICENSE    |   0
 .../make-fetch-happen/lib/cache/entry.js      |   0
 .../make-fetch-happen/lib/cache/errors.js     |   0
 .../make-fetch-happen/lib/cache/index.js      |   0
 .../make-fetch-happen/lib/cache/key.js        |   0
 .../make-fetch-happen/lib/cache/policy.js     |   0
 .../make-fetch-happen/lib/fetch.js            |   0
 .../make-fetch-happen/lib/index.js            |   0
 .../make-fetch-happen/lib/options.js          |   0
 .../make-fetch-happen/lib/pipeline.js         |   0
 .../make-fetch-happen/lib/remote.js           |   0
 .../make-fetch-happen/package.json            |   0
 .../node_modules/negotiator/HISTORY.md        |   0
 .../node_modules/negotiator/LICENSE           |   0
 .../node_modules/negotiator/index.js          |   0
 .../node_modules/negotiator/lib/charset.js    |   0
 .../node_modules/negotiator/lib/encoding.js   |   0
 .../node_modules/negotiator/lib/language.js   |   0
 .../node_modules/negotiator/lib/mediaType.js  |   0
 .../node_modules/negotiator/package.json      |   0
 .../node_modules/npm-package-arg/LICENSE      |   0
 .../node_modules/npm-package-arg/lib/npa.js   |   0
 .../node_modules/npm-package-arg/package.json |   0
 node_modules/npm-registry-fetch/package.json  |  14 +-
 .../npm-registry-fetch/LICENSE.md             |  20 -
 .../npm-registry-fetch/lib/auth.js            | 181 --------
 .../npm-registry-fetch/lib/check-response.js  | 108 -----
 .../npm-registry-fetch/lib/default-opts.js    |  19 -
 .../npm-registry-fetch/lib/errors.js          |  80 ----
 .../npm-registry-fetch/lib/index.js           | 247 -----------
 .../npm-registry-fetch/lib/json-stream.js     | 223 ----------
 .../npm-registry-fetch/package.json           |  68 ---
 package-lock.json                             | 119 ++----
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmaccess/package.json          |   2 +-
 workspaces/libnpmorg/package.json             |   2 +-
 workspaces/libnpmpublish/package.json         |   2 +-
 workspaces/libnpmsearch/package.json          |   2 +-
 workspaces/libnpmteam/package.json            |   2 +-
 71 files changed, 52 insertions(+), 3085 deletions(-)
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/minizlib/package.json
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js
 delete mode 100644 node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/hosted-git-info/package.json (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/dist/commonjs/index.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/dist/commonjs/index.min.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/dist/commonjs/package.json (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/dist/esm/index.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/dist/esm/index.min.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/dist/esm/package.json (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/lru-cache/package.json (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/LICENSE (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/cache/entry.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/cache/errors.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/cache/index.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/cache/key.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/cache/policy.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/fetch.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/index.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/options.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/pipeline.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/lib/remote.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/make-fetch-happen/package.json (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/HISTORY.md (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/LICENSE (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/index.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/lib/charset.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/lib/encoding.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/lib/language.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/lib/mediaType.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/negotiator/package.json (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/npm-package-arg/LICENSE (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/npm-package-arg/lib/npa.js (100%)
 rename node_modules/{npm-profile => npm-registry-fetch}/node_modules/npm-package-arg/package.json (100%)
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js
 delete mode 100644 node_modules/pacote/node_modules/npm-registry-fetch/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 8d6961c785a5c..c843e97b50bc2 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -195,19 +195,15 @@
 !/npm-packlist/node_modules/minimatch
 !/npm-pick-manifest
 !/npm-profile
-!/npm-profile/node_modules/
-/npm-profile/node_modules/*
-!/npm-profile/node_modules/hosted-git-info
-!/npm-profile/node_modules/lru-cache
-!/npm-profile/node_modules/make-fetch-happen
-!/npm-profile/node_modules/minizlib
-!/npm-profile/node_modules/negotiator
-!/npm-profile/node_modules/npm-package-arg
-!/npm-profile/node_modules/npm-registry-fetch
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
 /npm-registry-fetch/node_modules/*
+!/npm-registry-fetch/node_modules/hosted-git-info
+!/npm-registry-fetch/node_modules/lru-cache
+!/npm-registry-fetch/node_modules/make-fetch-happen
 !/npm-registry-fetch/node_modules/minizlib
+!/npm-registry-fetch/node_modules/negotiator
+!/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
 !/p-map
 !/package-json-from-dist
diff --git a/node_modules/npm-profile/node_modules/minizlib/LICENSE b/node_modules/npm-profile/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc9..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index b4906d2783372..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,392 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const realZlib = __importStar(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            (0, assert_1.default)(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = buffer_1.Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        (0, assert_1.default)(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
-            (0, assert_1.default)(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json b/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js b/node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js b/node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index f33586a8ab0ec..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,340 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import * as realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            assert(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        assert(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-export class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
-        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants.Z_SYNC_FLUSH);
-            assert(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-export class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-export class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-export class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-export class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-export class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-export class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-export class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json b/node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/npm-profile/node_modules/minizlib/package.json b/node_modules/npm-profile/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/npm-profile/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
deleted file mode 100644
index 5fc208ff122e0..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/LICENSE.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-ISC License
-
-Copyright npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
deleted file mode 100644
index 9270025fa8d90..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/auth.js
+++ /dev/null
@@ -1,181 +0,0 @@
-'use strict'
-const fs = require('fs')
-const npa = require('npm-package-arg')
-const { URL } = require('url')
-
-// Find the longest registry key that is used for some kind of auth
-// in the options.  Returns the registry key and the auth config.
-const regFromURI = (uri, opts) => {
-  const parsed = new URL(uri)
-  // try to find a config key indicating we have auth for this registry
-  // can be one of :_authToken, :_auth, :_password and :username, or
-  // :certfile and :keyfile
-  // We walk up the "path" until we're left with just //<host>[:<port>],
-  // stopping when we reach '//'.
-  let regKey = `//${parsed.host}${parsed.pathname}`
-  while (regKey.length > '//'.length) {
-    const authKey = hasAuth(regKey, opts)
-    // got some auth for this URI
-    if (authKey) {
-      return { regKey, authKey }
-    }
-
-    // can be either //host/some/path/:_auth or //host/some/path:_auth
-    // walk up by removing EITHER what's after the slash OR the slash itself
-    regKey = regKey.replace(/([^/]+|\/)$/, '')
-  }
-  return { regKey: false, authKey: null }
-}
-
-// Not only do we want to know if there is auth, but if we are calling `npm
-// logout` we want to know what config value specifically provided it.  This is
-// so we can look up where the config came from to delete it (i.e. user vs
-// project)
-const hasAuth = (regKey, opts) => {
-  if (opts[`${regKey}:_authToken`]) {
-    return '_authToken'
-  }
-  if (opts[`${regKey}:_auth`]) {
-    return '_auth'
-  }
-  if (opts[`${regKey}:username`] && opts[`${regKey}:_password`]) {
-    // 'password' can be inferred to also be present
-    return 'username'
-  }
-  if (opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`]) {
-    // 'keyfile' can be inferred to also be present
-    return 'certfile'
-  }
-  return false
-}
-
-const sameHost = (a, b) => {
-  const parsedA = new URL(a)
-  const parsedB = new URL(b)
-  return parsedA.host === parsedB.host
-}
-
-const getRegistry = opts => {
-  const { spec } = opts
-  const { scope: specScope, subSpec } = spec ? npa(spec) : {}
-  const subSpecScope = subSpec && subSpec.scope
-  const scope = subSpec ? subSpecScope : specScope
-  const scopeReg = scope && opts[`${scope}:registry`]
-  return scopeReg || opts.registry
-}
-
-const maybeReadFile = file => {
-  try {
-    return fs.readFileSync(file, 'utf8')
-  } catch (er) {
-    if (er.code !== 'ENOENT') {
-      throw er
-    }
-    return null
-  }
-}
-
-const getAuth = (uri, opts = {}) => {
-  const { forceAuth } = opts
-  if (!uri) {
-    throw new Error('URI is required')
-  }
-  const { regKey, authKey } = regFromURI(uri, forceAuth || opts)
-
-  // we are only allowed to use what's in forceAuth if specified
-  if (forceAuth && !regKey) {
-    return new Auth({
-      // if we force auth we don't want to refer back to anything in config
-      regKey: false,
-      authKey: null,
-      scopeAuthKey: null,
-      token: forceAuth._authToken || forceAuth.token,
-      username: forceAuth.username,
-      password: forceAuth._password || forceAuth.password,
-      auth: forceAuth._auth || forceAuth.auth,
-      certfile: forceAuth.certfile,
-      keyfile: forceAuth.keyfile,
-    })
-  }
-
-  // no auth for this URI, but might have it for the registry
-  if (!regKey) {
-    const registry = getRegistry(opts)
-    if (registry && uri !== registry && sameHost(uri, registry)) {
-      return getAuth(registry, opts)
-    } else if (registry !== opts.registry) {
-      // If making a tarball request to a different base URI than the
-      // registry where we logged in, but the same auth SHOULD be sent
-      // to that artifact host, then we track where it was coming in from,
-      // and warn the user if we get a 4xx error on it.
-      const { regKey: scopeAuthKey, authKey: _authKey } = regFromURI(registry, opts)
-      return new Auth({ scopeAuthKey, regKey: scopeAuthKey, authKey: _authKey })
-    }
-  }
-
-  const {
-    [`${regKey}:_authToken`]: token,
-    [`${regKey}:username`]: username,
-    [`${regKey}:_password`]: password,
-    [`${regKey}:_auth`]: auth,
-    [`${regKey}:certfile`]: certfile,
-    [`${regKey}:keyfile`]: keyfile,
-  } = opts
-
-  return new Auth({
-    scopeAuthKey: null,
-    regKey,
-    authKey,
-    token,
-    auth,
-    username,
-    password,
-    certfile,
-    keyfile,
-  })
-}
-
-class Auth {
-  constructor ({
-    token,
-    auth,
-    username,
-    password,
-    scopeAuthKey,
-    certfile,
-    keyfile,
-    regKey,
-    authKey,
-  }) {
-    // same as regKey but only present for scoped auth. Should have been named scopeRegKey
-    this.scopeAuthKey = scopeAuthKey
-    // `${regKey}:${authKey}` will get you back to the auth config that gave us auth
-    this.regKey = regKey
-    this.authKey = authKey
-    this.token = null
-    this.auth = null
-    this.isBasicAuth = false
-    this.cert = null
-    this.key = null
-    if (token) {
-      this.token = token
-    } else if (auth) {
-      this.auth = auth
-    } else if (username && password) {
-      const p = Buffer.from(password, 'base64').toString('utf8')
-      this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
-      this.isBasicAuth = true
-    }
-    // mTLS may be used in conjunction with another auth method above
-    if (certfile && keyfile) {
-      const cert = maybeReadFile(certfile, 'utf-8')
-      const key = maybeReadFile(keyfile, 'utf-8')
-      if (cert && key) {
-        this.cert = cert
-        this.key = key
-      }
-    }
-  }
-}
-
-module.exports = getAuth
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
deleted file mode 100644
index 2f183082ab2ce..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/check-response.js
+++ /dev/null
@@ -1,108 +0,0 @@
-'use strict'
-
-const errors = require('./errors.js')
-const { Response } = require('minipass-fetch')
-const defaultOpts = require('./default-opts.js')
-const { log } = require('proc-log')
-const { redact: cleanUrl } = require('@npmcli/redact')
-
-/* eslint-disable-next-line max-len */
-const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
-const checkResponse =
-  async ({ method, uri, res, startTime, auth, opts }) => {
-    opts = { ...defaultOpts, ...opts }
-    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
-      log.notice('', res.headers.get('npm-notice'))
-    }
-
-    if (res.status >= 400) {
-      logRequest(method, res, startTime)
-      if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) {
-      // we didn't have auth for THIS request, but we do have auth for
-      // requests to the registry indicated by the spec's scope value.
-      // Warn the user.
-        log.warn('registry', `No auth for URI, but auth present for scoped registry.
-
-URI: ${uri}
-Scoped Registry Key: ${auth.scopeAuthKey}
-
-More info here: ${moreInfoUrl}`)
-      }
-      return checkErrors(method, res, startTime, opts)
-    } else {
-      res.body.on('end', () => logRequest(method, res, startTime, opts))
-      if (opts.ignoreBody) {
-        res.body.resume()
-        return new Response(null, res)
-      }
-      return res
-    }
-  }
-module.exports = checkResponse
-
-function logRequest (method, res, startTime) {
-  const elapsedTime = Date.now() - startTime
-  const attempt = res.headers.get('x-fetch-attempts')
-  const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
-  const cacheStatus = res.headers.get('x-local-cache-status')
-  const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
-  const urlStr = cleanUrl(res.url)
-
-  // If make-fetch-happen reports a cache hit, then there was no fetch
-  if (cacheStatus === 'hit') {
-    log.http(
-      'cache',
-      `${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
-    )
-  } else {
-    log.http(
-      'fetch',
-      `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
-    )
-  }
-}
-
-function checkErrors (method, res, startTime, opts) {
-  return res.buffer()
-    .catch(() => null)
-    .then(body => {
-      let parsed = body
-      try {
-        parsed = JSON.parse(body.toString('utf8'))
-      } catch {
-        // ignore errors
-      }
-      if (res.status === 401 && res.headers.get('www-authenticate')) {
-        const auth = res.headers.get('www-authenticate')
-          .split(/,\s*/)
-          .map(s => s.toLowerCase())
-        if (auth.indexOf('ipaddress') !== -1) {
-          throw new errors.HttpErrorAuthIPAddress(
-            method, res, parsed, opts.spec
-          )
-        } else if (auth.indexOf('otp') !== -1) {
-          throw new errors.HttpErrorAuthOTP(
-            method, res, parsed, opts.spec
-          )
-        } else {
-          throw new errors.HttpErrorAuthUnknown(
-            method, res, parsed, opts.spec
-          )
-        }
-      } else if (
-        res.status === 401 &&
-        body != null &&
-        /one-time pass/.test(body.toString('utf8'))
-      ) {
-        // Heuristic for malformed OTP responses that don't include the
-        // www-authenticate header.
-        throw new errors.HttpErrorAuthOTP(
-          method, res, parsed, opts.spec
-        )
-      } else {
-        throw new errors.HttpErrorGeneral(
-          method, res, parsed, opts.spec
-        )
-      }
-    })
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
deleted file mode 100644
index f0847f0b507e2..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/default-opts.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const pkg = require('../package.json')
-module.exports = {
-  maxSockets: 12,
-  method: 'GET',
-  registry: 'https://registry.npmjs.org/',
-  timeout: 5 * 60 * 1000, // 5 minutes
-  strictSSL: true,
-  noProxy: process.env.NOPROXY,
-  userAgent: `${pkg.name
-    }@${
-      pkg.version
-    }/node@${
-      process.version
-    }+${
-      process.arch
-    } (${
-      process.platform
-    })`,
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
deleted file mode 100644
index 5bf6b012a24ef..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/errors.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const { URL } = require('node:url')
-
-function packageName (href) {
-  try {
-    let basePath = new URL(href).pathname.slice(1)
-    if (!basePath.match(/^-/)) {
-      basePath = basePath.split('/')
-      var index = basePath.indexOf('_rewrite')
-      if (index === -1) {
-        index = basePath.length - 1
-      } else {
-        index++
-      }
-      return decodeURIComponent(basePath[index])
-    }
-  } catch {
-    // this is ok
-  }
-}
-
-class HttpErrorBase extends Error {
-  constructor (method, res, body, spec) {
-    super()
-    this.name = this.constructor.name
-    this.headers = typeof res.headers?.raw === 'function' ? res.headers.raw() : res.headers
-    this.statusCode = res.status
-    this.code = `E${res.status}`
-    this.method = method
-    this.uri = res.url
-    this.body = body
-    this.pkgid = spec ? spec.toString() : packageName(res.url)
-    Error.captureStackTrace(this, this.constructor)
-  }
-}
-
-class HttpErrorGeneral extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = `${res.status} ${res.statusText} - ${
-      this.method.toUpperCase()
-    } ${
-      this.spec || this.uri
-    }${
-      (body && body.error) ? ' - ' + body.error : ''
-    }`
-  }
-}
-
-class HttpErrorAuthOTP extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'OTP required for authentication'
-    this.code = 'EOTP'
-  }
-}
-
-class HttpErrorAuthIPAddress extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'Login is not allowed from your IP address'
-    this.code = 'EAUTHIP'
-  }
-}
-
-class HttpErrorAuthUnknown extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
-  }
-}
-
-module.exports = {
-  HttpErrorBase,
-  HttpErrorGeneral,
-  HttpErrorAuthOTP,
-  HttpErrorAuthIPAddress,
-  HttpErrorAuthUnknown,
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
deleted file mode 100644
index 898c8125bfe0e..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/index.js
+++ /dev/null
@@ -1,247 +0,0 @@
-'use strict'
-
-const { HttpErrorAuthOTP } = require('./errors.js')
-const checkResponse = require('./check-response.js')
-const getAuth = require('./auth.js')
-const fetch = require('make-fetch-happen')
-const JSONStream = require('./json-stream')
-const npa = require('npm-package-arg')
-const qs = require('querystring')
-const url = require('url')
-const zlib = require('minizlib')
-const { Minipass } = require('minipass')
-
-const defaultOpts = require('./default-opts.js')
-
-// WhatWG URL throws if it's not fully resolved
-const urlIsValid = u => {
-  try {
-    return !!new url.URL(u)
-  } catch (_) {
-    return false
-  }
-}
-
-module.exports = regFetch
-function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
-  const opts = {
-    ...defaultOpts,
-    ...opts_,
-  }
-
-  // if we did not get a fully qualified URI, then we look at the registry
-  // config or relevant scope to resolve it.
-  const uriValid = urlIsValid(uri)
-  let registry = opts.registry || defaultOpts.registry
-  if (!uriValid) {
-    registry = opts.registry = (
-      (opts.spec && pickRegistry(opts.spec, opts)) ||
-      opts.registry ||
-      registry
-    )
-    uri = `${
-      registry.trim().replace(/\/?$/g, '')
-    }/${
-      uri.trim().replace(/^\//, '')
-    }`
-    // asserts that this is now valid
-    new url.URL(uri)
-  }
-
-  const method = opts.method || 'GET'
-
-  // through that takes into account the scope, the prefix of `uri`, etc
-  const startTime = Date.now()
-  const auth = getAuth(uri, opts)
-  const headers = getHeaders(uri, auth, opts)
-  let body = opts.body
-  const bodyIsStream = Minipass.isStream(body)
-  const bodyIsPromise = body &&
-    typeof body === 'object' &&
-    typeof body.then === 'function'
-
-  if (
-    body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)
-  ) {
-    headers['content-type'] = headers['content-type'] || 'application/json'
-    body = JSON.stringify(body)
-  } else if (body && !headers['content-type']) {
-    headers['content-type'] = 'application/octet-stream'
-  }
-
-  if (opts.gzip) {
-    headers['content-encoding'] = 'gzip'
-    if (bodyIsStream) {
-      const gz = new zlib.Gzip()
-      body.on('error', /* istanbul ignore next: unlikely and hard to test */
-        err => gz.emit('error', err))
-      body = body.pipe(gz)
-    } else if (!bodyIsPromise) {
-      body = new zlib.Gzip().end(body).concat()
-    }
-  }
-
-  const parsed = new url.URL(uri)
-
-  if (opts.query) {
-    const q = typeof opts.query === 'string' ? qs.parse(opts.query)
-      : opts.query
-
-    Object.keys(q).forEach(key => {
-      if (q[key] !== undefined) {
-        parsed.searchParams.set(key, q[key])
-      }
-    })
-    uri = url.format(parsed)
-  }
-
-  if (parsed.searchParams.get('write') === 'true' && method === 'GET') {
-    // do not cache, because this GET is fetching a rev that will be
-    // used for a subsequent PUT or DELETE, so we need to conditionally
-    // update cache.
-    opts.offline = false
-    opts.preferOffline = false
-    opts.preferOnline = true
-  }
-
-  const doFetch = async fetchBody => {
-    const p = fetch(uri, {
-      agent: opts.agent,
-      algorithms: opts.algorithms,
-      body: fetchBody,
-      cache: getCacheMode(opts),
-      cachePath: opts.cache,
-      ca: opts.ca,
-      cert: auth.cert || opts.cert,
-      headers,
-      integrity: opts.integrity,
-      key: auth.key || opts.key,
-      localAddress: opts.localAddress,
-      maxSockets: opts.maxSockets,
-      memoize: opts.memoize,
-      method: method,
-      noProxy: opts.noProxy,
-      proxy: opts.httpsProxy || opts.proxy,
-      retry: opts.retry ? opts.retry : {
-        retries: opts.fetchRetries,
-        factor: opts.fetchRetryFactor,
-        minTimeout: opts.fetchRetryMintimeout,
-        maxTimeout: opts.fetchRetryMaxtimeout,
-      },
-      strictSSL: opts.strictSSL,
-      timeout: opts.timeout || 30 * 1000,
-    }).then(res => checkResponse({
-      method,
-      uri,
-      res,
-      registry,
-      startTime,
-      auth,
-      opts,
-    }))
-
-    if (typeof opts.otpPrompt === 'function') {
-      return p.catch(async er => {
-        if (er instanceof HttpErrorAuthOTP) {
-          let otp
-          // if otp fails to complete, we fail with that failure
-          try {
-            otp = await opts.otpPrompt()
-          } catch (_) {
-            // ignore this error
-          }
-          // if no otp provided, or otpPrompt errored, throw the original HTTP error
-          if (!otp) {
-            throw er
-          }
-          return regFetch(uri, { ...opts, otp })
-        }
-        throw er
-      })
-    } else {
-      return p
-    }
-  }
-
-  return Promise.resolve(body).then(doFetch)
-}
-
-module.exports.getAuth = getAuth
-
-module.exports.json = fetchJSON
-function fetchJSON (uri, opts) {
-  return regFetch(uri, opts).then(res => res.json())
-}
-
-module.exports.json.stream = fetchJSONStream
-function fetchJSONStream (uri, jsonPath,
-  /* istanbul ignore next */ opts_ = {}) {
-  const opts = { ...defaultOpts, ...opts_ }
-  const parser = JSONStream.parse(jsonPath, opts.mapJSON)
-  regFetch(uri, opts).then(res =>
-    res.body.on('error',
-      /* istanbul ignore next: unlikely and difficult to test */
-      er => parser.emit('error', er)).pipe(parser)
-  ).catch(er => parser.emit('error', er))
-  return parser
-}
-
-module.exports.pickRegistry = pickRegistry
-function pickRegistry (spec, opts = {}) {
-  spec = npa(spec)
-  let registry = spec.scope &&
-    opts[spec.scope.replace(/^@?/, '@') + ':registry']
-
-  if (!registry && opts.scope) {
-    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
-  }
-
-  if (!registry) {
-    registry = opts.registry || defaultOpts.registry
-  }
-
-  return registry
-}
-
-function getCacheMode (opts) {
-  return opts.offline ? 'only-if-cached'
-    : opts.preferOffline ? 'force-cache'
-    : opts.preferOnline ? 'no-cache'
-    : 'default'
-}
-
-function getHeaders (uri, auth, opts) {
-  const headers = Object.assign({
-    'user-agent': opts.userAgent,
-  }, opts.headers || {})
-
-  if (opts.authType) {
-    headers['npm-auth-type'] = opts.authType
-  }
-
-  if (opts.scope) {
-    headers['npm-scope'] = opts.scope
-  }
-
-  if (opts.npmSession) {
-    headers['npm-session'] = opts.npmSession
-  }
-
-  if (opts.npmCommand) {
-    headers['npm-command'] = opts.npmCommand
-  }
-
-  // If a tarball is hosted on a different place than the manifest, only send
-  // credentials on `alwaysAuth`
-  if (auth.token) {
-    headers.authorization = `Bearer ${auth.token}`
-  } else if (auth.auth) {
-    headers.authorization = `Basic ${auth.auth}`
-  }
-
-  if (opts.otp) {
-    headers['npm-otp'] = opts.otp
-  }
-
-  return headers
-}
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js b/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js
deleted file mode 100644
index 36b05ad4a20b9..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/lib/json-stream.js
+++ /dev/null
@@ -1,223 +0,0 @@
-const Parser = require('jsonparse')
-const { Minipass } = require('minipass')
-
-class JSONStreamError extends Error {
-  constructor (err, caller) {
-    super(err.message)
-    Error.captureStackTrace(this, caller || this.constructor)
-  }
-
-  get name () {
-    return 'JSONStreamError'
-  }
-}
-
-const check = (x, y) =>
-  typeof x === 'string' ? String(y) === x
-  : x && typeof x.test === 'function' ? x.test(y)
-  : typeof x === 'boolean' || typeof x === 'object' ? x
-  : typeof x === 'function' ? x(y)
-  : false
-
-class JSONStream extends Minipass {
-  #count = 0
-  #ending = false
-  #footer = null
-  #header = null
-  #map = null
-  #onTokenOriginal
-  #parser
-  #path = null
-  #root = null
-
-  constructor (opts) {
-    super({
-      ...opts,
-      objectMode: true,
-    })
-
-    const parser = this.#parser = new Parser()
-    parser.onValue = value => this.#onValue(value)
-    this.#onTokenOriginal = parser.onToken
-    parser.onToken = (token, value) => this.#onToken(token, value)
-    parser.onError = er => this.#onError(er)
-
-    this.#path = typeof opts.path === 'string'
-      ? opts.path.split('.').map(e =>
-        e === '$*' ? { emitKey: true }
-        : e === '*' ? true
-        : e === '' ? { recurse: true }
-        : e)
-      : Array.isArray(opts.path) && opts.path.length ? opts.path
-      : null
-
-    if (typeof opts.map === 'function') {
-      this.#map = opts.map
-    }
-  }
-
-  #setHeaderFooter (key, value) {
-    // header has not been emitted yet
-    if (this.#header !== false) {
-      this.#header = this.#header || {}
-      this.#header[key] = value
-    }
-
-    // footer has not been emitted yet but header has
-    if (this.#footer !== false && this.#header === false) {
-      this.#footer = this.#footer || {}
-      this.#footer[key] = value
-    }
-  }
-
-  #onError (er) {
-    // error will always happen during a write() call.
-    const caller = this.#ending ? this.end : this.write
-    this.#ending = false
-    return this.emit('error', new JSONStreamError(er, caller))
-  }
-
-  #onToken (token, value) {
-    const parser = this.#parser
-    this.#onTokenOriginal.call(this.#parser, token, value)
-    if (parser.stack.length === 0) {
-      if (this.#root) {
-        const root = this.#root
-        if (!this.#path) {
-          super.write(root)
-        }
-        this.#root = null
-        this.#count = 0
-      }
-    }
-  }
-
-  #onValue (value) {
-    const parser = this.#parser
-    // the LAST onValue encountered is the root object.
-    // just overwrite it each time.
-    this.#root = value
-
-    if (!this.#path) {
-      return
-    }
-
-    let i = 0 // iterates on path
-    let j = 0 // iterates on stack
-    let emitKey = false
-    while (i < this.#path.length) {
-      const key = this.#path[i]
-      j++
-
-      if (key && !key.recurse) {
-        const c = (j === parser.stack.length) ? parser : parser.stack[j]
-        if (!c) {
-          return
-        }
-        if (!check(key, c.key)) {
-          this.#setHeaderFooter(c.key, value)
-          return
-        }
-        emitKey = !!key.emitKey
-        i++
-      } else {
-        i++
-        if (i >= this.#path.length) {
-          return
-        }
-        const nextKey = this.#path[i]
-        if (!nextKey) {
-          return
-        }
-        while (true) {
-          const c = (j === parser.stack.length) ? parser : parser.stack[j]
-          if (!c) {
-            return
-          }
-          if (check(nextKey, c.key)) {
-            i++
-            if (!Object.isFrozen(parser.stack[j])) {
-              parser.stack[j].value = null
-            }
-            break
-          } else {
-            this.#setHeaderFooter(c.key, value)
-          }
-          j++
-        }
-      }
-    }
-
-    // emit header
-    if (this.#header) {
-      const header = this.#header
-      this.#header = false
-      this.emit('header', header)
-    }
-    if (j !== parser.stack.length) {
-      return
-    }
-
-    this.#count++
-    const actualPath = parser.stack.slice(1)
-      .map(e => e.key).concat([parser.key])
-    if (value !== null && value !== undefined) {
-      const data = this.#map ? this.#map(value, actualPath) : value
-      if (data !== null && data !== undefined) {
-        const emit = emitKey ? { value: data } : data
-        if (emitKey) {
-          emit.key = parser.key
-        }
-        super.write(emit)
-      }
-    }
-
-    if (parser.value) {
-      delete parser.value[parser.key]
-    }
-
-    for (const k of parser.stack) {
-      k.value = null
-    }
-  }
-
-  write (chunk, encoding) {
-    if (typeof chunk === 'string') {
-      chunk = Buffer.from(chunk, encoding)
-    } else if (!Buffer.isBuffer(chunk)) {
-      return this.emit('error', new TypeError(
-        'Can only parse JSON from string or buffer input'))
-    }
-    this.#parser.write(chunk)
-    return this.flowing
-  }
-
-  end (chunk, encoding) {
-    this.#ending = true
-    if (chunk) {
-      this.write(chunk, encoding)
-    }
-
-    const h = this.#header
-    this.#header = null
-    const f = this.#footer
-    this.#footer = null
-    if (h) {
-      this.emit('header', h)
-    }
-    if (f) {
-      this.emit('footer', f)
-    }
-    return super.end()
-  }
-
-  static get JSONStreamError () {
-    return JSONStreamError
-  }
-
-  static parse (path, map) {
-    return new JSONStream({ path, map })
-  }
-}
-
-module.exports = JSONStream
diff --git a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json b/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
deleted file mode 100644
index a8e954cdf3c14..0000000000000
--- a/node_modules/npm-profile/node_modules/npm-registry-fetch/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "name": "npm-registry-fetch",
-  "version": "19.0.0",
-  "description": "Fetch-based http client for use with npm registry APIs",
-  "main": "lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "npmclilint": "npmcli-lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-registry-fetch.git"
-  },
-  "keywords": [
-    "npm",
-    "registry",
-    "fetch"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/redact": "^3.0.0",
-    "jsonparse": "^1.3.1",
-    "make-fetch-happen": "^15.0.0",
-    "minipass": "^7.0.2",
-    "minipass-fetch": "^4.0.0",
-    "minizlib": "^3.0.1",
-    "npm-package-arg": "^13.0.0",
-    "proc-log": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "cacache": "^20.0.0",
-    "nock": "^13.2.4",
-    "require-inject": "^1.4.4",
-    "ssri": "^12.0.0",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/LICENSE b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/npm-profile/node_modules/hosted-git-info/LICENSE
rename to node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/hosted-git-info/lib/index.js
rename to node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/npm-profile/node_modules/hosted-git-info/package.json b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/hosted-git-info/package.json
rename to node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
diff --git a/node_modules/npm-profile/node_modules/lru-cache/LICENSE b/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/LICENSE
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.js
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.js
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.min.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/index.min.js
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.min.js
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/dist/commonjs/package.json
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.js
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.js
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.min.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/dist/esm/index.min.js
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.min.js
diff --git a/node_modules/npm-profile/node_modules/lru-cache/dist/esm/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/dist/esm/package.json
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/npm-profile/node_modules/lru-cache/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/lru-cache/package.json
rename to node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/LICENSE
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/entry.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/errors.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/index.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/key.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/cache/policy.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/fetch.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/index.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/options.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/pipeline.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/lib/remote.js
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
diff --git a/node_modules/npm-profile/node_modules/make-fetch-happen/package.json b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/make-fetch-happen/package.json
rename to node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
diff --git a/node_modules/npm-profile/node_modules/negotiator/HISTORY.md b/node_modules/npm-registry-fetch/node_modules/negotiator/HISTORY.md
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/HISTORY.md
rename to node_modules/npm-registry-fetch/node_modules/negotiator/HISTORY.md
diff --git a/node_modules/npm-profile/node_modules/negotiator/LICENSE b/node_modules/npm-registry-fetch/node_modules/negotiator/LICENSE
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/LICENSE
rename to node_modules/npm-registry-fetch/node_modules/negotiator/LICENSE
diff --git a/node_modules/npm-profile/node_modules/negotiator/index.js b/node_modules/npm-registry-fetch/node_modules/negotiator/index.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/index.js
rename to node_modules/npm-registry-fetch/node_modules/negotiator/index.js
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/charset.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/charset.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/lib/charset.js
rename to node_modules/npm-registry-fetch/node_modules/negotiator/lib/charset.js
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/encoding.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/encoding.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/lib/encoding.js
rename to node_modules/npm-registry-fetch/node_modules/negotiator/lib/encoding.js
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/language.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/language.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/lib/language.js
rename to node_modules/npm-registry-fetch/node_modules/negotiator/lib/language.js
diff --git a/node_modules/npm-profile/node_modules/negotiator/lib/mediaType.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/mediaType.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/lib/mediaType.js
rename to node_modules/npm-registry-fetch/node_modules/negotiator/lib/mediaType.js
diff --git a/node_modules/npm-profile/node_modules/negotiator/package.json b/node_modules/npm-registry-fetch/node_modules/negotiator/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/negotiator/package.json
rename to node_modules/npm-registry-fetch/node_modules/negotiator/package.json
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/LICENSE b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/npm-profile/node_modules/npm-package-arg/LICENSE
rename to node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/npm-profile/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/npm-profile/node_modules/npm-package-arg/package.json b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
similarity index 100%
rename from node_modules/npm-profile/node_modules/npm-package-arg/package.json
rename to node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
diff --git a/node_modules/npm-registry-fetch/package.json b/node_modules/npm-registry-fetch/package.json
index bd7a79d35e26a..a8e954cdf3c14 100644
--- a/node_modules/npm-registry-fetch/package.json
+++ b/node_modules/npm-registry-fetch/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-registry-fetch",
-  "version": "18.0.2",
+  "version": "19.0.0",
   "description": "Fetch-based http client for use with npm registry APIs",
   "main": "lib",
   "files": [
@@ -33,17 +33,17 @@
   "dependencies": {
     "@npmcli/redact": "^3.0.0",
     "jsonparse": "^1.3.1",
-    "make-fetch-happen": "^14.0.0",
+    "make-fetch-happen": "^15.0.0",
     "minipass": "^7.0.2",
     "minipass-fetch": "^4.0.0",
     "minizlib": "^3.0.1",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "proc-log": "^5.0.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.4",
-    "cacache": "^19.0.1",
+    "@npmcli/template-oss": "4.25.0",
+    "cacache": "^20.0.0",
     "nock": "^13.2.4",
     "require-inject": "^1.4.4",
     "ssri": "^12.0.0",
@@ -58,11 +58,11 @@
     ]
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.25.0",
     "publish": "true"
   }
 }
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md b/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
deleted file mode 100644
index 5fc208ff122e0..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/LICENSE.md
+++ /dev/null
@@ -1,20 +0,0 @@
-
-
-ISC License
-
-Copyright npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this
-software for any purpose with or without fee is hereby
-granted, provided that the above copyright notice and this
-permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL
-WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO
-EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
-WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
-TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
deleted file mode 100644
index 9270025fa8d90..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/lib/auth.js
+++ /dev/null
@@ -1,181 +0,0 @@
-'use strict'
-const fs = require('fs')
-const npa = require('npm-package-arg')
-const { URL } = require('url')
-
-// Find the longest registry key that is used for some kind of auth
-// in the options.  Returns the registry key and the auth config.
-const regFromURI = (uri, opts) => {
-  const parsed = new URL(uri)
-  // try to find a config key indicating we have auth for this registry
-  // can be one of :_authToken, :_auth, :_password and :username, or
-  // :certfile and :keyfile
-  // We walk up the "path" until we're left with just //<host>[:<port>],
-  // stopping when we reach '//'.
-  let regKey = `//${parsed.host}${parsed.pathname}`
-  while (regKey.length > '//'.length) {
-    const authKey = hasAuth(regKey, opts)
-    // got some auth for this URI
-    if (authKey) {
-      return { regKey, authKey }
-    }
-
-    // can be either //host/some/path/:_auth or //host/some/path:_auth
-    // walk up by removing EITHER what's after the slash OR the slash itself
-    regKey = regKey.replace(/([^/]+|\/)$/, '')
-  }
-  return { regKey: false, authKey: null }
-}
-
-// Not only do we want to know if there is auth, but if we are calling `npm
-// logout` we want to know what config value specifically provided it.  This is
-// so we can look up where the config came from to delete it (i.e. user vs
-// project)
-const hasAuth = (regKey, opts) => {
-  if (opts[`${regKey}:_authToken`]) {
-    return '_authToken'
-  }
-  if (opts[`${regKey}:_auth`]) {
-    return '_auth'
-  }
-  if (opts[`${regKey}:username`] && opts[`${regKey}:_password`]) {
-    // 'password' can be inferred to also be present
-    return 'username'
-  }
-  if (opts[`${regKey}:certfile`] && opts[`${regKey}:keyfile`]) {
-    // 'keyfile' can be inferred to also be present
-    return 'certfile'
-  }
-  return false
-}
-
-const sameHost = (a, b) => {
-  const parsedA = new URL(a)
-  const parsedB = new URL(b)
-  return parsedA.host === parsedB.host
-}
-
-const getRegistry = opts => {
-  const { spec } = opts
-  const { scope: specScope, subSpec } = spec ? npa(spec) : {}
-  const subSpecScope = subSpec && subSpec.scope
-  const scope = subSpec ? subSpecScope : specScope
-  const scopeReg = scope && opts[`${scope}:registry`]
-  return scopeReg || opts.registry
-}
-
-const maybeReadFile = file => {
-  try {
-    return fs.readFileSync(file, 'utf8')
-  } catch (er) {
-    if (er.code !== 'ENOENT') {
-      throw er
-    }
-    return null
-  }
-}
-
-const getAuth = (uri, opts = {}) => {
-  const { forceAuth } = opts
-  if (!uri) {
-    throw new Error('URI is required')
-  }
-  const { regKey, authKey } = regFromURI(uri, forceAuth || opts)
-
-  // we are only allowed to use what's in forceAuth if specified
-  if (forceAuth && !regKey) {
-    return new Auth({
-      // if we force auth we don't want to refer back to anything in config
-      regKey: false,
-      authKey: null,
-      scopeAuthKey: null,
-      token: forceAuth._authToken || forceAuth.token,
-      username: forceAuth.username,
-      password: forceAuth._password || forceAuth.password,
-      auth: forceAuth._auth || forceAuth.auth,
-      certfile: forceAuth.certfile,
-      keyfile: forceAuth.keyfile,
-    })
-  }
-
-  // no auth for this URI, but might have it for the registry
-  if (!regKey) {
-    const registry = getRegistry(opts)
-    if (registry && uri !== registry && sameHost(uri, registry)) {
-      return getAuth(registry, opts)
-    } else if (registry !== opts.registry) {
-      // If making a tarball request to a different base URI than the
-      // registry where we logged in, but the same auth SHOULD be sent
-      // to that artifact host, then we track where it was coming in from,
-      // and warn the user if we get a 4xx error on it.
-      const { regKey: scopeAuthKey, authKey: _authKey } = regFromURI(registry, opts)
-      return new Auth({ scopeAuthKey, regKey: scopeAuthKey, authKey: _authKey })
-    }
-  }
-
-  const {
-    [`${regKey}:_authToken`]: token,
-    [`${regKey}:username`]: username,
-    [`${regKey}:_password`]: password,
-    [`${regKey}:_auth`]: auth,
-    [`${regKey}:certfile`]: certfile,
-    [`${regKey}:keyfile`]: keyfile,
-  } = opts
-
-  return new Auth({
-    scopeAuthKey: null,
-    regKey,
-    authKey,
-    token,
-    auth,
-    username,
-    password,
-    certfile,
-    keyfile,
-  })
-}
-
-class Auth {
-  constructor ({
-    token,
-    auth,
-    username,
-    password,
-    scopeAuthKey,
-    certfile,
-    keyfile,
-    regKey,
-    authKey,
-  }) {
-    // same as regKey but only present for scoped auth. Should have been named scopeRegKey
-    this.scopeAuthKey = scopeAuthKey
-    // `${regKey}:${authKey}` will get you back to the auth config that gave us auth
-    this.regKey = regKey
-    this.authKey = authKey
-    this.token = null
-    this.auth = null
-    this.isBasicAuth = false
-    this.cert = null
-    this.key = null
-    if (token) {
-      this.token = token
-    } else if (auth) {
-      this.auth = auth
-    } else if (username && password) {
-      const p = Buffer.from(password, 'base64').toString('utf8')
-      this.auth = Buffer.from(`${username}:${p}`, 'utf8').toString('base64')
-      this.isBasicAuth = true
-    }
-    // mTLS may be used in conjunction with another auth method above
-    if (certfile && keyfile) {
-      const cert = maybeReadFile(certfile, 'utf-8')
-      const key = maybeReadFile(keyfile, 'utf-8')
-      if (cert && key) {
-        this.cert = cert
-        this.key = key
-      }
-    }
-  }
-}
-
-module.exports = getAuth
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
deleted file mode 100644
index 2f183082ab2ce..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/lib/check-response.js
+++ /dev/null
@@ -1,108 +0,0 @@
-'use strict'
-
-const errors = require('./errors.js')
-const { Response } = require('minipass-fetch')
-const defaultOpts = require('./default-opts.js')
-const { log } = require('proc-log')
-const { redact: cleanUrl } = require('@npmcli/redact')
-
-/* eslint-disable-next-line max-len */
-const moreInfoUrl = 'https://github.com/npm/cli/wiki/No-auth-for-URI,-but-auth-present-for-scoped-registry'
-const checkResponse =
-  async ({ method, uri, res, startTime, auth, opts }) => {
-    opts = { ...defaultOpts, ...opts }
-    if (res.headers.has('npm-notice') && !res.headers.has('x-local-cache')) {
-      log.notice('', res.headers.get('npm-notice'))
-    }
-
-    if (res.status >= 400) {
-      logRequest(method, res, startTime)
-      if (auth && auth.scopeAuthKey && !auth.token && !auth.auth) {
-      // we didn't have auth for THIS request, but we do have auth for
-      // requests to the registry indicated by the spec's scope value.
-      // Warn the user.
-        log.warn('registry', `No auth for URI, but auth present for scoped registry.
-
-URI: ${uri}
-Scoped Registry Key: ${auth.scopeAuthKey}
-
-More info here: ${moreInfoUrl}`)
-      }
-      return checkErrors(method, res, startTime, opts)
-    } else {
-      res.body.on('end', () => logRequest(method, res, startTime, opts))
-      if (opts.ignoreBody) {
-        res.body.resume()
-        return new Response(null, res)
-      }
-      return res
-    }
-  }
-module.exports = checkResponse
-
-function logRequest (method, res, startTime) {
-  const elapsedTime = Date.now() - startTime
-  const attempt = res.headers.get('x-fetch-attempts')
-  const attemptStr = attempt && attempt > 1 ? ` attempt #${attempt}` : ''
-  const cacheStatus = res.headers.get('x-local-cache-status')
-  const cacheStr = cacheStatus ? ` (cache ${cacheStatus})` : ''
-  const urlStr = cleanUrl(res.url)
-
-  // If make-fetch-happen reports a cache hit, then there was no fetch
-  if (cacheStatus === 'hit') {
-    log.http(
-      'cache',
-      `${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
-    )
-  } else {
-    log.http(
-      'fetch',
-      `${method.toUpperCase()} ${res.status} ${urlStr} ${elapsedTime}ms${attemptStr}${cacheStr}`
-    )
-  }
-}
-
-function checkErrors (method, res, startTime, opts) {
-  return res.buffer()
-    .catch(() => null)
-    .then(body => {
-      let parsed = body
-      try {
-        parsed = JSON.parse(body.toString('utf8'))
-      } catch {
-        // ignore errors
-      }
-      if (res.status === 401 && res.headers.get('www-authenticate')) {
-        const auth = res.headers.get('www-authenticate')
-          .split(/,\s*/)
-          .map(s => s.toLowerCase())
-        if (auth.indexOf('ipaddress') !== -1) {
-          throw new errors.HttpErrorAuthIPAddress(
-            method, res, parsed, opts.spec
-          )
-        } else if (auth.indexOf('otp') !== -1) {
-          throw new errors.HttpErrorAuthOTP(
-            method, res, parsed, opts.spec
-          )
-        } else {
-          throw new errors.HttpErrorAuthUnknown(
-            method, res, parsed, opts.spec
-          )
-        }
-      } else if (
-        res.status === 401 &&
-        body != null &&
-        /one-time pass/.test(body.toString('utf8'))
-      ) {
-        // Heuristic for malformed OTP responses that don't include the
-        // www-authenticate header.
-        throw new errors.HttpErrorAuthOTP(
-          method, res, parsed, opts.spec
-        )
-      } else {
-        throw new errors.HttpErrorGeneral(
-          method, res, parsed, opts.spec
-        )
-      }
-    })
-}
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
deleted file mode 100644
index f0847f0b507e2..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/lib/default-opts.js
+++ /dev/null
@@ -1,19 +0,0 @@
-const pkg = require('../package.json')
-module.exports = {
-  maxSockets: 12,
-  method: 'GET',
-  registry: 'https://registry.npmjs.org/',
-  timeout: 5 * 60 * 1000, // 5 minutes
-  strictSSL: true,
-  noProxy: process.env.NOPROXY,
-  userAgent: `${pkg.name
-    }@${
-      pkg.version
-    }/node@${
-      process.version
-    }+${
-      process.arch
-    } (${
-      process.platform
-    })`,
-}
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
deleted file mode 100644
index 5bf6b012a24ef..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/lib/errors.js
+++ /dev/null
@@ -1,80 +0,0 @@
-'use strict'
-
-const { URL } = require('node:url')
-
-function packageName (href) {
-  try {
-    let basePath = new URL(href).pathname.slice(1)
-    if (!basePath.match(/^-/)) {
-      basePath = basePath.split('/')
-      var index = basePath.indexOf('_rewrite')
-      if (index === -1) {
-        index = basePath.length - 1
-      } else {
-        index++
-      }
-      return decodeURIComponent(basePath[index])
-    }
-  } catch {
-    // this is ok
-  }
-}
-
-class HttpErrorBase extends Error {
-  constructor (method, res, body, spec) {
-    super()
-    this.name = this.constructor.name
-    this.headers = typeof res.headers?.raw === 'function' ? res.headers.raw() : res.headers
-    this.statusCode = res.status
-    this.code = `E${res.status}`
-    this.method = method
-    this.uri = res.url
-    this.body = body
-    this.pkgid = spec ? spec.toString() : packageName(res.url)
-    Error.captureStackTrace(this, this.constructor)
-  }
-}
-
-class HttpErrorGeneral extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = `${res.status} ${res.statusText} - ${
-      this.method.toUpperCase()
-    } ${
-      this.spec || this.uri
-    }${
-      (body && body.error) ? ' - ' + body.error : ''
-    }`
-  }
-}
-
-class HttpErrorAuthOTP extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'OTP required for authentication'
-    this.code = 'EOTP'
-  }
-}
-
-class HttpErrorAuthIPAddress extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'Login is not allowed from your IP address'
-    this.code = 'EAUTHIP'
-  }
-}
-
-class HttpErrorAuthUnknown extends HttpErrorBase {
-  constructor (method, res, body, spec) {
-    super(method, res, body, spec)
-    this.message = 'Unable to authenticate, need: ' + res.headers.get('www-authenticate')
-  }
-}
-
-module.exports = {
-  HttpErrorBase,
-  HttpErrorGeneral,
-  HttpErrorAuthOTP,
-  HttpErrorAuthIPAddress,
-  HttpErrorAuthUnknown,
-}
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
deleted file mode 100644
index 898c8125bfe0e..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/lib/index.js
+++ /dev/null
@@ -1,247 +0,0 @@
-'use strict'
-
-const { HttpErrorAuthOTP } = require('./errors.js')
-const checkResponse = require('./check-response.js')
-const getAuth = require('./auth.js')
-const fetch = require('make-fetch-happen')
-const JSONStream = require('./json-stream')
-const npa = require('npm-package-arg')
-const qs = require('querystring')
-const url = require('url')
-const zlib = require('minizlib')
-const { Minipass } = require('minipass')
-
-const defaultOpts = require('./default-opts.js')
-
-// WhatWG URL throws if it's not fully resolved
-const urlIsValid = u => {
-  try {
-    return !!new url.URL(u)
-  } catch (_) {
-    return false
-  }
-}
-
-module.exports = regFetch
-function regFetch (uri, /* istanbul ignore next */ opts_ = {}) {
-  const opts = {
-    ...defaultOpts,
-    ...opts_,
-  }
-
-  // if we did not get a fully qualified URI, then we look at the registry
-  // config or relevant scope to resolve it.
-  const uriValid = urlIsValid(uri)
-  let registry = opts.registry || defaultOpts.registry
-  if (!uriValid) {
-    registry = opts.registry = (
-      (opts.spec && pickRegistry(opts.spec, opts)) ||
-      opts.registry ||
-      registry
-    )
-    uri = `${
-      registry.trim().replace(/\/?$/g, '')
-    }/${
-      uri.trim().replace(/^\//, '')
-    }`
-    // asserts that this is now valid
-    new url.URL(uri)
-  }
-
-  const method = opts.method || 'GET'
-
-  // through that takes into account the scope, the prefix of `uri`, etc
-  const startTime = Date.now()
-  const auth = getAuth(uri, opts)
-  const headers = getHeaders(uri, auth, opts)
-  let body = opts.body
-  const bodyIsStream = Minipass.isStream(body)
-  const bodyIsPromise = body &&
-    typeof body === 'object' &&
-    typeof body.then === 'function'
-
-  if (
-    body && !bodyIsStream && !bodyIsPromise && typeof body !== 'string' && !Buffer.isBuffer(body)
-  ) {
-    headers['content-type'] = headers['content-type'] || 'application/json'
-    body = JSON.stringify(body)
-  } else if (body && !headers['content-type']) {
-    headers['content-type'] = 'application/octet-stream'
-  }
-
-  if (opts.gzip) {
-    headers['content-encoding'] = 'gzip'
-    if (bodyIsStream) {
-      const gz = new zlib.Gzip()
-      body.on('error', /* istanbul ignore next: unlikely and hard to test */
-        err => gz.emit('error', err))
-      body = body.pipe(gz)
-    } else if (!bodyIsPromise) {
-      body = new zlib.Gzip().end(body).concat()
-    }
-  }
-
-  const parsed = new url.URL(uri)
-
-  if (opts.query) {
-    const q = typeof opts.query === 'string' ? qs.parse(opts.query)
-      : opts.query
-
-    Object.keys(q).forEach(key => {
-      if (q[key] !== undefined) {
-        parsed.searchParams.set(key, q[key])
-      }
-    })
-    uri = url.format(parsed)
-  }
-
-  if (parsed.searchParams.get('write') === 'true' && method === 'GET') {
-    // do not cache, because this GET is fetching a rev that will be
-    // used for a subsequent PUT or DELETE, so we need to conditionally
-    // update cache.
-    opts.offline = false
-    opts.preferOffline = false
-    opts.preferOnline = true
-  }
-
-  const doFetch = async fetchBody => {
-    const p = fetch(uri, {
-      agent: opts.agent,
-      algorithms: opts.algorithms,
-      body: fetchBody,
-      cache: getCacheMode(opts),
-      cachePath: opts.cache,
-      ca: opts.ca,
-      cert: auth.cert || opts.cert,
-      headers,
-      integrity: opts.integrity,
-      key: auth.key || opts.key,
-      localAddress: opts.localAddress,
-      maxSockets: opts.maxSockets,
-      memoize: opts.memoize,
-      method: method,
-      noProxy: opts.noProxy,
-      proxy: opts.httpsProxy || opts.proxy,
-      retry: opts.retry ? opts.retry : {
-        retries: opts.fetchRetries,
-        factor: opts.fetchRetryFactor,
-        minTimeout: opts.fetchRetryMintimeout,
-        maxTimeout: opts.fetchRetryMaxtimeout,
-      },
-      strictSSL: opts.strictSSL,
-      timeout: opts.timeout || 30 * 1000,
-    }).then(res => checkResponse({
-      method,
-      uri,
-      res,
-      registry,
-      startTime,
-      auth,
-      opts,
-    }))
-
-    if (typeof opts.otpPrompt === 'function') {
-      return p.catch(async er => {
-        if (er instanceof HttpErrorAuthOTP) {
-          let otp
-          // if otp fails to complete, we fail with that failure
-          try {
-            otp = await opts.otpPrompt()
-          } catch (_) {
-            // ignore this error
-          }
-          // if no otp provided, or otpPrompt errored, throw the original HTTP error
-          if (!otp) {
-            throw er
-          }
-          return regFetch(uri, { ...opts, otp })
-        }
-        throw er
-      })
-    } else {
-      return p
-    }
-  }
-
-  return Promise.resolve(body).then(doFetch)
-}
-
-module.exports.getAuth = getAuth
-
-module.exports.json = fetchJSON
-function fetchJSON (uri, opts) {
-  return regFetch(uri, opts).then(res => res.json())
-}
-
-module.exports.json.stream = fetchJSONStream
-function fetchJSONStream (uri, jsonPath,
-  /* istanbul ignore next */ opts_ = {}) {
-  const opts = { ...defaultOpts, ...opts_ }
-  const parser = JSONStream.parse(jsonPath, opts.mapJSON)
-  regFetch(uri, opts).then(res =>
-    res.body.on('error',
-      /* istanbul ignore next: unlikely and difficult to test */
-      er => parser.emit('error', er)).pipe(parser)
-  ).catch(er => parser.emit('error', er))
-  return parser
-}
-
-module.exports.pickRegistry = pickRegistry
-function pickRegistry (spec, opts = {}) {
-  spec = npa(spec)
-  let registry = spec.scope &&
-    opts[spec.scope.replace(/^@?/, '@') + ':registry']
-
-  if (!registry && opts.scope) {
-    registry = opts[opts.scope.replace(/^@?/, '@') + ':registry']
-  }
-
-  if (!registry) {
-    registry = opts.registry || defaultOpts.registry
-  }
-
-  return registry
-}
-
-function getCacheMode (opts) {
-  return opts.offline ? 'only-if-cached'
-    : opts.preferOffline ? 'force-cache'
-    : opts.preferOnline ? 'no-cache'
-    : 'default'
-}
-
-function getHeaders (uri, auth, opts) {
-  const headers = Object.assign({
-    'user-agent': opts.userAgent,
-  }, opts.headers || {})
-
-  if (opts.authType) {
-    headers['npm-auth-type'] = opts.authType
-  }
-
-  if (opts.scope) {
-    headers['npm-scope'] = opts.scope
-  }
-
-  if (opts.npmSession) {
-    headers['npm-session'] = opts.npmSession
-  }
-
-  if (opts.npmCommand) {
-    headers['npm-command'] = opts.npmCommand
-  }
-
-  // If a tarball is hosted on a different place than the manifest, only send
-  // credentials on `alwaysAuth`
-  if (auth.token) {
-    headers.authorization = `Bearer ${auth.token}`
-  } else if (auth.auth) {
-    headers.authorization = `Basic ${auth.auth}`
-  }
-
-  if (opts.otp) {
-    headers['npm-otp'] = opts.otp
-  }
-
-  return headers
-}
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js b/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js
deleted file mode 100644
index 36b05ad4a20b9..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/lib/json-stream.js
+++ /dev/null
@@ -1,223 +0,0 @@
-const Parser = require('jsonparse')
-const { Minipass } = require('minipass')
-
-class JSONStreamError extends Error {
-  constructor (err, caller) {
-    super(err.message)
-    Error.captureStackTrace(this, caller || this.constructor)
-  }
-
-  get name () {
-    return 'JSONStreamError'
-  }
-}
-
-const check = (x, y) =>
-  typeof x === 'string' ? String(y) === x
-  : x && typeof x.test === 'function' ? x.test(y)
-  : typeof x === 'boolean' || typeof x === 'object' ? x
-  : typeof x === 'function' ? x(y)
-  : false
-
-class JSONStream extends Minipass {
-  #count = 0
-  #ending = false
-  #footer = null
-  #header = null
-  #map = null
-  #onTokenOriginal
-  #parser
-  #path = null
-  #root = null
-
-  constructor (opts) {
-    super({
-      ...opts,
-      objectMode: true,
-    })
-
-    const parser = this.#parser = new Parser()
-    parser.onValue = value => this.#onValue(value)
-    this.#onTokenOriginal = parser.onToken
-    parser.onToken = (token, value) => this.#onToken(token, value)
-    parser.onError = er => this.#onError(er)
-
-    this.#path = typeof opts.path === 'string'
-      ? opts.path.split('.').map(e =>
-        e === '$*' ? { emitKey: true }
-        : e === '*' ? true
-        : e === '' ? { recurse: true }
-        : e)
-      : Array.isArray(opts.path) && opts.path.length ? opts.path
-      : null
-
-    if (typeof opts.map === 'function') {
-      this.#map = opts.map
-    }
-  }
-
-  #setHeaderFooter (key, value) {
-    // header has not been emitted yet
-    if (this.#header !== false) {
-      this.#header = this.#header || {}
-      this.#header[key] = value
-    }
-
-    // footer has not been emitted yet but header has
-    if (this.#footer !== false && this.#header === false) {
-      this.#footer = this.#footer || {}
-      this.#footer[key] = value
-    }
-  }
-
-  #onError (er) {
-    // error will always happen during a write() call.
-    const caller = this.#ending ? this.end : this.write
-    this.#ending = false
-    return this.emit('error', new JSONStreamError(er, caller))
-  }
-
-  #onToken (token, value) {
-    const parser = this.#parser
-    this.#onTokenOriginal.call(this.#parser, token, value)
-    if (parser.stack.length === 0) {
-      if (this.#root) {
-        const root = this.#root
-        if (!this.#path) {
-          super.write(root)
-        }
-        this.#root = null
-        this.#count = 0
-      }
-    }
-  }
-
-  #onValue (value) {
-    const parser = this.#parser
-    // the LAST onValue encountered is the root object.
-    // just overwrite it each time.
-    this.#root = value
-
-    if (!this.#path) {
-      return
-    }
-
-    let i = 0 // iterates on path
-    let j = 0 // iterates on stack
-    let emitKey = false
-    while (i < this.#path.length) {
-      const key = this.#path[i]
-      j++
-
-      if (key && !key.recurse) {
-        const c = (j === parser.stack.length) ? parser : parser.stack[j]
-        if (!c) {
-          return
-        }
-        if (!check(key, c.key)) {
-          this.#setHeaderFooter(c.key, value)
-          return
-        }
-        emitKey = !!key.emitKey
-        i++
-      } else {
-        i++
-        if (i >= this.#path.length) {
-          return
-        }
-        const nextKey = this.#path[i]
-        if (!nextKey) {
-          return
-        }
-        while (true) {
-          const c = (j === parser.stack.length) ? parser : parser.stack[j]
-          if (!c) {
-            return
-          }
-          if (check(nextKey, c.key)) {
-            i++
-            if (!Object.isFrozen(parser.stack[j])) {
-              parser.stack[j].value = null
-            }
-            break
-          } else {
-            this.#setHeaderFooter(c.key, value)
-          }
-          j++
-        }
-      }
-    }
-
-    // emit header
-    if (this.#header) {
-      const header = this.#header
-      this.#header = false
-      this.emit('header', header)
-    }
-    if (j !== parser.stack.length) {
-      return
-    }
-
-    this.#count++
-    const actualPath = parser.stack.slice(1)
-      .map(e => e.key).concat([parser.key])
-    if (value !== null && value !== undefined) {
-      const data = this.#map ? this.#map(value, actualPath) : value
-      if (data !== null && data !== undefined) {
-        const emit = emitKey ? { value: data } : data
-        if (emitKey) {
-          emit.key = parser.key
-        }
-        super.write(emit)
-      }
-    }
-
-    if (parser.value) {
-      delete parser.value[parser.key]
-    }
-
-    for (const k of parser.stack) {
-      k.value = null
-    }
-  }
-
-  write (chunk, encoding) {
-    if (typeof chunk === 'string') {
-      chunk = Buffer.from(chunk, encoding)
-    } else if (!Buffer.isBuffer(chunk)) {
-      return this.emit('error', new TypeError(
-        'Can only parse JSON from string or buffer input'))
-    }
-    this.#parser.write(chunk)
-    return this.flowing
-  }
-
-  end (chunk, encoding) {
-    this.#ending = true
-    if (chunk) {
-      this.write(chunk, encoding)
-    }
-
-    const h = this.#header
-    this.#header = null
-    const f = this.#footer
-    this.#footer = null
-    if (h) {
-      this.emit('header', h)
-    }
-    if (f) {
-      this.emit('footer', f)
-    }
-    return super.end()
-  }
-
-  static get JSONStreamError () {
-    return JSONStreamError
-  }
-
-  static parse (path, map) {
-    return new JSONStream({ path, map })
-  }
-}
-
-module.exports = JSONStream
diff --git a/node_modules/pacote/node_modules/npm-registry-fetch/package.json b/node_modules/pacote/node_modules/npm-registry-fetch/package.json
deleted file mode 100644
index a8e954cdf3c14..0000000000000
--- a/node_modules/pacote/node_modules/npm-registry-fetch/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "name": "npm-registry-fetch",
-  "version": "19.0.0",
-  "description": "Fetch-based http client for use with npm registry APIs",
-  "main": "lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "npmclilint": "npmcli-lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-registry-fetch.git"
-  },
-  "keywords": [
-    "npm",
-    "registry",
-    "fetch"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/redact": "^3.0.0",
-    "jsonparse": "^1.3.1",
-    "make-fetch-happen": "^15.0.0",
-    "minipass": "^7.0.2",
-    "minipass-fetch": "^4.0.0",
-    "minizlib": "^3.0.1",
-    "npm-package-arg": "^13.0.0",
-    "proc-log": "^5.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "cacache": "^20.0.0",
-    "nock": "^13.2.4",
-    "require-inject": "^1.4.4",
-    "ssri": "^12.0.0",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "test-ignore": "test[\\\\/](util|cache)[\\\\/]",
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 4dd37bc2b6a9c..2363571976be7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -132,7 +132,7 @@
         "npm-package-arg": "^12.0.2",
         "npm-pick-manifest": "^10.0.0",
         "npm-profile": "^12.0.0",
-        "npm-registry-fetch": "^18.0.2",
+        "npm-registry-fetch": "^19.0.0",
         "npm-user-validate": "^3.0.0",
         "p-map": "^7.0.3",
         "pacote": "^21.0.3",
@@ -12884,7 +12884,27 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-profile/node_modules/hosted-git-info": {
+    "node_modules/npm-registry-fetch": {
+      "version": "19.0.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.0.0.tgz",
+      "integrity": "sha512-DFxSAemHUwT/POaXAOY4NJmEWBPB0oKbwD6FFDE9hnt1nORkt/FXvgjD4hQjoKoHw9u0Ezws9SPXwV7xE/Gyww==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/redact": "^3.0.0",
+        "jsonparse": "^1.3.1",
+        "make-fetch-happen": "^15.0.0",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minizlib": "^3.0.1",
+        "npm-package-arg": "^13.0.0",
+        "proc-log": "^5.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
       "version": "9.0.0",
       "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
       "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
@@ -12897,7 +12917,7 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-profile/node_modules/lru-cache": {
+    "node_modules/npm-registry-fetch/node_modules/lru-cache": {
       "version": "11.2.1",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
       "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
@@ -12907,7 +12927,7 @@
         "node": "20 || >=22"
       }
     },
-    "node_modules/npm-profile/node_modules/make-fetch-happen": {
+    "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": {
       "version": "15.0.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
       "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
@@ -12930,7 +12950,7 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-profile/node_modules/minizlib": {
+    "node_modules/npm-registry-fetch/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
       "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
@@ -12943,7 +12963,7 @@
         "node": ">= 18"
       }
     },
-    "node_modules/npm-profile/node_modules/negotiator": {
+    "node_modules/npm-registry-fetch/node_modules/negotiator": {
       "version": "1.0.0",
       "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
       "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
@@ -12953,7 +12973,7 @@
         "node": ">= 0.6"
       }
     },
-    "node_modules/npm-profile/node_modules/npm-package-arg": {
+    "node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
       "version": "13.0.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
       "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
@@ -12969,59 +12989,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-profile/node_modules/npm-registry-fetch": {
-      "version": "19.0.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.0.0.tgz",
-      "integrity": "sha512-DFxSAemHUwT/POaXAOY4NJmEWBPB0oKbwD6FFDE9hnt1nORkt/FXvgjD4hQjoKoHw9u0Ezws9SPXwV7xE/Gyww==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/redact": "^3.0.0",
-        "jsonparse": "^1.3.1",
-        "make-fetch-happen": "^15.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minizlib": "^3.0.1",
-        "npm-package-arg": "^13.0.0",
-        "proc-log": "^5.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/npm-registry-fetch": {
-      "version": "18.0.2",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-18.0.2.tgz",
-      "integrity": "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/redact": "^3.0.0",
-        "jsonparse": "^1.3.1",
-        "make-fetch-happen": "^14.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minizlib": "^3.0.1",
-        "npm-package-arg": "^12.0.0",
-        "proc-log": "^5.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/npm-registry-fetch/node_modules/minizlib": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
     "node_modules/npm-user-validate": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/npm-user-validate/-/npm-user-validate-3.0.0.tgz",
@@ -13865,26 +13832,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/npm-registry-fetch": {
-      "version": "19.0.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.0.0.tgz",
-      "integrity": "sha512-DFxSAemHUwT/POaXAOY4NJmEWBPB0oKbwD6FFDE9hnt1nORkt/FXvgjD4hQjoKoHw9u0Ezws9SPXwV7xE/Gyww==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/redact": "^3.0.0",
-        "jsonparse": "^1.3.1",
-        "make-fetch-happen": "^15.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minizlib": "^3.0.1",
-        "npm-package-arg": "^13.0.0",
-        "proc-log": "^5.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/sigstore": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.0.0.tgz",
@@ -19736,7 +19683,7 @@
         "npm-install-checks": "^7.1.0",
         "npm-package-arg": "^12.0.0",
         "npm-pick-manifest": "^10.0.0",
-        "npm-registry-fetch": "^18.0.1",
+        "npm-registry-fetch": "^19.0.0",
         "pacote": "^21.0.2",
         "parse-conflict-json": "^4.0.0",
         "proc-log": "^5.0.0",
@@ -19795,7 +19742,7 @@
       "license": "ISC",
       "dependencies": {
         "npm-package-arg": "^12.0.0",
-        "npm-registry-fetch": "^18.0.1"
+        "npm-registry-fetch": "^19.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
@@ -19880,7 +19827,7 @@
       "license": "ISC",
       "dependencies": {
         "aproba": "^2.0.0",
-        "npm-registry-fetch": "^18.0.1"
+        "npm-registry-fetch": "^19.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
@@ -19920,7 +19867,7 @@
         "@npmcli/package-json": "^7.0.0",
         "ci-info": "^4.0.0",
         "npm-package-arg": "^12.0.0",
-        "npm-registry-fetch": "^18.0.1",
+        "npm-registry-fetch": "^19.0.0",
         "proc-log": "^5.0.0",
         "semver": "^7.3.7",
         "sigstore": "^3.0.0",
@@ -19941,7 +19888,7 @@
       "version": "9.0.0",
       "license": "ISC",
       "dependencies": {
-        "npm-registry-fetch": "^18.0.1"
+        "npm-registry-fetch": "^19.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
@@ -19958,7 +19905,7 @@
       "license": "ISC",
       "dependencies": {
         "aproba": "^2.0.0",
-        "npm-registry-fetch": "^18.0.1"
+        "npm-registry-fetch": "^19.0.0"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
diff --git a/package.json b/package.json
index 1b9848de599b1..149a573dca677 100644
--- a/package.json
+++ b/package.json
@@ -99,7 +99,7 @@
     "npm-package-arg": "^12.0.2",
     "npm-pick-manifest": "^10.0.0",
     "npm-profile": "^12.0.0",
-    "npm-registry-fetch": "^18.0.2",
+    "npm-registry-fetch": "^19.0.0",
     "npm-user-validate": "^3.0.0",
     "p-map": "^7.0.3",
     "pacote": "^21.0.3",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 372a983a946bb..70e8775747b1c 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -25,7 +25,7 @@
     "npm-install-checks": "^7.1.0",
     "npm-package-arg": "^12.0.0",
     "npm-pick-manifest": "^10.0.0",
-    "npm-registry-fetch": "^18.0.1",
+    "npm-registry-fetch": "^19.0.0",
     "pacote": "^21.0.2",
     "parse-conflict-json": "^4.0.0",
     "proc-log": "^5.0.0",
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index d0e4e294022ff..9c3c446045b6f 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -30,7 +30,7 @@
   "homepage": "https://npmjs.com/package/libnpmaccess",
   "dependencies": {
     "npm-package-arg": "^12.0.0",
-    "npm-registry-fetch": "^18.0.1"
+    "npm-registry-fetch": "^19.0.0"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index 346a2f5fa82f6..368cc7fef987d 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -43,7 +43,7 @@
   "homepage": "https://npmjs.com/package/libnpmorg",
   "dependencies": {
     "aproba": "^2.0.0",
-    "npm-registry-fetch": "^18.0.1"
+    "npm-registry-fetch": "^19.0.0"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index c51d4997cac14..134cfb7f14b72 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -41,7 +41,7 @@
     "@npmcli/package-json": "^7.0.0",
     "ci-info": "^4.0.0",
     "npm-package-arg": "^12.0.0",
-    "npm-registry-fetch": "^18.0.1",
+    "npm-registry-fetch": "^19.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.3.7",
     "sigstore": "^3.0.0",
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index c2e1db680779c..21fc85e188c12 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -39,7 +39,7 @@
   "bugs": "https://github.com/npm/libnpmsearch/issues",
   "homepage": "https://npmjs.com/package/libnpmsearch",
   "dependencies": {
-    "npm-registry-fetch": "^18.0.1"
+    "npm-registry-fetch": "^19.0.0"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index 04c3c4e6ddddd..270680bd6e3fe 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -33,7 +33,7 @@
   "homepage": "https://npmjs.com/package/libnpmteam",
   "dependencies": {
     "aproba": "^2.0.0",
-    "npm-registry-fetch": "^18.0.1"
+    "npm-registry-fetch": "^19.0.0"
   },
   "engines": {
     "node": "^20.17.0 || >=22.9.0"

From a2bdecc6677abcd58ed3037ab0edafb419ea86fa Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:44:03 -0700
Subject: [PATCH 14/63] deps: sigstore@4.0.0

---
 node_modules/.gitignore                       |   32 +-
 .../@sigstore/protobuf-specs}/LICENSE         |    0
 .../dist/__generated__/envelope.js            |    0
 .../dist/__generated__/events.js              |    0
 .../google/api/field_behavior.js              |    0
 .../dist/__generated__/google/protobuf/any.js |    0
 .../google/protobuf/descriptor.js             |    0
 .../google/protobuf/timestamp.js              |    0
 .../dist/__generated__/rekor/v2/dsse.js       |    0
 .../dist/__generated__/rekor/v2/entry.js      |    0
 .../__generated__/rekor/v2/hashedrekord.js    |    0
 .../dist/__generated__/rekor/v2/verifier.js   |    0
 .../dist/__generated__/sigstore_bundle.js     |    0
 .../dist/__generated__/sigstore_common.js     |    0
 .../dist/__generated__/sigstore_rekor.js      |    0
 .../dist/__generated__/sigstore_trustroot.js  |    0
 .../__generated__/sigstore_verification.js    |    0
 .../@sigstore/protobuf-specs/dist/index.js    |    0
 .../protobuf-specs/dist/rekor/v2/index.js     |    0
 .../@sigstore/protobuf-specs/package.json     |    0
 node_modules/@sigstore/bundle/package.json    |    6 +-
 node_modules/@sigstore/core/dist/index.js     |   24 +-
 .../@sigstore/core/dist/rfc3161/timestamp.js  |   24 +-
 .../@sigstore/core/dist/rfc3161/tstinfo.js    |   24 +-
 node_modules/@sigstore/core/dist/x509/cert.js |   25 +-
 node_modules/@sigstore/core/dist/x509/sct.js  |   24 +-
 node_modules/@sigstore/core/package.json      |    4 +-
 .../@sigstore/protobuf-specs}/LICENSE         |    0
 .../dist/__generated__/envelope.js            |   59 +
 .../dist/__generated__/events.js              |  174 ++
 .../google/api/field_behavior.js              |  141 ++
 .../dist/__generated__/google/protobuf/any.js |   35 +
 .../google/protobuf/descriptor.js             | 2042 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   29 +
 .../dist/__generated__/rekor/v2/dsse.js       |   55 +
 .../dist/__generated__/rekor/v2/entry.js      |   81 +
 .../__generated__/rekor/v2/hashedrekord.js    |   56 +
 .../dist/__generated__/rekor/v2/verifier.js   |   74 +
 .../dist/__generated__/sigstore_bundle.js     |  103 +
 .../dist/__generated__/sigstore_common.js     |  596 +++++
 .../dist/__generated__/sigstore_rekor.js      |  137 ++
 .../dist/__generated__/sigstore_trustroot.js  |  284 +++
 .../__generated__/sigstore_verification.js    |  281 +++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 +
 .../@sigstore/protobuf-specs/package.json     |   35 +
 .../node_modules/make-fetch-happen/LICENSE    |    0
 .../make-fetch-happen/lib/cache/entry.js      |    0
 .../make-fetch-happen/lib/cache/errors.js     |    0
 .../make-fetch-happen/lib/cache/index.js      |    0
 .../make-fetch-happen/lib/cache/key.js        |    0
 .../make-fetch-happen/lib/cache/policy.js     |    0
 .../make-fetch-happen/lib/fetch.js            |    0
 .../make-fetch-happen/lib/index.js            |    0
 .../make-fetch-happen/lib/options.js          |    0
 .../make-fetch-happen/lib/pipeline.js         |    0
 .../make-fetch-happen/lib/remote.js           |    0
 .../make-fetch-happen/package.json            |    0
 .../sign}/node_modules/negotiator/HISTORY.md  |    0
 .../sign}/node_modules/negotiator/LICENSE     |    0
 .../sign}/node_modules/negotiator/index.js    |    0
 .../node_modules/negotiator/lib/charset.js    |    0
 .../node_modules/negotiator/lib/encoding.js   |    0
 .../node_modules/negotiator/lib/language.js   |    0
 .../node_modules/negotiator/lib/mediaType.js  |    0
 .../node_modules/negotiator/package.json      |    0
 node_modules/@sigstore/sign/package.json      |   16 +-
 .../@sigstore/verify/dist/key/certificate.js  |    1 +
 .../@sigstore/verify/dist/verifier.js         |    2 +
 .../@sigstore/protobuf-specs/LICENSE          |    0
 .../dist/__generated__/envelope.js            |   59 +
 .../dist/__generated__/events.js              |  174 ++
 .../google/api/field_behavior.js              |  141 ++
 .../dist/__generated__/google/protobuf/any.js |   35 +
 .../google/protobuf/descriptor.js             | 2042 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   29 +
 .../dist/__generated__/rekor/v2/dsse.js       |   55 +
 .../dist/__generated__/rekor/v2/entry.js      |   81 +
 .../__generated__/rekor/v2/hashedrekord.js    |   56 +
 .../dist/__generated__/rekor/v2/verifier.js   |   74 +
 .../dist/__generated__/sigstore_bundle.js     |  103 +
 .../dist/__generated__/sigstore_common.js     |  596 +++++
 .../dist/__generated__/sigstore_rekor.js      |  137 ++
 .../dist/__generated__/sigstore_trustroot.js  |  284 +++
 .../__generated__/sigstore_verification.js    |  281 +++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 +
 .../@sigstore/protobuf-specs/package.json     |   35 +
 node_modules/@sigstore/verify/package.json    |   10 +-
 .../@sigstore/bundle/dist/build.js            |  100 -
 .../@sigstore/bundle/dist/bundle.js           |   24 -
 .../@sigstore/bundle/dist/error.js            |   25 -
 .../@sigstore/bundle/dist/index.js            |   43 -
 .../@sigstore/bundle/dist/serialized.js       |   49 -
 .../@sigstore/bundle/dist/utility.js          |    2 -
 .../@sigstore/bundle/dist/validate.js         |  199 --
 .../@sigstore/bundle/package.json             |   35 -
 .../@sigstore/core/dist/asn1/error.js         |   24 -
 .../@sigstore/core/dist/asn1/index.js         |   20 -
 .../@sigstore/core/dist/asn1/length.js        |   62 -
 .../@sigstore/core/dist/asn1/obj.js           |  152 --
 .../@sigstore/core/dist/asn1/parse.js         |  124 -
 .../@sigstore/core/dist/asn1/tag.js           |   86 -
 .../@sigstore/core/dist/crypto.js             |   60 -
 .../node_modules/@sigstore/core/dist/dsse.js  |   30 -
 .../@sigstore/core/dist/encoding.js           |   27 -
 .../node_modules/@sigstore/core/dist/index.js |   66 -
 .../node_modules/@sigstore/core/dist/json.js  |   60 -
 .../node_modules/@sigstore/core/dist/oid.js   |   14 -
 .../node_modules/@sigstore/core/dist/pem.js   |   43 -
 .../@sigstore/core/dist/rfc3161/error.js      |   21 -
 .../@sigstore/core/dist/rfc3161/index.js      |   20 -
 .../@sigstore/core/dist/rfc3161/timestamp.js  |  211 --
 .../@sigstore/core/dist/rfc3161/tstinfo.js    |   71 -
 .../@sigstore/core/dist/stream.js             |  115 -
 .../@sigstore/core/dist/x509/cert.js          |  241 --
 .../@sigstore/core/dist/x509/ext.js           |  145 --
 .../@sigstore/core/dist/x509/index.js         |   23 -
 .../@sigstore/core/dist/x509/sct.js           |  151 --
 .../node_modules/@sigstore/core/package.json  |   31 -
 .../@sigstore/sign/dist/bundler/base.js       |   50 -
 .../@sigstore/sign/dist/bundler/bundle.js     |   81 -
 .../@sigstore/sign/dist/bundler/dsse.js       |   46 -
 .../@sigstore/sign/dist/bundler/index.js      |    7 -
 .../@sigstore/sign/dist/bundler/message.js    |   30 -
 .../node_modules/@sigstore/sign/dist/error.js |   39 -
 .../@sigstore/sign/dist/external/error.js     |   26 -
 .../@sigstore/sign/dist/external/fetch.js     |   98 -
 .../@sigstore/sign/dist/external/fulcio.js    |   41 -
 .../@sigstore/sign/dist/external/rekor.js     |   80 -
 .../@sigstore/sign/dist/external/tsa.js       |   38 -
 .../@sigstore/sign/dist/identity/ci.js        |   73 -
 .../@sigstore/sign/dist/identity/index.js     |   20 -
 .../@sigstore/sign/dist/identity/provider.js  |    2 -
 .../node_modules/@sigstore/sign/dist/index.js |   17 -
 .../@sigstore/sign/dist/signer/fulcio/ca.js   |   59 -
 .../sign/dist/signer/fulcio/ephemeral.js      |   45 -
 .../sign/dist/signer/fulcio/index.js          |   87 -
 .../@sigstore/sign/dist/signer/index.js       |   22 -
 .../@sigstore/sign/dist/signer/signer.js      |   17 -
 .../@sigstore/sign/dist/types/fetch.js        |    2 -
 .../@sigstore/sign/dist/util/index.js         |   59 -
 .../@sigstore/sign/dist/util/oidc.js          |   30 -
 .../@sigstore/sign/dist/util/ua.js            |   32 -
 .../@sigstore/sign/dist/witness/index.js      |   24 -
 .../sign/dist/witness/tlog/client.js          |   61 -
 .../@sigstore/sign/dist/witness/tlog/entry.js |  140 --
 .../@sigstore/sign/dist/witness/tlog/index.js |   82 -
 .../@sigstore/sign/dist/witness/tsa/client.js |   46 -
 .../@sigstore/sign/dist/witness/tsa/index.js  |   44 -
 .../@sigstore/sign/dist/witness/witness.js    |    2 -
 .../node_modules/@sigstore/sign/package.json  |   46 -
 .../@sigstore/verify/dist/bundle/dsse.js      |   43 -
 .../@sigstore/verify/dist/bundle/index.js     |   57 -
 .../@sigstore/verify/dist/bundle/message.js   |   36 -
 .../@sigstore/verify/dist/error.js            |   32 -
 .../@sigstore/verify/dist/index.js            |   28 -
 .../@sigstore/verify/dist/key/certificate.js  |  212 --
 .../@sigstore/verify/dist/key/index.js        |   67 -
 .../@sigstore/verify/dist/key/sct.js          |   78 -
 .../@sigstore/verify/dist/policy.js           |   24 -
 .../@sigstore/verify/dist/shared.types.js     |    2 -
 .../verify/dist/timestamp/checkpoint.js       |  157 --
 .../@sigstore/verify/dist/timestamp/index.js  |   46 -
 .../@sigstore/verify/dist/timestamp/merkle.js |  104 -
 .../@sigstore/verify/dist/timestamp/set.js    |   60 -
 .../@sigstore/verify/dist/timestamp/tsa.js    |   63 -
 .../@sigstore/verify/dist/tlog/dsse.js        |   57 -
 .../verify/dist/tlog/hashedrekord.js          |   51 -
 .../@sigstore/verify/dist/tlog/index.js       |   47 -
 .../@sigstore/verify/dist/tlog/intoto.js      |   62 -
 .../@sigstore/verify/dist/trust/filter.js     |   23 -
 .../@sigstore/verify/dist/trust/index.js      |   86 -
 .../verify/dist/trust/trust.types.js          |    2 -
 .../@sigstore/verify/dist/verifier.js         |  143 --
 .../@sigstore/verify/package.json             |   36 -
 .../pacote/node_modules/sigstore/LICENSE      |  202 --
 .../node_modules/sigstore/dist/config.js      |  120 -
 .../node_modules/sigstore/dist/index.js       |   34 -
 .../node_modules/sigstore/dist/sigstore.js    |  112 -
 .../pacote/node_modules/sigstore/package.json |   47 -
 .../@sigstore/protobuf-specs}/LICENSE         |    0
 .../dist/__generated__/envelope.js            |   59 +
 .../dist/__generated__/events.js              |  174 ++
 .../google/api/field_behavior.js              |  141 ++
 .../dist/__generated__/google/protobuf/any.js |   35 +
 .../google/protobuf/descriptor.js             | 2042 +++++++++++++++++
 .../google/protobuf/timestamp.js              |   29 +
 .../dist/__generated__/rekor/v2/dsse.js       |   55 +
 .../dist/__generated__/rekor/v2/entry.js      |   81 +
 .../__generated__/rekor/v2/hashedrekord.js    |   56 +
 .../dist/__generated__/rekor/v2/verifier.js   |   74 +
 .../dist/__generated__/sigstore_bundle.js     |  103 +
 .../dist/__generated__/sigstore_common.js     |  596 +++++
 .../dist/__generated__/sigstore_rekor.js      |  137 ++
 .../dist/__generated__/sigstore_trustroot.js  |  284 +++
 .../__generated__/sigstore_verification.js    |  281 +++
 .../@sigstore/protobuf-specs/dist/index.js    |   37 +
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 +
 .../@sigstore/protobuf-specs/package.json     |   35 +
 .../node_modules/@sigstore/tuf/LICENSE        |    0
 .../@sigstore/tuf/dist/appdata.js             |    0
 .../node_modules/@sigstore/tuf/dist/client.js |    0
 .../node_modules/@sigstore/tuf/dist/error.js  |    0
 .../node_modules/@sigstore/tuf/dist/index.js  |    0
 .../node_modules/@sigstore/tuf/dist/target.js |    0
 .../node_modules/@sigstore/tuf/package.json   |    0
 .../node_modules/@sigstore/tuf/seeds.json     |    0
 .../node_modules/@tufjs/models/LICENSE        |    0
 .../node_modules/@tufjs/models/dist/base.js   |    0
 .../@tufjs/models/dist/delegations.js         |    0
 .../node_modules/@tufjs/models/dist/error.js  |    0
 .../node_modules/@tufjs/models/dist/file.js   |    0
 .../node_modules/@tufjs/models/dist/index.js  |    0
 .../node_modules/@tufjs/models/dist/key.js    |    0
 .../@tufjs/models/dist/metadata.js            |    0
 .../node_modules/@tufjs/models/dist/role.js   |    0
 .../node_modules/@tufjs/models/dist/root.js   |    0
 .../@tufjs/models/dist/signature.js           |    0
 .../@tufjs/models/dist/snapshot.js            |    0
 .../@tufjs/models/dist/targets.js             |    0
 .../@tufjs/models/dist/timestamp.js           |    0
 .../@tufjs/models/dist/utils/guard.js         |    0
 .../@tufjs/models/dist/utils/index.js         |    0
 .../@tufjs/models/dist/utils/key.js           |    0
 .../@tufjs/models/dist/utils/oid.js           |    0
 .../@tufjs/models/dist/utils/types.js         |    0
 .../@tufjs/models/dist/utils/verify.js        |    0
 .../node_modules/@tufjs/models/package.json   |    0
 .../node_modules/make-fetch-happen/LICENSE    |   16 +
 .../make-fetch-happen/lib/cache/entry.js      |  471 ++++
 .../make-fetch-happen/lib/cache/errors.js     |   11 +
 .../make-fetch-happen/lib/cache/index.js      |   49 +
 .../make-fetch-happen/lib/cache/key.js        |   17 +
 .../make-fetch-happen/lib/cache/policy.js     |  161 ++
 .../make-fetch-happen/lib/fetch.js            |  118 +
 .../make-fetch-happen/lib/index.js            |   41 +
 .../make-fetch-happen/lib/options.js          |   59 +
 .../make-fetch-happen/lib/pipeline.js         |   41 +
 .../make-fetch-happen/lib/remote.js           |  132 ++
 .../make-fetch-happen/package.json            |   74 +
 .../node_modules/negotiator/HISTORY.md        |  114 +
 .../sigstore/node_modules/negotiator/LICENSE  |   24 +
 .../sigstore/node_modules/negotiator/index.js |   83 +
 .../node_modules/negotiator/lib/charset.js    |  169 ++
 .../node_modules/negotiator/lib/encoding.js   |  205 ++
 .../node_modules/negotiator/lib/language.js   |  179 ++
 .../node_modules/negotiator/lib/mediaType.js  |  294 +++
 .../node_modules/negotiator/package.json      |   43 +
 .../node_modules/tuf-js/LICENSE               |    0
 .../node_modules/tuf-js/dist/config.js        |    0
 .../node_modules/tuf-js/dist/error.js         |    0
 .../node_modules/tuf-js/dist/fetcher.js       |    0
 .../node_modules/tuf-js/dist/index.js         |    0
 .../node_modules/tuf-js/dist/store.js         |    0
 .../node_modules/tuf-js/dist/updater.js       |    0
 .../node_modules/tuf-js/dist/utils/tmpfile.js |    0
 .../node_modules/tuf-js/dist/utils/url.js     |    0
 .../node_modules/tuf-js/package.json          |    0
 node_modules/sigstore/package.json            |   20 +-
 package-lock.json                             |  376 ++-
 workspaces/libnpmpublish/package.json         |    2 +-
 262 files changed, 15395 insertions(+), 6009 deletions(-)
 rename node_modules/{pacote/node_modules/@sigstore/bundle => @sigstore/bundle/node_modules/@sigstore/protobuf-specs}/LICENSE (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/index.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js (100%)
 rename node_modules/{pacote => @sigstore/bundle}/node_modules/@sigstore/protobuf-specs/package.json (100%)
 rename node_modules/{pacote/node_modules/@sigstore/core => @sigstore/sign/node_modules/@sigstore/protobuf-specs}/LICENSE (100%)
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 create mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/LICENSE (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/cache/entry.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/cache/errors.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/cache/index.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/cache/key.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/cache/policy.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/fetch.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/index.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/options.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/pipeline.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/lib/remote.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/make-fetch-happen/package.json (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/HISTORY.md (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/LICENSE (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/index.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/lib/charset.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/lib/encoding.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/lib/language.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/lib/mediaType.js (100%)
 rename node_modules/{pacote => @sigstore/sign}/node_modules/negotiator/package.json (100%)
 rename node_modules/{pacote => @sigstore/verify}/node_modules/@sigstore/protobuf-specs/LICENSE (100%)
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 create mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/bundle/package.json
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/json.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/oid.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/pem.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/stream.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/core/package.json
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/error.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/sign/package.json
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/error.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js
 delete mode 100644 node_modules/pacote/node_modules/@sigstore/verify/package.json
 delete mode 100644 node_modules/pacote/node_modules/sigstore/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/sigstore/dist/config.js
 delete mode 100644 node_modules/pacote/node_modules/sigstore/dist/index.js
 delete mode 100644 node_modules/pacote/node_modules/sigstore/dist/sigstore.js
 delete mode 100644 node_modules/pacote/node_modules/sigstore/package.json
 rename node_modules/{pacote/node_modules/@sigstore/sign => sigstore/node_modules/@sigstore/protobuf-specs}/LICENSE (100%)
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 create mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/LICENSE (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/dist/appdata.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/dist/client.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/dist/error.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/dist/index.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/dist/target.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/package.json (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@sigstore/tuf/seeds.json (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/LICENSE (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/base.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/delegations.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/error.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/file.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/index.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/key.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/metadata.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/role.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/root.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/signature.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/snapshot.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/targets.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/timestamp.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/utils/guard.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/utils/index.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/utils/key.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/utils/oid.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/utils/types.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/dist/utils/verify.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/@tufjs/models/package.json (100%)
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
 create mode 100644 node_modules/sigstore/node_modules/make-fetch-happen/package.json
 create mode 100644 node_modules/sigstore/node_modules/negotiator/HISTORY.md
 create mode 100644 node_modules/sigstore/node_modules/negotiator/LICENSE
 create mode 100644 node_modules/sigstore/node_modules/negotiator/index.js
 create mode 100644 node_modules/sigstore/node_modules/negotiator/lib/charset.js
 create mode 100644 node_modules/sigstore/node_modules/negotiator/lib/encoding.js
 create mode 100644 node_modules/sigstore/node_modules/negotiator/lib/language.js
 create mode 100644 node_modules/sigstore/node_modules/negotiator/lib/mediaType.js
 create mode 100644 node_modules/sigstore/node_modules/negotiator/package.json
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/LICENSE (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/config.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/error.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/fetcher.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/index.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/store.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/updater.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/utils/tmpfile.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/dist/utils/url.js (100%)
 rename node_modules/{pacote => sigstore}/node_modules/tuf-js/package.json (100%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index c843e97b50bc2..2875bd6e9071d 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -57,11 +57,28 @@
 !/@sigstore/
 /@sigstore/*
 !/@sigstore/bundle
+!/@sigstore/bundle/node_modules/
+/@sigstore/bundle/node_modules/*
+!/@sigstore/bundle/node_modules/@sigstore/
+/@sigstore/bundle/node_modules/@sigstore/*
+!/@sigstore/bundle/node_modules/@sigstore/protobuf-specs
 !/@sigstore/core
 !/@sigstore/protobuf-specs
 !/@sigstore/sign
+!/@sigstore/sign/node_modules/
+/@sigstore/sign/node_modules/*
+!/@sigstore/sign/node_modules/@sigstore/
+/@sigstore/sign/node_modules/@sigstore/*
+!/@sigstore/sign/node_modules/@sigstore/protobuf-specs
+!/@sigstore/sign/node_modules/make-fetch-happen
+!/@sigstore/sign/node_modules/negotiator
 !/@sigstore/tuf
 !/@sigstore/verify
+!/@sigstore/verify/node_modules/
+/@sigstore/verify/node_modules/*
+!/@sigstore/verify/node_modules/@sigstore/
+/@sigstore/verify/node_modules/@sigstore/*
+!/@sigstore/verify/node_modules/@sigstore/protobuf-specs
 !/@tufjs/
 /@tufjs/*
 !/@tufjs/canonical-json
@@ -228,16 +245,13 @@
 !/pacote/node_modules/chownr
 !/pacote/node_modules/hosted-git-info
 !/pacote/node_modules/lru-cache
-!/pacote/node_modules/make-fetch-happen
 !/pacote/node_modules/minizlib
 !/pacote/node_modules/mkdirp
-!/pacote/node_modules/negotiator
 !/pacote/node_modules/npm-package-arg
 !/pacote/node_modules/npm-pick-manifest
 !/pacote/node_modules/npm-registry-fetch
 !/pacote/node_modules/sigstore
 !/pacote/node_modules/tar
-!/pacote/node_modules/tuf-js
 !/pacote/node_modules/yallist
 !/parse-conflict-json
 !/path-key
@@ -259,6 +273,18 @@
 !/shebang-regex
 !/signal-exit
 !/sigstore
+!/sigstore/node_modules/
+/sigstore/node_modules/*
+!/sigstore/node_modules/@sigstore/
+/sigstore/node_modules/@sigstore/*
+!/sigstore/node_modules/@sigstore/protobuf-specs
+!/sigstore/node_modules/@sigstore/tuf
+!/sigstore/node_modules/@tufjs/
+/sigstore/node_modules/@tufjs/*
+!/sigstore/node_modules/@tufjs/models
+!/sigstore/node_modules/make-fetch-happen
+!/sigstore/node_modules/negotiator
+!/sigstore/node_modules/tuf-js
 !/smart-buffer
 !/socks-proxy-agent
 !/socks
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/LICENSE b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/bundle/LICENSE
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/index.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/package.json
rename to node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
diff --git a/node_modules/@sigstore/bundle/package.json b/node_modules/@sigstore/bundle/package.json
index 61b062ae2b212..03291b2159b79 100644
--- a/node_modules/@sigstore/bundle/package.json
+++ b/node_modules/@sigstore/bundle/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/bundle",
-  "version": "3.1.0",
+  "version": "4.0.0",
   "description": "Sigstore bundle type",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -27,9 +27,9 @@
     "provenance": true
   },
   "dependencies": {
-    "@sigstore/protobuf-specs": "^0.4.0"
+    "@sigstore/protobuf-specs": "^0.5.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/node_modules/@sigstore/core/dist/index.js b/node_modules/@sigstore/core/dist/index.js
index ac35e86a8df7d..49859d84db756 100644
--- a/node_modules/@sigstore/core/dist/index.js
+++ b/node_modules/@sigstore/core/dist/index.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0;
 /*
diff --git a/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
index 3e61fc1a4e169..982fb5e6126e8 100644
--- a/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
+++ b/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.RFC3161Timestamp = void 0;
 /*
diff --git a/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
index dc8e4fb339383..d5001c42c108f 100644
--- a/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
+++ b/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.TSTInfo = void 0;
 const crypto = __importStar(require("../crypto"));
diff --git a/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/@sigstore/core/dist/x509/cert.js
index 72ea8e0738bc8..83aee7d1215a4 100644
--- a/node_modules/@sigstore/core/dist/x509/cert.js
+++ b/node_modules/@sigstore/core/dist/x509/cert.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
 /*
@@ -136,6 +146,7 @@ class X509Certificate {
     get isCA() {
         const ca = this.extBasicConstraints?.isCA || false;
         // If the KeyUsage extension is present, keyCertSign must be set
+        /* istanbul ignore else */
         if (this.extKeyUsage) {
             return ca && this.extKeyUsage.keyCertSign;
         }
diff --git a/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/@sigstore/core/dist/x509/sct.js
index 1603059c0d1ac..55885e3b30742 100644
--- a/node_modules/@sigstore/core/dist/x509/sct.js
+++ b/node_modules/@sigstore/core/dist/x509/sct.js
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.SignedCertificateTimestamp = void 0;
 /*
diff --git a/node_modules/@sigstore/core/package.json b/node_modules/@sigstore/core/package.json
index af5dd281ac90e..7d2f8d5de3f7a 100644
--- a/node_modules/@sigstore/core/package.json
+++ b/node_modules/@sigstore/core/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/core",
-  "version": "2.0.0",
+  "version": "3.0.0",
   "description": "Base library for Sigstore",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -26,6 +26,6 @@
     "provenance": true
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/node_modules/pacote/node_modules/@sigstore/core/LICENSE b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/core/LICENSE
rename to node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..5c4f37bfaf3fb
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,59 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: envelope.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payload.length !== 0) {
+            obj.payload = base64FromBytes(message.payload);
+        }
+        if (message.payloadType !== "") {
+            obj.payloadType = message.payloadType;
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.sig.length !== 0) {
+            obj.sig = base64FromBytes(message.sig);
+        }
+        if (message.keyid !== "") {
+            obj.keyid = message.keyid;
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..6138fef5672fc
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,174 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: events.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? globalThis.String(object.id) : "",
+            source: isSet(object.source) ? globalThis.String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: globalThis.String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id !== "") {
+            obj.id = message.id;
+        }
+        if (message.source !== "") {
+            obj.source = message.source;
+        }
+        if (message.specVersion !== "") {
+            obj.specVersion = message.specVersion;
+        }
+        if (message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.attributes) {
+            const entries = Object.entries(message.attributes);
+            if (entries.length > 0) {
+                obj.attributes = {};
+                entries.forEach(([k, v]) => {
+                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+                });
+            }
+        }
+        if (message.data?.$case === "binaryData") {
+            obj.binaryData = base64FromBytes(message.data.binaryData);
+        }
+        else if (message.data?.$case === "textData") {
+            obj.textData = message.data.textData;
+        }
+        else if (message.data?.$case === "protoData") {
+            obj.protoData = any_1.Any.toJSON(message.data.protoData);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? globalThis.String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.key !== "") {
+            obj.key = message.key;
+        }
+        if (message.value !== undefined) {
+            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.attr?.$case === "ceBoolean") {
+            obj.ceBoolean = message.attr.ceBoolean;
+        }
+        else if (message.attr?.$case === "ceInteger") {
+            obj.ceInteger = Math.round(message.attr.ceInteger);
+        }
+        else if (message.attr?.$case === "ceString") {
+            obj.ceString = message.attr.ceString;
+        }
+        else if (message.attr?.$case === "ceBytes") {
+            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
+        }
+        else if (message.attr?.$case === "ceUri") {
+            obj.ceUri = message.attr.ceUri;
+        }
+        else if (message.attr?.$case === "ceUriRef") {
+            obj.ceUriRef = message.attr.ceUriRef;
+        }
+        else if (message.attr?.$case === "ceTimestamp") {
+            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
+        }
+        return obj;
+    },
+};
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return {
+            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events?.length) {
+            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..b4d9ccc781c2f
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,141 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/api/field_behavior.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FieldBehavior = void 0;
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+/* eslint-disable */
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary  order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+    /**
+     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
+     * This indicates that if the user provides the empty value in a request,
+     * a non-empty value will be returned. The user will not be aware of what
+     * non-empty value to expect.
+     */
+    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
+    /**
+     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
+     * google.api.resource) is used in the resource name to uniquely identify the
+     * resource. For AIP-compliant APIs, this should only be applied to the
+     * `name` field on the resource.
+     *
+     * This behavior should not be applied to references to other resources within
+     * the message.
+     *
+     * The identifier field of resources often have different field behavior
+     * depending on the request it is embedded in (e.g. for Create methods name
+     * is optional and unused, while for Update methods it is required). Instead
+     * of method-specific annotations, only `IDENTIFIER` is required.
+     */
+    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
+})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        case 7:
+        case "NON_EMPTY_DEFAULT":
+            return FieldBehavior.NON_EMPTY_DEFAULT;
+        case 8:
+        case "IDENTIFIER":
+            return FieldBehavior.IDENTIFIER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        case FieldBehavior.NON_EMPTY_DEFAULT:
+            return "NON_EMPTY_DEFAULT";
+        case FieldBehavior.IDENTIFIER:
+            return "IDENTIFIER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..f0c8aab773e4c
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,35 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/any.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.typeUrl !== "") {
+            obj.typeUrl = message.typeUrl;
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d6f8ddddf799d
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,2042 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/descriptor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
+exports.GeneratedCodeInfo_Annotation = void 0;
+exports.editionFromJSON = editionFromJSON;
+exports.editionToJSON = editionToJSON;
+exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
+exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
+exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
+exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
+exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
+exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
+exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
+exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
+exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
+exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
+exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
+exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
+exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
+exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
+exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
+exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
+exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
+exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
+exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
+exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
+/* eslint-disable */
+/** The full set of known editions. */
+var Edition;
+(function (Edition) {
+    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
+    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
+    /**
+     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
+     * was first introduced.  This is effectively an "infinite past".
+     */
+    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
+    /**
+     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
+     * distinct editions.  These can't be used to specify the edition of proto
+     * files, but feature definitions must supply proto2/proto3 defaults for
+     * backwards compatibility.
+     */
+    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
+    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
+    /**
+     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
+     * should not be depended on, but they will always be time-ordered for easy
+     * comparison.
+     */
+    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
+    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
+    /**
+     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
+     * used or relied on outside of tests.
+     */
+    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
+    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
+    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
+    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
+    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
+    /**
+     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
+     * ever be used by plugins that can expect to never require any changes to
+     * support a new edition.
+     */
+    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
+})(Edition || (exports.Edition = Edition = {}));
+function editionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "EDITION_UNKNOWN":
+            return Edition.EDITION_UNKNOWN;
+        case 900:
+        case "EDITION_LEGACY":
+            return Edition.EDITION_LEGACY;
+        case 998:
+        case "EDITION_PROTO2":
+            return Edition.EDITION_PROTO2;
+        case 999:
+        case "EDITION_PROTO3":
+            return Edition.EDITION_PROTO3;
+        case 1000:
+        case "EDITION_2023":
+            return Edition.EDITION_2023;
+        case 1001:
+        case "EDITION_2024":
+            return Edition.EDITION_2024;
+        case 1:
+        case "EDITION_1_TEST_ONLY":
+            return Edition.EDITION_1_TEST_ONLY;
+        case 2:
+        case "EDITION_2_TEST_ONLY":
+            return Edition.EDITION_2_TEST_ONLY;
+        case 99997:
+        case "EDITION_99997_TEST_ONLY":
+            return Edition.EDITION_99997_TEST_ONLY;
+        case 99998:
+        case "EDITION_99998_TEST_ONLY":
+            return Edition.EDITION_99998_TEST_ONLY;
+        case 99999:
+        case "EDITION_99999_TEST_ONLY":
+            return Edition.EDITION_99999_TEST_ONLY;
+        case 2147483647:
+        case "EDITION_MAX":
+            return Edition.EDITION_MAX;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+function editionToJSON(object) {
+    switch (object) {
+        case Edition.EDITION_UNKNOWN:
+            return "EDITION_UNKNOWN";
+        case Edition.EDITION_LEGACY:
+            return "EDITION_LEGACY";
+        case Edition.EDITION_PROTO2:
+            return "EDITION_PROTO2";
+        case Edition.EDITION_PROTO3:
+            return "EDITION_PROTO3";
+        case Edition.EDITION_2023:
+            return "EDITION_2023";
+        case Edition.EDITION_2024:
+            return "EDITION_2024";
+        case Edition.EDITION_1_TEST_ONLY:
+            return "EDITION_1_TEST_ONLY";
+        case Edition.EDITION_2_TEST_ONLY:
+            return "EDITION_2_TEST_ONLY";
+        case Edition.EDITION_99997_TEST_ONLY:
+            return "EDITION_99997_TEST_ONLY";
+        case Edition.EDITION_99998_TEST_ONLY:
+            return "EDITION_99998_TEST_ONLY";
+        case Edition.EDITION_99999_TEST_ONLY:
+            return "EDITION_99999_TEST_ONLY";
+        case Edition.EDITION_MAX:
+            return "EDITION_MAX";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
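+// Illustrative sketch (not part of the generated output): editionFromJSON
+// accepts either the numeric enum value or its JSON string name, and
+// editionToJSON maps back to the string form; unknown inputs throw. The
+// sample values below are assumptions for illustration only.
+//
+//   const e = editionFromJSON("EDITION_2023"); // -> Edition.EDITION_2023 (1000)
+//   editionToJSON(e);                          // -> "EDITION_2023"
+//   editionFromJSON(12345);                    // throws "Unrecognized enum value ..."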
+/** The verification state of the extension range. */
+var ExtensionRangeOptions_VerificationState;
+(function (ExtensionRangeOptions_VerificationState) {
+    /** DECLARATION - All the extensions of the range must be declared. */
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
+})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
+function extensionRangeOptions_VerificationStateFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "DECLARATION":
+            return ExtensionRangeOptions_VerificationState.DECLARATION;
+        case 1:
+        case "UNVERIFIED":
+            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+function extensionRangeOptions_VerificationStateToJSON(object) {
+    switch (object) {
+        case ExtensionRangeOptions_VerificationState.DECLARATION:
+            return "DECLARATION";
+        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
+            return "UNVERIFIED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported after proto2. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.  In Editions, the group wire format
+     * can be enabled via the `message_encoding` feature.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+    /**
+     * LABEL_REQUIRED - The required label is only allowed in proto2.  In proto3 and Editions
+     * it's explicitly prohibited.  In Editions, the `field_presence` feature
+     * can be used to get this behavior.
+     */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - Use ReflectionOps to implement these methods. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    /**
+     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
+     * "bytes". It indicates that in C++, the data should be stored in a Cord
+     * instead of a string.  For very large strings, this may reduce memory
+     * fragmentation. It may also allow better performance when parsing from a
+     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+     * alias the original buffer.
+     */
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
+var FieldOptions_OptionRetention;
+(function (FieldOptions_OptionRetention) {
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
+})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
+function fieldOptions_OptionRetentionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "RETENTION_UNKNOWN":
+            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
+        case 1:
+        case "RETENTION_RUNTIME":
+            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
+        case 2:
+        case "RETENTION_SOURCE":
+            return FieldOptions_OptionRetention.RETENTION_SOURCE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+function fieldOptions_OptionRetentionToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
+            return "RETENTION_UNKNOWN";
+        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
+            return "RETENTION_RUNTIME";
+        case FieldOptions_OptionRetention.RETENTION_SOURCE:
+            return "RETENTION_SOURCE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+/**
+ * This indicates the types of entities that the field may apply to when used
+ * as an option. If it is unset, then the field may be freely used as an
+ * option on any kind of entity.
+ */
+var FieldOptions_OptionTargetType;
+(function (FieldOptions_OptionTargetType) {
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
+})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
+function fieldOptions_OptionTargetTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "TARGET_TYPE_UNKNOWN":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
+        case 1:
+        case "TARGET_TYPE_FILE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
+        case 2:
+        case "TARGET_TYPE_EXTENSION_RANGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
+        case 3:
+        case "TARGET_TYPE_MESSAGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
+        case 4:
+        case "TARGET_TYPE_FIELD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
+        case 5:
+        case "TARGET_TYPE_ONEOF":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
+        case 6:
+        case "TARGET_TYPE_ENUM":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
+        case 7:
+        case "TARGET_TYPE_ENUM_ENTRY":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
+        case 8:
+        case "TARGET_TYPE_SERVICE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
+        case 9:
+        case "TARGET_TYPE_METHOD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+function fieldOptions_OptionTargetTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
+            return "TARGET_TYPE_UNKNOWN";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
+            return "TARGET_TYPE_FILE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
+            return "TARGET_TYPE_EXTENSION_RANGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
+            return "TARGET_TYPE_MESSAGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
+            return "TARGET_TYPE_FIELD";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
+            return "TARGET_TYPE_ONEOF";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
+            return "TARGET_TYPE_ENUM";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
+            return "TARGET_TYPE_ENUM_ENTRY";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
+            return "TARGET_TYPE_SERVICE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
+            return "TARGET_TYPE_METHOD";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+var FeatureSet_FieldPresence;
+(function (FeatureSet_FieldPresence) {
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
+})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
+function featureSet_FieldPresenceFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_PRESENCE_UNKNOWN":
+            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
+        case 1:
+        case "EXPLICIT":
+            return FeatureSet_FieldPresence.EXPLICIT;
+        case 2:
+        case "IMPLICIT":
+            return FeatureSet_FieldPresence.IMPLICIT;
+        case 3:
+        case "LEGACY_REQUIRED":
+            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+function featureSet_FieldPresenceToJSON(object) {
+    switch (object) {
+        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
+            return "FIELD_PRESENCE_UNKNOWN";
+        case FeatureSet_FieldPresence.EXPLICIT:
+            return "EXPLICIT";
+        case FeatureSet_FieldPresence.IMPLICIT:
+            return "IMPLICIT";
+        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
+            return "LEGACY_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+var FeatureSet_EnumType;
+(function (FeatureSet_EnumType) {
+    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
+    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
+    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
+})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
+function featureSet_EnumTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENUM_TYPE_UNKNOWN":
+            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
+        case 1:
+        case "OPEN":
+            return FeatureSet_EnumType.OPEN;
+        case 2:
+        case "CLOSED":
+            return FeatureSet_EnumType.CLOSED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+function featureSet_EnumTypeToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
+            return "ENUM_TYPE_UNKNOWN";
+        case FeatureSet_EnumType.OPEN:
+            return "OPEN";
+        case FeatureSet_EnumType.CLOSED:
+            return "CLOSED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+var FeatureSet_RepeatedFieldEncoding;
+(function (FeatureSet_RepeatedFieldEncoding) {
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
+})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
+function featureSet_RepeatedFieldEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "REPEATED_FIELD_ENCODING_UNKNOWN":
+            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
+        case 1:
+        case "PACKED":
+            return FeatureSet_RepeatedFieldEncoding.PACKED;
+        case 2:
+        case "EXPANDED":
+            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+function featureSet_RepeatedFieldEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
+            return "REPEATED_FIELD_ENCODING_UNKNOWN";
+        case FeatureSet_RepeatedFieldEncoding.PACKED:
+            return "PACKED";
+        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
+            return "EXPANDED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+var FeatureSet_Utf8Validation;
+(function (FeatureSet_Utf8Validation) {
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
+})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
+function featureSet_Utf8ValidationFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "UTF8_VALIDATION_UNKNOWN":
+            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
+        case 2:
+        case "VERIFY":
+            return FeatureSet_Utf8Validation.VERIFY;
+        case 3:
+        case "NONE":
+            return FeatureSet_Utf8Validation.NONE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+function featureSet_Utf8ValidationToJSON(object) {
+    switch (object) {
+        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
+            return "UTF8_VALIDATION_UNKNOWN";
+        case FeatureSet_Utf8Validation.VERIFY:
+            return "VERIFY";
+        case FeatureSet_Utf8Validation.NONE:
+            return "NONE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+var FeatureSet_MessageEncoding;
+(function (FeatureSet_MessageEncoding) {
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
+})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
+function featureSet_MessageEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "MESSAGE_ENCODING_UNKNOWN":
+            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
+        case 1:
+        case "LENGTH_PREFIXED":
+            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
+        case 2:
+        case "DELIMITED":
+            return FeatureSet_MessageEncoding.DELIMITED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+function featureSet_MessageEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
+            return "MESSAGE_ENCODING_UNKNOWN";
+        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
+            return "LENGTH_PREFIXED";
+        case FeatureSet_MessageEncoding.DELIMITED:
+            return "DELIMITED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+var FeatureSet_JsonFormat;
+(function (FeatureSet_JsonFormat) {
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
+})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
+function featureSet_JsonFormatFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JSON_FORMAT_UNKNOWN":
+            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
+        case 1:
+        case "ALLOW":
+            return FeatureSet_JsonFormat.ALLOW;
+        case 2:
+        case "LEGACY_BEST_EFFORT":
+            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+function featureSet_JsonFormatToJSON(object) {
+    switch (object) {
+        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
+            return "JSON_FORMAT_UNKNOWN";
+        case FeatureSet_JsonFormat.ALLOW:
+            return "ALLOW";
+        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
+            return "LEGACY_BEST_EFFORT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+var FeatureSet_EnforceNamingStyle;
+(function (FeatureSet_EnforceNamingStyle) {
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
+})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
+function featureSet_EnforceNamingStyleFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENFORCE_NAMING_STYLE_UNKNOWN":
+            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
+        case 1:
+        case "STYLE2024":
+            return FeatureSet_EnforceNamingStyle.STYLE2024;
+        case 2:
+        case "STYLE_LEGACY":
+            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+function featureSet_EnforceNamingStyleToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
+            return "ENFORCE_NAMING_STYLE_UNKNOWN";
+        case FeatureSet_EnforceNamingStyle.STYLE2024:
+            return "STYLE2024";
+        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
+            return "STYLE_LEGACY";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+/**
+ * Represents the identified object's effect on the element in the original
+ * .proto file.
+ */
+var GeneratedCodeInfo_Annotation_Semantic;
+(function (GeneratedCodeInfo_Annotation_Semantic) {
+    /** NONE - There is no effect or the effect is indescribable. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
+    /** SET - The element is set or otherwise mutated. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
+    /** ALIAS - An alias to the element is returned. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
+})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
+function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "NONE":
+            return GeneratedCodeInfo_Annotation_Semantic.NONE;
+        case 1:
+        case "SET":
+            return GeneratedCodeInfo_Annotation_Semantic.SET;
+        case 2:
+        case "ALIAS":
+            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+function generatedCodeInfo_Annotation_SemanticToJSON(object) {
+    switch (object) {
+        case GeneratedCodeInfo_Annotation_Semantic.NONE:
+            return "NONE";
+        case GeneratedCodeInfo_Annotation_Semantic.SET:
+            return "SET";
+        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
+            return "ALIAS";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return {
+            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file?.length) {
+            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
+        }
+        return obj;
+    },
+};
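+// Illustrative sketch (not part of the generated output): the fromJSON/toJSON
+// pair round-trips the proto3 JSON form of a FileDescriptorSet; fields left at
+// their defaults are dropped again by toJSON. The JSON literal below is a
+// hypothetical minimal input.
+//
+//   const set = exports.FileDescriptorSet.fromJSON({ file: [{ name: "a.proto" }] });
+//   exports.FileDescriptorSet.toJSON(set); // -> { file: [{ name: "a.proto" }] }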
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            package: isSet(object.package) ? globalThis.String(object.package) : "",
+            dependency: globalThis.Array.isArray(object?.dependency)
+                ? object.dependency.map((e) => globalThis.String(e))
+                : [],
+            publicDependency: globalThis.Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => globalThis.Number(e))
+                : [],
+            weakDependency: globalThis.Array.isArray(object?.weakDependency)
+                ? object.weakDependency.map((e) => globalThis.Number(e))
+                : [],
+            messageType: globalThis.Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            service: globalThis.Array.isArray(object?.service)
+                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.package !== undefined && message.package !== "") {
+            obj.package = message.package;
+        }
+        if (message.dependency?.length) {
+            obj.dependency = message.dependency;
+        }
+        if (message.publicDependency?.length) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        if (message.weakDependency?.length) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        if (message.messageType?.length) {
+            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.service?.length) {
+            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FileOptions.toJSON(message.options);
+        }
+        if (message.sourceCodeInfo !== undefined) {
+            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
+        }
+        if (message.syntax !== undefined && message.syntax !== "") {
+            obj.syntax = message.syntax;
+        }
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        return obj;
+    },
+};
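+// Illustrative sketch (not part of the generated output): nested messages are
+// delegated to their own codecs, so sub-messages come back already converted.
+// The JSON literal below is a hypothetical minimal input.
+//
+//   const fd = exports.FileDescriptorProto.fromJSON({
+//     name: "example.proto",
+//     messageType: [{ name: "Example" }],
+//   });
+//   // fd.messageType[0] was produced by exports.DescriptorProto.fromJSON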
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            field: globalThis.Array.isArray(object?.field)
+                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: globalThis.Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            extensionRange: globalThis.Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.field?.length) {
+            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.nestedType?.length) {
+            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.extensionRange?.length) {
+            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
+        }
+        if (message.oneofDecl?.length) {
+            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MessageOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+            declaration: globalThis.Array.isArray(object?.declaration)
+                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            verification: isSet(object.verification)
+                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
+                : 1,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        if (message.declaration?.length) {
+            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.verification !== undefined && message.verification !== 1) {
+            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
+        }
+        return obj;
+    },
+};
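+// Illustrative note (not part of the generated output): fields whose proto
+// default is non-zero keep that default here too, so `verification` falls back
+// to 1 (UNVERIFIED) in fromJSON and is omitted by toJSON while it still equals 1.
+//
+//   const opts = exports.ExtensionRangeOptions.fromJSON({});
+//   // opts.verification === 1 (UNVERIFIED); toJSON(opts) -> {}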
+exports.ExtensionRangeOptions_Declaration = {
+    fromJSON(object) {
+        return {
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
+            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.fullName !== undefined && message.fullName !== "") {
+            obj.fullName = message.fullName;
+        }
+        if (message.type !== undefined && message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.reserved !== undefined && message.reserved !== false) {
+            obj.reserved = message.reserved;
+        }
+        if (message.repeated !== undefined && message.repeated !== false) {
+            obj.repeated = message.repeated;
+        }
+        return obj;
+    },
+};
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.label !== undefined && message.label !== 1) {
+            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
+        }
+        if (message.type !== undefined && message.type !== 1) {
+            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
+        }
+        if (message.typeName !== undefined && message.typeName !== "") {
+            obj.typeName = message.typeName;
+        }
+        if (message.extendee !== undefined && message.extendee !== "") {
+            obj.extendee = message.extendee;
+        }
+        if (message.defaultValue !== undefined && message.defaultValue !== "") {
+            obj.defaultValue = message.defaultValue;
+        }
+        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
+            obj.oneofIndex = Math.round(message.oneofIndex);
+        }
+        if (message.jsonName !== undefined && message.jsonName !== "") {
+            obj.jsonName = message.jsonName;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FieldOptions.toJSON(message.options);
+        }
+        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
+            obj.proto3Optional = message.proto3Optional;
+        }
+        return obj;
+    },
+};
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.OneofOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            value: globalThis.Array.isArray(object?.value)
+                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.value?.length) {
+            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumValueOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            method: globalThis.Array.isArray(object?.method)
+                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.method?.length) {
+            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ServiceOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.inputType !== undefined && message.inputType !== "") {
+            obj.inputType = message.inputType;
+        }
+        if (message.outputType !== undefined && message.outputType !== "") {
+            obj.outputType = message.outputType;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MethodOptions.toJSON(message.options);
+        }
+        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
+            obj.clientStreaming = message.clientStreaming;
+        }
+        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
+            obj.serverStreaming = message.serverStreaming;
+        }
+        return obj;
+    },
+};
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.javaPackage !== undefined && message.javaPackage !== "") {
+            obj.javaPackage = message.javaPackage;
+        }
+        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
+            obj.javaOuterClassname = message.javaOuterClassname;
+        }
+        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
+            obj.javaMultipleFiles = message.javaMultipleFiles;
+        }
+        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
+            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
+        }
+        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
+            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
+        }
+        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
+            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
+        }
+        if (message.goPackage !== undefined && message.goPackage !== "") {
+            obj.goPackage = message.goPackage;
+        }
+        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
+            obj.ccGenericServices = message.ccGenericServices;
+        }
+        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
+            obj.javaGenericServices = message.javaGenericServices;
+        }
+        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
+            obj.pyGenericServices = message.pyGenericServices;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
+            obj.ccEnableArenas = message.ccEnableArenas;
+        }
+        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
+            obj.objcClassPrefix = message.objcClassPrefix;
+        }
+        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
+            obj.csharpNamespace = message.csharpNamespace;
+        }
+        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
+            obj.swiftPrefix = message.swiftPrefix;
+        }
+        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
+            obj.phpClassPrefix = message.phpClassPrefix;
+        }
+        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
+            obj.phpNamespace = message.phpNamespace;
+        }
+        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
+            obj.phpMetadataNamespace = message.phpMetadataNamespace;
+        }
+        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
+            obj.rubyPackage = message.rubyPackage;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat)
+                ? globalThis.Boolean(object.messageSetWireFormat)
+                : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
+            obj.messageSetWireFormat = message.messageSetWireFormat;
+        }
+        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
+            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.mapEntry !== undefined && message.mapEntry !== false) {
+            obj.mapEntry = message.mapEntry;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
+            targets: globalThis.Array.isArray(object?.targets)
+                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
+                : [],
+            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
+                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.ctype !== undefined && message.ctype !== 0) {
+            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
+        }
+        if (message.packed !== undefined && message.packed !== false) {
+            obj.packed = message.packed;
+        }
+        if (message.jstype !== undefined && message.jstype !== 0) {
+            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
+        }
+        if (message.lazy !== undefined && message.lazy !== false) {
+            obj.lazy = message.lazy;
+        }
+        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
+            obj.unverifiedLazy = message.unverifiedLazy;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.weak !== undefined && message.weak !== false) {
+            obj.weak = message.weak;
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.retention !== undefined && message.retention !== 0) {
+            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
+        }
+        if (message.targets?.length) {
+            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
+        }
+        if (message.editionDefaults?.length) {
+            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_EditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            value: isSet(object.value) ? globalThis.String(object.value) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.value !== undefined && message.value !== "") {
+            obj.value = message.value;
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_FeatureSupport = {
+    fromJSON(object) {
+        return {
+            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
+            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
+            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
+            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
+            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
+        }
+        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
+            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
+        }
+        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
+            obj.deprecationWarning = message.deprecationWarning;
+        }
+        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
+            obj.editionRemoved = editionToJSON(message.editionRemoved);
+        }
+        return obj;
+    },
+};
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.allowAlias !== undefined && message.allowAlias !== false) {
+            obj.allowAlias = message.allowAlias;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
+            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: globalThis.Array.isArray(object?.name)
+                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
+                : [],
+            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name?.length) {
+            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
+        }
+        if (message.identifierValue !== undefined && message.identifierValue !== "") {
+            obj.identifierValue = message.identifierValue;
+        }
+        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
+            obj.positiveIntValue = message.positiveIntValue;
+        }
+        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
+            obj.negativeIntValue = message.negativeIntValue;
+        }
+        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
+            obj.doubleValue = message.doubleValue;
+        }
+        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
+            obj.stringValue = base64FromBytes(message.stringValue);
+        }
+        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
+            obj.aggregateValue = message.aggregateValue;
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.namePart !== "") {
+            obj.namePart = message.namePart;
+        }
+        if (message.isExtension !== false) {
+            obj.isExtension = message.isExtension;
+        }
+        return obj;
+    },
+};
+exports.FeatureSet = {
+    fromJSON(object) {
+        return {
+            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
+            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
+            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
+                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
+                : 0,
+            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
+            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
+            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
+            enforceNamingStyle: isSet(object.enforceNamingStyle)
+                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
+                : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
+            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
+        }
+        if (message.enumType !== undefined && message.enumType !== 0) {
+            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
+        }
+        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
+            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
+        }
+        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
+            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
+        }
+        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
+            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
+        }
+        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
+            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
+        }
+        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
+            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults = {
+    fromJSON(object) {
+        return {
+            defaults: globalThis.Array.isArray(object?.defaults)
+                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
+                : [],
+            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
+            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.defaults?.length) {
+            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
+        }
+        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
+            obj.minimumEdition = editionToJSON(message.minimumEdition);
+        }
+        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
+            obj.maximumEdition = editionToJSON(message.maximumEdition);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults_FeatureSetEditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            overridableFeatures: isSet(object.overridableFeatures)
+                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
+                : undefined,
+            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.overridableFeatures !== undefined) {
+            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
+        }
+        if (message.fixedFeatures !== undefined) {
+            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: globalThis.Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location?.length) {
+            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
+            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.span?.length) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        if (message.leadingComments !== undefined && message.leadingComments !== "") {
+            obj.leadingComments = message.leadingComments;
+        }
+        if (message.trailingComments !== undefined && message.trailingComments !== "") {
+            obj.trailingComments = message.trailingComments;
+        }
+        if (message.leadingDetachedComments?.length) {
+            obj.leadingDetachedComments = message.leadingDetachedComments;
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: globalThis.Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation?.length) {
+            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.sourceFile !== undefined && message.sourceFile !== "") {
+            obj.sourceFile = message.sourceFile;
+        }
+        if (message.begin !== undefined && message.begin !== 0) {
+            obj.begin = Math.round(message.begin);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.semantic !== undefined && message.semantic !== 0) {
+            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..9d24cbba10de9
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,29 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/timestamp.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.seconds !== "0") {
+            obj.seconds = message.seconds;
+        }
+        if (message.nanos !== 0) {
+            obj.nanos = Math.round(message.nanos);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
new file mode 100644
index 0000000000000..abc766bed3b88
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
@@ -0,0 +1,55 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/dsse.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
+/* eslint-disable */
+const envelope_1 = require("../../envelope");
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.DSSERequestV002 = {
+    fromJSON(object) {
+        return {
+            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
+            verifiers: globalThis.Array.isArray(object?.verifiers)
+                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== undefined) {
+            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
+        }
+        if (message.verifiers?.length) {
+            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.DSSELogEntryV002 = {
+    fromJSON(object) {
+        return {
+            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payloadHash !== undefined) {
+            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
new file mode 100644
index 0000000000000..c5eccb10e0a68
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
@@ -0,0 +1,81 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/entry.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
+/* eslint-disable */
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+exports.Entry = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
+            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.apiVersion !== "") {
+            obj.apiVersion = message.apiVersion;
+        }
+        if (message.spec !== undefined) {
+            obj.spec = exports.Spec.toJSON(message.spec);
+        }
+        return obj;
+    },
+};
+exports.Spec = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordV002)
+                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
+                : isSet(object.dsseV002)
+                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordV002") {
+            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
+        }
+        else if (message.spec?.$case === "dsseV002") {
+            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
+        }
+        return obj;
+    },
+};
+exports.CreateEntryRequest = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordRequestV002)
+                ? {
+                    $case: "hashedRekordRequestV002",
+                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
+                }
+                : isSet(object.dsseRequestV002)
+                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordRequestV002") {
+            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
+        }
+        else if (message.spec?.$case === "dsseRequestV002") {
+            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
new file mode 100644
index 0000000000000..d3fd1af2483d1
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
@@ -0,0 +1,56 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/hashedrekord.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.HashedRekordRequestV002 = {
+    fromJSON(object) {
+        return {
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+exports.HashedRekordLogEntryV002 = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data !== undefined) {
+            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
new file mode 100644
index 0000000000000..c437d5053a3cb
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
@@ -0,0 +1,74 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/verifier.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Verifier = exports.PublicKey = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+exports.PublicKey = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.Verifier = {
+    fromJSON(object) {
+        return {
+            verifier: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
+                : isSet(object.x509Certificate)
+                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
+                    : undefined,
+            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.verifier?.$case === "publicKey") {
+            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
+        }
+        else if (message.verifier?.$case === "x509Certificate") {
+            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
+            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content.length !== 0) {
+            obj.content = base64FromBytes(message.content);
+        }
+        if (message.verifier !== undefined) {
+            obj.verifier = exports.Verifier.toJSON(message.verifier);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..aed636f00e7cf
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,103 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_bundle.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps?.length) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : isSet(object.certificate)
+                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
+                        : undefined,
+            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content?.$case === "publicKey") {
+            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
+        }
+        else if (message.content?.$case === "x509CertificateChain") {
+            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
+        }
+        else if (message.content?.$case === "certificate") {
+            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
+        }
+        if (message.tlogEntries?.length) {
+            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
+        }
+        if (message.timestampVerificationData !== undefined) {
+            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
+        }
+        return obj;
+    },
+};
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.verificationMaterial !== undefined) {
+            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
+        }
+        if (message.content?.$case === "messageSignature") {
+            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
+        }
+        else if (message.content?.$case === "dsseEnvelope") {
+            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..b900516ed3b55
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,596 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_common.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD not be used; the primary reason for its inclusion is to
+ * force any proto JSON serialization to emit the used hash algorithm, as the
+ * default option is to *omit* the default value of an enum (which is the
+ * first value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
+    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
+    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
+    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
+})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        case 2:
+        case "SHA2_384":
+            return HashAlgorithm.SHA2_384;
+        case 3:
+        case "SHA2_512":
+            return HashAlgorithm.SHA2_512;
+        case 4:
+        case "SHA3_256":
+            return HashAlgorithm.SHA3_256;
+        case 5:
+        case "SHA3_384":
+            return HashAlgorithm.SHA3_384;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        case HashAlgorithm.SHA2_384:
+            return "SHA2_384";
+        case HashAlgorithm.SHA2_512:
+            return "SHA2_512";
+        case HashAlgorithm.SHA3_256:
+            return "SHA3_256";
+        case HashAlgorithm.SHA3_384:
+            return "SHA3_384";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+/**
+ * Details of a specific public key, capturing the key encoding method
+ * and signature algorithm.
+ *
+ * PublicKeyDetails captures the public key/hash algorithm combinations
+ * recommended in the Sigstore ecosystem.
+ *
+ * This is modelled as a linear set as we want to provide a small number of
+ * opinionated options instead of allowing every possible permutation.
+ *
+ * Any changes to this enum MUST be reflected in the algorithm registry.
+ *
+ * See: 
+ *
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519, the valid permutations are listed as a linear set instead of a
+ * Cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /**
+     * PKCS1_RSA_PKCS1V5 - RSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /**
+     * PKCS1_RSA_PSS - See RFC8017
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
+    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
+    /**
+     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+    /**
+     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
+     * were/are being used by most Sigstore client implementations.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
+    /**
+     * LMS_SHA256 - LMS and LM-OTS
+     *
+     * These algorithms are deprecated and should not be used.
+     * Keys and signatures MAY be used by private Sigstore
+     * deployments, but will not be supported by the public
+     * good instance.
+     *
+     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
+     * Using them correctly requires discretion and careful consideration
+     * to ensure that individual secret keys are not used more than once.
+     * In addition, LM-OTS is a single-use scheme, meaning that it
+     * MUST NOT be used for more than one signature per LM-OTS key.
+     * If you cannot maintain these invariants, you MUST NOT use these
+     * schemes.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
+    /**
+     * ML_DSA_65 - ML-DSA
+     *
+     * These ML_DSA_65 and ML_DSA_87 algorithms are the pure variants that
+     * take data to sign rather than the prehash variants (HashML-DSA), which
+     * take digests.  While considered quantum-resistant, their usage
+     * involves tradeoffs in that signatures and keys are much larger, and
+     * this makes deployments more costly.
+     *
+     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
+     * In the future they MAY be used by private Sigstore deployments, but
+     * they are not yet fully functional.  This warning will be removed when
+     * these algorithms are widely supported by Sigstore clients and servers,
+     * but care should still be taken for production environments.
+     */
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
+})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 9:
+        case "PKIX_RSA_PKCS1V15_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
+        case 10:
+        case "PKIX_RSA_PKCS1V15_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
+        case 11:
+        case "PKIX_RSA_PKCS1V15_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
+        case 16:
+        case "PKIX_RSA_PSS_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
+        case 17:
+        case "PKIX_RSA_PSS_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
+        case 18:
+        case "PKIX_RSA_PSS_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 12:
+        case "PKIX_ECDSA_P384_SHA_384":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
+        case 13:
+        case "PKIX_ECDSA_P521_SHA_512":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        case 8:
+        case "PKIX_ED25519_PH":
+            return PublicKeyDetails.PKIX_ED25519_PH;
+        case 19:
+        case "PKIX_ECDSA_P384_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
+        case 20:
+        case "PKIX_ECDSA_P521_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
+        case 14:
+        case "LMS_SHA256":
+            return PublicKeyDetails.LMS_SHA256;
+        case 15:
+        case "LMOTS_SHA256":
+            return PublicKeyDetails.LMOTS_SHA256;
+        case 21:
+        case "ML_DSA_65":
+            return PublicKeyDetails.ML_DSA_65;
+        case 22:
+        case "ML_DSA_87":
+            return PublicKeyDetails.ML_DSA_87;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
+            return "PKIX_RSA_PKCS1V15_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
+            return "PKIX_RSA_PKCS1V15_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
+            return "PKIX_RSA_PKCS1V15_4096_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
+            return "PKIX_RSA_PSS_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
+            return "PKIX_RSA_PSS_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
+            return "PKIX_RSA_PSS_4096_SHA256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
+            return "PKIX_ECDSA_P384_SHA_384";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
+            return "PKIX_ECDSA_P521_SHA_512";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        case PublicKeyDetails.PKIX_ED25519_PH:
+            return "PKIX_ED25519_PH";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
+            return "PKIX_ECDSA_P384_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
+            return "PKIX_ECDSA_P521_SHA_256";
+        case PublicKeyDetails.LMS_SHA256:
+            return "LMS_SHA256";
+        case PublicKeyDetails.LMOTS_SHA256:
+            return "LMOTS_SHA256";
+        case PublicKeyDetails.ML_DSA_65:
+            return "ML_DSA_65";
+        case PublicKeyDetails.ML_DSA_87:
+            return "ML_DSA_87";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.algorithm !== 0) {
+            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
+        }
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        return obj;
+    },
+};
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageDigest !== undefined) {
+            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
+        }
+        if (message.signature.length !== 0) {
+            obj.signature = base64FromBytes(message.signature);
+        }
+        return obj;
+    },
+};
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.keyId.length !== 0) {
+            obj.keyId = base64FromBytes(message.keyId);
+        }
+        return obj;
+    },
+};
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedTimestamp.length !== 0) {
+            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
+        }
+        return obj;
+    },
+};
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes !== undefined) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = exports.TimeRange.toJSON(message.validFor);
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.hint !== "") {
+            obj.hint = message.hint;
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id?.length) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.oid !== undefined) {
+            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
+            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.organization !== "") {
+            obj.organization = message.organization;
+        }
+        if (message.commonName !== "") {
+            obj.commonName = message.commonName;
+        }
+        return obj;
+    },
+};
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: globalThis.String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.type !== 0) {
+            obj.type = subjectAlternativeNameTypeToJSON(message.type);
+        }
+        if (message.identity?.$case === "regexp") {
+            obj.regexp = message.identity.regexp;
+        }
+        else if (message.identity?.$case === "value") {
+            obj.value = message.identity.value;
+        }
+        return obj;
+    },
+};
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: globalThis.Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates?.length) {
+            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined) {
+            obj.start = message.start.toISOString();
+        }
+        if (message.end !== undefined) {
+            obj.end = message.end.toISOString();
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..fd8ea8384664d
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,137 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_rekor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            version: isSet(object.version) ? globalThis.String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.version !== "") {
+            obj.version = message.version;
+        }
+        return obj;
+    },
+};
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== "") {
+            obj.envelope = message.envelope;
+        }
+        return obj;
+    },
+};
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
+            hashes: globalThis.Array.isArray(object?.hashes)
+                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
+                : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.rootHash.length !== 0) {
+            obj.rootHash = base64FromBytes(message.rootHash);
+        }
+        if (message.treeSize !== "0") {
+            obj.treeSize = message.treeSize;
+        }
+        if (message.hashes?.length) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
+        }
+        if (message.checkpoint !== undefined) {
+            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
+        }
+        return obj;
+    },
+};
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedEntryTimestamp.length !== 0) {
+            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
+        }
+        return obj;
+    },
+};
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.kindVersion !== undefined) {
+            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
+        }
+        if (message.integratedTime !== "0") {
+            obj.integratedTime = message.integratedTime;
+        }
+        if (message.inclusionPromise !== undefined) {
+            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
+        }
+        if (message.inclusionProof !== undefined) {
+            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
+        }
+        if (message.canonicalizedBody.length !== 0) {
+            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..1b5492fb1a77e
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,284 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_trustroot.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
+exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
+exports.serviceSelectorToJSON = serviceSelectorToJSON;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+/**
+ * ServiceSelector specifies how a client SHOULD select a set of
+ * Services to connect to. A client SHOULD throw an error if
+ * the value is SERVICE_SELECTOR_UNDEFINED.
+ */
+var ServiceSelector;
+(function (ServiceSelector) {
+    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
+    /**
+     * ALL - Clients SHOULD select all Services based on supported API version
+     * and validity window.
+     */
+    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
+    /**
+     * ANY - Clients SHOULD select one Service based on supported API version
+     * and validity window. It is up to the client implementation to
+     * decide how to select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
+    /**
+     * EXACT - Clients SHOULD select a specific number of Services based on
+     * supported API version and validity window, using the provided
+     * `count`. It is up to the client implementation to decide how to
+     * select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
+})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
+function serviceSelectorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SERVICE_SELECTOR_UNDEFINED":
+            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
+        case 1:
+        case "ALL":
+            return ServiceSelector.ALL;
+        case 2:
+        case "ANY":
+            return ServiceSelector.ANY;
+        case 3:
+        case "EXACT":
+            return ServiceSelector.EXACT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+function serviceSelectorToJSON(object) {
+    switch (object) {
+        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
+            return "SERVICE_SELECTOR_UNDEFINED";
+        case ServiceSelector.ALL:
+            return "ALL";
+        case ServiceSelector.ANY:
+            return "ANY";
+        case ServiceSelector.EXACT:
+            return "EXACT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.baseUrl !== "") {
+            obj.baseUrl = message.baseUrl;
+        }
+        if (message.hashAlgorithm !== 0) {
+            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
+        }
+        if (message.publicKey !== undefined) {
+            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.checkpointKeyId !== undefined) {
+            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.subject !== undefined) {
+            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
+        }
+        if (message.uri !== "") {
+            obj.uri = message.uri;
+        }
+        if (message.certChain !== undefined) {
+            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            tlogs: globalThis.Array.isArray(object?.tlogs)
+                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: globalThis.Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.tlogs?.length) {
+            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.certificateAuthorities?.length) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        if (message.ctlogs?.length) {
+            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.timestampAuthorities?.length) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SigningConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
+                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
+                : [],
+            rekorTlogConfig: isSet(object.rekorTlogConfig)
+                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
+                : undefined,
+            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.caUrls?.length) {
+            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.oidcUrls?.length) {
+            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogUrls?.length) {
+            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogConfig !== undefined) {
+            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
+        }
+        if (message.tsaUrls?.length) {
+            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.tsaConfig !== undefined) {
+            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
+        }
+        return obj;
+    },
+};
+exports.Service = {
+    fromJSON(object) {
+        return {
+            url: isSet(object.url) ? globalThis.String(object.url) : "",
+            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.url !== "") {
+            obj.url = message.url;
+        }
+        if (message.majorApiVersion !== 0) {
+            obj.majorApiVersion = Math.round(message.majorApiVersion);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.ServiceConfiguration = {
+    fromJSON(object) {
+        return {
+            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
+            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.selector !== 0) {
+            obj.selector = serviceSelectorToJSON(message.selector);
+        }
+        if (message.count !== 0) {
+            obj.count = Math.round(message.count);
+        }
+        return obj;
+    },
+};
+exports.ClientTrustConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
+            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.trustedRoot !== undefined) {
+            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
+        }
+        if (message.signingConfig !== undefined) {
+            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..876fe9cc1db1d
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,281 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_verification.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: globalThis.Array.isArray(object?.oids)
+                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.issuer !== "") {
+            obj.issuer = message.issuer;
+        }
+        if (message.san !== undefined) {
+            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
+        }
+        if (message.oids?.length) {
+            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: globalThis.Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities?.length) {
+            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: globalThis.Array.isArray(object?.publicKeys)
+                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys?.length) {
+            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+            integratedTsOptions: isSet(object.integratedTsOptions)
+                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
+                : undefined,
+            observerOptions: isSet(object.observerOptions)
+                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signers?.$case === "certificateIdentities") {
+            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
+        }
+        else if (message.signers?.$case === "publicKeys") {
+            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
+        }
+        if (message.tlogOptions !== undefined) {
+            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
+        }
+        if (message.ctlogOptions !== undefined) {
+            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
+        }
+        if (message.tsaOptions !== undefined) {
+            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
+        }
+        if (message.integratedTsOptions !== undefined) {
+            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
+        }
+        if (message.observerOptions !== undefined) {
+            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? globalThis.Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.performOnlineVerification !== false) {
+            obj.performOnlineVerification = message.performOnlineVerification;
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : isSet(object.artifactDigest)
+                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data?.$case === "artifactUri") {
+            obj.artifactUri = message.data.artifactUri;
+        }
+        else if (message.data?.$case === "artifact") {
+            obj.artifact = base64FromBytes(message.data.artifact);
+        }
+        else if (message.data?.$case === "artifactDigest") {
+            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
+        }
+        return obj;
+    },
+};
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.artifactTrustRoot !== undefined) {
+            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
+        }
+        if (message.artifactVerificationOptions !== undefined) {
+            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
+        }
+        if (message.bundle !== undefined) {
+            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
+        }
+        if (message.artifact !== undefined) {
+            obj.artifact = exports.Artifact.toJSON(message.artifact);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
new file mode 100644
index 0000000000000..10745efc39a1f
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
@@ -0,0 +1,35 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2025 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
+__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
+__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
+__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..f87b2540fbf98
--- /dev/null
+++ b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.5.0",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "exports": {
+    ".": "./dist/index.js",
+    "./rekor/v2": "./dist/rekor/v2/index.js"
+  },
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node18": "^18.2.4",
+    "@types/node": "^18.14.0",
+    "typescript": "^5.7.2"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/LICENSE b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/LICENSE
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/entry.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/errors.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/index.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/key.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/cache/policy.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/fetch.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/index.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/index.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/options.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/options.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/pipeline.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/lib/remote.js
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
diff --git a/node_modules/pacote/node_modules/make-fetch-happen/package.json b/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/make-fetch-happen/package.json
rename to node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
diff --git a/node_modules/pacote/node_modules/negotiator/HISTORY.md b/node_modules/@sigstore/sign/node_modules/negotiator/HISTORY.md
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/HISTORY.md
rename to node_modules/@sigstore/sign/node_modules/negotiator/HISTORY.md
diff --git a/node_modules/pacote/node_modules/negotiator/LICENSE b/node_modules/@sigstore/sign/node_modules/negotiator/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/LICENSE
rename to node_modules/@sigstore/sign/node_modules/negotiator/LICENSE
diff --git a/node_modules/pacote/node_modules/negotiator/index.js b/node_modules/@sigstore/sign/node_modules/negotiator/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/index.js
rename to node_modules/@sigstore/sign/node_modules/negotiator/index.js
diff --git a/node_modules/pacote/node_modules/negotiator/lib/charset.js b/node_modules/@sigstore/sign/node_modules/negotiator/lib/charset.js
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/lib/charset.js
rename to node_modules/@sigstore/sign/node_modules/negotiator/lib/charset.js
diff --git a/node_modules/pacote/node_modules/negotiator/lib/encoding.js b/node_modules/@sigstore/sign/node_modules/negotiator/lib/encoding.js
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/lib/encoding.js
rename to node_modules/@sigstore/sign/node_modules/negotiator/lib/encoding.js
diff --git a/node_modules/pacote/node_modules/negotiator/lib/language.js b/node_modules/@sigstore/sign/node_modules/negotiator/lib/language.js
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/lib/language.js
rename to node_modules/@sigstore/sign/node_modules/negotiator/lib/language.js
diff --git a/node_modules/pacote/node_modules/negotiator/lib/mediaType.js b/node_modules/@sigstore/sign/node_modules/negotiator/lib/mediaType.js
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/lib/mediaType.js
rename to node_modules/@sigstore/sign/node_modules/negotiator/lib/mediaType.js
diff --git a/node_modules/pacote/node_modules/negotiator/package.json b/node_modules/@sigstore/sign/node_modules/negotiator/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/negotiator/package.json
rename to node_modules/@sigstore/sign/node_modules/negotiator/package.json
diff --git a/node_modules/@sigstore/sign/package.json b/node_modules/@sigstore/sign/package.json
index b1d60ea1fdce6..4059997ced341 100644
--- a/node_modules/@sigstore/sign/package.json
+++ b/node_modules/@sigstore/sign/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/sign",
-  "version": "3.1.0",
+  "version": "4.0.0",
   "description": "Sigstore signing library",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -27,20 +27,20 @@
   },
   "devDependencies": {
     "@sigstore/jest": "^0.0.0",
-    "@sigstore/mock": "^0.10.0",
-    "@sigstore/rekor-types": "^3.0.0",
+    "@sigstore/mock": "^0.11.0",
+    "@sigstore/rekor-types": "^4.0.0",
     "@types/make-fetch-happen": "^10.0.4",
     "@types/promise-retry": "^1.1.6"
   },
   "dependencies": {
-    "@sigstore/bundle": "^3.1.0",
-    "@sigstore/core": "^2.0.0",
-    "@sigstore/protobuf-specs": "^0.4.0",
-    "make-fetch-happen": "^14.0.2",
+    "@sigstore/bundle": "^4.0.0",
+    "@sigstore/core": "^3.0.0",
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "make-fetch-happen": "^15.0.0",
     "proc-log": "^5.0.0",
     "promise-retry": "^2.0.1"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/@sigstore/verify/dist/key/certificate.js
index e9a66b123455e..35ad947f0bafc 100644
--- a/node_modules/@sigstore/verify/dist/key/certificate.js
+++ b/node_modules/@sigstore/verify/dist/key/certificate.js
@@ -123,6 +123,7 @@ class CertificateChainVerifier {
         // or issuer/subject. Potential issuers are added to the result array.
         this.localCerts.forEach((possibleIssuer) => {
             if (keyIdentifier) {
+                /* istanbul ignore else */
                 if (possibleIssuer.extSubjectKeyID) {
                     if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) {
                         issuers.push(possibleIssuer);
diff --git a/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/@sigstore/verify/dist/verifier.js
index 829727cd1d40a..6a9d11a3b6f8f 100644
--- a/node_modules/@sigstore/verify/dist/verifier.js
+++ b/node_modules/@sigstore/verify/dist/verifier.js
@@ -117,10 +117,12 @@ class Verifier {
     }
     verifyPolicy(policy, identity) {
         // Check the subject alternative name of the signer matches the policy
+        /* istanbul ignore else */
         if (policy.subjectAlternativeName) {
             (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName);
         }
         // Check that the extensions of the signer match the policy
+        /* istanbul ignore else */
         if (policy.extensions) {
             (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions);
         }
diff --git a/node_modules/pacote/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/protobuf-specs/LICENSE
rename to node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/LICENSE
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..5c4f37bfaf3fb
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,59 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: envelope.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payload.length !== 0) {
+            obj.payload = base64FromBytes(message.payload);
+        }
+        if (message.payloadType !== "") {
+            obj.payloadType = message.payloadType;
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.sig.length !== 0) {
+            obj.sig = base64FromBytes(message.sig);
+        }
+        if (message.keyid !== "") {
+            obj.keyid = message.keyid;
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..6138fef5672fc
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,174 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: events.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? globalThis.String(object.id) : "",
+            source: isSet(object.source) ? globalThis.String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: globalThis.String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id !== "") {
+            obj.id = message.id;
+        }
+        if (message.source !== "") {
+            obj.source = message.source;
+        }
+        if (message.specVersion !== "") {
+            obj.specVersion = message.specVersion;
+        }
+        if (message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.attributes) {
+            const entries = Object.entries(message.attributes);
+            if (entries.length > 0) {
+                obj.attributes = {};
+                entries.forEach(([k, v]) => {
+                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+                });
+            }
+        }
+        if (message.data?.$case === "binaryData") {
+            obj.binaryData = base64FromBytes(message.data.binaryData);
+        }
+        else if (message.data?.$case === "textData") {
+            obj.textData = message.data.textData;
+        }
+        else if (message.data?.$case === "protoData") {
+            obj.protoData = any_1.Any.toJSON(message.data.protoData);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? globalThis.String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.key !== "") {
+            obj.key = message.key;
+        }
+        if (message.value !== undefined) {
+            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.attr?.$case === "ceBoolean") {
+            obj.ceBoolean = message.attr.ceBoolean;
+        }
+        else if (message.attr?.$case === "ceInteger") {
+            obj.ceInteger = Math.round(message.attr.ceInteger);
+        }
+        else if (message.attr?.$case === "ceString") {
+            obj.ceString = message.attr.ceString;
+        }
+        else if (message.attr?.$case === "ceBytes") {
+            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
+        }
+        else if (message.attr?.$case === "ceUri") {
+            obj.ceUri = message.attr.ceUri;
+        }
+        else if (message.attr?.$case === "ceUriRef") {
+            obj.ceUriRef = message.attr.ceUriRef;
+        }
+        else if (message.attr?.$case === "ceTimestamp") {
+            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
+        }
+        return obj;
+    },
+};
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return {
+            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events?.length) {
+            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..b4d9ccc781c2f
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,141 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/api/field_behavior.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FieldBehavior = void 0;
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+/* eslint-disable */
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary  order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+    /**
+     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
+     * This indicates that if the user provides the empty value in a request,
+     * a non-empty value will be returned. The user will not be aware of what
+     * non-empty value to expect.
+     */
+    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
+    /**
+     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
+     * google.api.resource) is used in the resource name to uniquely identify the
+     * resource. For AIP-compliant APIs, this should only be applied to the
+     * `name` field on the resource.
+     *
+     * This behavior should not be applied to references to other resources within
+     * the message.
+     *
+     * The identifier field of resources often have different field behavior
+     * depending on the request it is embedded in (e.g. for Create methods name
+     * is optional and unused, while for Update methods it is required). Instead
+     * of method-specific annotations, only `IDENTIFIER` is required.
+     */
+    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
+})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        case 7:
+        case "NON_EMPTY_DEFAULT":
+            return FieldBehavior.NON_EMPTY_DEFAULT;
+        case 8:
+        case "IDENTIFIER":
+            return FieldBehavior.IDENTIFIER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        case FieldBehavior.NON_EMPTY_DEFAULT:
+            return "NON_EMPTY_DEFAULT";
+        case FieldBehavior.IDENTIFIER:
+            return "IDENTIFIER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..f0c8aab773e4c
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,35 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/any.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.typeUrl !== "") {
+            obj.typeUrl = message.typeUrl;
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d6f8ddddf799d
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,2042 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/descriptor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
+exports.GeneratedCodeInfo_Annotation = void 0;
+exports.editionFromJSON = editionFromJSON;
+exports.editionToJSON = editionToJSON;
+exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
+exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
+exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
+exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
+exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
+exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
+exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
+exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
+exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
+exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
+exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
+exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
+exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
+exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
+exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
+exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
+exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
+exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
+exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
+exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
+/* eslint-disable */
+/** The full set of known editions. */
+var Edition;
+(function (Edition) {
+    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
+    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
+    /**
+     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
+     * was first introduced.  This is effectively an "infinite past".
+     */
+    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
+    /**
+     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
+     * distinct editions.  These can't be used to specify the edition of proto
+     * files, but feature definitions must supply proto2/proto3 defaults for
+     * backwards compatibility.
+     */
+    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
+    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
+    /**
+     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
+     * should not be depended on, but they will always be time-ordered for easy
+     * comparison.
+     */
+    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
+    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
+    /**
+     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
+     * used or relied on outside of tests.
+     */
+    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
+    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
+    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
+    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
+    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
+    /**
+     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
+     * ever be used by plugins that can expect to never require any changes to
+     * support a new edition.
+     */
+    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
+})(Edition || (exports.Edition = Edition = {}));
+function editionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "EDITION_UNKNOWN":
+            return Edition.EDITION_UNKNOWN;
+        case 900:
+        case "EDITION_LEGACY":
+            return Edition.EDITION_LEGACY;
+        case 998:
+        case "EDITION_PROTO2":
+            return Edition.EDITION_PROTO2;
+        case 999:
+        case "EDITION_PROTO3":
+            return Edition.EDITION_PROTO3;
+        case 1000:
+        case "EDITION_2023":
+            return Edition.EDITION_2023;
+        case 1001:
+        case "EDITION_2024":
+            return Edition.EDITION_2024;
+        case 1:
+        case "EDITION_1_TEST_ONLY":
+            return Edition.EDITION_1_TEST_ONLY;
+        case 2:
+        case "EDITION_2_TEST_ONLY":
+            return Edition.EDITION_2_TEST_ONLY;
+        case 99997:
+        case "EDITION_99997_TEST_ONLY":
+            return Edition.EDITION_99997_TEST_ONLY;
+        case 99998:
+        case "EDITION_99998_TEST_ONLY":
+            return Edition.EDITION_99998_TEST_ONLY;
+        case 99999:
+        case "EDITION_99999_TEST_ONLY":
+            return Edition.EDITION_99999_TEST_ONLY;
+        case 2147483647:
+        case "EDITION_MAX":
+            return Edition.EDITION_MAX;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+function editionToJSON(object) {
+    switch (object) {
+        case Edition.EDITION_UNKNOWN:
+            return "EDITION_UNKNOWN";
+        case Edition.EDITION_LEGACY:
+            return "EDITION_LEGACY";
+        case Edition.EDITION_PROTO2:
+            return "EDITION_PROTO2";
+        case Edition.EDITION_PROTO3:
+            return "EDITION_PROTO3";
+        case Edition.EDITION_2023:
+            return "EDITION_2023";
+        case Edition.EDITION_2024:
+            return "EDITION_2024";
+        case Edition.EDITION_1_TEST_ONLY:
+            return "EDITION_1_TEST_ONLY";
+        case Edition.EDITION_2_TEST_ONLY:
+            return "EDITION_2_TEST_ONLY";
+        case Edition.EDITION_99997_TEST_ONLY:
+            return "EDITION_99997_TEST_ONLY";
+        case Edition.EDITION_99998_TEST_ONLY:
+            return "EDITION_99998_TEST_ONLY";
+        case Edition.EDITION_99999_TEST_ONLY:
+            return "EDITION_99999_TEST_ONLY";
+        case Edition.EDITION_MAX:
+            return "EDITION_MAX";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+/** The verification state of the extension range. */
+var ExtensionRangeOptions_VerificationState;
+(function (ExtensionRangeOptions_VerificationState) {
+    /** DECLARATION - All the extensions of the range must be declared. */
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
+})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
+function extensionRangeOptions_VerificationStateFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "DECLARATION":
+            return ExtensionRangeOptions_VerificationState.DECLARATION;
+        case 1:
+        case "UNVERIFIED":
+            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+function extensionRangeOptions_VerificationStateToJSON(object) {
+    switch (object) {
+        case ExtensionRangeOptions_VerificationState.DECLARATION:
+            return "DECLARATION";
+        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
+            return "UNVERIFIED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported after google.protobuf. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.  In Editions, the group wire format
+     * can be enabled via the `message_encoding` feature.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+    /**
+     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
+     * it's explicitly prohibited.  In Editions, the `field_presence` feature
+     * can be used to get this behavior.
+     */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    /**
+     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
+     * "bytes". It indicates that in C++, the data should be stored in a Cord
+     * instead of a string.  For very large strings, this may reduce memory
+     * fragmentation. It may also allow better performance when parsing from a
+     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+     * alias the original buffer.
+     */
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
+var FieldOptions_OptionRetention;
+(function (FieldOptions_OptionRetention) {
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
+})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
+function fieldOptions_OptionRetentionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "RETENTION_UNKNOWN":
+            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
+        case 1:
+        case "RETENTION_RUNTIME":
+            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
+        case 2:
+        case "RETENTION_SOURCE":
+            return FieldOptions_OptionRetention.RETENTION_SOURCE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+function fieldOptions_OptionRetentionToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
+            return "RETENTION_UNKNOWN";
+        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
+            return "RETENTION_RUNTIME";
+        case FieldOptions_OptionRetention.RETENTION_SOURCE:
+            return "RETENTION_SOURCE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+/**
+ * This indicates the types of entities that the field may apply to when used
+ * as an option. If it is unset, then the field may be freely used as an
+ * option on any kind of entity.
+ */
+var FieldOptions_OptionTargetType;
+(function (FieldOptions_OptionTargetType) {
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
+})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
+function fieldOptions_OptionTargetTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "TARGET_TYPE_UNKNOWN":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
+        case 1:
+        case "TARGET_TYPE_FILE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
+        case 2:
+        case "TARGET_TYPE_EXTENSION_RANGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
+        case 3:
+        case "TARGET_TYPE_MESSAGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
+        case 4:
+        case "TARGET_TYPE_FIELD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
+        case 5:
+        case "TARGET_TYPE_ONEOF":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
+        case 6:
+        case "TARGET_TYPE_ENUM":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
+        case 7:
+        case "TARGET_TYPE_ENUM_ENTRY":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
+        case 8:
+        case "TARGET_TYPE_SERVICE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
+        case 9:
+        case "TARGET_TYPE_METHOD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+function fieldOptions_OptionTargetTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
+            return "TARGET_TYPE_UNKNOWN";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
+            return "TARGET_TYPE_FILE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
+            return "TARGET_TYPE_EXTENSION_RANGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
+            return "TARGET_TYPE_MESSAGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
+            return "TARGET_TYPE_FIELD";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
+            return "TARGET_TYPE_ONEOF";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
+            return "TARGET_TYPE_ENUM";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
+            return "TARGET_TYPE_ENUM_ENTRY";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
+            return "TARGET_TYPE_SERVICE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
+            return "TARGET_TYPE_METHOD";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP-based RPC implementations may choose the GET verb for safe
+ * methods, and the PUT verb for idempotent methods, instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+var FeatureSet_FieldPresence;
+(function (FeatureSet_FieldPresence) {
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
+})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
+function featureSet_FieldPresenceFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_PRESENCE_UNKNOWN":
+            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
+        case 1:
+        case "EXPLICIT":
+            return FeatureSet_FieldPresence.EXPLICIT;
+        case 2:
+        case "IMPLICIT":
+            return FeatureSet_FieldPresence.IMPLICIT;
+        case 3:
+        case "LEGACY_REQUIRED":
+            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+function featureSet_FieldPresenceToJSON(object) {
+    switch (object) {
+        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
+            return "FIELD_PRESENCE_UNKNOWN";
+        case FeatureSet_FieldPresence.EXPLICIT:
+            return "EXPLICIT";
+        case FeatureSet_FieldPresence.IMPLICIT:
+            return "IMPLICIT";
+        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
+            return "LEGACY_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+var FeatureSet_EnumType;
+(function (FeatureSet_EnumType) {
+    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
+    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
+    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
+})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
+function featureSet_EnumTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENUM_TYPE_UNKNOWN":
+            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
+        case 1:
+        case "OPEN":
+            return FeatureSet_EnumType.OPEN;
+        case 2:
+        case "CLOSED":
+            return FeatureSet_EnumType.CLOSED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+function featureSet_EnumTypeToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
+            return "ENUM_TYPE_UNKNOWN";
+        case FeatureSet_EnumType.OPEN:
+            return "OPEN";
+        case FeatureSet_EnumType.CLOSED:
+            return "CLOSED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+var FeatureSet_RepeatedFieldEncoding;
+(function (FeatureSet_RepeatedFieldEncoding) {
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
+})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
+function featureSet_RepeatedFieldEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "REPEATED_FIELD_ENCODING_UNKNOWN":
+            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
+        case 1:
+        case "PACKED":
+            return FeatureSet_RepeatedFieldEncoding.PACKED;
+        case 2:
+        case "EXPANDED":
+            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+function featureSet_RepeatedFieldEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
+            return "REPEATED_FIELD_ENCODING_UNKNOWN";
+        case FeatureSet_RepeatedFieldEncoding.PACKED:
+            return "PACKED";
+        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
+            return "EXPANDED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+var FeatureSet_Utf8Validation;
+(function (FeatureSet_Utf8Validation) {
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
+})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
+function featureSet_Utf8ValidationFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "UTF8_VALIDATION_UNKNOWN":
+            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
+        case 2:
+        case "VERIFY":
+            return FeatureSet_Utf8Validation.VERIFY;
+        case 3:
+        case "NONE":
+            return FeatureSet_Utf8Validation.NONE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+function featureSet_Utf8ValidationToJSON(object) {
+    switch (object) {
+        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
+            return "UTF8_VALIDATION_UNKNOWN";
+        case FeatureSet_Utf8Validation.VERIFY:
+            return "VERIFY";
+        case FeatureSet_Utf8Validation.NONE:
+            return "NONE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+var FeatureSet_MessageEncoding;
+(function (FeatureSet_MessageEncoding) {
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
+})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
+function featureSet_MessageEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "MESSAGE_ENCODING_UNKNOWN":
+            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
+        case 1:
+        case "LENGTH_PREFIXED":
+            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
+        case 2:
+        case "DELIMITED":
+            return FeatureSet_MessageEncoding.DELIMITED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+function featureSet_MessageEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
+            return "MESSAGE_ENCODING_UNKNOWN";
+        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
+            return "LENGTH_PREFIXED";
+        case FeatureSet_MessageEncoding.DELIMITED:
+            return "DELIMITED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+var FeatureSet_JsonFormat;
+(function (FeatureSet_JsonFormat) {
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
+})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
+function featureSet_JsonFormatFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JSON_FORMAT_UNKNOWN":
+            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
+        case 1:
+        case "ALLOW":
+            return FeatureSet_JsonFormat.ALLOW;
+        case 2:
+        case "LEGACY_BEST_EFFORT":
+            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+function featureSet_JsonFormatToJSON(object) {
+    switch (object) {
+        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
+            return "JSON_FORMAT_UNKNOWN";
+        case FeatureSet_JsonFormat.ALLOW:
+            return "ALLOW";
+        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
+            return "LEGACY_BEST_EFFORT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+var FeatureSet_EnforceNamingStyle;
+(function (FeatureSet_EnforceNamingStyle) {
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
+})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
+function featureSet_EnforceNamingStyleFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENFORCE_NAMING_STYLE_UNKNOWN":
+            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
+        case 1:
+        case "STYLE2024":
+            return FeatureSet_EnforceNamingStyle.STYLE2024;
+        case 2:
+        case "STYLE_LEGACY":
+            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+function featureSet_EnforceNamingStyleToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
+            return "ENFORCE_NAMING_STYLE_UNKNOWN";
+        case FeatureSet_EnforceNamingStyle.STYLE2024:
+            return "STYLE2024";
+        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
+            return "STYLE_LEGACY";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+/**
+ * Represents the identified object's effect on the element in the original
+ * .proto file.
+ */
+var GeneratedCodeInfo_Annotation_Semantic;
+(function (GeneratedCodeInfo_Annotation_Semantic) {
+    /** NONE - There is no effect or the effect is indescribable. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
+    /** SET - The element is set or otherwise mutated. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
+    /** ALIAS - An alias to the element is returned. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
+})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
+function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "NONE":
+            return GeneratedCodeInfo_Annotation_Semantic.NONE;
+        case 1:
+        case "SET":
+            return GeneratedCodeInfo_Annotation_Semantic.SET;
+        case 2:
+        case "ALIAS":
+            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+function generatedCodeInfo_Annotation_SemanticToJSON(object) {
+    switch (object) {
+        case GeneratedCodeInfo_Annotation_Semantic.NONE:
+            return "NONE";
+        case GeneratedCodeInfo_Annotation_Semantic.SET:
+            return "SET";
+        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
+            return "ALIAS";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return {
+            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file?.length) {
+            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            package: isSet(object.package) ? globalThis.String(object.package) : "",
+            dependency: globalThis.Array.isArray(object?.dependency)
+                ? object.dependency.map((e) => globalThis.String(e))
+                : [],
+            publicDependency: globalThis.Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => globalThis.Number(e))
+                : [],
+            weakDependency: globalThis.Array.isArray(object?.weakDependency)
+                ? object.weakDependency.map((e) => globalThis.Number(e))
+                : [],
+            messageType: globalThis.Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            service: globalThis.Array.isArray(object?.service)
+                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.package !== undefined && message.package !== "") {
+            obj.package = message.package;
+        }
+        if (message.dependency?.length) {
+            obj.dependency = message.dependency;
+        }
+        if (message.publicDependency?.length) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        if (message.weakDependency?.length) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        if (message.messageType?.length) {
+            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.service?.length) {
+            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FileOptions.toJSON(message.options);
+        }
+        if (message.sourceCodeInfo !== undefined) {
+            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
+        }
+        if (message.syntax !== undefined && message.syntax !== "") {
+            obj.syntax = message.syntax;
+        }
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            field: globalThis.Array.isArray(object?.field)
+                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: globalThis.Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            extensionRange: globalThis.Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.field?.length) {
+            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.nestedType?.length) {
+            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.extensionRange?.length) {
+            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
+        }
+        if (message.oneofDecl?.length) {
+            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MessageOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+            declaration: globalThis.Array.isArray(object?.declaration)
+                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            verification: isSet(object.verification)
+                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
+                : 1,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        if (message.declaration?.length) {
+            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.verification !== undefined && message.verification !== 1) {
+            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions_Declaration = {
+    fromJSON(object) {
+        return {
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
+            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.fullName !== undefined && message.fullName !== "") {
+            obj.fullName = message.fullName;
+        }
+        if (message.type !== undefined && message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.reserved !== undefined && message.reserved !== false) {
+            obj.reserved = message.reserved;
+        }
+        if (message.repeated !== undefined && message.repeated !== false) {
+            obj.repeated = message.repeated;
+        }
+        return obj;
+    },
+};
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.label !== undefined && message.label !== 1) {
+            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
+        }
+        if (message.type !== undefined && message.type !== 1) {
+            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
+        }
+        if (message.typeName !== undefined && message.typeName !== "") {
+            obj.typeName = message.typeName;
+        }
+        if (message.extendee !== undefined && message.extendee !== "") {
+            obj.extendee = message.extendee;
+        }
+        if (message.defaultValue !== undefined && message.defaultValue !== "") {
+            obj.defaultValue = message.defaultValue;
+        }
+        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
+            obj.oneofIndex = Math.round(message.oneofIndex);
+        }
+        if (message.jsonName !== undefined && message.jsonName !== "") {
+            obj.jsonName = message.jsonName;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FieldOptions.toJSON(message.options);
+        }
+        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
+            obj.proto3Optional = message.proto3Optional;
+        }
+        return obj;
+    },
+};
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.OneofOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            value: globalThis.Array.isArray(object?.value)
+                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.value?.length) {
+            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumValueOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            method: globalThis.Array.isArray(object?.method)
+                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.method?.length) {
+            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ServiceOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.inputType !== undefined && message.inputType !== "") {
+            obj.inputType = message.inputType;
+        }
+        if (message.outputType !== undefined && message.outputType !== "") {
+            obj.outputType = message.outputType;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MethodOptions.toJSON(message.options);
+        }
+        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
+            obj.clientStreaming = message.clientStreaming;
+        }
+        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
+            obj.serverStreaming = message.serverStreaming;
+        }
+        return obj;
+    },
+};
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.javaPackage !== undefined && message.javaPackage !== "") {
+            obj.javaPackage = message.javaPackage;
+        }
+        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
+            obj.javaOuterClassname = message.javaOuterClassname;
+        }
+        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
+            obj.javaMultipleFiles = message.javaMultipleFiles;
+        }
+        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
+            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
+        }
+        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
+            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
+        }
+        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
+            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
+        }
+        if (message.goPackage !== undefined && message.goPackage !== "") {
+            obj.goPackage = message.goPackage;
+        }
+        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
+            obj.ccGenericServices = message.ccGenericServices;
+        }
+        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
+            obj.javaGenericServices = message.javaGenericServices;
+        }
+        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
+            obj.pyGenericServices = message.pyGenericServices;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
+            obj.ccEnableArenas = message.ccEnableArenas;
+        }
+        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
+            obj.objcClassPrefix = message.objcClassPrefix;
+        }
+        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
+            obj.csharpNamespace = message.csharpNamespace;
+        }
+        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
+            obj.swiftPrefix = message.swiftPrefix;
+        }
+        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
+            obj.phpClassPrefix = message.phpClassPrefix;
+        }
+        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
+            obj.phpNamespace = message.phpNamespace;
+        }
+        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
+            obj.phpMetadataNamespace = message.phpMetadataNamespace;
+        }
+        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
+            obj.rubyPackage = message.rubyPackage;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat)
+                ? globalThis.Boolean(object.messageSetWireFormat)
+                : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
+            obj.messageSetWireFormat = message.messageSetWireFormat;
+        }
+        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
+            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.mapEntry !== undefined && message.mapEntry !== false) {
+            obj.mapEntry = message.mapEntry;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
+            targets: globalThis.Array.isArray(object?.targets)
+                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
+                : [],
+            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
+                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.ctype !== undefined && message.ctype !== 0) {
+            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
+        }
+        if (message.packed !== undefined && message.packed !== false) {
+            obj.packed = message.packed;
+        }
+        if (message.jstype !== undefined && message.jstype !== 0) {
+            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
+        }
+        if (message.lazy !== undefined && message.lazy !== false) {
+            obj.lazy = message.lazy;
+        }
+        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
+            obj.unverifiedLazy = message.unverifiedLazy;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.weak !== undefined && message.weak !== false) {
+            obj.weak = message.weak;
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.retention !== undefined && message.retention !== 0) {
+            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
+        }
+        if (message.targets?.length) {
+            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
+        }
+        if (message.editionDefaults?.length) {
+            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_EditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            value: isSet(object.value) ? globalThis.String(object.value) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.value !== undefined && message.value !== "") {
+            obj.value = message.value;
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_FeatureSupport = {
+    fromJSON(object) {
+        return {
+            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
+            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
+            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
+            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
+            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
+        }
+        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
+            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
+        }
+        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
+            obj.deprecationWarning = message.deprecationWarning;
+        }
+        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
+            obj.editionRemoved = editionToJSON(message.editionRemoved);
+        }
+        return obj;
+    },
+};
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.allowAlias !== undefined && message.allowAlias !== false) {
+            obj.allowAlias = message.allowAlias;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
+            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: globalThis.Array.isArray(object?.name)
+                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
+                : [],
+            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name?.length) {
+            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
+        }
+        if (message.identifierValue !== undefined && message.identifierValue !== "") {
+            obj.identifierValue = message.identifierValue;
+        }
+        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
+            obj.positiveIntValue = message.positiveIntValue;
+        }
+        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
+            obj.negativeIntValue = message.negativeIntValue;
+        }
+        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
+            obj.doubleValue = message.doubleValue;
+        }
+        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
+            obj.stringValue = base64FromBytes(message.stringValue);
+        }
+        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
+            obj.aggregateValue = message.aggregateValue;
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.namePart !== "") {
+            obj.namePart = message.namePart;
+        }
+        if (message.isExtension !== false) {
+            obj.isExtension = message.isExtension;
+        }
+        return obj;
+    },
+};
+exports.FeatureSet = {
+    fromJSON(object) {
+        return {
+            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
+            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
+            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
+                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
+                : 0,
+            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
+            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
+            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
+            enforceNamingStyle: isSet(object.enforceNamingStyle)
+                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
+                : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
+            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
+        }
+        if (message.enumType !== undefined && message.enumType !== 0) {
+            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
+        }
+        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
+            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
+        }
+        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
+            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
+        }
+        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
+            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
+        }
+        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
+            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
+        }
+        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
+            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults = {
+    fromJSON(object) {
+        return {
+            defaults: globalThis.Array.isArray(object?.defaults)
+                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
+                : [],
+            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
+            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.defaults?.length) {
+            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
+        }
+        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
+            obj.minimumEdition = editionToJSON(message.minimumEdition);
+        }
+        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
+            obj.maximumEdition = editionToJSON(message.maximumEdition);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults_FeatureSetEditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            overridableFeatures: isSet(object.overridableFeatures)
+                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
+                : undefined,
+            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.overridableFeatures !== undefined) {
+            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
+        }
+        if (message.fixedFeatures !== undefined) {
+            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: globalThis.Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location?.length) {
+            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
+            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.span?.length) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        if (message.leadingComments !== undefined && message.leadingComments !== "") {
+            obj.leadingComments = message.leadingComments;
+        }
+        if (message.trailingComments !== undefined && message.trailingComments !== "") {
+            obj.trailingComments = message.trailingComments;
+        }
+        if (message.leadingDetachedComments?.length) {
+            obj.leadingDetachedComments = message.leadingDetachedComments;
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: globalThis.Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation?.length) {
+            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.sourceFile !== undefined && message.sourceFile !== "") {
+            obj.sourceFile = message.sourceFile;
+        }
+        if (message.begin !== undefined && message.begin !== 0) {
+            obj.begin = Math.round(message.begin);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.semantic !== undefined && message.semantic !== 0) {
+            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..9d24cbba10de9
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,29 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/timestamp.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.seconds !== "0") {
+            obj.seconds = message.seconds;
+        }
+        if (message.nanos !== 0) {
+            obj.nanos = Math.round(message.nanos);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
new file mode 100644
index 0000000000000..abc766bed3b88
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
@@ -0,0 +1,55 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/dsse.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
+/* eslint-disable */
+const envelope_1 = require("../../envelope");
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.DSSERequestV002 = {
+    fromJSON(object) {
+        return {
+            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
+            verifiers: globalThis.Array.isArray(object?.verifiers)
+                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== undefined) {
+            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
+        }
+        if (message.verifiers?.length) {
+            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.DSSELogEntryV002 = {
+    fromJSON(object) {
+        return {
+            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payloadHash !== undefined) {
+            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
new file mode 100644
index 0000000000000..c5eccb10e0a68
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
@@ -0,0 +1,81 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/entry.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
+/* eslint-disable */
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+exports.Entry = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
+            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.apiVersion !== "") {
+            obj.apiVersion = message.apiVersion;
+        }
+        if (message.spec !== undefined) {
+            obj.spec = exports.Spec.toJSON(message.spec);
+        }
+        return obj;
+    },
+};
+exports.Spec = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordV002)
+                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
+                : isSet(object.dsseV002)
+                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordV002") {
+            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
+        }
+        else if (message.spec?.$case === "dsseV002") {
+            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
+        }
+        return obj;
+    },
+};
+exports.CreateEntryRequest = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordRequestV002)
+                ? {
+                    $case: "hashedRekordRequestV002",
+                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
+                }
+                : isSet(object.dsseRequestV002)
+                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordRequestV002") {
+            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
+        }
+        else if (message.spec?.$case === "dsseRequestV002") {
+            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
new file mode 100644
index 0000000000000..d3fd1af2483d1
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
@@ -0,0 +1,56 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/hashedrekord.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.HashedRekordRequestV002 = {
+    fromJSON(object) {
+        return {
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+exports.HashedRekordLogEntryV002 = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data !== undefined) {
+            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
new file mode 100644
index 0000000000000..c437d5053a3cb
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
@@ -0,0 +1,74 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/verifier.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Verifier = exports.PublicKey = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+exports.PublicKey = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.Verifier = {
+    fromJSON(object) {
+        return {
+            verifier: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
+                : isSet(object.x509Certificate)
+                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
+                    : undefined,
+            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.verifier?.$case === "publicKey") {
+            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
+        }
+        else if (message.verifier?.$case === "x509Certificate") {
+            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
+            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content.length !== 0) {
+            obj.content = base64FromBytes(message.content);
+        }
+        if (message.verifier !== undefined) {
+            obj.verifier = exports.Verifier.toJSON(message.verifier);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..aed636f00e7cf
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,103 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_bundle.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps?.length) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : isSet(object.certificate)
+                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
+                        : undefined,
+            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content?.$case === "publicKey") {
+            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
+        }
+        else if (message.content?.$case === "x509CertificateChain") {
+            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
+        }
+        else if (message.content?.$case === "certificate") {
+            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
+        }
+        if (message.tlogEntries?.length) {
+            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
+        }
+        if (message.timestampVerificationData !== undefined) {
+            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
+        }
+        return obj;
+    },
+};
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.verificationMaterial !== undefined) {
+            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
+        }
+        if (message.content?.$case === "messageSignature") {
+            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
+        }
+        else if (message.content?.$case === "dsseEnvelope") {
+            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..b900516ed3b55
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,596 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_common.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
+    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
+    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
+    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
+})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        case 2:
+        case "SHA2_384":
+            return HashAlgorithm.SHA2_384;
+        case 3:
+        case "SHA2_512":
+            return HashAlgorithm.SHA2_512;
+        case 4:
+        case "SHA3_256":
+            return HashAlgorithm.SHA3_256;
+        case 5:
+        case "SHA3_384":
+            return HashAlgorithm.SHA3_384;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        case HashAlgorithm.SHA2_384:
+            return "SHA2_384";
+        case HashAlgorithm.SHA2_512:
+            return "SHA2_512";
+        case HashAlgorithm.SHA3_256:
+            return "SHA3_256";
+        case HashAlgorithm.SHA3_384:
+            return "SHA3_384";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ *
+ * PublicKeyDetails captures the public key/hash algorithm combinations
+ * recommended in the Sigstore ecosystem.
+ *
+ * This is modelled as a linear set as we want to provide a small number of
+ * opinionated options instead of allowing every possible permutation.
+ *
+ * Any changes to this enum MUST be reflected in the algorithm registry.
+ *
+ * See: 
+ *
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e. one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /**
+     * PKCS1_RSA_PKCS1V5 - RSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /**
+     * PKCS1_RSA_PSS - See RFC8017
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
+    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
+    /**
+     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+    /**
+     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
+     * were/are being used by most Sigstore client implementations.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
+    /**
+     * LMS_SHA256 - LMS and LM-OTS
+     *
+     * These algorithms are deprecated and should not be used.
+     * Keys and signatures MAY be used by private Sigstore
+     * deployments, but will not be supported by the public
+     * good instance.
+     *
+     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
+     * Using them correctly requires discretion and careful consideration
+     * to ensure that individual secret keys are not used more than once.
+     * In addition, LM-OTS is a single-use scheme, meaning that it
+     * MUST NOT be used for more than one signature per LM-OTS key.
+     * If you cannot maintain these invariants, you MUST NOT use these
+     * schemes.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
+    /**
+     * ML_DSA_65 - ML-DSA
+     *
+     * These ML_DSA_65 and ML_DSA_87 algorithms are the pure variants that
+     * take data to sign rather than the prehash variants (HashML-DSA), which
+     * take digests.  While considered quantum-resistant, their usage
+     * involves tradeoffs in that signatures and keys are much larger, and
+     * this makes deployments more costly.
+     *
+     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
+     * In the future they MAY be used by private Sigstore deployments, but
+     * they are not yet fully functional.  This warning will be removed when
+     * these algorithms are widely supported by Sigstore clients and servers,
+     * but care should still be taken for production environments.
+     */
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
+})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 9:
+        case "PKIX_RSA_PKCS1V15_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
+        case 10:
+        case "PKIX_RSA_PKCS1V15_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
+        case 11:
+        case "PKIX_RSA_PKCS1V15_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
+        case 16:
+        case "PKIX_RSA_PSS_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
+        case 17:
+        case "PKIX_RSA_PSS_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
+        case 18:
+        case "PKIX_RSA_PSS_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 12:
+        case "PKIX_ECDSA_P384_SHA_384":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
+        case 13:
+        case "PKIX_ECDSA_P521_SHA_512":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        case 8:
+        case "PKIX_ED25519_PH":
+            return PublicKeyDetails.PKIX_ED25519_PH;
+        case 19:
+        case "PKIX_ECDSA_P384_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
+        case 20:
+        case "PKIX_ECDSA_P521_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
+        case 14:
+        case "LMS_SHA256":
+            return PublicKeyDetails.LMS_SHA256;
+        case 15:
+        case "LMOTS_SHA256":
+            return PublicKeyDetails.LMOTS_SHA256;
+        case 21:
+        case "ML_DSA_65":
+            return PublicKeyDetails.ML_DSA_65;
+        case 22:
+        case "ML_DSA_87":
+            return PublicKeyDetails.ML_DSA_87;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
+            return "PKIX_RSA_PKCS1V15_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
+            return "PKIX_RSA_PKCS1V15_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
+            return "PKIX_RSA_PKCS1V15_4096_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
+            return "PKIX_RSA_PSS_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
+            return "PKIX_RSA_PSS_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
+            return "PKIX_RSA_PSS_4096_SHA256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
+            return "PKIX_ECDSA_P384_SHA_384";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
+            return "PKIX_ECDSA_P521_SHA_512";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        case PublicKeyDetails.PKIX_ED25519_PH:
+            return "PKIX_ED25519_PH";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
+            return "PKIX_ECDSA_P384_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
+            return "PKIX_ECDSA_P521_SHA_256";
+        case PublicKeyDetails.LMS_SHA256:
+            return "LMS_SHA256";
+        case PublicKeyDetails.LMOTS_SHA256:
+            return "LMOTS_SHA256";
+        case PublicKeyDetails.ML_DSA_65:
+            return "ML_DSA_65";
+        case PublicKeyDetails.ML_DSA_87:
+            return "ML_DSA_87";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.algorithm !== 0) {
+            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
+        }
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        return obj;
+    },
+};
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageDigest !== undefined) {
+            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
+        }
+        if (message.signature.length !== 0) {
+            obj.signature = base64FromBytes(message.signature);
+        }
+        return obj;
+    },
+};
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.keyId.length !== 0) {
+            obj.keyId = base64FromBytes(message.keyId);
+        }
+        return obj;
+    },
+};
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedTimestamp.length !== 0) {
+            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
+        }
+        return obj;
+    },
+};
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes !== undefined) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = exports.TimeRange.toJSON(message.validFor);
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.hint !== "") {
+            obj.hint = message.hint;
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id?.length) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.oid !== undefined) {
+            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
+            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.organization !== "") {
+            obj.organization = message.organization;
+        }
+        if (message.commonName !== "") {
+            obj.commonName = message.commonName;
+        }
+        return obj;
+    },
+};
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: globalThis.String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.type !== 0) {
+            obj.type = subjectAlternativeNameTypeToJSON(message.type);
+        }
+        if (message.identity?.$case === "regexp") {
+            obj.regexp = message.identity.regexp;
+        }
+        else if (message.identity?.$case === "value") {
+            obj.value = message.identity.value;
+        }
+        return obj;
+    },
+};
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: globalThis.Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates?.length) {
+            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined) {
+            obj.start = message.start.toISOString();
+        }
+        if (message.end !== undefined) {
+            obj.end = message.end.toISOString();
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..fd8ea8384664d
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,137 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_rekor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            version: isSet(object.version) ? globalThis.String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.version !== "") {
+            obj.version = message.version;
+        }
+        return obj;
+    },
+};
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== "") {
+            obj.envelope = message.envelope;
+        }
+        return obj;
+    },
+};
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
+            hashes: globalThis.Array.isArray(object?.hashes)
+                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
+                : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.rootHash.length !== 0) {
+            obj.rootHash = base64FromBytes(message.rootHash);
+        }
+        if (message.treeSize !== "0") {
+            obj.treeSize = message.treeSize;
+        }
+        if (message.hashes?.length) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
+        }
+        if (message.checkpoint !== undefined) {
+            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
+        }
+        return obj;
+    },
+};
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedEntryTimestamp.length !== 0) {
+            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
+        }
+        return obj;
+    },
+};
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.kindVersion !== undefined) {
+            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
+        }
+        if (message.integratedTime !== "0") {
+            obj.integratedTime = message.integratedTime;
+        }
+        if (message.inclusionPromise !== undefined) {
+            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
+        }
+        if (message.inclusionProof !== undefined) {
+            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
+        }
+        if (message.canonicalizedBody.length !== 0) {
+            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..1b5492fb1a77e
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,284 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_trustroot.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
+exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
+exports.serviceSelectorToJSON = serviceSelectorToJSON;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+/**
+ * ServiceSelector specifies how a client SHOULD select a set of
+ * Services to connect to. A client SHOULD throw an error if
+ * the value is SERVICE_SELECTOR_UNDEFINED.
+ */
+var ServiceSelector;
+(function (ServiceSelector) {
+    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
+    /**
+     * ALL - Clients SHOULD select all Services based on supported API version
+     * and validity window.
+     */
+    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
+    /**
+     * ANY - Clients SHOULD select one Service based on supported API version
+     * and validity window. It is up to the client implementation to
+     * decide how to select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
+    /**
+     * EXACT - Clients SHOULD select a specific number of Services based on
+     * supported API version and validity window, using the provided
+     * `count`. It is up to the client implementation to decide how to
+     * select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
+})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
+function serviceSelectorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SERVICE_SELECTOR_UNDEFINED":
+            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
+        case 1:
+        case "ALL":
+            return ServiceSelector.ALL;
+        case 2:
+        case "ANY":
+            return ServiceSelector.ANY;
+        case 3:
+        case "EXACT":
+            return ServiceSelector.EXACT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+function serviceSelectorToJSON(object) {
+    switch (object) {
+        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
+            return "SERVICE_SELECTOR_UNDEFINED";
+        case ServiceSelector.ALL:
+            return "ALL";
+        case ServiceSelector.ANY:
+            return "ANY";
+        case ServiceSelector.EXACT:
+            return "EXACT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.baseUrl !== "") {
+            obj.baseUrl = message.baseUrl;
+        }
+        if (message.hashAlgorithm !== 0) {
+            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
+        }
+        if (message.publicKey !== undefined) {
+            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.checkpointKeyId !== undefined) {
+            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.subject !== undefined) {
+            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
+        }
+        if (message.uri !== "") {
+            obj.uri = message.uri;
+        }
+        if (message.certChain !== undefined) {
+            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            tlogs: globalThis.Array.isArray(object?.tlogs)
+                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: globalThis.Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.tlogs?.length) {
+            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.certificateAuthorities?.length) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        if (message.ctlogs?.length) {
+            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.timestampAuthorities?.length) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SigningConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
+                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
+                : [],
+            rekorTlogConfig: isSet(object.rekorTlogConfig)
+                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
+                : undefined,
+            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.caUrls?.length) {
+            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.oidcUrls?.length) {
+            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogUrls?.length) {
+            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogConfig !== undefined) {
+            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
+        }
+        if (message.tsaUrls?.length) {
+            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.tsaConfig !== undefined) {
+            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
+        }
+        return obj;
+    },
+};
+exports.Service = {
+    fromJSON(object) {
+        return {
+            url: isSet(object.url) ? globalThis.String(object.url) : "",
+            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.url !== "") {
+            obj.url = message.url;
+        }
+        if (message.majorApiVersion !== 0) {
+            obj.majorApiVersion = Math.round(message.majorApiVersion);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.ServiceConfiguration = {
+    fromJSON(object) {
+        return {
+            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
+            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.selector !== 0) {
+            obj.selector = serviceSelectorToJSON(message.selector);
+        }
+        if (message.count !== 0) {
+            obj.count = Math.round(message.count);
+        }
+        return obj;
+    },
+};
+exports.ClientTrustConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
+            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.trustedRoot !== undefined) {
+            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
+        }
+        if (message.signingConfig !== undefined) {
+            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..876fe9cc1db1d
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,281 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_verification.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: globalThis.Array.isArray(object?.oids)
+                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.issuer !== "") {
+            obj.issuer = message.issuer;
+        }
+        if (message.san !== undefined) {
+            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
+        }
+        if (message.oids?.length) {
+            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: globalThis.Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities?.length) {
+            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: globalThis.Array.isArray(object?.publicKeys)
+                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys?.length) {
+            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+            integratedTsOptions: isSet(object.integratedTsOptions)
+                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
+                : undefined,
+            observerOptions: isSet(object.observerOptions)
+                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signers?.$case === "certificateIdentities") {
+            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
+        }
+        else if (message.signers?.$case === "publicKeys") {
+            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
+        }
+        if (message.tlogOptions !== undefined) {
+            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
+        }
+        if (message.ctlogOptions !== undefined) {
+            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
+        }
+        if (message.tsaOptions !== undefined) {
+            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
+        }
+        if (message.integratedTsOptions !== undefined) {
+            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
+        }
+        if (message.observerOptions !== undefined) {
+            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? globalThis.Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.performOnlineVerification !== false) {
+            obj.performOnlineVerification = message.performOnlineVerification;
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : isSet(object.artifactDigest)
+                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data?.$case === "artifactUri") {
+            obj.artifactUri = message.data.artifactUri;
+        }
+        else if (message.data?.$case === "artifact") {
+            obj.artifact = base64FromBytes(message.data.artifact);
+        }
+        else if (message.data?.$case === "artifactDigest") {
+            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
+        }
+        return obj;
+    },
+};
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.artifactTrustRoot !== undefined) {
+            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
+        }
+        if (message.artifactVerificationOptions !== undefined) {
+            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
+        }
+        if (message.bundle !== undefined) {
+            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
+        }
+        if (message.artifact !== undefined) {
+            obj.artifact = exports.Artifact.toJSON(message.artifact);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
new file mode 100644
index 0000000000000..10745efc39a1f
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
@@ -0,0 +1,35 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2025 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
+__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
+__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
+__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..f87b2540fbf98
--- /dev/null
+++ b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.5.0",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "exports": {
+    ".": "./dist/index.js",
+    "./rekor/v2": "./dist/rekor/v2/index.js"
+  },
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node18": "^18.2.4",
+    "@types/node": "^18.14.0",
+    "typescript": "^5.7.2"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  }
+}
diff --git a/node_modules/@sigstore/verify/package.json b/node_modules/@sigstore/verify/package.json
index 62b84db7f91f4..eaf12376c9025 100644
--- a/node_modules/@sigstore/verify/package.json
+++ b/node_modules/@sigstore/verify/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/verify",
-  "version": "2.1.1",
+  "version": "3.0.0",
   "description": "Verification of Sigstore signatures",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -26,11 +26,11 @@
     "provenance": true
   },
   "dependencies": {
-    "@sigstore/protobuf-specs": "^0.4.1",
-    "@sigstore/bundle": "^3.1.0",
-    "@sigstore/core": "^2.0.0"
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "@sigstore/bundle": "^4.0.0",
+    "@sigstore/core": "^3.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js
deleted file mode 100644
index ade736407554c..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/build.js
+++ /dev/null
@@ -1,100 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toMessageSignatureBundle = toMessageSignatureBundle;
-exports.toDSSEBundle = toDSSEBundle;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const bundle_1 = require("./bundle");
-// Message signature bundle - $case: 'messageSignature'
-function toMessageSignatureBundle(options) {
-    return {
-        mediaType: options.certificateChain
-            ? bundle_1.BUNDLE_V02_MEDIA_TYPE
-            : bundle_1.BUNDLE_V03_MEDIA_TYPE,
-        content: {
-            $case: 'messageSignature',
-            messageSignature: {
-                messageDigest: {
-                    algorithm: protobuf_specs_1.HashAlgorithm.SHA2_256,
-                    digest: options.digest,
-                },
-                signature: options.signature,
-            },
-        },
-        verificationMaterial: toVerificationMaterial(options),
-    };
-}
-// DSSE envelope bundle - $case: 'dsseEnvelope'
-function toDSSEBundle(options) {
-    return {
-        mediaType: options.certificateChain
-            ? bundle_1.BUNDLE_V02_MEDIA_TYPE
-            : bundle_1.BUNDLE_V03_MEDIA_TYPE,
-        content: {
-            $case: 'dsseEnvelope',
-            dsseEnvelope: toEnvelope(options),
-        },
-        verificationMaterial: toVerificationMaterial(options),
-    };
-}
-function toEnvelope(options) {
-    return {
-        payloadType: options.artifactType,
-        payload: options.artifact,
-        signatures: [toSignature(options)],
-    };
-}
-function toSignature(options) {
-    return {
-        keyid: options.keyHint || '',
-        sig: options.signature,
-    };
-}
-// Verification material
-function toVerificationMaterial(options) {
-    return {
-        content: toKeyContent(options),
-        tlogEntries: [],
-        timestampVerificationData: { rfc3161Timestamps: [] },
-    };
-}
-function toKeyContent(options) {
-    if (options.certificate) {
-        if (options.certificateChain) {
-            return {
-                $case: 'x509CertificateChain',
-                x509CertificateChain: {
-                    certificates: [{ rawBytes: options.certificate }],
-                },
-            };
-        }
-        else {
-            return {
-                $case: 'certificate',
-                certificate: { rawBytes: options.certificate },
-            };
-        }
-    }
-    else {
-        return {
-            $case: 'publicKey',
-            publicKey: {
-                hint: options.keyHint || '',
-            },
-        };
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js
deleted file mode 100644
index eb67a0ddc17bb..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/bundle.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = void 0;
-exports.isBundleWithCertificateChain = isBundleWithCertificateChain;
-exports.isBundleWithPublicKey = isBundleWithPublicKey;
-exports.isBundleWithMessageSignature = isBundleWithMessageSignature;
-exports.isBundleWithDsseEnvelope = isBundleWithDsseEnvelope;
-exports.BUNDLE_V01_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.1';
-exports.BUNDLE_V02_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.2';
-exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle+json;version=0.3';
-exports.BUNDLE_V03_MEDIA_TYPE = 'application/vnd.dev.sigstore.bundle.v0.3+json';
-// Type guards for bundle variants.
-function isBundleWithCertificateChain(b) {
-    return b.verificationMaterial.content.$case === 'x509CertificateChain';
-}
-function isBundleWithPublicKey(b) {
-    return b.verificationMaterial.content.$case === 'publicKey';
-}
-function isBundleWithMessageSignature(b) {
-    return b.content.$case === 'messageSignature';
-}
-function isBundleWithDsseEnvelope(b) {
-    return b.content.$case === 'dsseEnvelope';
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js
deleted file mode 100644
index f84295323b812..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/error.js
+++ /dev/null
@@ -1,25 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ValidationError = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-class ValidationError extends Error {
-    constructor(message, fields) {
-        super(message);
-        this.fields = fields;
-    }
-}
-exports.ValidationError = ValidationError;
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js
deleted file mode 100644
index 1b012acad4d85..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.isBundleV01 = exports.assertBundleV02 = exports.assertBundleV01 = exports.assertBundleLatest = exports.assertBundle = exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = exports.ValidationError = exports.isBundleWithPublicKey = exports.isBundleWithMessageSignature = exports.isBundleWithDsseEnvelope = exports.isBundleWithCertificateChain = exports.BUNDLE_V03_MEDIA_TYPE = exports.BUNDLE_V03_LEGACY_MEDIA_TYPE = exports.BUNDLE_V02_MEDIA_TYPE = exports.BUNDLE_V01_MEDIA_TYPE = exports.toMessageSignatureBundle = exports.toDSSEBundle = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var build_1 = require("./build");
-Object.defineProperty(exports, "toDSSEBundle", { enumerable: true, get: function () { return build_1.toDSSEBundle; } });
-Object.defineProperty(exports, "toMessageSignatureBundle", { enumerable: true, get: function () { return build_1.toMessageSignatureBundle; } });
-var bundle_1 = require("./bundle");
-Object.defineProperty(exports, "BUNDLE_V01_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V01_MEDIA_TYPE; } });
-Object.defineProperty(exports, "BUNDLE_V02_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V02_MEDIA_TYPE; } });
-Object.defineProperty(exports, "BUNDLE_V03_LEGACY_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_LEGACY_MEDIA_TYPE; } });
-Object.defineProperty(exports, "BUNDLE_V03_MEDIA_TYPE", { enumerable: true, get: function () { return bundle_1.BUNDLE_V03_MEDIA_TYPE; } });
-Object.defineProperty(exports, "isBundleWithCertificateChain", { enumerable: true, get: function () { return bundle_1.isBundleWithCertificateChain; } });
-Object.defineProperty(exports, "isBundleWithDsseEnvelope", { enumerable: true, get: function () { return bundle_1.isBundleWithDsseEnvelope; } });
-Object.defineProperty(exports, "isBundleWithMessageSignature", { enumerable: true, get: function () { return bundle_1.isBundleWithMessageSignature; } });
-Object.defineProperty(exports, "isBundleWithPublicKey", { enumerable: true, get: function () { return bundle_1.isBundleWithPublicKey; } });
-var error_1 = require("./error");
-Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return error_1.ValidationError; } });
-var serialized_1 = require("./serialized");
-Object.defineProperty(exports, "bundleFromJSON", { enumerable: true, get: function () { return serialized_1.bundleFromJSON; } });
-Object.defineProperty(exports, "bundleToJSON", { enumerable: true, get: function () { return serialized_1.bundleToJSON; } });
-Object.defineProperty(exports, "envelopeFromJSON", { enumerable: true, get: function () { return serialized_1.envelopeFromJSON; } });
-Object.defineProperty(exports, "envelopeToJSON", { enumerable: true, get: function () { return serialized_1.envelopeToJSON; } });
-var validate_1 = require("./validate");
-Object.defineProperty(exports, "assertBundle", { enumerable: true, get: function () { return validate_1.assertBundle; } });
-Object.defineProperty(exports, "assertBundleLatest", { enumerable: true, get: function () { return validate_1.assertBundleLatest; } });
-Object.defineProperty(exports, "assertBundleV01", { enumerable: true, get: function () { return validate_1.assertBundleV01; } });
-Object.defineProperty(exports, "assertBundleV02", { enumerable: true, get: function () { return validate_1.assertBundleV02; } });
-Object.defineProperty(exports, "isBundleV01", { enumerable: true, get: function () { return validate_1.isBundleV01; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js
deleted file mode 100644
index be0d2a2d54d09..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/serialized.js
+++ /dev/null
@@ -1,49 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.envelopeToJSON = exports.envelopeFromJSON = exports.bundleToJSON = exports.bundleFromJSON = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const bundle_1 = require("./bundle");
-const validate_1 = require("./validate");
-const bundleFromJSON = (obj) => {
-    const bundle = protobuf_specs_1.Bundle.fromJSON(obj);
-    switch (bundle.mediaType) {
-        case bundle_1.BUNDLE_V01_MEDIA_TYPE:
-            (0, validate_1.assertBundleV01)(bundle);
-            break;
-        case bundle_1.BUNDLE_V02_MEDIA_TYPE:
-            (0, validate_1.assertBundleV02)(bundle);
-            break;
-        default:
-            (0, validate_1.assertBundleLatest)(bundle);
-            break;
-    }
-    return bundle;
-};
-exports.bundleFromJSON = bundleFromJSON;
-const bundleToJSON = (bundle) => {
-    return protobuf_specs_1.Bundle.toJSON(bundle);
-};
-exports.bundleToJSON = bundleToJSON;
-const envelopeFromJSON = (obj) => {
-    return protobuf_specs_1.Envelope.fromJSON(obj);
-};
-exports.envelopeFromJSON = envelopeFromJSON;
-const envelopeToJSON = (envelope) => {
-    return protobuf_specs_1.Envelope.toJSON(envelope);
-};
-exports.envelopeToJSON = envelopeToJSON;
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/utility.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js b/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js
deleted file mode 100644
index 21b8b5ee293ba..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/dist/validate.js
+++ /dev/null
@@ -1,199 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertBundle = assertBundle;
-exports.assertBundleV01 = assertBundleV01;
-exports.isBundleV01 = isBundleV01;
-exports.assertBundleV02 = assertBundleV02;
-exports.assertBundleLatest = assertBundleLatest;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("./error");
-// Performs basic validation of a Sigstore bundle to ensure that all required
-// fields are populated. This is not a complete validation of the bundle, but
-// rather a check that the bundle is in a valid state to be processed by the
-// rest of the code.
-function assertBundle(b) {
-    const invalidValues = validateBundleBase(b);
-    if (invalidValues.length > 0) {
-        throw new error_1.ValidationError('invalid bundle', invalidValues);
-    }
-}
-// Asserts that the given bundle conforms to the v0.1 bundle format.
-function assertBundleV01(b) {
-    const invalidValues = [];
-    invalidValues.push(...validateBundleBase(b));
-    invalidValues.push(...validateInclusionPromise(b));
-    if (invalidValues.length > 0) {
-        throw new error_1.ValidationError('invalid v0.1 bundle', invalidValues);
-    }
-}
-// Type guard to determine if Bundle is a v0.1 bundle.
-function isBundleV01(b) {
-    try {
-        assertBundleV01(b);
-        return true;
-    }
-    catch (e) {
-        return false;
-    }
-}
-// Asserts that the given bundle conforms to the v0.2 bundle format.
-function assertBundleV02(b) {
-    const invalidValues = [];
-    invalidValues.push(...validateBundleBase(b));
-    invalidValues.push(...validateInclusionProof(b));
-    if (invalidValues.length > 0) {
-        throw new error_1.ValidationError('invalid v0.2 bundle', invalidValues);
-    }
-}
-// Asserts that the given bundle conforms to the newest (0.3) bundle format.
-function assertBundleLatest(b) {
-    const invalidValues = [];
-    invalidValues.push(...validateBundleBase(b));
-    invalidValues.push(...validateInclusionProof(b));
-    invalidValues.push(...validateNoCertificateChain(b));
-    if (invalidValues.length > 0) {
-        throw new error_1.ValidationError('invalid bundle', invalidValues);
-    }
-}
-function validateBundleBase(b) {
-    const invalidValues = [];
-    // Media type validation
-    if (b.mediaType === undefined ||
-        (!b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\+json;version=\d\.\d/) &&
-            !b.mediaType.match(/^application\/vnd\.dev\.sigstore\.bundle\.v\d\.\d\+json/))) {
-        invalidValues.push('mediaType');
-    }
-    // Content-related validation
-    if (b.content === undefined) {
-        invalidValues.push('content');
-    }
-    else {
-        switch (b.content.$case) {
-            case 'messageSignature':
-                if (b.content.messageSignature.messageDigest === undefined) {
-                    invalidValues.push('content.messageSignature.messageDigest');
-                }
-                else {
-                    if (b.content.messageSignature.messageDigest.digest.length === 0) {
-                        invalidValues.push('content.messageSignature.messageDigest.digest');
-                    }
-                }
-                if (b.content.messageSignature.signature.length === 0) {
-                    invalidValues.push('content.messageSignature.signature');
-                }
-                break;
-            case 'dsseEnvelope':
-                if (b.content.dsseEnvelope.payload.length === 0) {
-                    invalidValues.push('content.dsseEnvelope.payload');
-                }
-                if (b.content.dsseEnvelope.signatures.length !== 1) {
-                    invalidValues.push('content.dsseEnvelope.signatures');
-                }
-                else {
-                    if (b.content.dsseEnvelope.signatures[0].sig.length === 0) {
-                        invalidValues.push('content.dsseEnvelope.signatures[0].sig');
-                    }
-                }
-                break;
-        }
-    }
-    // Verification material-related validation
-    if (b.verificationMaterial === undefined) {
-        invalidValues.push('verificationMaterial');
-    }
-    else {
-        if (b.verificationMaterial.content === undefined) {
-            invalidValues.push('verificationMaterial.content');
-        }
-        else {
-            switch (b.verificationMaterial.content.$case) {
-                case 'x509CertificateChain':
-                    if (b.verificationMaterial.content.x509CertificateChain.certificates
-                        .length === 0) {
-                        invalidValues.push('verificationMaterial.content.x509CertificateChain.certificates');
-                    }
-                    b.verificationMaterial.content.x509CertificateChain.certificates.forEach((cert, i) => {
-                        if (cert.rawBytes.length === 0) {
-                            invalidValues.push(`verificationMaterial.content.x509CertificateChain.certificates[${i}].rawBytes`);
-                        }
-                    });
-                    break;
-                case 'certificate':
-                    if (b.verificationMaterial.content.certificate.rawBytes.length === 0) {
-                        invalidValues.push('verificationMaterial.content.certificate.rawBytes');
-                    }
-                    break;
-            }
-        }
-        if (b.verificationMaterial.tlogEntries === undefined) {
-            invalidValues.push('verificationMaterial.tlogEntries');
-        }
-        else {
-            if (b.verificationMaterial.tlogEntries.length > 0) {
-                b.verificationMaterial.tlogEntries.forEach((entry, i) => {
-                    if (entry.logId === undefined) {
-                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].logId`);
-                    }
-                    if (entry.kindVersion === undefined) {
-                        invalidValues.push(`verificationMaterial.tlogEntries[${i}].kindVersion`);
-                    }
-                });
-            }
-        }
-    }
-    return invalidValues;
-}
-// Necessary for V01 bundles
-function validateInclusionPromise(b) {
-    const invalidValues = [];
-    if (b.verificationMaterial &&
-        b.verificationMaterial.tlogEntries?.length > 0) {
-        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
-            if (entry.inclusionPromise === undefined) {
-                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionPromise`);
-            }
-        });
-    }
-    return invalidValues;
-}
-// Necessary for V02 and later bundles
-function validateInclusionProof(b) {
-    const invalidValues = [];
-    if (b.verificationMaterial &&
-        b.verificationMaterial.tlogEntries?.length > 0) {
-        b.verificationMaterial.tlogEntries.forEach((entry, i) => {
-            if (entry.inclusionProof === undefined) {
-                invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof`);
-            }
-            else {
-                if (entry.inclusionProof.checkpoint === undefined) {
-                    invalidValues.push(`verificationMaterial.tlogEntries[${i}].inclusionProof.checkpoint`);
-                }
-            }
-        });
-    }
-    return invalidValues;
-}
-// Necessary for V03 and later bundles
-function validateNoCertificateChain(b) {
-    const invalidValues = [];
-    /* istanbul ignore next */
-    if (b.verificationMaterial?.content?.$case === 'x509CertificateChain') {
-        invalidValues.push('verificationMaterial.content.$case');
-    }
-    return invalidValues;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/bundle/package.json b/node_modules/pacote/node_modules/@sigstore/bundle/package.json
deleted file mode 100644
index 03291b2159b79..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/bundle/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "@sigstore/bundle",
-  "version": "4.0.0",
-  "description": "Sigstore bundle type",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist",
-    "store"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/bundle#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "dependencies": {
-    "@sigstore/protobuf-specs": "^0.5.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js
deleted file mode 100644
index 17d93b0f7e706..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/error.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ASN1TypeError = exports.ASN1ParseError = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-class ASN1ParseError extends Error {
-}
-exports.ASN1ParseError = ASN1ParseError;
-class ASN1TypeError extends Error {
-}
-exports.ASN1TypeError = ASN1TypeError;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js
deleted file mode 100644
index 348b2ea4022e5..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ASN1Obj = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var obj_1 = require("./obj");
-Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return obj_1.ASN1Obj; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js
deleted file mode 100644
index cb7ebf09dbefa..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/length.js
+++ /dev/null
@@ -1,62 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.decodeLength = decodeLength;
-exports.encodeLength = encodeLength;
-const error_1 = require("./error");
-// Decodes the length of a DER-encoded ASN.1 element from the supplied stream.
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-length-and-value-bytes
-function decodeLength(stream) {
-    const buf = stream.getUint8();
-    // If the most significant bit is UNSET the length is just the value of the
-    // byte.
-    if ((buf & 0x80) === 0x00) {
-        return buf;
-    }
-    // Otherwise, the lower 7 bits of the first byte indicate the number of bytes
-    // that follow to encode the length.
-    const byteCount = buf & 0x7f;
-    // Ensure the encoded length can safely fit in a JS number.
-    if (byteCount > 6) {
-        throw new error_1.ASN1ParseError('length exceeds 6 byte limit');
-    }
-    // Iterate over the bytes that encode the length.
-    let len = 0;
-    for (let i = 0; i < byteCount; i++) {
-        len = len * 256 + stream.getUint8();
-    }
-    // This is a valid ASN.1 length encoding, but we don't support it.
-    if (len === 0) {
-        throw new error_1.ASN1ParseError('indefinite length encoding not supported');
-    }
-    return len;
-}
-// Translates the supplied value to a DER-encoded length.
-function encodeLength(len) {
-    if (len < 128) {
-        return Buffer.from([len]);
-    }
-    // Bitwise operations on large numbers are not supported in JS, so we need to
-    // use BigInts.
-    let val = BigInt(len);
-    const bytes = [];
-    while (val > 0n) {
-        bytes.unshift(Number(val & 255n));
-        val = val >> 8n;
-    }
-    return Buffer.from([0x80 | bytes.length, ...bytes]);
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js
deleted file mode 100644
index 5f9ac9cdbc493..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/obj.js
+++ /dev/null
@@ -1,152 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ASN1Obj = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const stream_1 = require("../stream");
-const error_1 = require("./error");
-const length_1 = require("./length");
-const parse_1 = require("./parse");
-const tag_1 = require("./tag");
-class ASN1Obj {
-    constructor(tag, value, subs) {
-        this.tag = tag;
-        this.value = value;
-        this.subs = subs;
-    }
-    // Constructs an ASN.1 object from a Buffer of DER-encoded bytes.
-    static parseBuffer(buf) {
-        return parseStream(new stream_1.ByteStream(buf));
-    }
-    toDER() {
-        const valueStream = new stream_1.ByteStream();
-        if (this.subs.length > 0) {
-            for (const sub of this.subs) {
-                valueStream.appendView(sub.toDER());
-            }
-        }
-        else {
-            valueStream.appendView(this.value);
-        }
-        const value = valueStream.buffer;
-        // Concat tag/length/value
-        const obj = new stream_1.ByteStream();
-        obj.appendChar(this.tag.toDER());
-        obj.appendView((0, length_1.encodeLength)(value.length));
-        obj.appendView(value);
-        return obj.buffer;
-    }
-    /////////////////////////////////////////////////////////////////////////////
-    // Convenience methods for parsing ASN.1 primitives into JS types
-    // Returns the ASN.1 object's value as a boolean. Throws an error if the
-    // object is not a boolean.
-    toBoolean() {
-        if (!this.tag.isBoolean()) {
-            throw new error_1.ASN1TypeError('not a boolean');
-        }
-        return (0, parse_1.parseBoolean)(this.value);
-    }
-    // Returns the ASN.1 object's value as a BigInt. Throws an error if the
-    // object is not an integer.
-    toInteger() {
-        if (!this.tag.isInteger()) {
-            throw new error_1.ASN1TypeError('not an integer');
-        }
-        return (0, parse_1.parseInteger)(this.value);
-    }
-    // Returns the ASN.1 object's value as an OID string. Throws an error if the
-    // object is not an OID.
-    toOID() {
-        if (!this.tag.isOID()) {
-            throw new error_1.ASN1TypeError('not an OID');
-        }
-        return (0, parse_1.parseOID)(this.value);
-    }
-    // Returns the ASN.1 object's value as a Date. Throws an error if the object
-    // is not either a UTCTime or a GeneralizedTime.
-    toDate() {
-        switch (true) {
-            case this.tag.isUTCTime():
-                return (0, parse_1.parseTime)(this.value, true);
-            case this.tag.isGeneralizedTime():
-                return (0, parse_1.parseTime)(this.value, false);
-            default:
-                throw new error_1.ASN1TypeError('not a date');
-        }
-    }
-    // Returns the ASN.1 object's value as a number[] where each number is the
-    // value of a bit in the bit string. Throws an error if the object is not a
-    // bit string.
-    toBitString() {
-        if (!this.tag.isBitString()) {
-            throw new error_1.ASN1TypeError('not a bit string');
-        }
-        return (0, parse_1.parseBitString)(this.value);
-    }
-}
-exports.ASN1Obj = ASN1Obj;
-/////////////////////////////////////////////////////////////////////////////
-// Internal stream parsing functions
-function parseStream(stream) {
-    // Parse tag, length, and value from stream
-    const tag = new tag_1.ASN1Tag(stream.getUint8());
-    const len = (0, length_1.decodeLength)(stream);
-    const value = stream.slice(stream.position, len);
-    const start = stream.position;
-    let subs = [];
-    // If the object is constructed, parse its children. Sometimes, children
-    // are embedded in OCTET STRING objects, so we need to check those
-    // for children as well.
-    if (tag.constructed) {
-        subs = collectSubs(stream, len);
-    }
-    else if (tag.isOctetString()) {
-        // Attempt to parse children of OCTETSTRING objects. If anything fails,
-        // assume the object is not constructed and treat as primitive.
-        try {
-            subs = collectSubs(stream, len);
-        }
-        catch (e) {
-            // Fail silently and treat as primitive
-        }
-    }
-    // If there are no children, move stream cursor to the end of the object
-    if (subs.length === 0) {
-        stream.seek(start + len);
-    }
-    return new ASN1Obj(tag, value, subs);
-}
-function collectSubs(stream, len) {
-    // Calculate end of object content
-    const end = stream.position + len;
-    // Make sure there are enough bytes left in the stream. This should never
-    // happen, cause it'll get caught when the stream is sliced in parseStream.
-    // Leaving as an extra check just in case.
-    /* istanbul ignore if */
-    if (end > stream.length) {
-        throw new error_1.ASN1ParseError('invalid length');
-    }
-    // Parse all children
-    const subs = [];
-    while (stream.position < end) {
-        subs.push(parseStream(stream));
-    }
-    // When we're done parsing children, we should be at the end of the object
-    if (stream.position !== end) {
-        throw new error_1.ASN1ParseError('invalid length');
-    }
-    return subs;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js
deleted file mode 100644
index 7fbb42632c60e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/parse.js
+++ /dev/null
@@ -1,124 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseInteger = parseInteger;
-exports.parseStringASCII = parseStringASCII;
-exports.parseTime = parseTime;
-exports.parseOID = parseOID;
-exports.parseBoolean = parseBoolean;
-exports.parseBitString = parseBitString;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const RE_TIME_SHORT_YEAR = /^(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
-const RE_TIME_LONG_YEAR = /^(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})(\.\d{3})?Z$/;
-// Parse a BigInt from the DER-encoded buffer
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-integer
-function parseInteger(buf) {
-    let pos = 0;
-    const end = buf.length;
-    let val = buf[pos];
-    const neg = val > 0x7f;
-    // Consume any padding bytes
-    const pad = neg ? 0xff : 0x00;
-    while (val == pad && ++pos < end) {
-        val = buf[pos];
-    }
-    // Calculate remaining bytes to read
-    const len = end - pos;
-    if (len === 0)
-        return BigInt(neg ? -1 : 0);
-    // Handle two's complement for negative numbers
-    val = neg ? val - 256 : val;
-    // Parse remaining bytes
-    let n = BigInt(val);
-    for (let i = pos + 1; i < end; ++i) {
-        n = n * BigInt(256) + BigInt(buf[i]);
-    }
-    return n;
-}
-// Parse an ASCII string from the DER-encoded buffer
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
-function parseStringASCII(buf) {
-    return buf.toString('ascii');
-}
-// Parse a Date from the DER-encoded buffer
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5.1
-function parseTime(buf, shortYear) {
-    const timeStr = parseStringASCII(buf);
-    // Parse the time string into matches - captured groups start at index 1
-    const m = shortYear
-        ? RE_TIME_SHORT_YEAR.exec(timeStr)
-        : RE_TIME_LONG_YEAR.exec(timeStr);
-    if (!m) {
-        throw new Error('invalid time');
-    }
-    // Translate dates with a 2-digit year to 4 digits per the spec
-    if (shortYear) {
-        let year = Number(m[1]);
-        year += year >= 50 ? 1900 : 2000;
-        m[1] = year.toString();
-    }
-    // Translate to ISO8601 format and parse
-    return new Date(`${m[1]}-${m[2]}-${m[3]}T${m[4]}:${m[5]}:${m[6]}Z`);
-}
-// Parse an OID from the DER-encoded buffer
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-object-identifier
-function parseOID(buf) {
-    let pos = 0;
-    const end = buf.length;
-    // Consume first byte which encodes the first two OID components
-    let n = buf[pos++];
-    const first = Math.floor(n / 40);
-    const second = n % 40;
-    let oid = `${first}.${second}`;
-    // Consume remaining bytes
-    let val = 0;
-    for (; pos < end; ++pos) {
-        n = buf[pos];
-        val = (val << 7) + (n & 0x7f);
-        // If the left-most bit is NOT set, then this is the last byte in the
-        // sequence and we can add the value to the OID and reset the accumulator
-        if ((n & 0x80) === 0) {
-            oid += `.${val}`;
-            val = 0;
-        }
-    }
-    return oid;
-}
-// Parse a boolean from the DER-encoded buffer
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-basic-types#boolean
-function parseBoolean(buf) {
-    return buf[0] !== 0;
-}
-// Parse a bit string from the DER-encoded buffer
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-bit-string
-function parseBitString(buf) {
-    // First byte tells us how many unused bits are in the last byte
-    const unused = buf[0];
-    const start = 1;
-    const end = buf.length;
-    const bits = [];
-    for (let i = start; i < end; ++i) {
-        const byte = buf[i];
-        // The skip value is only used for the last byte
-        const skip = i === end - 1 ? unused : 0;
-        // Iterate over each bit in the byte (most significant first)
-        for (let j = 7; j >= skip; --j) {
-            // Read the bit and add it to the bit string
-            bits.push((byte >> j) & 0x01);
-        }
-    }
-    return bits;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js b/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js
deleted file mode 100644
index 84dd938d049aa..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/asn1/tag.js
+++ /dev/null
@@ -1,86 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ASN1Tag = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("./error");
-const UNIVERSAL_TAG = {
-    BOOLEAN: 0x01,
-    INTEGER: 0x02,
-    BIT_STRING: 0x03,
-    OCTET_STRING: 0x04,
-    OBJECT_IDENTIFIER: 0x06,
-    SEQUENCE: 0x10,
-    SET: 0x11,
-    PRINTABLE_STRING: 0x13,
-    UTC_TIME: 0x17,
-    GENERALIZED_TIME: 0x18,
-};
-const TAG_CLASS = {
-    UNIVERSAL: 0x00,
-    APPLICATION: 0x01,
-    CONTEXT_SPECIFIC: 0x02,
-    PRIVATE: 0x03,
-};
-// https://learn.microsoft.com/en-us/windows/win32/seccertenroll/about-encoded-tag-bytes
-class ASN1Tag {
-    constructor(enc) {
-        // Bits 0 through 4 are the tag number
-        this.number = enc & 0x1f;
-        // Bit 5 is the constructed bit
-        this.constructed = (enc & 0x20) === 0x20;
-        // Bit 6 & 7 are the class
-        this.class = enc >> 6;
-        if (this.number === 0x1f) {
-            throw new error_1.ASN1ParseError('long form tags not supported');
-        }
-        if (this.class === TAG_CLASS.UNIVERSAL && this.number === 0x00) {
-            throw new error_1.ASN1ParseError('unsupported tag 0x00');
-        }
-    }
-    isUniversal() {
-        return this.class === TAG_CLASS.UNIVERSAL;
-    }
-    isContextSpecific(num) {
-        const res = this.class === TAG_CLASS.CONTEXT_SPECIFIC;
-        return num !== undefined ? res && this.number === num : res;
-    }
-    isBoolean() {
-        return this.isUniversal() && this.number === UNIVERSAL_TAG.BOOLEAN;
-    }
-    isInteger() {
-        return this.isUniversal() && this.number === UNIVERSAL_TAG.INTEGER;
-    }
-    isBitString() {
-        return this.isUniversal() && this.number === UNIVERSAL_TAG.BIT_STRING;
-    }
-    isOctetString() {
-        return this.isUniversal() && this.number === UNIVERSAL_TAG.OCTET_STRING;
-    }
-    isOID() {
-        return (this.isUniversal() && this.number === UNIVERSAL_TAG.OBJECT_IDENTIFIER);
-    }
-    isUTCTime() {
-        return this.isUniversal() && this.number === UNIVERSAL_TAG.UTC_TIME;
-    }
-    isGeneralizedTime() {
-        return this.isUniversal() && this.number === UNIVERSAL_TAG.GENERALIZED_TIME;
-    }
-    toDER() {
-        return this.number | (this.constructed ? 0x20 : 0x00) | (this.class << 6);
-    }
-}
-exports.ASN1Tag = ASN1Tag;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js b/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js
deleted file mode 100644
index 296b5ba43e86a..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/crypto.js
+++ /dev/null
@@ -1,60 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.createPublicKey = createPublicKey;
-exports.digest = digest;
-exports.verify = verify;
-exports.bufferEqual = bufferEqual;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const crypto_1 = __importDefault(require("crypto"));
-function createPublicKey(key, type = 'spki') {
-    if (typeof key === 'string') {
-        return crypto_1.default.createPublicKey(key);
-    }
-    else {
-        return crypto_1.default.createPublicKey({ key, format: 'der', type: type });
-    }
-}
-function digest(algorithm, ...data) {
-    const hash = crypto_1.default.createHash(algorithm);
-    for (const d of data) {
-        hash.update(d);
-    }
-    return hash.digest();
-}
-function verify(data, key, signature, algorithm) {
-    // The try/catch is to work around an issue in Node 14.x where verify throws
-    // an error in some scenarios if the signature is invalid.
-    try {
-        return crypto_1.default.verify(algorithm, data, key, signature);
-    }
-    catch (e) {
-        /* istanbul ignore next */
-        return false;
-    }
-}
-function bufferEqual(a, b) {
-    try {
-        return crypto_1.default.timingSafeEqual(a, b);
-    }
-    catch {
-        /* istanbul ignore next */
-        return false;
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js b/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js
deleted file mode 100644
index ca7b63630e2ba..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/dsse.js
+++ /dev/null
@@ -1,30 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.preAuthEncoding = preAuthEncoding;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const PAE_PREFIX = 'DSSEv1';
-// DSSE Pre-Authentication Encoding
-function preAuthEncoding(payloadType, payload) {
-    const prefix = [
-        PAE_PREFIX,
-        payloadType.length,
-        payloadType,
-        payload.length,
-        '',
-    ].join(' ');
-    return Buffer.concat([Buffer.from(prefix, 'ascii'), payload]);
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js b/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js
deleted file mode 100644
index 7113af66db4c2..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/encoding.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.base64Encode = base64Encode;
-exports.base64Decode = base64Decode;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const BASE64_ENCODING = 'base64';
-const UTF8_ENCODING = 'utf-8';
-function base64Encode(str) {
-    return Buffer.from(str, UTF8_ENCODING).toString(BASE64_ENCODING);
-}
-function base64Decode(str) {
-    return Buffer.from(str, BASE64_ENCODING).toString(UTF8_ENCODING);
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/index.js
deleted file mode 100644
index 49859d84db756..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/index.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = exports.ByteStream = exports.RFC3161Timestamp = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = exports.ASN1Obj = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var asn1_1 = require("./asn1");
-Object.defineProperty(exports, "ASN1Obj", { enumerable: true, get: function () { return asn1_1.ASN1Obj; } });
-exports.crypto = __importStar(require("./crypto"));
-exports.dsse = __importStar(require("./dsse"));
-exports.encoding = __importStar(require("./encoding"));
-exports.json = __importStar(require("./json"));
-exports.pem = __importStar(require("./pem"));
-var rfc3161_1 = require("./rfc3161");
-Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return rfc3161_1.RFC3161Timestamp; } });
-var stream_1 = require("./stream");
-Object.defineProperty(exports, "ByteStream", { enumerable: true, get: function () { return stream_1.ByteStream; } });
-var x509_1 = require("./x509");
-Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return x509_1.EXTENSION_OID_SCT; } });
-Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return x509_1.X509Certificate; } });
-Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return x509_1.X509SCTExtension; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/json.js b/node_modules/pacote/node_modules/@sigstore/core/dist/json.js
deleted file mode 100644
index 7808d033b98cc..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/json.js
+++ /dev/null
@@ -1,60 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.canonicalize = canonicalize;
-// JSON canonicalization per https://github.com/cyberphone/json-canonicalization
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function canonicalize(object) {
-    let buffer = '';
-    if (object === null || typeof object !== 'object' || object.toJSON != null) {
-        // Primitives or toJSONable objects
-        buffer += JSON.stringify(object);
-    }
-    else if (Array.isArray(object)) {
-        // Array - maintain element order
-        buffer += '[';
-        let first = true;
-        object.forEach((element) => {
-            if (!first) {
-                buffer += ',';
-            }
-            first = false;
-            // recursive call
-            buffer += canonicalize(element);
-        });
-        buffer += ']';
-    }
-    else {
-        // Object - Sort properties before serializing
-        buffer += '{';
-        let first = true;
-        Object.keys(object)
-            .sort()
-            .forEach((property) => {
-            if (!first) {
-                buffer += ',';
-            }
-            first = false;
-            buffer += JSON.stringify(property);
-            buffer += ':';
-            // recursive call
-            buffer += canonicalize(object[property]);
-        });
-        buffer += '}';
-    }
-    return buffer;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js b/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js
deleted file mode 100644
index ac7a643067ad0..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/oid.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SHA2_HASH_ALGOS = exports.ECDSA_SIGNATURE_ALGOS = void 0;
-exports.ECDSA_SIGNATURE_ALGOS = {
-    '1.2.840.10045.4.3.1': 'sha224',
-    '1.2.840.10045.4.3.2': 'sha256',
-    '1.2.840.10045.4.3.3': 'sha384',
-    '1.2.840.10045.4.3.4': 'sha512',
-};
-exports.SHA2_HASH_ALGOS = {
-    '2.16.840.1.101.3.4.2.1': 'sha256',
-    '2.16.840.1.101.3.4.2.2': 'sha384',
-    '2.16.840.1.101.3.4.2.3': 'sha512',
-};
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js b/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js
deleted file mode 100644
index f1241d28d586e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/pem.js
+++ /dev/null
@@ -1,43 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toDER = toDER;
-exports.fromDER = fromDER;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const PEM_HEADER = /-----BEGIN (.*)-----/;
-const PEM_FOOTER = /-----END (.*)-----/;
-function toDER(certificate) {
-    let der = '';
-    certificate.split('\n').forEach((line) => {
-        if (line.match(PEM_HEADER) || line.match(PEM_FOOTER)) {
-            return;
-        }
-        der += line;
-    });
-    return Buffer.from(der, 'base64');
-}
-// Translates a DER-encoded buffer into a PEM-encoded string. Standard PEM
-// encoding dictates that each certificate should have a trailing newline after
-// the footer.
-function fromDER(certificate, type = 'CERTIFICATE') {
-    // Base64-encode the certificate.
-    const der = certificate.toString('base64');
-    // Split the certificate into lines of 64 characters.
-    const lines = der.match(/.{1,64}/g) || '';
-    return [`-----BEGIN ${type}-----`, ...lines, `-----END ${type}-----`]
-        .join('\n')
-        .concat('\n');
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js
deleted file mode 100644
index b9b549b0bb323..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/error.js
+++ /dev/null
@@ -1,21 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RFC3161TimestampVerificationError = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-class RFC3161TimestampVerificationError extends Error {
-}
-exports.RFC3161TimestampVerificationError = RFC3161TimestampVerificationError;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js
deleted file mode 100644
index b77ecf1c7d50c..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RFC3161Timestamp = void 0;
-var timestamp_1 = require("./timestamp");
-Object.defineProperty(exports, "RFC3161Timestamp", { enumerable: true, get: function () { return timestamp_1.RFC3161Timestamp; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
deleted file mode 100644
index 982fb5e6126e8..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/timestamp.js
+++ /dev/null
@@ -1,211 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RFC3161Timestamp = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const asn1_1 = require("../asn1");
-const crypto = __importStar(require("../crypto"));
-const oid_1 = require("../oid");
-const error_1 = require("./error");
-const tstinfo_1 = require("./tstinfo");
-const OID_PKCS9_CONTENT_TYPE_SIGNED_DATA = '1.2.840.113549.1.7.2';
-const OID_PKCS9_CONTENT_TYPE_TSTINFO = '1.2.840.113549.1.9.16.1.4';
-const OID_PKCS9_MESSAGE_DIGEST_KEY = '1.2.840.113549.1.9.4';
-class RFC3161Timestamp {
-    constructor(asn1) {
-        this.root = asn1;
-    }
-    static parse(der) {
-        const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
-        return new RFC3161Timestamp(asn1);
-    }
-    get status() {
-        return this.pkiStatusInfoObj.subs[0].toInteger();
-    }
-    get contentType() {
-        return this.contentTypeObj.toOID();
-    }
-    get eContentType() {
-        return this.eContentTypeObj.toOID();
-    }
-    get signingTime() {
-        return this.tstInfo.genTime;
-    }
-    get signerIssuer() {
-        return this.signerSidObj.subs[0].value;
-    }
-    get signerSerialNumber() {
-        return this.signerSidObj.subs[1].value;
-    }
-    get signerDigestAlgorithm() {
-        const oid = this.signerDigestAlgorithmObj.subs[0].toOID();
-        return oid_1.SHA2_HASH_ALGOS[oid];
-    }
-    get signatureAlgorithm() {
-        const oid = this.signatureAlgorithmObj.subs[0].toOID();
-        return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
-    }
-    get signatureValue() {
-        return this.signatureValueObj.value;
-    }
-    get tstInfo() {
-        // Need to unpack tstInfo from an OCTET STRING
-        return new tstinfo_1.TSTInfo(this.eContentObj.subs[0].subs[0]);
-    }
-    verify(data, publicKey) {
-        if (!this.timeStampTokenObj) {
-            throw new error_1.RFC3161TimestampVerificationError('timeStampToken is missing');
-        }
-        // Check for expected ContentInfo content type
-        if (this.contentType !== OID_PKCS9_CONTENT_TYPE_SIGNED_DATA) {
-            throw new error_1.RFC3161TimestampVerificationError(`incorrect content type: ${this.contentType}`);
-        }
-        // Check for expected encapsulated content type
-        if (this.eContentType !== OID_PKCS9_CONTENT_TYPE_TSTINFO) {
-            throw new error_1.RFC3161TimestampVerificationError(`incorrect encapsulated content type: ${this.eContentType}`);
-        }
-        // Check that the tstInfo references the correct artifact
-        this.tstInfo.verify(data);
-        // Check that the signed message digest matches the tstInfo
-        this.verifyMessageDigest();
-        // Check that the signature is valid for the signed attributes
-        this.verifySignature(publicKey);
-    }
-    verifyMessageDigest() {
-        // Check that the tstInfo matches the signed data
-        const tstInfoDigest = crypto.digest(this.signerDigestAlgorithm, this.tstInfo.raw);
-        const expectedDigest = this.messageDigestAttributeObj.subs[1].subs[0].value;
-        if (!crypto.bufferEqual(tstInfoDigest, expectedDigest)) {
-            throw new error_1.RFC3161TimestampVerificationError('signed data does not match tstInfo');
-        }
-    }
-    verifySignature(key) {
-        // Encode the signed attributes for verification
-        const signedAttrs = this.signedAttrsObj.toDER();
-        signedAttrs[0] = 0x31; // Change context-specific tag to SET
-        // Check that the signature is valid for the signed attributes
-        const verified = crypto.verify(signedAttrs, key, this.signatureValue, this.signatureAlgorithm);
-        if (!verified) {
-            throw new error_1.RFC3161TimestampVerificationError('signature verification failed');
-        }
-    }
-    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
-    get pkiStatusInfoObj() {
-        // pkiStatusInfo is the first element of the timestamp response sequence
-        return this.root.subs[0];
-    }
-    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
-    get timeStampTokenObj() {
-        // timeStampToken is the first element of the timestamp response sequence
-        return this.root.subs[1];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-3
-    get contentTypeObj() {
-        return this.timeStampTokenObj.subs[0];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5652#section-3
-    get signedDataObj() {
-        const obj = this.timeStampTokenObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
-        return obj.subs[0];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
-    get encapContentInfoObj() {
-        return this.signedDataObj.subs[2];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.1
-    get signerInfosObj() {
-        // SignerInfos is the last element of the signed data sequence
-        const sd = this.signedDataObj;
-        return sd.subs[sd.subs.length - 1];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5652#section-5.1
-    get signerInfoObj() {
-        // Only supporting one signer
-        return this.signerInfosObj.subs[0];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
-    get eContentTypeObj() {
-        return this.encapContentInfoObj.subs[0];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.2
-    get eContentObj() {
-        return this.encapContentInfoObj.subs[1];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
-    get signedAttrsObj() {
-        const signedAttrs = this.signerInfoObj.subs.find((sub) => sub.tag.isContextSpecific(0x00));
-        return signedAttrs;
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
-    get messageDigestAttributeObj() {
-        const messageDigest = this.signedAttrsObj.subs.find((sub) => sub.subs[0].tag.isOID() &&
-            sub.subs[0].toOID() === OID_PKCS9_MESSAGE_DIGEST_KEY);
-        return messageDigest;
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
-    get signerSidObj() {
-        return this.signerInfoObj.subs[1];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
-    get signerDigestAlgorithmObj() {
-        // Signature is the 2nd element of the signerInfoObj object
-        return this.signerInfoObj.subs[2];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
-    get signatureAlgorithmObj() {
-        // Signature is the 4th element of the signerInfoObj object
-        return this.signerInfoObj.subs[4];
-    }
-    // https://datatracker.ietf.org/doc/html/rfc5652#section-5.3
-    get signatureValueObj() {
-        // Signature is the 6th element of the signerInfoObj object
-        return this.signerInfoObj.subs[5];
-    }
-}
-exports.RFC3161Timestamp = RFC3161Timestamp;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js b/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
deleted file mode 100644
index d5001c42c108f..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/rfc3161/tstinfo.js
+++ /dev/null
@@ -1,71 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSTInfo = void 0;
-const crypto = __importStar(require("../crypto"));
-const oid_1 = require("../oid");
-const error_1 = require("./error");
-class TSTInfo {
-    constructor(asn1) {
-        this.root = asn1;
-    }
-    get version() {
-        return this.root.subs[0].toInteger();
-    }
-    get genTime() {
-        return this.root.subs[4].toDate();
-    }
-    get messageImprintHashAlgorithm() {
-        const oid = this.messageImprintObj.subs[0].subs[0].toOID();
-        return oid_1.SHA2_HASH_ALGOS[oid];
-    }
-    get messageImprintHashedMessage() {
-        return this.messageImprintObj.subs[1].value;
-    }
-    get raw() {
-        return this.root.toDER();
-    }
-    verify(data) {
-        const digest = crypto.digest(this.messageImprintHashAlgorithm, data);
-        if (!crypto.bufferEqual(digest, this.messageImprintHashedMessage)) {
-            throw new error_1.RFC3161TimestampVerificationError('message imprint does not match artifact');
-        }
-    }
-    // https://www.rfc-editor.org/rfc/rfc3161#section-2.4.2
-    get messageImprintObj() {
-        return this.root.subs[2];
-    }
-}
-exports.TSTInfo = TSTInfo;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js b/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js
deleted file mode 100644
index 0a24f8582eb23..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/stream.js
+++ /dev/null
@@ -1,115 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ByteStream = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-class StreamError extends Error {
-}
-class ByteStream {
-    constructor(buffer) {
-        this.start = 0;
-        if (buffer) {
-            this.buf = buffer;
-            this.view = Buffer.from(buffer);
-        }
-        else {
-            this.buf = new ArrayBuffer(0);
-            this.view = Buffer.from(this.buf);
-        }
-    }
-    get buffer() {
-        return this.view.subarray(0, this.start);
-    }
-    get length() {
-        return this.view.byteLength;
-    }
-    get position() {
-        return this.start;
-    }
-    seek(position) {
-        this.start = position;
-    }
-    // Returns a Buffer containing the specified number of bytes starting at the
-    // given start position.
-    slice(start, len) {
-        const end = start + len;
-        if (end > this.length) {
-            throw new StreamError('request past end of buffer');
-        }
-        return this.view.subarray(start, end);
-    }
-    appendChar(char) {
-        this.ensureCapacity(1);
-        this.view[this.start] = char;
-        this.start += 1;
-    }
-    appendUint16(num) {
-        this.ensureCapacity(2);
-        const value = new Uint16Array([num]);
-        const view = new Uint8Array(value.buffer);
-        this.view[this.start] = view[1];
-        this.view[this.start + 1] = view[0];
-        this.start += 2;
-    }
-    appendUint24(num) {
-        this.ensureCapacity(3);
-        const value = new Uint32Array([num]);
-        const view = new Uint8Array(value.buffer);
-        this.view[this.start] = view[2];
-        this.view[this.start + 1] = view[1];
-        this.view[this.start + 2] = view[0];
-        this.start += 3;
-    }
-    appendView(view) {
-        this.ensureCapacity(view.length);
-        this.view.set(view, this.start);
-        this.start += view.length;
-    }
-    getBlock(size) {
-        if (size <= 0) {
-            return Buffer.alloc(0);
-        }
-        if (this.start + size > this.view.length) {
-            throw new Error('request past end of buffer');
-        }
-        const result = this.view.subarray(this.start, this.start + size);
-        this.start += size;
-        return result;
-    }
-    getUint8() {
-        return this.getBlock(1)[0];
-    }
-    getUint16() {
-        const block = this.getBlock(2);
-        return (block[0] << 8) | block[1];
-    }
-    ensureCapacity(size) {
-        if (this.start + size > this.view.byteLength) {
-            const blockSize = ByteStream.BLOCK_SIZE + (size > ByteStream.BLOCK_SIZE ? size : 0);
-            this.realloc(this.view.byteLength + blockSize);
-        }
-    }
-    realloc(size) {
-        const newArray = new ArrayBuffer(size);
-        const newView = Buffer.from(newArray);
-        // Copy the old buffer into the new one
-        newView.set(this.view);
-        this.buf = newArray;
-        this.view = newView;
-    }
-}
-exports.ByteStream = ByteStream;
-ByteStream.BLOCK_SIZE = 1024;
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js
deleted file mode 100644
index 83aee7d1215a4..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/cert.js
+++ /dev/null
@@ -1,241 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const asn1_1 = require("../asn1");
-const crypto = __importStar(require("../crypto"));
-const oid_1 = require("../oid");
-const pem = __importStar(require("../pem"));
-const ext_1 = require("./ext");
-const EXTENSION_OID_SUBJECT_KEY_ID = '2.5.29.14';
-const EXTENSION_OID_KEY_USAGE = '2.5.29.15';
-const EXTENSION_OID_SUBJECT_ALT_NAME = '2.5.29.17';
-const EXTENSION_OID_BASIC_CONSTRAINTS = '2.5.29.19';
-const EXTENSION_OID_AUTHORITY_KEY_ID = '2.5.29.35';
-exports.EXTENSION_OID_SCT = '1.3.6.1.4.1.11129.2.4.2';
-class X509Certificate {
-    constructor(asn1) {
-        this.root = asn1;
-    }
-    static parse(cert) {
-        const der = typeof cert === 'string' ? pem.toDER(cert) : cert;
-        const asn1 = asn1_1.ASN1Obj.parseBuffer(der);
-        return new X509Certificate(asn1);
-    }
-    get tbsCertificate() {
-        return this.tbsCertificateObj;
-    }
-    get version() {
-        // version number is the first element of the version context specific tag
-        const ver = this.versionObj.subs[0].toInteger();
-        return `v${(ver + BigInt(1)).toString()}`;
-    }
-    get serialNumber() {
-        return this.serialNumberObj.value;
-    }
-    get notBefore() {
-        // notBefore is the first element of the validity sequence
-        return this.validityObj.subs[0].toDate();
-    }
-    get notAfter() {
-        // notAfter is the second element of the validity sequence
-        return this.validityObj.subs[1].toDate();
-    }
-    get issuer() {
-        return this.issuerObj.value;
-    }
-    get subject() {
-        return this.subjectObj.value;
-    }
-    get publicKey() {
-        return this.subjectPublicKeyInfoObj.toDER();
-    }
-    get signatureAlgorithm() {
-        const oid = this.signatureAlgorithmObj.subs[0].toOID();
-        return oid_1.ECDSA_SIGNATURE_ALGOS[oid];
-    }
-    get signatureValue() {
-        // Signature value is a bit string, so we need to skip the first byte
-        return this.signatureValueObj.value.subarray(1);
-    }
-    get subjectAltName() {
-        const ext = this.extSubjectAltName;
-        return ext?.uri || /* istanbul ignore next */ ext?.rfc822Name;
-    }
-    get extensions() {
-        // The extension list is the first (and only) element of the extensions
-        // context specific tag
-        /* istanbul ignore next */
-        const extSeq = this.extensionsObj?.subs[0];
-        /* istanbul ignore next */
-        return extSeq?.subs || [];
-    }
-    get extKeyUsage() {
-        const ext = this.findExtension(EXTENSION_OID_KEY_USAGE);
-        return ext ? new ext_1.X509KeyUsageExtension(ext) : undefined;
-    }
-    get extBasicConstraints() {
-        const ext = this.findExtension(EXTENSION_OID_BASIC_CONSTRAINTS);
-        return ext ? new ext_1.X509BasicConstraintsExtension(ext) : undefined;
-    }
-    get extSubjectAltName() {
-        const ext = this.findExtension(EXTENSION_OID_SUBJECT_ALT_NAME);
-        return ext ? new ext_1.X509SubjectAlternativeNameExtension(ext) : undefined;
-    }
-    get extAuthorityKeyID() {
-        const ext = this.findExtension(EXTENSION_OID_AUTHORITY_KEY_ID);
-        return ext ? new ext_1.X509AuthorityKeyIDExtension(ext) : undefined;
-    }
-    get extSubjectKeyID() {
-        const ext = this.findExtension(EXTENSION_OID_SUBJECT_KEY_ID);
-        return ext
-            ? new ext_1.X509SubjectKeyIDExtension(ext)
-            : /* istanbul ignore next */ undefined;
-    }
-    get extSCT() {
-        const ext = this.findExtension(exports.EXTENSION_OID_SCT);
-        return ext ? new ext_1.X509SCTExtension(ext) : undefined;
-    }
-    get isCA() {
-        const ca = this.extBasicConstraints?.isCA || false;
-        // If the KeyUsage extension is present, keyCertSign must be set
-        /* istanbul ignore else */
-        if (this.extKeyUsage) {
-            return ca && this.extKeyUsage.keyCertSign;
-        }
-        // TODO: test coverage for this case
-        /* istanbul ignore next */
-        return ca;
-    }
-    extension(oid) {
-        const ext = this.findExtension(oid);
-        return ext ? new ext_1.X509Extension(ext) : undefined;
-    }
-    verify(issuerCertificate) {
-        // Use the issuer's public key if provided, otherwise use the subject's
-        const publicKey = issuerCertificate?.publicKey || this.publicKey;
-        const key = crypto.createPublicKey(publicKey);
-        return crypto.verify(this.tbsCertificate.toDER(), key, this.signatureValue, this.signatureAlgorithm);
-    }
-    validForDate(date) {
-        return this.notBefore <= date && date <= this.notAfter;
-    }
-    equals(other) {
-        return this.root.toDER().equals(other.root.toDER());
-    }
-    // Creates a copy of the certificate with a new buffer
-    clone() {
-        const der = this.root.toDER();
-        const clone = Buffer.alloc(der.length);
-        der.copy(clone);
-        return X509Certificate.parse(clone);
-    }
-    findExtension(oid) {
-        // Find the extension with the given OID. The OID will always be the first
-        // element of the extension sequence
-        return this.extensions.find((ext) => ext.subs[0].toOID() === oid);
-    }
-    /////////////////////////////////////////////////////////////////////////////
-    // The following properties use the documented x509 structure to locate the
-    // desired ASN.1 object
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.1
-    get tbsCertificateObj() {
-        // tbsCertificate is the first element of the certificate sequence
-        return this.root.subs[0];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.2
-    get signatureAlgorithmObj() {
-        // signatureAlgorithm is the second element of the certificate sequence
-        return this.root.subs[1];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.1.3
-    get signatureValueObj() {
-        // signatureValue is the third element of the certificate sequence
-        return this.root.subs[2];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.1
-    get versionObj() {
-        // version is the first element of the tbsCertificate sequence
-        return this.tbsCertificateObj.subs[0];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.2
-    get serialNumberObj() {
-        // serialNumber is the second element of the tbsCertificate sequence
-        return this.tbsCertificateObj.subs[1];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.4
-    get issuerObj() {
-        // issuer is the fourth element of the tbsCertificate sequence
-        return this.tbsCertificateObj.subs[3];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.5
-    get validityObj() {
-        // validity is the fifth element of the tbsCertificate sequence
-        return this.tbsCertificateObj.subs[4];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.6
-    get subjectObj() {
-        // subject is the sixth element of the tbsCertificate sequence
-        return this.tbsCertificateObj.subs[5];
-    }
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.7
-    get subjectPublicKeyInfoObj() {
-        // subjectPublicKeyInfo is the seventh element of the tbsCertificate sequence
-        return this.tbsCertificateObj.subs[6];
-    }
-    // Extensions can't be located by index because their position varies. Instead,
-    // we need to find the extensions context specific tag
-    // https://www.rfc-editor.org/rfc/rfc5280#section-4.1.2.9
-    get extensionsObj() {
-        return this.tbsCertificateObj.subs.find((sub) => sub.tag.isContextSpecific(0x03));
-    }
-}
-exports.X509Certificate = X509Certificate;
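
The deleted X509Certificate class above locates each field by walking the tbsCertificate
sequence by index. A minimal sketch of reading the same fields with Node's built-in
crypto.X509Certificate (an illustration only, not the sigstore-js implementation;
'cert.pem' is a hypothetical path):

    const { X509Certificate } = require('crypto')
    const { readFileSync } = require('fs')

    // accepts PEM or DER; exposes the fields the deleted class derives
    // by walking the tbsCertificate sequence by index
    const cert = new X509Certificate(readFileSync('cert.pem'))
    console.log(cert.subject)                  // subject distinguished name
    console.log(cert.issuer)                   // issuer distinguished name
    console.log(cert.validFrom, cert.validTo)  // notBefore / notAfter
    console.log(cert.serialNumber)
    console.log(cert.ca)                       // basicConstraints CA flag
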
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js
deleted file mode 100644
index 1d481261b0aa6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/ext.js
+++ /dev/null
@@ -1,145 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.X509SCTExtension = exports.X509SubjectKeyIDExtension = exports.X509AuthorityKeyIDExtension = exports.X509SubjectAlternativeNameExtension = exports.X509KeyUsageExtension = exports.X509BasicConstraintsExtension = exports.X509Extension = void 0;
-const stream_1 = require("../stream");
-const sct_1 = require("./sct");
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.1
-class X509Extension {
-    constructor(asn1) {
-        this.root = asn1;
-    }
-    get oid() {
-        return this.root.subs[0].toOID();
-    }
-    get critical() {
-        // The critical field is optional and will be the second element of the
-        // extension sequence if present. Default to false if not present.
-        return this.root.subs.length === 3 ? this.root.subs[1].toBoolean() : false;
-    }
-    get value() {
-        return this.extnValueObj.value;
-    }
-    get valueObj() {
-        return this.extnValueObj;
-    }
-    get extnValueObj() {
-        // The extnValue field will be the last element of the extension sequence
-        return this.root.subs[this.root.subs.length - 1];
-    }
-}
-exports.X509Extension = X509Extension;
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.9
-class X509BasicConstraintsExtension extends X509Extension {
-    get isCA() {
-        return this.sequence.subs[0]?.toBoolean() ?? false;
-    }
-    get pathLenConstraint() {
-        return this.sequence.subs.length > 1
-            ? this.sequence.subs[1].toInteger()
-            : undefined;
-    }
-    // The extnValue field contains a single sequence wrapping the isCA and
-    // pathLenConstraint.
-    get sequence() {
-        return this.extnValueObj.subs[0];
-    }
-}
-exports.X509BasicConstraintsExtension = X509BasicConstraintsExtension;
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.3
-class X509KeyUsageExtension extends X509Extension {
-    get digitalSignature() {
-        return this.bitString[0] === 1;
-    }
-    get keyCertSign() {
-        return this.bitString[5] === 1;
-    }
-    get crlSign() {
-        return this.bitString[6] === 1;
-    }
-    // The extnValue field contains a single bit string which is a bit mask
-    // indicating which key usages are enabled.
-    get bitString() {
-        return this.extnValueObj.subs[0].toBitString();
-    }
-}
-exports.X509KeyUsageExtension = X509KeyUsageExtension;
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.6
-class X509SubjectAlternativeNameExtension extends X509Extension {
-    get rfc822Name() {
-        return this.findGeneralName(0x01)?.value.toString('ascii');
-    }
-    get uri() {
-        return this.findGeneralName(0x06)?.value.toString('ascii');
-    }
-    // Retrieve the value of an otherName with the given OID.
-    otherName(oid) {
-        const otherName = this.findGeneralName(0x00);
-        if (otherName === undefined) {
-            return undefined;
-        }
-        // The otherName is a sequence containing an OID and a value.
-        // Need to check that the OID matches the one we're looking for.
-        const otherNameOID = otherName.subs[0].toOID();
-        if (otherNameOID !== oid) {
-            return undefined;
-        }
-        // The otherNameValue is a sequence containing the actual value.
-        const otherNameValue = otherName.subs[1];
-        return otherNameValue.subs[0].value.toString('ascii');
-    }
-    findGeneralName(tag) {
-        return this.generalNames.find((gn) => gn.tag.isContextSpecific(tag));
-    }
-    // The extnValue field contains a sequence of GeneralNames.
-    get generalNames() {
-        return this.extnValueObj.subs[0].subs;
-    }
-}
-exports.X509SubjectAlternativeNameExtension = X509SubjectAlternativeNameExtension;
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.1
-class X509AuthorityKeyIDExtension extends X509Extension {
-    get keyIdentifier() {
-        return this.findSequenceMember(0x00)?.value;
-    }
-    findSequenceMember(tag) {
-        return this.sequence.subs.find((el) => el.tag.isContextSpecific(tag));
-    }
-    // The extnValue field contains a single sequence wrapping the keyIdentifier
-    get sequence() {
-        return this.extnValueObj.subs[0];
-    }
-}
-exports.X509AuthorityKeyIDExtension = X509AuthorityKeyIDExtension;
-// https://www.rfc-editor.org/rfc/rfc5280#section-4.2.1.2
-class X509SubjectKeyIDExtension extends X509Extension {
-    get keyIdentifier() {
-        return this.extnValueObj.subs[0].value;
-    }
-}
-exports.X509SubjectKeyIDExtension = X509SubjectKeyIDExtension;
-// https://www.rfc-editor.org/rfc/rfc6962#section-3.3
-class X509SCTExtension extends X509Extension {
-    constructor(asn1) {
-        super(asn1);
-    }
-    get signedCertificateTimestamps() {
-        const buf = this.extnValueObj.subs[0].value;
-        const stream = new stream_1.ByteStream(buf);
-        // The overall list length is encoded in the first two bytes -- note this
-        // is the length of the list in bytes, NOT the number of SCTs in the list
-        const end = stream.getUint16() + 2;
-        const sctList = [];
-        while (stream.position < end) {
-            // Read the length of the next SCT
-            const sctLength = stream.getUint16();
-            // Slice out the bytes for the next SCT and parse it
-            const sct = stream.getBlock(sctLength);
-            sctList.push(sct_1.SignedCertificateTimestamp.parse(sct));
-        }
-        if (stream.position !== end) {
-            throw new Error('SCT list length does not match actual length');
-        }
-        return sctList;
-    }
-}
-exports.X509SCTExtension = X509SCTExtension;
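
The signedCertificateTimestamps getter above decodes a uint16-length-prefixed list: the
first two bytes give the total byte length of the list, then each SCT is itself
uint16-length-prefixed. A minimal sketch of that layout with plain Buffer reads
(illustrative; the deleted code uses its own ByteStream wrapper):

    function parseUint16PrefixedList (buf) {
      // total byte length of the list (excludes the two length bytes themselves)
      const end = buf.readUInt16BE(0) + 2
      const items = []
      let pos = 2
      while (pos < end) {
        const len = buf.readUInt16BE(pos)             // length of the next item
        items.push(buf.subarray(pos + 2, pos + 2 + len))
        pos += 2 + len
      }
      if (pos !== end) {
        throw new Error('list length does not match actual length')
      }
      return items
    }
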
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js
deleted file mode 100644
index cdd77e58f37d5..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/index.js
+++ /dev/null
@@ -1,23 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.X509SCTExtension = exports.X509Certificate = exports.EXTENSION_OID_SCT = void 0;
-var cert_1 = require("./cert");
-Object.defineProperty(exports, "EXTENSION_OID_SCT", { enumerable: true, get: function () { return cert_1.EXTENSION_OID_SCT; } });
-Object.defineProperty(exports, "X509Certificate", { enumerable: true, get: function () { return cert_1.X509Certificate; } });
-var ext_1 = require("./ext");
-Object.defineProperty(exports, "X509SCTExtension", { enumerable: true, get: function () { return ext_1.X509SCTExtension; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js b/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js
deleted file mode 100644
index 55885e3b30742..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/dist/x509/sct.js
+++ /dev/null
@@ -1,151 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SignedCertificateTimestamp = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const crypto = __importStar(require("../crypto"));
-const stream_1 = require("../stream");
-class SignedCertificateTimestamp {
-    constructor(options) {
-        this.version = options.version;
-        this.logID = options.logID;
-        this.timestamp = options.timestamp;
-        this.extensions = options.extensions;
-        this.hashAlgorithm = options.hashAlgorithm;
-        this.signatureAlgorithm = options.signatureAlgorithm;
-        this.signature = options.signature;
-    }
-    get datetime() {
-        return new Date(Number(this.timestamp.readBigInt64BE()));
-    }
-    // Returns the hash algorithm used to generate the SCT's signature.
-    // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
-    get algorithm() {
-        switch (this.hashAlgorithm) {
-            /* istanbul ignore next */
-            case 0:
-                return 'none';
-            /* istanbul ignore next */
-            case 1:
-                return 'md5';
-            /* istanbul ignore next */
-            case 2:
-                return 'sha1';
-            /* istanbul ignore next */
-            case 3:
-                return 'sha224';
-            case 4:
-                return 'sha256';
-            /* istanbul ignore next */
-            case 5:
-                return 'sha384';
-            /* istanbul ignore next */
-            case 6:
-                return 'sha512';
-            /* istanbul ignore next */
-            default:
-                return 'unknown';
-        }
-    }
-    verify(preCert, key) {
-        // Assemble the digitally-signed struct (the data over which the signature
-        // was generated).
-        // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
-        const stream = new stream_1.ByteStream();
-        stream.appendChar(this.version);
-        stream.appendChar(0x00); // SignatureType = certificate_timestamp(0)
-        stream.appendView(this.timestamp);
-        stream.appendUint16(0x01); // LogEntryType = precert_entry(1)
-        stream.appendView(preCert);
-        stream.appendUint16(this.extensions.byteLength);
-        /* istanbul ignore next - extensions are very uncommon */
-        if (this.extensions.byteLength > 0) {
-            stream.appendView(this.extensions);
-        }
-        return crypto.verify(stream.buffer, key, this.signature, this.algorithm);
-    }
-    // Parses a SignedCertificateTimestamp from a buffer. SCTs are encoded using
-    // TLS encoding which means the fields and lengths of most fields are
-    // specified as part of the SCT and TLS specs.
-    // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
-    // https://www.rfc-editor.org/rfc/rfc5246#section-7.4.1.4.1
-    static parse(buf) {
-        const stream = new stream_1.ByteStream(buf);
-        // Version - enum { v1(0), (255) }
-        const version = stream.getUint8();
-        // Log ID  - struct { opaque key_id[32]; }
-        const logID = stream.getBlock(32);
-        // Timestamp - uint64
-        const timestamp = stream.getBlock(8);
-        // Extensions - opaque extensions<0..2^16-1>;
-        const extensionLength = stream.getUint16();
-        const extensions = stream.getBlock(extensionLength);
-        // Hash algo - enum { sha256(4), . . . (255) }
-        const hashAlgorithm = stream.getUint8();
-        // Signature algo - enum { anonymous(0), rsa(1), dsa(2), ecdsa(3), (255) }
-        const signatureAlgorithm = stream.getUint8();
-        // Signature  - opaque signature<0..2^16-1>;
-        const sigLength = stream.getUint16();
-        const signature = stream.getBlock(sigLength);
-        // Check that we read the entire buffer
-        if (stream.position !== buf.length) {
-            throw new Error('SCT buffer length mismatch');
-        }
-        return new SignedCertificateTimestamp({
-            version,
-            logID,
-            timestamp,
-            extensions,
-            hashAlgorithm,
-            signatureAlgorithm,
-            signature,
-        });
-    }
-}
-exports.SignedCertificateTimestamp = SignedCertificateTimestamp;
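
SignedCertificateTimestamp.parse above walks a fixed TLS-encoded layout. A hedged sketch
of the same walk with plain Buffer offsets, assuming the field order and widths documented
in the comments above:

    function readSct (buf) {
      let pos = 0
      const version = buf.readUInt8(pos); pos += 1               // enum { v1(0) }
      const logID = buf.subarray(pos, pos + 32); pos += 32       // opaque key_id[32]
      const timestamp = buf.subarray(pos, pos + 8); pos += 8     // uint64, ms since epoch
      const extLen = buf.readUInt16BE(pos); pos += 2
      const extensions = buf.subarray(pos, pos + extLen); pos += extLen
      const hashAlgorithm = buf.readUInt8(pos); pos += 1         // sha256(4)
      const signatureAlgorithm = buf.readUInt8(pos); pos += 1    // ecdsa(3)
      const sigLen = buf.readUInt16BE(pos); pos += 2
      const signature = buf.subarray(pos, pos + sigLen); pos += sigLen
      if (pos !== buf.length) {
        throw new Error('SCT buffer length mismatch')
      }
      return { version, logID, timestamp, extensions, hashAlgorithm, signatureAlgorithm, signature }
    }
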
diff --git a/node_modules/pacote/node_modules/@sigstore/core/package.json b/node_modules/pacote/node_modules/@sigstore/core/package.json
deleted file mode 100644
index 7d2f8d5de3f7a..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/core/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "@sigstore/core",
-  "version": "3.0.0",
-  "description": "Base library for Sigstore",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/core#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js
deleted file mode 100644
index 61d5eba4568a3..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/base.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BaseBundleBuilder = void 0;
-// BaseBundleBuilder is a base class for BundleBuilder implementations. It
-// provides the basic workflow for signing and witnessing an artifact.
-// Subclasses must implement the `package` method to assemble a valid bundle
-// with the generated signature and verification material.
-class BaseBundleBuilder {
-    constructor(options) {
-        this.signer = options.signer;
-        this.witnesses = options.witnesses;
-    }
-    // Executes the signing/witnessing process for the given artifact.
-    async create(artifact) {
-        const signature = await this.prepare(artifact).then((blob) => this.signer.sign(blob));
-        const bundle = await this.package(artifact, signature);
-        // Invoke all of the witnesses in parallel
-        const verificationMaterials = await Promise.all(this.witnesses.map((witness) => witness.testify(bundle.content, publicKey(signature.key))));
-        // Collect the verification material from all of the witnesses
-        const tlogEntryList = [];
-        const timestampList = [];
-        verificationMaterials.forEach(({ tlogEntries, rfc3161Timestamps }) => {
-            tlogEntryList.push(...(tlogEntries ?? []));
-            timestampList.push(...(rfc3161Timestamps ?? []));
-        });
-        // Merge the collected verification material into the bundle
-        bundle.verificationMaterial.tlogEntries = tlogEntryList;
-        bundle.verificationMaterial.timestampVerificationData = {
-            rfc3161Timestamps: timestampList,
-        };
-        return bundle;
-    }
-    // Override this function to apply any pre-signing transformations to the
-    // artifact. The returned buffer will be signed by the signer. The default
-    // implementation simply returns the artifact data.
-    async prepare(artifact) {
-        return artifact.data;
-    }
-}
-exports.BaseBundleBuilder = BaseBundleBuilder;
-// Extracts the public key from a KeyMaterial. Returns either the public key
-// or the certificate, depending on the type of key material.
-function publicKey(key) {
-    switch (key.$case) {
-        case 'publicKey':
-            return key.publicKey;
-        case 'x509Certificate':
-            return key.certificate;
-    }
-}
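
BaseBundleBuilder.create above signs once, then runs every witness in parallel and merges
the returned verification material. A minimal sketch of that flow; `signer` and `witnesses`
are hypothetical objects with the shapes used above:

    async function createBundle (artifact, signer, witnesses) {
      // sign the (possibly pre-processed) artifact once
      const signature = await signer.sign(artifact.data)
      const bundle = { content: signature, verificationMaterial: {} }

      // run every witness in parallel against the signed content
      const materials = await Promise.all(
        witnesses.map((w) => w.testify(bundle.content, signature.key))
      )

      // merge whatever verification material the witnesses returned
      const tlogEntries = []
      const rfc3161Timestamps = []
      for (const m of materials) {
        tlogEntries.push(...(m.tlogEntries ?? []))
        rfc3161Timestamps.push(...(m.rfc3161Timestamps ?? []))
      }
      bundle.verificationMaterial.tlogEntries = tlogEntries
      bundle.verificationMaterial.timestampVerificationData = { rfc3161Timestamps }
      return bundle
    }
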
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js
deleted file mode 100644
index 34b1d12f2b44c..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/bundle.js
+++ /dev/null
@@ -1,81 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toMessageSignatureBundle = toMessageSignatureBundle;
-exports.toDSSEBundle = toDSSEBundle;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const sigstore = __importStar(require("@sigstore/bundle"));
-const util_1 = require("../util");
-// Helper functions for assembling the parts of a Sigstore bundle
-// Message signature bundle - $case: 'messageSignature'
-function toMessageSignatureBundle(artifact, signature) {
-    const digest = util_1.crypto.digest('sha256', artifact.data);
-    return sigstore.toMessageSignatureBundle({
-        digest,
-        signature: signature.signature,
-        certificate: signature.key.$case === 'x509Certificate'
-            ? util_1.pem.toDER(signature.key.certificate)
-            : undefined,
-        keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
-        certificateChain: true,
-    });
-}
-// DSSE envelope bundle - $case: 'dsseEnvelope'
-function toDSSEBundle(artifact, signature, certificateChain) {
-    return sigstore.toDSSEBundle({
-        artifact: artifact.data,
-        artifactType: artifact.type,
-        signature: signature.signature,
-        certificate: signature.key.$case === 'x509Certificate'
-            ? util_1.pem.toDER(signature.key.certificate)
-            : undefined,
-        keyHint: signature.key.$case === 'publicKey' ? signature.key.hint : undefined,
-        certificateChain,
-    });
-}
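
toMessageSignatureBundle above hashes the raw artifact bytes before bundling; the deleted
code routes this through an internal util wrapper, but the equivalent using only Node's
crypto module (with a hypothetical payload) is:

    const { createHash } = require('crypto')

    const data = Buffer.from('artifact bytes')        // hypothetical payload
    const digest = createHash('sha256').update(data).digest()
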
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js
deleted file mode 100644
index 86046ba8f3013..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/dsse.js
+++ /dev/null
@@ -1,46 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DSSEBundleBuilder = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const util_1 = require("../util");
-const base_1 = require("./base");
-const bundle_1 = require("./bundle");
-// BundleBuilder implementation for DSSE wrapped attestations
-class DSSEBundleBuilder extends base_1.BaseBundleBuilder {
-    constructor(options) {
-        super(options);
-        this.certificateChain = options.certificateChain ?? false;
-    }
-    // DSSE requires the artifact to be pre-encoded with the payload type
-    // before the signature is generated.
-    async prepare(artifact) {
-        const a = artifactDefaults(artifact);
-        return util_1.dsse.preAuthEncoding(a.type, a.data);
-    }
-    // Packages the artifact and signature into a DSSE bundle
-    async package(artifact, signature) {
-        return (0, bundle_1.toDSSEBundle)(artifactDefaults(artifact), signature, this.certificateChain);
-    }
-}
-exports.DSSEBundleBuilder = DSSEBundleBuilder;
-// Defaults the artifact type to an empty string if not provided
-function artifactDefaults(artifact) {
-    return {
-        ...artifact,
-        type: artifact.type ?? '',
-    };
-}
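
DSSEBundleBuilder.prepare above pre-encodes the artifact with its payload type before
signing. A sketch of that step, assuming the standard DSSE pre-authentication encoding
(PAE) format; the deleted module delegates this to an internal util.dsse.preAuthEncoding
helper:

    function preAuthEncoding (payloadType, payload) {
      // "DSSEv1 <len(type)> <type> <len(body)> <body>" with ASCII decimal lengths
      const prefix = `DSSEv1 ${Buffer.byteLength(payloadType)} ${payloadType} ${payload.length} `
      return Buffer.concat([Buffer.from(prefix, 'ascii'), payload])
    }

    // the buffer that actually gets signed
    const signable = preAuthEncoding('application/vnd.in-toto+json', Buffer.from('{}'))
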
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js
deleted file mode 100644
index d67c8c324a4f0..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/index.js
+++ /dev/null
@@ -1,7 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
-var dsse_1 = require("./dsse");
-Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return dsse_1.DSSEBundleBuilder; } });
-var message_1 = require("./message");
-Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return message_1.MessageSignatureBundleBuilder; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js
deleted file mode 100644
index e3991f42bab93..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/bundler/message.js
+++ /dev/null
@@ -1,30 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.MessageSignatureBundleBuilder = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const base_1 = require("./base");
-const bundle_1 = require("./bundle");
-// BundleBuilder implementation for raw message signatures
-class MessageSignatureBundleBuilder extends base_1.BaseBundleBuilder {
-    constructor(options) {
-        super(options);
-    }
-    async package(artifact, signature) {
-        return (0, bundle_1.toMessageSignatureBundle)(artifact, signature);
-    }
-}
-exports.MessageSignatureBundleBuilder = MessageSignatureBundleBuilder;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js
deleted file mode 100644
index d28f1913cc77e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/error.js
+++ /dev/null
@@ -1,39 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.InternalError = void 0;
-exports.internalError = internalError;
-const error_1 = require("./external/error");
-class InternalError extends Error {
-    constructor({ code, message, cause, }) {
-        super(message);
-        this.name = this.constructor.name;
-        this.cause = cause;
-        this.code = code;
-    }
-}
-exports.InternalError = InternalError;
-function internalError(err, code, message) {
-    if (err instanceof error_1.HTTPError) {
-        message += ` - ${err.message}`;
-    }
-    throw new InternalError({
-        code: code,
-        message: message,
-        cause: err,
-    });
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js
deleted file mode 100644
index a6a65adebb176..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/error.js
+++ /dev/null
@@ -1,26 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.HTTPError = void 0;
-class HTTPError extends Error {
-    constructor({ status, message, location, }) {
-        super(`(${status}) ${message}`);
-        this.statusCode = status;
-        this.location = location;
-    }
-}
-exports.HTTPError = HTTPError;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js
deleted file mode 100644
index 116090f3c641e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fetch.js
+++ /dev/null
@@ -1,98 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.fetchWithRetry = fetchWithRetry;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const http2_1 = require("http2");
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const proc_log_1 = require("proc-log");
-const promise_retry_1 = __importDefault(require("promise-retry"));
-const util_1 = require("../util");
-const error_1 = require("./error");
-const { HTTP2_HEADER_LOCATION, HTTP2_HEADER_CONTENT_TYPE, HTTP2_HEADER_USER_AGENT, HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_TOO_MANY_REQUESTS, HTTP_STATUS_REQUEST_TIMEOUT, } = http2_1.constants;
-async function fetchWithRetry(url, options) {
-    return (0, promise_retry_1.default)(async (retry, attemptNum) => {
-        const method = options.method || 'POST';
-        const headers = {
-            [HTTP2_HEADER_USER_AGENT]: util_1.ua.getUserAgent(),
-            ...options.headers,
-        };
-        const response = await (0, make_fetch_happen_1.default)(url, {
-            method,
-            headers,
-            body: options.body,
-            timeout: options.timeout,
-            retry: false, // We're handling retries ourselves
-        }).catch((reason) => {
-            proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${reason}`);
-            return retry(reason);
-        });
-        if (response.ok) {
-            return response;
-        }
-        else {
-            const error = await errorFromResponse(response);
-            proc_log_1.log.http('fetch', `${method} ${url} attempt ${attemptNum} failed with ${response.status}`);
-            if (retryable(response.status)) {
-                return retry(error);
-            }
-            else {
-                throw error;
-            }
-        }
-    }, retryOpts(options.retry));
-}
-// Translate a Response into an HTTPError instance. This will attempt to parse
-// the response body for a message, but will default to the statusText if none
-// is found.
-const errorFromResponse = async (response) => {
-    let message = response.statusText;
-    const location = response.headers.get(HTTP2_HEADER_LOCATION) || undefined;
-    const contentType = response.headers.get(HTTP2_HEADER_CONTENT_TYPE);
-    // If response type is JSON, try to parse the body for a message
-    if (contentType?.includes('application/json')) {
-        try {
-            const body = await response.json();
-            message = body.message || message;
-        }
-        catch (e) {
-            // ignore
-        }
-    }
-    return new error_1.HTTPError({
-        status: response.status,
-        message: message,
-        location: location,
-    });
-};
-// Determine if a status code is retryable. This includes 5xx errors, 408, and
-// 429.
-const retryable = (status) => [HTTP_STATUS_REQUEST_TIMEOUT, HTTP_STATUS_TOO_MANY_REQUESTS].includes(status) || status >= HTTP_STATUS_INTERNAL_SERVER_ERROR;
-// Normalize the retry options to the format expected by promise-retry
-const retryOpts = (retry) => {
-    if (typeof retry === 'boolean') {
-        return { retries: retry ? 1 : 0 };
-    }
-    else if (typeof retry === 'number') {
-        return { retries: retry };
-    }
-    else {
-        return { retries: 0, ...retry };
-    }
-};
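
fetchWithRetry above retries on 408, 429 and any 5xx response. A minimal sketch of the same
policy using the global fetch available in Node 18+ (the deleted module combines
make-fetch-happen with promise-retry instead, and also retries network errors):

    const retryable = (status) =>
      status === 408 || status === 429 || status >= 500

    async function fetchWithRetry (url, options = {}, retries = 2) {
      for (let attempt = 0; ; attempt++) {
        const response = await fetch(url, options)
        if (response.ok || !retryable(response.status) || attempt >= retries) {
          return response
        }
      }
    }
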
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js
deleted file mode 100644
index de6a1ad9f9e79..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/fulcio.js
+++ /dev/null
@@ -1,41 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Fulcio = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fetch_1 = require("./fetch");
-/**
- * Fulcio API client.
- */
-class Fulcio {
-    constructor(options) {
-        this.options = options;
-    }
-    async createSigningCertificate(request) {
-        const { baseURL, retry, timeout } = this.options;
-        const url = `${baseURL}/api/v2/signingCert`;
-        const response = await (0, fetch_1.fetchWithRetry)(url, {
-            headers: {
-                'Content-Type': 'application/json',
-            },
-            body: JSON.stringify(request),
-            timeout,
-            retry,
-        });
-        return response.json();
-    }
-}
-exports.Fulcio = Fulcio;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js
deleted file mode 100644
index bb59a126e032f..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/rekor.js
+++ /dev/null
@@ -1,80 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Rekor = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fetch_1 = require("./fetch");
-/**
- * Rekor API client.
- */
-class Rekor {
-    constructor(options) {
-        this.options = options;
-    }
-    /**
-     * Create a new entry in the Rekor log.
-     * @param proposedEntry {ProposedEntry} Data to create a new entry
-     * @returns {Promise} The created entry
-     */
-    async createEntry(proposedEntry) {
-        const { baseURL, timeout, retry } = this.options;
-        const url = `${baseURL}/api/v1/log/entries`;
-        const response = await (0, fetch_1.fetchWithRetry)(url, {
-            headers: {
-                'Content-Type': 'application/json',
-                Accept: 'application/json',
-            },
-            body: JSON.stringify(proposedEntry),
-            timeout,
-            retry,
-        });
-        const data = await response.json();
-        return entryFromResponse(data);
-    }
-    /**
-     * Get an entry from the Rekor log.
-     * @param uuid {string} The UUID of the entry to retrieve
-     * @returns {Promise} The retrieved entry
-     */
-    async getEntry(uuid) {
-        const { baseURL, timeout, retry } = this.options;
-        const url = `${baseURL}/api/v1/log/entries/${uuid}`;
-        const response = await (0, fetch_1.fetchWithRetry)(url, {
-            method: 'GET',
-            headers: {
-                Accept: 'application/json',
-            },
-            timeout,
-            retry,
-        });
-        const data = await response.json();
-        return entryFromResponse(data);
-    }
-}
-exports.Rekor = Rekor;
-// Unpack the response from the Rekor API into a more convenient format.
-function entryFromResponse(data) {
-    const entries = Object.entries(data);
-    if (entries.length != 1) {
-        throw new Error('Received multiple entries in Rekor response');
-    }
-    // Grab UUID and entry data from the response
-    const [uuid, entry] = entries[0];
-    return {
-        ...entry,
-        uuid,
-    };
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js
deleted file mode 100644
index a948ba9cca2c7..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/external/tsa.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimestampAuthority = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fetch_1 = require("./fetch");
-class TimestampAuthority {
-    constructor(options) {
-        this.options = options;
-    }
-    async createTimestamp(request) {
-        const { baseURL, timeout, retry } = this.options;
-        const url = `${baseURL}/api/v1/timestamp`;
-        const response = await (0, fetch_1.fetchWithRetry)(url, {
-            headers: {
-                'Content-Type': 'application/json',
-            },
-            body: JSON.stringify(request),
-            timeout,
-            retry,
-        });
-        return response.buffer();
-    }
-}
-exports.TimestampAuthority = TimestampAuthority;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js
deleted file mode 100644
index d79133952b605..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/ci.js
+++ /dev/null
@@ -1,73 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CIContextProvider = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-// Collection of all the CI-specific providers we have implemented
-const providers = [getGHAToken, getEnv];
-/**
- * CIContextProvider is a composite identity provider which will iterate
- * over all of the CI-specific providers and return the token from the first
- * one that resolves.
- */
-class CIContextProvider {
-    /* istanbul ignore next */
-    constructor(audience = 'sigstore') {
-        this.audience = audience;
-    }
-    // Invoke all registered ProviderFuncs and return the value of whichever one
-    // resolves first.
-    async getToken() {
-        return Promise.any(providers.map((getToken) => getToken(this.audience))).catch(() => Promise.reject('CI: no tokens available'));
-    }
-}
-exports.CIContextProvider = CIContextProvider;
-/**
- * getGHAToken can retrieve an OIDC token when running in a GitHub Actions
- * workflow
- */
-async function getGHAToken(audience) {
-    // Check to see if we're running in GitHub Actions
-    if (!process.env.ACTIONS_ID_TOKEN_REQUEST_URL ||
-        !process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN) {
-        return Promise.reject('no token available');
-    }
-    // Construct URL to request token w/ appropriate audience
-    const url = new URL(process.env.ACTIONS_ID_TOKEN_REQUEST_URL);
-    url.searchParams.append('audience', audience);
-    const response = await (0, make_fetch_happen_1.default)(url.href, {
-        retry: 2,
-        headers: {
-            Accept: 'application/json',
-            Authorization: `Bearer ${process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN}`,
-        },
-    });
-    return response.json().then((data) => data.value);
-}
-/**
- * getEnv can retrieve an OIDC token from an environment variable.
- * This matches the behavior of https://github.com/sigstore/cosign/tree/main/pkg/providers/envvar
- */
-async function getEnv() {
-    if (!process.env.SIGSTORE_ID_TOKEN) {
-        return Promise.reject('no token available');
-    }
-    return process.env.SIGSTORE_ID_TOKEN;
-}
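
CIContextProvider.getToken above races all registered providers and keeps the first token
that resolves. A minimal sketch of that pattern; the provider functions are hypothetical:

    async function getToken (providers, audience) {
      try {
        // Promise.any rejects only if every provider rejects
        return await Promise.any(providers.map((p) => p(audience)))
      } catch {
        throw new Error('CI: no tokens available')
      }
    }
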
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js
deleted file mode 100644
index 1c1223b443fab..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CIContextProvider = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var ci_1 = require("./ci");
-Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return ci_1.CIContextProvider; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/identity/provider.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js
deleted file mode 100644
index 383b76083361b..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/index.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = exports.CIContextProvider = exports.InternalError = exports.MessageSignatureBundleBuilder = exports.DSSEBundleBuilder = void 0;
-var bundler_1 = require("./bundler");
-Object.defineProperty(exports, "DSSEBundleBuilder", { enumerable: true, get: function () { return bundler_1.DSSEBundleBuilder; } });
-Object.defineProperty(exports, "MessageSignatureBundleBuilder", { enumerable: true, get: function () { return bundler_1.MessageSignatureBundleBuilder; } });
-var error_1 = require("./error");
-Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return error_1.InternalError; } });
-var identity_1 = require("./identity");
-Object.defineProperty(exports, "CIContextProvider", { enumerable: true, get: function () { return identity_1.CIContextProvider; } });
-var signer_1 = require("./signer");
-Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return signer_1.DEFAULT_FULCIO_URL; } });
-Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return signer_1.FulcioSigner; } });
-var witness_1 = require("./witness");
-Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return witness_1.DEFAULT_REKOR_URL; } });
-Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return witness_1.RekorWitness; } });
-Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return witness_1.TSAWitness; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
deleted file mode 100644
index f01703cfab564..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ca.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CAClient = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../../error");
-const fulcio_1 = require("../../external/fulcio");
-class CAClient {
-    constructor(options) {
-        this.fulcio = new fulcio_1.Fulcio({
-            baseURL: options.fulcioBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createSigningCertificate(identityToken, publicKey, challenge) {
-        const request = toCertificateRequest(identityToken, publicKey, challenge);
-        try {
-            const resp = await this.fulcio.createSigningCertificate(request);
-            // Account for the fact that the response may contain either a
-            // signedCertificateEmbeddedSct or a signedCertificateDetachedSct.
-            const cert = resp.signedCertificateEmbeddedSct
-                ? resp.signedCertificateEmbeddedSct
-                : resp.signedCertificateDetachedSct;
-            return cert.chain.certificates;
-        }
-        catch (err) {
-            (0, error_1.internalError)(err, 'CA_CREATE_SIGNING_CERTIFICATE_ERROR', 'error creating signing certificate');
-        }
-    }
-}
-exports.CAClient = CAClient;
-function toCertificateRequest(identityToken, publicKey, challenge) {
-    return {
-        credentials: {
-            oidcIdentityToken: identityToken,
-        },
-        publicKeyRequest: {
-            publicKey: {
-                algorithm: 'ECDSA',
-                content: publicKey,
-            },
-            proofOfPossession: challenge.toString('base64'),
-        },
-    };
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
deleted file mode 100644
index 481aa5c3579a2..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/ephemeral.js
+++ /dev/null
@@ -1,45 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.EphemeralSigner = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const crypto_1 = __importDefault(require("crypto"));
-const EC_KEYPAIR_TYPE = 'ec';
-const P256_CURVE = 'P-256';
-// Signer implementation which uses an ephemeral keypair to sign artifacts.
-// The private key lives only in memory and is tied to the lifetime of the
-// EphemeralSigner instance.
-class EphemeralSigner {
-    constructor() {
-        this.keypair = crypto_1.default.generateKeyPairSync(EC_KEYPAIR_TYPE, {
-            namedCurve: P256_CURVE,
-        });
-    }
-    async sign(data) {
-        const signature = crypto_1.default.sign(null, data, this.keypair.privateKey);
-        const publicKey = this.keypair.publicKey
-            .export({ format: 'pem', type: 'spki' })
-            .toString('ascii');
-        return {
-            signature: signature,
-            key: { $case: 'publicKey', publicKey },
-        };
-    }
-}
-exports.EphemeralSigner = EphemeralSigner;
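
EphemeralSigner above generates an in-memory P-256 keypair and signs with it. A sketch of
the same flow using only Node's crypto module, passing 'sha256' explicitly rather than
relying on a default digest; the payload is hypothetical:

    const crypto = require('crypto')

    const { publicKey, privateKey } = crypto.generateKeyPairSync('ec', {
      namedCurve: 'P-256',
    })
    const data = Buffer.from('artifact bytes')            // hypothetical payload
    const signature = crypto.sign('sha256', data, privateKey)
    const pem = publicKey.export({ format: 'pem', type: 'spki' })
    console.log(crypto.verify('sha256', data, publicKey, signature)) // true
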
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
deleted file mode 100644
index 89a432548d2b4..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/fulcio/index.js
+++ /dev/null
@@ -1,87 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../../error");
-const util_1 = require("../../util");
-const ca_1 = require("./ca");
-const ephemeral_1 = require("./ephemeral");
-exports.DEFAULT_FULCIO_URL = 'https://fulcio.sigstore.dev';
-// Signer implementation which can be used to decorate another signer
-// with a Fulcio-issued signing certificate for the signer's public key.
-// Must be instantiated with an identity provider which can provide a JWT
-// which represents the identity to be bound to the signing certificate.
-class FulcioSigner {
-    constructor(options) {
-        this.ca = new ca_1.CAClient({
-            ...options,
-            fulcioBaseURL: options.fulcioBaseURL || /* istanbul ignore next */ exports.DEFAULT_FULCIO_URL,
-        });
-        this.identityProvider = options.identityProvider;
-        this.keyHolder = options.keyHolder || new ephemeral_1.EphemeralSigner();
-    }
-    async sign(data) {
-        // Retrieve identity token from the supplied identity provider
-        const identityToken = await this.getIdentityToken();
-        // Extract challenge claim from OIDC token
-        let subject;
-        try {
-            subject = util_1.oidc.extractJWTSubject(identityToken);
-        }
-        catch (err) {
-            throw new error_1.InternalError({
-                code: 'IDENTITY_TOKEN_PARSE_ERROR',
-                message: `invalid identity token: ${identityToken}`,
-                cause: err,
-            });
-        }
-        // Construct challenge value by signing the subject claim
-        const challenge = await this.keyHolder.sign(Buffer.from(subject));
-        if (challenge.key.$case !== 'publicKey') {
-            throw new error_1.InternalError({
-                code: 'CA_CREATE_SIGNING_CERTIFICATE_ERROR',
-                message: 'unexpected format for signing key',
-            });
-        }
-        // Create signing certificate
-        const certificates = await this.ca.createSigningCertificate(identityToken, challenge.key.publicKey, challenge.signature);
-        // Generate artifact signature
-        const signature = await this.keyHolder.sign(data);
-        // Specifically returning only the first certificate in the chain
-        // as the key.
-        return {
-            signature: signature.signature,
-            key: {
-                $case: 'x509Certificate',
-                certificate: certificates[0],
-            },
-        };
-    }
-    async getIdentityToken() {
-        try {
-            return await this.identityProvider.getToken();
-        }
-        catch (err) {
-            throw new error_1.InternalError({
-                code: 'IDENTITY_TOKEN_READ_ERROR',
-                message: 'error retrieving identity token',
-                cause: err,
-            });
-        }
-    }
-}
-exports.FulcioSigner = FulcioSigner;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js
deleted file mode 100644
index e2087767b81c1..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/index.js
+++ /dev/null
@@ -1,22 +0,0 @@
-"use strict";
-/* istanbul ignore file */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FulcioSigner = exports.DEFAULT_FULCIO_URL = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var fulcio_1 = require("./fulcio");
-Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return fulcio_1.DEFAULT_FULCIO_URL; } });
-Object.defineProperty(exports, "FulcioSigner", { enumerable: true, get: function () { return fulcio_1.FulcioSigner; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js
deleted file mode 100644
index b92c54183375d..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/signer/signer.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/types/fetch.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js
deleted file mode 100644
index 436630cfbbf19..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/index.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ua = exports.oidc = exports.pem = exports.json = exports.encoding = exports.dsse = exports.crypto = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var core_1 = require("@sigstore/core");
-Object.defineProperty(exports, "crypto", { enumerable: true, get: function () { return core_1.crypto; } });
-Object.defineProperty(exports, "dsse", { enumerable: true, get: function () { return core_1.dsse; } });
-Object.defineProperty(exports, "encoding", { enumerable: true, get: function () { return core_1.encoding; } });
-Object.defineProperty(exports, "json", { enumerable: true, get: function () { return core_1.json; } });
-Object.defineProperty(exports, "pem", { enumerable: true, get: function () { return core_1.pem; } });
-exports.oidc = __importStar(require("./oidc"));
-exports.ua = __importStar(require("./ua"));
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js
deleted file mode 100644
index 37c5b168ee12e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/oidc.js
+++ /dev/null
@@ -1,30 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extractJWTSubject = extractJWTSubject;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-function extractJWTSubject(jwt) {
-    const parts = jwt.split('.', 3);
-    const payload = JSON.parse(core_1.encoding.base64Decode(parts[1]));
-    switch (payload.iss) {
-        case 'https://accounts.google.com':
-        case 'https://oauth2.sigstore.dev/auth':
-            return payload.email;
-        default:
-            return payload.sub;
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js
deleted file mode 100644
index b15ff2070fb9f..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/util/ua.js
+++ /dev/null
@@ -1,32 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getUserAgent = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const os_1 = __importDefault(require("os"));
-// Format User-Agent: <product> / <product-version> (<platform>)
-// source: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
-const getUserAgent = () => {
-    const packageVersion = require('../../package.json').version;
-    const nodeVersion = process.version;
-    const platformName = os_1.default.platform();
-    const archName = os_1.default.arch();
-    return `sigstore-js/${packageVersion} (Node ${nodeVersion}) (${platformName}/${archName})`;
-};
-exports.getUserAgent = getUserAgent;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js
deleted file mode 100644
index 72677c399caa7..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/index.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-/* istanbul ignore file */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAWitness = exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var tlog_1 = require("./tlog");
-Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return tlog_1.DEFAULT_REKOR_URL; } });
-Object.defineProperty(exports, "RekorWitness", { enumerable: true, get: function () { return tlog_1.RekorWitness; } });
-var tsa_1 = require("./tsa");
-Object.defineProperty(exports, "TSAWitness", { enumerable: true, get: function () { return tsa_1.TSAWitness; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js
deleted file mode 100644
index 22c895f2ca7ed..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/client.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TLogClient = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../../error");
-const error_2 = require("../../external/error");
-const rekor_1 = require("../../external/rekor");
-class TLogClient {
-    constructor(options) {
-        this.fetchOnConflict = options.fetchOnConflict ?? false;
-        this.rekor = new rekor_1.Rekor({
-            baseURL: options.rekorBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createEntry(proposedEntry) {
-        let entry;
-        try {
-            entry = await this.rekor.createEntry(proposedEntry);
-        }
-        catch (err) {
-            // If the entry already exists, fetch it (if enabled)
-            if (entryExistsError(err) && this.fetchOnConflict) {
-                // Grab the UUID of the existing entry from the location header
-                /* istanbul ignore next */
-                const uuid = err.location.split('/').pop() || '';
-                try {
-                    entry = await this.rekor.getEntry(uuid);
-                }
-                catch (err) {
-                    (0, error_1.internalError)(err, 'TLOG_FETCH_ENTRY_ERROR', 'error fetching tlog entry');
-                }
-            }
-            else {
-                (0, error_1.internalError)(err, 'TLOG_CREATE_ENTRY_ERROR', 'error creating tlog entry');
-            }
-        }
-        return entry;
-    }
-}
-exports.TLogClient = TLogClient;
-function entryExistsError(value) {
-    return (value instanceof error_2.HTTPError &&
-        value.statusCode === 409 &&
-        value.location !== undefined);
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
deleted file mode 100644
index bb1c68e914b90..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/entry.js
+++ /dev/null
@@ -1,140 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toProposedEntry = toProposedEntry;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const bundle_1 = require("@sigstore/bundle");
-const util_1 = require("../../util");
-const SHA256_ALGORITHM = 'sha256';
-function toProposedEntry(content, publicKey, 
-// TODO: Remove this parameter once have completely switched to 'dsse' entries
-entryType = 'dsse') {
-    switch (content.$case) {
-        case 'dsseEnvelope':
-            // TODO: Remove this conditional once have completely ditched "intoto" entries
-            if (entryType === 'intoto') {
-                return toProposedIntotoEntry(content.dsseEnvelope, publicKey);
-            }
-            return toProposedDSSEEntry(content.dsseEnvelope, publicKey);
-        case 'messageSignature':
-            return toProposedHashedRekordEntry(content.messageSignature, publicKey);
-    }
-}
-// Returns a properly formatted Rekor "hashedrekord" entry for the given digest
-// and signature
-function toProposedHashedRekordEntry(messageSignature, publicKey) {
-    const hexDigest = messageSignature.messageDigest.digest.toString('hex');
-    const b64Signature = messageSignature.signature.toString('base64');
-    const b64Key = util_1.encoding.base64Encode(publicKey);
-    return {
-        apiVersion: '0.0.1',
-        kind: 'hashedrekord',
-        spec: {
-            data: {
-                hash: {
-                    algorithm: SHA256_ALGORITHM,
-                    value: hexDigest,
-                },
-            },
-            signature: {
-                content: b64Signature,
-                publicKey: {
-                    content: b64Key,
-                },
-            },
-        },
-    };
-}
-// Returns a properly formatted Rekor "dsse" entry for the given DSSE envelope
-// and signature
-function toProposedDSSEEntry(envelope, publicKey) {
-    const envelopeJSON = JSON.stringify((0, bundle_1.envelopeToJSON)(envelope));
-    const encodedKey = util_1.encoding.base64Encode(publicKey);
-    return {
-        apiVersion: '0.0.1',
-        kind: 'dsse',
-        spec: {
-            proposedContent: {
-                envelope: envelopeJSON,
-                verifiers: [encodedKey],
-            },
-        },
-    };
-}
-// Returns a properly formatted Rekor "intoto" entry for the given DSSE
-// envelope and signature
-function toProposedIntotoEntry(envelope, publicKey) {
-    // Calculate the value for the payloadHash field in the Rekor entry
-    const payloadHash = util_1.crypto
-        .digest(SHA256_ALGORITHM, envelope.payload)
-        .toString('hex');
-    // Calculate the value for the hash field in the Rekor entry
-    const envelopeHash = calculateDSSEHash(envelope, publicKey);
-    // Collect values for re-creating the DSSE envelope.
-    // Double-encode payload and signature cause that's what Rekor expects
-    const payload = util_1.encoding.base64Encode(envelope.payload.toString('base64'));
-    const sig = util_1.encoding.base64Encode(envelope.signatures[0].sig.toString('base64'));
-    const keyid = envelope.signatures[0].keyid;
-    const encodedKey = util_1.encoding.base64Encode(publicKey);
-    // Create the envelope portion of the entry. Note the inclusion of the
-    // publicKey in the signature struct is not a standard part of a DSSE
-    // envelope, but is required by Rekor.
-    const dsse = {
-        payloadType: envelope.payloadType,
-        payload: payload,
-        signatures: [{ sig, publicKey: encodedKey }],
-    };
-    // If the keyid is an empty string, Rekor seems to remove it altogether. We
-    // need to do the same here so that we can properly recreate the entry for
-    // verification.
-    if (keyid.length > 0) {
-        dsse.signatures[0].keyid = keyid;
-    }
-    return {
-        apiVersion: '0.0.2',
-        kind: 'intoto',
-        spec: {
-            content: {
-                envelope: dsse,
-                hash: { algorithm: SHA256_ALGORITHM, value: envelopeHash },
-                payloadHash: { algorithm: SHA256_ALGORITHM, value: payloadHash },
-            },
-        },
-    };
-}
-// Calculates the hash of a DSSE envelope for inclusion in a Rekor entry.
-// There is no standard way to do this, so the scheme we're using as as
-// follows:
-//  * payload is base64 encoded
-//  * signature is base64 encoded (only the first signature is used)
-//  * keyid is included ONLY if it is NOT an empty string
-//  * The resulting JSON is canonicalized and hashed to a hex string
-function calculateDSSEHash(envelope, publicKey) {
-    const dsse = {
-        payloadType: envelope.payloadType,
-        payload: envelope.payload.toString('base64'),
-        signatures: [
-            { sig: envelope.signatures[0].sig.toString('base64'), publicKey },
-        ],
-    };
-    // If the keyid is an empty string, Rekor seems to remove it altogether.
-    if (envelope.signatures[0].keyid.length > 0) {
-        dsse.signatures[0].keyid = envelope.signatures[0].keyid;
-    }
-    return util_1.crypto
-        .digest(SHA256_ALGORITHM, util_1.json.canonicalize(dsse))
-        .toString('hex');
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js
deleted file mode 100644
index 6197b09d4cdd9..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tlog/index.js
+++ /dev/null
@@ -1,82 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RekorWitness = exports.DEFAULT_REKOR_URL = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const util_1 = require("../../util");
-const client_1 = require("./client");
-const entry_1 = require("./entry");
-exports.DEFAULT_REKOR_URL = 'https://rekor.sigstore.dev';
-class RekorWitness {
-    constructor(options) {
-        this.entryType = options.entryType;
-        this.tlog = new client_1.TLogClient({
-            ...options,
-            rekorBaseURL: options.rekorBaseURL || /* istanbul ignore next */ exports.DEFAULT_REKOR_URL,
-        });
-    }
-    async testify(content, publicKey) {
-        const proposedEntry = (0, entry_1.toProposedEntry)(content, publicKey, this.entryType);
-        const entry = await this.tlog.createEntry(proposedEntry);
-        return toTransparencyLogEntry(entry);
-    }
-}
-exports.RekorWitness = RekorWitness;
-function toTransparencyLogEntry(entry) {
-    const logID = Buffer.from(entry.logID, 'hex');
-    // Parse entry body so we can extract the kind and version.
-    const bodyJSON = util_1.encoding.base64Decode(entry.body);
-    const entryBody = JSON.parse(bodyJSON);
-    const promise = entry?.verification?.signedEntryTimestamp
-        ? inclusionPromise(entry.verification.signedEntryTimestamp)
-        : undefined;
-    const proof = entry?.verification?.inclusionProof
-        ? inclusionProof(entry.verification.inclusionProof)
-        : undefined;
-    const tlogEntry = {
-        logIndex: entry.logIndex.toString(),
-        logId: {
-            keyId: logID,
-        },
-        integratedTime: entry.integratedTime.toString(),
-        kindVersion: {
-            kind: entryBody.kind,
-            version: entryBody.apiVersion,
-        },
-        inclusionPromise: promise,
-        inclusionProof: proof,
-        canonicalizedBody: Buffer.from(entry.body, 'base64'),
-    };
-    return {
-        tlogEntries: [tlogEntry],
-    };
-}
-function inclusionPromise(promise) {
-    return {
-        signedEntryTimestamp: Buffer.from(promise, 'base64'),
-    };
-}
-function inclusionProof(proof) {
-    return {
-        logIndex: proof.logIndex.toString(),
-        treeSize: proof.treeSize.toString(),
-        rootHash: Buffer.from(proof.rootHash, 'hex'),
-        hashes: proof.hashes.map((h) => Buffer.from(h, 'hex')),
-        checkpoint: {
-            envelope: proof.checkpoint,
-        },
-    };
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js
deleted file mode 100644
index 754de3748dbb3..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/client.js
+++ /dev/null
@@ -1,46 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAClient = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../../error");
-const tsa_1 = require("../../external/tsa");
-const util_1 = require("../../util");
-const SHA256_ALGORITHM = 'sha256';
-class TSAClient {
-    constructor(options) {
-        this.tsa = new tsa_1.TimestampAuthority({
-            baseURL: options.tsaBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async createTimestamp(signature) {
-        const request = {
-            artifactHash: util_1.crypto
-                .digest(SHA256_ALGORITHM, signature)
-                .toString('base64'),
-            hashAlgorithm: SHA256_ALGORITHM,
-        };
-        try {
-            return await this.tsa.createTimestamp(request);
-        }
-        catch (err) {
-            (0, error_1.internalError)(err, 'TSA_CREATE_TIMESTAMP_ERROR', 'error creating timestamp');
-        }
-    }
-}
-exports.TSAClient = TSAClient;
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js
deleted file mode 100644
index d4f5c7c859d10..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/tsa/index.js
+++ /dev/null
@@ -1,44 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TSAWitness = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const client_1 = require("./client");
-class TSAWitness {
-    constructor(options) {
-        this.tsa = new client_1.TSAClient({
-            tsaBaseURL: options.tsaBaseURL,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async testify(content) {
-        const signature = extractSignature(content);
-        const timestamp = await this.tsa.createTimestamp(signature);
-        return {
-            rfc3161Timestamps: [{ signedTimestamp: timestamp }],
-        };
-    }
-}
-exports.TSAWitness = TSAWitness;
-function extractSignature(content) {
-    switch (content.$case) {
-        case 'dsseEnvelope':
-            return content.dsseEnvelope.signatures[0].sig;
-        case 'messageSignature':
-            return content.messageSignature.signature;
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js b/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/dist/witness/witness.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/package.json b/node_modules/pacote/node_modules/@sigstore/sign/package.json
deleted file mode 100644
index 4059997ced341..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/sign/package.json
+++ /dev/null
@@ -1,46 +0,0 @@
-{
-  "name": "@sigstore/sign",
-  "version": "4.0.0",
-  "description": "Sigstore signing library",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/sign#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "devDependencies": {
-    "@sigstore/jest": "^0.0.0",
-    "@sigstore/mock": "^0.11.0",
-    "@sigstore/rekor-types": "^4.0.0",
-    "@types/make-fetch-happen": "^10.0.4",
-    "@types/promise-retry": "^1.1.6"
-  },
-  "dependencies": {
-    "@sigstore/bundle": "^4.0.0",
-    "@sigstore/core": "^3.0.0",
-    "@sigstore/protobuf-specs": "^0.5.0",
-    "make-fetch-happen": "^15.0.0",
-    "proc-log": "^5.0.0",
-    "promise-retry": "^2.0.1"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js
deleted file mode 100644
index 1033fc422aba0..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/dsse.js
+++ /dev/null
@@ -1,43 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DSSESignatureContent = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-class DSSESignatureContent {
-    constructor(env) {
-        this.env = env;
-    }
-    compareDigest(digest) {
-        return core_1.crypto.bufferEqual(digest, core_1.crypto.digest('sha256', this.env.payload));
-    }
-    compareSignature(signature) {
-        return core_1.crypto.bufferEqual(signature, this.signature);
-    }
-    verifySignature(key) {
-        return core_1.crypto.verify(this.preAuthEncoding, key, this.signature);
-    }
-    get signature() {
-        return this.env.signatures.length > 0
-            ? this.env.signatures[0].sig
-            : Buffer.from('');
-    }
-    // DSSE Pre-Authentication Encoding
-    get preAuthEncoding() {
-        return core_1.dsse.preAuthEncoding(this.env.payloadType, this.env.payload);
-    }
-}
-exports.DSSESignatureContent = DSSESignatureContent;
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js
deleted file mode 100644
index 4287d8032b75f..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/index.js
+++ /dev/null
@@ -1,57 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.toSignedEntity = toSignedEntity;
-exports.signatureContent = signatureContent;
-const core_1 = require("@sigstore/core");
-const dsse_1 = require("./dsse");
-const message_1 = require("./message");
-function toSignedEntity(bundle, artifact) {
-    const { tlogEntries, timestampVerificationData } = bundle.verificationMaterial;
-    const timestamps = [];
-    for (const entry of tlogEntries) {
-        timestamps.push({
-            $case: 'transparency-log',
-            tlogEntry: entry,
-        });
-    }
-    for (const ts of timestampVerificationData?.rfc3161Timestamps ?? []) {
-        timestamps.push({
-            $case: 'timestamp-authority',
-            timestamp: core_1.RFC3161Timestamp.parse(ts.signedTimestamp),
-        });
-    }
-    return {
-        signature: signatureContent(bundle, artifact),
-        key: key(bundle),
-        tlogEntries,
-        timestamps,
-    };
-}
-function signatureContent(bundle, artifact) {
-    switch (bundle.content.$case) {
-        case 'dsseEnvelope':
-            return new dsse_1.DSSESignatureContent(bundle.content.dsseEnvelope);
-        case 'messageSignature':
-            return new message_1.MessageSignatureContent(bundle.content.messageSignature, artifact);
-    }
-}
-function key(bundle) {
-    switch (bundle.verificationMaterial.content.$case) {
-        case 'publicKey':
-            return {
-                $case: 'public-key',
-                hint: bundle.verificationMaterial.content.publicKey.hint,
-            };
-        case 'x509CertificateChain':
-            return {
-                $case: 'certificate',
-                certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.x509CertificateChain
-                    .certificates[0].rawBytes),
-            };
-        case 'certificate':
-            return {
-                $case: 'certificate',
-                certificate: core_1.X509Certificate.parse(bundle.verificationMaterial.content.certificate.rawBytes),
-            };
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js
deleted file mode 100644
index 836148c68a8b6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/bundle/message.js
+++ /dev/null
@@ -1,36 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.MessageSignatureContent = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-class MessageSignatureContent {
-    constructor(messageSignature, artifact) {
-        this.signature = messageSignature.signature;
-        this.messageDigest = messageSignature.messageDigest.digest;
-        this.artifact = artifact;
-    }
-    compareSignature(signature) {
-        return core_1.crypto.bufferEqual(signature, this.signature);
-    }
-    compareDigest(digest) {
-        return core_1.crypto.bufferEqual(digest, this.messageDigest);
-    }
-    verifySignature(key) {
-        return core_1.crypto.verify(this.artifact, key, this.signature);
-    }
-}
-exports.MessageSignatureContent = MessageSignatureContent;
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js
deleted file mode 100644
index 6cb1cd4121343..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/error.js
+++ /dev/null
@@ -1,32 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PolicyError = exports.VerificationError = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-class BaseError extends Error {
-    constructor({ code, message, cause, }) {
-        super(message);
-        this.code = code;
-        this.cause = cause;
-        this.name = this.constructor.name;
-    }
-}
-class VerificationError extends BaseError {
-}
-exports.VerificationError = VerificationError;
-class PolicyError extends BaseError {
-}
-exports.PolicyError = PolicyError;
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js
deleted file mode 100644
index 3222876fcd68b..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/index.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Verifier = exports.toTrustMaterial = exports.VerificationError = exports.PolicyError = exports.toSignedEntity = void 0;
-/* istanbul ignore file */
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var bundle_1 = require("./bundle");
-Object.defineProperty(exports, "toSignedEntity", { enumerable: true, get: function () { return bundle_1.toSignedEntity; } });
-var error_1 = require("./error");
-Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return error_1.PolicyError; } });
-Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return error_1.VerificationError; } });
-var trust_1 = require("./trust");
-Object.defineProperty(exports, "toTrustMaterial", { enumerable: true, get: function () { return trust_1.toTrustMaterial; } });
-var verifier_1 = require("./verifier");
-Object.defineProperty(exports, "Verifier", { enumerable: true, get: function () { return verifier_1.Verifier; } });
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js
deleted file mode 100644
index 35ad947f0bafc..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/certificate.js
+++ /dev/null
@@ -1,212 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CertificateChainVerifier = void 0;
-exports.verifyCertificateChain = verifyCertificateChain;
-const error_1 = require("../error");
-const trust_1 = require("../trust");
-function verifyCertificateChain(timestamp, leaf, certificateAuthorities) {
-    // Filter list of trusted CAs to those which are valid for the given
-    // timestamp
-    const cas = (0, trust_1.filterCertAuthorities)(certificateAuthorities, timestamp);
-    /* eslint-disable-next-line @typescript-eslint/no-explicit-any */
-    let error;
-    for (const ca of cas) {
-        try {
-            const verifier = new CertificateChainVerifier({
-                trustedCerts: ca.certChain,
-                untrustedCert: leaf,
-                timestamp,
-            });
-            return verifier.verify();
-        }
-        catch (err) {
-            error = err;
-        }
-    }
-    // If we failed to verify the certificate chain for all of the trusted
-    // CAs, throw the last error we encountered.
-    throw new error_1.VerificationError({
-        code: 'CERTIFICATE_ERROR',
-        message: 'Failed to verify certificate chain',
-        cause: error,
-    });
-}
-class CertificateChainVerifier {
-    constructor(opts) {
-        this.untrustedCert = opts.untrustedCert;
-        this.trustedCerts = opts.trustedCerts;
-        this.localCerts = dedupeCertificates([
-            ...opts.trustedCerts,
-            opts.untrustedCert,
-        ]);
-        this.timestamp = opts.timestamp;
-    }
-    verify() {
-        // Construct certificate path from leaf to root
-        const certificatePath = this.sort();
-        // Perform validation checks on each certificate in the path
-        this.checkPath(certificatePath);
-        const validForDate = certificatePath.every((cert) => cert.validForDate(this.timestamp));
-        if (!validForDate) {
-            throw new error_1.VerificationError({
-                code: 'CERTIFICATE_ERROR',
-                message: 'certificate is not valid or expired at the specified date',
-            });
-        }
-        // Return verified certificate path
-        return certificatePath;
-    }
-    sort() {
-        const leafCert = this.untrustedCert;
-        // Construct all possible paths from the leaf
-        let paths = this.buildPaths(leafCert);
-        // Filter for paths which contain a trusted certificate
-        paths = paths.filter((path) => path.some((cert) => this.trustedCerts.includes(cert)));
-        if (paths.length === 0) {
-            throw new error_1.VerificationError({
-                code: 'CERTIFICATE_ERROR',
-                message: 'no trusted certificate path found',
-            });
-        }
-        // Find the shortest of possible paths
-        /* istanbul ignore next */
-        const path = paths.reduce((prev, curr) => prev.length < curr.length ? prev : curr);
-        // Construct chain from shortest path
-        // Removes the last certificate in the path, which will be a second copy
-        // of the root certificate given that the root is self-signed.
-        return [leafCert, ...path].slice(0, -1);
-    }
-    // Recursively build all possible paths from the leaf to the root
-    buildPaths(certificate) {
-        const paths = [];
-        const issuers = this.findIssuer(certificate);
-        if (issuers.length === 0) {
-            throw new error_1.VerificationError({
-                code: 'CERTIFICATE_ERROR',
-                message: 'no valid certificate path found',
-            });
-        }
-        for (let i = 0; i < issuers.length; i++) {
-            const issuer = issuers[i];
-            // Base case - issuer is self
-            if (issuer.equals(certificate)) {
-                paths.push([certificate]);
-                continue;
-            }
-            // Recursively build path for the issuer
-            const subPaths = this.buildPaths(issuer);
-            // Construct paths by appending the issuer to each subpath
-            for (let j = 0; j < subPaths.length; j++) {
-                paths.push([issuer, ...subPaths[j]]);
-            }
-        }
-        return paths;
-    }
-    // Return all possible issuers for the given certificate
-    findIssuer(certificate) {
-        let issuers = [];
-        let keyIdentifier;
-        // Exit early if the certificate is self-signed
-        if (certificate.subject.equals(certificate.issuer)) {
-            if (certificate.verify()) {
-                return [certificate];
-            }
-        }
-        // If the certificate has an authority key identifier, use that
-        // to find the issuer
-        if (certificate.extAuthorityKeyID) {
-            keyIdentifier = certificate.extAuthorityKeyID.keyIdentifier;
-            // TODO: Add support for authorityCertIssuer/authorityCertSerialNumber
-            // though Fulcio doesn't appear to use these
-        }
-        // Find possible issuers by comparing the authorityKeyID/subjectKeyID
-        // or issuer/subject. Potential issuers are added to the result array.
-        this.localCerts.forEach((possibleIssuer) => {
-            if (keyIdentifier) {
-                /* istanbul ignore else */
-                if (possibleIssuer.extSubjectKeyID) {
-                    if (possibleIssuer.extSubjectKeyID.keyIdentifier.equals(keyIdentifier)) {
-                        issuers.push(possibleIssuer);
-                    }
-                    return;
-                }
-            }
-            // Fallback to comparing certificate issuer and subject if
-            // subjectKey/authorityKey extensions are not present
-            if (possibleIssuer.subject.equals(certificate.issuer)) {
-                issuers.push(possibleIssuer);
-            }
-        });
-        // Remove any issuers which fail to verify the certificate
-        issuers = issuers.filter((issuer) => {
-            try {
-                return certificate.verify(issuer);
-            }
-            catch (ex) {
-                /* istanbul ignore next - should never error */
-                return false;
-            }
-        });
-        return issuers;
-    }
-    checkPath(path) {
-        /* istanbul ignore if */
-        if (path.length < 1) {
-            throw new error_1.VerificationError({
-                code: 'CERTIFICATE_ERROR',
-                message: 'certificate chain must contain at least one certificate',
-            });
-        }
-        // Ensure that all certificates beyond the leaf are CAs
-        const validCAs = path.slice(1).every((cert) => cert.isCA);
-        if (!validCAs) {
-            throw new error_1.VerificationError({
-                code: 'CERTIFICATE_ERROR',
-                message: 'intermediate certificate is not a CA',
-            });
-        }
-        // Certificate's issuer must match the subject of the next certificate
-        // in the chain
-        for (let i = path.length - 2; i >= 0; i--) {
-            /* istanbul ignore if */
-            if (!path[i].issuer.equals(path[i + 1].subject)) {
-                throw new error_1.VerificationError({
-                    code: 'CERTIFICATE_ERROR',
-                    message: 'incorrect certificate name chaining',
-                });
-            }
-        }
-        // Check pathlength constraints
-        for (let i = 0; i < path.length; i++) {
-            const cert = path[i];
-            // If the certificate is a CA, check the path length
-            if (cert.extBasicConstraints?.isCA) {
-                const pathLength = cert.extBasicConstraints.pathLenConstraint;
-                // The path length, if set, indicates how many intermediate
-                // certificates (NOT including the leaf) are allowed to follow. The
-                // pathLength constraint of any intermediate CA certificate MUST be
-                // greater than or equal to it's own depth in the chain (with an
-                // adjustment for the leaf certificate)
-                if (pathLength !== undefined && pathLength < i - 1) {
-                    throw new error_1.VerificationError({
-                        code: 'CERTIFICATE_ERROR',
-                        message: 'path length constraint exceeded',
-                    });
-                }
-            }
-        }
-    }
-}
-exports.CertificateChainVerifier = CertificateChainVerifier;
-// Remove duplicate certificates from the array
-function dedupeCertificates(certs) {
-    for (let i = 0; i < certs.length; i++) {
-        for (let j = i + 1; j < certs.length; j++) {
-            if (certs[i].equals(certs[j])) {
-                certs.splice(j, 1);
-                j--;
-            }
-        }
-    }
-    return certs;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js
deleted file mode 100644
index c966ccb1e925e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/index.js
+++ /dev/null
@@ -1,67 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyPublicKey = verifyPublicKey;
-exports.verifyCertificate = verifyCertificate;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const error_1 = require("../error");
-const certificate_1 = require("./certificate");
-const sct_1 = require("./sct");
-const OID_FULCIO_ISSUER_V1 = '1.3.6.1.4.1.57264.1.1';
-const OID_FULCIO_ISSUER_V2 = '1.3.6.1.4.1.57264.1.8';
-function verifyPublicKey(hint, timestamps, trustMaterial) {
-    const key = trustMaterial.publicKey(hint);
-    timestamps.forEach((timestamp) => {
-        if (!key.validFor(timestamp)) {
-            throw new error_1.VerificationError({
-                code: 'PUBLIC_KEY_ERROR',
-                message: `Public key is not valid for timestamp: ${timestamp.toISOString()}`,
-            });
-        }
-    });
-    return { key: key.publicKey };
-}
-function verifyCertificate(leaf, timestamps, trustMaterial) {
-    // Check that leaf certificate chains to a trusted CA
-    let path = [];
-    timestamps.forEach((timestamp) => {
-        path = (0, certificate_1.verifyCertificateChain)(timestamp, leaf, trustMaterial.certificateAuthorities);
-    });
-    return {
-        scts: (0, sct_1.verifySCTs)(path[0], path[1], trustMaterial.ctlogs),
-        signer: getSigner(path[0]),
-    };
-}
-function getSigner(cert) {
-    let issuer;
-    const issuerExtension = cert.extension(OID_FULCIO_ISSUER_V2);
-    /* istanbul ignore next */
-    if (issuerExtension) {
-        issuer = issuerExtension.valueObj.subs?.[0]?.value.toString('ascii');
-    }
-    else {
-        issuer = cert.extension(OID_FULCIO_ISSUER_V1)?.value.toString('ascii');
-    }
-    const identity = {
-        extensions: { issuer },
-        subjectAlternativeName: cert.subjectAltName,
-    };
-    return {
-        key: core_1.crypto.createPublicKey(cert.publicKey),
-        identity,
-    };
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js
deleted file mode 100644
index 8eca48738096e..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/key/sct.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifySCTs = verifySCTs;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const error_1 = require("../error");
-const trust_1 = require("../trust");
-function verifySCTs(cert, issuer, ctlogs) {
-    let extSCT;
-    // Verifying the SCT requires that we remove the SCT extension and
-    // re-encode the TBS structure to DER -- this value is part of the data
-    // over which the signature is calculated. Since this is a destructive action
-    // we create a copy of the certificate so we can remove the SCT extension
-    // without affecting the original certificate.
-    const clone = cert.clone();
-    // Intentionally not using the findExtension method here because we want to
-    // remove the the SCT extension from the certificate before calculating the
-    // PreCert structure
-    for (let i = 0; i < clone.extensions.length; i++) {
-        const ext = clone.extensions[i];
-        if (ext.subs[0].toOID() === core_1.EXTENSION_OID_SCT) {
-            extSCT = new core_1.X509SCTExtension(ext);
-            // Remove the extension from the certificate
-            clone.extensions.splice(i, 1);
-            break;
-        }
-    }
-    // No SCT extension found to verify
-    if (!extSCT) {
-        return [];
-    }
-    // Found an SCT extension but it has no SCTs
-    /* istanbul ignore if -- too difficult to fabricate test case for this */
-    if (extSCT.signedCertificateTimestamps.length === 0) {
-        return [];
-    }
-    // Construct the PreCert structure
-    // https://www.rfc-editor.org/rfc/rfc6962#section-3.2
-    const preCert = new core_1.ByteStream();
-    // Calculate hash of the issuer's public key
-    const issuerId = core_1.crypto.digest('sha256', issuer.publicKey);
-    preCert.appendView(issuerId);
-    // Re-encodes the certificate to DER after removing the SCT extension
-    const tbs = clone.tbsCertificate.toDER();
-    preCert.appendUint24(tbs.length);
-    preCert.appendView(tbs);
-    // Calculate and return the verification results for each SCT
-    return extSCT.signedCertificateTimestamps.map((sct) => {
-        // Find the ctlog instance that corresponds to the SCT's logID
-        const validCTLogs = (0, trust_1.filterTLogAuthorities)(ctlogs, {
-            logID: sct.logID,
-            targetDate: sct.datetime,
-        });
-        // See if the SCT is valid for any of the CT logs
-        const verified = validCTLogs.some((log) => sct.verify(preCert.buffer, log.publicKey));
-        if (!verified) {
-            throw new error_1.VerificationError({
-                code: 'CERTIFICATE_ERROR',
-                message: 'SCT verification failed',
-            });
-        }
-        return sct.logID;
-    });
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js
deleted file mode 100644
index f5960cf047b84..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/policy.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifySubjectAlternativeName = verifySubjectAlternativeName;
-exports.verifyExtensions = verifyExtensions;
-const error_1 = require("./error");
-function verifySubjectAlternativeName(policyIdentity, signerIdentity) {
-    if (signerIdentity === undefined || !signerIdentity.match(policyIdentity)) {
-        throw new error_1.PolicyError({
-            code: 'UNTRUSTED_SIGNER_ERROR',
-            message: `certificate identity error - expected ${policyIdentity}, got ${signerIdentity}`,
-        });
-    }
-}
-function verifyExtensions(policyExtensions, signerExtensions = {}) {
-    let key;
-    for (key in policyExtensions) {
-        if (signerExtensions[key] !== policyExtensions[key]) {
-            throw new error_1.PolicyError({
-                code: 'UNTRUSTED_SIGNER_ERROR',
-                message: `invalid certificate extension - expected ${key}=${policyExtensions[key]}, got ${key}=${signerExtensions[key]}`,
-            });
-        }
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/shared.types.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
deleted file mode 100644
index 46619b675f886..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/checkpoint.js
+++ /dev/null
@@ -1,157 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyCheckpoint = verifyCheckpoint;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const error_1 = require("../error");
-const trust_1 = require("../trust");
-// Separator between the note and the signatures in a checkpoint
-const CHECKPOINT_SEPARATOR = '\n\n';
-// Checkpoint signatures are of the following form:
-// "–  \n"
-// where:
-// - the prefix is an emdash (U+2014).
-// -  gives a human-readable representation of the signing ID.
-// -  is the first 4 bytes of the SHA256 hash of the
-//   associated public key followed by the signature bytes.
-const SIGNATURE_REGEX = /\u2014 (\S+) (\S+)\n/g;
-// Verifies the checkpoint value in the given tlog entry. There are two steps
-// to the verification:
-// 1. Verify that all signatures in the checkpoint can be verified against a
-//    trusted public key
-// 2. Verify that the root hash in the checkpoint matches the root hash in the
-//    inclusion proof
-// See: https://github.com/transparency-dev/formats/blob/main/log/README.md
-function verifyCheckpoint(entry, tlogs) {
-    // Filter tlog instances to just those which were valid at the time of the
-    // entry
-    const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
-        targetDate: new Date(Number(entry.integratedTime) * 1000),
-    });
-    const inclusionProof = entry.inclusionProof;
-    const signedNote = SignedNote.fromString(inclusionProof.checkpoint.envelope);
-    const checkpoint = LogCheckpoint.fromString(signedNote.note);
-    // Verify that the signatures in the checkpoint are all valid
-    if (!verifySignedNote(signedNote, validTLogs)) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_INCLUSION_PROOF_ERROR',
-            message: 'invalid checkpoint signature',
-        });
-    }
-    // Verify that the root hash from the checkpoint matches the root hash in the
-    // inclusion proof
-    if (!core_1.crypto.bufferEqual(checkpoint.logHash, inclusionProof.rootHash)) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_INCLUSION_PROOF_ERROR',
-            message: 'root hash mismatch',
-        });
-    }
-}
-// Verifies the signatures in the SignedNote. For each signature, the
-// corresponding transparency log is looked up by the key hint and the
-// signature is verified against the public key in the transparency log.
-// Throws an error if any of the signatures are invalid.
-function verifySignedNote(signedNote, tlogs) {
-    const data = Buffer.from(signedNote.note, 'utf-8');
-    return signedNote.signatures.every((signature) => {
-        // Find the transparency log instance with the matching key hint
-        const tlog = tlogs.find((tlog) => core_1.crypto.bufferEqual(tlog.logID.subarray(0, 4), signature.keyHint));
-        if (!tlog) {
-            return false;
-        }
-        return core_1.crypto.verify(data, tlog.publicKey, signature.signature);
-    });
-}
-// SignedNote represents a signed note from a transparency log checkpoint. Consists
-// of a body (or note) and one more signatures calculated over the body. See
-// https://github.com/transparency-dev/formats/blob/main/log/README.md#signed-envelope
-class SignedNote {
-    constructor(note, signatures) {
-        this.note = note;
-        this.signatures = signatures;
-    }
-    // Deserialize a SignedNote from a string
-    static fromString(envelope) {
-        if (!envelope.includes(CHECKPOINT_SEPARATOR)) {
-            throw new error_1.VerificationError({
-                code: 'TLOG_INCLUSION_PROOF_ERROR',
-                message: 'missing checkpoint separator',
-            });
-        }
-        // Split the note into the header and the data portions at the separator
-        const split = envelope.indexOf(CHECKPOINT_SEPARATOR);
-        const header = envelope.slice(0, split + 1);
-        const data = envelope.slice(split + CHECKPOINT_SEPARATOR.length);
-        // Find all the signature lines in the data portion
-        const matches = data.matchAll(SIGNATURE_REGEX);
-        // Parse each of the matched signature lines into the name and signature.
-        // The first four bytes of the signature are the key hint (should match the
-        // first four bytes of the log ID), and the rest is the signature itself.
-        const signatures = Array.from(matches, (match) => {
-            const [, name, signature] = match;
-            const sigBytes = Buffer.from(signature, 'base64');
-            if (sigBytes.length < 5) {
-                throw new error_1.VerificationError({
-                    code: 'TLOG_INCLUSION_PROOF_ERROR',
-                    message: 'malformed checkpoint signature',
-                });
-            }
-            return {
-                name,
-                keyHint: sigBytes.subarray(0, 4),
-                signature: sigBytes.subarray(4),
-            };
-        });
-        if (signatures.length === 0) {
-            throw new error_1.VerificationError({
-                code: 'TLOG_INCLUSION_PROOF_ERROR',
-                message: 'no signatures found in checkpoint',
-            });
-        }
-        return new SignedNote(header, signatures);
-    }
-}
-// LogCheckpoint represents a transparency log checkpoint. Consists of the
-// following:
-//  - origin: the name of the transparency log
-//  - logSize: the size of the log at the time of the checkpoint
-//  - logHash: the root hash of the log at the time of the checkpoint
-//  - rest: the rest of the checkpoint body, which is a list of log entries
-// See:
-// https://github.com/transparency-dev/formats/blob/main/log/README.md#checkpoint-body
-class LogCheckpoint {
-    constructor(origin, logSize, logHash, rest) {
-        this.origin = origin;
-        this.logSize = logSize;
-        this.logHash = logHash;
-        this.rest = rest;
-    }
-    static fromString(note) {
-        const lines = note.trimEnd().split('\n');
-        if (lines.length < 3) {
-            throw new error_1.VerificationError({
-                code: 'TLOG_INCLUSION_PROOF_ERROR',
-                message: 'too few lines in checkpoint header',
-            });
-        }
-        const origin = lines[0];
-        const logSize = BigInt(lines[1]);
-        const rootHash = Buffer.from(lines[2], 'base64');
-        const rest = lines.slice(3);
-        return new LogCheckpoint(origin, logSize, rootHash, rest);
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js
deleted file mode 100644
index 56e948de19338..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/index.js
+++ /dev/null
@@ -1,46 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyTSATimestamp = verifyTSATimestamp;
-exports.verifyTLogTimestamp = verifyTLogTimestamp;
-const error_1 = require("../error");
-const checkpoint_1 = require("./checkpoint");
-const merkle_1 = require("./merkle");
-const set_1 = require("./set");
-const tsa_1 = require("./tsa");
-function verifyTSATimestamp(timestamp, data, timestampAuthorities) {
-    (0, tsa_1.verifyRFC3161Timestamp)(timestamp, data, timestampAuthorities);
-    return {
-        type: 'timestamp-authority',
-        logID: timestamp.signerSerialNumber,
-        timestamp: timestamp.signingTime,
-    };
-}
-function verifyTLogTimestamp(entry, tlogAuthorities) {
-    let inclusionVerified = false;
-    if (isTLogEntryWithInclusionPromise(entry)) {
-        (0, set_1.verifyTLogSET)(entry, tlogAuthorities);
-        inclusionVerified = true;
-    }
-    if (isTLogEntryWithInclusionProof(entry)) {
-        (0, merkle_1.verifyMerkleInclusion)(entry);
-        (0, checkpoint_1.verifyCheckpoint)(entry, tlogAuthorities);
-        inclusionVerified = true;
-    }
-    if (!inclusionVerified) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_MISSING_INCLUSION_ERROR',
-            message: 'inclusion could not be verified',
-        });
-    }
-    return {
-        type: 'transparency-log',
-        logID: entry.logId.keyId,
-        timestamp: new Date(Number(entry.integratedTime) * 1000),
-    };
-}
-function isTLogEntryWithInclusionPromise(entry) {
-    return entry.inclusionPromise !== undefined;
-}
-function isTLogEntryWithInclusionProof(entry) {
-    return entry.inclusionProof !== undefined;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js
deleted file mode 100644
index f57cae42002bd..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/merkle.js
+++ /dev/null
@@ -1,104 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyMerkleInclusion = verifyMerkleInclusion;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const error_1 = require("../error");
-const RFC6962_LEAF_HASH_PREFIX = Buffer.from([0x00]);
-const RFC6962_NODE_HASH_PREFIX = Buffer.from([0x01]);
-function verifyMerkleInclusion(entry) {
-    const inclusionProof = entry.inclusionProof;
-    const logIndex = BigInt(inclusionProof.logIndex);
-    const treeSize = BigInt(inclusionProof.treeSize);
-    if (logIndex < 0n || logIndex >= treeSize) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_INCLUSION_PROOF_ERROR',
-            message: `invalid index: ${logIndex}`,
-        });
-    }
-    // Figure out which subset of hashes corresponds to the inner and border
-    // nodes
-    const { inner, border } = decompInclProof(logIndex, treeSize);
-    if (inclusionProof.hashes.length !== inner + border) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_INCLUSION_PROOF_ERROR',
-            message: 'invalid hash count',
-        });
-    }
-    const innerHashes = inclusionProof.hashes.slice(0, inner);
-    const borderHashes = inclusionProof.hashes.slice(inner);
-    // The entry's hash is the leaf hash
-    const leafHash = hashLeaf(entry.canonicalizedBody);
-    // Chain the hashes belonging to the inner and border portions
-    const calculatedHash = chainBorderRight(chainInner(leafHash, innerHashes, logIndex), borderHashes);
-    // Calculated hash should match the root hash in the inclusion proof
-    if (!core_1.crypto.bufferEqual(calculatedHash, inclusionProof.rootHash)) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_INCLUSION_PROOF_ERROR',
-            message: 'calculated root hash does not match inclusion proof',
-        });
-    }
-}
-// Breaks down inclusion proof for a leaf at the specified index in a tree of
-// the specified size. The split point is where paths to the index leaf and
-// the (size - 1) leaf diverge. Returns lengths of the bottom and upper proof
-// parts.
-function decompInclProof(index, size) {
-    const inner = innerProofSize(index, size);
-    const border = onesCount(index >> BigInt(inner));
-    return { inner, border };
-}
-// Computes a subtree hash for a node on or below the tree's right border.
-// Assumes the provided proof hashes are ordered from lower to higher levels
-// and seed is the initial hash of the node specified by the index.
-function chainInner(seed, hashes, index) {
-    return hashes.reduce((acc, h, i) => {
-        if ((index >> BigInt(i)) & BigInt(1)) {
-            return hashChildren(h, acc);
-        }
-        else {
-            return hashChildren(acc, h);
-        }
-    }, seed);
-}
-// Computes a subtree hash for nodes along the tree's right border.
-function chainBorderRight(seed, hashes) {
-    return hashes.reduce((acc, h) => hashChildren(h, acc), seed);
-}
-function innerProofSize(index, size) {
-    return bitLength(index ^ (size - BigInt(1)));
-}
-// Counts the number of ones in the binary representation of the given number.
-// https://en.wikipedia.org/wiki/Hamming_weight
-function onesCount(num) {
-    return num.toString(2).split('1').length - 1;
-}
-// Returns the number of bits necessary to represent an integer in binary.
-function bitLength(n) {
-    if (n === 0n) {
-        return 0;
-    }
-    return n.toString(2).length;
-}
-// Hashing logic according to RFC6962.
-// https://datatracker.ietf.org/doc/html/rfc6962#section-2
-function hashChildren(left, right) {
-    return core_1.crypto.digest('sha256', RFC6962_NODE_HASH_PREFIX, left, right);
-}
-function hashLeaf(leaf) {
-    return core_1.crypto.digest('sha256', RFC6962_LEAF_HASH_PREFIX, leaf);
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js
deleted file mode 100644
index 5d3f47bb88746..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/set.js
+++ /dev/null
@@ -1,60 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyTLogSET = verifyTLogSET;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const error_1 = require("../error");
-const trust_1 = require("../trust");
-// Verifies the SET for the given entry against the list of trusted
-// transparency logs. Returns true if the SET can be verified against at least
-// one of the trusted logs; otherwise, returns false.
-function verifyTLogSET(entry, tlogs) {
-    // Filter the list of tlog instances to only those which might be able to
-    // verify the SET
-    const validTLogs = (0, trust_1.filterTLogAuthorities)(tlogs, {
-        logID: entry.logId.keyId,
-        targetDate: new Date(Number(entry.integratedTime) * 1000),
-    });
-    // Check to see if we can verify the SET against any of the valid tlogs
-    const verified = validTLogs.some((tlog) => {
-        // Re-create the original Rekor verification payload
-        const payload = toVerificationPayload(entry);
-        // Canonicalize the payload and turn into a buffer for verification
-        const data = Buffer.from(core_1.json.canonicalize(payload), 'utf8');
-        // Extract the SET from the tlog entry
-        const signature = entry.inclusionPromise.signedEntryTimestamp;
-        return core_1.crypto.verify(data, tlog.publicKey, signature);
-    });
-    if (!verified) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_INCLUSION_PROMISE_ERROR',
-            message: 'inclusion promise could not be verified',
-        });
-    }
-}
-// Returns a properly formatted "VerificationPayload" for one of the
-// transaction log entires in the given bundle which can be used for SET
-// verification.
-function toVerificationPayload(entry) {
-    const { integratedTime, logIndex, logId, canonicalizedBody } = entry;
-    return {
-        body: canonicalizedBody.toString('base64'),
-        integratedTime: Number(integratedTime),
-        logIndex: Number(logIndex),
-        logID: logId.keyId.toString('hex'),
-    };
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js
deleted file mode 100644
index 0da4a3de8247f..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/timestamp/tsa.js
+++ /dev/null
@@ -1,63 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyRFC3161Timestamp = verifyRFC3161Timestamp;
-const core_1 = require("@sigstore/core");
-const error_1 = require("../error");
-const certificate_1 = require("../key/certificate");
-const trust_1 = require("../trust");
-function verifyRFC3161Timestamp(timestamp, data, timestampAuthorities) {
-    const signingTime = timestamp.signingTime;
-    // Filter for CAs which were valid at the time of signing
-    timestampAuthorities = (0, trust_1.filterCertAuthorities)(timestampAuthorities, signingTime);
-    // Filter for CAs which match serial and issuer embedded in the timestamp
-    timestampAuthorities = filterCAsBySerialAndIssuer(timestampAuthorities, {
-        serialNumber: timestamp.signerSerialNumber,
-        issuer: timestamp.signerIssuer,
-    });
-    // Check that we can verify the timestamp with AT LEAST ONE of the remaining
-    // CAs
-    const verified = timestampAuthorities.some((ca) => {
-        try {
-            verifyTimestampForCA(timestamp, data, ca);
-            return true;
-        }
-        catch (e) {
-            return false;
-        }
-    });
-    if (!verified) {
-        throw new error_1.VerificationError({
-            code: 'TIMESTAMP_ERROR',
-            message: 'timestamp could not be verified',
-        });
-    }
-}
-function verifyTimestampForCA(timestamp, data, ca) {
-    const [leaf, ...cas] = ca.certChain;
-    const signingKey = core_1.crypto.createPublicKey(leaf.publicKey);
-    const signingTime = timestamp.signingTime;
-    // Verify the certificate chain for the provided CA
-    try {
-        new certificate_1.CertificateChainVerifier({
-            untrustedCert: leaf,
-            trustedCerts: cas,
-            timestamp: signingTime,
-        }).verify();
-    }
-    catch (e) {
-        throw new error_1.VerificationError({
-            code: 'TIMESTAMP_ERROR',
-            message: 'invalid certificate chain',
-        });
-    }
-    // Check that the signing certificate's key can be used to verify the
-    // timestamp signature.
-    timestamp.verify(data, signingKey);
-}
-// Filters the list of CAs to those which have a leaf signing certificate which
-// matches the given serial number and issuer.
-function filterCAsBySerialAndIssuer(timestampAuthorities, criteria) {
-    return timestampAuthorities.filter((ca) => ca.certChain.length > 0 &&
-        core_1.crypto.bufferEqual(ca.certChain[0].serialNumber, criteria.serialNumber) &&
-        core_1.crypto.bufferEqual(ca.certChain[0].issuer, criteria.issuer));
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js
deleted file mode 100644
index d71ed8c6e7ad9..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/dsse.js
+++ /dev/null
@@ -1,57 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyDSSETLogBody = verifyDSSETLogBody;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../error");
-// Compare the given intoto tlog entry to the given bundle
-function verifyDSSETLogBody(tlogEntry, content) {
-    switch (tlogEntry.apiVersion) {
-        case '0.0.1':
-            return verifyDSSE001TLogBody(tlogEntry, content);
-        default:
-            throw new error_1.VerificationError({
-                code: 'TLOG_BODY_ERROR',
-                message: `unsupported dsse version: ${tlogEntry.apiVersion}`,
-            });
-    }
-}
-// Compare the given dsse v0.0.1 tlog entry to the given DSSE envelope.
-function verifyDSSE001TLogBody(tlogEntry, content) {
-    // Ensure the bundle's DSSE only contains a single signature
-    if (tlogEntry.spec.signatures?.length !== 1) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'signature count mismatch',
-        });
-    }
-    const tlogSig = tlogEntry.spec.signatures[0].signature;
-    // Ensure that the signature in the bundle's DSSE matches tlog entry
-    if (!content.compareSignature(Buffer.from(tlogSig, 'base64')))
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'tlog entry signature mismatch',
-        });
-    // Ensure the digest of the bundle's DSSE payload matches the digest in the
-    // tlog entry
-    const tlogHash = tlogEntry.spec.payloadHash?.value || '';
-    if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'DSSE payload hash mismatch',
-        });
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
deleted file mode 100644
index c4aa345b57ba7..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/hashedrekord.js
+++ /dev/null
@@ -1,51 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyHashedRekordTLogBody = verifyHashedRekordTLogBody;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../error");
-// Compare the given hashedrekord tlog entry to the given bundle
-function verifyHashedRekordTLogBody(tlogEntry, content) {
-    switch (tlogEntry.apiVersion) {
-        case '0.0.1':
-            return verifyHashedrekord001TLogBody(tlogEntry, content);
-        default:
-            throw new error_1.VerificationError({
-                code: 'TLOG_BODY_ERROR',
-                message: `unsupported hashedrekord version: ${tlogEntry.apiVersion}`,
-            });
-    }
-}
-// Compare the given hashedrekord v0.0.1 tlog entry to the given message
-// signature
-function verifyHashedrekord001TLogBody(tlogEntry, content) {
-    // Ensure that the bundles message signature matches the tlog entry
-    const tlogSig = tlogEntry.spec.signature.content || '';
-    if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'signature mismatch',
-        });
-    }
-    // Ensure that the bundle's message digest matches the tlog entry
-    const tlogDigest = tlogEntry.spec.data.hash?.value || '';
-    if (!content.compareDigest(Buffer.from(tlogDigest, 'hex'))) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'digest mismatch',
-        });
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js
deleted file mode 100644
index da235360c594a..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/index.js
+++ /dev/null
@@ -1,47 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyTLogBody = verifyTLogBody;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../error");
-const dsse_1 = require("./dsse");
-const hashedrekord_1 = require("./hashedrekord");
-const intoto_1 = require("./intoto");
-// Verifies that the given tlog entry matches the supplied signature content.
-function verifyTLogBody(entry, sigContent) {
-    const { kind, version } = entry.kindVersion;
-    const body = JSON.parse(entry.canonicalizedBody.toString('utf8'));
-    if (kind !== body.kind || version !== body.apiVersion) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: `kind/version mismatch - expected: ${kind}/${version}, received: ${body.kind}/${body.apiVersion}`,
-        });
-    }
-    switch (body.kind) {
-        case 'dsse':
-            return (0, dsse_1.verifyDSSETLogBody)(body, sigContent);
-        case 'intoto':
-            return (0, intoto_1.verifyIntotoTLogBody)(body, sigContent);
-        case 'hashedrekord':
-            return (0, hashedrekord_1.verifyHashedRekordTLogBody)(body, sigContent);
-        /* istanbul ignore next */
-        default:
-            throw new error_1.VerificationError({
-                code: 'TLOG_BODY_ERROR',
-                message: `unsupported kind: ${kind}`,
-            });
-    }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js
deleted file mode 100644
index 9096ae9418cc3..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/tlog/intoto.js
+++ /dev/null
@@ -1,62 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifyIntotoTLogBody = verifyIntotoTLogBody;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const error_1 = require("../error");
-// Compare the given intoto tlog entry to the given bundle
-function verifyIntotoTLogBody(tlogEntry, content) {
-    switch (tlogEntry.apiVersion) {
-        case '0.0.2':
-            return verifyIntoto002TLogBody(tlogEntry, content);
-        default:
-            throw new error_1.VerificationError({
-                code: 'TLOG_BODY_ERROR',
-                message: `unsupported intoto version: ${tlogEntry.apiVersion}`,
-            });
-    }
-}
-// Compare the given intoto v0.0.2 tlog entry to the given DSSE envelope.
-function verifyIntoto002TLogBody(tlogEntry, content) {
-    // Ensure the bundle's DSSE contains a single signature
-    if (tlogEntry.spec.content.envelope.signatures?.length !== 1) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'signature count mismatch',
-        });
-    }
-    // Signature is double-base64-encoded in the tlog entry
-    const tlogSig = base64Decode(tlogEntry.spec.content.envelope.signatures[0].sig);
-    // Ensure that the signature in the bundle's DSSE matches tlog entry
-    if (!content.compareSignature(Buffer.from(tlogSig, 'base64'))) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'tlog entry signature mismatch',
-        });
-    }
-    // Ensure the digest of the bundle's DSSE payload matches the digest in the
-    // tlog entry
-    const tlogHash = tlogEntry.spec.content.payloadHash?.value || '';
-    if (!content.compareDigest(Buffer.from(tlogHash, 'hex'))) {
-        throw new error_1.VerificationError({
-            code: 'TLOG_BODY_ERROR',
-            message: 'DSSE payload hash mismatch',
-        });
-    }
-}
-function base64Decode(str) {
-    return Buffer.from(str, 'base64').toString('utf-8');
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js
deleted file mode 100644
index 98bd25cd70e59..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/filter.js
+++ /dev/null
@@ -1,23 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.filterCertAuthorities = filterCertAuthorities;
-exports.filterTLogAuthorities = filterTLogAuthorities;
-function filterCertAuthorities(certAuthorities, timestamp) {
-    return certAuthorities.filter((ca) => {
-        return ca.validFor.start <= timestamp && ca.validFor.end >= timestamp;
-    });
-}
-// Filter the list of tlog instances to only those which match the given log
-// ID and have public keys which are valid for the given integrated time.
-function filterTLogAuthorities(tlogAuthorities, criteria) {
-    return tlogAuthorities.filter((tlog) => {
-        // If we're filtering by log ID and the log IDs don't match, we can't use
-        // this tlog
-        if (criteria.logID && !tlog.logID.equals(criteria.logID)) {
-            return false;
-        }
-        // Check that the integrated time is within the validFor range
-        return (tlog.validFor.start <= criteria.targetDate &&
-            criteria.targetDate <= tlog.validFor.end);
-    });
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js
deleted file mode 100644
index bfab2eb4f9975..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/index.js
+++ /dev/null
@@ -1,86 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.filterTLogAuthorities = exports.filterCertAuthorities = void 0;
-exports.toTrustMaterial = toTrustMaterial;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const error_1 = require("../error");
-const BEGINNING_OF_TIME = new Date(0);
-const END_OF_TIME = new Date(8640000000000000);
-var filter_1 = require("./filter");
-Object.defineProperty(exports, "filterCertAuthorities", { enumerable: true, get: function () { return filter_1.filterCertAuthorities; } });
-Object.defineProperty(exports, "filterTLogAuthorities", { enumerable: true, get: function () { return filter_1.filterTLogAuthorities; } });
-function toTrustMaterial(root, keys) {
-    const keyFinder = typeof keys === 'function' ? keys : keyLocator(keys);
-    return {
-        certificateAuthorities: root.certificateAuthorities.map(createCertAuthority),
-        timestampAuthorities: root.timestampAuthorities.map(createCertAuthority),
-        tlogs: root.tlogs.map(createTLogAuthority),
-        ctlogs: root.ctlogs.map(createTLogAuthority),
-        publicKey: keyFinder,
-    };
-}
-function createTLogAuthority(tlogInstance) {
-    const keyDetails = tlogInstance.publicKey.keyDetails;
-    const keyType = keyDetails === protobuf_specs_1.PublicKeyDetails.PKCS1_RSA_PKCS1V5 ||
-        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V5 ||
-        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256 ||
-        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256 ||
-        keyDetails === protobuf_specs_1.PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256
-        ? 'pkcs1'
-        : 'spki';
-    return {
-        logID: tlogInstance.logId.keyId,
-        publicKey: core_1.crypto.createPublicKey(tlogInstance.publicKey.rawBytes, keyType),
-        validFor: {
-            start: tlogInstance.publicKey.validFor?.start || BEGINNING_OF_TIME,
-            end: tlogInstance.publicKey.validFor?.end || END_OF_TIME,
-        },
-    };
-}
-function createCertAuthority(ca) {
-    /* istanbul ignore next */
-    return {
-        certChain: ca.certChain.certificates.map((cert) => {
-            return core_1.X509Certificate.parse(cert.rawBytes);
-        }),
-        validFor: {
-            start: ca.validFor?.start || BEGINNING_OF_TIME,
-            end: ca.validFor?.end || END_OF_TIME,
-        },
-    };
-}
-function keyLocator(keys) {
-    return (hint) => {
-        const key = (keys || {})[hint];
-        if (!key) {
-            throw new error_1.VerificationError({
-                code: 'PUBLIC_KEY_ERROR',
-                message: `key not found: ${hint}`,
-            });
-        }
-        return {
-            publicKey: core_1.crypto.createPublicKey(key.rawBytes),
-            validFor: (date) => {
-                /* istanbul ignore next */
-                return ((key.validFor?.start || BEGINNING_OF_TIME) <= date &&
-                    (key.validFor?.end || END_OF_TIME) >= date);
-            },
-        };
-    };
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/trust/trust.types.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js b/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js
deleted file mode 100644
index 6a9d11a3b6f8f..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/dist/verifier.js
+++ /dev/null
@@ -1,143 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Verifier = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const util_1 = require("util");
-const error_1 = require("./error");
-const key_1 = require("./key");
-const policy_1 = require("./policy");
-const timestamp_1 = require("./timestamp");
-const tlog_1 = require("./tlog");
-class Verifier {
-    constructor(trustMaterial, options = {}) {
-        this.trustMaterial = trustMaterial;
-        this.options = {
-            ctlogThreshold: options.ctlogThreshold ?? 1,
-            tlogThreshold: options.tlogThreshold ?? 1,
-            tsaThreshold: options.tsaThreshold ?? 0,
-        };
-    }
-    verify(entity, policy) {
-        const timestamps = this.verifyTimestamps(entity);
-        const signer = this.verifySigningKey(entity, timestamps);
-        this.verifyTLogs(entity);
-        this.verifySignature(entity, signer);
-        if (policy) {
-            this.verifyPolicy(policy, signer.identity || {});
-        }
-        return signer;
-    }
-    // Checks that all of the timestamps in the entity are valid and returns them
-    verifyTimestamps(entity) {
-        let tlogCount = 0;
-        let tsaCount = 0;
-        const timestamps = entity.timestamps.map((timestamp) => {
-            switch (timestamp.$case) {
-                case 'timestamp-authority':
-                    tsaCount++;
-                    return (0, timestamp_1.verifyTSATimestamp)(timestamp.timestamp, entity.signature.signature, this.trustMaterial.timestampAuthorities);
-                case 'transparency-log':
-                    tlogCount++;
-                    return (0, timestamp_1.verifyTLogTimestamp)(timestamp.tlogEntry, this.trustMaterial.tlogs);
-            }
-        });
-        // Check for duplicate timestamps
-        if (containsDupes(timestamps)) {
-            throw new error_1.VerificationError({
-                code: 'TIMESTAMP_ERROR',
-                message: 'duplicate timestamp',
-            });
-        }
-        if (tlogCount < this.options.tlogThreshold) {
-            throw new error_1.VerificationError({
-                code: 'TIMESTAMP_ERROR',
-                message: `expected ${this.options.tlogThreshold} tlog timestamps, got ${tlogCount}`,
-            });
-        }
-        if (tsaCount < this.options.tsaThreshold) {
-            throw new error_1.VerificationError({
-                code: 'TIMESTAMP_ERROR',
-                message: `expected ${this.options.tsaThreshold} tsa timestamps, got ${tsaCount}`,
-            });
-        }
-        return timestamps.map((t) => t.timestamp);
-    }
-    // Checks that the signing key is valid for all of the the supplied timestamps
-    // and returns the signer.
-    verifySigningKey({ key }, timestamps) {
-        switch (key.$case) {
-            case 'public-key': {
-                return (0, key_1.verifyPublicKey)(key.hint, timestamps, this.trustMaterial);
-            }
-            case 'certificate': {
-                const result = (0, key_1.verifyCertificate)(key.certificate, timestamps, this.trustMaterial);
-                /* istanbul ignore next - no fixture */
-                if (containsDupes(result.scts)) {
-                    throw new error_1.VerificationError({
-                        code: 'CERTIFICATE_ERROR',
-                        message: 'duplicate SCT',
-                    });
-                }
-                if (result.scts.length < this.options.ctlogThreshold) {
-                    throw new error_1.VerificationError({
-                        code: 'CERTIFICATE_ERROR',
-                        message: `expected ${this.options.ctlogThreshold} SCTs, got ${result.scts.length}`,
-                    });
-                }
-                return result.signer;
-            }
-        }
-    }
-    // Checks that the tlog entries are valid for the supplied content
-    verifyTLogs({ signature: content, tlogEntries }) {
-        tlogEntries.forEach((entry) => (0, tlog_1.verifyTLogBody)(entry, content));
-    }
-    // Checks that the signature is valid for the supplied content
-    verifySignature(entity, signer) {
-        if (!entity.signature.verifySignature(signer.key)) {
-            throw new error_1.VerificationError({
-                code: 'SIGNATURE_ERROR',
-                message: 'signature verification failed',
-            });
-        }
-    }
-    verifyPolicy(policy, identity) {
-        // Check the subject alternative name of the signer matches the policy
-        /* istanbul ignore else */
-        if (policy.subjectAlternativeName) {
-            (0, policy_1.verifySubjectAlternativeName)(policy.subjectAlternativeName, identity.subjectAlternativeName);
-        }
-        // Check that the extensions of the signer match the policy
-        /* istanbul ignore else */
-        if (policy.extensions) {
-            (0, policy_1.verifyExtensions)(policy.extensions, identity.extensions);
-        }
-    }
-}
-exports.Verifier = Verifier;
-// Checks for duplicate items in the array. Objects are compared using
-// deep equality.
-function containsDupes(arr) {
-    for (let i = 0; i < arr.length; i++) {
-        for (let j = i + 1; j < arr.length; j++) {
-            if ((0, util_1.isDeepStrictEqual)(arr[i], arr[j])) {
-                return true;
-            }
-        }
-    }
-    return false;
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/verify/package.json b/node_modules/pacote/node_modules/@sigstore/verify/package.json
deleted file mode 100644
index eaf12376c9025..0000000000000
--- a/node_modules/pacote/node_modules/@sigstore/verify/package.json
+++ /dev/null
@@ -1,36 +0,0 @@
-{
-  "name": "@sigstore/verify",
-  "version": "3.0.0",
-  "description": "Verification of Sigstore signatures",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/verify#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "dependencies": {
-    "@sigstore/protobuf-specs": "^0.5.0",
-    "@sigstore/bundle": "^4.0.0",
-    "@sigstore/core": "^3.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/sigstore/LICENSE b/node_modules/pacote/node_modules/sigstore/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/pacote/node_modules/sigstore/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/pacote/node_modules/sigstore/dist/config.js b/node_modules/pacote/node_modules/sigstore/dist/config.js
deleted file mode 100644
index e8b2392f97f23..0000000000000
--- a/node_modules/pacote/node_modules/sigstore/dist/config.js
+++ /dev/null
@@ -1,120 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DEFAULT_TIMEOUT = exports.DEFAULT_RETRY = void 0;
-exports.createBundleBuilder = createBundleBuilder;
-exports.createKeyFinder = createKeyFinder;
-exports.createVerificationPolicy = createVerificationPolicy;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const core_1 = require("@sigstore/core");
-const sign_1 = require("@sigstore/sign");
-const verify_1 = require("@sigstore/verify");
-exports.DEFAULT_RETRY = { retries: 2 };
-exports.DEFAULT_TIMEOUT = 5000;
-function createBundleBuilder(bundleType, options) {
-    const bundlerOptions = {
-        signer: initSigner(options),
-        witnesses: initWitnesses(options),
-    };
-    switch (bundleType) {
-        case 'messageSignature':
-            return new sign_1.MessageSignatureBundleBuilder(bundlerOptions);
-        case 'dsseEnvelope':
-            return new sign_1.DSSEBundleBuilder({
-                ...bundlerOptions,
-                certificateChain: options.legacyCompatibility,
-            });
-    }
-}
-// Translates the public KeySelector type into the KeyFinderFunc type needed by
-// the verifier.
-function createKeyFinder(keySelector) {
-    return (hint) => {
-        const key = keySelector(hint);
-        if (!key) {
-            throw new verify_1.VerificationError({
-                code: 'PUBLIC_KEY_ERROR',
-                message: `key not found: ${hint}`,
-            });
-        }
-        return {
-            publicKey: core_1.crypto.createPublicKey(key),
-            validFor: () => true,
-        };
-    };
-}
-function createVerificationPolicy(options) {
-    const policy = {};
-    const san = options.certificateIdentityEmail || options.certificateIdentityURI;
-    if (san) {
-        policy.subjectAlternativeName = san;
-    }
-    if (options.certificateIssuer) {
-        policy.extensions = { issuer: options.certificateIssuer };
-    }
-    return policy;
-}
-// Instantiate the FulcioSigner based on the supplied options.
-function initSigner(options) {
-    return new sign_1.FulcioSigner({
-        fulcioBaseURL: options.fulcioURL,
-        identityProvider: options.identityProvider || initIdentityProvider(options),
-        retry: options.retry ?? exports.DEFAULT_RETRY,
-        timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-    });
-}
-// Instantiate an identity provider based on the supplied options. If an
-// explicit identity token is provided, use that. Otherwise, use the CI
-// context provider.
-function initIdentityProvider(options) {
-    const token = options.identityToken;
-    if (token) {
-        /* istanbul ignore next */
-        return { getToken: () => Promise.resolve(token) };
-    }
-    else {
-        return new sign_1.CIContextProvider('sigstore');
-    }
-}
-// Instantiate a collection of witnesses based on the supplied options.
-function initWitnesses(options) {
-    const witnesses = [];
-    if (isRekorEnabled(options)) {
-        witnesses.push(new sign_1.RekorWitness({
-            rekorBaseURL: options.rekorURL,
-            entryType: options.legacyCompatibility ? 'intoto' : 'dsse',
-            fetchOnConflict: false,
-            retry: options.retry ?? exports.DEFAULT_RETRY,
-            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-        }));
-    }
-    if (isTSAEnabled(options)) {
-        witnesses.push(new sign_1.TSAWitness({
-            tsaBaseURL: options.tsaServerURL,
-            retry: options.retry ?? exports.DEFAULT_RETRY,
-            timeout: options.timeout ?? exports.DEFAULT_TIMEOUT,
-        }));
-    }
-    return witnesses;
-}
-// Type assertion to ensure that Rekor is enabled
-function isRekorEnabled(options) {
-    return options.tlogUpload !== false;
-}
-// Type assertion to ensure that TSA is enabled
-function isTSAEnabled(options) {
-    return options.tsaServerURL !== undefined;
-}
diff --git a/node_modules/pacote/node_modules/sigstore/dist/index.js b/node_modules/pacote/node_modules/sigstore/dist/index.js
deleted file mode 100644
index 7f6a5cf86bbfc..0000000000000
--- a/node_modules/pacote/node_modules/sigstore/dist/index.js
+++ /dev/null
@@ -1,34 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verify = exports.sign = exports.createVerifier = exports.attest = exports.VerificationError = exports.PolicyError = exports.TUFError = exports.InternalError = exports.DEFAULT_REKOR_URL = exports.DEFAULT_FULCIO_URL = exports.ValidationError = void 0;
-/*
-Copyright 2022 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-var bundle_1 = require("@sigstore/bundle");
-Object.defineProperty(exports, "ValidationError", { enumerable: true, get: function () { return bundle_1.ValidationError; } });
-var sign_1 = require("@sigstore/sign");
-Object.defineProperty(exports, "DEFAULT_FULCIO_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_FULCIO_URL; } });
-Object.defineProperty(exports, "DEFAULT_REKOR_URL", { enumerable: true, get: function () { return sign_1.DEFAULT_REKOR_URL; } });
-Object.defineProperty(exports, "InternalError", { enumerable: true, get: function () { return sign_1.InternalError; } });
-var tuf_1 = require("@sigstore/tuf");
-Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return tuf_1.TUFError; } });
-var verify_1 = require("@sigstore/verify");
-Object.defineProperty(exports, "PolicyError", { enumerable: true, get: function () { return verify_1.PolicyError; } });
-Object.defineProperty(exports, "VerificationError", { enumerable: true, get: function () { return verify_1.VerificationError; } });
-var sigstore_1 = require("./sigstore");
-Object.defineProperty(exports, "attest", { enumerable: true, get: function () { return sigstore_1.attest; } });
-Object.defineProperty(exports, "createVerifier", { enumerable: true, get: function () { return sigstore_1.createVerifier; } });
-Object.defineProperty(exports, "sign", { enumerable: true, get: function () { return sigstore_1.sign; } });
-Object.defineProperty(exports, "verify", { enumerable: true, get: function () { return sigstore_1.verify; } });
diff --git a/node_modules/pacote/node_modules/sigstore/dist/sigstore.js b/node_modules/pacote/node_modules/sigstore/dist/sigstore.js
deleted file mode 100644
index cb4c66b38111b..0000000000000
--- a/node_modules/pacote/node_modules/sigstore/dist/sigstore.js
+++ /dev/null
@@ -1,112 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.sign = sign;
-exports.attest = attest;
-exports.verify = verify;
-exports.createVerifier = createVerifier;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const bundle_1 = require("@sigstore/bundle");
-const tuf = __importStar(require("@sigstore/tuf"));
-const verify_1 = require("@sigstore/verify");
-const config = __importStar(require("./config"));
-async function sign(payload, 
-/* istanbul ignore next */
-options = {}) {
-    const bundler = config.createBundleBuilder('messageSignature', options);
-    const bundle = await bundler.create({ data: payload });
-    return (0, bundle_1.bundleToJSON)(bundle);
-}
-async function attest(payload, payloadType, 
-/* istanbul ignore next */
-options = {}) {
-    const bundler = config.createBundleBuilder('dsseEnvelope', options);
-    const bundle = await bundler.create({ data: payload, type: payloadType });
-    return (0, bundle_1.bundleToJSON)(bundle);
-}
-async function verify(bundle, dataOrOptions, options) {
-    let data;
-    if (Buffer.isBuffer(dataOrOptions)) {
-        data = dataOrOptions;
-    }
-    else {
-        options = dataOrOptions;
-    }
-    return createVerifier(options).then((verifier) => verifier.verify(bundle, data));
-}
-async function createVerifier(
-/* istanbul ignore next */
-options = {}) {
-    const trustedRoot = await tuf.getTrustedRoot({
-        mirrorURL: options.tufMirrorURL,
-        rootPath: options.tufRootPath,
-        cachePath: options.tufCachePath,
-        forceCache: options.tufForceCache,
-        retry: options.retry ?? config.DEFAULT_RETRY,
-        timeout: options.timeout ?? config.DEFAULT_TIMEOUT,
-    });
-    const keyFinder = options.keySelector
-        ? config.createKeyFinder(options.keySelector)
-        : undefined;
-    const trustMaterial = (0, verify_1.toTrustMaterial)(trustedRoot, keyFinder);
-    const verifierOptions = {
-        ctlogThreshold: options.ctLogThreshold,
-        tlogThreshold: options.tlogThreshold,
-    };
-    const verifier = new verify_1.Verifier(trustMaterial, verifierOptions);
-    const policy = config.createVerificationPolicy(options);
-    return {
-        verify: (bundle, payload) => {
-            const deserializedBundle = (0, bundle_1.bundleFromJSON)(bundle);
-            const signedEntity = (0, verify_1.toSignedEntity)(deserializedBundle, payload);
-            verifier.verify(signedEntity, policy);
-            return;
-        },
-    };
-}
diff --git a/node_modules/pacote/node_modules/sigstore/package.json b/node_modules/pacote/node_modules/sigstore/package.json
deleted file mode 100644
index b036dc787c75c..0000000000000
--- a/node_modules/pacote/node_modules/sigstore/package.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
-  "name": "sigstore",
-  "version": "4.0.0",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist",
-    "store"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/client#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "devDependencies": {
-    "@sigstore/rekor-types": "^4.0.0",
-    "@sigstore/jest": "^0.0.0",
-    "@sigstore/mock": "^0.11.0",
-    "@tufjs/repo-mock": "^3.0.1",
-    "@types/make-fetch-happen": "^10.0.4"
-  },
-  "dependencies": {
-    "@sigstore/bundle": "^4.0.0",
-    "@sigstore/core": "^3.0.0",
-    "@sigstore/protobuf-specs": "^0.5.0",
-    "@sigstore/sign": "^4.0.0",
-    "@sigstore/tuf": "^4.0.0",
-    "@sigstore/verify": "^3.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/pacote/node_modules/@sigstore/sign/LICENSE b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/sign/LICENSE
rename to node_modules/sigstore/node_modules/@sigstore/protobuf-specs/LICENSE
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
new file mode 100644
index 0000000000000..5c4f37bfaf3fb
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -0,0 +1,59 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: envelope.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Envelope = void 0;
+exports.Envelope = {
+    fromJSON(object) {
+        return {
+            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
+            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payload.length !== 0) {
+            obj.payload = base64FromBytes(message.payload);
+        }
+        if (message.payloadType !== "") {
+            obj.payloadType = message.payloadType;
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
+            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.sig.length !== 0) {
+            obj.sig = base64FromBytes(message.sig);
+        }
+        if (message.keyid !== "") {
+            obj.keyid = message.keyid;
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
new file mode 100644
index 0000000000000..6138fef5672fc
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -0,0 +1,174 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: events.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
+/* eslint-disable */
+const any_1 = require("./google/protobuf/any");
+const timestamp_1 = require("./google/protobuf/timestamp");
+exports.CloudEvent = {
+    fromJSON(object) {
+        return {
+            id: isSet(object.id) ? globalThis.String(object.id) : "",
+            source: isSet(object.source) ? globalThis.String(object.source) : "",
+            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            attributes: isObject(object.attributes)
+                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
+                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
+                    return acc;
+                }, {})
+                : {},
+            data: isSet(object.binaryData)
+                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
+                : isSet(object.textData)
+                    ? { $case: "textData", textData: globalThis.String(object.textData) }
+                    : isSet(object.protoData)
+                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id !== "") {
+            obj.id = message.id;
+        }
+        if (message.source !== "") {
+            obj.source = message.source;
+        }
+        if (message.specVersion !== "") {
+            obj.specVersion = message.specVersion;
+        }
+        if (message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.attributes) {
+            const entries = Object.entries(message.attributes);
+            if (entries.length > 0) {
+                obj.attributes = {};
+                entries.forEach(([k, v]) => {
+                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
+                });
+            }
+        }
+        if (message.data?.$case === "binaryData") {
+            obj.binaryData = base64FromBytes(message.data.binaryData);
+        }
+        else if (message.data?.$case === "textData") {
+            obj.textData = message.data.textData;
+        }
+        else if (message.data?.$case === "protoData") {
+            obj.protoData = any_1.Any.toJSON(message.data.protoData);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_AttributesEntry = {
+    fromJSON(object) {
+        return {
+            key: isSet(object.key) ? globalThis.String(object.key) : "",
+            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.key !== "") {
+            obj.key = message.key;
+        }
+        if (message.value !== undefined) {
+            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
+        }
+        return obj;
+    },
+};
+exports.CloudEvent_CloudEventAttributeValue = {
+    fromJSON(object) {
+        return {
+            attr: isSet(object.ceBoolean)
+                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
+                : isSet(object.ceInteger)
+                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
+                    : isSet(object.ceString)
+                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
+                        : isSet(object.ceBytes)
+                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
+                            : isSet(object.ceUri)
+                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
+                                : isSet(object.ceUriRef)
+                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
+                                    : isSet(object.ceTimestamp)
+                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
+                                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.attr?.$case === "ceBoolean") {
+            obj.ceBoolean = message.attr.ceBoolean;
+        }
+        else if (message.attr?.$case === "ceInteger") {
+            obj.ceInteger = Math.round(message.attr.ceInteger);
+        }
+        else if (message.attr?.$case === "ceString") {
+            obj.ceString = message.attr.ceString;
+        }
+        else if (message.attr?.$case === "ceBytes") {
+            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
+        }
+        else if (message.attr?.$case === "ceUri") {
+            obj.ceUri = message.attr.ceUri;
+        }
+        else if (message.attr?.$case === "ceUriRef") {
+            obj.ceUriRef = message.attr.ceUriRef;
+        }
+        else if (message.attr?.$case === "ceTimestamp") {
+            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
+        }
+        return obj;
+    },
+};
+exports.CloudEventBatch = {
+    fromJSON(object) {
+        return {
+            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.events?.length) {
+            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isObject(value) {
+    return typeof value === "object" && value !== null;
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
new file mode 100644
index 0000000000000..b4d9ccc781c2f
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -0,0 +1,141 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/api/field_behavior.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.FieldBehavior = void 0;
+exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
+exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
+/* eslint-disable */
+/**
+ * An indicator of the behavior of a given field (for example, that a field
+ * is required in requests, or given as output but ignored as input).
+ * This **does not** change the behavior in protocol buffers itself; it only
+ * denotes the behavior and may affect how API tooling handles the field.
+ *
+ * Note: This enum **may** receive new values in the future.
+ */
+var FieldBehavior;
+(function (FieldBehavior) {
+    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
+    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
+    /**
+     * OPTIONAL - Specifically denotes a field as optional.
+     * While all fields in protocol buffers are optional, this may be specified
+     * for emphasis if appropriate.
+     */
+    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
+    /**
+     * REQUIRED - Denotes a field as required.
+     * This indicates that the field **must** be provided as part of the request,
+     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
+     */
+    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
+    /**
+     * OUTPUT_ONLY - Denotes a field as output only.
+     * This indicates that the field is provided in responses, but including the
+     * field in a request does nothing (the server *must* ignore it and
+     * *must not* throw an error as a result of the field's presence).
+     */
+    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
+    /**
+     * INPUT_ONLY - Denotes a field as input only.
+     * This indicates that the field is provided in requests, and the
+     * corresponding field is not included in output.
+     */
+    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
+    /**
+     * IMMUTABLE - Denotes a field as immutable.
+     * This indicates that the field may be set once in a request to create a
+     * resource, but may not be changed thereafter.
+     */
+    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
+    /**
+     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
+     * This indicates that the service may provide the elements of the list
+     * in any arbitrary  order, rather than the order the user originally
+     * provided. Additionally, the list's order may or may not be stable.
+     */
+    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
+    /**
+     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
+     * This indicates that if the user provides the empty value in a request,
+     * a non-empty value will be returned. The user will not be aware of what
+     * non-empty value to expect.
+     */
+    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
+    /**
+     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
+     * google.api.resource) is used in the resource name to uniquely identify the
+     * resource. For AIP-compliant APIs, this should only be applied to the
+     * `name` field on the resource.
+     *
+     * This behavior should not be applied to references to other resources within
+     * the message.
+     *
+     * The identifier field of resources often have different field behavior
+     * depending on the request it is embedded in (e.g. for Create methods name
+     * is optional and unused, while for Update methods it is required). Instead
+     * of method-specific annotations, only `IDENTIFIER` is required.
+     */
+    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
+})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
+function fieldBehaviorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_BEHAVIOR_UNSPECIFIED":
+            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
+        case 1:
+        case "OPTIONAL":
+            return FieldBehavior.OPTIONAL;
+        case 2:
+        case "REQUIRED":
+            return FieldBehavior.REQUIRED;
+        case 3:
+        case "OUTPUT_ONLY":
+            return FieldBehavior.OUTPUT_ONLY;
+        case 4:
+        case "INPUT_ONLY":
+            return FieldBehavior.INPUT_ONLY;
+        case 5:
+        case "IMMUTABLE":
+            return FieldBehavior.IMMUTABLE;
+        case 6:
+        case "UNORDERED_LIST":
+            return FieldBehavior.UNORDERED_LIST;
+        case 7:
+        case "NON_EMPTY_DEFAULT":
+            return FieldBehavior.NON_EMPTY_DEFAULT;
+        case 8:
+        case "IDENTIFIER":
+            return FieldBehavior.IDENTIFIER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
+function fieldBehaviorToJSON(object) {
+    switch (object) {
+        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
+            return "FIELD_BEHAVIOR_UNSPECIFIED";
+        case FieldBehavior.OPTIONAL:
+            return "OPTIONAL";
+        case FieldBehavior.REQUIRED:
+            return "REQUIRED";
+        case FieldBehavior.OUTPUT_ONLY:
+            return "OUTPUT_ONLY";
+        case FieldBehavior.INPUT_ONLY:
+            return "INPUT_ONLY";
+        case FieldBehavior.IMMUTABLE:
+            return "IMMUTABLE";
+        case FieldBehavior.UNORDERED_LIST:
+            return "UNORDERED_LIST";
+        case FieldBehavior.NON_EMPTY_DEFAULT:
+            return "NON_EMPTY_DEFAULT";
+        case FieldBehavior.IDENTIFIER:
+            return "IDENTIFIER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
+    }
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
new file mode 100644
index 0000000000000..f0c8aab773e4c
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -0,0 +1,35 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/any.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Any = void 0;
+exports.Any = {
+    fromJSON(object) {
+        return {
+            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.typeUrl !== "") {
+            obj.typeUrl = message.typeUrl;
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
new file mode 100644
index 0000000000000..d6f8ddddf799d
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -0,0 +1,2042 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/descriptor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
+exports.GeneratedCodeInfo_Annotation = void 0;
+exports.editionFromJSON = editionFromJSON;
+exports.editionToJSON = editionToJSON;
+exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
+exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
+exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
+exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
+exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
+exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
+exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
+exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
+exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
+exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
+exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
+exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
+exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
+exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
+exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
+exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
+exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
+exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
+exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
+exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
+exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
+exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
+exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
+exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
+exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
+exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
+exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
+exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
+exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
+exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
+exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
+exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
+exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
+exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
+/* eslint-disable */
+/** The full set of known editions. */
+var Edition;
+(function (Edition) {
+    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
+    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
+    /**
+     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
+     * was first introduced.  This is effectively an "infinite past".
+     */
+    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
+    /**
+     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
+     * distinct editions.  These can't be used to specify the edition of proto
+     * files, but feature definitions must supply proto2/proto3 defaults for
+     * backwards compatibility.
+     */
+    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
+    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
+    /**
+     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
+     * should not be depended on, but they will always be time-ordered for easy
+     * comparison.
+     */
+    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
+    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
+    /**
+     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
+     * used or relied on outside of tests.
+     */
+    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
+    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
+    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
+    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
+    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
+    /**
+     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
+     * ever be used by plugins that can expect to never require any changes to
+     * support a new edition.
+     */
+    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
+})(Edition || (exports.Edition = Edition = {}));
+function editionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "EDITION_UNKNOWN":
+            return Edition.EDITION_UNKNOWN;
+        case 900:
+        case "EDITION_LEGACY":
+            return Edition.EDITION_LEGACY;
+        case 998:
+        case "EDITION_PROTO2":
+            return Edition.EDITION_PROTO2;
+        case 999:
+        case "EDITION_PROTO3":
+            return Edition.EDITION_PROTO3;
+        case 1000:
+        case "EDITION_2023":
+            return Edition.EDITION_2023;
+        case 1001:
+        case "EDITION_2024":
+            return Edition.EDITION_2024;
+        case 1:
+        case "EDITION_1_TEST_ONLY":
+            return Edition.EDITION_1_TEST_ONLY;
+        case 2:
+        case "EDITION_2_TEST_ONLY":
+            return Edition.EDITION_2_TEST_ONLY;
+        case 99997:
+        case "EDITION_99997_TEST_ONLY":
+            return Edition.EDITION_99997_TEST_ONLY;
+        case 99998:
+        case "EDITION_99998_TEST_ONLY":
+            return Edition.EDITION_99998_TEST_ONLY;
+        case 99999:
+        case "EDITION_99999_TEST_ONLY":
+            return Edition.EDITION_99999_TEST_ONLY;
+        case 2147483647:
+        case "EDITION_MAX":
+            return Edition.EDITION_MAX;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+function editionToJSON(object) {
+    switch (object) {
+        case Edition.EDITION_UNKNOWN:
+            return "EDITION_UNKNOWN";
+        case Edition.EDITION_LEGACY:
+            return "EDITION_LEGACY";
+        case Edition.EDITION_PROTO2:
+            return "EDITION_PROTO2";
+        case Edition.EDITION_PROTO3:
+            return "EDITION_PROTO3";
+        case Edition.EDITION_2023:
+            return "EDITION_2023";
+        case Edition.EDITION_2024:
+            return "EDITION_2024";
+        case Edition.EDITION_1_TEST_ONLY:
+            return "EDITION_1_TEST_ONLY";
+        case Edition.EDITION_2_TEST_ONLY:
+            return "EDITION_2_TEST_ONLY";
+        case Edition.EDITION_99997_TEST_ONLY:
+            return "EDITION_99997_TEST_ONLY";
+        case Edition.EDITION_99998_TEST_ONLY:
+            return "EDITION_99998_TEST_ONLY";
+        case Edition.EDITION_99999_TEST_ONLY:
+            return "EDITION_99999_TEST_ONLY";
+        case Edition.EDITION_MAX:
+            return "EDITION_MAX";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
+    }
+}
+/** The verification state of the extension range. */
+var ExtensionRangeOptions_VerificationState;
+(function (ExtensionRangeOptions_VerificationState) {
+    /** DECLARATION - All the extensions of the range must be declared. */
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
+    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
+})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
+function extensionRangeOptions_VerificationStateFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "DECLARATION":
+            return ExtensionRangeOptions_VerificationState.DECLARATION;
+        case 1:
+        case "UNVERIFIED":
+            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+function extensionRangeOptions_VerificationStateToJSON(object) {
+    switch (object) {
+        case ExtensionRangeOptions_VerificationState.DECLARATION:
+            return "DECLARATION";
+        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
+            return "UNVERIFIED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
+    }
+}
+var FieldDescriptorProto_Type;
+(function (FieldDescriptorProto_Type) {
+    /**
+     * TYPE_DOUBLE - 0 is reserved for errors.
+     * Order is weird for historical reasons.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
+    /**
+     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
+    /**
+     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
+     * negative values are likely.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
+    /**
+     * TYPE_GROUP - Tag-delimited aggregate.
+     * Group type is deprecated and not supported after google.protobuf. However, Proto3
+     * implementations should still be able to parse the group wire format and
+     * treat group fields as unknown fields.  In Editions, the group wire format
+     * can be enabled via the `message_encoding` feature.
+     */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
+    /** TYPE_MESSAGE - Length-delimited aggregate. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
+    /** TYPE_BYTES - New in version 2. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
+    /** TYPE_SINT32 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
+    /** TYPE_SINT64 - Uses ZigZag encoding. */
+    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
+})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
+function fieldDescriptorProto_TypeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "TYPE_DOUBLE":
+            return FieldDescriptorProto_Type.TYPE_DOUBLE;
+        case 2:
+        case "TYPE_FLOAT":
+            return FieldDescriptorProto_Type.TYPE_FLOAT;
+        case 3:
+        case "TYPE_INT64":
+            return FieldDescriptorProto_Type.TYPE_INT64;
+        case 4:
+        case "TYPE_UINT64":
+            return FieldDescriptorProto_Type.TYPE_UINT64;
+        case 5:
+        case "TYPE_INT32":
+            return FieldDescriptorProto_Type.TYPE_INT32;
+        case 6:
+        case "TYPE_FIXED64":
+            return FieldDescriptorProto_Type.TYPE_FIXED64;
+        case 7:
+        case "TYPE_FIXED32":
+            return FieldDescriptorProto_Type.TYPE_FIXED32;
+        case 8:
+        case "TYPE_BOOL":
+            return FieldDescriptorProto_Type.TYPE_BOOL;
+        case 9:
+        case "TYPE_STRING":
+            return FieldDescriptorProto_Type.TYPE_STRING;
+        case 10:
+        case "TYPE_GROUP":
+            return FieldDescriptorProto_Type.TYPE_GROUP;
+        case 11:
+        case "TYPE_MESSAGE":
+            return FieldDescriptorProto_Type.TYPE_MESSAGE;
+        case 12:
+        case "TYPE_BYTES":
+            return FieldDescriptorProto_Type.TYPE_BYTES;
+        case 13:
+        case "TYPE_UINT32":
+            return FieldDescriptorProto_Type.TYPE_UINT32;
+        case 14:
+        case "TYPE_ENUM":
+            return FieldDescriptorProto_Type.TYPE_ENUM;
+        case 15:
+        case "TYPE_SFIXED32":
+            return FieldDescriptorProto_Type.TYPE_SFIXED32;
+        case 16:
+        case "TYPE_SFIXED64":
+            return FieldDescriptorProto_Type.TYPE_SFIXED64;
+        case 17:
+        case "TYPE_SINT32":
+            return FieldDescriptorProto_Type.TYPE_SINT32;
+        case 18:
+        case "TYPE_SINT64":
+            return FieldDescriptorProto_Type.TYPE_SINT64;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+function fieldDescriptorProto_TypeToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Type.TYPE_DOUBLE:
+            return "TYPE_DOUBLE";
+        case FieldDescriptorProto_Type.TYPE_FLOAT:
+            return "TYPE_FLOAT";
+        case FieldDescriptorProto_Type.TYPE_INT64:
+            return "TYPE_INT64";
+        case FieldDescriptorProto_Type.TYPE_UINT64:
+            return "TYPE_UINT64";
+        case FieldDescriptorProto_Type.TYPE_INT32:
+            return "TYPE_INT32";
+        case FieldDescriptorProto_Type.TYPE_FIXED64:
+            return "TYPE_FIXED64";
+        case FieldDescriptorProto_Type.TYPE_FIXED32:
+            return "TYPE_FIXED32";
+        case FieldDescriptorProto_Type.TYPE_BOOL:
+            return "TYPE_BOOL";
+        case FieldDescriptorProto_Type.TYPE_STRING:
+            return "TYPE_STRING";
+        case FieldDescriptorProto_Type.TYPE_GROUP:
+            return "TYPE_GROUP";
+        case FieldDescriptorProto_Type.TYPE_MESSAGE:
+            return "TYPE_MESSAGE";
+        case FieldDescriptorProto_Type.TYPE_BYTES:
+            return "TYPE_BYTES";
+        case FieldDescriptorProto_Type.TYPE_UINT32:
+            return "TYPE_UINT32";
+        case FieldDescriptorProto_Type.TYPE_ENUM:
+            return "TYPE_ENUM";
+        case FieldDescriptorProto_Type.TYPE_SFIXED32:
+            return "TYPE_SFIXED32";
+        case FieldDescriptorProto_Type.TYPE_SFIXED64:
+            return "TYPE_SFIXED64";
+        case FieldDescriptorProto_Type.TYPE_SINT32:
+            return "TYPE_SINT32";
+        case FieldDescriptorProto_Type.TYPE_SINT64:
+            return "TYPE_SINT64";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
+    }
+}
+var FieldDescriptorProto_Label;
+(function (FieldDescriptorProto_Label) {
+    /** LABEL_OPTIONAL - 0 is reserved for errors */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
+    /**
+     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
+     * it's explicitly prohibited.  In Editions, the `field_presence` feature
+     * can be used to get this behavior.
+     */
+    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
+})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
+function fieldDescriptorProto_LabelFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "LABEL_OPTIONAL":
+            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
+        case 3:
+        case "LABEL_REPEATED":
+            return FieldDescriptorProto_Label.LABEL_REPEATED;
+        case 2:
+        case "LABEL_REQUIRED":
+            return FieldDescriptorProto_Label.LABEL_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+function fieldDescriptorProto_LabelToJSON(object) {
+    switch (object) {
+        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
+            return "LABEL_OPTIONAL";
+        case FieldDescriptorProto_Label.LABEL_REPEATED:
+            return "LABEL_REPEATED";
+        case FieldDescriptorProto_Label.LABEL_REQUIRED:
+            return "LABEL_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
+    }
+}
+/** Generated classes can be optimized for speed or code size. */
+var FileOptions_OptimizeMode;
+(function (FileOptions_OptimizeMode) {
+    /** SPEED - Generate complete code for parsing, serialization, */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
+    /** CODE_SIZE - etc. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
+    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
+    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
+})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
+function fileOptions_OptimizeModeFromJSON(object) {
+    switch (object) {
+        case 1:
+        case "SPEED":
+            return FileOptions_OptimizeMode.SPEED;
+        case 2:
+        case "CODE_SIZE":
+            return FileOptions_OptimizeMode.CODE_SIZE;
+        case 3:
+        case "LITE_RUNTIME":
+            return FileOptions_OptimizeMode.LITE_RUNTIME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+function fileOptions_OptimizeModeToJSON(object) {
+    switch (object) {
+        case FileOptions_OptimizeMode.SPEED:
+            return "SPEED";
+        case FileOptions_OptimizeMode.CODE_SIZE:
+            return "CODE_SIZE";
+        case FileOptions_OptimizeMode.LITE_RUNTIME:
+            return "LITE_RUNTIME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
+    }
+}
+var FieldOptions_CType;
+(function (FieldOptions_CType) {
+    /** STRING - Default mode. */
+    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
+    /**
+     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
+     * "bytes". It indicates that in C++, the data should be stored in a Cord
+     * instead of a string.  For very large strings, this may reduce memory
+     * fragmentation. It may also allow better performance when parsing from a
+     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
+     * alias the original buffer.
+     */
+    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
+    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
+})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
+function fieldOptions_CTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "STRING":
+            return FieldOptions_CType.STRING;
+        case 1:
+        case "CORD":
+            return FieldOptions_CType.CORD;
+        case 2:
+        case "STRING_PIECE":
+            return FieldOptions_CType.STRING_PIECE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+function fieldOptions_CTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_CType.STRING:
+            return "STRING";
+        case FieldOptions_CType.CORD:
+            return "CORD";
+        case FieldOptions_CType.STRING_PIECE:
+            return "STRING_PIECE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
+    }
+}
+var FieldOptions_JSType;
+(function (FieldOptions_JSType) {
+    /** JS_NORMAL - Use the default type. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
+    /** JS_STRING - Use JavaScript strings. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
+    /** JS_NUMBER - Use JavaScript numbers. */
+    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
+})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
+function fieldOptions_JSTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JS_NORMAL":
+            return FieldOptions_JSType.JS_NORMAL;
+        case 1:
+        case "JS_STRING":
+            return FieldOptions_JSType.JS_STRING;
+        case 2:
+        case "JS_NUMBER":
+            return FieldOptions_JSType.JS_NUMBER;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+function fieldOptions_JSTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_JSType.JS_NORMAL:
+            return "JS_NORMAL";
+        case FieldOptions_JSType.JS_STRING:
+            return "JS_STRING";
+        case FieldOptions_JSType.JS_NUMBER:
+            return "JS_NUMBER";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
+    }
+}
+/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
+var FieldOptions_OptionRetention;
+(function (FieldOptions_OptionRetention) {
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
+    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
+})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
+function fieldOptions_OptionRetentionFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "RETENTION_UNKNOWN":
+            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
+        case 1:
+        case "RETENTION_RUNTIME":
+            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
+        case 2:
+        case "RETENTION_SOURCE":
+            return FieldOptions_OptionRetention.RETENTION_SOURCE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+function fieldOptions_OptionRetentionToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
+            return "RETENTION_UNKNOWN";
+        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
+            return "RETENTION_RUNTIME";
+        case FieldOptions_OptionRetention.RETENTION_SOURCE:
+            return "RETENTION_SOURCE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
+    }
+}
+/**
+ * This indicates the types of entities that the field may apply to when used
+ * as an option. If it is unset, then the field may be freely used as an
+ * option on any kind of entity.
+ */
+var FieldOptions_OptionTargetType;
+(function (FieldOptions_OptionTargetType) {
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
+    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
+})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
+function fieldOptions_OptionTargetTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "TARGET_TYPE_UNKNOWN":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
+        case 1:
+        case "TARGET_TYPE_FILE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
+        case 2:
+        case "TARGET_TYPE_EXTENSION_RANGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
+        case 3:
+        case "TARGET_TYPE_MESSAGE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
+        case 4:
+        case "TARGET_TYPE_FIELD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
+        case 5:
+        case "TARGET_TYPE_ONEOF":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
+        case 6:
+        case "TARGET_TYPE_ENUM":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
+        case 7:
+        case "TARGET_TYPE_ENUM_ENTRY":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
+        case 8:
+        case "TARGET_TYPE_SERVICE":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
+        case 9:
+        case "TARGET_TYPE_METHOD":
+            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+function fieldOptions_OptionTargetTypeToJSON(object) {
+    switch (object) {
+        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
+            return "TARGET_TYPE_UNKNOWN";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
+            return "TARGET_TYPE_FILE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
+            return "TARGET_TYPE_EXTENSION_RANGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
+            return "TARGET_TYPE_MESSAGE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
+            return "TARGET_TYPE_FIELD";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
+            return "TARGET_TYPE_ONEOF";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
+            return "TARGET_TYPE_ENUM";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
+            return "TARGET_TYPE_ENUM_ENTRY";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
+            return "TARGET_TYPE_SERVICE";
+        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
+            return "TARGET_TYPE_METHOD";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
+    }
+}
+/**
+ * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
+ * or neither? HTTP based RPC implementation may choose GET verb for safe
+ * methods, and PUT verb for idempotent methods instead of the default POST.
+ */
+var MethodOptions_IdempotencyLevel;
+(function (MethodOptions_IdempotencyLevel) {
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
+    /** NO_SIDE_EFFECTS - implies idempotent */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
+    /** IDEMPOTENT - idempotent, but may have side effects */
+    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
+})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
+function methodOptions_IdempotencyLevelFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "IDEMPOTENCY_UNKNOWN":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
+        case 1:
+        case "NO_SIDE_EFFECTS":
+            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
+        case 2:
+        case "IDEMPOTENT":
+            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+function methodOptions_IdempotencyLevelToJSON(object) {
+    switch (object) {
+        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
+            return "IDEMPOTENCY_UNKNOWN";
+        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
+            return "NO_SIDE_EFFECTS";
+        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
+            return "IDEMPOTENT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
+    }
+}
+var FeatureSet_FieldPresence;
+(function (FeatureSet_FieldPresence) {
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
+    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
+})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
+function featureSet_FieldPresenceFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "FIELD_PRESENCE_UNKNOWN":
+            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
+        case 1:
+        case "EXPLICIT":
+            return FeatureSet_FieldPresence.EXPLICIT;
+        case 2:
+        case "IMPLICIT":
+            return FeatureSet_FieldPresence.IMPLICIT;
+        case 3:
+        case "LEGACY_REQUIRED":
+            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+function featureSet_FieldPresenceToJSON(object) {
+    switch (object) {
+        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
+            return "FIELD_PRESENCE_UNKNOWN";
+        case FeatureSet_FieldPresence.EXPLICIT:
+            return "EXPLICIT";
+        case FeatureSet_FieldPresence.IMPLICIT:
+            return "IMPLICIT";
+        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
+            return "LEGACY_REQUIRED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
+    }
+}
+var FeatureSet_EnumType;
+(function (FeatureSet_EnumType) {
+    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
+    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
+    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
+})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
+function featureSet_EnumTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENUM_TYPE_UNKNOWN":
+            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
+        case 1:
+        case "OPEN":
+            return FeatureSet_EnumType.OPEN;
+        case 2:
+        case "CLOSED":
+            return FeatureSet_EnumType.CLOSED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+function featureSet_EnumTypeToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
+            return "ENUM_TYPE_UNKNOWN";
+        case FeatureSet_EnumType.OPEN:
+            return "OPEN";
+        case FeatureSet_EnumType.CLOSED:
+            return "CLOSED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
+    }
+}
+var FeatureSet_RepeatedFieldEncoding;
+(function (FeatureSet_RepeatedFieldEncoding) {
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
+    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
+})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
+function featureSet_RepeatedFieldEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "REPEATED_FIELD_ENCODING_UNKNOWN":
+            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
+        case 1:
+        case "PACKED":
+            return FeatureSet_RepeatedFieldEncoding.PACKED;
+        case 2:
+        case "EXPANDED":
+            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+function featureSet_RepeatedFieldEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
+            return "REPEATED_FIELD_ENCODING_UNKNOWN";
+        case FeatureSet_RepeatedFieldEncoding.PACKED:
+            return "PACKED";
+        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
+            return "EXPANDED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
+    }
+}
+var FeatureSet_Utf8Validation;
+(function (FeatureSet_Utf8Validation) {
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
+    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
+})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
+function featureSet_Utf8ValidationFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "UTF8_VALIDATION_UNKNOWN":
+            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
+        case 2:
+        case "VERIFY":
+            return FeatureSet_Utf8Validation.VERIFY;
+        case 3:
+        case "NONE":
+            return FeatureSet_Utf8Validation.NONE;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+function featureSet_Utf8ValidationToJSON(object) {
+    switch (object) {
+        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
+            return "UTF8_VALIDATION_UNKNOWN";
+        case FeatureSet_Utf8Validation.VERIFY:
+            return "VERIFY";
+        case FeatureSet_Utf8Validation.NONE:
+            return "NONE";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
+    }
+}
+var FeatureSet_MessageEncoding;
+(function (FeatureSet_MessageEncoding) {
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
+    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
+})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
+function featureSet_MessageEncodingFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "MESSAGE_ENCODING_UNKNOWN":
+            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
+        case 1:
+        case "LENGTH_PREFIXED":
+            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
+        case 2:
+        case "DELIMITED":
+            return FeatureSet_MessageEncoding.DELIMITED;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+function featureSet_MessageEncodingToJSON(object) {
+    switch (object) {
+        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
+            return "MESSAGE_ENCODING_UNKNOWN";
+        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
+            return "LENGTH_PREFIXED";
+        case FeatureSet_MessageEncoding.DELIMITED:
+            return "DELIMITED";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
+    }
+}
+var FeatureSet_JsonFormat;
+(function (FeatureSet_JsonFormat) {
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
+    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
+})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
+function featureSet_JsonFormatFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "JSON_FORMAT_UNKNOWN":
+            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
+        case 1:
+        case "ALLOW":
+            return FeatureSet_JsonFormat.ALLOW;
+        case 2:
+        case "LEGACY_BEST_EFFORT":
+            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+function featureSet_JsonFormatToJSON(object) {
+    switch (object) {
+        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
+            return "JSON_FORMAT_UNKNOWN";
+        case FeatureSet_JsonFormat.ALLOW:
+            return "ALLOW";
+        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
+            return "LEGACY_BEST_EFFORT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
+    }
+}
+var FeatureSet_EnforceNamingStyle;
+(function (FeatureSet_EnforceNamingStyle) {
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
+    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
+})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
+function featureSet_EnforceNamingStyleFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "ENFORCE_NAMING_STYLE_UNKNOWN":
+            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
+        case 1:
+        case "STYLE2024":
+            return FeatureSet_EnforceNamingStyle.STYLE2024;
+        case 2:
+        case "STYLE_LEGACY":
+            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+function featureSet_EnforceNamingStyleToJSON(object) {
+    switch (object) {
+        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
+            return "ENFORCE_NAMING_STYLE_UNKNOWN";
+        case FeatureSet_EnforceNamingStyle.STYLE2024:
+            return "STYLE2024";
+        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
+            return "STYLE_LEGACY";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
+    }
+}
+/**
+ * Represents the identified object's effect on the element in the original
+ * .proto file.
+ */
+var GeneratedCodeInfo_Annotation_Semantic;
+(function (GeneratedCodeInfo_Annotation_Semantic) {
+    /** NONE - There is no effect or the effect is indescribable. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
+    /** SET - The element is set or otherwise mutated. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
+    /** ALIAS - An alias to the element is returned. */
+    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
+})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
+function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "NONE":
+            return GeneratedCodeInfo_Annotation_Semantic.NONE;
+        case 1:
+        case "SET":
+            return GeneratedCodeInfo_Annotation_Semantic.SET;
+        case 2:
+        case "ALIAS":
+            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+function generatedCodeInfo_Annotation_SemanticToJSON(object) {
+    switch (object) {
+        case GeneratedCodeInfo_Annotation_Semantic.NONE:
+            return "NONE";
+        case GeneratedCodeInfo_Annotation_Semantic.SET:
+            return "SET";
+        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
+            return "ALIAS";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
+    }
+}
+exports.FileDescriptorSet = {
+    fromJSON(object) {
+        return {
+            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.file?.length) {
+            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FileDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            package: isSet(object.package) ? globalThis.String(object.package) : "",
+            dependency: globalThis.Array.isArray(object?.dependency)
+                ? object.dependency.map((e) => globalThis.String(e))
+                : [],
+            publicDependency: globalThis.Array.isArray(object?.publicDependency)
+                ? object.publicDependency.map((e) => globalThis.Number(e))
+                : [],
+            weakDependency: globalThis.Array.isArray(object?.weakDependency)
+                ? object.weakDependency.map((e) => globalThis.Number(e))
+                : [],
+            messageType: globalThis.Array.isArray(object?.messageType)
+                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            service: globalThis.Array.isArray(object?.service)
+                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
+            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
+            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.package !== undefined && message.package !== "") {
+            obj.package = message.package;
+        }
+        if (message.dependency?.length) {
+            obj.dependency = message.dependency;
+        }
+        if (message.publicDependency?.length) {
+            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
+        }
+        if (message.weakDependency?.length) {
+            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
+        }
+        if (message.messageType?.length) {
+            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.service?.length) {
+            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FileOptions.toJSON(message.options);
+        }
+        if (message.sourceCodeInfo !== undefined) {
+            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
+        }
+        if (message.syntax !== undefined && message.syntax !== "") {
+            obj.syntax = message.syntax;
+        }
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            field: globalThis.Array.isArray(object?.field)
+                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            extension: globalThis.Array.isArray(object?.extension)
+                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
+                : [],
+            nestedType: globalThis.Array.isArray(object?.nestedType)
+                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
+                : [],
+            enumType: globalThis.Array.isArray(object?.enumType)
+                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
+                : [],
+            extensionRange: globalThis.Array.isArray(object?.extensionRange)
+                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
+                : [],
+            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
+                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.field?.length) {
+            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.extension?.length) {
+            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
+        }
+        if (message.nestedType?.length) {
+            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
+        }
+        if (message.enumType?.length) {
+            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
+        }
+        if (message.extensionRange?.length) {
+            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
+        }
+        if (message.oneofDecl?.length) {
+            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MessageOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ExtensionRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.DescriptorProto_ReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions = {
+    fromJSON(object) {
+        return {
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+            declaration: globalThis.Array.isArray(object?.declaration)
+                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            verification: isSet(object.verification)
+                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
+                : 1,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        if (message.declaration?.length) {
+            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.verification !== undefined && message.verification !== 1) {
+            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
+        }
+        return obj;
+    },
+};
+exports.ExtensionRangeOptions_Declaration = {
+    fromJSON(object) {
+        return {
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
+            type: isSet(object.type) ? globalThis.String(object.type) : "",
+            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
+            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.fullName !== undefined && message.fullName !== "") {
+            obj.fullName = message.fullName;
+        }
+        if (message.type !== undefined && message.type !== "") {
+            obj.type = message.type;
+        }
+        if (message.reserved !== undefined && message.reserved !== false) {
+            obj.reserved = message.reserved;
+        }
+        if (message.repeated !== undefined && message.repeated !== false) {
+            obj.repeated = message.repeated;
+        }
+        return obj;
+    },
+};
+exports.FieldDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
+            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
+            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
+            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
+            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
+            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
+            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
+            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
+            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.label !== undefined && message.label !== 1) {
+            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
+        }
+        if (message.type !== undefined && message.type !== 1) {
+            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
+        }
+        if (message.typeName !== undefined && message.typeName !== "") {
+            obj.typeName = message.typeName;
+        }
+        if (message.extendee !== undefined && message.extendee !== "") {
+            obj.extendee = message.extendee;
+        }
+        if (message.defaultValue !== undefined && message.defaultValue !== "") {
+            obj.defaultValue = message.defaultValue;
+        }
+        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
+            obj.oneofIndex = Math.round(message.oneofIndex);
+        }
+        if (message.jsonName !== undefined && message.jsonName !== "") {
+            obj.jsonName = message.jsonName;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.FieldOptions.toJSON(message.options);
+        }
+        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
+            obj.proto3Optional = message.proto3Optional;
+        }
+        return obj;
+    },
+};
+exports.OneofDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.OneofOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            value: globalThis.Array.isArray(object?.value)
+                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
+            reservedRange: globalThis.Array.isArray(object?.reservedRange)
+                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
+                : [],
+            reservedName: globalThis.Array.isArray(object?.reservedName)
+                ? object.reservedName.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.value?.length) {
+            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumOptions.toJSON(message.options);
+        }
+        if (message.reservedRange?.length) {
+            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
+        }
+        if (message.reservedName?.length) {
+            obj.reservedName = message.reservedName;
+        }
+        return obj;
+    },
+};
+exports.EnumDescriptorProto_EnumReservedRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined && message.start !== 0) {
+            obj.start = Math.round(message.start);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        return obj;
+    },
+};
+exports.EnumValueDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
+            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.number !== undefined && message.number !== 0) {
+            obj.number = Math.round(message.number);
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.EnumValueOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.ServiceDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            method: globalThis.Array.isArray(object?.method)
+                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
+                : [],
+            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.method?.length) {
+            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.ServiceOptions.toJSON(message.options);
+        }
+        return obj;
+    },
+};
+exports.MethodDescriptorProto = {
+    fromJSON(object) {
+        return {
+            name: isSet(object.name) ? globalThis.String(object.name) : "",
+            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
+            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
+            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
+            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
+            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name !== undefined && message.name !== "") {
+            obj.name = message.name;
+        }
+        if (message.inputType !== undefined && message.inputType !== "") {
+            obj.inputType = message.inputType;
+        }
+        if (message.outputType !== undefined && message.outputType !== "") {
+            obj.outputType = message.outputType;
+        }
+        if (message.options !== undefined) {
+            obj.options = exports.MethodOptions.toJSON(message.options);
+        }
+        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
+            obj.clientStreaming = message.clientStreaming;
+        }
+        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
+            obj.serverStreaming = message.serverStreaming;
+        }
+        return obj;
+    },
+};
+exports.FileOptions = {
+    fromJSON(object) {
+        return {
+            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
+            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
+            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
+            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
+                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
+                : false,
+            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
+            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
+            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
+            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
+            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
+            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
+            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
+            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
+            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
+            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
+            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
+            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
+            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.javaPackage !== undefined && message.javaPackage !== "") {
+            obj.javaPackage = message.javaPackage;
+        }
+        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
+            obj.javaOuterClassname = message.javaOuterClassname;
+        }
+        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
+            obj.javaMultipleFiles = message.javaMultipleFiles;
+        }
+        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
+            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
+        }
+        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
+            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
+        }
+        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
+            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
+        }
+        if (message.goPackage !== undefined && message.goPackage !== "") {
+            obj.goPackage = message.goPackage;
+        }
+        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
+            obj.ccGenericServices = message.ccGenericServices;
+        }
+        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
+            obj.javaGenericServices = message.javaGenericServices;
+        }
+        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
+            obj.pyGenericServices = message.pyGenericServices;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
+            obj.ccEnableArenas = message.ccEnableArenas;
+        }
+        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
+            obj.objcClassPrefix = message.objcClassPrefix;
+        }
+        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
+            obj.csharpNamespace = message.csharpNamespace;
+        }
+        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
+            obj.swiftPrefix = message.swiftPrefix;
+        }
+        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
+            obj.phpClassPrefix = message.phpClassPrefix;
+        }
+        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
+            obj.phpNamespace = message.phpNamespace;
+        }
+        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
+            obj.phpMetadataNamespace = message.phpMetadataNamespace;
+        }
+        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
+            obj.rubyPackage = message.rubyPackage;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MessageOptions = {
+    fromJSON(object) {
+        return {
+            messageSetWireFormat: isSet(object.messageSetWireFormat)
+                ? globalThis.Boolean(object.messageSetWireFormat)
+                : false,
+            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
+                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
+                : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
+            obj.messageSetWireFormat = message.messageSetWireFormat;
+        }
+        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
+            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.mapEntry !== undefined && message.mapEntry !== false) {
+            obj.mapEntry = message.mapEntry;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions = {
+    fromJSON(object) {
+        return {
+            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
+            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
+            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
+            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
+            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
+            targets: globalThis.Array.isArray(object?.targets)
+                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
+                : [],
+            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
+                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
+                : [],
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.ctype !== undefined && message.ctype !== 0) {
+            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
+        }
+        if (message.packed !== undefined && message.packed !== false) {
+            obj.packed = message.packed;
+        }
+        if (message.jstype !== undefined && message.jstype !== 0) {
+            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
+        }
+        if (message.lazy !== undefined && message.lazy !== false) {
+            obj.lazy = message.lazy;
+        }
+        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
+            obj.unverifiedLazy = message.unverifiedLazy;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.weak !== undefined && message.weak !== false) {
+            obj.weak = message.weak;
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.retention !== undefined && message.retention !== 0) {
+            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
+        }
+        if (message.targets?.length) {
+            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
+        }
+        if (message.editionDefaults?.length) {
+            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_EditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            value: isSet(object.value) ? globalThis.String(object.value) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.value !== undefined && message.value !== "") {
+            obj.value = message.value;
+        }
+        return obj;
+    },
+};
+exports.FieldOptions_FeatureSupport = {
+    fromJSON(object) {
+        return {
+            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
+            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
+            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
+            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
+            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
+        }
+        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
+            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
+        }
+        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
+            obj.deprecationWarning = message.deprecationWarning;
+        }
+        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
+            obj.editionRemoved = editionToJSON(message.editionRemoved);
+        }
+        return obj;
+    },
+};
+exports.OneofOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumOptions = {
+    fromJSON(object) {
+        return {
+            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
+                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
+                : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.allowAlias !== undefined && message.allowAlias !== false) {
+            obj.allowAlias = message.allowAlias;
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
+            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.EnumValueOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
+            featureSupport: isSet(object.featureSupport)
+                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
+                : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.debugRedact !== undefined && message.debugRedact !== false) {
+            obj.debugRedact = message.debugRedact;
+        }
+        if (message.featureSupport !== undefined) {
+            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ServiceOptions = {
+    fromJSON(object) {
+        return {
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.MethodOptions = {
+    fromJSON(object) {
+        return {
+            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
+            idempotencyLevel: isSet(object.idempotencyLevel)
+                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
+                : 0,
+            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
+            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
+                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.deprecated !== undefined && message.deprecated !== false) {
+            obj.deprecated = message.deprecated;
+        }
+        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
+            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
+        }
+        if (message.features !== undefined) {
+            obj.features = exports.FeatureSet.toJSON(message.features);
+        }
+        if (message.uninterpretedOption?.length) {
+            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption = {
+    fromJSON(object) {
+        return {
+            name: globalThis.Array.isArray(object?.name)
+                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
+                : [],
+            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
+            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
+            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
+            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
+            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
+            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.name?.length) {
+            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
+        }
+        if (message.identifierValue !== undefined && message.identifierValue !== "") {
+            obj.identifierValue = message.identifierValue;
+        }
+        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
+            obj.positiveIntValue = message.positiveIntValue;
+        }
+        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
+            obj.negativeIntValue = message.negativeIntValue;
+        }
+        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
+            obj.doubleValue = message.doubleValue;
+        }
+        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
+            obj.stringValue = base64FromBytes(message.stringValue);
+        }
+        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
+            obj.aggregateValue = message.aggregateValue;
+        }
+        return obj;
+    },
+};
+exports.UninterpretedOption_NamePart = {
+    fromJSON(object) {
+        return {
+            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
+            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.namePart !== "") {
+            obj.namePart = message.namePart;
+        }
+        if (message.isExtension !== false) {
+            obj.isExtension = message.isExtension;
+        }
+        return obj;
+    },
+};
+exports.FeatureSet = {
+    fromJSON(object) {
+        return {
+            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
+            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
+            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
+                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
+                : 0,
+            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
+            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
+            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
+            enforceNamingStyle: isSet(object.enforceNamingStyle)
+                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
+                : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
+            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
+        }
+        if (message.enumType !== undefined && message.enumType !== 0) {
+            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
+        }
+        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
+            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
+        }
+        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
+            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
+        }
+        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
+            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
+        }
+        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
+            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
+        }
+        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
+            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults = {
+    fromJSON(object) {
+        return {
+            defaults: globalThis.Array.isArray(object?.defaults)
+                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
+                : [],
+            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
+            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.defaults?.length) {
+            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
+        }
+        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
+            obj.minimumEdition = editionToJSON(message.minimumEdition);
+        }
+        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
+            obj.maximumEdition = editionToJSON(message.maximumEdition);
+        }
+        return obj;
+    },
+};
+exports.FeatureSetDefaults_FeatureSetEditionDefault = {
+    fromJSON(object) {
+        return {
+            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
+            overridableFeatures: isSet(object.overridableFeatures)
+                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
+                : undefined,
+            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.edition !== undefined && message.edition !== 0) {
+            obj.edition = editionToJSON(message.edition);
+        }
+        if (message.overridableFeatures !== undefined) {
+            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
+        }
+        if (message.fixedFeatures !== undefined) {
+            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo = {
+    fromJSON(object) {
+        return {
+            location: globalThis.Array.isArray(object?.location)
+                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.location?.length) {
+            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SourceCodeInfo_Location = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
+            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
+            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
+            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
+                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.span?.length) {
+            obj.span = message.span.map((e) => Math.round(e));
+        }
+        if (message.leadingComments !== undefined && message.leadingComments !== "") {
+            obj.leadingComments = message.leadingComments;
+        }
+        if (message.trailingComments !== undefined && message.trailingComments !== "") {
+            obj.trailingComments = message.trailingComments;
+        }
+        if (message.leadingDetachedComments?.length) {
+            obj.leadingDetachedComments = message.leadingDetachedComments;
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo = {
+    fromJSON(object) {
+        return {
+            annotation: globalThis.Array.isArray(object?.annotation)
+                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.annotation?.length) {
+            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.GeneratedCodeInfo_Annotation = {
+    fromJSON(object) {
+        return {
+            path: globalThis.Array.isArray(object?.path)
+                ? object.path.map((e) => globalThis.Number(e))
+                : [],
+            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
+            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
+            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
+            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.path?.length) {
+            obj.path = message.path.map((e) => Math.round(e));
+        }
+        if (message.sourceFile !== undefined && message.sourceFile !== "") {
+            obj.sourceFile = message.sourceFile;
+        }
+        if (message.begin !== undefined && message.begin !== 0) {
+            obj.begin = Math.round(message.begin);
+        }
+        if (message.end !== undefined && message.end !== 0) {
+            obj.end = Math.round(message.end);
+        }
+        if (message.semantic !== undefined && message.semantic !== 0) {
+            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
new file mode 100644
index 0000000000000..9d24cbba10de9
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -0,0 +1,29 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: google/protobuf/timestamp.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Timestamp = void 0;
+exports.Timestamp = {
+    fromJSON(object) {
+        return {
+            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
+            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.seconds !== "0") {
+            obj.seconds = message.seconds;
+        }
+        if (message.nanos !== 0) {
+            obj.nanos = Math.round(message.nanos);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
new file mode 100644
index 0000000000000..abc766bed3b88
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
@@ -0,0 +1,55 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/dsse.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
+/* eslint-disable */
+const envelope_1 = require("../../envelope");
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.DSSERequestV002 = {
+    fromJSON(object) {
+        return {
+            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
+            verifiers: globalThis.Array.isArray(object?.verifiers)
+                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== undefined) {
+            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
+        }
+        if (message.verifiers?.length) {
+            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.DSSELogEntryV002 = {
+    fromJSON(object) {
+        return {
+            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
+            signatures: globalThis.Array.isArray(object?.signatures)
+                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.payloadHash !== undefined) {
+            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
+        }
+        if (message.signatures?.length) {
+            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
new file mode 100644
index 0000000000000..c5eccb10e0a68
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
@@ -0,0 +1,81 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/entry.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
+/* eslint-disable */
+const dsse_1 = require("./dsse");
+const hashedrekord_1 = require("./hashedrekord");
+exports.Entry = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
+            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.apiVersion !== "") {
+            obj.apiVersion = message.apiVersion;
+        }
+        if (message.spec !== undefined) {
+            obj.spec = exports.Spec.toJSON(message.spec);
+        }
+        return obj;
+    },
+};
+exports.Spec = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordV002)
+                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
+                : isSet(object.dsseV002)
+                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordV002") {
+            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
+        }
+        else if (message.spec?.$case === "dsseV002") {
+            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
+        }
+        return obj;
+    },
+};
+exports.CreateEntryRequest = {
+    fromJSON(object) {
+        return {
+            spec: isSet(object.hashedRekordRequestV002)
+                ? {
+                    $case: "hashedRekordRequestV002",
+                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
+                }
+                : isSet(object.dsseRequestV002)
+                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.spec?.$case === "hashedRekordRequestV002") {
+            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
+        }
+        else if (message.spec?.$case === "dsseRequestV002") {
+            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
new file mode 100644
index 0000000000000..d3fd1af2483d1
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
@@ -0,0 +1,56 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/hashedrekord.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+const verifier_1 = require("./verifier");
+exports.HashedRekordRequestV002 = {
+    fromJSON(object) {
+        return {
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+exports.HashedRekordLogEntryV002 = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
+            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data !== undefined) {
+            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
+        }
+        if (message.signature !== undefined) {
+            obj.signature = verifier_1.Signature.toJSON(message.signature);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
new file mode 100644
index 0000000000000..c437d5053a3cb
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
@@ -0,0 +1,74 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: rekor/v2/verifier.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Signature = exports.Verifier = exports.PublicKey = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("../../sigstore_common");
+exports.PublicKey = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.Verifier = {
+    fromJSON(object) {
+        return {
+            verifier: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
+                : isSet(object.x509Certificate)
+                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
+                    : undefined,
+            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.verifier?.$case === "publicKey") {
+            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
+        }
+        else if (message.verifier?.$case === "x509Certificate") {
+            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
+        }
+        return obj;
+    },
+};
+exports.Signature = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
+            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content.length !== 0) {
+            obj.content = base64FromBytes(message.content);
+        }
+        if (message.verifier !== undefined) {
+            obj.verifier = exports.Verifier.toJSON(message.verifier);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
new file mode 100644
index 0000000000000..aed636f00e7cf
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -0,0 +1,103 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_bundle.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
+/* eslint-disable */
+const envelope_1 = require("./envelope");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_rekor_1 = require("./sigstore_rekor");
+exports.TimestampVerificationData = {
+    fromJSON(object) {
+        return {
+            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
+                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rfc3161Timestamps?.length) {
+            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.VerificationMaterial = {
+    fromJSON(object) {
+        return {
+            content: isSet(object.publicKey)
+                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
+                : isSet(object.x509CertificateChain)
+                    ? {
+                        $case: "x509CertificateChain",
+                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
+                    }
+                    : isSet(object.certificate)
+                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
+                        : undefined,
+            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
+                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
+                : [],
+            timestampVerificationData: isSet(object.timestampVerificationData)
+                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.content?.$case === "publicKey") {
+            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
+        }
+        else if (message.content?.$case === "x509CertificateChain") {
+            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
+        }
+        else if (message.content?.$case === "certificate") {
+            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
+        }
+        if (message.tlogEntries?.length) {
+            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
+        }
+        if (message.timestampVerificationData !== undefined) {
+            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
+        }
+        return obj;
+    },
+};
+exports.Bundle = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            verificationMaterial: isSet(object.verificationMaterial)
+                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
+                : undefined,
+            content: isSet(object.messageSignature)
+                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
+                : isSet(object.dsseEnvelope)
+                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.verificationMaterial !== undefined) {
+            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
+        }
+        if (message.content?.$case === "messageSignature") {
+            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
+        }
+        else if (message.content?.$case === "dsseEnvelope") {
+            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
new file mode 100644
index 0000000000000..b900516ed3b55
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -0,0 +1,596 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_common.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
+exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
+exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
+exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
+exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
+exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
+exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
+/* eslint-disable */
+const timestamp_1 = require("./google/protobuf/timestamp");
+/**
+ * Only a subset of the secure hash standard algorithms are supported.
+ * See  for more
+ * details.
+ * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
+ * any proto JSON serialization to emit the used hash algorithm, as default
+ * option is to *omit* the default value of an enum (which is the first
+ * value, represented by '0').
+ */
+var HashAlgorithm;
+(function (HashAlgorithm) {
+    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
+    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
+    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
+    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
+    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
+    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
+})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
+function hashAlgorithmFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "HASH_ALGORITHM_UNSPECIFIED":
+            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
+        case 1:
+        case "SHA2_256":
+            return HashAlgorithm.SHA2_256;
+        case 2:
+        case "SHA2_384":
+            return HashAlgorithm.SHA2_384;
+        case 3:
+        case "SHA2_512":
+            return HashAlgorithm.SHA2_512;
+        case 4:
+        case "SHA3_256":
+            return HashAlgorithm.SHA3_256;
+        case 5:
+        case "SHA3_384":
+            return HashAlgorithm.SHA3_384;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+function hashAlgorithmToJSON(object) {
+    switch (object) {
+        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
+            return "HASH_ALGORITHM_UNSPECIFIED";
+        case HashAlgorithm.SHA2_256:
+            return "SHA2_256";
+        case HashAlgorithm.SHA2_384:
+            return "SHA2_384";
+        case HashAlgorithm.SHA2_512:
+            return "SHA2_512";
+        case HashAlgorithm.SHA3_256:
+            return "SHA3_256";
+        case HashAlgorithm.SHA3_384:
+            return "SHA3_384";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
+    }
+}
+/**
+ * Details of a specific public key, capturing the key encoding method,
+ * and signature algorithm.
+ *
+ * PublicKeyDetails captures the public key/hash algorithm combinations
+ * recommended in the Sigstore ecosystem.
+ *
+ * This is modelled as a linear set as we want to provide a small number of
+ * opinionated options instead of allowing every possible permutation.
+ *
+ * Any changes to this enum MUST be reflected in the algorithm registry.
+ *
+ * See: 
+ *
+ * To avoid the possibility of contradicting formats such as PKCS1 with
+ * ED25519 the valid permutations are listed as a linear set instead of a
+ * cartesian set (i.e one combined variable instead of two, one for encoding
+ * and one for the signature algorithm).
+ */
+var PublicKeyDetails;
+(function (PublicKeyDetails) {
+    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+    /**
+     * PKCS1_RSA_PKCS1V5 - RSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
+    /**
+     * PKCS1_RSA_PSS - See RFC8017
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
+    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
+    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
+    /**
+     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
+    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
+    /** PKIX_ED25519 - Ed 25519 */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
+    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
+    /**
+     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
+     * were/are being used by most Sigstore client implementations.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
+    /**
+     * LMS_SHA256 - LMS and LM-OTS
+     *
+     * These algorithms are deprecated and should not be used.
+     * Keys and signatures MAY be used by private Sigstore
+     * deployments, but will not be supported by the public
+     * good instance.
+     *
+     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
+     * Using them correctly requires discretion and careful consideration
+     * to ensure that individual secret keys are not used more than once.
+     * In addition, LM-OTS is a single-use scheme, meaning that it
+     * MUST NOT be used for more than one signature per LM-OTS key.
+     * If you cannot maintain these invariants, you MUST NOT use these
+     * schemes.
+     *
+     * @deprecated
+     */
+    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
+    /** @deprecated */
+    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
+    /**
+     * ML_DSA_65 - ML-DSA
+     *
+     * These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
+     * take data to sign rather than the prehash variants (HashML-DSA), which
+     * take digests.  While considered quantum-resistant, their usage
+     * involves tradeoffs in that signatures and keys are much larger, and
+     * this makes deployments more costly.
+     *
+     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
+     * In the future they MAY be used by private Sigstore deployments, but
+     * they are not yet fully functional.  This warning will be removed when
+     * these algorithms are widely supported by Sigstore clients and servers,
+     * but care should still be taken for production environments.
+     */
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
+    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
+})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
+function publicKeyDetailsFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
+            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
+        case 1:
+        case "PKCS1_RSA_PKCS1V5":
+            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
+        case 2:
+        case "PKCS1_RSA_PSS":
+            return PublicKeyDetails.PKCS1_RSA_PSS;
+        case 3:
+        case "PKIX_RSA_PKCS1V5":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
+        case 4:
+        case "PKIX_RSA_PSS":
+            return PublicKeyDetails.PKIX_RSA_PSS;
+        case 9:
+        case "PKIX_RSA_PKCS1V15_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
+        case 10:
+        case "PKIX_RSA_PKCS1V15_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
+        case 11:
+        case "PKIX_RSA_PKCS1V15_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
+        case 16:
+        case "PKIX_RSA_PSS_2048_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
+        case 17:
+        case "PKIX_RSA_PSS_3072_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
+        case 18:
+        case "PKIX_RSA_PSS_4096_SHA256":
+            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
+        case 6:
+        case "PKIX_ECDSA_P256_HMAC_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
+        case 5:
+        case "PKIX_ECDSA_P256_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
+        case 12:
+        case "PKIX_ECDSA_P384_SHA_384":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
+        case 13:
+        case "PKIX_ECDSA_P521_SHA_512":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
+        case 7:
+        case "PKIX_ED25519":
+            return PublicKeyDetails.PKIX_ED25519;
+        case 8:
+        case "PKIX_ED25519_PH":
+            return PublicKeyDetails.PKIX_ED25519_PH;
+        case 19:
+        case "PKIX_ECDSA_P384_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
+        case 20:
+        case "PKIX_ECDSA_P521_SHA_256":
+            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
+        case 14:
+        case "LMS_SHA256":
+            return PublicKeyDetails.LMS_SHA256;
+        case 15:
+        case "LMOTS_SHA256":
+            return PublicKeyDetails.LMOTS_SHA256;
+        case 21:
+        case "ML_DSA_65":
+            return PublicKeyDetails.ML_DSA_65;
+        case 22:
+        case "ML_DSA_87":
+            return PublicKeyDetails.ML_DSA_87;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+function publicKeyDetailsToJSON(object) {
+    switch (object) {
+        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
+            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
+        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
+            return "PKCS1_RSA_PKCS1V5";
+        case PublicKeyDetails.PKCS1_RSA_PSS:
+            return "PKCS1_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
+            return "PKIX_RSA_PKCS1V5";
+        case PublicKeyDetails.PKIX_RSA_PSS:
+            return "PKIX_RSA_PSS";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
+            return "PKIX_RSA_PKCS1V15_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
+            return "PKIX_RSA_PKCS1V15_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
+            return "PKIX_RSA_PKCS1V15_4096_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
+            return "PKIX_RSA_PSS_2048_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
+            return "PKIX_RSA_PSS_3072_SHA256";
+        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
+            return "PKIX_RSA_PSS_4096_SHA256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
+            return "PKIX_ECDSA_P256_HMAC_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
+            return "PKIX_ECDSA_P256_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
+            return "PKIX_ECDSA_P384_SHA_384";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
+            return "PKIX_ECDSA_P521_SHA_512";
+        case PublicKeyDetails.PKIX_ED25519:
+            return "PKIX_ED25519";
+        case PublicKeyDetails.PKIX_ED25519_PH:
+            return "PKIX_ED25519_PH";
+        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
+            return "PKIX_ECDSA_P384_SHA_256";
+        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
+            return "PKIX_ECDSA_P521_SHA_256";
+        case PublicKeyDetails.LMS_SHA256:
+            return "LMS_SHA256";
+        case PublicKeyDetails.LMOTS_SHA256:
+            return "LMOTS_SHA256";
+        case PublicKeyDetails.ML_DSA_65:
+            return "ML_DSA_65";
+        case PublicKeyDetails.ML_DSA_87:
+            return "ML_DSA_87";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
+    }
+}
+var SubjectAlternativeNameType;
+(function (SubjectAlternativeNameType) {
+    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
+    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
+    /**
+     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
+     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
+     * for more details.
+     */
+    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
+})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
+function subjectAlternativeNameTypeFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
+            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
+        case 1:
+        case "EMAIL":
+            return SubjectAlternativeNameType.EMAIL;
+        case 2:
+        case "URI":
+            return SubjectAlternativeNameType.URI;
+        case 3:
+        case "OTHER_NAME":
+            return SubjectAlternativeNameType.OTHER_NAME;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+function subjectAlternativeNameTypeToJSON(object) {
+    switch (object) {
+        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
+            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
+        case SubjectAlternativeNameType.EMAIL:
+            return "EMAIL";
+        case SubjectAlternativeNameType.URI:
+            return "URI";
+        case SubjectAlternativeNameType.OTHER_NAME:
+            return "OTHER_NAME";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
+    }
+}
+exports.HashOutput = {
+    fromJSON(object) {
+        return {
+            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
+            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.algorithm !== 0) {
+            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
+        }
+        if (message.digest.length !== 0) {
+            obj.digest = base64FromBytes(message.digest);
+        }
+        return obj;
+    },
+};
+exports.MessageSignature = {
+    fromJSON(object) {
+        return {
+            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
+            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.messageDigest !== undefined) {
+            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
+        }
+        if (message.signature.length !== 0) {
+            obj.signature = base64FromBytes(message.signature);
+        }
+        return obj;
+    },
+};
+exports.LogId = {
+    fromJSON(object) {
+        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.keyId.length !== 0) {
+            obj.keyId = base64FromBytes(message.keyId);
+        }
+        return obj;
+    },
+};
+exports.RFC3161SignedTimestamp = {
+    fromJSON(object) {
+        return {
+            signedTimestamp: isSet(object.signedTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedTimestamp.length !== 0) {
+            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
+        }
+        return obj;
+    },
+};
+exports.PublicKey = {
+    fromJSON(object) {
+        return {
+            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
+            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
+            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes !== undefined) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        if (message.keyDetails !== 0) {
+            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = exports.TimeRange.toJSON(message.validFor);
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentifier = {
+    fromJSON(object) {
+        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.hint !== "") {
+            obj.hint = message.hint;
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifier = {
+    fromJSON(object) {
+        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.id?.length) {
+            obj.id = message.id.map((e) => Math.round(e));
+        }
+        return obj;
+    },
+};
+exports.ObjectIdentifierValuePair = {
+    fromJSON(object) {
+        return {
+            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
+            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.oid !== undefined) {
+            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
+        }
+        if (message.value.length !== 0) {
+            obj.value = base64FromBytes(message.value);
+        }
+        return obj;
+    },
+};
+exports.DistinguishedName = {
+    fromJSON(object) {
+        return {
+            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
+            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.organization !== "") {
+            obj.organization = message.organization;
+        }
+        if (message.commonName !== "") {
+            obj.commonName = message.commonName;
+        }
+        return obj;
+    },
+};
+exports.X509Certificate = {
+    fromJSON(object) {
+        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.rawBytes.length !== 0) {
+            obj.rawBytes = base64FromBytes(message.rawBytes);
+        }
+        return obj;
+    },
+};
+exports.SubjectAlternativeName = {
+    fromJSON(object) {
+        return {
+            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
+            identity: isSet(object.regexp)
+                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
+                : isSet(object.value)
+                    ? { $case: "value", value: globalThis.String(object.value) }
+                    : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.type !== 0) {
+            obj.type = subjectAlternativeNameTypeToJSON(message.type);
+        }
+        if (message.identity?.$case === "regexp") {
+            obj.regexp = message.identity.regexp;
+        }
+        else if (message.identity?.$case === "value") {
+            obj.value = message.identity.value;
+        }
+        return obj;
+    },
+};
+exports.X509CertificateChain = {
+    fromJSON(object) {
+        return {
+            certificates: globalThis.Array.isArray(object?.certificates)
+                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.certificates?.length) {
+            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.TimeRange = {
+    fromJSON(object) {
+        return {
+            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
+            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.start !== undefined) {
+            obj.start = message.start.toISOString();
+        }
+        if (message.end !== undefined) {
+            obj.end = message.end.toISOString();
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function fromTimestamp(t) {
+    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
+    millis += (t.nanos || 0) / 1_000_000;
+    return new globalThis.Date(millis);
+}
+function fromJsonTimestamp(o) {
+    if (o instanceof globalThis.Date) {
+        return o;
+    }
+    else if (typeof o === "string") {
+        return new globalThis.Date(o);
+    }
+    else {
+        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
+    }
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
new file mode 100644
index 0000000000000..fd8ea8384664d
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -0,0 +1,137 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_rekor.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+exports.KindVersion = {
+    fromJSON(object) {
+        return {
+            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
+            version: isSet(object.version) ? globalThis.String(object.version) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.kind !== "") {
+            obj.kind = message.kind;
+        }
+        if (message.version !== "") {
+            obj.version = message.version;
+        }
+        return obj;
+    },
+};
+exports.Checkpoint = {
+    fromJSON(object) {
+        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.envelope !== "") {
+            obj.envelope = message.envelope;
+        }
+        return obj;
+    },
+};
+exports.InclusionProof = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
+            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
+            hashes: globalThis.Array.isArray(object?.hashes)
+                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
+                : [],
+            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.rootHash.length !== 0) {
+            obj.rootHash = base64FromBytes(message.rootHash);
+        }
+        if (message.treeSize !== "0") {
+            obj.treeSize = message.treeSize;
+        }
+        if (message.hashes?.length) {
+            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
+        }
+        if (message.checkpoint !== undefined) {
+            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
+        }
+        return obj;
+    },
+};
+exports.InclusionPromise = {
+    fromJSON(object) {
+        return {
+            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
+                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signedEntryTimestamp.length !== 0) {
+            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
+        }
+        return obj;
+    },
+};
+exports.TransparencyLogEntry = {
+    fromJSON(object) {
+        return {
+            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
+            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
+            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
+            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
+            canonicalizedBody: isSet(object.canonicalizedBody)
+                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
+                : Buffer.alloc(0),
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.logIndex !== "0") {
+            obj.logIndex = message.logIndex;
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.kindVersion !== undefined) {
+            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
+        }
+        if (message.integratedTime !== "0") {
+            obj.integratedTime = message.integratedTime;
+        }
+        if (message.inclusionPromise !== undefined) {
+            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
+        }
+        if (message.inclusionProof !== undefined) {
+            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
+        }
+        if (message.canonicalizedBody.length !== 0) {
+            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
new file mode 100644
index 0000000000000..1b5492fb1a77e
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -0,0 +1,284 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_trustroot.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
+exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
+exports.serviceSelectorToJSON = serviceSelectorToJSON;
+/* eslint-disable */
+const sigstore_common_1 = require("./sigstore_common");
+/**
+ * ServiceSelector specifies how a client SHOULD select a set of
+ * Services to connect to. A client SHOULD throw an error if
+ * the value is SERVICE_SELECTOR_UNDEFINED.
+ */
+var ServiceSelector;
+(function (ServiceSelector) {
+    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
+    /**
+     * ALL - Clients SHOULD select all Services based on supported API version
+     * and validity window.
+     */
+    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
+    /**
+     * ANY - Clients SHOULD select one Service based on supported API version
+     * and validity window. It is up to the client implementation to
+     * decide how to select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
+    /**
+     * EXACT - Clients SHOULD select a specific number of Services based on
+     * supported API version and validity window, using the provided
+     * `count`. It is up to the client implementation to decide how to
+     * select the Service, e.g. random or round-robin.
+     */
+    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
+})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
+function serviceSelectorFromJSON(object) {
+    switch (object) {
+        case 0:
+        case "SERVICE_SELECTOR_UNDEFINED":
+            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
+        case 1:
+        case "ALL":
+            return ServiceSelector.ALL;
+        case 2:
+        case "ANY":
+            return ServiceSelector.ANY;
+        case 3:
+        case "EXACT":
+            return ServiceSelector.EXACT;
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+function serviceSelectorToJSON(object) {
+    switch (object) {
+        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
+            return "SERVICE_SELECTOR_UNDEFINED";
+        case ServiceSelector.ALL:
+            return "ALL";
+        case ServiceSelector.ANY:
+            return "ANY";
+        case ServiceSelector.EXACT:
+            return "EXACT";
+        default:
+            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
+    }
+}
+exports.TransparencyLogInstance = {
+    fromJSON(object) {
+        return {
+            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
+            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
+            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
+            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
+            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.baseUrl !== "") {
+            obj.baseUrl = message.baseUrl;
+        }
+        if (message.hashAlgorithm !== 0) {
+            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
+        }
+        if (message.publicKey !== undefined) {
+            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
+        }
+        if (message.logId !== undefined) {
+            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
+        }
+        if (message.checkpointKeyId !== undefined) {
+            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.CertificateAuthority = {
+    fromJSON(object) {
+        return {
+            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
+            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
+            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.subject !== undefined) {
+            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
+        }
+        if (message.uri !== "") {
+            obj.uri = message.uri;
+        }
+        if (message.certChain !== undefined) {
+            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.TrustedRoot = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            tlogs: globalThis.Array.isArray(object?.tlogs)
+                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
+                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+            ctlogs: globalThis.Array.isArray(object?.ctlogs)
+                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
+                : [],
+            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
+                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.tlogs?.length) {
+            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.certificateAuthorities?.length) {
+            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        if (message.ctlogs?.length) {
+            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
+        }
+        if (message.timestampAuthorities?.length) {
+            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.SigningConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
+                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
+                : [],
+            rekorTlogConfig: isSet(object.rekorTlogConfig)
+                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
+                : undefined,
+            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
+            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.caUrls?.length) {
+            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.oidcUrls?.length) {
+            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogUrls?.length) {
+            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.rekorTlogConfig !== undefined) {
+            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
+        }
+        if (message.tsaUrls?.length) {
+            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
+        }
+        if (message.tsaConfig !== undefined) {
+            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
+        }
+        return obj;
+    },
+};
+exports.Service = {
+    fromJSON(object) {
+        return {
+            url: isSet(object.url) ? globalThis.String(object.url) : "",
+            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
+            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
+            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.url !== "") {
+            obj.url = message.url;
+        }
+        if (message.majorApiVersion !== 0) {
+            obj.majorApiVersion = Math.round(message.majorApiVersion);
+        }
+        if (message.validFor !== undefined) {
+            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
+        }
+        if (message.operator !== "") {
+            obj.operator = message.operator;
+        }
+        return obj;
+    },
+};
+exports.ServiceConfiguration = {
+    fromJSON(object) {
+        return {
+            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
+            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.selector !== 0) {
+            obj.selector = serviceSelectorToJSON(message.selector);
+        }
+        if (message.count !== 0) {
+            obj.count = Math.round(message.count);
+        }
+        return obj;
+    },
+};
+exports.ClientTrustConfig = {
+    fromJSON(object) {
+        return {
+            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
+            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
+            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.mediaType !== "") {
+            obj.mediaType = message.mediaType;
+        }
+        if (message.trustedRoot !== undefined) {
+            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
+        }
+        if (message.signingConfig !== undefined) {
+            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
+        }
+        return obj;
+    },
+};
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
new file mode 100644
index 0000000000000..876fe9cc1db1d
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -0,0 +1,281 @@
+"use strict";
+// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
+// versions:
+//   protoc-gen-ts_proto  v2.7.5
+//   protoc               v6.30.2
+// source: sigstore_verification.proto
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
+/* eslint-disable */
+const sigstore_bundle_1 = require("./sigstore_bundle");
+const sigstore_common_1 = require("./sigstore_common");
+const sigstore_trustroot_1 = require("./sigstore_trustroot");
+exports.CertificateIdentity = {
+    fromJSON(object) {
+        return {
+            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
+            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
+            oids: globalThis.Array.isArray(object?.oids)
+                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.issuer !== "") {
+            obj.issuer = message.issuer;
+        }
+        if (message.san !== undefined) {
+            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
+        }
+        if (message.oids?.length) {
+            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.CertificateIdentities = {
+    fromJSON(object) {
+        return {
+            identities: globalThis.Array.isArray(object?.identities)
+                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.identities?.length) {
+            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.PublicKeyIdentities = {
+    fromJSON(object) {
+        return {
+            publicKeys: globalThis.Array.isArray(object?.publicKeys)
+                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
+                : [],
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.publicKeys?.length) {
+            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions = {
+    fromJSON(object) {
+        return {
+            signers: isSet(object.certificateIdentities)
+                ? {
+                    $case: "certificateIdentities",
+                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
+                }
+                : isSet(object.publicKeys)
+                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
+                    : undefined,
+            tlogOptions: isSet(object.tlogOptions)
+                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
+                : undefined,
+            ctlogOptions: isSet(object.ctlogOptions)
+                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
+                : undefined,
+            tsaOptions: isSet(object.tsaOptions)
+                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
+                : undefined,
+            integratedTsOptions: isSet(object.integratedTsOptions)
+                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
+                : undefined,
+            observerOptions: isSet(object.observerOptions)
+                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
+                : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.signers?.$case === "certificateIdentities") {
+            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
+        }
+        else if (message.signers?.$case === "publicKeys") {
+            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
+        }
+        if (message.tlogOptions !== undefined) {
+            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
+        }
+        if (message.ctlogOptions !== undefined) {
+            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
+        }
+        if (message.tsaOptions !== undefined) {
+            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
+        }
+        if (message.integratedTsOptions !== undefined) {
+            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
+        }
+        if (message.observerOptions !== undefined) {
+            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            performOnlineVerification: isSet(object.performOnlineVerification)
+                ? globalThis.Boolean(object.performOnlineVerification)
+                : false,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.performOnlineVerification !== false) {
+            obj.performOnlineVerification = message.performOnlineVerification;
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_CtlogOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
+    fromJSON(object) {
+        return {
+            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
+            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.threshold !== 0) {
+            obj.threshold = Math.round(message.threshold);
+        }
+        if (message.disable !== false) {
+            obj.disable = message.disable;
+        }
+        return obj;
+    },
+};
+exports.Artifact = {
+    fromJSON(object) {
+        return {
+            data: isSet(object.artifactUri)
+                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
+                : isSet(object.artifact)
+                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
+                    : isSet(object.artifactDigest)
+                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
+                        : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.data?.$case === "artifactUri") {
+            obj.artifactUri = message.data.artifactUri;
+        }
+        else if (message.data?.$case === "artifact") {
+            obj.artifact = base64FromBytes(message.data.artifact);
+        }
+        else if (message.data?.$case === "artifactDigest") {
+            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
+        }
+        return obj;
+    },
+};
+exports.Input = {
+    fromJSON(object) {
+        return {
+            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
+            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
+                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
+                : undefined,
+            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
+            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
+        };
+    },
+    toJSON(message) {
+        const obj = {};
+        if (message.artifactTrustRoot !== undefined) {
+            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
+        }
+        if (message.artifactVerificationOptions !== undefined) {
+            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
+        }
+        if (message.bundle !== undefined) {
+            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
+        }
+        if (message.artifact !== undefined) {
+            obj.artifact = exports.Artifact.toJSON(message.artifact);
+        }
+        return obj;
+    },
+};
+function bytesFromBase64(b64) {
+    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
+}
+function base64FromBytes(arr) {
+    return globalThis.Buffer.from(arr).toString("base64");
+}
+function isSet(value) {
+    return value !== null && value !== undefined;
+}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js
new file mode 100644
index 0000000000000..eafb768c48fca
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js
@@ -0,0 +1,37 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2023 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("./__generated__/envelope"), exports);
+__exportStar(require("./__generated__/sigstore_bundle"), exports);
+__exportStar(require("./__generated__/sigstore_common"), exports);
+__exportStar(require("./__generated__/sigstore_rekor"), exports);
+__exportStar(require("./__generated__/sigstore_trustroot"), exports);
+__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
new file mode 100644
index 0000000000000..10745efc39a1f
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
@@ -0,0 +1,35 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+/*
+Copyright 2025 The Sigstore Authors.
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
+__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
+__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
+__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json
new file mode 100644
index 0000000000000..f87b2540fbf98
--- /dev/null
+++ b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json
@@ -0,0 +1,35 @@
+{
+  "name": "@sigstore/protobuf-specs",
+  "version": "0.5.0",
+  "description": "code-signing for npm packages",
+  "main": "dist/index.js",
+  "types": "dist/index.d.ts",
+  "exports": {
+    ".": "./dist/index.js",
+    "./rekor/v2": "./dist/rekor/v2/index.js"
+  },
+  "scripts": {
+    "build": "tsc"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/sigstore/protobuf-specs.git"
+  },
+  "files": [
+    "dist"
+  ],
+  "author": "bdehamer@github.com",
+  "license": "Apache-2.0",
+  "bugs": {
+    "url": "https://github.com/sigstore/protobuf-specs/issues"
+  },
+  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
+  "devDependencies": {
+    "@tsconfig/node18": "^18.2.4",
+    "@types/node": "^18.14.0",
+    "typescript": "^5.7.2"
+  },
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  }
+}
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/LICENSE
rename to node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/dist/appdata.js
rename to node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/dist/client.js
rename to node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/dist/error.js
rename to node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/dist/index.js
rename to node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/dist/target.js
rename to node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/package.json
rename to node_modules/sigstore/node_modules/@sigstore/tuf/package.json
diff --git a/node_modules/pacote/node_modules/@sigstore/tuf/seeds.json b/node_modules/sigstore/node_modules/@sigstore/tuf/seeds.json
similarity index 100%
rename from node_modules/pacote/node_modules/@sigstore/tuf/seeds.json
rename to node_modules/sigstore/node_modules/@sigstore/tuf/seeds.json
diff --git a/node_modules/pacote/node_modules/@tufjs/models/LICENSE b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/LICENSE
rename to node_modules/sigstore/node_modules/@tufjs/models/LICENSE
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/base.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/base.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/delegations.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/error.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/error.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/file.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/file.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/index.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/key.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/metadata.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/role.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/role.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/root.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/root.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/signature.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/signature.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/snapshot.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/targets.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/targets.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/timestamp.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/utils/guard.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/utils/index.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/utils/key.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/utils/oid.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/utils/types.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/dist/utils/verify.js
rename to node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
diff --git a/node_modules/pacote/node_modules/@tufjs/models/package.json b/node_modules/sigstore/node_modules/@tufjs/models/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/@tufjs/models/package.json
rename to node_modules/sigstore/node_modules/@tufjs/models/package.json
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE b/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
new file mode 100644
index 0000000000000..1808eb2844231
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
@@ -0,0 +1,16 @@
+ISC License
+
+Copyright 2017-2022 (c) npm, Inc.
+
+Permission to use, copy, modify, and/or distribute this software for
+any purpose with or without fee is hereby granted, provided that the
+above copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
+ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
+WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
+COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
+CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
+USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
new file mode 100644
index 0000000000000..bfcfacbcc95e1
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
@@ -0,0 +1,471 @@
+const { Request, Response } = require('minipass-fetch')
+const { Minipass } = require('minipass')
+const MinipassFlush = require('minipass-flush')
+const cacache = require('cacache')
+const url = require('url')
+
+const CachingMinipassPipeline = require('../pipeline.js')
+const CachePolicy = require('./policy.js')
+const cacheKey = require('./key.js')
+const remote = require('../remote.js')
+
+const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
+
+// allow list for request headers that will be written to the cache index
+// note: we will also store any request headers
+// that are named in a response's vary header
+const KEEP_REQUEST_HEADERS = [
+  'accept-charset',
+  'accept-encoding',
+  'accept-language',
+  'accept',
+  'cache-control',
+]
+
+// allow list for response headers that will be written to the cache index
+// note: we must not store the real response's age header, or when we load
+// a cache policy based on the metadata it will think the cached response
+// is always stale
+const KEEP_RESPONSE_HEADERS = [
+  'cache-control',
+  'content-encoding',
+  'content-language',
+  'content-type',
+  'date',
+  'etag',
+  'expires',
+  'last-modified',
+  'link',
+  'location',
+  'pragma',
+  'vary',
+]
+
+// return an object containing all metadata to be written to the index
+const getMetadata = (request, response, options) => {
+  const metadata = {
+    time: Date.now(),
+    url: request.url,
+    reqHeaders: {},
+    resHeaders: {},
+
+    // options on which we must match the request and vary the response
+    options: {
+      compress: options.compress != null ? options.compress : request.compress,
+    },
+  }
+
+  // only save the status if it's not a 200 or 304
+  if (response.status !== 200 && response.status !== 304) {
+    metadata.status = response.status
+  }
+
+  for (const name of KEEP_REQUEST_HEADERS) {
+    if (request.headers.has(name)) {
+      metadata.reqHeaders[name] = request.headers.get(name)
+    }
+  }
+
+  // if the request's host header differs from the host in the url
+  // we need to keep it, otherwise it's just noise and we ignore it
+  const host = request.headers.get('host')
+  const parsedUrl = new url.URL(request.url)
+  if (host && parsedUrl.host !== host) {
+    metadata.reqHeaders.host = host
+  }
+
+  // if the response has a vary header, make sure
+  // we store the relevant request headers too
+  if (response.headers.has('vary')) {
+    const vary = response.headers.get('vary')
+    // a vary of "*" means every header causes a different response.
+    // in that scenario, we do not include any additional headers
+    // as the freshness check will always fail anyway and we don't
+    // want to bloat the cache indexes
+    if (vary !== '*') {
+      // copy any other request headers that will vary the response
+      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
+      for (const name of varyHeaders) {
+        if (request.headers.has(name)) {
+          metadata.reqHeaders[name] = request.headers.get(name)
+        }
+      }
+    }
+  }
+
+  for (const name of KEEP_RESPONSE_HEADERS) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  for (const name of options.cacheAdditionalHeaders) {
+    if (response.headers.has(name)) {
+      metadata.resHeaders[name] = response.headers.get(name)
+    }
+  }
+
+  return metadata
+}
+
+// symbols used to hide objects that may be lazily evaluated in a getter
+const _request = Symbol('request')
+const _response = Symbol('response')
+const _policy = Symbol('policy')
+
+class CacheEntry {
+  constructor ({ entry, request, response, options }) {
+    if (entry) {
+      this.key = entry.key
+      this.entry = entry
+      // previous versions of this module didn't write an explicit timestamp in
+      // the metadata, so fall back to the entry's timestamp. we can't use the
+      // entry timestamp to determine staleness because cacache will update it
+      // when it verifies its data
+      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
+    } else {
+      this.key = cacheKey(request)
+    }
+
+    this.options = options
+
+    // these properties are behind getters that lazily evaluate
+    this[_request] = request
+    this[_response] = response
+    this[_policy] = null
+  }
+
+  // returns a CacheEntry instance that satisfies the given request
+  // or undefined if no existing entry satisfies
+  static async find (request, options) {
+    try {
+      // compacts the index and returns an array of unique entries
+      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
+        const entryA = new CacheEntry({ entry: A, options })
+        const entryB = new CacheEntry({ entry: B, options })
+        return entryA.policy.satisfies(entryB.request)
+      }, {
+        validateEntry: (entry) => {
+          // clean out entries with a buggy content-encoding value
+          if (entry.metadata &&
+              entry.metadata.resHeaders &&
+              entry.metadata.resHeaders['content-encoding'] === null) {
+            return false
+          }
+
+          // if an integrity is null, it needs to have a status specified
+          if (entry.integrity === null) {
+            return !!(entry.metadata && entry.metadata.status)
+          }
+
+          return true
+        },
+      })
+    } catch (err) {
+      // if the compact request fails, ignore the error and return
+      return
+    }
+
+    // a cache mode of 'reload' means to behave as though we have no cache
+    // on the way to the network. return undefined to allow cacheFetch to
+    // create a brand new request no matter what.
+    if (options.cache === 'reload') {
+      return
+    }
+
+    // find the specific entry that satisfies the request
+    let match
+    for (const entry of matches) {
+      const _entry = new CacheEntry({
+        entry,
+        options,
+      })
+
+      if (_entry.policy.satisfies(request)) {
+        match = _entry
+        break
+      }
+    }
+
+    return match
+  }
+
+  // if the user made a PUT/POST/PATCH then we invalidate our
+  // cache for the same url by deleting the index entirely
+  static async invalidate (request, options) {
+    const key = cacheKey(request)
+    try {
+      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
+    } catch (err) {
+      // ignore errors
+    }
+  }
+
+  get request () {
+    if (!this[_request]) {
+      this[_request] = new Request(this.entry.metadata.url, {
+        method: 'GET',
+        headers: this.entry.metadata.reqHeaders,
+        ...this.entry.metadata.options,
+      })
+    }
+
+    return this[_request]
+  }
+
+  get response () {
+    if (!this[_response]) {
+      this[_response] = new Response(null, {
+        url: this.entry.metadata.url,
+        counter: this.options.counter,
+        status: this.entry.metadata.status || 200,
+        headers: {
+          ...this.entry.metadata.resHeaders,
+          'content-length': this.entry.size,
+        },
+      })
+    }
+
+    return this[_response]
+  }
+
+  get policy () {
+    if (!this[_policy]) {
+      this[_policy] = new CachePolicy({
+        entry: this.entry,
+        request: this.request,
+        response: this.response,
+        options: this.options,
+      })
+    }
+
+    return this[_policy]
+  }
+
+  // wraps the response in a pipeline that stores the data
+  // in the cache while the user consumes it
+  async store (status) {
+    // if we got a status other than 200, 301, or 308,
+      // or the CachePolicy forbids storage, append the
+    // cache status header and return it untouched
+    if (
+      this.request.method !== 'GET' ||
+      ![200, 301, 308].includes(this.response.status) ||
+      !this.policy.storable()
+    ) {
+      this.response.headers.set('x-local-cache-status', 'skip')
+      return this.response
+    }
+
+    const size = this.response.headers.get('content-length')
+    const cacheOpts = {
+      algorithms: this.options.algorithms,
+      metadata: getMetadata(this.request, this.response, this.options),
+      size,
+      integrity: this.options.integrity,
+      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
+    }
+
+    let body = null
+    // we only set a body if the status is a 200; redirects are
+    // stored as metadata only
+    if (this.response.status === 200) {
+      let cacheWriteResolve, cacheWriteReject
+      const cacheWritePromise = new Promise((resolve, reject) => {
+        cacheWriteResolve = resolve
+        cacheWriteReject = reject
+      }).catch((err) => {
+        body.emit('error', err)
+      })
+
+      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
+        flush () {
+          return cacheWritePromise
+        },
+      }))
+      // this is always true since if we aren't reusing the one from the remote fetch, we
+      // are using the one from cacache
+      body.hasIntegrityEmitter = true
+
+      const onResume = () => {
+        const tee = new Minipass()
+        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
+        // re-emit the integrity and size events on our new response body so they can be reused
+        cacheStream.on('integrity', i => body.emit('integrity', i))
+        cacheStream.on('size', s => body.emit('size', s))
+        // stick a flag on here so downstream users will know if they can expect integrity events
+        tee.pipe(cacheStream)
+        // TODO if the cache write fails, log a warning but return the response anyway
+        // eslint-disable-next-line promise/catch-or-return
+        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
+        body.unshift(tee)
+        body.unshift(this.response.body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+    } else {
+      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
+    }
+
+    // note: we do not set the x-local-cache-hash header because we do not know
+    // the hash value until after the write to the cache completes, which doesn't
+    // happen until after the response has been sent and it's too late to write
+    // the header anyway
+    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    this.response.headers.set('x-local-cache-mode', 'stream')
+    this.response.headers.set('x-local-cache-status', status)
+    this.response.headers.set('x-local-cache-time', new Date().toISOString())
+    const newResponse = new Response(body, {
+      url: this.response.url,
+      status: this.response.status,
+      headers: this.response.headers,
+      counter: this.options.counter,
+    })
+    return newResponse
+  }
+
+  // use the cached data to create a response and return it
+  async respond (method, options, status) {
+    let response
+    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
+      // if the request is a HEAD, or the response is a redirect,
+      // then the metadata in the entry already includes everything
+      // we need to build a response
+      response = this.response
+    } else {
+      // we're responding with a full cached response, so create a body
+      // that reads from cacache and attach it to a new Response
+      const body = new Minipass()
+      const headers = { ...this.policy.responseHeaders() }
+
+      const onResume = () => {
+        const cacheStream = cacache.get.stream.byDigest(
+          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+        )
+        cacheStream.on('error', async (err) => {
+          cacheStream.pause()
+          if (err.code === 'EINTEGRITY') {
+            await cacache.rm.content(
+              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
+            )
+          }
+          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
+            await CacheEntry.invalidate(this.request, this.options)
+          }
+          body.emit('error', err)
+          cacheStream.resume()
+        })
+        // emit the integrity and size events based on our metadata so we're consistent
+        body.emit('integrity', this.entry.integrity)
+        body.emit('size', Number(headers['content-length']))
+        cacheStream.pipe(body)
+      }
+
+      body.once('resume', onResume)
+      body.once('end', () => body.removeListener('resume', onResume))
+      response = new Response(body, {
+        url: this.entry.metadata.url,
+        counter: options.counter,
+        status: 200,
+        headers,
+      })
+    }
+
+    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
+    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
+    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
+    response.headers.set('x-local-cache-mode', 'stream')
+    response.headers.set('x-local-cache-status', status)
+    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
+    return response
+  }
+
+  // use the provided request along with this cache entry to
+  // revalidate the stored response. returns a response, either
+  // from the cache or from the update
+  async revalidate (request, options) {
+    const revalidateRequest = new Request(request, {
+      headers: this.policy.revalidationHeaders(request),
+    })
+
+    try {
+      // NOTE: be sure to remove the headers property from the
+      // user supplied options, since we have already defined
+      // them on the new request object. if they're still in the
+      // options then those will overwrite the ones from the policy
+      var response = await remote(revalidateRequest, {
+        ...options,
+        headers: undefined,
+      })
+    } catch (err) {
+      // if the network fetch fails, return the stale
+      // cached response unless it has a cache-control
+      // of 'must-revalidate'
+      if (!this.policy.mustRevalidate) {
+        return this.respond(request.method, options, 'stale')
+      }
+
+      throw err
+    }
+
+    if (this.policy.revalidated(revalidateRequest, response)) {
+      // we got a 304, write a new index to the cache and respond from cache
+      const metadata = getMetadata(request, response, options)
+      // 304 responses do not include headers that are specific to the response data
+      // since they do not include a body, so we copy values for headers that were
+      // in the old cache entry to the new one, if the new metadata does not already
+      // include that header
+      for (const name of KEEP_RESPONSE_HEADERS) {
+        if (
+          !hasOwnProperty(metadata.resHeaders, name) &&
+          hasOwnProperty(this.entry.metadata.resHeaders, name)
+        ) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+      }
+
+      for (const name of options.cacheAdditionalHeaders) {
+        const inMeta = hasOwnProperty(metadata.resHeaders, name)
+        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
+        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
+
+        // if the header is in the existing entry, but it is not in the metadata
+        // then we need to write it to the metadata as this will refresh the on-disk cache
+        if (!inMeta && inEntry) {
+          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
+        }
+        // if the header is in the metadata, but not in the policy, then we need to set
+        // it in the policy so that it's included in the immediate response. future
+        // responses will load a new cache entry, so we don't need to change that
+        if (!inPolicy && inMeta) {
+          this.policy.response.headers[name] = metadata.resHeaders[name]
+        }
+      }
+
+      try {
+        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
+          size: this.entry.size,
+          metadata,
+        })
+      } catch (err) {
+        // if updating the cache index fails, we ignore it and
+        // respond anyway
+      }
+      return this.respond(request.method, options, 'revalidated')
+    }
+
+    // if we got a modified response, create a new entry based on it
+    const newEntry = new CacheEntry({
+      request,
+      response,
+      options,
+    })
+
+    // respond with the new entry while writing it to the cache
+    return newEntry.store('updated')
+  }
+}
+
+module.exports = CacheEntry
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
new file mode 100644
index 0000000000000..67a66573bebe6
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
@@ -0,0 +1,11 @@
+class NotCachedError extends Error {
+  constructor (url) {
+    /* eslint-disable-next-line max-len */
+    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
+    this.code = 'ENOTCACHED'
+  }
+}
+
+module.exports = {
+  NotCachedError,
+}
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
new file mode 100644
index 0000000000000..0de49d23fb933
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
@@ -0,0 +1,49 @@
+const { NotCachedError } = require('./errors.js')
+const CacheEntry = require('./entry.js')
+const remote = require('../remote.js')
+
+// do whatever is necessary to get a Response and return it
+const cacheFetch = async (request, options) => {
+  // try to find a cached entry that satisfies this request
+  const entry = await CacheEntry.find(request, options)
+  if (!entry) {
+    // no cached result, if the cache mode is 'only-if-cached' that's a failure
+    if (options.cache === 'only-if-cached') {
+      throw new NotCachedError(request.url)
+    }
+
+    // otherwise, we make a request, store it and return it
+    const response = await remote(request, options)
+    const newEntry = new CacheEntry({ request, response, options })
+    return newEntry.store('miss')
+  }
+
+  // we have a cached response that satisfies this request, however if the cache
+  // mode is 'no-cache' then we send the revalidation request no matter what
+  if (options.cache === 'no-cache') {
+    return entry.revalidate(request, options)
+  }
+
+  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
+  // 'only-if-cached' we can respond with the cached entry. set the status
+  // based on the result of needsRevalidation and respond
+  const _needsRevalidation = entry.policy.needsRevalidation(request)
+  if (options.cache === 'force-cache' ||
+      options.cache === 'only-if-cached' ||
+      !_needsRevalidation) {
+    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
+  }
+
+  // if we got here, the cache entry is stale so revalidate it
+  return entry.revalidate(request, options)
+}
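+
+// Summary of the decision above (descriptive comment only): 'only-if-cached'
+// with no entry throws ENOTCACHED; a miss fetches, stores and returns the
+// network response; 'no-cache' always revalidates; 'force-cache',
+// 'only-if-cached' or a fresh entry respond from cache ('hit' or 'stale');
+// anything else revalidates the stale entry.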
+
+cacheFetch.invalidate = async (request, options) => {
+  if (!options.cachePath) {
+    return
+  }
+
+  return CacheEntry.invalidate(request, options)
+}
+
+module.exports = cacheFetch
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
new file mode 100644
index 0000000000000..f7684d562b7fa
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
@@ -0,0 +1,17 @@
+const { URL, format } = require('url')
+
+// options passed to url.format() when generating a key
+const formatOptions = {
+  auth: false,
+  fragment: false,
+  search: true,
+  unicode: false,
+}
+
+// returns a string to be used as the cache key for the Request
+const cacheKey = (request) => {
+  const parsed = new URL(request.url)
+  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
+}
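+
+// Illustrative example (hypothetical URL): auth and fragment are dropped and
+// the query string is kept, so
+//   cacheKey({ url: 'https://user:pass@registry.example.com/npm?foo=1#frag' })
+// yields 'make-fetch-happen:request-cache:https://registry.example.com/npm?foo=1'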
+
+module.exports = cacheKey
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
new file mode 100644
index 0000000000000..ada3c8600dae9
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
@@ -0,0 +1,161 @@
+const CacheSemantics = require('http-cache-semantics')
+const Negotiator = require('negotiator')
+const ssri = require('ssri')
+
+// options passed to http-cache-semantics constructor
+const policyOptions = {
+  shared: false,
+  ignoreCargoCult: true,
+}
+
+// a fake empty response, used when only testing the
+// request for storability
+const emptyResponse = { status: 200, headers: {} }
+
+// returns a plain object representation of the Request
+const requestObject = (request) => {
+  const _obj = {
+    method: request.method,
+    url: request.url,
+    headers: {},
+    compress: request.compress,
+  }
+
+  request.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+// returns a plain object representation of the Response
+const responseObject = (response) => {
+  const _obj = {
+    status: response.status,
+    headers: {},
+  }
+
+  response.headers.forEach((value, key) => {
+    _obj.headers[key] = value
+  })
+
+  return _obj
+}
+
+class CachePolicy {
+  constructor ({ entry, request, response, options }) {
+    this.entry = entry
+    this.request = requestObject(request)
+    this.response = responseObject(response)
+    this.options = options
+    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
+
+    if (this.entry) {
+      // if we have an entry, copy the timestamp to the _responseTime
+      // this is necessary because the CacheSemantics constructor forces
+      // the value to Date.now() which means a policy created from a
+      // cache entry is likely to always identify itself as stale
+      this.policy._responseTime = this.entry.metadata.time
+    }
+  }
+
+  // static method to quickly determine if a request alone is storable
+  static storable (request, options) {
+    // no cachePath means no caching
+    if (!options.cachePath) {
+      return false
+    }
+
+    // user explicitly asked not to cache
+    if (options.cache === 'no-store') {
+      return false
+    }
+
+    // we only cache GET and HEAD requests
+    if (!['GET', 'HEAD'].includes(request.method)) {
+      return false
+    }
+
+    // otherwise, let http-cache-semantics make the decision
+    // based on the request's headers
+    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
+    return policy.storable()
+  }
+
+  // returns true if the policy satisfies the request
+  satisfies (request) {
+    const _req = requestObject(request)
+    if (this.request.headers.host !== _req.headers.host) {
+      return false
+    }
+
+    if (this.request.compress !== _req.compress) {
+      return false
+    }
+
+    const negotiatorA = new Negotiator(this.request)
+    const negotiatorB = new Negotiator(_req)
+
+    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
+      return false
+    }
+
+    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
+      return false
+    }
+
+    if (this.options.integrity) {
+      return ssri.parse(this.options.integrity).match(this.entry.integrity)
+    }
+
+    return true
+  }
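+
+  // Rough intuition (comment only): two requests satisfy each other when they
+  // target the same host, use the same compress setting, and negotiate the same
+  // media types, languages and encodings; when an expected integrity is set it
+  // must also match the stored entry's integrity.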
+
+  // returns true if the request and response allow caching
+  storable () {
+    return this.policy.storable()
+  }
+
+  // NOTE: this is a hack to avoid parsing the cache-control
+  // header ourselves; it returns true if the response's
+  // cache-control contains must-revalidate
+  get mustRevalidate () {
+    return !!this.policy._rescc['must-revalidate']
+  }
+
+  // returns true if the cached response requires revalidation
+  // for the given request
+  needsRevalidation (request) {
+    const _req = requestObject(request)
+    // force method to GET because we only cache GETs
+    // but can serve a HEAD from a cached GET
+    _req.method = 'GET'
+    return !this.policy.satisfiesWithoutRevalidation(_req)
+  }
+
+  responseHeaders () {
+    return this.policy.responseHeaders()
+  }
+
+  // returns a new object containing the appropriate headers
+  // to send a revalidation request
+  revalidationHeaders (request) {
+    const _req = requestObject(request)
+    return this.policy.revalidationHeaders(_req)
+  }
+
+  // returns true if the request/response was revalidated
+  // successfully. returns false if a new response was received
+  revalidated (request, response) {
+    const _req = requestObject(request)
+    const _res = responseObject(response)
+    const policy = this.policy.revalidatedPolicy(_req, _res)
+    return !policy.modified
+  }
+}
+
+module.exports = CachePolicy
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
new file mode 100644
index 0000000000000..233ba67e16550
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
@@ -0,0 +1,118 @@
+'use strict'
+
+const { FetchError, Request, isRedirect } = require('minipass-fetch')
+const url = require('url')
+
+const CachePolicy = require('./cache/policy.js')
+const cache = require('./cache/index.js')
+const remote = require('./remote.js')
+
+// given a Request, a Response and user options
+// return true if the response is a redirect that
+// can be followed. we throw errors that will result
+// in the fetch being rejected if the redirect is
+// possible but invalid for some reason
+const canFollowRedirect = (request, response, options) => {
+  if (!isRedirect(response.status)) {
+    return false
+  }
+
+  if (options.redirect === 'manual') {
+    return false
+  }
+
+  if (options.redirect === 'error') {
+    throw new FetchError(`redirect mode is set to error: ${request.url}`,
+      'no-redirect', { code: 'ENOREDIRECT' })
+  }
+
+  if (!response.headers.has('location')) {
+    throw new FetchError(`redirect location header missing for: ${request.url}`,
+      'no-location', { code: 'EINVALIDREDIRECT' })
+  }
+
+  if (request.counter >= request.follow) {
+    throw new FetchError(`maximum redirect reached at: ${request.url}`,
+      'max-redirect', { code: 'EMAXREDIRECT' })
+  }
+
+  return true
+}
+
+// given a Request, a Response, and the user's options, return an object
+// with a new Request and a new options object that will be used for
+// following the redirect
+const getRedirect = (request, response, options) => {
+  const _opts = { ...options }
+  const location = response.headers.get('location')
+  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
+  // Comment below is used under the following license:
+  /**
+   * @license
+   * Copyright (c) 2010-2012 Mikeal Rogers
+   * Licensed under the Apache License, Version 2.0 (the "License");
+   * you may not use this file except in compliance with the License.
+   * You may obtain a copy of the License at
+   * http://www.apache.org/licenses/LICENSE-2.0
+   * Unless required by applicable law or agreed to in writing,
+   * software distributed under the License is distributed on an "AS
+   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+   * express or implied. See the License for the specific language
+   * governing permissions and limitations under the License.
+   */
+
+  // Remove authorization if changing hostnames (but not if just
+  // changing ports or protocols).  This matches the behavior of request:
+  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
+  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
+    request.headers.delete('authorization')
+    request.headers.delete('cookie')
+  }
+
+  // for POST request with 301/302 response, or any request with 303 response,
+  // use GET when following redirect
+  if (
+    response.status === 303 ||
+    (request.method === 'POST' && [301, 302].includes(response.status))
+  ) {
+    _opts.method = 'GET'
+    _opts.body = null
+    request.headers.delete('content-length')
+  }
+
+  _opts.headers = {}
+  request.headers.forEach((value, key) => {
+    _opts.headers[key] = value
+  })
+
+  _opts.counter = ++request.counter
+  const redirectReq = new Request(url.format(redirectUrl), _opts)
+  return {
+    request: redirectReq,
+    options: _opts,
+  }
+}
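+
+// Worked example (hypothetical hosts): a POST to https://a.example/login that is
+// answered with a 303 and 'location: /home' becomes a GET to https://a.example/home
+// with no body and no content-length header; the authorization and cookie headers
+// survive only because the hostname did not change.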
+
+const fetch = async (request, options) => {
+  const response = CachePolicy.storable(request, options)
+    ? await cache(request, options)
+    : await remote(request, options)
+
+  // if the request wasn't a GET or HEAD, and the response
+  // status is between 200 and 399 inclusive, invalidate the
+  // request url
+  if (!['GET', 'HEAD'].includes(request.method) &&
+      response.status >= 200 &&
+      response.status <= 399) {
+    await cache.invalidate(request, options)
+  }
+
+  if (!canFollowRedirect(request, response, options)) {
+    return response
+  }
+
+  const redirect = getRedirect(request, response, options)
+  return fetch(redirect.request, redirect.options)
+}
+
+module.exports = fetch
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
new file mode 100644
index 0000000000000..2f12e8e1b6113
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
@@ -0,0 +1,41 @@
+const { FetchError, Headers, Request, Response } = require('minipass-fetch')
+
+const configureOptions = require('./options.js')
+const fetch = require('./fetch.js')
+
+const makeFetchHappen = (url, opts) => {
+  const options = configureOptions(opts)
+
+  const request = new Request(url, options)
+  return fetch(request, options)
+}
+
+makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
+  if (typeof defaultUrl === 'object') {
+    defaultOptions = defaultUrl
+    defaultUrl = null
+  }
+
+  const defaultedFetch = (url, options = {}) => {
+    const finalUrl = url || defaultUrl
+    const finalOptions = {
+      ...defaultOptions,
+      ...options,
+      headers: {
+        ...defaultOptions.headers,
+        ...options.headers,
+      },
+    }
+    return wrappedFetch(finalUrl, finalOptions)
+  }
+
+  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
+    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
+  return defaultedFetch
+}
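+
+// Usage sketch (illustrative URL): options given to defaults() sit under the
+// per-call options, with headers merged one level deep:
+//
+//   const cachedFetch = makeFetchHappen.defaults({ cachePath: '/tmp/cache' })
+//   const res = await cachedFetch('https://registry.example.com/npm', { cache: 'no-cache' })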
+
+module.exports = makeFetchHappen
+module.exports.FetchError = FetchError
+module.exports.Headers = Headers
+module.exports.Request = Request
+module.exports.Response = Response
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
new file mode 100644
index 0000000000000..db51cc6324817
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
@@ -0,0 +1,59 @@
+const dns = require('dns')
+
+const conditionalHeaders = [
+  'if-modified-since',
+  'if-none-match',
+  'if-unmodified-since',
+  'if-match',
+  'if-range',
+]
+
+const configureOptions = (opts) => {
+  const { strictSSL, ...options } = { ...opts }
+  options.method = options.method ? options.method.toUpperCase() : 'GET'
+
+  if (strictSSL === undefined || strictSSL === null) {
+    options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
+  } else {
+    options.rejectUnauthorized = strictSSL !== false
+  }
+
+  if (!options.retry) {
+    options.retry = { retries: 0 }
+  } else if (typeof options.retry === 'string') {
+    const retries = parseInt(options.retry, 10)
+    if (isFinite(retries)) {
+      options.retry = { retries }
+    } else {
+      options.retry = { retries: 0 }
+    }
+  } else if (typeof options.retry === 'number') {
+    options.retry = { retries: options.retry }
+  } else {
+    options.retry = { retries: 0, ...options.retry }
+  }
+
+  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
+
+  options.cache = options.cache || 'default'
+  if (options.cache === 'default') {
+    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
+      return conditionalHeaders.includes(name.toLowerCase())
+    })
+    if (hasConditionalHeader) {
+      options.cache = 'no-store'
+    }
+  }
+
+  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
+
+  // cacheManager is deprecated, but if it's set and
+  // cachePath is not we should copy it to the new field
+  if (options.cacheManager && !options.cachePath) {
+    options.cachePath = options.cacheManager
+  }
+
+  return options
+}
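+
+// Normalization examples (descriptive only):
+//   retry: 3        -> { retries: 3 }
+//   retry: '5'      -> { retries: 5 }
+//   retry: 'nope'   -> { retries: 0 }
+//   retry undefined -> { retries: 0 }
+// and a 'default' cache mode downgrades to 'no-store' when the caller already
+// sent a conditional header such as if-none-match.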
+
+module.exports = configureOptions
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
new file mode 100644
index 0000000000000..b1d221b2d0ce3
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
@@ -0,0 +1,41 @@
+'use strict'
+
+const MinipassPipeline = require('minipass-pipeline')
+
+class CachingMinipassPipeline extends MinipassPipeline {
+  #events = []
+  #data = new Map()
+
+  constructor (opts, ...streams) {
+    // CRITICAL: do NOT pass the streams to the call to super(), this will start
+    // the flow of data and potentially cause the events we need to catch to emit
+    // before we've finished our own setup. instead we call super() with no args,
+    // finish our setup, and then push the streams into ourselves to start the
+    // data flow
+    super()
+    this.#events = opts.events
+
+    /* istanbul ignore next - coverage disabled because this is pointless to test here */
+    if (streams.length) {
+      this.push(...streams)
+    }
+  }
+
+  on (event, handler) {
+    if (this.#events.includes(event) && this.#data.has(event)) {
+      return handler(...this.#data.get(event))
+    }
+
+    return super.on(event, handler)
+  }
+
+  emit (event, ...data) {
+    if (this.#events.includes(event)) {
+      this.#data.set(event, data)
+    }
+
+    return super.emit(event, ...data)
+  }
+}
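+
+// Usage sketch (illustrative): because emitted 'integrity' and 'size' events are
+// cached, a listener attached after the event already fired still receives it:
+//
+//   const pipeline = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, source)
+//   // ...later, possibly after the stream has already emitted 'integrity'...
+//   pipeline.on('integrity', (sri) => console.log('verified as', sri.toString()))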
+
+module.exports = CachingMinipassPipeline
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js b/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
new file mode 100644
index 0000000000000..1d640e5380baa
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
@@ -0,0 +1,132 @@
+const { Minipass } = require('minipass')
+const fetch = require('minipass-fetch')
+const promiseRetry = require('promise-retry')
+const ssri = require('ssri')
+const { log } = require('proc-log')
+
+const CachingMinipassPipeline = require('./pipeline.js')
+const { getAgent } = require('@npmcli/agent')
+const pkg = require('../package.json')
+
+const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
+
+const RETRY_ERRORS = [
+  'ECONNRESET', // remote socket closed on us
+  'ECONNREFUSED', // remote host refused to open connection
+  'EADDRINUSE', // failed to bind to a local port (proxy?)
+  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
+  // from @npmcli/agent
+  'ECONNECTIONTIMEOUT',
+  'EIDLETIMEOUT',
+  'ERESPONSETIMEOUT',
+  'ETRANSFERTIMEOUT',
+  // Known codes we do NOT retry on:
+  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
+  // EINVALIDPROXY // invalid protocol from @npmcli/agent
+  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
+]
+
+const RETRY_TYPES = [
+  'request-timeout',
+]
+
+// make a request directly to the remote source,
+// retrying certain classes of errors as well as
+// following redirects (through the cache if necessary)
+// and verifying response integrity
+const remoteFetch = (request, options) => {
+  // options.signal is intended for the fetch itself, not the agent. Attaching it
+  // to the agent will re-use that signal across multiple requests, which prevents
+  // any connections beyond the first one.
+  const agent = getAgent(request.url, { ...options, signal: undefined })
+  if (!request.headers.has('connection')) {
+    request.headers.set('connection', agent ? 'keep-alive' : 'close')
+  }
+
+  if (!request.headers.has('user-agent')) {
+    request.headers.set('user-agent', USER_AGENT)
+  }
+
+  // keep our own options since we're overriding the agent
+  // and the redirect mode
+  const _opts = {
+    ...options,
+    agent,
+    redirect: 'manual',
+  }
+
+  return promiseRetry(async (retryHandler, attemptNum) => {
+    const req = new fetch.Request(request, _opts)
+    try {
+      let res = await fetch(req, _opts)
+      if (_opts.integrity && res.status === 200) {
+        // we got a 200 response and the user has specified an expected
+        // integrity value, so wrap the response in an ssri stream to verify it
+        const integrityStream = ssri.integrityStream({
+          algorithms: _opts.algorithms,
+          integrity: _opts.integrity,
+          size: _opts.size,
+        })
+        const pipeline = new CachingMinipassPipeline({
+          events: ['integrity', 'size'],
+        }, res.body, integrityStream)
+        // we also propagate the integrity and size events out to the pipeline so we can use
+        // this new response body as an integrityEmitter for cacache
+        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
+        integrityStream.on('size', s => pipeline.emit('size', s))
+        res = new fetch.Response(pipeline, res)
+        // set an explicit flag so we know if our response body will emit integrity and size
+        res.body.hasIntegrityEmitter = true
+      }
+
+      res.headers.set('x-fetch-attempts', attemptNum)
+
+      // do not retry POST requests, or requests with a streaming body
+      // do retry requests with a 408, 420, 429 or 500+ status in the response
+      const isStream = Minipass.isStream(req.body)
+      const isRetriable = req.method !== 'POST' &&
+          !isStream &&
+          ([408, 420, 429].includes(res.status) || res.status >= 500)
+
+      if (isRetriable) {
+        if (typeof options.onRetry === 'function') {
+          options.onRetry(res)
+        }
+
+        /* eslint-disable-next-line max-len */
+        log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
+        return retryHandler(res)
+      }
+
+      return res
+    } catch (err) {
+      const code = (err.code === 'EPROMISERETRY')
+        ? err.retried.code
+        : err.code
+
+      // err.retried will be the thing that was thrown from above
+      // if it's a response, we just got a bad status code and we
+      // can re-throw to allow the retry
+      const isRetryError = err.retried instanceof fetch.Response ||
+        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
+
+      if (req.method === 'POST' || isRetryError) {
+        throw err
+      }
+
+      if (typeof options.onRetry === 'function') {
+        options.onRetry(err)
+      }
+
+      log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
+      return retryHandler(err)
+    }
+  }, options.retry).catch((err) => {
+    // don't reject for http errors, just return them
+    if (err.status >= 400 && err.type !== 'system') {
+      return err
+    }
+
+    throw err
+  })
+}
+
+module.exports = remoteFetch
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/package.json b/node_modules/sigstore/node_modules/make-fetch-happen/package.json
new file mode 100644
index 0000000000000..1e27d4ee8a70e
--- /dev/null
+++ b/node_modules/sigstore/node_modules/make-fetch-happen/package.json
@@ -0,0 +1,74 @@
+{
+  "name": "make-fetch-happen",
+  "version": "15.0.1",
+  "description": "Opinionated, caching, retrying fetch client",
+  "main": "lib/index.js",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "scripts": {
+    "test": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
+    "lint": "npm run eslint",
+    "lintfix": "npm run eslint -- --fix",
+    "postlint": "template-oss-check",
+    "snap": "tap",
+    "template-oss-apply": "template-oss-apply --force"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/make-fetch-happen.git"
+  },
+  "keywords": [
+    "http",
+    "request",
+    "fetch",
+    "mean girls",
+    "caching",
+    "cache",
+    "subresource integrity"
+  ],
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "dependencies": {
+    "@npmcli/agent": "^3.0.0",
+    "cacache": "^20.0.1",
+    "http-cache-semantics": "^4.1.1",
+    "minipass": "^7.0.2",
+    "minipass-fetch": "^4.0.0",
+    "minipass-flush": "^1.0.5",
+    "minipass-pipeline": "^1.2.4",
+    "negotiator": "^1.0.0",
+    "proc-log": "^5.0.0",
+    "promise-retry": "^2.0.1",
+    "ssri": "^12.0.0"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "nock": "^13.2.4",
+    "safe-buffer": "^5.2.1",
+    "standard-version": "^9.3.2",
+    "tap": "^16.0.0"
+  },
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "tap": {
+    "color": 1,
+    "files": "test/*.js",
+    "check-coverage": true,
+    "timeout": 60,
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  }
+}
diff --git a/node_modules/sigstore/node_modules/negotiator/HISTORY.md b/node_modules/sigstore/node_modules/negotiator/HISTORY.md
new file mode 100644
index 0000000000000..63d537d3f6811
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/HISTORY.md
@@ -0,0 +1,114 @@
+1.0.0 / 2024-08-31
+==================
+
+  * Drop support for node <18
+  * Added an option to specify a preferred encodings array #59
+
+0.6.3 / 2022-01-22
+==================
+
+  * Revert "Lazy-load modules from main entry point"
+
+0.6.2 / 2019-04-29
+==================
+
+  * Fix sorting charset, encoding, and language with extra parameters
+
+0.6.1 / 2016-05-02
+==================
+
+  * perf: improve `Accept` parsing speed
+  * perf: improve `Accept-Charset` parsing speed
+  * perf: improve `Accept-Encoding` parsing speed
+  * perf: improve `Accept-Language` parsing speed
+
+0.6.0 / 2015-09-29
+==================
+
+  * Fix including type extensions in parameters in `Accept` parsing
+  * Fix parsing `Accept` parameters with quoted equals
+  * Fix parsing `Accept` parameters with quoted semicolons
+  * Lazy-load modules from main entry point
+  * perf: delay type concatenation until needed
+  * perf: enable strict mode
+  * perf: hoist regular expressions
+  * perf: remove closures getting spec properties
+  * perf: remove a closure from media type parsing
+  * perf: remove property delete from media type parsing
+
+0.5.3 / 2015-05-10
+==================
+
+  * Fix media type parameter matching to be case-insensitive
+
+0.5.2 / 2015-05-06
+==================
+
+  * Fix comparing media types with quoted values
+  * Fix splitting media types with quoted commas
+
+0.5.1 / 2015-02-14
+==================
+
+  * Fix preference sorting to be stable for long acceptable lists
+
+0.5.0 / 2014-12-18
+==================
+
+  * Fix list return order when large accepted list
+  * Fix missing identity encoding when q=0 exists
+  * Remove dynamic building of Negotiator class
+
+0.4.9 / 2014-10-14
+==================
+
+  * Fix error when media type has invalid parameter
+
+0.4.8 / 2014-09-28
+==================
+
+  * Fix all negotiations to be case-insensitive
+  * Stable sort preferences of same quality according to client order
+  * Support Node.js 0.6
+
+0.4.7 / 2014-06-24
+==================
+
+  * Handle invalid provided languages
+  * Handle invalid provided media types
+
+0.4.6 / 2014-06-11
+==================
+
+  *  Order by specificity when quality is the same
+
+0.4.5 / 2014-05-29
+==================
+
+  * Fix regression in empty header handling
+
+0.4.4 / 2014-05-29
+==================
+
+  * Fix behaviors when headers are not present
+
+0.4.3 / 2014-04-16
+==================
+
+  * Handle slashes on media params correctly
+
+0.4.2 / 2014-02-28
+==================
+
+  * Fix media type sorting
+  * Handle media types params strictly
+
+0.4.1 / 2014-01-16
+==================
+
+  * Use most specific matches
+
+0.4.0 / 2014-01-09
+==================
+
+  * Remove preferred prefix from methods
diff --git a/node_modules/sigstore/node_modules/negotiator/LICENSE b/node_modules/sigstore/node_modules/negotiator/LICENSE
new file mode 100644
index 0000000000000..ea6b9e2e9ac25
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2012-2014 Federico Romero
+Copyright (c) 2012-2014 Isaac Z. Schlueter
+Copyright (c) 2014-2015 Douglas Christopher Wilson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/negotiator/index.js b/node_modules/sigstore/node_modules/negotiator/index.js
new file mode 100644
index 0000000000000..4f51315d6af4b
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/index.js
@@ -0,0 +1,83 @@
+/*!
+ * negotiator
+ * Copyright(c) 2012 Federico Romero
+ * Copyright(c) 2012-2014 Isaac Z. Schlueter
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+var preferredCharsets = require('./lib/charset')
+var preferredEncodings = require('./lib/encoding')
+var preferredLanguages = require('./lib/language')
+var preferredMediaTypes = require('./lib/mediaType')
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = Negotiator;
+module.exports.Negotiator = Negotiator;
+
+/**
+ * Create a Negotiator instance from a request.
+ * @param {object} request
+ * @public
+ */
+
+function Negotiator(request) {
+  if (!(this instanceof Negotiator)) {
+    return new Negotiator(request);
+  }
+
+  this.request = request;
+}
+
+Negotiator.prototype.charset = function charset(available) {
+  var set = this.charsets(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.charsets = function charsets(available) {
+  return preferredCharsets(this.request.headers['accept-charset'], available);
+};
+
+Negotiator.prototype.encoding = function encoding(available, opts) {
+  var set = this.encodings(available, opts);
+  return set && set[0];
+};
+
+Negotiator.prototype.encodings = function encodings(available, options) {
+  var opts = options || {};
+  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
+};
+
+Negotiator.prototype.language = function language(available) {
+  var set = this.languages(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.languages = function languages(available) {
+  return preferredLanguages(this.request.headers['accept-language'], available);
+};
+
+Negotiator.prototype.mediaType = function mediaType(available) {
+  var set = this.mediaTypes(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.mediaTypes = function mediaTypes(available) {
+  return preferredMediaTypes(this.request.headers.accept, available);
+};
+
+// Backwards compatibility
+Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
+Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
+Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
+Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
+Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
+Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
+Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
+Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/charset.js b/node_modules/sigstore/node_modules/negotiator/lib/charset.js
new file mode 100644
index 0000000000000..cdd014803474a
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/lib/charset.js
@@ -0,0 +1,169 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredCharsets;
+module.exports.preferredCharsets = preferredCharsets;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Charset header.
+ * @private
+ */
+
+function parseAcceptCharset(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var charset = parseCharset(accepts[i].trim(), i);
+
+    if (charset) {
+      accepts[j++] = charset;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a charset from the Accept-Charset header.
+ * @private
+ */
+
+function parseCharset(str, i) {
+  var match = simpleCharsetRegExp.exec(str);
+  if (!match) return null;
+
+  var charset = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    charset: charset,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a charset.
+ * @private
+ */
+
+function getCharsetPriority(charset, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(charset, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the charset.
+ * @private
+ */
+
+function specify(charset, spec, index) {
+  var s = 0;
+  if(spec.charset.toLowerCase() === charset.toLowerCase()){
+    s |= 1;
+  } else if (spec.charset !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+}
+
+/**
+ * Get the preferred charsets from an Accept-Charset header.
+ * @public
+ */
+
+function preferredCharsets(accept, provided) {
+  // RFC 2616 sec 14.2: no header = *
+  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all charsets
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullCharset);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getCharsetPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted charsets
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full charset string.
+ * @private
+ */
+
+function getFullCharset(spec) {
+  return spec.charset;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/encoding.js b/node_modules/sigstore/node_modules/negotiator/lib/encoding.js
new file mode 100644
index 0000000000000..9ebb633d67743
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/lib/encoding.js
@@ -0,0 +1,205 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredEncodings;
+module.exports.preferredEncodings = preferredEncodings;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Encoding header.
+ * @private
+ */
+
+function parseAcceptEncoding(accept) {
+  var accepts = accept.split(',');
+  var hasIdentity = false;
+  var minQuality = 1;
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var encoding = parseEncoding(accepts[i].trim(), i);
+
+    if (encoding) {
+      accepts[j++] = encoding;
+      hasIdentity = hasIdentity || specify('identity', encoding);
+      minQuality = Math.min(minQuality, encoding.q || 1);
+    }
+  }
+
+  if (!hasIdentity) {
+    /*
+     * If identity doesn't explicitly appear in the accept-encoding header,
+     * it's added to the list of acceptable encodings with the lowest q
+     */
+    accepts[j++] = {
+      encoding: 'identity',
+      q: minQuality,
+      i: i
+    };
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse an encoding from the Accept-Encoding header.
+ * @private
+ */
+
+function parseEncoding(str, i) {
+  var match = simpleEncodingRegExp.exec(str);
+  if (!match) return null;
+
+  var encoding = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    encoding: encoding,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of an encoding.
+ * @private
+ */
+
+function getEncodingPriority(encoding, accepted, index) {
+  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(encoding, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the encoding.
+ * @private
+ */
+
+function specify(encoding, spec, index) {
+  var s = 0;
+  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
+    s |= 1;
+  } else if (spec.encoding !== '*' ) {
+    return null
+  }
+
+  return {
+    encoding: encoding,
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred encodings from an Accept-Encoding header.
+ * @public
+ */
+
+function preferredEncodings(accept, provided, preferred) {
+  var accepts = parseAcceptEncoding(accept || '');
+
+  var comparator = preferred ? function comparator (a, b) {
+    if (a.q !== b.q) {
+      return b.q - a.q // higher quality first
+    }
+
+    var aPreferred = preferred.indexOf(a.encoding)
+    var bPreferred = preferred.indexOf(b.encoding)
+
+    if (aPreferred === -1 && bPreferred === -1) {
+      // consider the original specificity/order
+      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+    }
+
+    if (aPreferred !== -1 && bPreferred !== -1) {
+      return aPreferred - bPreferred // consider the preferred order
+    }
+
+    return aPreferred === -1 ? 1 : -1 // preferred first
+  } : compareSpecs;
+
+  if (!provided) {
+    // sorted list of all encodings
+    return accepts
+      .filter(isQuality)
+      .sort(comparator)
+      .map(getFullEncoding);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getEncodingPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted encodings
+  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
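+
+/**
+ * Illustrative example: with the header
+ *   'gzip;q=1.0, identity;q=0.5, *;q=0'
+ * and provided ['identity', 'gzip'], the result is ['gzip', 'identity'],
+ * since gzip carries the higher quality value.
+ */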
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
+}
+
+/**
+ * Get full encoding string.
+ * @private
+ */
+
+function getFullEncoding(spec) {
+  return spec.encoding;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/language.js b/node_modules/sigstore/node_modules/negotiator/lib/language.js
new file mode 100644
index 0000000000000..a23167252719b
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/lib/language.js
@@ -0,0 +1,179 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredLanguages;
+module.exports.preferredLanguages = preferredLanguages;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Language header.
+ * @private
+ */
+
+function parseAcceptLanguage(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var language = parseLanguage(accepts[i].trim(), i);
+
+    if (language) {
+      accepts[j++] = language;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a language from the Accept-Language header.
+ * @private
+ */
+
+function parseLanguage(str, i) {
+  var match = simpleLanguageRegExp.exec(str);
+  if (!match) return null;
+
+  var prefix = match[1]
+  var suffix = match[2]
+  var full = prefix
+
+  if (suffix) full += "-" + suffix;
+
+  var q = 1;
+  if (match[3]) {
+    var params = match[3].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].split('=');
+      if (p[0] === 'q') q = parseFloat(p[1]);
+    }
+  }
+
+  return {
+    prefix: prefix,
+    suffix: suffix,
+    q: q,
+    i: i,
+    full: full
+  };
+}
+
+/**
+ * Get the priority of a language.
+ * @private
+ */
+
+function getLanguagePriority(language, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(language, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the language.
+ * @private
+ */
+
+function specify(language, spec, index) {
+  var p = parseLanguage(language)
+  if (!p) return null;
+  var s = 0;
+  if(spec.full.toLowerCase() === p.full.toLowerCase()){
+    s |= 4;
+  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
+    s |= 2;
+  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
+    s |= 1;
+  } else if (spec.full !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred languages from an Accept-Language header.
+ * @public
+ */
+
+function preferredLanguages(accept, provided) {
+  // RFC 2616 sec 14.4: no header = *
+  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all languages
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullLanguage);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getLanguagePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted languages
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full language string.
+ * @private
+ */
+
+function getFullLanguage(spec) {
+  return spec.full;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/mediaType.js b/node_modules/sigstore/node_modules/negotiator/lib/mediaType.js
new file mode 100644
index 0000000000000..8e402ea88394c
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/lib/mediaType.js
@@ -0,0 +1,294 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredMediaTypes;
+module.exports.preferredMediaTypes = preferredMediaTypes;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept header.
+ * @private
+ */
+
+function parseAccept(accept) {
+  var accepts = splitMediaTypes(accept);
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var mediaType = parseMediaType(accepts[i].trim(), i);
+
+    if (mediaType) {
+      accepts[j++] = mediaType;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a media type from the Accept header.
+ * @private
+ */
+
+function parseMediaType(str, i) {
+  var match = simpleMediaTypeRegExp.exec(str);
+  if (!match) return null;
+
+  var params = Object.create(null);
+  var q = 1;
+  var subtype = match[2];
+  var type = match[1];
+
+  if (match[3]) {
+    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
+
+    for (var j = 0; j < kvps.length; j++) {
+      var pair = kvps[j];
+      var key = pair[0].toLowerCase();
+      var val = pair[1];
+
+      // get the value, unwrapping quotes
+      var value = val && val[0] === '"' && val[val.length - 1] === '"'
+        ? val.slice(1, -1)
+        : val;
+
+      if (key === 'q') {
+        q = parseFloat(value);
+        break;
+      }
+
+      // store parameter
+      params[key] = value;
+    }
+  }
+
+  return {
+    type: type,
+    subtype: subtype,
+    params: params,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a media type.
+ * @private
+ */
+
+function getMediaTypePriority(type, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(type, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the media type.
+ * @private
+ */
+
+function specify(type, spec, index) {
+  var p = parseMediaType(type);
+  var s = 0;
+
+  if (!p) {
+    return null;
+  }
+
+  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
+    s |= 4
+  } else if(spec.type != '*') {
+    return null;
+  }
+
+  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
+    s |= 2
+  } else if(spec.subtype != '*') {
+    return null;
+  }
+
+  var keys = Object.keys(spec.params);
+  if (keys.length > 0) {
+    if (keys.every(function (k) {
+      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
+    })) {
+      s |= 1
+    } else {
+      return null
+    }
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s,
+  }
+}
+
+/**
+ * Get the preferred media types from an Accept header.
+ * @public
+ */
+
+function preferredMediaTypes(accept, provided) {
+  // RFC 2616 sec 14.2: no header = */*
+  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all types
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullType);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getMediaTypePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted types
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full type string.
+ * @private
+ */
+
+function getFullType(spec) {
+  return spec.type + '/' + spec.subtype;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
+
+/**
+ * Count the number of quotes in a string.
+ * @private
+ */
+
+function quoteCount(string) {
+  var count = 0;
+  var index = 0;
+
+  while ((index = string.indexOf('"', index)) !== -1) {
+    count++;
+    index++;
+  }
+
+  return count;
+}
+
+/**
+ * Split a key value pair.
+ * @private
+ */
+
+function splitKeyValuePair(str) {
+  var index = str.indexOf('=');
+  var key;
+  var val;
+
+  if (index === -1) {
+    key = str;
+  } else {
+    key = str.slice(0, index);
+    val = str.slice(index + 1);
+  }
+
+  return [key, val];
+}
+
+/**
+ * Split an Accept header into media types.
+ * @private
+ */
+
+function splitMediaTypes(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 1, j = 0; i < accepts.length; i++) {
+    if (quoteCount(accepts[j]) % 2 == 0) {
+      accepts[++j] = accepts[i];
+    } else {
+      accepts[j] += ',' + accepts[i];
+    }
+  }
+
+  // trim accepts
+  accepts.length = j + 1;
+
+  return accepts;
+}
+
+/**
+ * Split a string of parameters.
+ * @private
+ */
+
+function splitParameters(str) {
+  var parameters = str.split(';');
+
+  for (var i = 1, j = 0; i < parameters.length; i++) {
+    if (quoteCount(parameters[j]) % 2 == 0) {
+      parameters[++j] = parameters[i];
+    } else {
+      parameters[j] += ';' + parameters[i];
+    }
+  }
+
+  // trim parameters
+  parameters.length = j + 1;
+
+  for (var i = 0; i < parameters.length; i++) {
+    parameters[i] = parameters[i].trim();
+  }
+
+  return parameters;
+}
diff --git a/node_modules/sigstore/node_modules/negotiator/package.json b/node_modules/sigstore/node_modules/negotiator/package.json
new file mode 100644
index 0000000000000..e4bdc1ef4f748
--- /dev/null
+++ b/node_modules/sigstore/node_modules/negotiator/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "negotiator",
+  "description": "HTTP content negotiation",
+  "version": "1.0.0",
+  "contributors": [
+    "Douglas Christopher Wilson ",
+    "Federico Romero ",
+    "Isaac Z. Schlueter  (http://blog.izs.me/)"
+  ],
+  "license": "MIT",
+  "keywords": [
+    "http",
+    "content negotiation",
+    "accept",
+    "accept-language",
+    "accept-encoding",
+    "accept-charset"
+  ],
+  "repository": "jshttp/negotiator",
+  "devDependencies": {
+    "eslint": "7.32.0",
+    "eslint-plugin-markdown": "2.2.1",
+    "mocha": "9.1.3",
+    "nyc": "15.1.0"
+  },
+  "files": [
+    "lib/",
+    "HISTORY.md",
+    "LICENSE",
+    "index.js",
+    "README.md"
+  ],
+  "engines": {
+    "node": ">= 0.6"
+  },
+  "scripts": {
+    "lint": "eslint .",
+    "test": "mocha --reporter spec --check-leaks --bail test/",
+    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
+    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+    "test-cov": "nyc --reporter=html --reporter=text npm test"
+  }
+}
diff --git a/node_modules/pacote/node_modules/tuf-js/LICENSE b/node_modules/sigstore/node_modules/tuf-js/LICENSE
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/LICENSE
rename to node_modules/sigstore/node_modules/tuf-js/LICENSE
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/config.js b/node_modules/sigstore/node_modules/tuf-js/dist/config.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/config.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/config.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/error.js b/node_modules/sigstore/node_modules/tuf-js/dist/error.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/error.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/error.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/fetcher.js b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/fetcher.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/index.js b/node_modules/sigstore/node_modules/tuf-js/dist/index.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/index.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/index.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/store.js b/node_modules/sigstore/node_modules/tuf-js/dist/store.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/store.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/store.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/updater.js b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/updater.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/updater.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/utils/tmpfile.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
diff --git a/node_modules/pacote/node_modules/tuf-js/dist/utils/url.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/dist/utils/url.js
rename to node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
diff --git a/node_modules/pacote/node_modules/tuf-js/package.json b/node_modules/sigstore/node_modules/tuf-js/package.json
similarity index 100%
rename from node_modules/pacote/node_modules/tuf-js/package.json
rename to node_modules/sigstore/node_modules/tuf-js/package.json
diff --git a/node_modules/sigstore/package.json b/node_modules/sigstore/package.json
index dab40a8ea8fbc..b036dc787c75c 100644
--- a/node_modules/sigstore/package.json
+++ b/node_modules/sigstore/package.json
@@ -1,6 +1,6 @@
 {
   "name": "sigstore",
-  "version": "3.1.0",
+  "version": "4.0.0",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -27,21 +27,21 @@
     "provenance": true
   },
   "devDependencies": {
-    "@sigstore/rekor-types": "^3.0.0",
+    "@sigstore/rekor-types": "^4.0.0",
     "@sigstore/jest": "^0.0.0",
-    "@sigstore/mock": "^0.10.0",
+    "@sigstore/mock": "^0.11.0",
     "@tufjs/repo-mock": "^3.0.1",
     "@types/make-fetch-happen": "^10.0.4"
   },
   "dependencies": {
-    "@sigstore/bundle": "^3.1.0",
-    "@sigstore/core": "^2.0.0",
-    "@sigstore/protobuf-specs": "^0.4.0",
-    "@sigstore/sign": "^3.1.0",
-    "@sigstore/tuf": "^3.1.0",
-    "@sigstore/verify": "^2.1.0"
+    "@sigstore/bundle": "^4.0.0",
+    "@sigstore/core": "^3.0.0",
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "@sigstore/sign": "^4.0.0",
+    "@sigstore/tuf": "^4.0.0",
+    "@sigstore/verify": "^3.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 2363571976be7..c67a540b65760 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -5209,24 +5209,36 @@
       "license": "MIT"
     },
     "node_modules/@sigstore/bundle": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.1.0.tgz",
-      "integrity": "sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz",
+      "integrity": "sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==",
+      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@sigstore/protobuf-specs": "^0.4.0"
+        "@sigstore/protobuf-specs": "^0.5.0"
       },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
+      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
+      "inBundle": true,
+      "license": "Apache-2.0",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
     "node_modules/@sigstore/core": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz",
-      "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.0.0.tgz",
+      "integrity": "sha512-NgbJ+aW9gQl/25+GIEGYcCyi8M+ng2/5X04BMuIgoDfgvp18vDcoNHOQjQsG9418HGNYRxG3vfEXaR1ayD37gg==",
+      "inBundle": true,
       "license": "Apache-2.0",
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@sigstore/protobuf-specs": {
@@ -5240,22 +5252,66 @@
       }
     },
     "node_modules/@sigstore/sign": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.1.0.tgz",
-      "integrity": "sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-4.0.0.tgz",
+      "integrity": "sha512-5+IadiqPzRRMfvftHONzpeH2EzlDNuBiTMC3Lx7+9tLqn/4xbWVfSZA+YaOzKCn86k5BWfJ+aGO9v+pQmIyxqQ==",
+      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@sigstore/bundle": "^3.1.0",
-        "@sigstore/core": "^2.0.0",
-        "@sigstore/protobuf-specs": "^0.4.0",
-        "make-fetch-happen": "^14.0.2",
+        "@sigstore/bundle": "^4.0.0",
+        "@sigstore/core": "^3.0.0",
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "make-fetch-happen": "^15.0.0",
         "proc-log": "^5.0.0",
         "promise-retry": "^2.0.1"
       },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
+      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
+      "inBundle": true,
+      "license": "Apache-2.0",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@sigstore/sign/node_modules/make-fetch-happen": {
+      "version": "15.0.1",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
+      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/agent": "^3.0.0",
+        "cacache": "^20.0.1",
+        "http-cache-semantics": "^4.1.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^1.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "ssri": "^12.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@sigstore/sign/node_modules/negotiator": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
     "node_modules/@sigstore/tuf": {
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.1.1.tgz",
@@ -5271,15 +5327,26 @@
       }
     },
     "node_modules/@sigstore/verify": {
-      "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.1.1.tgz",
-      "integrity": "sha512-hVJD77oT67aowHxwT4+M6PGOp+E2LtLdTK3+FC0lBO9T7sYwItDMXZ7Z07IDCvR1M717a4axbIWckrW67KMP/w==",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-3.0.0.tgz",
+      "integrity": "sha512-moXtHH33AobOhTZF8xcX1MpOFqdvfCk7v6+teJL8zymBiDXwEsQH6XG9HGx2VIxnJZNm4cNSzflTLDnQLmIdmw==",
+      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@sigstore/bundle": "^3.1.0",
-        "@sigstore/core": "^2.0.0",
-        "@sigstore/protobuf-specs": "^0.4.1"
+        "@sigstore/bundle": "^4.0.0",
+        "@sigstore/core": "^3.0.0",
+        "@sigstore/protobuf-specs": "^0.5.0"
       },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
+      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
+      "inBundle": true,
+      "license": "Apache-2.0",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
@@ -13611,100 +13678,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/@sigstore/bundle": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz",
-      "integrity": "sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.5.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@sigstore/core": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.0.0.tgz",
-      "integrity": "sha512-NgbJ+aW9gQl/25+GIEGYcCyi8M+ng2/5X04BMuIgoDfgvp18vDcoNHOQjQsG9418HGNYRxG3vfEXaR1ayD37gg==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.5.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
-      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@sigstore/sign": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-4.0.0.tgz",
-      "integrity": "sha512-5+IadiqPzRRMfvftHONzpeH2EzlDNuBiTMC3Lx7+9tLqn/4xbWVfSZA+YaOzKCn86k5BWfJ+aGO9v+pQmIyxqQ==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/bundle": "^4.0.0",
-        "@sigstore/core": "^3.0.0",
-        "@sigstore/protobuf-specs": "^0.5.0",
-        "make-fetch-happen": "^15.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@sigstore/tuf": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
-      "integrity": "sha512-0QFuWDHOQmz7t66gfpfNO6aEjoFrdhkJaej/AOqb4kqWZVbPWFZifXZzkxyQBB1OwTbkhdT3LNpMFxwkTvf+2w==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.5.0",
-        "tuf-js": "^4.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@sigstore/verify": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-3.0.0.tgz",
-      "integrity": "sha512-moXtHH33AobOhTZF8xcX1MpOFqdvfCk7v6+teJL8zymBiDXwEsQH6XG9HGx2VIxnJZNm4cNSzflTLDnQLmIdmw==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/bundle": "^4.0.0",
-        "@sigstore/core": "^3.0.0",
-        "@sigstore/protobuf-specs": "^0.5.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/pacote/node_modules/@tufjs/models": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
-      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/canonical-json": "2.0.0",
-        "minimatch": "^9.0.5"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/chownr": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
@@ -13738,29 +13711,6 @@
         "node": "20 || >=22"
       }
     },
-    "node_modules/pacote/node_modules/make-fetch-happen": {
-      "version": "15.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
-      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^20.0.1",
-        "http-cache-semantics": "^4.1.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -13790,16 +13740,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/pacote/node_modules/negotiator": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/pacote/node_modules/npm-package-arg": {
       "version": "13.0.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
@@ -13832,24 +13772,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/sigstore": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.0.0.tgz",
-      "integrity": "sha512-Gw/FgHtrLM9WP8P5lLcSGh9OQcrTruWCELAiS48ik1QbL0cH+dfjomiRTUE9zzz+D1N6rOLkwXUvVmXZAsNE0Q==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/bundle": "^4.0.0",
-        "@sigstore/core": "^3.0.0",
-        "@sigstore/protobuf-specs": "^0.5.0",
-        "@sigstore/sign": "^4.0.0",
-        "@sigstore/tuf": "^4.0.0",
-        "@sigstore/verify": "^3.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/tar": {
       "version": "7.4.3",
       "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
@@ -13868,21 +13790,6 @@
         "node": ">=18"
       }
     },
-    "node_modules/pacote/node_modules/tuf-js": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.0.0.tgz",
-      "integrity": "sha512-Lq7ieeGvXDXwpoSmOSgLWVdsGGV9J4a77oDTAPe/Ltrqnnm/ETaRlBAQTH5JatEh8KXuE6sddf9qAv1Q2282Hg==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/models": "4.0.0",
-        "debug": "^4.4.1",
-        "make-fetch-happen": "^15.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/yallist": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
@@ -15354,22 +15261,109 @@
       }
     },
     "node_modules/sigstore": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.1.0.tgz",
-      "integrity": "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.0.0.tgz",
+      "integrity": "sha512-Gw/FgHtrLM9WP8P5lLcSGh9OQcrTruWCELAiS48ik1QbL0cH+dfjomiRTUE9zzz+D1N6rOLkwXUvVmXZAsNE0Q==",
+      "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@sigstore/bundle": "^3.1.0",
-        "@sigstore/core": "^2.0.0",
-        "@sigstore/protobuf-specs": "^0.4.0",
-        "@sigstore/sign": "^3.1.0",
-        "@sigstore/tuf": "^3.1.0",
-        "@sigstore/verify": "^2.1.0"
+        "@sigstore/bundle": "^4.0.0",
+        "@sigstore/core": "^3.0.0",
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "@sigstore/sign": "^4.0.0",
+        "@sigstore/tuf": "^4.0.0",
+        "@sigstore/verify": "^3.0.0"
       },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/@sigstore/protobuf-specs": {
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
+      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
+      "inBundle": true,
+      "license": "Apache-2.0",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/sigstore/node_modules/@sigstore/tuf": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
+      "integrity": "sha512-0QFuWDHOQmz7t66gfpfNO6aEjoFrdhkJaej/AOqb4kqWZVbPWFZifXZzkxyQBB1OwTbkhdT3LNpMFxwkTvf+2w==",
+      "inBundle": true,
+      "license": "Apache-2.0",
+      "dependencies": {
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "tuf-js": "^4.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/@tufjs/models": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
+      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "@tufjs/canonical-json": "2.0.0",
+        "minimatch": "^9.0.5"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/make-fetch-happen": {
+      "version": "15.0.1",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
+      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/agent": "^3.0.0",
+        "cacache": "^20.0.1",
+        "http-cache-semantics": "^4.1.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^1.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "ssri": "^12.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/sigstore/node_modules/negotiator": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
+    "node_modules/sigstore/node_modules/tuf-js": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.0.0.tgz",
+      "integrity": "sha512-Lq7ieeGvXDXwpoSmOSgLWVdsGGV9J4a77oDTAPe/Ltrqnnm/ETaRlBAQTH5JatEh8KXuE6sddf9qAv1Q2282Hg==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "@tufjs/models": "4.0.0",
+        "debug": "^4.4.1",
+        "make-fetch-happen": "^15.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
     "node_modules/smart-buffer": {
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
@@ -19870,7 +19864,7 @@
         "npm-registry-fetch": "^19.0.0",
         "proc-log": "^5.0.0",
         "semver": "^7.3.7",
-        "sigstore": "^3.0.0",
+        "sigstore": "^4.0.0",
         "ssri": "^12.0.0"
       },
       "devDependencies": {
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 134cfb7f14b72..68b2997649a77 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -44,7 +44,7 @@
     "npm-registry-fetch": "^19.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.3.7",
-    "sigstore": "^3.0.0",
+    "sigstore": "^4.0.0",
     "ssri": "^12.0.0"
   },
   "engines": {

From 1f85f94ec2e5dcf295c68c02b21d0b830b2082c2 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:45:30 -0700
Subject: [PATCH 15/63] deps: @sigstore/tuf@4.0.0

---
 node_modules/.gitignore                       |   33 +-
 .../@sigstore/protobuf-specs/LICENSE          |  202 --
 .../dist/__generated__/envelope.js            |   59 -
 .../dist/__generated__/events.js              |  174 --
 .../google/api/field_behavior.js              |  141 --
 .../dist/__generated__/google/protobuf/any.js |   35 -
 .../google/protobuf/descriptor.js             | 2042 -----------------
 .../google/protobuf/timestamp.js              |   29 -
 .../dist/__generated__/rekor/v2/dsse.js       |   55 -
 .../dist/__generated__/rekor/v2/entry.js      |   81 -
 .../__generated__/rekor/v2/hashedrekord.js    |   56 -
 .../dist/__generated__/rekor/v2/verifier.js   |   74 -
 .../dist/__generated__/sigstore_bundle.js     |  103 -
 .../dist/__generated__/sigstore_common.js     |  596 -----
 .../dist/__generated__/sigstore_rekor.js      |  137 --
 .../dist/__generated__/sigstore_trustroot.js  |  284 ---
 .../__generated__/sigstore_verification.js    |  281 ---
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../@sigstore/protobuf-specs/package.json     |   35 -
 .../dist/__generated__/envelope.js            |    2 +-
 .../dist/__generated__/events.js              |    2 +-
 .../google/api/field_behavior.js              |    2 +-
 .../dist/__generated__/google/protobuf/any.js |    2 +-
 .../google/protobuf/descriptor.js             |    2 +-
 .../google/protobuf/timestamp.js              |    2 +-
 .../dist/__generated__/rekor/v2/dsse.js       |    2 +-
 .../dist/__generated__/rekor/v2/entry.js      |    2 +-
 .../__generated__/rekor/v2/hashedrekord.js    |    2 +-
 .../dist/__generated__/rekor/v2/verifier.js   |    2 +-
 .../dist/__generated__/sigstore_bundle.js     |    2 +-
 .../dist/__generated__/sigstore_common.js     |    2 +-
 .../dist/__generated__/sigstore_rekor.js      |    2 +-
 .../dist/__generated__/sigstore_trustroot.js  |    2 +-
 .../__generated__/sigstore_verification.js    |    2 +-
 .../protobuf-specs/dist/rekor/v2/index.js     |    0
 .../@sigstore/protobuf-specs/package.json     |    6 +-
 .../@sigstore/protobuf-specs/LICENSE          |  202 --
 .../dist/__generated__/envelope.js            |   59 -
 .../dist/__generated__/events.js              |  174 --
 .../google/api/field_behavior.js              |  141 --
 .../dist/__generated__/google/protobuf/any.js |   35 -
 .../google/protobuf/descriptor.js             | 2042 -----------------
 .../google/protobuf/timestamp.js              |   29 -
 .../dist/__generated__/rekor/v2/dsse.js       |   55 -
 .../dist/__generated__/rekor/v2/entry.js      |   81 -
 .../__generated__/rekor/v2/hashedrekord.js    |   56 -
 .../dist/__generated__/rekor/v2/verifier.js   |   74 -
 .../dist/__generated__/sigstore_bundle.js     |  103 -
 .../dist/__generated__/sigstore_common.js     |  596 -----
 .../dist/__generated__/sigstore_rekor.js      |  137 --
 .../dist/__generated__/sigstore_trustroot.js  |  284 ---
 .../__generated__/sigstore_verification.js    |  281 ---
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 -
 .../@sigstore/protobuf-specs/package.json     |   35 -
 node_modules/@sigstore/tuf/dist/client.js     |    2 +
 node_modules/@sigstore/tuf/package.json       |    8 +-
 node_modules/@sigstore/tuf/seeds.json         |    2 +-
 .../@sigstore/protobuf-specs/LICENSE          |  202 --
 .../dist/__generated__/envelope.js            |   59 -
 .../dist/__generated__/events.js              |  174 --
 .../google/api/field_behavior.js              |  141 --
 .../dist/__generated__/google/protobuf/any.js |   35 -
 .../google/protobuf/descriptor.js             | 2042 -----------------
 .../google/protobuf/timestamp.js              |   29 -
 .../dist/__generated__/rekor/v2/dsse.js       |   55 -
 .../dist/__generated__/rekor/v2/entry.js      |   81 -
 .../__generated__/rekor/v2/hashedrekord.js    |   56 -
 .../dist/__generated__/rekor/v2/verifier.js   |   74 -
 .../dist/__generated__/sigstore_bundle.js     |  103 -
 .../dist/__generated__/sigstore_common.js     |  596 -----
 .../dist/__generated__/sigstore_rekor.js      |  137 --
 .../dist/__generated__/sigstore_trustroot.js  |  284 ---
 .../__generated__/sigstore_verification.js    |  281 ---
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 -
 .../@sigstore/protobuf-specs/package.json     |   35 -
 node_modules/@tufjs/models/dist/base.js       |   92 -
 .../@tufjs/models/dist/delegations.js         |  115 -
 node_modules/@tufjs/models/dist/file.js       |  183 --
 node_modules/@tufjs/models/dist/key.js        |   85 -
 node_modules/@tufjs/models/dist/metadata.js   |  160 --
 node_modules/@tufjs/models/dist/role.js       |  299 ---
 node_modules/@tufjs/models/dist/root.js       |  116 -
 node_modules/@tufjs/models/dist/signature.js  |   38 -
 node_modules/@tufjs/models/dist/snapshot.js   |   71 -
 node_modules/@tufjs/models/dist/targets.js    |   92 -
 node_modules/@tufjs/models/dist/timestamp.js  |   58 -
 .../@tufjs/models/dist/utils/index.js         |   28 -
 node_modules/@tufjs/models/package.json       |   37 -
 .../@sigstore/protobuf-specs/LICENSE          |  202 --
 .../dist/__generated__/envelope.js            |   59 -
 .../dist/__generated__/events.js              |  174 --
 .../google/api/field_behavior.js              |  141 --
 .../dist/__generated__/google/protobuf/any.js |   35 -
 .../google/protobuf/descriptor.js             | 2042 -----------------
 .../google/protobuf/timestamp.js              |   29 -
 .../dist/__generated__/rekor/v2/dsse.js       |   55 -
 .../dist/__generated__/rekor/v2/entry.js      |   81 -
 .../__generated__/rekor/v2/hashedrekord.js    |   56 -
 .../dist/__generated__/rekor/v2/verifier.js   |   74 -
 .../dist/__generated__/sigstore_bundle.js     |  103 -
 .../dist/__generated__/sigstore_common.js     |  596 -----
 .../dist/__generated__/sigstore_rekor.js      |  137 --
 .../dist/__generated__/sigstore_trustroot.js  |  284 ---
 .../__generated__/sigstore_verification.js    |  281 ---
 .../@sigstore/protobuf-specs/dist/index.js    |   37 -
 .../protobuf-specs/dist/rekor/v2/index.js     |   35 -
 .../@sigstore/protobuf-specs/package.json     |   35 -
 .../node_modules/@sigstore/tuf/LICENSE        |  202 --
 .../@sigstore/tuf/dist/appdata.js             |   43 -
 .../node_modules/@sigstore/tuf/dist/client.js |  113 -
 .../node_modules/@sigstore/tuf/dist/error.js  |   12 -
 .../node_modules/@sigstore/tuf/dist/index.js  |   56 -
 .../node_modules/@sigstore/tuf/dist/target.js |   79 -
 .../node_modules/@sigstore/tuf/package.json   |   41 -
 .../node_modules/@sigstore/tuf/seeds.json     |    1 -
 .../node_modules/@tufjs/models/LICENSE        |   21 -
 .../node_modules/@tufjs/models/dist/error.js  |   27 -
 .../node_modules/@tufjs/models/dist/index.js  |   24 -
 .../@tufjs/models/dist/utils/guard.js         |   32 -
 .../@tufjs/models/dist/utils/key.js           |  142 --
 .../@tufjs/models/dist/utils/oid.js           |   26 -
 .../@tufjs/models/dist/utils/types.js         |    2 -
 .../@tufjs/models/dist/utils/verify.js        |   13 -
 .../sigstore/node_modules/tuf-js/LICENSE      |   21 -
 .../node_modules/tuf-js/dist/config.js        |   15 -
 .../node_modules/tuf-js/dist/error.js         |   49 -
 .../node_modules/tuf-js/dist/fetcher.js       |   86 -
 .../node_modules/tuf-js/dist/index.js         |    9 -
 .../node_modules/tuf-js/dist/store.js         |  219 --
 .../node_modules/tuf-js/dist/updater.js       |  368 ---
 .../node_modules/tuf-js/dist/utils/tmpfile.js |   25 -
 .../node_modules/tuf-js/dist/utils/url.js     |   13 -
 .../sigstore/node_modules/tuf-js/package.json |   43 -
 .../node_modules}/@tufjs/models/LICENSE       |    0
 .../node_modules/@tufjs/models/dist/base.js   |    0
 .../@tufjs/models/dist/delegations.js         |    0
 .../node_modules}/@tufjs/models/dist/error.js |    0
 .../node_modules/@tufjs/models/dist/file.js   |    0
 .../node_modules}/@tufjs/models/dist/index.js |    0
 .../node_modules/@tufjs/models/dist/key.js    |    0
 .../@tufjs/models/dist/metadata.js            |    0
 .../node_modules/@tufjs/models/dist/role.js   |    0
 .../node_modules/@tufjs/models/dist/root.js   |    0
 .../@tufjs/models/dist/signature.js           |    0
 .../@tufjs/models/dist/snapshot.js            |    0
 .../@tufjs/models/dist/targets.js             |    0
 .../@tufjs/models/dist/timestamp.js           |    0
 .../@tufjs/models/dist/utils/guard.js         |    0
 .../@tufjs/models/dist/utils/index.js         |    0
 .../@tufjs/models/dist/utils/key.js           |    0
 .../@tufjs/models/dist/utils/oid.js           |    0
 .../@tufjs/models/dist/utils/types.js         |    0
 .../@tufjs/models/dist/utils/verify.js        |    0
 .../node_modules/@tufjs/models/package.json   |    0
 .../node_modules/make-fetch-happen/LICENSE    |    0
 .../make-fetch-happen/lib/cache/entry.js      |    0
 .../make-fetch-happen/lib/cache/errors.js     |    0
 .../make-fetch-happen/lib/cache/index.js      |    0
 .../make-fetch-happen/lib/cache/key.js        |    0
 .../make-fetch-happen/lib/cache/policy.js     |    0
 .../make-fetch-happen/lib/fetch.js            |    0
 .../make-fetch-happen/lib/index.js            |    0
 .../make-fetch-happen/lib/options.js          |    0
 .../make-fetch-happen/lib/pipeline.js         |    0
 .../make-fetch-happen/lib/remote.js           |    0
 .../make-fetch-happen/package.json            |    0
 .../node_modules/negotiator/HISTORY.md        |    0
 .../node_modules/negotiator/LICENSE           |    0
 .../node_modules/negotiator/index.js          |    0
 .../node_modules/negotiator/lib/charset.js    |    0
 .../node_modules/negotiator/lib/encoding.js   |    0
 .../node_modules/negotiator/lib/language.js   |    0
 .../node_modules/negotiator/lib/mediaType.js  |    0
 .../node_modules/negotiator/package.json      |    0
 node_modules/tuf-js/package.json              |   10 +-
 package-lock.json                             |  197 +-
 package.json                                  |    2 +-
 179 files changed, 104 insertions(+), 21031 deletions(-)
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
 rename node_modules/@sigstore/{bundle/node_modules/@sigstore => }/protobuf-specs/dist/rekor/v2/index.js (100%)
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 delete mode 100644 node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 delete mode 100644 node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100644 node_modules/@tufjs/models/dist/base.js
 delete mode 100644 node_modules/@tufjs/models/dist/delegations.js
 delete mode 100644 node_modules/@tufjs/models/dist/file.js
 delete mode 100644 node_modules/@tufjs/models/dist/key.js
 delete mode 100644 node_modules/@tufjs/models/dist/metadata.js
 delete mode 100644 node_modules/@tufjs/models/dist/role.js
 delete mode 100644 node_modules/@tufjs/models/dist/root.js
 delete mode 100644 node_modules/@tufjs/models/dist/signature.js
 delete mode 100644 node_modules/@tufjs/models/dist/snapshot.js
 delete mode 100644 node_modules/@tufjs/models/dist/targets.js
 delete mode 100644 node_modules/@tufjs/models/dist/timestamp.js
 delete mode 100644 node_modules/@tufjs/models/dist/utils/index.js
 delete mode 100644 node_modules/@tufjs/models/package.json
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/package.json
 delete mode 100644 node_modules/sigstore/node_modules/@sigstore/tuf/seeds.json
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
 delete mode 100644 node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/LICENSE
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/config.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/error.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/index.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/store.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/updater.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
 delete mode 100644 node_modules/sigstore/node_modules/tuf-js/package.json
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/LICENSE (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/base.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/delegations.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/error.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/file.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/index.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/key.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/metadata.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/role.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/root.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/signature.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/snapshot.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/targets.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/timestamp.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/guard.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/dist/utils/index.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/key.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/oid.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/types.js (100%)
 rename node_modules/{ => tuf-js/node_modules}/@tufjs/models/dist/utils/verify.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/@tufjs/models/package.json (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/LICENSE (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/cache/entry.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/cache/errors.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/cache/index.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/cache/key.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/cache/policy.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/fetch.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/index.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/options.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/pipeline.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/lib/remote.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/make-fetch-happen/package.json (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/HISTORY.md (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/LICENSE (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/index.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/lib/charset.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/lib/encoding.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/lib/language.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/lib/mediaType.js (100%)
 rename node_modules/{sigstore => tuf-js}/node_modules/negotiator/package.json (100%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 2875bd6e9071d..96b8e7707c35e 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -57,32 +57,18 @@
 !/@sigstore/
 /@sigstore/*
 !/@sigstore/bundle
-!/@sigstore/bundle/node_modules/
-/@sigstore/bundle/node_modules/*
-!/@sigstore/bundle/node_modules/@sigstore/
-/@sigstore/bundle/node_modules/@sigstore/*
-!/@sigstore/bundle/node_modules/@sigstore/protobuf-specs
 !/@sigstore/core
 !/@sigstore/protobuf-specs
 !/@sigstore/sign
 !/@sigstore/sign/node_modules/
 /@sigstore/sign/node_modules/*
-!/@sigstore/sign/node_modules/@sigstore/
-/@sigstore/sign/node_modules/@sigstore/*
-!/@sigstore/sign/node_modules/@sigstore/protobuf-specs
 !/@sigstore/sign/node_modules/make-fetch-happen
 !/@sigstore/sign/node_modules/negotiator
 !/@sigstore/tuf
 !/@sigstore/verify
-!/@sigstore/verify/node_modules/
-/@sigstore/verify/node_modules/*
-!/@sigstore/verify/node_modules/@sigstore/
-/@sigstore/verify/node_modules/@sigstore/*
-!/@sigstore/verify/node_modules/@sigstore/protobuf-specs
 !/@tufjs/
 /@tufjs/*
 !/@tufjs/canonical-json
-!/@tufjs/models
 !/abbrev
 !/agent-base
 !/ansi-regex
@@ -273,18 +259,6 @@
 !/shebang-regex
 !/signal-exit
 !/sigstore
-!/sigstore/node_modules/
-/sigstore/node_modules/*
-!/sigstore/node_modules/@sigstore/
-/sigstore/node_modules/@sigstore/*
-!/sigstore/node_modules/@sigstore/protobuf-specs
-!/sigstore/node_modules/@sigstore/tuf
-!/sigstore/node_modules/@tufjs/
-/sigstore/node_modules/@tufjs/*
-!/sigstore/node_modules/@tufjs/models
-!/sigstore/node_modules/make-fetch-happen
-!/sigstore/node_modules/negotiator
-!/sigstore/node_modules/tuf-js
 !/smart-buffer
 !/socks-proxy-agent
 !/socks
@@ -319,6 +293,13 @@
 !/tinyglobby/node_modules/picomatch
 !/treeverse
 !/tuf-js
+!/tuf-js/node_modules/
+/tuf-js/node_modules/*
+!/tuf-js/node_modules/@tufjs/
+/tuf-js/node_modules/@tufjs/*
+!/tuf-js/node_modules/@tufjs/models
+!/tuf-js/node_modules/make-fetch-happen
+!/tuf-js/node_modules/negotiator
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 5c4f37bfaf3fb..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: envelope.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payload.length !== 0) {
-            obj.payload = base64FromBytes(message.payload);
-        }
-        if (message.payloadType !== "") {
-            obj.payloadType = message.payloadType;
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.sig.length !== 0) {
-            obj.sig = base64FromBytes(message.sig);
-        }
-        if (message.keyid !== "") {
-            obj.keyid = message.keyid;
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 6138fef5672fc..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,174 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: events.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? globalThis.String(object.id) : "",
-            source: isSet(object.source) ? globalThis.String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: globalThis.String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id !== "") {
-            obj.id = message.id;
-        }
-        if (message.source !== "") {
-            obj.source = message.source;
-        }
-        if (message.specVersion !== "") {
-            obj.specVersion = message.specVersion;
-        }
-        if (message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.attributes) {
-            const entries = Object.entries(message.attributes);
-            if (entries.length > 0) {
-                obj.attributes = {};
-                entries.forEach(([k, v]) => {
-                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-                });
-            }
-        }
-        if (message.data?.$case === "binaryData") {
-            obj.binaryData = base64FromBytes(message.data.binaryData);
-        }
-        else if (message.data?.$case === "textData") {
-            obj.textData = message.data.textData;
-        }
-        else if (message.data?.$case === "protoData") {
-            obj.protoData = any_1.Any.toJSON(message.data.protoData);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? globalThis.String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.key !== "") {
-            obj.key = message.key;
-        }
-        if (message.value !== undefined) {
-            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.attr?.$case === "ceBoolean") {
-            obj.ceBoolean = message.attr.ceBoolean;
-        }
-        else if (message.attr?.$case === "ceInteger") {
-            obj.ceInteger = Math.round(message.attr.ceInteger);
-        }
-        else if (message.attr?.$case === "ceString") {
-            obj.ceString = message.attr.ceString;
-        }
-        else if (message.attr?.$case === "ceBytes") {
-            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
-        }
-        else if (message.attr?.$case === "ceUri") {
-            obj.ceUri = message.attr.ceUri;
-        }
-        else if (message.attr?.$case === "ceUriRef") {
-            obj.ceUriRef = message.attr.ceUriRef;
-        }
-        else if (message.attr?.$case === "ceTimestamp") {
-            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
-        }
-        return obj;
-    },
-};
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return {
-            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events?.length) {
-            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index b4d9ccc781c2f..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,141 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/api/field_behavior.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FieldBehavior = void 0;
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-/* eslint-disable */
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary  order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-    /**
-     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
-     * This indicates that if the user provides the empty value in a request,
-     * a non-empty value will be returned. The user will not be aware of what
-     * non-empty value to expect.
-     */
-    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
-    /**
-     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
-     * google.api.resource) is used in the resource name to uniquely identify the
-     * resource. For AIP-compliant APIs, this should only be applied to the
-     * `name` field on the resource.
-     *
-     * This behavior should not be applied to references to other resources within
-     * the message.
-     *
-     * The identifier field of resources often have different field behavior
-     * depending on the request it is embedded in (e.g. for Create methods name
-     * is optional and unused, while for Update methods it is required). Instead
-     * of method-specific annotations, only `IDENTIFIER` is required.
-     */
-    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
-})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        case 7:
-        case "NON_EMPTY_DEFAULT":
-            return FieldBehavior.NON_EMPTY_DEFAULT;
-        case 8:
-        case "IDENTIFIER":
-            return FieldBehavior.IDENTIFIER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        case FieldBehavior.NON_EMPTY_DEFAULT:
-            return "NON_EMPTY_DEFAULT";
-        case FieldBehavior.IDENTIFIER:
-            return "IDENTIFIER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index f0c8aab773e4c..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/any.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.typeUrl !== "") {
-            obj.typeUrl = message.typeUrl;
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d6f8ddddf799d..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,2042 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/descriptor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
-exports.GeneratedCodeInfo_Annotation = void 0;
-exports.editionFromJSON = editionFromJSON;
-exports.editionToJSON = editionToJSON;
-exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
-exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
-exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
-exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
-exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
-exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
-exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
-exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
-exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
-exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
-exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
-exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
-exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
-exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
-exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
-exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
-exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
-exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
-exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
-exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
-/* eslint-disable */
-/** The full set of known editions. */
-var Edition;
-(function (Edition) {
-    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
-    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
-    /**
-     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
-     * was first introduced.  This is effectively an "infinite past".
-     */
-    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
-    /**
-     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
-     * distinct editions.  These can't be used to specify the edition of proto
-     * files, but feature definitions must supply proto2/proto3 defaults for
-     * backwards compatibility.
-     */
-    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
-    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
-    /**
-     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
-     * should not be depended on, but they will always be time-ordered for easy
-     * comparison.
-     */
-    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
-    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
-    /**
-     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
-     * used or relied on outside of tests.
-     */
-    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
-    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
-    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
-    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
-    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
-    /**
-     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
-     * ever be used by plugins that can expect to never require any changes to
-     * support a new edition.
-     */
-    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
-})(Edition || (exports.Edition = Edition = {}));
-function editionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "EDITION_UNKNOWN":
-            return Edition.EDITION_UNKNOWN;
-        case 900:
-        case "EDITION_LEGACY":
-            return Edition.EDITION_LEGACY;
-        case 998:
-        case "EDITION_PROTO2":
-            return Edition.EDITION_PROTO2;
-        case 999:
-        case "EDITION_PROTO3":
-            return Edition.EDITION_PROTO3;
-        case 1000:
-        case "EDITION_2023":
-            return Edition.EDITION_2023;
-        case 1001:
-        case "EDITION_2024":
-            return Edition.EDITION_2024;
-        case 1:
-        case "EDITION_1_TEST_ONLY":
-            return Edition.EDITION_1_TEST_ONLY;
-        case 2:
-        case "EDITION_2_TEST_ONLY":
-            return Edition.EDITION_2_TEST_ONLY;
-        case 99997:
-        case "EDITION_99997_TEST_ONLY":
-            return Edition.EDITION_99997_TEST_ONLY;
-        case 99998:
-        case "EDITION_99998_TEST_ONLY":
-            return Edition.EDITION_99998_TEST_ONLY;
-        case 99999:
-        case "EDITION_99999_TEST_ONLY":
-            return Edition.EDITION_99999_TEST_ONLY;
-        case 2147483647:
-        case "EDITION_MAX":
-            return Edition.EDITION_MAX;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-function editionToJSON(object) {
-    switch (object) {
-        case Edition.EDITION_UNKNOWN:
-            return "EDITION_UNKNOWN";
-        case Edition.EDITION_LEGACY:
-            return "EDITION_LEGACY";
-        case Edition.EDITION_PROTO2:
-            return "EDITION_PROTO2";
-        case Edition.EDITION_PROTO3:
-            return "EDITION_PROTO3";
-        case Edition.EDITION_2023:
-            return "EDITION_2023";
-        case Edition.EDITION_2024:
-            return "EDITION_2024";
-        case Edition.EDITION_1_TEST_ONLY:
-            return "EDITION_1_TEST_ONLY";
-        case Edition.EDITION_2_TEST_ONLY:
-            return "EDITION_2_TEST_ONLY";
-        case Edition.EDITION_99997_TEST_ONLY:
-            return "EDITION_99997_TEST_ONLY";
-        case Edition.EDITION_99998_TEST_ONLY:
-            return "EDITION_99998_TEST_ONLY";
-        case Edition.EDITION_99999_TEST_ONLY:
-            return "EDITION_99999_TEST_ONLY";
-        case Edition.EDITION_MAX:
-            return "EDITION_MAX";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-/** The verification state of the extension range. */
-var ExtensionRangeOptions_VerificationState;
-(function (ExtensionRangeOptions_VerificationState) {
-    /** DECLARATION - All the extensions of the range must be declared. */
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
-})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
-function extensionRangeOptions_VerificationStateFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "DECLARATION":
-            return ExtensionRangeOptions_VerificationState.DECLARATION;
-        case 1:
-        case "UNVERIFIED":
-            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-function extensionRangeOptions_VerificationStateToJSON(object) {
-    switch (object) {
-        case ExtensionRangeOptions_VerificationState.DECLARATION:
-            return "DECLARATION";
-        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
-            return "UNVERIFIED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported after google.protobuf. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.  In Editions, the group wire format
-     * can be enabled via the `message_encoding` feature.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-    /**
-     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
-     * it's explicitly prohibited.  In Editions, the `field_presence` feature
-     * can be used to get this behavior.
-     */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    /**
-     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
-     * "bytes". It indicates that in C++, the data should be stored in a Cord
-     * instead of a string.  For very large strings, this may reduce memory
-     * fragmentation. It may also allow better performance when parsing from a
-     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
-     * alias the original buffer.
-     */
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
-var FieldOptions_OptionRetention;
-(function (FieldOptions_OptionRetention) {
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
-})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
-function fieldOptions_OptionRetentionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "RETENTION_UNKNOWN":
-            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
-        case 1:
-        case "RETENTION_RUNTIME":
-            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
-        case 2:
-        case "RETENTION_SOURCE":
-            return FieldOptions_OptionRetention.RETENTION_SOURCE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-function fieldOptions_OptionRetentionToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
-            return "RETENTION_UNKNOWN";
-        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
-            return "RETENTION_RUNTIME";
-        case FieldOptions_OptionRetention.RETENTION_SOURCE:
-            return "RETENTION_SOURCE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-/**
- * This indicates the types of entities that the field may apply to when used
- * as an option. If it is unset, then the field may be freely used as an
- * option on any kind of entity.
- */
-var FieldOptions_OptionTargetType;
-(function (FieldOptions_OptionTargetType) {
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
-})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
-function fieldOptions_OptionTargetTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "TARGET_TYPE_UNKNOWN":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
-        case 1:
-        case "TARGET_TYPE_FILE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
-        case 2:
-        case "TARGET_TYPE_EXTENSION_RANGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
-        case 3:
-        case "TARGET_TYPE_MESSAGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
-        case 4:
-        case "TARGET_TYPE_FIELD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
-        case 5:
-        case "TARGET_TYPE_ONEOF":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
-        case 6:
-        case "TARGET_TYPE_ENUM":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
-        case 7:
-        case "TARGET_TYPE_ENUM_ENTRY":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
-        case 8:
-        case "TARGET_TYPE_SERVICE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
-        case 9:
-        case "TARGET_TYPE_METHOD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-function fieldOptions_OptionTargetTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
-            return "TARGET_TYPE_UNKNOWN";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
-            return "TARGET_TYPE_FILE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
-            return "TARGET_TYPE_EXTENSION_RANGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
-            return "TARGET_TYPE_MESSAGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
-            return "TARGET_TYPE_FIELD";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
-            return "TARGET_TYPE_ONEOF";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
-            return "TARGET_TYPE_ENUM";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
-            return "TARGET_TYPE_ENUM_ENTRY";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
-            return "TARGET_TYPE_SERVICE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
-            return "TARGET_TYPE_METHOD";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-var FeatureSet_FieldPresence;
-(function (FeatureSet_FieldPresence) {
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
-})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
-function featureSet_FieldPresenceFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_PRESENCE_UNKNOWN":
-            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
-        case 1:
-        case "EXPLICIT":
-            return FeatureSet_FieldPresence.EXPLICIT;
-        case 2:
-        case "IMPLICIT":
-            return FeatureSet_FieldPresence.IMPLICIT;
-        case 3:
-        case "LEGACY_REQUIRED":
-            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-function featureSet_FieldPresenceToJSON(object) {
-    switch (object) {
-        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
-            return "FIELD_PRESENCE_UNKNOWN";
-        case FeatureSet_FieldPresence.EXPLICIT:
-            return "EXPLICIT";
-        case FeatureSet_FieldPresence.IMPLICIT:
-            return "IMPLICIT";
-        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
-            return "LEGACY_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-var FeatureSet_EnumType;
-(function (FeatureSet_EnumType) {
-    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
-    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
-    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
-})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
-function featureSet_EnumTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENUM_TYPE_UNKNOWN":
-            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
-        case 1:
-        case "OPEN":
-            return FeatureSet_EnumType.OPEN;
-        case 2:
-        case "CLOSED":
-            return FeatureSet_EnumType.CLOSED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-function featureSet_EnumTypeToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
-            return "ENUM_TYPE_UNKNOWN";
-        case FeatureSet_EnumType.OPEN:
-            return "OPEN";
-        case FeatureSet_EnumType.CLOSED:
-            return "CLOSED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-var FeatureSet_RepeatedFieldEncoding;
-(function (FeatureSet_RepeatedFieldEncoding) {
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
-})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
-function featureSet_RepeatedFieldEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "REPEATED_FIELD_ENCODING_UNKNOWN":
-            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
-        case 1:
-        case "PACKED":
-            return FeatureSet_RepeatedFieldEncoding.PACKED;
-        case 2:
-        case "EXPANDED":
-            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-function featureSet_RepeatedFieldEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
-            return "REPEATED_FIELD_ENCODING_UNKNOWN";
-        case FeatureSet_RepeatedFieldEncoding.PACKED:
-            return "PACKED";
-        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
-            return "EXPANDED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-var FeatureSet_Utf8Validation;
-(function (FeatureSet_Utf8Validation) {
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
-})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
-function featureSet_Utf8ValidationFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "UTF8_VALIDATION_UNKNOWN":
-            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
-        case 2:
-        case "VERIFY":
-            return FeatureSet_Utf8Validation.VERIFY;
-        case 3:
-        case "NONE":
-            return FeatureSet_Utf8Validation.NONE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-function featureSet_Utf8ValidationToJSON(object) {
-    switch (object) {
-        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
-            return "UTF8_VALIDATION_UNKNOWN";
-        case FeatureSet_Utf8Validation.VERIFY:
-            return "VERIFY";
-        case FeatureSet_Utf8Validation.NONE:
-            return "NONE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-var FeatureSet_MessageEncoding;
-(function (FeatureSet_MessageEncoding) {
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
-})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
-function featureSet_MessageEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "MESSAGE_ENCODING_UNKNOWN":
-            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
-        case 1:
-        case "LENGTH_PREFIXED":
-            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
-        case 2:
-        case "DELIMITED":
-            return FeatureSet_MessageEncoding.DELIMITED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-function featureSet_MessageEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
-            return "MESSAGE_ENCODING_UNKNOWN";
-        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
-            return "LENGTH_PREFIXED";
-        case FeatureSet_MessageEncoding.DELIMITED:
-            return "DELIMITED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-var FeatureSet_JsonFormat;
-(function (FeatureSet_JsonFormat) {
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
-})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
-function featureSet_JsonFormatFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JSON_FORMAT_UNKNOWN":
-            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
-        case 1:
-        case "ALLOW":
-            return FeatureSet_JsonFormat.ALLOW;
-        case 2:
-        case "LEGACY_BEST_EFFORT":
-            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-function featureSet_JsonFormatToJSON(object) {
-    switch (object) {
-        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
-            return "JSON_FORMAT_UNKNOWN";
-        case FeatureSet_JsonFormat.ALLOW:
-            return "ALLOW";
-        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
-            return "LEGACY_BEST_EFFORT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-var FeatureSet_EnforceNamingStyle;
-(function (FeatureSet_EnforceNamingStyle) {
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
-})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
-function featureSet_EnforceNamingStyleFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENFORCE_NAMING_STYLE_UNKNOWN":
-            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
-        case 1:
-        case "STYLE2024":
-            return FeatureSet_EnforceNamingStyle.STYLE2024;
-        case 2:
-        case "STYLE_LEGACY":
-            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-function featureSet_EnforceNamingStyleToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
-            return "ENFORCE_NAMING_STYLE_UNKNOWN";
-        case FeatureSet_EnforceNamingStyle.STYLE2024:
-            return "STYLE2024";
-        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
-            return "STYLE_LEGACY";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-/**
- * Represents the identified object's effect on the element in the original
- * .proto file.
- */
-var GeneratedCodeInfo_Annotation_Semantic;
-(function (GeneratedCodeInfo_Annotation_Semantic) {
-    /** NONE - There is no effect or the effect is indescribable. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
-    /** SET - The element is set or otherwise mutated. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
-    /** ALIAS - An alias to the element is returned. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
-})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
-function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "NONE":
-            return GeneratedCodeInfo_Annotation_Semantic.NONE;
-        case 1:
-        case "SET":
-            return GeneratedCodeInfo_Annotation_Semantic.SET;
-        case 2:
-        case "ALIAS":
-            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-function generatedCodeInfo_Annotation_SemanticToJSON(object) {
-    switch (object) {
-        case GeneratedCodeInfo_Annotation_Semantic.NONE:
-            return "NONE";
-        case GeneratedCodeInfo_Annotation_Semantic.SET:
-            return "SET";
-        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
-            return "ALIAS";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return {
-            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file?.length) {
-            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            package: isSet(object.package) ? globalThis.String(object.package) : "",
-            dependency: globalThis.Array.isArray(object?.dependency)
-                ? object.dependency.map((e) => globalThis.String(e))
-                : [],
-            publicDependency: globalThis.Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => globalThis.Number(e))
-                : [],
-            weakDependency: globalThis.Array.isArray(object?.weakDependency)
-                ? object.weakDependency.map((e) => globalThis.Number(e))
-                : [],
-            messageType: globalThis.Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            service: globalThis.Array.isArray(object?.service)
-                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.package !== undefined && message.package !== "") {
-            obj.package = message.package;
-        }
-        if (message.dependency?.length) {
-            obj.dependency = message.dependency;
-        }
-        if (message.publicDependency?.length) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        if (message.weakDependency?.length) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        if (message.messageType?.length) {
-            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.service?.length) {
-            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FileOptions.toJSON(message.options);
-        }
-        if (message.sourceCodeInfo !== undefined) {
-            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
-        }
-        if (message.syntax !== undefined && message.syntax !== "") {
-            obj.syntax = message.syntax;
-        }
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            field: globalThis.Array.isArray(object?.field)
-                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: globalThis.Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            extensionRange: globalThis.Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.field?.length) {
-            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.nestedType?.length) {
-            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.extensionRange?.length) {
-            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
-        }
-        if (message.oneofDecl?.length) {
-            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MessageOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-            declaration: globalThis.Array.isArray(object?.declaration)
-                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            verification: isSet(object.verification)
-                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
-                : 1,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        if (message.declaration?.length) {
-            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.verification !== undefined && message.verification !== 1) {
-            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions_Declaration = {
-    fromJSON(object) {
-        return {
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
-            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.fullName !== undefined && message.fullName !== "") {
-            obj.fullName = message.fullName;
-        }
-        if (message.type !== undefined && message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.reserved !== undefined && message.reserved !== false) {
-            obj.reserved = message.reserved;
-        }
-        if (message.repeated !== undefined && message.repeated !== false) {
-            obj.repeated = message.repeated;
-        }
-        return obj;
-    },
-};
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.label !== undefined && message.label !== 1) {
-            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
-        }
-        if (message.type !== undefined && message.type !== 1) {
-            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
-        }
-        if (message.typeName !== undefined && message.typeName !== "") {
-            obj.typeName = message.typeName;
-        }
-        if (message.extendee !== undefined && message.extendee !== "") {
-            obj.extendee = message.extendee;
-        }
-        if (message.defaultValue !== undefined && message.defaultValue !== "") {
-            obj.defaultValue = message.defaultValue;
-        }
-        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
-            obj.oneofIndex = Math.round(message.oneofIndex);
-        }
-        if (message.jsonName !== undefined && message.jsonName !== "") {
-            obj.jsonName = message.jsonName;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FieldOptions.toJSON(message.options);
-        }
-        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
-            obj.proto3Optional = message.proto3Optional;
-        }
-        return obj;
-    },
-};
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.OneofOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            value: globalThis.Array.isArray(object?.value)
-                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.value?.length) {
-            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumValueOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            method: globalThis.Array.isArray(object?.method)
-                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.method?.length) {
-            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ServiceOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.inputType !== undefined && message.inputType !== "") {
-            obj.inputType = message.inputType;
-        }
-        if (message.outputType !== undefined && message.outputType !== "") {
-            obj.outputType = message.outputType;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MethodOptions.toJSON(message.options);
-        }
-        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
-            obj.clientStreaming = message.clientStreaming;
-        }
-        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
-            obj.serverStreaming = message.serverStreaming;
-        }
-        return obj;
-    },
-};
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.javaPackage !== undefined && message.javaPackage !== "") {
-            obj.javaPackage = message.javaPackage;
-        }
-        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
-            obj.javaOuterClassname = message.javaOuterClassname;
-        }
-        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
-            obj.javaMultipleFiles = message.javaMultipleFiles;
-        }
-        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
-            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
-        }
-        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
-            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
-        }
-        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
-            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
-        }
-        if (message.goPackage !== undefined && message.goPackage !== "") {
-            obj.goPackage = message.goPackage;
-        }
-        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
-            obj.ccGenericServices = message.ccGenericServices;
-        }
-        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
-            obj.javaGenericServices = message.javaGenericServices;
-        }
-        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
-            obj.pyGenericServices = message.pyGenericServices;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
-            obj.ccEnableArenas = message.ccEnableArenas;
-        }
-        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
-            obj.objcClassPrefix = message.objcClassPrefix;
-        }
-        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
-            obj.csharpNamespace = message.csharpNamespace;
-        }
-        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
-            obj.swiftPrefix = message.swiftPrefix;
-        }
-        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
-            obj.phpClassPrefix = message.phpClassPrefix;
-        }
-        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
-            obj.phpNamespace = message.phpNamespace;
-        }
-        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
-            obj.phpMetadataNamespace = message.phpMetadataNamespace;
-        }
-        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
-            obj.rubyPackage = message.rubyPackage;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat)
-                ? globalThis.Boolean(object.messageSetWireFormat)
-                : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
-            obj.messageSetWireFormat = message.messageSetWireFormat;
-        }
-        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
-            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.mapEntry !== undefined && message.mapEntry !== false) {
-            obj.mapEntry = message.mapEntry;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
-            targets: globalThis.Array.isArray(object?.targets)
-                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
-                : [],
-            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
-                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.ctype !== undefined && message.ctype !== 0) {
-            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
-        }
-        if (message.packed !== undefined && message.packed !== false) {
-            obj.packed = message.packed;
-        }
-        if (message.jstype !== undefined && message.jstype !== 0) {
-            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
-        }
-        if (message.lazy !== undefined && message.lazy !== false) {
-            obj.lazy = message.lazy;
-        }
-        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
-            obj.unverifiedLazy = message.unverifiedLazy;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.weak !== undefined && message.weak !== false) {
-            obj.weak = message.weak;
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.retention !== undefined && message.retention !== 0) {
-            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
-        }
-        if (message.targets?.length) {
-            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
-        }
-        if (message.editionDefaults?.length) {
-            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_EditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            value: isSet(object.value) ? globalThis.String(object.value) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.value !== undefined && message.value !== "") {
-            obj.value = message.value;
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_FeatureSupport = {
-    fromJSON(object) {
-        return {
-            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
-            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
-            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
-            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
-            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
-        }
-        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
-            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
-        }
-        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
-            obj.deprecationWarning = message.deprecationWarning;
-        }
-        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
-            obj.editionRemoved = editionToJSON(message.editionRemoved);
-        }
-        return obj;
-    },
-};
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.allowAlias !== undefined && message.allowAlias !== false) {
-            obj.allowAlias = message.allowAlias;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
-            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: globalThis.Array.isArray(object?.name)
-                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
-                : [],
-            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name?.length) {
-            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
-        }
-        if (message.identifierValue !== undefined && message.identifierValue !== "") {
-            obj.identifierValue = message.identifierValue;
-        }
-        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
-            obj.positiveIntValue = message.positiveIntValue;
-        }
-        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
-            obj.negativeIntValue = message.negativeIntValue;
-        }
-        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
-            obj.doubleValue = message.doubleValue;
-        }
-        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
-            obj.stringValue = base64FromBytes(message.stringValue);
-        }
-        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
-            obj.aggregateValue = message.aggregateValue;
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.namePart !== "") {
-            obj.namePart = message.namePart;
-        }
-        if (message.isExtension !== false) {
-            obj.isExtension = message.isExtension;
-        }
-        return obj;
-    },
-};
-exports.FeatureSet = {
-    fromJSON(object) {
-        return {
-            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
-            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
-            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
-                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
-                : 0,
-            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
-            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
-            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
-            enforceNamingStyle: isSet(object.enforceNamingStyle)
-                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
-                : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
-            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
-        }
-        if (message.enumType !== undefined && message.enumType !== 0) {
-            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
-        }
-        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
-            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
-        }
-        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
-            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
-        }
-        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
-            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
-        }
-        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
-            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
-        }
-        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
-            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults = {
-    fromJSON(object) {
-        return {
-            defaults: globalThis.Array.isArray(object?.defaults)
-                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
-                : [],
-            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
-            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.defaults?.length) {
-            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
-        }
-        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
-            obj.minimumEdition = editionToJSON(message.minimumEdition);
-        }
-        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
-            obj.maximumEdition = editionToJSON(message.maximumEdition);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults_FeatureSetEditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            overridableFeatures: isSet(object.overridableFeatures)
-                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
-                : undefined,
-            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.overridableFeatures !== undefined) {
-            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
-        }
-        if (message.fixedFeatures !== undefined) {
-            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: globalThis.Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location?.length) {
-            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
-            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.span?.length) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        if (message.leadingComments !== undefined && message.leadingComments !== "") {
-            obj.leadingComments = message.leadingComments;
-        }
-        if (message.trailingComments !== undefined && message.trailingComments !== "") {
-            obj.trailingComments = message.trailingComments;
-        }
-        if (message.leadingDetachedComments?.length) {
-            obj.leadingDetachedComments = message.leadingDetachedComments;
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: globalThis.Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation?.length) {
-            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.sourceFile !== undefined && message.sourceFile !== "") {
-            obj.sourceFile = message.sourceFile;
-        }
-        if (message.begin !== undefined && message.begin !== 0) {
-            obj.begin = Math.round(message.begin);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.semantic !== undefined && message.semantic !== 0) {
-            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 9d24cbba10de9..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/timestamp.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.seconds !== "0") {
-            obj.seconds = message.seconds;
-        }
-        if (message.nanos !== 0) {
-            obj.nanos = Math.round(message.nanos);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
deleted file mode 100644
index abc766bed3b88..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
+++ /dev/null
@@ -1,55 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/dsse.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
-/* eslint-disable */
-const envelope_1 = require("../../envelope");
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.DSSERequestV002 = {
-    fromJSON(object) {
-        return {
-            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
-            verifiers: globalThis.Array.isArray(object?.verifiers)
-                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== undefined) {
-            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
-        }
-        if (message.verifiers?.length) {
-            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.DSSELogEntryV002 = {
-    fromJSON(object) {
-        return {
-            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payloadHash !== undefined) {
-            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
deleted file mode 100644
index c5eccb10e0a68..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
+++ /dev/null
@@ -1,81 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/entry.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
-/* eslint-disable */
-const dsse_1 = require("./dsse");
-const hashedrekord_1 = require("./hashedrekord");
-exports.Entry = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
-            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.apiVersion !== "") {
-            obj.apiVersion = message.apiVersion;
-        }
-        if (message.spec !== undefined) {
-            obj.spec = exports.Spec.toJSON(message.spec);
-        }
-        return obj;
-    },
-};
-exports.Spec = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordV002)
-                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
-                : isSet(object.dsseV002)
-                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordV002") {
-            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
-        }
-        else if (message.spec?.$case === "dsseV002") {
-            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
-        }
-        return obj;
-    },
-};
-exports.CreateEntryRequest = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordRequestV002)
-                ? {
-                    $case: "hashedRekordRequestV002",
-                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
-                }
-                : isSet(object.dsseRequestV002)
-                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordRequestV002") {
-            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
-        }
-        else if (message.spec?.$case === "dsseRequestV002") {
-            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
deleted file mode 100644
index d3fd1af2483d1..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
+++ /dev/null
@@ -1,56 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/hashedrekord.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.HashedRekordRequestV002 = {
-    fromJSON(object) {
-        return {
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-exports.HashedRekordLogEntryV002 = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data !== undefined) {
-            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
deleted file mode 100644
index c437d5053a3cb..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
+++ /dev/null
@@ -1,74 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/verifier.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Verifier = exports.PublicKey = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-exports.PublicKey = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.Verifier = {
-    fromJSON(object) {
-        return {
-            verifier: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
-                : isSet(object.x509Certificate)
-                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
-                    : undefined,
-            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.verifier?.$case === "publicKey") {
-            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
-        }
-        else if (message.verifier?.$case === "x509Certificate") {
-            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
-            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content.length !== 0) {
-            obj.content = base64FromBytes(message.content);
-        }
-        if (message.verifier !== undefined) {
-            obj.verifier = exports.Verifier.toJSON(message.verifier);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index aed636f00e7cf..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_bundle.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps?.length) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : isSet(object.certificate)
-                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
-                        : undefined,
-            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content?.$case === "publicKey") {
-            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
-        }
-        else if (message.content?.$case === "x509CertificateChain") {
-            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
-        }
-        else if (message.content?.$case === "certificate") {
-            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
-        }
-        if (message.tlogEntries?.length) {
-            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
-        }
-        if (message.timestampVerificationData !== undefined) {
-            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
-        }
-        return obj;
-    },
-};
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.verificationMaterial !== undefined) {
-            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
-        }
-        if (message.content?.$case === "messageSignature") {
-            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
-        }
-        else if (message.content?.$case === "dsseEnvelope") {
-            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index b900516ed3b55..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,596 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_common.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See  for more
- * details.
- * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
- * any proto JSON serialization to emit the used hash algorithm, as default
- * option is to *omit* the default value of an enum (which is the first
- * value, represented by '0'.
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
-    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
-    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
-    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
-})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        case 2:
-        case "SHA2_384":
-            return HashAlgorithm.SHA2_384;
-        case 3:
-        case "SHA2_512":
-            return HashAlgorithm.SHA2_512;
-        case 4:
-        case "SHA3_256":
-            return HashAlgorithm.SHA3_256;
-        case 5:
-        case "SHA3_384":
-            return HashAlgorithm.SHA3_384;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        case HashAlgorithm.SHA2_384:
-            return "SHA2_384";
-        case HashAlgorithm.SHA2_512:
-            return "SHA2_512";
-        case HashAlgorithm.SHA3_256:
-            return "SHA3_256";
-        case HashAlgorithm.SHA3_384:
-            return "SHA3_384";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-/**
- * Details of a specific public key, capturing the the key encoding method,
- * and signature algorithm.
- *
- * PublicKeyDetails captures the public key/hash algorithm combinations
- * recommended in the Sigstore ecosystem.
- *
- * This is modelled as a linear set as we want to provide a small number of
- * opinionated options instead of allowing every possible permutation.
- *
- * Any changes to this enum MUST be reflected in the algorithm registry.
- *
- * See: 
- *
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519 the valid permutations are listed as a linear set instead of a
- * cartesian set (i.e one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /**
-     * PKCS1_RSA_PKCS1V5 - RSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /**
-     * PKCS1_RSA_PSS - See RFC8017
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
-    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
-    /**
-     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
-    /** PKIX_ED25519 - Ed 25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
-    /**
-     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
-     * were/are being used by most Sigstore clients implementations.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
-    /**
-     * LMS_SHA256 - LMS and LM-OTS
-     *
-     * These algorithms are deprecated and should not be used.
-     * Keys and signatures MAY be used by private Sigstore
-     * deployments, but will not be supported by the public
-     * good instance.
-     *
-     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
-     * Using them correctly requires discretion and careful consideration
-     * to ensure that individual secret keys are not used more than once.
-     * In addition, LM-OTS is a single-use scheme, meaning that it
-     * MUST NOT be used for more than one signature per LM-OTS key.
-     * If you cannot maintain these invariants, you MUST NOT use these
-     * schemes.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
-    /**
-     * ML_DSA_65 - ML-DSA
-     *
-     * These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
-     * take data to sign rather than the prehash variants (HashML-DSA), which
-     * take digests.  While considered quantum-resistant, their usage
-     * involves tradeoffs in that signatures and keys are much larger, and
-     * this makes deployments more costly.
-     *
-     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
-     * In the future they MAY be used by private Sigstore deployments, but
-     * they are not yet fully functional.  This warning will be removed when
-     * these algorithms are widely supported by Sigstore clients and servers,
-     * but care should still be taken for production environments.
-     */
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
-})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 9:
-        case "PKIX_RSA_PKCS1V15_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
-        case 10:
-        case "PKIX_RSA_PKCS1V15_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
-        case 11:
-        case "PKIX_RSA_PKCS1V15_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
-        case 16:
-        case "PKIX_RSA_PSS_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
-        case 17:
-        case "PKIX_RSA_PSS_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
-        case 18:
-        case "PKIX_RSA_PSS_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 12:
-        case "PKIX_ECDSA_P384_SHA_384":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
-        case 13:
-        case "PKIX_ECDSA_P521_SHA_512":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        case 8:
-        case "PKIX_ED25519_PH":
-            return PublicKeyDetails.PKIX_ED25519_PH;
-        case 19:
-        case "PKIX_ECDSA_P384_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
-        case 20:
-        case "PKIX_ECDSA_P521_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
-        case 14:
-        case "LMS_SHA256":
-            return PublicKeyDetails.LMS_SHA256;
-        case 15:
-        case "LMOTS_SHA256":
-            return PublicKeyDetails.LMOTS_SHA256;
-        case 21:
-        case "ML_DSA_65":
-            return PublicKeyDetails.ML_DSA_65;
-        case 22:
-        case "ML_DSA_87":
-            return PublicKeyDetails.ML_DSA_87;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
-            return "PKIX_RSA_PKCS1V15_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
-            return "PKIX_RSA_PKCS1V15_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
-            return "PKIX_RSA_PKCS1V15_4096_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
-            return "PKIX_RSA_PSS_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
-            return "PKIX_RSA_PSS_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
-            return "PKIX_RSA_PSS_4096_SHA256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
-            return "PKIX_ECDSA_P384_SHA_384";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
-            return "PKIX_ECDSA_P521_SHA_512";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        case PublicKeyDetails.PKIX_ED25519_PH:
-            return "PKIX_ED25519_PH";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
-            return "PKIX_ECDSA_P384_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
-            return "PKIX_ECDSA_P521_SHA_256";
-        case PublicKeyDetails.LMS_SHA256:
-            return "LMS_SHA256";
-        case PublicKeyDetails.LMOTS_SHA256:
-            return "LMOTS_SHA256";
-        case PublicKeyDetails.ML_DSA_65:
-            return "ML_DSA_65";
-        case PublicKeyDetails.ML_DSA_87:
-            return "ML_DSA_87";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.algorithm !== 0) {
-            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
-        }
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        return obj;
-    },
-};
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageDigest !== undefined) {
-            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
-        }
-        if (message.signature.length !== 0) {
-            obj.signature = base64FromBytes(message.signature);
-        }
-        return obj;
-    },
-};
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.keyId.length !== 0) {
-            obj.keyId = base64FromBytes(message.keyId);
-        }
-        return obj;
-    },
-};
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedTimestamp.length !== 0) {
-            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
-        }
-        return obj;
-    },
-};
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes !== undefined) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = exports.TimeRange.toJSON(message.validFor);
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.hint !== "") {
-            obj.hint = message.hint;
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id?.length) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.oid !== undefined) {
-            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
-            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.organization !== "") {
-            obj.organization = message.organization;
-        }
-        if (message.commonName !== "") {
-            obj.commonName = message.commonName;
-        }
-        return obj;
-    },
-};
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: globalThis.String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.type !== 0) {
-            obj.type = subjectAlternativeNameTypeToJSON(message.type);
-        }
-        if (message.identity?.$case === "regexp") {
-            obj.regexp = message.identity.regexp;
-        }
-        else if (message.identity?.$case === "value") {
-            obj.value = message.identity.value;
-        }
-        return obj;
-    },
-};
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: globalThis.Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates?.length) {
-            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined) {
-            obj.start = message.start.toISOString();
-        }
-        if (message.end !== undefined) {
-            obj.end = message.end.toISOString();
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index fd8ea8384664d..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,137 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_rekor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            version: isSet(object.version) ? globalThis.String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.version !== "") {
-            obj.version = message.version;
-        }
-        return obj;
-    },
-};
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== "") {
-            obj.envelope = message.envelope;
-        }
-        return obj;
-    },
-};
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
-            hashes: globalThis.Array.isArray(object?.hashes)
-                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
-                : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.rootHash.length !== 0) {
-            obj.rootHash = base64FromBytes(message.rootHash);
-        }
-        if (message.treeSize !== "0") {
-            obj.treeSize = message.treeSize;
-        }
-        if (message.hashes?.length) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
-        }
-        if (message.checkpoint !== undefined) {
-            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
-        }
-        return obj;
-    },
-};
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedEntryTimestamp.length !== 0) {
-            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
-        }
-        return obj;
-    },
-};
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.kindVersion !== undefined) {
-            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
-        }
-        if (message.integratedTime !== "0") {
-            obj.integratedTime = message.integratedTime;
-        }
-        if (message.inclusionPromise !== undefined) {
-            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
-        }
-        if (message.inclusionProof !== undefined) {
-            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
-        }
-        if (message.canonicalizedBody.length !== 0) {
-            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 1b5492fb1a77e..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,284 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_trustroot.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
-exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
-exports.serviceSelectorToJSON = serviceSelectorToJSON;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-/**
- * ServiceSelector specifies how a client SHOULD select a set of
- * Services to connect to. A client SHOULD throw an error if
- * the value is SERVICE_SELECTOR_UNDEFINED.
- */
-var ServiceSelector;
-(function (ServiceSelector) {
-    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
-    /**
-     * ALL - Clients SHOULD select all Services based on supported API version
-     * and validity window.
-     */
-    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
-    /**
-     * ANY - Clients SHOULD select one Service based on supported API version
-     * and validity window. It is up to the client implementation to
-     * decide how to select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
-    /**
-     * EXACT - Clients SHOULD select a specific number of Services based on
-     * supported API version and validity window, using the provided
-     * `count`. It is up to the client implementation to decide how to
-     * select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
-})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
-function serviceSelectorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SERVICE_SELECTOR_UNDEFINED":
-            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
-        case 1:
-        case "ALL":
-            return ServiceSelector.ALL;
-        case 2:
-        case "ANY":
-            return ServiceSelector.ANY;
-        case 3:
-        case "EXACT":
-            return ServiceSelector.EXACT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-function serviceSelectorToJSON(object) {
-    switch (object) {
-        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
-            return "SERVICE_SELECTOR_UNDEFINED";
-        case ServiceSelector.ALL:
-            return "ALL";
-        case ServiceSelector.ANY:
-            return "ANY";
-        case ServiceSelector.EXACT:
-            return "EXACT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.baseUrl !== "") {
-            obj.baseUrl = message.baseUrl;
-        }
-        if (message.hashAlgorithm !== 0) {
-            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
-        }
-        if (message.publicKey !== undefined) {
-            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.checkpointKeyId !== undefined) {
-            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.subject !== undefined) {
-            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
-        }
-        if (message.uri !== "") {
-            obj.uri = message.uri;
-        }
-        if (message.certChain !== undefined) {
-            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            tlogs: globalThis.Array.isArray(object?.tlogs)
-                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: globalThis.Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.tlogs?.length) {
-            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.certificateAuthorities?.length) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        if (message.ctlogs?.length) {
-            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.timestampAuthorities?.length) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SigningConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
-                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
-                : [],
-            rekorTlogConfig: isSet(object.rekorTlogConfig)
-                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
-                : undefined,
-            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.caUrls?.length) {
-            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.oidcUrls?.length) {
-            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogUrls?.length) {
-            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogConfig !== undefined) {
-            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
-        }
-        if (message.tsaUrls?.length) {
-            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.tsaConfig !== undefined) {
-            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
-        }
-        return obj;
-    },
-};
-exports.Service = {
-    fromJSON(object) {
-        return {
-            url: isSet(object.url) ? globalThis.String(object.url) : "",
-            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.url !== "") {
-            obj.url = message.url;
-        }
-        if (message.majorApiVersion !== 0) {
-            obj.majorApiVersion = Math.round(message.majorApiVersion);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.ServiceConfiguration = {
-    fromJSON(object) {
-        return {
-            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
-            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.selector !== 0) {
-            obj.selector = serviceSelectorToJSON(message.selector);
-        }
-        if (message.count !== 0) {
-            obj.count = Math.round(message.count);
-        }
-        return obj;
-    },
-};
-exports.ClientTrustConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
-            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.trustedRoot !== undefined) {
-            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
-        }
-        if (message.signingConfig !== undefined) {
-            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 876fe9cc1db1d..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,281 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_verification.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: globalThis.Array.isArray(object?.oids)
-                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.issuer !== "") {
-            obj.issuer = message.issuer;
-        }
-        if (message.san !== undefined) {
-            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
-        }
-        if (message.oids?.length) {
-            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: globalThis.Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities?.length) {
-            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: globalThis.Array.isArray(object?.publicKeys)
-                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys?.length) {
-            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-            integratedTsOptions: isSet(object.integratedTsOptions)
-                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
-                : undefined,
-            observerOptions: isSet(object.observerOptions)
-                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signers?.$case === "certificateIdentities") {
-            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
-        }
-        else if (message.signers?.$case === "publicKeys") {
-            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
-        }
-        if (message.tlogOptions !== undefined) {
-            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
-        }
-        if (message.ctlogOptions !== undefined) {
-            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
-        }
-        if (message.tsaOptions !== undefined) {
-            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
-        }
-        if (message.integratedTsOptions !== undefined) {
-            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
-        }
-        if (message.observerOptions !== undefined) {
-            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? globalThis.Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.performOnlineVerification !== false) {
-            obj.performOnlineVerification = message.performOnlineVerification;
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : isSet(object.artifactDigest)
-                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data?.$case === "artifactUri") {
-            obj.artifactUri = message.data.artifactUri;
-        }
-        else if (message.data?.$case === "artifact") {
-            obj.artifact = base64FromBytes(message.data.artifact);
-        }
-        else if (message.data?.$case === "artifactDigest") {
-            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
-        }
-        return obj;
-    },
-};
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.artifactTrustRoot !== undefined) {
-            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
-        }
-        if (message.artifactVerificationOptions !== undefined) {
-            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
-        }
-        if (message.bundle !== undefined) {
-            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
-        }
-        if (message.artifact !== undefined) {
-            obj.artifact = exports.Artifact.toJSON(message.artifact);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index f87b2540fbf98..0000000000000
--- a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.5.0",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "exports": {
-    ".": "./dist/index.js",
-    "./rekor/v2": "./dist/rekor/v2/index.js"
-  },
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node18": "^18.2.4",
-    "@types/node": "^18.14.0",
-    "typescript": "^5.7.2"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  }
-}
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
index 3c9abff8899b5..5c4f37bfaf3fb 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: envelope.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
index 46904b7ec64d9..6138fef5672fc 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: events.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
index 14e559a5e0126..b4d9ccc781c2f 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: google/api/field_behavior.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
index bc461887e318a..f0c8aab773e4c 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: google/protobuf/any.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
index a7d7550fc9774..d6f8ddddf799d 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: google/protobuf/descriptor.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
index 8b75b604c231c..9d24cbba10de9 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: google/protobuf/timestamp.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
index 13099ddc3631a..abc766bed3b88 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: rekor/v2/dsse.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
index 177fc0cbf3482..c5eccb10e0a68 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: rekor/v2/entry.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
index ed0d16494e06f..d3fd1af2483d1 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: rekor/v2/hashedrekord.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
index cc32d84bd7fae..c437d5053a3cb 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: rekor/v2/verifier.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
index 0f0a27b662eba..aed636f00e7cf 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: sigstore_bundle.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
index fd62147feaef7..b900516ed3b55 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: sigstore_common.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
index 9f9b3d0d1b461..fd8ea8384664d 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: sigstore_rekor.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
index d5f4e4ef3cddc..1b5492fb1a77e 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: sigstore_trustroot.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
index a616d5f0f6a21..876fe9cc1db1d 100644
--- a/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ b/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
@@ -1,7 +1,7 @@
 "use strict";
 // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
 // versions:
-//   protoc-gen-ts_proto  v2.7.0
+//   protoc-gen-ts_proto  v2.7.5
 //   protoc               v6.30.2
 // source: sigstore_verification.proto
 Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
similarity index 100%
rename from node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
rename to node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
diff --git a/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/protobuf-specs/package.json
index 3080a305a8f05..f87b2540fbf98 100644
--- a/node_modules/@sigstore/protobuf-specs/package.json
+++ b/node_modules/@sigstore/protobuf-specs/package.json
@@ -1,9 +1,13 @@
 {
   "name": "@sigstore/protobuf-specs",
-  "version": "0.4.3",
+  "version": "0.5.0",
   "description": "code-signing for npm packages",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
+  "exports": {
+    ".": "./dist/index.js",
+    "./rekor/v2": "./dist/rekor/v2/index.js"
+  },
   "scripts": {
     "build": "tsc"
   },
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 5c4f37bfaf3fb..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: envelope.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payload.length !== 0) {
-            obj.payload = base64FromBytes(message.payload);
-        }
-        if (message.payloadType !== "") {
-            obj.payloadType = message.payloadType;
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.sig.length !== 0) {
-            obj.sig = base64FromBytes(message.sig);
-        }
-        if (message.keyid !== "") {
-            obj.keyid = message.keyid;
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 6138fef5672fc..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,174 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: events.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? globalThis.String(object.id) : "",
-            source: isSet(object.source) ? globalThis.String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: globalThis.String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id !== "") {
-            obj.id = message.id;
-        }
-        if (message.source !== "") {
-            obj.source = message.source;
-        }
-        if (message.specVersion !== "") {
-            obj.specVersion = message.specVersion;
-        }
-        if (message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.attributes) {
-            const entries = Object.entries(message.attributes);
-            if (entries.length > 0) {
-                obj.attributes = {};
-                entries.forEach(([k, v]) => {
-                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-                });
-            }
-        }
-        if (message.data?.$case === "binaryData") {
-            obj.binaryData = base64FromBytes(message.data.binaryData);
-        }
-        else if (message.data?.$case === "textData") {
-            obj.textData = message.data.textData;
-        }
-        else if (message.data?.$case === "protoData") {
-            obj.protoData = any_1.Any.toJSON(message.data.protoData);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? globalThis.String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.key !== "") {
-            obj.key = message.key;
-        }
-        if (message.value !== undefined) {
-            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.attr?.$case === "ceBoolean") {
-            obj.ceBoolean = message.attr.ceBoolean;
-        }
-        else if (message.attr?.$case === "ceInteger") {
-            obj.ceInteger = Math.round(message.attr.ceInteger);
-        }
-        else if (message.attr?.$case === "ceString") {
-            obj.ceString = message.attr.ceString;
-        }
-        else if (message.attr?.$case === "ceBytes") {
-            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
-        }
-        else if (message.attr?.$case === "ceUri") {
-            obj.ceUri = message.attr.ceUri;
-        }
-        else if (message.attr?.$case === "ceUriRef") {
-            obj.ceUriRef = message.attr.ceUriRef;
-        }
-        else if (message.attr?.$case === "ceTimestamp") {
-            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
-        }
-        return obj;
-    },
-};
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return {
-            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events?.length) {
-            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index b4d9ccc781c2f..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,141 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/api/field_behavior.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FieldBehavior = void 0;
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-/* eslint-disable */
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary  order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-    /**
-     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
-     * This indicates that if the user provides the empty value in a request,
-     * a non-empty value will be returned. The user will not be aware of what
-     * non-empty value to expect.
-     */
-    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
-    /**
-     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
-     * google.api.resource) is used in the resource name to uniquely identify the
-     * resource. For AIP-compliant APIs, this should only be applied to the
-     * `name` field on the resource.
-     *
-     * This behavior should not be applied to references to other resources within
-     * the message.
-     *
-     * The identifier field of resources often have different field behavior
-     * depending on the request it is embedded in (e.g. for Create methods name
-     * is optional and unused, while for Update methods it is required). Instead
-     * of method-specific annotations, only `IDENTIFIER` is required.
-     */
-    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
-})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        case 7:
-        case "NON_EMPTY_DEFAULT":
-            return FieldBehavior.NON_EMPTY_DEFAULT;
-        case 8:
-        case "IDENTIFIER":
-            return FieldBehavior.IDENTIFIER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        case FieldBehavior.NON_EMPTY_DEFAULT:
-            return "NON_EMPTY_DEFAULT";
-        case FieldBehavior.IDENTIFIER:
-            return "IDENTIFIER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index f0c8aab773e4c..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/any.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.typeUrl !== "") {
-            obj.typeUrl = message.typeUrl;
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d6f8ddddf799d..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,2042 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/descriptor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
-exports.GeneratedCodeInfo_Annotation = void 0;
-exports.editionFromJSON = editionFromJSON;
-exports.editionToJSON = editionToJSON;
-exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
-exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
-exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
-exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
-exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
-exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
-exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
-exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
-exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
-exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
-exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
-exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
-exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
-exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
-exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
-exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
-exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
-exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
-exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
-exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
-/* eslint-disable */
-/** The full set of known editions. */
-var Edition;
-(function (Edition) {
-    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
-    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
-    /**
-     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
-     * was first introduced.  This is effectively an "infinite past".
-     */
-    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
-    /**
-     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
-     * distinct editions.  These can't be used to specify the edition of proto
-     * files, but feature definitions must supply proto2/proto3 defaults for
-     * backwards compatibility.
-     */
-    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
-    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
-    /**
-     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
-     * should not be depended on, but they will always be time-ordered for easy
-     * comparison.
-     */
-    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
-    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
-    /**
-     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
-     * used or relied on outside of tests.
-     */
-    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
-    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
-    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
-    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
-    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
-    /**
-     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
-     * ever be used by plugins that can expect to never require any changes to
-     * support a new edition.
-     */
-    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
-})(Edition || (exports.Edition = Edition = {}));
-function editionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "EDITION_UNKNOWN":
-            return Edition.EDITION_UNKNOWN;
-        case 900:
-        case "EDITION_LEGACY":
-            return Edition.EDITION_LEGACY;
-        case 998:
-        case "EDITION_PROTO2":
-            return Edition.EDITION_PROTO2;
-        case 999:
-        case "EDITION_PROTO3":
-            return Edition.EDITION_PROTO3;
-        case 1000:
-        case "EDITION_2023":
-            return Edition.EDITION_2023;
-        case 1001:
-        case "EDITION_2024":
-            return Edition.EDITION_2024;
-        case 1:
-        case "EDITION_1_TEST_ONLY":
-            return Edition.EDITION_1_TEST_ONLY;
-        case 2:
-        case "EDITION_2_TEST_ONLY":
-            return Edition.EDITION_2_TEST_ONLY;
-        case 99997:
-        case "EDITION_99997_TEST_ONLY":
-            return Edition.EDITION_99997_TEST_ONLY;
-        case 99998:
-        case "EDITION_99998_TEST_ONLY":
-            return Edition.EDITION_99998_TEST_ONLY;
-        case 99999:
-        case "EDITION_99999_TEST_ONLY":
-            return Edition.EDITION_99999_TEST_ONLY;
-        case 2147483647:
-        case "EDITION_MAX":
-            return Edition.EDITION_MAX;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-function editionToJSON(object) {
-    switch (object) {
-        case Edition.EDITION_UNKNOWN:
-            return "EDITION_UNKNOWN";
-        case Edition.EDITION_LEGACY:
-            return "EDITION_LEGACY";
-        case Edition.EDITION_PROTO2:
-            return "EDITION_PROTO2";
-        case Edition.EDITION_PROTO3:
-            return "EDITION_PROTO3";
-        case Edition.EDITION_2023:
-            return "EDITION_2023";
-        case Edition.EDITION_2024:
-            return "EDITION_2024";
-        case Edition.EDITION_1_TEST_ONLY:
-            return "EDITION_1_TEST_ONLY";
-        case Edition.EDITION_2_TEST_ONLY:
-            return "EDITION_2_TEST_ONLY";
-        case Edition.EDITION_99997_TEST_ONLY:
-            return "EDITION_99997_TEST_ONLY";
-        case Edition.EDITION_99998_TEST_ONLY:
-            return "EDITION_99998_TEST_ONLY";
-        case Edition.EDITION_99999_TEST_ONLY:
-            return "EDITION_99999_TEST_ONLY";
-        case Edition.EDITION_MAX:
-            return "EDITION_MAX";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-/** The verification state of the extension range. */
-var ExtensionRangeOptions_VerificationState;
-(function (ExtensionRangeOptions_VerificationState) {
-    /** DECLARATION - All the extensions of the range must be declared. */
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
-})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
-function extensionRangeOptions_VerificationStateFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "DECLARATION":
-            return ExtensionRangeOptions_VerificationState.DECLARATION;
-        case 1:
-        case "UNVERIFIED":
-            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-function extensionRangeOptions_VerificationStateToJSON(object) {
-    switch (object) {
-        case ExtensionRangeOptions_VerificationState.DECLARATION:
-            return "DECLARATION";
-        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
-            return "UNVERIFIED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported after google.protobuf. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.  In Editions, the group wire format
-     * can be enabled via the `message_encoding` feature.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-    /**
-     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
-     * it's explicitly prohibited.  In Editions, the `field_presence` feature
-     * can be used to get this behavior.
-     */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    /**
-     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
-     * "bytes". It indicates that in C++, the data should be stored in a Cord
-     * instead of a string.  For very large strings, this may reduce memory
-     * fragmentation. It may also allow better performance when parsing from a
-     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
-     * alias the original buffer.
-     */
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
-var FieldOptions_OptionRetention;
-(function (FieldOptions_OptionRetention) {
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
-})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
-function fieldOptions_OptionRetentionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "RETENTION_UNKNOWN":
-            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
-        case 1:
-        case "RETENTION_RUNTIME":
-            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
-        case 2:
-        case "RETENTION_SOURCE":
-            return FieldOptions_OptionRetention.RETENTION_SOURCE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-function fieldOptions_OptionRetentionToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
-            return "RETENTION_UNKNOWN";
-        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
-            return "RETENTION_RUNTIME";
-        case FieldOptions_OptionRetention.RETENTION_SOURCE:
-            return "RETENTION_SOURCE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-/**
- * This indicates the types of entities that the field may apply to when used
- * as an option. If it is unset, then the field may be freely used as an
- * option on any kind of entity.
- */
-var FieldOptions_OptionTargetType;
-(function (FieldOptions_OptionTargetType) {
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
-})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
-function fieldOptions_OptionTargetTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "TARGET_TYPE_UNKNOWN":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
-        case 1:
-        case "TARGET_TYPE_FILE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
-        case 2:
-        case "TARGET_TYPE_EXTENSION_RANGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
-        case 3:
-        case "TARGET_TYPE_MESSAGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
-        case 4:
-        case "TARGET_TYPE_FIELD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
-        case 5:
-        case "TARGET_TYPE_ONEOF":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
-        case 6:
-        case "TARGET_TYPE_ENUM":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
-        case 7:
-        case "TARGET_TYPE_ENUM_ENTRY":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
-        case 8:
-        case "TARGET_TYPE_SERVICE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
-        case 9:
-        case "TARGET_TYPE_METHOD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-function fieldOptions_OptionTargetTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
-            return "TARGET_TYPE_UNKNOWN";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
-            return "TARGET_TYPE_FILE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
-            return "TARGET_TYPE_EXTENSION_RANGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
-            return "TARGET_TYPE_MESSAGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
-            return "TARGET_TYPE_FIELD";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
-            return "TARGET_TYPE_ONEOF";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
-            return "TARGET_TYPE_ENUM";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
-            return "TARGET_TYPE_ENUM_ENTRY";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
-            return "TARGET_TYPE_SERVICE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
-            return "TARGET_TYPE_METHOD";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-var FeatureSet_FieldPresence;
-(function (FeatureSet_FieldPresence) {
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
-})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
-function featureSet_FieldPresenceFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_PRESENCE_UNKNOWN":
-            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
-        case 1:
-        case "EXPLICIT":
-            return FeatureSet_FieldPresence.EXPLICIT;
-        case 2:
-        case "IMPLICIT":
-            return FeatureSet_FieldPresence.IMPLICIT;
-        case 3:
-        case "LEGACY_REQUIRED":
-            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-function featureSet_FieldPresenceToJSON(object) {
-    switch (object) {
-        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
-            return "FIELD_PRESENCE_UNKNOWN";
-        case FeatureSet_FieldPresence.EXPLICIT:
-            return "EXPLICIT";
-        case FeatureSet_FieldPresence.IMPLICIT:
-            return "IMPLICIT";
-        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
-            return "LEGACY_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-var FeatureSet_EnumType;
-(function (FeatureSet_EnumType) {
-    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
-    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
-    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
-})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
-function featureSet_EnumTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENUM_TYPE_UNKNOWN":
-            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
-        case 1:
-        case "OPEN":
-            return FeatureSet_EnumType.OPEN;
-        case 2:
-        case "CLOSED":
-            return FeatureSet_EnumType.CLOSED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-function featureSet_EnumTypeToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
-            return "ENUM_TYPE_UNKNOWN";
-        case FeatureSet_EnumType.OPEN:
-            return "OPEN";
-        case FeatureSet_EnumType.CLOSED:
-            return "CLOSED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-var FeatureSet_RepeatedFieldEncoding;
-(function (FeatureSet_RepeatedFieldEncoding) {
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
-})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
-function featureSet_RepeatedFieldEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "REPEATED_FIELD_ENCODING_UNKNOWN":
-            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
-        case 1:
-        case "PACKED":
-            return FeatureSet_RepeatedFieldEncoding.PACKED;
-        case 2:
-        case "EXPANDED":
-            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-function featureSet_RepeatedFieldEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
-            return "REPEATED_FIELD_ENCODING_UNKNOWN";
-        case FeatureSet_RepeatedFieldEncoding.PACKED:
-            return "PACKED";
-        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
-            return "EXPANDED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-var FeatureSet_Utf8Validation;
-(function (FeatureSet_Utf8Validation) {
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
-})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
-function featureSet_Utf8ValidationFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "UTF8_VALIDATION_UNKNOWN":
-            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
-        case 2:
-        case "VERIFY":
-            return FeatureSet_Utf8Validation.VERIFY;
-        case 3:
-        case "NONE":
-            return FeatureSet_Utf8Validation.NONE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-function featureSet_Utf8ValidationToJSON(object) {
-    switch (object) {
-        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
-            return "UTF8_VALIDATION_UNKNOWN";
-        case FeatureSet_Utf8Validation.VERIFY:
-            return "VERIFY";
-        case FeatureSet_Utf8Validation.NONE:
-            return "NONE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-var FeatureSet_MessageEncoding;
-(function (FeatureSet_MessageEncoding) {
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
-})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
-function featureSet_MessageEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "MESSAGE_ENCODING_UNKNOWN":
-            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
-        case 1:
-        case "LENGTH_PREFIXED":
-            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
-        case 2:
-        case "DELIMITED":
-            return FeatureSet_MessageEncoding.DELIMITED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-function featureSet_MessageEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
-            return "MESSAGE_ENCODING_UNKNOWN";
-        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
-            return "LENGTH_PREFIXED";
-        case FeatureSet_MessageEncoding.DELIMITED:
-            return "DELIMITED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-var FeatureSet_JsonFormat;
-(function (FeatureSet_JsonFormat) {
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
-})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
-function featureSet_JsonFormatFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JSON_FORMAT_UNKNOWN":
-            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
-        case 1:
-        case "ALLOW":
-            return FeatureSet_JsonFormat.ALLOW;
-        case 2:
-        case "LEGACY_BEST_EFFORT":
-            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-function featureSet_JsonFormatToJSON(object) {
-    switch (object) {
-        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
-            return "JSON_FORMAT_UNKNOWN";
-        case FeatureSet_JsonFormat.ALLOW:
-            return "ALLOW";
-        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
-            return "LEGACY_BEST_EFFORT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-var FeatureSet_EnforceNamingStyle;
-(function (FeatureSet_EnforceNamingStyle) {
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
-})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
-function featureSet_EnforceNamingStyleFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENFORCE_NAMING_STYLE_UNKNOWN":
-            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
-        case 1:
-        case "STYLE2024":
-            return FeatureSet_EnforceNamingStyle.STYLE2024;
-        case 2:
-        case "STYLE_LEGACY":
-            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-function featureSet_EnforceNamingStyleToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
-            return "ENFORCE_NAMING_STYLE_UNKNOWN";
-        case FeatureSet_EnforceNamingStyle.STYLE2024:
-            return "STYLE2024";
-        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
-            return "STYLE_LEGACY";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-/**
- * Represents the identified object's effect on the element in the original
- * .proto file.
- */
-var GeneratedCodeInfo_Annotation_Semantic;
-(function (GeneratedCodeInfo_Annotation_Semantic) {
-    /** NONE - There is no effect or the effect is indescribable. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
-    /** SET - The element is set or otherwise mutated. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
-    /** ALIAS - An alias to the element is returned. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
-})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
-function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "NONE":
-            return GeneratedCodeInfo_Annotation_Semantic.NONE;
-        case 1:
-        case "SET":
-            return GeneratedCodeInfo_Annotation_Semantic.SET;
-        case 2:
-        case "ALIAS":
-            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-function generatedCodeInfo_Annotation_SemanticToJSON(object) {
-    switch (object) {
-        case GeneratedCodeInfo_Annotation_Semantic.NONE:
-            return "NONE";
-        case GeneratedCodeInfo_Annotation_Semantic.SET:
-            return "SET";
-        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
-            return "ALIAS";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return {
-            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file?.length) {
-            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            package: isSet(object.package) ? globalThis.String(object.package) : "",
-            dependency: globalThis.Array.isArray(object?.dependency)
-                ? object.dependency.map((e) => globalThis.String(e))
-                : [],
-            publicDependency: globalThis.Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => globalThis.Number(e))
-                : [],
-            weakDependency: globalThis.Array.isArray(object?.weakDependency)
-                ? object.weakDependency.map((e) => globalThis.Number(e))
-                : [],
-            messageType: globalThis.Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            service: globalThis.Array.isArray(object?.service)
-                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.package !== undefined && message.package !== "") {
-            obj.package = message.package;
-        }
-        if (message.dependency?.length) {
-            obj.dependency = message.dependency;
-        }
-        if (message.publicDependency?.length) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        if (message.weakDependency?.length) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        if (message.messageType?.length) {
-            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.service?.length) {
-            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FileOptions.toJSON(message.options);
-        }
-        if (message.sourceCodeInfo !== undefined) {
-            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
-        }
-        if (message.syntax !== undefined && message.syntax !== "") {
-            obj.syntax = message.syntax;
-        }
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            field: globalThis.Array.isArray(object?.field)
-                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: globalThis.Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            extensionRange: globalThis.Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.field?.length) {
-            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.nestedType?.length) {
-            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.extensionRange?.length) {
-            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
-        }
-        if (message.oneofDecl?.length) {
-            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MessageOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-            declaration: globalThis.Array.isArray(object?.declaration)
-                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            verification: isSet(object.verification)
-                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
-                : 1,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        if (message.declaration?.length) {
-            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.verification !== undefined && message.verification !== 1) {
-            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions_Declaration = {
-    fromJSON(object) {
-        return {
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
-            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.fullName !== undefined && message.fullName !== "") {
-            obj.fullName = message.fullName;
-        }
-        if (message.type !== undefined && message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.reserved !== undefined && message.reserved !== false) {
-            obj.reserved = message.reserved;
-        }
-        if (message.repeated !== undefined && message.repeated !== false) {
-            obj.repeated = message.repeated;
-        }
-        return obj;
-    },
-};
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.label !== undefined && message.label !== 1) {
-            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
-        }
-        if (message.type !== undefined && message.type !== 1) {
-            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
-        }
-        if (message.typeName !== undefined && message.typeName !== "") {
-            obj.typeName = message.typeName;
-        }
-        if (message.extendee !== undefined && message.extendee !== "") {
-            obj.extendee = message.extendee;
-        }
-        if (message.defaultValue !== undefined && message.defaultValue !== "") {
-            obj.defaultValue = message.defaultValue;
-        }
-        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
-            obj.oneofIndex = Math.round(message.oneofIndex);
-        }
-        if (message.jsonName !== undefined && message.jsonName !== "") {
-            obj.jsonName = message.jsonName;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FieldOptions.toJSON(message.options);
-        }
-        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
-            obj.proto3Optional = message.proto3Optional;
-        }
-        return obj;
-    },
-};
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.OneofOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            value: globalThis.Array.isArray(object?.value)
-                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.value?.length) {
-            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumValueOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            method: globalThis.Array.isArray(object?.method)
-                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.method?.length) {
-            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ServiceOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.inputType !== undefined && message.inputType !== "") {
-            obj.inputType = message.inputType;
-        }
-        if (message.outputType !== undefined && message.outputType !== "") {
-            obj.outputType = message.outputType;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MethodOptions.toJSON(message.options);
-        }
-        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
-            obj.clientStreaming = message.clientStreaming;
-        }
-        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
-            obj.serverStreaming = message.serverStreaming;
-        }
-        return obj;
-    },
-};
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.javaPackage !== undefined && message.javaPackage !== "") {
-            obj.javaPackage = message.javaPackage;
-        }
-        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
-            obj.javaOuterClassname = message.javaOuterClassname;
-        }
-        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
-            obj.javaMultipleFiles = message.javaMultipleFiles;
-        }
-        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
-            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
-        }
-        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
-            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
-        }
-        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
-            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
-        }
-        if (message.goPackage !== undefined && message.goPackage !== "") {
-            obj.goPackage = message.goPackage;
-        }
-        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
-            obj.ccGenericServices = message.ccGenericServices;
-        }
-        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
-            obj.javaGenericServices = message.javaGenericServices;
-        }
-        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
-            obj.pyGenericServices = message.pyGenericServices;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
-            obj.ccEnableArenas = message.ccEnableArenas;
-        }
-        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
-            obj.objcClassPrefix = message.objcClassPrefix;
-        }
-        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
-            obj.csharpNamespace = message.csharpNamespace;
-        }
-        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
-            obj.swiftPrefix = message.swiftPrefix;
-        }
-        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
-            obj.phpClassPrefix = message.phpClassPrefix;
-        }
-        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
-            obj.phpNamespace = message.phpNamespace;
-        }
-        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
-            obj.phpMetadataNamespace = message.phpMetadataNamespace;
-        }
-        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
-            obj.rubyPackage = message.rubyPackage;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat)
-                ? globalThis.Boolean(object.messageSetWireFormat)
-                : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
-            obj.messageSetWireFormat = message.messageSetWireFormat;
-        }
-        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
-            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.mapEntry !== undefined && message.mapEntry !== false) {
-            obj.mapEntry = message.mapEntry;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
-            targets: globalThis.Array.isArray(object?.targets)
-                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
-                : [],
-            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
-                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.ctype !== undefined && message.ctype !== 0) {
-            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
-        }
-        if (message.packed !== undefined && message.packed !== false) {
-            obj.packed = message.packed;
-        }
-        if (message.jstype !== undefined && message.jstype !== 0) {
-            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
-        }
-        if (message.lazy !== undefined && message.lazy !== false) {
-            obj.lazy = message.lazy;
-        }
-        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
-            obj.unverifiedLazy = message.unverifiedLazy;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.weak !== undefined && message.weak !== false) {
-            obj.weak = message.weak;
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.retention !== undefined && message.retention !== 0) {
-            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
-        }
-        if (message.targets?.length) {
-            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
-        }
-        if (message.editionDefaults?.length) {
-            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_EditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            value: isSet(object.value) ? globalThis.String(object.value) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.value !== undefined && message.value !== "") {
-            obj.value = message.value;
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_FeatureSupport = {
-    fromJSON(object) {
-        return {
-            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
-            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
-            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
-            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
-            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
-        }
-        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
-            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
-        }
-        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
-            obj.deprecationWarning = message.deprecationWarning;
-        }
-        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
-            obj.editionRemoved = editionToJSON(message.editionRemoved);
-        }
-        return obj;
-    },
-};
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.allowAlias !== undefined && message.allowAlias !== false) {
-            obj.allowAlias = message.allowAlias;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
-            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: globalThis.Array.isArray(object?.name)
-                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
-                : [],
-            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name?.length) {
-            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
-        }
-        if (message.identifierValue !== undefined && message.identifierValue !== "") {
-            obj.identifierValue = message.identifierValue;
-        }
-        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
-            obj.positiveIntValue = message.positiveIntValue;
-        }
-        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
-            obj.negativeIntValue = message.negativeIntValue;
-        }
-        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
-            obj.doubleValue = message.doubleValue;
-        }
-        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
-            obj.stringValue = base64FromBytes(message.stringValue);
-        }
-        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
-            obj.aggregateValue = message.aggregateValue;
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.namePart !== "") {
-            obj.namePart = message.namePart;
-        }
-        if (message.isExtension !== false) {
-            obj.isExtension = message.isExtension;
-        }
-        return obj;
-    },
-};
-exports.FeatureSet = {
-    fromJSON(object) {
-        return {
-            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
-            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
-            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
-                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
-                : 0,
-            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
-            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
-            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
-            enforceNamingStyle: isSet(object.enforceNamingStyle)
-                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
-                : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
-            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
-        }
-        if (message.enumType !== undefined && message.enumType !== 0) {
-            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
-        }
-        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
-            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
-        }
-        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
-            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
-        }
-        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
-            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
-        }
-        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
-            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
-        }
-        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
-            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults = {
-    fromJSON(object) {
-        return {
-            defaults: globalThis.Array.isArray(object?.defaults)
-                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
-                : [],
-            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
-            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.defaults?.length) {
-            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
-        }
-        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
-            obj.minimumEdition = editionToJSON(message.minimumEdition);
-        }
-        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
-            obj.maximumEdition = editionToJSON(message.maximumEdition);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults_FeatureSetEditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            overridableFeatures: isSet(object.overridableFeatures)
-                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
-                : undefined,
-            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.overridableFeatures !== undefined) {
-            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
-        }
-        if (message.fixedFeatures !== undefined) {
-            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: globalThis.Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location?.length) {
-            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
-            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.span?.length) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        if (message.leadingComments !== undefined && message.leadingComments !== "") {
-            obj.leadingComments = message.leadingComments;
-        }
-        if (message.trailingComments !== undefined && message.trailingComments !== "") {
-            obj.trailingComments = message.trailingComments;
-        }
-        if (message.leadingDetachedComments?.length) {
-            obj.leadingDetachedComments = message.leadingDetachedComments;
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: globalThis.Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation?.length) {
-            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.sourceFile !== undefined && message.sourceFile !== "") {
-            obj.sourceFile = message.sourceFile;
-        }
-        if (message.begin !== undefined && message.begin !== 0) {
-            obj.begin = Math.round(message.begin);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.semantic !== undefined && message.semantic !== 0) {
-            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 9d24cbba10de9..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/timestamp.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.seconds !== "0") {
-            obj.seconds = message.seconds;
-        }
-        if (message.nanos !== 0) {
-            obj.nanos = Math.round(message.nanos);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
deleted file mode 100644
index abc766bed3b88..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
+++ /dev/null
@@ -1,55 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/dsse.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
-/* eslint-disable */
-const envelope_1 = require("../../envelope");
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.DSSERequestV002 = {
-    fromJSON(object) {
-        return {
-            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
-            verifiers: globalThis.Array.isArray(object?.verifiers)
-                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== undefined) {
-            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
-        }
-        if (message.verifiers?.length) {
-            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.DSSELogEntryV002 = {
-    fromJSON(object) {
-        return {
-            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payloadHash !== undefined) {
-            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
deleted file mode 100644
index c5eccb10e0a68..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
+++ /dev/null
@@ -1,81 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/entry.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
-/* eslint-disable */
-const dsse_1 = require("./dsse");
-const hashedrekord_1 = require("./hashedrekord");
-exports.Entry = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
-            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.apiVersion !== "") {
-            obj.apiVersion = message.apiVersion;
-        }
-        if (message.spec !== undefined) {
-            obj.spec = exports.Spec.toJSON(message.spec);
-        }
-        return obj;
-    },
-};
-exports.Spec = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordV002)
-                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
-                : isSet(object.dsseV002)
-                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordV002") {
-            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
-        }
-        else if (message.spec?.$case === "dsseV002") {
-            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
-        }
-        return obj;
-    },
-};
-exports.CreateEntryRequest = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordRequestV002)
-                ? {
-                    $case: "hashedRekordRequestV002",
-                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
-                }
-                : isSet(object.dsseRequestV002)
-                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordRequestV002") {
-            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
-        }
-        else if (message.spec?.$case === "dsseRequestV002") {
-            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
deleted file mode 100644
index d3fd1af2483d1..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
+++ /dev/null
@@ -1,56 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/hashedrekord.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.HashedRekordRequestV002 = {
-    fromJSON(object) {
-        return {
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-exports.HashedRekordLogEntryV002 = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data !== undefined) {
-            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
deleted file mode 100644
index c437d5053a3cb..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
+++ /dev/null
@@ -1,74 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/verifier.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Verifier = exports.PublicKey = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-exports.PublicKey = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.Verifier = {
-    fromJSON(object) {
-        return {
-            verifier: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
-                : isSet(object.x509Certificate)
-                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
-                    : undefined,
-            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.verifier?.$case === "publicKey") {
-            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
-        }
-        else if (message.verifier?.$case === "x509Certificate") {
-            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
-            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content.length !== 0) {
-            obj.content = base64FromBytes(message.content);
-        }
-        if (message.verifier !== undefined) {
-            obj.verifier = exports.Verifier.toJSON(message.verifier);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index aed636f00e7cf..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_bundle.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps?.length) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : isSet(object.certificate)
-                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
-                        : undefined,
-            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content?.$case === "publicKey") {
-            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
-        }
-        else if (message.content?.$case === "x509CertificateChain") {
-            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
-        }
-        else if (message.content?.$case === "certificate") {
-            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
-        }
-        if (message.tlogEntries?.length) {
-            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
-        }
-        if (message.timestampVerificationData !== undefined) {
-            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
-        }
-        return obj;
-    },
-};
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.verificationMaterial !== undefined) {
-            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
-        }
-        if (message.content?.$case === "messageSignature") {
-            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
-        }
-        else if (message.content?.$case === "dsseEnvelope") {
-            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index b900516ed3b55..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,596 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_common.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See  for more
- * details.
- * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
- * any proto JSON serialization to emit the used hash algorithm, as default
- * option is to *omit* the default value of an enum (which is the first
- * value, represented by '0'.
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
-    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
-    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
-    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
-})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        case 2:
-        case "SHA2_384":
-            return HashAlgorithm.SHA2_384;
-        case 3:
-        case "SHA2_512":
-            return HashAlgorithm.SHA2_512;
-        case 4:
-        case "SHA3_256":
-            return HashAlgorithm.SHA3_256;
-        case 5:
-        case "SHA3_384":
-            return HashAlgorithm.SHA3_384;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        case HashAlgorithm.SHA2_384:
-            return "SHA2_384";
-        case HashAlgorithm.SHA2_512:
-            return "SHA2_512";
-        case HashAlgorithm.SHA3_256:
-            return "SHA3_256";
-        case HashAlgorithm.SHA3_384:
-            return "SHA3_384";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-/**
- * Details of a specific public key, capturing the the key encoding method,
- * and signature algorithm.
- *
- * PublicKeyDetails captures the public key/hash algorithm combinations
- * recommended in the Sigstore ecosystem.
- *
- * This is modelled as a linear set as we want to provide a small number of
- * opinionated options instead of allowing every possible permutation.
- *
- * Any changes to this enum MUST be reflected in the algorithm registry.
- *
- * See: 
- *
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519 the valid permutations are listed as a linear set instead of a
- * cartesian set (i.e one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /**
-     * PKCS1_RSA_PKCS1V5 - RSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /**
-     * PKCS1_RSA_PSS - See RFC8017
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
-    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
-    /**
-     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
-    /** PKIX_ED25519 - Ed 25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
-    /**
-     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
-     * were/are being used by most Sigstore clients implementations.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
-    /**
-     * LMS_SHA256 - LMS and LM-OTS
-     *
-     * These algorithms are deprecated and should not be used.
-     * Keys and signatures MAY be used by private Sigstore
-     * deployments, but will not be supported by the public
-     * good instance.
-     *
-     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
-     * Using them correctly requires discretion and careful consideration
-     * to ensure that individual secret keys are not used more than once.
-     * In addition, LM-OTS is a single-use scheme, meaning that it
-     * MUST NOT be used for more than one signature per LM-OTS key.
-     * If you cannot maintain these invariants, you MUST NOT use these
-     * schemes.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
-    /**
-     * ML_DSA_65 - ML-DSA
-     *
-     * These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
-     * take data to sign rather than the prehash variants (HashML-DSA), which
-     * take digests.  While considered quantum-resistant, their usage
-     * involves tradeoffs in that signatures and keys are much larger, and
-     * this makes deployments more costly.
-     *
-     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
-     * In the future they MAY be used by private Sigstore deployments, but
-     * they are not yet fully functional.  This warning will be removed when
-     * these algorithms are widely supported by Sigstore clients and servers,
-     * but care should still be taken for production environments.
-     */
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
-})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 9:
-        case "PKIX_RSA_PKCS1V15_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
-        case 10:
-        case "PKIX_RSA_PKCS1V15_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
-        case 11:
-        case "PKIX_RSA_PKCS1V15_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
-        case 16:
-        case "PKIX_RSA_PSS_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
-        case 17:
-        case "PKIX_RSA_PSS_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
-        case 18:
-        case "PKIX_RSA_PSS_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 12:
-        case "PKIX_ECDSA_P384_SHA_384":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
-        case 13:
-        case "PKIX_ECDSA_P521_SHA_512":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        case 8:
-        case "PKIX_ED25519_PH":
-            return PublicKeyDetails.PKIX_ED25519_PH;
-        case 19:
-        case "PKIX_ECDSA_P384_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
-        case 20:
-        case "PKIX_ECDSA_P521_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
-        case 14:
-        case "LMS_SHA256":
-            return PublicKeyDetails.LMS_SHA256;
-        case 15:
-        case "LMOTS_SHA256":
-            return PublicKeyDetails.LMOTS_SHA256;
-        case 21:
-        case "ML_DSA_65":
-            return PublicKeyDetails.ML_DSA_65;
-        case 22:
-        case "ML_DSA_87":
-            return PublicKeyDetails.ML_DSA_87;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
-            return "PKIX_RSA_PKCS1V15_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
-            return "PKIX_RSA_PKCS1V15_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
-            return "PKIX_RSA_PKCS1V15_4096_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
-            return "PKIX_RSA_PSS_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
-            return "PKIX_RSA_PSS_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
-            return "PKIX_RSA_PSS_4096_SHA256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
-            return "PKIX_ECDSA_P384_SHA_384";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
-            return "PKIX_ECDSA_P521_SHA_512";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        case PublicKeyDetails.PKIX_ED25519_PH:
-            return "PKIX_ED25519_PH";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
-            return "PKIX_ECDSA_P384_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
-            return "PKIX_ECDSA_P521_SHA_256";
-        case PublicKeyDetails.LMS_SHA256:
-            return "LMS_SHA256";
-        case PublicKeyDetails.LMOTS_SHA256:
-            return "LMOTS_SHA256";
-        case PublicKeyDetails.ML_DSA_65:
-            return "ML_DSA_65";
-        case PublicKeyDetails.ML_DSA_87:
-            return "ML_DSA_87";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.algorithm !== 0) {
-            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
-        }
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        return obj;
-    },
-};
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageDigest !== undefined) {
-            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
-        }
-        if (message.signature.length !== 0) {
-            obj.signature = base64FromBytes(message.signature);
-        }
-        return obj;
-    },
-};
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.keyId.length !== 0) {
-            obj.keyId = base64FromBytes(message.keyId);
-        }
-        return obj;
-    },
-};
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedTimestamp.length !== 0) {
-            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
-        }
-        return obj;
-    },
-};
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes !== undefined) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = exports.TimeRange.toJSON(message.validFor);
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.hint !== "") {
-            obj.hint = message.hint;
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id?.length) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.oid !== undefined) {
-            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
-            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.organization !== "") {
-            obj.organization = message.organization;
-        }
-        if (message.commonName !== "") {
-            obj.commonName = message.commonName;
-        }
-        return obj;
-    },
-};
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: globalThis.String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.type !== 0) {
-            obj.type = subjectAlternativeNameTypeToJSON(message.type);
-        }
-        if (message.identity?.$case === "regexp") {
-            obj.regexp = message.identity.regexp;
-        }
-        else if (message.identity?.$case === "value") {
-            obj.value = message.identity.value;
-        }
-        return obj;
-    },
-};
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: globalThis.Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates?.length) {
-            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined) {
-            obj.start = message.start.toISOString();
-        }
-        if (message.end !== undefined) {
-            obj.end = message.end.toISOString();
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index fd8ea8384664d..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,137 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_rekor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            version: isSet(object.version) ? globalThis.String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.version !== "") {
-            obj.version = message.version;
-        }
-        return obj;
-    },
-};
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== "") {
-            obj.envelope = message.envelope;
-        }
-        return obj;
-    },
-};
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
-            hashes: globalThis.Array.isArray(object?.hashes)
-                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
-                : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.rootHash.length !== 0) {
-            obj.rootHash = base64FromBytes(message.rootHash);
-        }
-        if (message.treeSize !== "0") {
-            obj.treeSize = message.treeSize;
-        }
-        if (message.hashes?.length) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
-        }
-        if (message.checkpoint !== undefined) {
-            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
-        }
-        return obj;
-    },
-};
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedEntryTimestamp.length !== 0) {
-            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
-        }
-        return obj;
-    },
-};
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.kindVersion !== undefined) {
-            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
-        }
-        if (message.integratedTime !== "0") {
-            obj.integratedTime = message.integratedTime;
-        }
-        if (message.inclusionPromise !== undefined) {
-            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
-        }
-        if (message.inclusionProof !== undefined) {
-            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
-        }
-        if (message.canonicalizedBody.length !== 0) {
-            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 1b5492fb1a77e..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,284 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_trustroot.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
-exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
-exports.serviceSelectorToJSON = serviceSelectorToJSON;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-/**
- * ServiceSelector specifies how a client SHOULD select a set of
- * Services to connect to. A client SHOULD throw an error if
- * the value is SERVICE_SELECTOR_UNDEFINED.
- */
-var ServiceSelector;
-(function (ServiceSelector) {
-    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
-    /**
-     * ALL - Clients SHOULD select all Services based on supported API version
-     * and validity window.
-     */
-    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
-    /**
-     * ANY - Clients SHOULD select one Service based on supported API version
-     * and validity window. It is up to the client implementation to
-     * decide how to select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
-    /**
-     * EXACT - Clients SHOULD select a specific number of Services based on
-     * supported API version and validity window, using the provided
-     * `count`. It is up to the client implementation to decide how to
-     * select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
-})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
-function serviceSelectorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SERVICE_SELECTOR_UNDEFINED":
-            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
-        case 1:
-        case "ALL":
-            return ServiceSelector.ALL;
-        case 2:
-        case "ANY":
-            return ServiceSelector.ANY;
-        case 3:
-        case "EXACT":
-            return ServiceSelector.EXACT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-function serviceSelectorToJSON(object) {
-    switch (object) {
-        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
-            return "SERVICE_SELECTOR_UNDEFINED";
-        case ServiceSelector.ALL:
-            return "ALL";
-        case ServiceSelector.ANY:
-            return "ANY";
-        case ServiceSelector.EXACT:
-            return "EXACT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.baseUrl !== "") {
-            obj.baseUrl = message.baseUrl;
-        }
-        if (message.hashAlgorithm !== 0) {
-            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
-        }
-        if (message.publicKey !== undefined) {
-            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.checkpointKeyId !== undefined) {
-            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.subject !== undefined) {
-            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
-        }
-        if (message.uri !== "") {
-            obj.uri = message.uri;
-        }
-        if (message.certChain !== undefined) {
-            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            tlogs: globalThis.Array.isArray(object?.tlogs)
-                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: globalThis.Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.tlogs?.length) {
-            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.certificateAuthorities?.length) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        if (message.ctlogs?.length) {
-            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.timestampAuthorities?.length) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SigningConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
-                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
-                : [],
-            rekorTlogConfig: isSet(object.rekorTlogConfig)
-                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
-                : undefined,
-            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.caUrls?.length) {
-            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.oidcUrls?.length) {
-            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogUrls?.length) {
-            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogConfig !== undefined) {
-            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
-        }
-        if (message.tsaUrls?.length) {
-            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.tsaConfig !== undefined) {
-            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
-        }
-        return obj;
-    },
-};
-exports.Service = {
-    fromJSON(object) {
-        return {
-            url: isSet(object.url) ? globalThis.String(object.url) : "",
-            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.url !== "") {
-            obj.url = message.url;
-        }
-        if (message.majorApiVersion !== 0) {
-            obj.majorApiVersion = Math.round(message.majorApiVersion);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.ServiceConfiguration = {
-    fromJSON(object) {
-        return {
-            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
-            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.selector !== 0) {
-            obj.selector = serviceSelectorToJSON(message.selector);
-        }
-        if (message.count !== 0) {
-            obj.count = Math.round(message.count);
-        }
-        return obj;
-    },
-};
-exports.ClientTrustConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
-            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.trustedRoot !== undefined) {
-            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
-        }
-        if (message.signingConfig !== undefined) {
-            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 876fe9cc1db1d..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,281 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_verification.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: globalThis.Array.isArray(object?.oids)
-                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.issuer !== "") {
-            obj.issuer = message.issuer;
-        }
-        if (message.san !== undefined) {
-            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
-        }
-        if (message.oids?.length) {
-            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: globalThis.Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities?.length) {
-            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: globalThis.Array.isArray(object?.publicKeys)
-                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys?.length) {
-            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-            integratedTsOptions: isSet(object.integratedTsOptions)
-                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
-                : undefined,
-            observerOptions: isSet(object.observerOptions)
-                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signers?.$case === "certificateIdentities") {
-            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
-        }
-        else if (message.signers?.$case === "publicKeys") {
-            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
-        }
-        if (message.tlogOptions !== undefined) {
-            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
-        }
-        if (message.ctlogOptions !== undefined) {
-            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
-        }
-        if (message.tsaOptions !== undefined) {
-            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
-        }
-        if (message.integratedTsOptions !== undefined) {
-            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
-        }
-        if (message.observerOptions !== undefined) {
-            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? globalThis.Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.performOnlineVerification !== false) {
-            obj.performOnlineVerification = message.performOnlineVerification;
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : isSet(object.artifactDigest)
-                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data?.$case === "artifactUri") {
-            obj.artifactUri = message.data.artifactUri;
-        }
-        else if (message.data?.$case === "artifact") {
-            obj.artifact = base64FromBytes(message.data.artifact);
-        }
-        else if (message.data?.$case === "artifactDigest") {
-            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
-        }
-        return obj;
-    },
-};
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.artifactTrustRoot !== undefined) {
-            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
-        }
-        if (message.artifactVerificationOptions !== undefined) {
-            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
-        }
-        if (message.bundle !== undefined) {
-            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
-        }
-        if (message.artifact !== undefined) {
-            obj.artifact = exports.Artifact.toJSON(message.artifact);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
deleted file mode 100644
index 10745efc39a1f..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2025 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
-__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
-__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
-__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index f87b2540fbf98..0000000000000
--- a/node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.5.0",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "exports": {
-    ".": "./dist/index.js",
-    "./rekor/v2": "./dist/rekor/v2/index.js"
-  },
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node18": "^18.2.4",
-    "@types/node": "^18.14.0",
-    "typescript": "^5.7.2"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  }
-}
diff --git a/node_modules/@sigstore/tuf/dist/client.js b/node_modules/@sigstore/tuf/dist/client.js
index 328f49e40dbbd..2931a0a6b3ab5 100644
--- a/node_modules/@sigstore/tuf/dist/client.js
+++ b/node_modules/@sigstore/tuf/dist/client.js
@@ -63,6 +63,7 @@ function initTufCache(cachePath) {
     if (!fs_1.default.existsSync(cachePath)) {
         fs_1.default.mkdirSync(cachePath, { recursive: true });
     }
+    /* istanbul ignore else */
     if (!fs_1.default.existsSync(targetsPath)) {
         fs_1.default.mkdirSync(targetsPath);
     }
@@ -74,6 +75,7 @@ function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) {
     const cachedRootPath = path_1.default.join(cachePath, 'root.json');
     // If the root.json file does not exist (or we're forcing re-initialization),
     // populate it either from the supplied rootPath or from one of the repo seeds.
+    /* istanbul ignore else */
     if (!fs_1.default.existsSync(cachedRootPath) || forceInit) {
         if (tufRootPath) {
             fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
diff --git a/node_modules/@sigstore/tuf/package.json b/node_modules/@sigstore/tuf/package.json
index 4eb105f1acf4e..42dad938c2808 100644
--- a/node_modules/@sigstore/tuf/package.json
+++ b/node_modules/@sigstore/tuf/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@sigstore/tuf",
-  "version": "3.1.1",
+  "version": "4.0.0",
   "description": "Client for the Sigstore TUF repository",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -32,10 +32,10 @@
     "@types/make-fetch-happen": "^10.0.4"
   },
   "dependencies": {
-    "@sigstore/protobuf-specs": "^0.4.1",
-    "tuf-js": "^3.0.1"
+    "@sigstore/protobuf-specs": "^0.5.0",
+    "tuf-js": "^4.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/node_modules/@sigstore/tuf/seeds.json b/node_modules/@sigstore/tuf/seeds.json
index 04fe4e6ebfcdb..6d48f33afe700 100644
--- a/node_modules/@sigstore/tuf/seeds.json
+++ b/node_modules/@sigstore/tuf/seeds.json
@@ -1 +1 @@
-{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"ewogInNpZ25hdHVyZXMiOiBbCiAgewogICAia2V5aWQiOiAiNmYyNjAwODlkNTkyM2RhZjIwMTY2Y2E2NTdjNTQzYWY2MTgzNDZhYjk3MTg4NGE5OTk2MmIwMTk4OGJiZTBjMyIsCiAgICJzaWciOiAiIgogIH0sCiAgewogICAia2V5aWQiOiAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICJzaWciOiAiMzA0NTAyMjEwMGIwYmNmMTg5Y2UxYjkzZTdkYjk2NDlkNWJlNTEyYTE4ODBjMGUzNTg4NzBlMzkzM2U0MjZjNWFmYjhhNDA2MTAwMjIwNmQyMTRiZDc5YjA5ZjQ1OGNjYzUyMWEyOTBhYTk2MGM0MTcwMTRmYzE2ZTYwNmY4MjA5MWI1ZTMxODE0ODg2YSIKICB9LAogIHsKICAgImtleWlkIjogIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAic2lnIjogIiIKICB9LAogIHsKICAgImtleWlkIjogIjYxNjQzODM4MTI1YjQ0MGI0MGRiNjk0MmY1Y2I1YTMxYzBkYzA0MzY4MzE2ZWIyYWFhNThiOTU5MDRhNTgyMjIiLAogICAic2lnIjogIjMwNDUwMjIxMDBhOWI5ZTI5NGVjMjFiNjJkZmNhNmExNmExOWQwODQxODJjMTI1NzJlMzNkOWM0ZGNhYjUzMTdmYTFlOGE0NTlkMDIyMDY5ZjY4ZTU1ZWExZjk1YzVhMzY3YWFjN2E2MWE2NTc1N2Y5M2RhNWEwMDZhNWY0ZDFjZjk5NWJlODEyZDc2MDIiCiAgfSwKICB7CiAgICJrZXlpZCI6ICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIiwKICAgInNpZyI6ICIzMDQ0MDIyMDc4MTE3OGVjMzkxNWNiMTZhY2E3NTdkNDBlMjg0MzVhYzUzNzhkNmI0ODdhY2IxMTFkMWVlYjMzOTM5N2Y3OWEwMjIwNzgxY2NlNDhhZTQ2ZjllNDdiOTdhODQxNGZjZjQ2NmE5ODY3MjZhNTg5NmM3MmEwZTRhYmEzMTYyY2I4MjZkZCIKICB9CiBdLAogInNpZ25lZCI6IHsKICAiX3R5cGUiOiAicm9vdCIsCiAgImNvbnNpc3RlbnRfc25hcHNob3QiOiB0cnVlLAogICJleHBpcmVzIjogIjIwMjUtMDgtMTlUMTQ6MzM6MDlaIiwKICAia2V5cyI6IHsKICAgIjBjODc0MzJjM2JmMDlmZDk5MTg5ZmRjMzJmYTVlYWVkZjRlNGE1ZmFjN2JhYjczZmEwNGEyZTBmYzY0YWY2ZjUiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVXUmlHcjUraiszSjVTc0grWnRyNW5FMkgyd083XG5CVituTzNzOTNnTGNhMThxVE96SFkxb1d5QUdEeWtNU3NHVFVCU3Q5RCtBbjBLZktzRDJtZlNNNDJRPT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2ktb25saW5lLXVyaSI6ICJnY3BrbXM6cHJvamVjdHMvc2lnc3RvcmUtcm9vdC1zaWduaW5nL2xvY2F0aW9ucy9nbG9iYWwva2V5UmluZ3Mvcm9vdC9jcnlwdG9LZXlzL3RpbWVzdGFtcC9jcnlwdG9LZXlWZXJzaW9ucy8xIgogICB9LAogICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRXpCelZPbUhDUG9qTVZMU0kzNjRXaWlWOE5QckRcbjZJZ1J4Vmxpc2t6L3YreTNKRVI1bWNWR2NPTmxpRGNXTUM1SjJsZkhtalBOUGhiNEg3eG04THpmU0E9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAc2FudGlhZ290b3JyZXMiCiAgIH0sCiAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaW5pa1NzQVFtWWtOZUg1ZVlxL0NuSXpMYWFjT1xueGxTYWF3UURPd3FLeS90Q3F4cTV4eFBTSmMyMUs0V0loczlHeU9rS2Z6dWVZM0dJTHpjTUpaNGNXdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBib2JjYWxsYXdheSIKICAgfSwKICAgIjZmMjYwMDg5ZDU5MjNkYWYyMDE2NmNhNjU3YzU0M2FmNjE
4MzQ2YWI5NzE4ODRhOTk5NjJiMDE5ODhiYmUwYzMiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUV5OFhLc21oQllESThKYzBHd3pCeGVLYXgwY201XG5TVEtFVTY1SFBGdW5VbjQxc1Q4cGkwRmpNNElrSHovWVVtd21MVU8wV3Q3bHhoajZCa0xJSzRxWUF3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGRsb3JlbmMiCiAgIH0sCiAgICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFMGdocmg5Mkx3MVlyM2lkR1Y1V3FDdE1EQjhDeFxuK0Q4aGRDNHcyWkxOSXBsVlJvVkdMc2tZYTNnaGVNeU9qaUo4a1BpMTVhUTIvLzdQK29qN1V2SlBHdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBqb3NodWFnbCIKICAgfSwKICAgImU3MWE1NGQ1NDM4MzViYTg2YWRhZDk0NjAzNzljNzY0MWZiODcyNmQxNjRlYTc2NjgwMWExYzUyMmFiYTdlYTIiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVFWHN6M1NaWEZiOGpNVjQyajZwSmx5amJqUjhLXG5OM0J3b2NleHE2TE1JYjVxc1dLT1F2TE4xNk5VZWZMYzRIc3dPb3VtUnNWVmFhalNwUVM2Zm9ia1J3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQG1ubTY3OCIKICAgfQogIH0sCiAgInJvbGVzIjogewogICAicm9vdCI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgIjZmMjYwMDg5ZDU5MjNkYWYyMDE2NmNhNjU3YzU0M2FmNjE4MzQ2YWI5NzE4ODRhOTk5NjJiMDE5ODhiYmUwYzMiLAogICAgICJlNzFhNTRkNTQzODM1YmE4NmFkYWQ5NDYwMzc5Yzc2NDFmYjg3MjZkMTY0ZWE3NjY4MDFhMWM1MjJhYmE3ZWEyIiwKICAgICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiIsCiAgICAgIjYxNjQzODM4MTI1YjQ0MGI0MGRiNjk0MmY1Y2I1YTMxYzBkYzA0MzY4MzE2ZWIyYWFhNThiOTU5MDRhNTgyMjIiLAogICAgICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAzCiAgIH0sCiAgICJzbmFwc2hvdCI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgIjBjODc0MzJjM2JmMDlmZDk5MTg5ZmRjMzJmYTVlYWVkZjRlNGE1ZmFjN2JhYjczZmEwNGEyZTBmYzY0YWY2ZjUiCiAgICBdLAogICAgInRocmVzaG9sZCI6IDEsCiAgICAieC10dWYtb24tY2ktZXhwaXJ5LXBlcmlvZCI6IDM2NTAsCiAgICAieC10dWYtb24tY2ktc2lnbmluZy1wZXJpb2QiOiAzNjUKICAgfSwKICAgInRhcmdldHMiOiB7CiAgICAia2V5aWRzIjogWwogICAgICI2ZjI2MDA4OWQ1OTIzZGFmMjAxNjZjYTY1N2M1NDNhZjYxODM0NmFiOTcxODg0YTk5OTYyYjAxOTg4YmJlMGMzIiwKICAgICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICAgIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIiwKICAgICAiYTY4N2U1YmY0ZmFiODJiMGVlNThkNDZlMDVjOTUzNTE0NWEyYzlhZmI0NThmNDNkNDJiNDVjYTBmZGNlMmE3MCIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMwogICB9LAogICAidGltZXN0YW1wIjogewogICAgImtleWlkcyI6IFsKICAgICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMSwKICAgICJ4LXR1Zi1vbi1jaS1leHBpcnktcGVyaW9kIjogNywKICAgICJ4LXR1Zi1vbi1jaS1zaWduaW5nLXBlcmlvZCI6IDYKICAgfQogIH
0sCiAgInNwZWNfdmVyc2lvbiI6ICIxLjAiLAogICJ2ZXJzaW9uIjogMTIsCiAgIngtdHVmLW9uLWNpLWV4cGlyeS1wZXJpb2QiOiAxOTcsCiAgIngtdHVmLW9uLWNpLXNpZ25pbmctcGVyaW9kIjogNDYKIH0KfQ==","targets":{"trusted_root.json":"ewogICJtZWRpYVR5cGUiOiAiYXBwbGljYXRpb24vdm5kLmRldi5zaWdzdG9yZS50cnVzdGVkcm9vdCtqc29uO3ZlcnNpb249MC4xIiwKICAidGxvZ3MiOiBbCiAgICB7CiAgICAgICJiYXNlVXJsIjogImh0dHBzOi8vcmVrb3Iuc2lnc3RvcmUuZGV2IiwKICAgICAgImhhc2hBbGdvcml0aG0iOiAiU0hBMl8yNTYiLAogICAgICAicHVibGljS2V5IjogewogICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUyRzJZKzJ0YWJkVFY1QmNHaUJJeDBhOWZBRndya0JibUxTR3RrczRMM3FYNnlZWTB6dWZCbmhDOFVyL2l5NTVHaFdQLzlBL2JZMkxoQzMwTTkrUll0dz09IiwKICAgICAgICAia2V5RGV0YWlscyI6ICJQS0lYX0VDRFNBX1AyNTZfU0hBXzI1NiIsCiAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgInN0YXJ0IjogIjIwMjEtMDEtMTJUMTE6NTM6MjcuMDAwWiIKICAgICAgICB9CiAgICAgIH0sCiAgICAgICJsb2dJZCI6IHsKICAgICAgICAia2V5SWQiOiAid05JOWF0UUdseitWV2ZPNkxSeWdINFFVZlkvOFc0UkZ3aVQ1aTVXUmdCMD0iCiAgICAgIH0KICAgIH0KICBdLAogICJjZXJ0aWZpY2F0ZUF1dGhvcml0aWVzIjogWwogICAgewogICAgICAic3ViamVjdCI6IHsKICAgICAgICAib3JnYW5pemF0aW9uIjogInNpZ3N0b3JlLmRldiIsCiAgICAgICAgImNvbW1vbk5hbWUiOiAic2lnc3RvcmUiCiAgICAgIH0sCiAgICAgICJ1cmkiOiAiaHR0cHM6Ly9mdWxjaW8uc2lnc3RvcmUuZGV2IiwKICAgICAgImNlcnRDaGFpbiI6IHsKICAgICAgICAiY2VydGlmaWNhdGVzIjogWwogICAgICAgICAgewogICAgICAgICAgICAicmF3Qnl0ZXMiOiAiTUlJQitEQ0NBWDZnQXdJQkFnSVROVmtEWm9DaW9mUERzeTdkZm02Z2VMYnVoekFLQmdncWhrak9QUVFEQXpBcU1SVXdFd1lEVlFRS0V3eHphV2R6ZEc5eVpTNWtaWFl4RVRBUEJnTlZCQU1UQ0hOcFozTjBiM0psTUI0WERUSXhNRE13TnpBek1qQXlPVm9YRFRNeE1ESXlNekF6TWpBeU9Wb3dLakVWTUJNR0ExVUVDaE1NYzJsbmMzUnZjbVV1WkdWMk1SRXdEd1lEVlFRREV3aHphV2R6ZEc5eVpUQjJNQkFHQnlxR1NNNDlBZ0VHQlN1QkJBQWlBMklBQkxTeUE3SWk1aytwTk84WkVXWTB5bGVtV0Rvd09rTmEza0wrR1pFNVo1R1dlaEw5L0E5YlJOQTNSYnJzWjVpMEpjYXN0YVJMN1NwNWZwL2pENWR4cWMvVWRUVm5sdlMxNmFuKzJZZnN3ZS9RdUxvbFJVQ3JjT0UyKzJpQTUrdHpkNk5tTUdRd0RnWURWUjBQQVFIL0JBUURBZ0VHTUJJR0ExVWRFd0VCL3dRSU1BWUJBZjhDQVFFd0hRWURWUjBPQkJZRUZNakZIUUJCbWlRcE1sRWs2dzJ1U3UxS0J0UHNNQjhHQTFVZEl3UVlNQmFBRk1qRkhRQkJtaVFwTWxFazZ3MnVTdTFLQnRQc01Bb0dDQ3FHU000OUJBTURBMmdBTUdVQ01IOGxpV0pmTXVpNnZYWEJoakRnWTRNd3NsbU4vVEp4VmUvODNXckZvbXdtTmYwNTZ5MVg0OEY5YzRtM2Ezb3pYQUl4QUtqUmF5NS9hai9qc0tLR0lrbVFhdGpJOHV1cEhyLytDeEZ2YUpXbXBZcU5rTERHUlUrOW9yemg1aEkyUnJjdWFRPT0iCiAgICAgICAgICB9CiAgICAgICAgXQogICAgICB9LAogICAgICAidmFsaWRGb3IiOiB7CiAgICAgICAgInN0YXJ0IjogIjIwMjEtMDMtMDdUMDM6MjA6MjkuMDAwWiIsCiAgICAgICAgImVuZCI6ICIyMDIyLTEyLTMxVDIzOjU5OjU5Ljk5OVoiCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJzdWJqZWN0IjogewogICAgICAgICJvcmdhbml6YXRpb24iOiAic2lnc3RvcmUuZGV2IiwKICAgICAgICAiY29tbW9uTmFtZSI6ICJzaWdzdG9yZSIKICAgICAgfSwKICAgICAgInVyaSI6ICJodHRwczovL2Z1bGNpby5zaWdzdG9yZS5kZXYiLAogICAgICAiY2VydENoYWluIjogewogICAgICAgICJjZXJ0aWZpY2F0ZXMiOiBbCiAgICAgICAgICB7CiAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNSUlDR2pDQ0FhR2dBd0lCQWdJVUFMblZpVmZuVTBickphc21Sa0hybi9VbmZhUXdDZ1lJS29aSXpqMEVBd013S2pFVk1CTUdBMVVFQ2hNTWMybG5jM1J2Y21VdVpHVjJNUkV3RHdZRFZRUURFd2h6YVdkemRHOXlaVEFlRncweU1qQTBNVE15TURBMk1UVmFGdzB6TVRFd01EVXhNelUyTlRoYU1EY3hGVEFUQmdOVkJBb1RESE5wWjNOMGIzSmxMbVJsZGpFZU1Cd0dBMVVFQXhNVmMybG5jM1J2Y21VdGFXNTBaWEp0WldScFlYUmxNSFl3RUFZSEtvWkl6ajBDQVFZRks0RUVBQ0lEWWdBRThSVlMveXNIK05PdnVEWnlQSVp0aWxnVUY5TmxhcllwQWQ5SFAxdkJCSDFVNUNWNzdMU1M3czBaaUg0bkU3SHY3cHRTNkx2dlIvU1RrNzk4TFZnTXpMbEo0SGVJZkYzdEhTYWV4TGNZcFNBU3Ixa1MwTi9SZ0JKei85aldDaVhubzNzd2VUQU9CZ05WSFE4QkFmOEVCQU1DQVFZd0V3WURWUjBsQkF3d0NnWUlLd1lCQlFVSEF3TXdFZ1lEVlIwVEFRSC9CQWd3QmdFQi93SUJBREFkQmdOVkhRNEVGZ1FVMzlQcHoxWWtFWmI1cU5qcEtGV2l4aTRZWkQ4d0h3WURWUjBqQkJnd0ZvQVVXTUFlWDVGRnBXYXBlc3lRb1pNaTBDckZ4Zm93Q2dZSUtvWkl6ajBFQXdNRFp3QXdaQUl3UENzUUs0RFl
pWllEUElhRGk1SEZLbmZ4WHg2QVNTVm1FUmZzeW5ZQmlYMlg2U0pSblpVODQvOURaZG5GdnZ4bUFqQk90NlFwQmxjNEovMER4dmtUQ3FwY2x2emlMNkJDQ1BuamRsSUIzUHUzQnhzUG15Z1VZN0lpMnpiZENkbGlpb3c9IgogICAgICAgICAgfSwKICAgICAgICAgIHsKICAgICAgICAgICAgInJhd0J5dGVzIjogIk1JSUI5ekNDQVh5Z0F3SUJBZ0lVQUxaTkFQRmR4SFB3amVEbG9Ed3lZQ2hBTy80d0NnWUlLb1pJemowRUF3TXdLakVWTUJNR0ExVUVDaE1NYzJsbmMzUnZjbVV1WkdWMk1SRXdEd1lEVlFRREV3aHphV2R6ZEc5eVpUQWVGdzB5TVRFd01EY3hNelUyTlRsYUZ3MHpNVEV3TURVeE16VTJOVGhhTUNveEZUQVRCZ05WQkFvVERITnBaM04wYjNKbExtUmxkakVSTUE4R0ExVUVBeE1JYzJsbmMzUnZjbVV3ZGpBUUJnY3Foa2pPUFFJQkJnVXJnUVFBSWdOaUFBVDdYZUZUNHJiM1BRR3dTNElhanRMazMvT2xucGdhbmdhQmNsWXBzWUJyNWkrNHluQjA3Y2ViM0xQME9JT1pkeGV4WDY5YzVpVnV5SlJRK0h6MDV5aStVRjN1QldBbEhwaVM1c2gwK0gyR0hFN1NYcmsxRUM1bTFUcjE5TDlnZzkyall6QmhNQTRHQTFVZER3RUIvd1FFQXdJQkJqQVBCZ05WSFJNQkFmOEVCVEFEQVFIL01CMEdBMVVkRGdRV0JCUll3QjVma1VXbFpxbDZ6SkNoa3lMUUtzWEYrakFmQmdOVkhTTUVHREFXZ0JSWXdCNWZrVVdsWnFsNnpKQ2hreUxRS3NYRitqQUtCZ2dxaGtqT1BRUURBd05wQURCbUFqRUFqMW5IZVhacCsxM05XQk5hK0VEc0RQOEcxV1dnMXRDTVdQL1dIUHFwYVZvMGpoc3dlTkZaZ1NzMGVFN3dZSTRxQWpFQTJXQjlvdDk4c0lrb0YzdlpZZGQzL1Z0V0I1YjlUTk1lYTdJeC9zdEo1VGZjTExlQUJMRTRCTkpPc1E0dm5CSEoiCiAgICAgICAgICB9CiAgICAgICAgXQogICAgICB9LAogICAgICAidmFsaWRGb3IiOiB7CiAgICAgICAgInN0YXJ0IjogIjIwMjItMDQtMTNUMjA6MDY6MTUuMDAwWiIKICAgICAgfQogICAgfQogIF0sCiAgImN0bG9ncyI6IFsKICAgIHsKICAgICAgImJhc2VVcmwiOiAiaHR0cHM6Ly9jdGZlLnNpZ3N0b3JlLmRldi90ZXN0IiwKICAgICAgImhhc2hBbGdvcml0aG0iOiAiU0hBMl8yNTYiLAogICAgICAicHVibGljS2V5IjogewogICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUViZndSK1JKdWRYc2NnUkJScEtYMVhGRHkzUHl1ZER4ei9TZm5SaTFmVDhla3BmQmQyTzF1b3o3anIzWjhuS3p4QTY5RVVRK2VGQ0ZJM3pldWJQV1U3dz09IiwKICAgICAgICAia2V5RGV0YWlscyI6ICJQS0lYX0VDRFNBX1AyNTZfU0hBXzI1NiIsCiAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgInN0YXJ0IjogIjIwMjEtMDMtMTRUMDA6MDA6MDAuMDAwWiIsCiAgICAgICAgICAiZW5kIjogIjIwMjItMTAtMzFUMjM6NTk6NTkuOTk5WiIKICAgICAgICB9CiAgICAgIH0sCiAgICAgICJsb2dJZCI6IHsKICAgICAgICAia2V5SWQiOiAiQ0dDUzhDaFMvMmhGMGRGcko0U2NSV2NZckJZOXd6alNiZWE4SWdZMmIzST0iCiAgICAgIH0KICAgIH0sCiAgICB7CiAgICAgICJiYXNlVXJsIjogImh0dHBzOi8vY3RmZS5zaWdzdG9yZS5kZXYvMjAyMiIsCiAgICAgICJoYXNoQWxnb3JpdGhtIjogIlNIQTJfMjU2IiwKICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAicmF3Qnl0ZXMiOiAiTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaVBTbEZpMENtRlRmRWpDVXFGOUh1Q0VjWVhOS0FhWWFsSUptQlo4eXllelBqVHFoeHJLQnBNbmFvY1Z0TEpCSTFlTTN1WG5RelFHQUpkSjRnczlGeXc9PSIsCiAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICJzdGFydCI6ICIyMDIyLTEwLTIwVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgfQogICAgICB9LAogICAgICAibG9nSWQiOiB7CiAgICAgICAgImtleUlkIjogIjNUMHdhc2JIRVRKakdSNGNtV2MzQXFKS1hyamVQSzMvaDRweWdDOHA3bzQ9IgogICAgICB9CiAgICB9CiAgXQp9Cg==","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiLAogICAgICAgICAgICAgICAgICAgICJlbmQiOiAiMjAyNS0wMS0yOVQwMDowMDowMC4wMDBaIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJrZXlJZCI6ICJTSEEyNTY6amwzYndzd3U4MFBqam
9rQ2doMG8ydzVjMlU0TGhRQUU1N2dqOWN6MWt6QSIsCiAgICAgICAgICAgICJrZXlVc2FnZSI6ICJucG06YXR0ZXN0YXRpb25zIiwKICAgICAgICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUxT2xiM3pNQUZGeFhLSGlJa1FPNWNKM1lobDVpNlVQcCtJaHV0ZUJKYnVIY0E1VW9nS28wRVd0bFd3VzZLU2FLb1RORVlMN0psQ1FpVm5raEJrdFVnZz09IiwKICAgICAgICAgICAgICAgICJrZXlEZXRhaWxzIjogIlBLSVhfRUNEU0FfUDI1Nl9TSEFfMjU2IiwKICAgICAgICAgICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICAgICAgICAgICAic3RhcnQiOiAiMjAyMi0xMi0wMVQwMDowMDowMC4wMDBaIiwKICAgICAgICAgICAgICAgICAgICAiZW5kIjogIjIwMjUtMDEtMjlUMDA6MDA6MDAuMDAwWiIKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OkRoUTh3UjVBUEJ2RkhMRi8rVGMrQVl2UE9kVHBjSURxT2h4c0JIUndDN1UiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpEaFE4d1I1QVBCdkZITEYvK1RjK0FZdlBPZFRwY0lEcU9oeHNCSFJ3QzdVIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
+{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"ewogInNpZ25hdHVyZXMiOiBbCiAgewogICAia2V5aWQiOiAiNmYyNjAwODlkNTkyM2RhZjIwMTY2Y2E2NTdjNTQzYWY2MTgzNDZhYjk3MTg4NGE5OTk2MmIwMTk4OGJiZTBjMyIsCiAgICJzaWciOiAiIgogIH0sCiAgewogICAia2V5aWQiOiAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICJzaWciOiAiMzA0NTAyMjEwMGJiZGRkNDY0ZjgwNjZjZWI4OGJhNzg3Mzc1YzEyY2Q2MzMwNjgwZTA4YzI5MTA3MDNlNjUzOGM3MWNjNzlhZDIwMjIwNTE5MGIwNmU0NTM3ZmU5NjFiM2VmODFmZTY4ZWRjZDAwODljMTlmOTE5YWZlZDQyM2I5YWFmZDcwMDY0MTE1MyIKICB9LAogIHsKICAgImtleWlkIjogIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAic2lnIjogIjMwNDQwMjIwNjkzMDZjZDUyNTdmNzMyYTc0MGMxYWZlNjBhOGU0MzNjNWRlNThlYWZlYWRiZTk5YzMzNmM5YzcxZDE5OGNmODAyMjAwZDc3Mzk1M2FlN2RiYzQ4ZDNlNWJhZDlhNmY2NGJhZmZmMTk2YjdlMmFkNGE1MmExOTUxOTM2N2Q0N2RjMDQyIgogIH0sCiAgewogICAia2V5aWQiOiAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICJzaWciOiAiMzA0NDAyMjA0ZDIxYTJlYzgwZGY2NmU2MWY2ZmUyOTEyOTUxZGM0N2RmODM2MDM2ZjhjMGFiMTA4MTZkMzc1ZTcxZGJmNzllMDIyMDU0N2FkY2UxYWZkZjA0ZTY3OTRlZmEyMDNkZDUyNjRjNmY3ZTBlZjc4ZTU3ZmU5MzRiMGQyNmNiOTk0ZWVjNzYiCiAgfSwKICB7CiAgICJrZXlpZCI6ICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIiwKICAgInNpZyI6ICIzMDQ1MDIyMDYwODI2NDk2NTU3MTQ0ZWIxNjQ5ODkzZWQ1ZjZmNGVhNTQ1MzZmZWIwY2E4MmY4Yjg5YWU2NDFiZTM5NzQzZTUwMjIxMDBhZDcxMThiNWU5ZDQ4MzczMjYyMDZlNDEyZmM2ZGEyOTk5OTI1ZDExMDMyOGE3YzE2NmIwNmM2MjQzMzZjOTNmIgogIH0sCiAgewogICAia2V5aWQiOiAiMTgzZTY0ZjM3NjcwZGMxM2NhMGQyODk5NWEzMDUzZjM3NDA5NTRkZGNlNDQzMjFhNDFlNDY1MzRjZjQ0ZTYzMiIsCiAgICJzaWciOiAiMzA0NjAyMjEwMGQ4MTc5NDM5YzJlNzNlYjBjMTczM2FiZWU3ZmFmODMyZGNhZWE3MjYzZWRjYjQ5MTk4OTFjM2EyNDdmMDU5MjMwMjIxMDBlMWE0MzdlMDc5N2U4MDNmOWI3MmRjOWQyZDkyMTU1YjBhMjI3MGMyNGVmZGQ1ZjRiM2E1ZDhmMGIwZjQzMWE3IgogIH0KIF0sCiAic2lnbmVkIjogewogICJfdHlwZSI6ICJyb290IiwKICAiY29uc2lzdGVudF9zbmFwc2hvdCI6IHRydWUsCiAgImV4cGlyZXMiOiAiMjAyNi0wMS0yMlQxMzowNTo1OVoiLAogICJrZXlzIjogewogICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVdSaUdyNStqKzNKNVNzSCtadHI1bkUySDJ3TzdcbkJWK25PM3M5M2dMY2ExOHFUT3pIWTFvV3lBR0R5a01Tc0dUVUJTdDlEK0FuMEtmS3NEMm1mU000MlE9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1vbmxpbmUtdXJpIjogImdjcGttczpwcm9qZWN0cy9zaWdzdG9yZS1yb290LXNpZ25pbmcvbG9jYXRpb25zL2dsb2JhbC9rZXlSaW5ncy9yb290L2NyeXB0b0tleXMvdGltZXN0YW1wL2NyeXB0b0tleVZlcnNpb25zLzEiCiAgIH0sCiAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIjogewogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVNeHBQT0pDSVo1b3RHNDEwNmZHSnNlRVFpM1Y5XG5wa01ZUTR1eVY5VGoxTTdXSFhJeUxHK2prZnZ1RzBnbFExSlpiUlpaQlYzZ0FSNHNvamRHSElTZW93PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGxhbmNlIgogICB9LAogICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1
CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRXpCelZPbUhDUG9qTVZMU0kzNjRXaWlWOE5QckRcbjZJZ1J4Vmxpc2t6L3YreTNKRVI1bWNWR2NPTmxpRGNXTUM1SjJsZkhtalBOUGhiNEg3eG04THpmU0E9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAc2FudGlhZ290b3JyZXMiCiAgIH0sCiAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaW5pa1NzQVFtWWtOZUg1ZVlxL0NuSXpMYWFjT1xueGxTYWF3UURPd3FLeS90Q3F4cTV4eFBTSmMyMUs0V0loczlHeU9rS2Z6dWVZM0dJTHpjTUpaNGNXdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBib2JjYWxsYXdheSIKICAgfSwKICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUwZ2hyaDkyTHcxWXIzaWRHVjVXcUN0TURCOEN4XG4rRDhoZEM0dzJaTE5JcGxWUm9WR0xza1lhM2doZU15T2ppSjhrUGkxNWFRMi8vN1Arb2o3VXZKUEd3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGpvc2h1YWdsIgogICB9LAogICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRUVYc3ozU1pYRmI4ak1WNDJqNnBKbHlqYmpSOEtcbk4zQndvY2V4cTZMTUliNXFzV0tPUXZMTjE2TlVlZkxjNEhzd09vdW1Sc1ZWYWFqU3BRUzZmb2JrUnc9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAbW5tNjc4IgogICB9CiAgfSwKICAicm9sZXMiOiB7CiAgICJyb290IjogewogICAgImtleWlkcyI6IFsKICAgICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICAgIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIiwKICAgICAiYTY4N2U1YmY0ZmFiODJiMGVlNThkNDZlMDVjOTUzNTE0NWEyYzlhZmI0NThmNDNkNDJiNDVjYTBmZGNlMmE3MCIsCiAgICAgIjE4M2U2NGYzNzY3MGRjMTNjYTBkMjg5OTVhMzA1M2YzNzQwOTU0ZGRjZTQ0MzIxYTQxZTQ2NTM0Y2Y0NGU2MzIiCiAgICBdLAogICAgInRocmVzaG9sZCI6IDMKICAgfSwKICAgInNuYXBzaG90IjogewogICAgImtleWlkcyI6IFsKICAgICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMSwKICAgICJ4LXR1Zi1vbi1jaS1leHBpcnktcGVyaW9kIjogMzY1MCwKICAgICJ4LXR1Zi1vbi1jaS1zaWduaW5nLXBlcmlvZCI6IDM2NQogICB9LAogICAidGFyZ2V0cyI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgImU3MWE1NGQ1NDM4MzViYTg2YWRhZDk0NjAzNzljNzY0MWZiODcyNmQxNjRlYTc2NjgwMWExYzUyMmFiYTdlYTIiLAogICAgICIyMmY0Y2FlYzZkOGU2Zjk1NTVhZjY2YjNkNGMzY2IwNmEzYmIyM2ZkYzdlMzljOTE2YzYxZjQ2MmU2ZjUyYjA2IiwKICAgICAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiLA
ogICAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAzCiAgIH0sCiAgICJ0aW1lc3RhbXAiOiB7CiAgICAia2V5aWRzIjogWwogICAgICIwYzg3NDMyYzNiZjA5ZmQ5OTE4OWZkYzMyZmE1ZWFlZGY0ZTRhNWZhYzdiYWI3M2ZhMDRhMmUwZmM2NGFmNmY1IgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAxLAogICAgIngtdHVmLW9uLWNpLWV4cGlyeS1wZXJpb2QiOiA3LAogICAgIngtdHVmLW9uLWNpLXNpZ25pbmctcGVyaW9kIjogNgogICB9CiAgfSwKICAic3BlY192ZXJzaW9uIjogIjEuMCIsCiAgInZlcnNpb24iOiAxMywKICAieC10dWYtb24tY2ktZXhwaXJ5LXBlcmlvZCI6IDE5NywKICAieC10dWYtb24tY2ktc2lnbmluZy1wZXJpb2QiOiA0NgogfQp9","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore-tsa-selfsigned"
      },
      "uri": "https://timestamp.sigstore.dev/api/v1/timestamp",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICEDCCAZagAwIBAgIUOhNULwyQYe68wUMvy4qOiyojiwwwCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMC4xFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEVMBMGA1UEAxMMc2lnc3RvcmUtdHNhMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE4ra2Z8hKNig2T9kFjCAToGG30jky+WQv3BzL+mKvh1SKNR/UwuwsfNCg4sryoYAd8E6isovVA3M4aoNdm9QDi50Z8nTEyvqgfDPtTIwXItfiW/AFf1V7uwkbkAoj0xxco2owaDAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFIn9eUOHz9BlRsMCRscsc1t9tOsDMB8GA1UdIwQYMBaAFJjsAe9/u1H/1JUeb4qImFMHic6/MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMIMAoGCCqGSM49BAMDA2gAMGUCMDtpsV/6KaO0qyF/UMsX2aSUXKQFdoGTptQGc0ftq1csulHPGG6dsmyMNd3JB+G3EQIxAOajvBcjpJmKb4Nv+2Taoj8Uc5+b6ih6FXCCKraSqupe07zqswMcXJTe1cExvHvvlw=="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUV7f0GLDOoEzIh8LXSW80OJiUp14wCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMDkxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEgMB4GA1UEAxMXc2lnc3RvcmUtdHNhLXNlbGZzaWduZWQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQUQNtfRT/ou3YATa6wB/kKTe70cfJwyRIBovMnt8RcJph/COE82uyS6FmppLLL1VBPGcPfpQPYJNXzWwi8icwhKQ6W/Qe2h3oebBb2FHpwNJDqo+TMaC/tdfkv/ElJB72jRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSY7AHvf7tR/9SVHm+KiJhTB4nOvzAKBggqhkjOPQQDAwNpADBmAjEAwGEGrfGZR1cen1R8/DTVMI943LssZmJRtDp/i7SfGHmGRP6gRbuj9vOK3b67Z0QQAjEAuT2H673LQEaHTcyQSZrkp4mX7WwkmF+sVbkYY5mXN+RMH13KUEHHOqASaemYWK/E"
          }
        ]
      },
      "validFor": {
        "start": "2025-07-04T00:00:00Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiLAogICAgICAgICAgICAgICAgICAgICJlbmQiOiAiMjAyNS0wMS0yOVQwMDowMDowMC4wMDBaIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJrZXlJZCI6ICJTSEEyNTY6amwzYndzd3U4MFBqam9rQ2doMG8ydzVjMlU0TGhRQUU1N2dqOWN6MWt6QSIsCiAgICAgICAgICAgICJrZXlVc2FnZSI6ICJucG06YXR0ZXN0YXRpb25zIiwKICAgICAgICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUxT2xiM3pNQUZGeFhLSGlJa1FPNWNKM1lobDVpNlVQcCtJaHV0ZUJKYnVIY0E1VW9nS28wRVd0bFd3VzZLU2FLb1RORVlMN0psQ1FpVm5raEJrdFVnZz09IiwKICAgICAgICAgICAgICAgICJrZXlEZXRhaWxzIjogIlBLSVhfRUNEU0FfUDI1Nl9TSEFfMjU2IiwKICAgICAgICAgICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICAgICAgICAgICAic3RhcnQiOiAiMjAyMi0xMi0wMVQwMDowMDowMC4wMDBaIiwKICAgICAgICAgICAgICAgICAgICAiZW5kIjogIjIwMjUtMDEtMjlUMDA6MDA6MDAuMDAwWiIKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OkRoUTh3UjVBUEJ2RkhMRi8rVGMrQVl2UE9kVHBjSURxT2h4c0JIUndDN1UiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpEaFE4d1I1QVBCdkZITEYvK1RjK0FZdlBPZFRwY0lEcU9oeHNCSFJ3QzdVIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 5c4f37bfaf3fb..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: envelope.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payload.length !== 0) {
-            obj.payload = base64FromBytes(message.payload);
-        }
-        if (message.payloadType !== "") {
-            obj.payloadType = message.payloadType;
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.sig.length !== 0) {
-            obj.sig = base64FromBytes(message.sig);
-        }
-        if (message.keyid !== "") {
-            obj.keyid = message.keyid;
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 6138fef5672fc..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,174 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: events.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? globalThis.String(object.id) : "",
-            source: isSet(object.source) ? globalThis.String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: globalThis.String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id !== "") {
-            obj.id = message.id;
-        }
-        if (message.source !== "") {
-            obj.source = message.source;
-        }
-        if (message.specVersion !== "") {
-            obj.specVersion = message.specVersion;
-        }
-        if (message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.attributes) {
-            const entries = Object.entries(message.attributes);
-            if (entries.length > 0) {
-                obj.attributes = {};
-                entries.forEach(([k, v]) => {
-                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-                });
-            }
-        }
-        if (message.data?.$case === "binaryData") {
-            obj.binaryData = base64FromBytes(message.data.binaryData);
-        }
-        else if (message.data?.$case === "textData") {
-            obj.textData = message.data.textData;
-        }
-        else if (message.data?.$case === "protoData") {
-            obj.protoData = any_1.Any.toJSON(message.data.protoData);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? globalThis.String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.key !== "") {
-            obj.key = message.key;
-        }
-        if (message.value !== undefined) {
-            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.attr?.$case === "ceBoolean") {
-            obj.ceBoolean = message.attr.ceBoolean;
-        }
-        else if (message.attr?.$case === "ceInteger") {
-            obj.ceInteger = Math.round(message.attr.ceInteger);
-        }
-        else if (message.attr?.$case === "ceString") {
-            obj.ceString = message.attr.ceString;
-        }
-        else if (message.attr?.$case === "ceBytes") {
-            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
-        }
-        else if (message.attr?.$case === "ceUri") {
-            obj.ceUri = message.attr.ceUri;
-        }
-        else if (message.attr?.$case === "ceUriRef") {
-            obj.ceUriRef = message.attr.ceUriRef;
-        }
-        else if (message.attr?.$case === "ceTimestamp") {
-            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
-        }
-        return obj;
-    },
-};
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return {
-            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events?.length) {
-            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index b4d9ccc781c2f..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,141 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/api/field_behavior.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FieldBehavior = void 0;
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-/* eslint-disable */
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary  order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-    /**
-     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
-     * This indicates that if the user provides the empty value in a request,
-     * a non-empty value will be returned. The user will not be aware of what
-     * non-empty value to expect.
-     */
-    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
-    /**
-     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
-     * google.api.resource) is used in the resource name to uniquely identify the
-     * resource. For AIP-compliant APIs, this should only be applied to the
-     * `name` field on the resource.
-     *
-     * This behavior should not be applied to references to other resources within
-     * the message.
-     *
-     * The identifier field of resources often have different field behavior
-     * depending on the request it is embedded in (e.g. for Create methods name
-     * is optional and unused, while for Update methods it is required). Instead
-     * of method-specific annotations, only `IDENTIFIER` is required.
-     */
-    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
-})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        case 7:
-        case "NON_EMPTY_DEFAULT":
-            return FieldBehavior.NON_EMPTY_DEFAULT;
-        case 8:
-        case "IDENTIFIER":
-            return FieldBehavior.IDENTIFIER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        case FieldBehavior.NON_EMPTY_DEFAULT:
-            return "NON_EMPTY_DEFAULT";
-        case FieldBehavior.IDENTIFIER:
-            return "IDENTIFIER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index f0c8aab773e4c..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/any.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.typeUrl !== "") {
-            obj.typeUrl = message.typeUrl;
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d6f8ddddf799d..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,2042 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/descriptor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
-exports.GeneratedCodeInfo_Annotation = void 0;
-exports.editionFromJSON = editionFromJSON;
-exports.editionToJSON = editionToJSON;
-exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
-exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
-exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
-exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
-exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
-exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
-exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
-exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
-exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
-exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
-exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
-exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
-exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
-exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
-exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
-exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
-exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
-exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
-exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
-exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
-/* eslint-disable */
-/** The full set of known editions. */
-var Edition;
-(function (Edition) {
-    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
-    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
-    /**
-     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
-     * was first introduced.  This is effectively an "infinite past".
-     */
-    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
-    /**
-     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
-     * distinct editions.  These can't be used to specify the edition of proto
-     * files, but feature definitions must supply proto2/proto3 defaults for
-     * backwards compatibility.
-     */
-    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
-    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
-    /**
-     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
-     * should not be depended on, but they will always be time-ordered for easy
-     * comparison.
-     */
-    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
-    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
-    /**
-     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
-     * used or relied on outside of tests.
-     */
-    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
-    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
-    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
-    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
-    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
-    /**
-     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
-     * ever be used by plugins that can expect to never require any changes to
-     * support a new edition.
-     */
-    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
-})(Edition || (exports.Edition = Edition = {}));
-function editionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "EDITION_UNKNOWN":
-            return Edition.EDITION_UNKNOWN;
-        case 900:
-        case "EDITION_LEGACY":
-            return Edition.EDITION_LEGACY;
-        case 998:
-        case "EDITION_PROTO2":
-            return Edition.EDITION_PROTO2;
-        case 999:
-        case "EDITION_PROTO3":
-            return Edition.EDITION_PROTO3;
-        case 1000:
-        case "EDITION_2023":
-            return Edition.EDITION_2023;
-        case 1001:
-        case "EDITION_2024":
-            return Edition.EDITION_2024;
-        case 1:
-        case "EDITION_1_TEST_ONLY":
-            return Edition.EDITION_1_TEST_ONLY;
-        case 2:
-        case "EDITION_2_TEST_ONLY":
-            return Edition.EDITION_2_TEST_ONLY;
-        case 99997:
-        case "EDITION_99997_TEST_ONLY":
-            return Edition.EDITION_99997_TEST_ONLY;
-        case 99998:
-        case "EDITION_99998_TEST_ONLY":
-            return Edition.EDITION_99998_TEST_ONLY;
-        case 99999:
-        case "EDITION_99999_TEST_ONLY":
-            return Edition.EDITION_99999_TEST_ONLY;
-        case 2147483647:
-        case "EDITION_MAX":
-            return Edition.EDITION_MAX;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-function editionToJSON(object) {
-    switch (object) {
-        case Edition.EDITION_UNKNOWN:
-            return "EDITION_UNKNOWN";
-        case Edition.EDITION_LEGACY:
-            return "EDITION_LEGACY";
-        case Edition.EDITION_PROTO2:
-            return "EDITION_PROTO2";
-        case Edition.EDITION_PROTO3:
-            return "EDITION_PROTO3";
-        case Edition.EDITION_2023:
-            return "EDITION_2023";
-        case Edition.EDITION_2024:
-            return "EDITION_2024";
-        case Edition.EDITION_1_TEST_ONLY:
-            return "EDITION_1_TEST_ONLY";
-        case Edition.EDITION_2_TEST_ONLY:
-            return "EDITION_2_TEST_ONLY";
-        case Edition.EDITION_99997_TEST_ONLY:
-            return "EDITION_99997_TEST_ONLY";
-        case Edition.EDITION_99998_TEST_ONLY:
-            return "EDITION_99998_TEST_ONLY";
-        case Edition.EDITION_99999_TEST_ONLY:
-            return "EDITION_99999_TEST_ONLY";
-        case Edition.EDITION_MAX:
-            return "EDITION_MAX";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-/** The verification state of the extension range. */
-var ExtensionRangeOptions_VerificationState;
-(function (ExtensionRangeOptions_VerificationState) {
-    /** DECLARATION - All the extensions of the range must be declared. */
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
-})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
-function extensionRangeOptions_VerificationStateFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "DECLARATION":
-            return ExtensionRangeOptions_VerificationState.DECLARATION;
-        case 1:
-        case "UNVERIFIED":
-            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-function extensionRangeOptions_VerificationStateToJSON(object) {
-    switch (object) {
-        case ExtensionRangeOptions_VerificationState.DECLARATION:
-            return "DECLARATION";
-        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
-            return "UNVERIFIED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported after google.protobuf. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.  In Editions, the group wire format
-     * can be enabled via the `message_encoding` feature.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-    /**
-     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
-     * it's explicitly prohibited.  In Editions, the `field_presence` feature
-     * can be used to get this behavior.
-     */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    /**
-     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
-     * "bytes". It indicates that in C++, the data should be stored in a Cord
-     * instead of a string.  For very large strings, this may reduce memory
-     * fragmentation. It may also allow better performance when parsing from a
-     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
-     * alias the original buffer.
-     */
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
-var FieldOptions_OptionRetention;
-(function (FieldOptions_OptionRetention) {
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
-})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
-function fieldOptions_OptionRetentionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "RETENTION_UNKNOWN":
-            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
-        case 1:
-        case "RETENTION_RUNTIME":
-            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
-        case 2:
-        case "RETENTION_SOURCE":
-            return FieldOptions_OptionRetention.RETENTION_SOURCE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-function fieldOptions_OptionRetentionToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
-            return "RETENTION_UNKNOWN";
-        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
-            return "RETENTION_RUNTIME";
-        case FieldOptions_OptionRetention.RETENTION_SOURCE:
-            return "RETENTION_SOURCE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-/**
- * This indicates the types of entities that the field may apply to when used
- * as an option. If it is unset, then the field may be freely used as an
- * option on any kind of entity.
- */
-var FieldOptions_OptionTargetType;
-(function (FieldOptions_OptionTargetType) {
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
-})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
-function fieldOptions_OptionTargetTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "TARGET_TYPE_UNKNOWN":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
-        case 1:
-        case "TARGET_TYPE_FILE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
-        case 2:
-        case "TARGET_TYPE_EXTENSION_RANGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
-        case 3:
-        case "TARGET_TYPE_MESSAGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
-        case 4:
-        case "TARGET_TYPE_FIELD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
-        case 5:
-        case "TARGET_TYPE_ONEOF":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
-        case 6:
-        case "TARGET_TYPE_ENUM":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
-        case 7:
-        case "TARGET_TYPE_ENUM_ENTRY":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
-        case 8:
-        case "TARGET_TYPE_SERVICE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
-        case 9:
-        case "TARGET_TYPE_METHOD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-function fieldOptions_OptionTargetTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
-            return "TARGET_TYPE_UNKNOWN";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
-            return "TARGET_TYPE_FILE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
-            return "TARGET_TYPE_EXTENSION_RANGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
-            return "TARGET_TYPE_MESSAGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
-            return "TARGET_TYPE_FIELD";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
-            return "TARGET_TYPE_ONEOF";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
-            return "TARGET_TYPE_ENUM";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
-            return "TARGET_TYPE_ENUM_ENTRY";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
-            return "TARGET_TYPE_SERVICE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
-            return "TARGET_TYPE_METHOD";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-var FeatureSet_FieldPresence;
-(function (FeatureSet_FieldPresence) {
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
-})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
-function featureSet_FieldPresenceFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_PRESENCE_UNKNOWN":
-            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
-        case 1:
-        case "EXPLICIT":
-            return FeatureSet_FieldPresence.EXPLICIT;
-        case 2:
-        case "IMPLICIT":
-            return FeatureSet_FieldPresence.IMPLICIT;
-        case 3:
-        case "LEGACY_REQUIRED":
-            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-function featureSet_FieldPresenceToJSON(object) {
-    switch (object) {
-        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
-            return "FIELD_PRESENCE_UNKNOWN";
-        case FeatureSet_FieldPresence.EXPLICIT:
-            return "EXPLICIT";
-        case FeatureSet_FieldPresence.IMPLICIT:
-            return "IMPLICIT";
-        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
-            return "LEGACY_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-var FeatureSet_EnumType;
-(function (FeatureSet_EnumType) {
-    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
-    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
-    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
-})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
-function featureSet_EnumTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENUM_TYPE_UNKNOWN":
-            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
-        case 1:
-        case "OPEN":
-            return FeatureSet_EnumType.OPEN;
-        case 2:
-        case "CLOSED":
-            return FeatureSet_EnumType.CLOSED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-function featureSet_EnumTypeToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
-            return "ENUM_TYPE_UNKNOWN";
-        case FeatureSet_EnumType.OPEN:
-            return "OPEN";
-        case FeatureSet_EnumType.CLOSED:
-            return "CLOSED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-var FeatureSet_RepeatedFieldEncoding;
-(function (FeatureSet_RepeatedFieldEncoding) {
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
-})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
-function featureSet_RepeatedFieldEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "REPEATED_FIELD_ENCODING_UNKNOWN":
-            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
-        case 1:
-        case "PACKED":
-            return FeatureSet_RepeatedFieldEncoding.PACKED;
-        case 2:
-        case "EXPANDED":
-            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-function featureSet_RepeatedFieldEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
-            return "REPEATED_FIELD_ENCODING_UNKNOWN";
-        case FeatureSet_RepeatedFieldEncoding.PACKED:
-            return "PACKED";
-        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
-            return "EXPANDED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-var FeatureSet_Utf8Validation;
-(function (FeatureSet_Utf8Validation) {
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
-})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
-function featureSet_Utf8ValidationFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "UTF8_VALIDATION_UNKNOWN":
-            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
-        case 2:
-        case "VERIFY":
-            return FeatureSet_Utf8Validation.VERIFY;
-        case 3:
-        case "NONE":
-            return FeatureSet_Utf8Validation.NONE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-function featureSet_Utf8ValidationToJSON(object) {
-    switch (object) {
-        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
-            return "UTF8_VALIDATION_UNKNOWN";
-        case FeatureSet_Utf8Validation.VERIFY:
-            return "VERIFY";
-        case FeatureSet_Utf8Validation.NONE:
-            return "NONE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-var FeatureSet_MessageEncoding;
-(function (FeatureSet_MessageEncoding) {
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
-})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
-function featureSet_MessageEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "MESSAGE_ENCODING_UNKNOWN":
-            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
-        case 1:
-        case "LENGTH_PREFIXED":
-            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
-        case 2:
-        case "DELIMITED":
-            return FeatureSet_MessageEncoding.DELIMITED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-function featureSet_MessageEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
-            return "MESSAGE_ENCODING_UNKNOWN";
-        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
-            return "LENGTH_PREFIXED";
-        case FeatureSet_MessageEncoding.DELIMITED:
-            return "DELIMITED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-var FeatureSet_JsonFormat;
-(function (FeatureSet_JsonFormat) {
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
-})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
-function featureSet_JsonFormatFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JSON_FORMAT_UNKNOWN":
-            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
-        case 1:
-        case "ALLOW":
-            return FeatureSet_JsonFormat.ALLOW;
-        case 2:
-        case "LEGACY_BEST_EFFORT":
-            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-function featureSet_JsonFormatToJSON(object) {
-    switch (object) {
-        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
-            return "JSON_FORMAT_UNKNOWN";
-        case FeatureSet_JsonFormat.ALLOW:
-            return "ALLOW";
-        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
-            return "LEGACY_BEST_EFFORT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-var FeatureSet_EnforceNamingStyle;
-(function (FeatureSet_EnforceNamingStyle) {
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
-})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
-function featureSet_EnforceNamingStyleFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENFORCE_NAMING_STYLE_UNKNOWN":
-            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
-        case 1:
-        case "STYLE2024":
-            return FeatureSet_EnforceNamingStyle.STYLE2024;
-        case 2:
-        case "STYLE_LEGACY":
-            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-function featureSet_EnforceNamingStyleToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
-            return "ENFORCE_NAMING_STYLE_UNKNOWN";
-        case FeatureSet_EnforceNamingStyle.STYLE2024:
-            return "STYLE2024";
-        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
-            return "STYLE_LEGACY";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-/**
- * Represents the identified object's effect on the element in the original
- * .proto file.
- */
-var GeneratedCodeInfo_Annotation_Semantic;
-(function (GeneratedCodeInfo_Annotation_Semantic) {
-    /** NONE - There is no effect or the effect is indescribable. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
-    /** SET - The element is set or otherwise mutated. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
-    /** ALIAS - An alias to the element is returned. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
-})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
-function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "NONE":
-            return GeneratedCodeInfo_Annotation_Semantic.NONE;
-        case 1:
-        case "SET":
-            return GeneratedCodeInfo_Annotation_Semantic.SET;
-        case 2:
-        case "ALIAS":
-            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-function generatedCodeInfo_Annotation_SemanticToJSON(object) {
-    switch (object) {
-        case GeneratedCodeInfo_Annotation_Semantic.NONE:
-            return "NONE";
-        case GeneratedCodeInfo_Annotation_Semantic.SET:
-            return "SET";
-        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
-            return "ALIAS";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return {
-            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file?.length) {
-            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            package: isSet(object.package) ? globalThis.String(object.package) : "",
-            dependency: globalThis.Array.isArray(object?.dependency)
-                ? object.dependency.map((e) => globalThis.String(e))
-                : [],
-            publicDependency: globalThis.Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => globalThis.Number(e))
-                : [],
-            weakDependency: globalThis.Array.isArray(object?.weakDependency)
-                ? object.weakDependency.map((e) => globalThis.Number(e))
-                : [],
-            messageType: globalThis.Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            service: globalThis.Array.isArray(object?.service)
-                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.package !== undefined && message.package !== "") {
-            obj.package = message.package;
-        }
-        if (message.dependency?.length) {
-            obj.dependency = message.dependency;
-        }
-        if (message.publicDependency?.length) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        if (message.weakDependency?.length) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        if (message.messageType?.length) {
-            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.service?.length) {
-            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FileOptions.toJSON(message.options);
-        }
-        if (message.sourceCodeInfo !== undefined) {
-            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
-        }
-        if (message.syntax !== undefined && message.syntax !== "") {
-            obj.syntax = message.syntax;
-        }
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            field: globalThis.Array.isArray(object?.field)
-                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: globalThis.Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            extensionRange: globalThis.Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.field?.length) {
-            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.nestedType?.length) {
-            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.extensionRange?.length) {
-            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
-        }
-        if (message.oneofDecl?.length) {
-            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MessageOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-            declaration: globalThis.Array.isArray(object?.declaration)
-                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            verification: isSet(object.verification)
-                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
-                : 1,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        if (message.declaration?.length) {
-            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.verification !== undefined && message.verification !== 1) {
-            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions_Declaration = {
-    fromJSON(object) {
-        return {
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
-            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.fullName !== undefined && message.fullName !== "") {
-            obj.fullName = message.fullName;
-        }
-        if (message.type !== undefined && message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.reserved !== undefined && message.reserved !== false) {
-            obj.reserved = message.reserved;
-        }
-        if (message.repeated !== undefined && message.repeated !== false) {
-            obj.repeated = message.repeated;
-        }
-        return obj;
-    },
-};
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.label !== undefined && message.label !== 1) {
-            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
-        }
-        if (message.type !== undefined && message.type !== 1) {
-            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
-        }
-        if (message.typeName !== undefined && message.typeName !== "") {
-            obj.typeName = message.typeName;
-        }
-        if (message.extendee !== undefined && message.extendee !== "") {
-            obj.extendee = message.extendee;
-        }
-        if (message.defaultValue !== undefined && message.defaultValue !== "") {
-            obj.defaultValue = message.defaultValue;
-        }
-        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
-            obj.oneofIndex = Math.round(message.oneofIndex);
-        }
-        if (message.jsonName !== undefined && message.jsonName !== "") {
-            obj.jsonName = message.jsonName;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FieldOptions.toJSON(message.options);
-        }
-        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
-            obj.proto3Optional = message.proto3Optional;
-        }
-        return obj;
-    },
-};
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.OneofOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            value: globalThis.Array.isArray(object?.value)
-                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.value?.length) {
-            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumValueOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            method: globalThis.Array.isArray(object?.method)
-                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.method?.length) {
-            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ServiceOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.inputType !== undefined && message.inputType !== "") {
-            obj.inputType = message.inputType;
-        }
-        if (message.outputType !== undefined && message.outputType !== "") {
-            obj.outputType = message.outputType;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MethodOptions.toJSON(message.options);
-        }
-        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
-            obj.clientStreaming = message.clientStreaming;
-        }
-        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
-            obj.serverStreaming = message.serverStreaming;
-        }
-        return obj;
-    },
-};
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.javaPackage !== undefined && message.javaPackage !== "") {
-            obj.javaPackage = message.javaPackage;
-        }
-        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
-            obj.javaOuterClassname = message.javaOuterClassname;
-        }
-        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
-            obj.javaMultipleFiles = message.javaMultipleFiles;
-        }
-        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
-            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
-        }
-        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
-            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
-        }
-        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
-            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
-        }
-        if (message.goPackage !== undefined && message.goPackage !== "") {
-            obj.goPackage = message.goPackage;
-        }
-        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
-            obj.ccGenericServices = message.ccGenericServices;
-        }
-        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
-            obj.javaGenericServices = message.javaGenericServices;
-        }
-        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
-            obj.pyGenericServices = message.pyGenericServices;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
-            obj.ccEnableArenas = message.ccEnableArenas;
-        }
-        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
-            obj.objcClassPrefix = message.objcClassPrefix;
-        }
-        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
-            obj.csharpNamespace = message.csharpNamespace;
-        }
-        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
-            obj.swiftPrefix = message.swiftPrefix;
-        }
-        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
-            obj.phpClassPrefix = message.phpClassPrefix;
-        }
-        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
-            obj.phpNamespace = message.phpNamespace;
-        }
-        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
-            obj.phpMetadataNamespace = message.phpMetadataNamespace;
-        }
-        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
-            obj.rubyPackage = message.rubyPackage;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat)
-                ? globalThis.Boolean(object.messageSetWireFormat)
-                : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
-            obj.messageSetWireFormat = message.messageSetWireFormat;
-        }
-        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
-            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.mapEntry !== undefined && message.mapEntry !== false) {
-            obj.mapEntry = message.mapEntry;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
-            targets: globalThis.Array.isArray(object?.targets)
-                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
-                : [],
-            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
-                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.ctype !== undefined && message.ctype !== 0) {
-            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
-        }
-        if (message.packed !== undefined && message.packed !== false) {
-            obj.packed = message.packed;
-        }
-        if (message.jstype !== undefined && message.jstype !== 0) {
-            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
-        }
-        if (message.lazy !== undefined && message.lazy !== false) {
-            obj.lazy = message.lazy;
-        }
-        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
-            obj.unverifiedLazy = message.unverifiedLazy;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.weak !== undefined && message.weak !== false) {
-            obj.weak = message.weak;
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.retention !== undefined && message.retention !== 0) {
-            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
-        }
-        if (message.targets?.length) {
-            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
-        }
-        if (message.editionDefaults?.length) {
-            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_EditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            value: isSet(object.value) ? globalThis.String(object.value) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.value !== undefined && message.value !== "") {
-            obj.value = message.value;
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_FeatureSupport = {
-    fromJSON(object) {
-        return {
-            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
-            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
-            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
-            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
-            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
-        }
-        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
-            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
-        }
-        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
-            obj.deprecationWarning = message.deprecationWarning;
-        }
-        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
-            obj.editionRemoved = editionToJSON(message.editionRemoved);
-        }
-        return obj;
-    },
-};
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.allowAlias !== undefined && message.allowAlias !== false) {
-            obj.allowAlias = message.allowAlias;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
-            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: globalThis.Array.isArray(object?.name)
-                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
-                : [],
-            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name?.length) {
-            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
-        }
-        if (message.identifierValue !== undefined && message.identifierValue !== "") {
-            obj.identifierValue = message.identifierValue;
-        }
-        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
-            obj.positiveIntValue = message.positiveIntValue;
-        }
-        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
-            obj.negativeIntValue = message.negativeIntValue;
-        }
-        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
-            obj.doubleValue = message.doubleValue;
-        }
-        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
-            obj.stringValue = base64FromBytes(message.stringValue);
-        }
-        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
-            obj.aggregateValue = message.aggregateValue;
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.namePart !== "") {
-            obj.namePart = message.namePart;
-        }
-        if (message.isExtension !== false) {
-            obj.isExtension = message.isExtension;
-        }
-        return obj;
-    },
-};
-exports.FeatureSet = {
-    fromJSON(object) {
-        return {
-            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
-            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
-            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
-                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
-                : 0,
-            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
-            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
-            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
-            enforceNamingStyle: isSet(object.enforceNamingStyle)
-                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
-                : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
-            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
-        }
-        if (message.enumType !== undefined && message.enumType !== 0) {
-            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
-        }
-        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
-            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
-        }
-        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
-            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
-        }
-        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
-            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
-        }
-        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
-            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
-        }
-        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
-            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults = {
-    fromJSON(object) {
-        return {
-            defaults: globalThis.Array.isArray(object?.defaults)
-                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
-                : [],
-            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
-            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.defaults?.length) {
-            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
-        }
-        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
-            obj.minimumEdition = editionToJSON(message.minimumEdition);
-        }
-        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
-            obj.maximumEdition = editionToJSON(message.maximumEdition);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults_FeatureSetEditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            overridableFeatures: isSet(object.overridableFeatures)
-                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
-                : undefined,
-            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.overridableFeatures !== undefined) {
-            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
-        }
-        if (message.fixedFeatures !== undefined) {
-            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: globalThis.Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location?.length) {
-            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
-            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.span?.length) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        if (message.leadingComments !== undefined && message.leadingComments !== "") {
-            obj.leadingComments = message.leadingComments;
-        }
-        if (message.trailingComments !== undefined && message.trailingComments !== "") {
-            obj.trailingComments = message.trailingComments;
-        }
-        if (message.leadingDetachedComments?.length) {
-            obj.leadingDetachedComments = message.leadingDetachedComments;
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: globalThis.Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation?.length) {
-            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.sourceFile !== undefined && message.sourceFile !== "") {
-            obj.sourceFile = message.sourceFile;
-        }
-        if (message.begin !== undefined && message.begin !== 0) {
-            obj.begin = Math.round(message.begin);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.semantic !== undefined && message.semantic !== 0) {
-            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
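All of the codecs deleted above follow the same ts-proto convention: fromJSON coerces each field through isSet plus a globalThis constructor (or an enum converter), and toJSON writes a field only when it differs from its proto3 default. A minimal sketch of that convention for a hypothetical three-field message; the message and field names are illustrative and not part of this patch:

// Hypothetical message { name: string, count: number, tags: string[] } written
// in the same style as the generated codecs above.
const ExampleMessage = {
    fromJSON(object) {
        return {
            name: isSet(object.name) ? globalThis.String(object.name) : "",
            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
            tags: globalThis.Array.isArray(object?.tags) ? object.tags.map((e) => globalThis.String(e)) : [],
        };
    },
    toJSON(message) {
        const obj = {};
        // proto3 JSON omits default values, so only non-defaults are emitted.
        if (message.name !== undefined && message.name !== "") {
            obj.name = message.name;
        }
        if (message.count !== undefined && message.count !== 0) {
            obj.count = Math.round(message.count);
        }
        if (message.tags?.length) {
            obj.tags = message.tags;
        }
        return obj;
    },
};
function isSet(value) {
    return value !== null && value !== undefined;
}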
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 9d24cbba10de9..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/timestamp.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.seconds !== "0") {
-            obj.seconds = message.seconds;
-        }
-        if (message.nanos !== 0) {
-            obj.nanos = Math.round(message.nanos);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
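The deleted Timestamp codec keeps seconds as a decimal string and nanos as a number. A small sketch, assuming non-negative timestamps, of converting between a JS Date and that JSON shape; the helper names are illustrative, not part of the package:

// Convert a JS Date to the { seconds: string, nanos: number } shape used above.
function dateToTimestamp(date) {
    const ms = date.getTime(); // assumes dates at or after the Unix epoch
    return {
        seconds: String(Math.floor(ms / 1000)),
        nanos: (ms % 1000) * 1e6,
    };
}
function timestampToDate(ts) {
    return new Date(Number(ts.seconds) * 1000 + Math.round(ts.nanos / 1e6));
}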
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
deleted file mode 100644
index abc766bed3b88..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
+++ /dev/null
@@ -1,55 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/dsse.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
-/* eslint-disable */
-const envelope_1 = require("../../envelope");
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.DSSERequestV002 = {
-    fromJSON(object) {
-        return {
-            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
-            verifiers: globalThis.Array.isArray(object?.verifiers)
-                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== undefined) {
-            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
-        }
-        if (message.verifiers?.length) {
-            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.DSSELogEntryV002 = {
-    fromJSON(object) {
-        return {
-            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payloadHash !== undefined) {
-            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
deleted file mode 100644
index c5eccb10e0a68..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
+++ /dev/null
@@ -1,81 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/entry.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
-/* eslint-disable */
-const dsse_1 = require("./dsse");
-const hashedrekord_1 = require("./hashedrekord");
-exports.Entry = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
-            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.apiVersion !== "") {
-            obj.apiVersion = message.apiVersion;
-        }
-        if (message.spec !== undefined) {
-            obj.spec = exports.Spec.toJSON(message.spec);
-        }
-        return obj;
-    },
-};
-exports.Spec = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordV002)
-                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
-                : isSet(object.dsseV002)
-                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordV002") {
-            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
-        }
-        else if (message.spec?.$case === "dsseV002") {
-            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
-        }
-        return obj;
-    },
-};
-exports.CreateEntryRequest = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordRequestV002)
-                ? {
-                    $case: "hashedRekordRequestV002",
-                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
-                }
-                : isSet(object.dsseRequestV002)
-                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordRequestV002") {
-            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
-        }
-        else if (message.spec?.$case === "dsseRequestV002") {
-            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
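The Spec and CreateEntryRequest codecs above model proto oneofs as a $case-discriminated object, picking the branch by probing the incoming JSON keys in order. A standalone sketch of how a consumer reads such a value; the sample value is illustrative:

// A oneof-shaped value as produced by the generated fromJSON above.
const spec = { $case: "dsseV002", dsseV002: { /* DSSELogEntryV002 fields */ } };

// Consumers branch on $case rather than checking each property individually.
switch (spec.$case) {
    case "hashedRekordV002":
        console.log("hashedrekord entry", spec.hashedRekordV002);
        break;
    case "dsseV002":
        console.log("dsse entry", spec.dsseV002);
        break;
    default:
        console.log("unknown spec kind");
}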
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
deleted file mode 100644
index d3fd1af2483d1..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
+++ /dev/null
@@ -1,56 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/hashedrekord.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.HashedRekordRequestV002 = {
-    fromJSON(object) {
-        return {
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-exports.HashedRekordLogEntryV002 = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data !== undefined) {
-            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
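Each deleted file carries its own private bytesFromBase64/base64FromBytes pair built on Buffer. A minimal round trip in the same style; the sample digest is illustrative:

function bytesFromBase64(b64) {
    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
}
function base64FromBytes(arr) {
    return globalThis.Buffer.from(arr).toString("base64");
}

const digest = bytesFromBase64("3q2+7w==");  // Uint8Array [0xde, 0xad, 0xbe, 0xef]
console.log(base64FromBytes(digest));        // "3q2+7w=="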
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
deleted file mode 100644
index c437d5053a3cb..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
+++ /dev/null
@@ -1,74 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/verifier.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Verifier = exports.PublicKey = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-exports.PublicKey = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.Verifier = {
-    fromJSON(object) {
-        return {
-            verifier: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
-                : isSet(object.x509Certificate)
-                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
-                    : undefined,
-            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.verifier?.$case === "publicKey") {
-            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
-        }
-        else if (message.verifier?.$case === "x509Certificate") {
-            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
-            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content.length !== 0) {
-            obj.content = base64FromBytes(message.content);
-        }
-        if (message.verifier !== undefined) {
-            obj.verifier = exports.Verifier.toJSON(message.verifier);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index aed636f00e7cf..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_bundle.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps?.length) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : isSet(object.certificate)
-                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
-                        : undefined,
-            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content?.$case === "publicKey") {
-            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
-        }
-        else if (message.content?.$case === "x509CertificateChain") {
-            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
-        }
-        else if (message.content?.$case === "certificate") {
-            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
-        }
-        if (message.tlogEntries?.length) {
-            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
-        }
-        if (message.timestampVerificationData !== undefined) {
-            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
-        }
-        return obj;
-    },
-};
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.verificationMaterial !== undefined) {
-            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
-        }
-        if (message.content?.$case === "messageSignature") {
-            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
-        }
-        else if (message.content?.$case === "dsseEnvelope") {
-            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
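The Bundle codec deleted above distinguishes messageSignature and dsseEnvelope through the same content.$case pattern. A hedged usage sketch, assuming the public @sigstore/protobuf-specs entry point re-exports this Bundle codec and that a bundle JSON file exists at the given path (both are assumptions, not taken from this patch):

const fs = require('node:fs');
const { Bundle } = require('@sigstore/protobuf-specs'); // assumed public export

// Parse a serialized Sigstore bundle and branch on its content oneof.
const raw = JSON.parse(fs.readFileSync('./example.sigstore.json', 'utf8'));
const bundle = Bundle.fromJSON(raw);
if (bundle.content?.$case === 'dsseEnvelope') {
    console.log('DSSE payload type:', bundle.content.dsseEnvelope.payloadType);
} else if (bundle.content?.$case === 'messageSignature') {
    console.log('signature bytes:', bundle.content.messageSignature.signature.length);
}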
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index b900516ed3b55..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,596 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_common.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See  for more
- * details.
- * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
- * any proto JSON serialization to emit the used hash algorithm, as default
- * option is to *omit* the default value of an enum (which is the first
- * value, represented by '0'.
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
-    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
-    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
-    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
-})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        case 2:
-        case "SHA2_384":
-            return HashAlgorithm.SHA2_384;
-        case 3:
-        case "SHA2_512":
-            return HashAlgorithm.SHA2_512;
-        case 4:
-        case "SHA3_256":
-            return HashAlgorithm.SHA3_256;
-        case 5:
-        case "SHA3_384":
-            return HashAlgorithm.SHA3_384;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        case HashAlgorithm.SHA2_384:
-            return "SHA2_384";
-        case HashAlgorithm.SHA2_512:
-            return "SHA2_512";
-        case HashAlgorithm.SHA3_256:
-            return "SHA3_256";
-        case HashAlgorithm.SHA3_384:
-            return "SHA3_384";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-/**
- * Details of a specific public key, capturing the the key encoding method,
- * and signature algorithm.
- *
- * PublicKeyDetails captures the public key/hash algorithm combinations
- * recommended in the Sigstore ecosystem.
- *
- * This is modelled as a linear set as we want to provide a small number of
- * opinionated options instead of allowing every possible permutation.
- *
- * Any changes to this enum MUST be reflected in the algorithm registry.
- *
- * See: 
- *
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519 the valid permutations are listed as a linear set instead of a
- * cartesian set (i.e one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /**
-     * PKCS1_RSA_PKCS1V5 - RSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /**
-     * PKCS1_RSA_PSS - See RFC8017
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
-    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
-    /**
-     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
-    /** PKIX_ED25519 - Ed 25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
-    /**
-     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
-     * were/are being used by most Sigstore clients implementations.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
-    /**
-     * LMS_SHA256 - LMS and LM-OTS
-     *
-     * These algorithms are deprecated and should not be used.
-     * Keys and signatures MAY be used by private Sigstore
-     * deployments, but will not be supported by the public
-     * good instance.
-     *
-     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
-     * Using them correctly requires discretion and careful consideration
-     * to ensure that individual secret keys are not used more than once.
-     * In addition, LM-OTS is a single-use scheme, meaning that it
-     * MUST NOT be used for more than one signature per LM-OTS key.
-     * If you cannot maintain these invariants, you MUST NOT use these
-     * schemes.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
-    /**
-     * ML_DSA_65 - ML-DSA
-     *
-     * These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
-     * take data to sign rather than the prehash variants (HashML-DSA), which
-     * take digests.  While considered quantum-resistant, their usage
-     * involves tradeoffs in that signatures and keys are much larger, and
-     * this makes deployments more costly.
-     *
-     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
-     * In the future they MAY be used by private Sigstore deployments, but
-     * they are not yet fully functional.  This warning will be removed when
-     * these algorithms are widely supported by Sigstore clients and servers,
-     * but care should still be taken for production environments.
-     */
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
-})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 9:
-        case "PKIX_RSA_PKCS1V15_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
-        case 10:
-        case "PKIX_RSA_PKCS1V15_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
-        case 11:
-        case "PKIX_RSA_PKCS1V15_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
-        case 16:
-        case "PKIX_RSA_PSS_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
-        case 17:
-        case "PKIX_RSA_PSS_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
-        case 18:
-        case "PKIX_RSA_PSS_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 12:
-        case "PKIX_ECDSA_P384_SHA_384":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
-        case 13:
-        case "PKIX_ECDSA_P521_SHA_512":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        case 8:
-        case "PKIX_ED25519_PH":
-            return PublicKeyDetails.PKIX_ED25519_PH;
-        case 19:
-        case "PKIX_ECDSA_P384_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
-        case 20:
-        case "PKIX_ECDSA_P521_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
-        case 14:
-        case "LMS_SHA256":
-            return PublicKeyDetails.LMS_SHA256;
-        case 15:
-        case "LMOTS_SHA256":
-            return PublicKeyDetails.LMOTS_SHA256;
-        case 21:
-        case "ML_DSA_65":
-            return PublicKeyDetails.ML_DSA_65;
-        case 22:
-        case "ML_DSA_87":
-            return PublicKeyDetails.ML_DSA_87;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
-            return "PKIX_RSA_PKCS1V15_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
-            return "PKIX_RSA_PKCS1V15_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
-            return "PKIX_RSA_PKCS1V15_4096_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
-            return "PKIX_RSA_PSS_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
-            return "PKIX_RSA_PSS_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
-            return "PKIX_RSA_PSS_4096_SHA256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
-            return "PKIX_ECDSA_P384_SHA_384";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
-            return "PKIX_ECDSA_P521_SHA_512";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        case PublicKeyDetails.PKIX_ED25519_PH:
-            return "PKIX_ED25519_PH";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
-            return "PKIX_ECDSA_P384_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
-            return "PKIX_ECDSA_P521_SHA_256";
-        case PublicKeyDetails.LMS_SHA256:
-            return "LMS_SHA256";
-        case PublicKeyDetails.LMOTS_SHA256:
-            return "LMOTS_SHA256";
-        case PublicKeyDetails.ML_DSA_65:
-            return "ML_DSA_65";
-        case PublicKeyDetails.ML_DSA_87:
-            return "ML_DSA_87";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.algorithm !== 0) {
-            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
-        }
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        return obj;
-    },
-};
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageDigest !== undefined) {
-            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
-        }
-        if (message.signature.length !== 0) {
-            obj.signature = base64FromBytes(message.signature);
-        }
-        return obj;
-    },
-};
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.keyId.length !== 0) {
-            obj.keyId = base64FromBytes(message.keyId);
-        }
-        return obj;
-    },
-};
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedTimestamp.length !== 0) {
-            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
-        }
-        return obj;
-    },
-};
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes !== undefined) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = exports.TimeRange.toJSON(message.validFor);
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.hint !== "") {
-            obj.hint = message.hint;
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id?.length) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.oid !== undefined) {
-            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
-            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.organization !== "") {
-            obj.organization = message.organization;
-        }
-        if (message.commonName !== "") {
-            obj.commonName = message.commonName;
-        }
-        return obj;
-    },
-};
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: globalThis.String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.type !== 0) {
-            obj.type = subjectAlternativeNameTypeToJSON(message.type);
-        }
-        if (message.identity?.$case === "regexp") {
-            obj.regexp = message.identity.regexp;
-        }
-        else if (message.identity?.$case === "value") {
-            obj.value = message.identity.value;
-        }
-        return obj;
-    },
-};
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: globalThis.Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates?.length) {
-            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined) {
-            obj.start = message.start.toISOString();
-        }
-        if (message.end !== undefined) {
-            obj.end = message.end.toISOString();
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index fd8ea8384664d..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,137 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_rekor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            version: isSet(object.version) ? globalThis.String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.version !== "") {
-            obj.version = message.version;
-        }
-        return obj;
-    },
-};
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== "") {
-            obj.envelope = message.envelope;
-        }
-        return obj;
-    },
-};
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
-            hashes: globalThis.Array.isArray(object?.hashes)
-                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
-                : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.rootHash.length !== 0) {
-            obj.rootHash = base64FromBytes(message.rootHash);
-        }
-        if (message.treeSize !== "0") {
-            obj.treeSize = message.treeSize;
-        }
-        if (message.hashes?.length) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
-        }
-        if (message.checkpoint !== undefined) {
-            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
-        }
-        return obj;
-    },
-};
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedEntryTimestamp.length !== 0) {
-            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
-        }
-        return obj;
-    },
-};
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.kindVersion !== undefined) {
-            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
-        }
-        if (message.integratedTime !== "0") {
-            obj.integratedTime = message.integratedTime;
-        }
-        if (message.inclusionPromise !== undefined) {
-            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
-        }
-        if (message.inclusionProof !== undefined) {
-            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
-        }
-        if (message.canonicalizedBody.length !== 0) {
-            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 1b5492fb1a77e..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,284 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_trustroot.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
-exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
-exports.serviceSelectorToJSON = serviceSelectorToJSON;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-/**
- * ServiceSelector specifies how a client SHOULD select a set of
- * Services to connect to. A client SHOULD throw an error if
- * the value is SERVICE_SELECTOR_UNDEFINED.
- */
-var ServiceSelector;
-(function (ServiceSelector) {
-    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
-    /**
-     * ALL - Clients SHOULD select all Services based on supported API version
-     * and validity window.
-     */
-    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
-    /**
-     * ANY - Clients SHOULD select one Service based on supported API version
-     * and validity window. It is up to the client implementation to
-     * decide how to select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
-    /**
-     * EXACT - Clients SHOULD select a specific number of Services based on
-     * supported API version and validity window, using the provided
-     * `count`. It is up to the client implementation to decide how to
-     * select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
-})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
-function serviceSelectorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SERVICE_SELECTOR_UNDEFINED":
-            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
-        case 1:
-        case "ALL":
-            return ServiceSelector.ALL;
-        case 2:
-        case "ANY":
-            return ServiceSelector.ANY;
-        case 3:
-        case "EXACT":
-            return ServiceSelector.EXACT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-function serviceSelectorToJSON(object) {
-    switch (object) {
-        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
-            return "SERVICE_SELECTOR_UNDEFINED";
-        case ServiceSelector.ALL:
-            return "ALL";
-        case ServiceSelector.ANY:
-            return "ANY";
-        case ServiceSelector.EXACT:
-            return "EXACT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.baseUrl !== "") {
-            obj.baseUrl = message.baseUrl;
-        }
-        if (message.hashAlgorithm !== 0) {
-            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
-        }
-        if (message.publicKey !== undefined) {
-            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.checkpointKeyId !== undefined) {
-            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.subject !== undefined) {
-            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
-        }
-        if (message.uri !== "") {
-            obj.uri = message.uri;
-        }
-        if (message.certChain !== undefined) {
-            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            tlogs: globalThis.Array.isArray(object?.tlogs)
-                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: globalThis.Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.tlogs?.length) {
-            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.certificateAuthorities?.length) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        if (message.ctlogs?.length) {
-            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.timestampAuthorities?.length) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SigningConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
-                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
-                : [],
-            rekorTlogConfig: isSet(object.rekorTlogConfig)
-                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
-                : undefined,
-            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.caUrls?.length) {
-            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.oidcUrls?.length) {
-            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogUrls?.length) {
-            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogConfig !== undefined) {
-            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
-        }
-        if (message.tsaUrls?.length) {
-            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.tsaConfig !== undefined) {
-            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
-        }
-        return obj;
-    },
-};
-exports.Service = {
-    fromJSON(object) {
-        return {
-            url: isSet(object.url) ? globalThis.String(object.url) : "",
-            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.url !== "") {
-            obj.url = message.url;
-        }
-        if (message.majorApiVersion !== 0) {
-            obj.majorApiVersion = Math.round(message.majorApiVersion);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.ServiceConfiguration = {
-    fromJSON(object) {
-        return {
-            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
-            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.selector !== 0) {
-            obj.selector = serviceSelectorToJSON(message.selector);
-        }
-        if (message.count !== 0) {
-            obj.count = Math.round(message.count);
-        }
-        return obj;
-    },
-};
-exports.ClientTrustConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
-            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.trustedRoot !== undefined) {
-            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
-        }
-        if (message.signingConfig !== undefined) {
-            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 876fe9cc1db1d..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,281 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_verification.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: globalThis.Array.isArray(object?.oids)
-                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.issuer !== "") {
-            obj.issuer = message.issuer;
-        }
-        if (message.san !== undefined) {
-            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
-        }
-        if (message.oids?.length) {
-            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: globalThis.Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities?.length) {
-            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: globalThis.Array.isArray(object?.publicKeys)
-                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys?.length) {
-            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-            integratedTsOptions: isSet(object.integratedTsOptions)
-                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
-                : undefined,
-            observerOptions: isSet(object.observerOptions)
-                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signers?.$case === "certificateIdentities") {
-            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
-        }
-        else if (message.signers?.$case === "publicKeys") {
-            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
-        }
-        if (message.tlogOptions !== undefined) {
-            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
-        }
-        if (message.ctlogOptions !== undefined) {
-            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
-        }
-        if (message.tsaOptions !== undefined) {
-            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
-        }
-        if (message.integratedTsOptions !== undefined) {
-            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
-        }
-        if (message.observerOptions !== undefined) {
-            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? globalThis.Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.performOnlineVerification !== false) {
-            obj.performOnlineVerification = message.performOnlineVerification;
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : isSet(object.artifactDigest)
-                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data?.$case === "artifactUri") {
-            obj.artifactUri = message.data.artifactUri;
-        }
-        else if (message.data?.$case === "artifact") {
-            obj.artifact = base64FromBytes(message.data.artifact);
-        }
-        else if (message.data?.$case === "artifactDigest") {
-            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
-        }
-        return obj;
-    },
-};
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.artifactTrustRoot !== undefined) {
-            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
-        }
-        if (message.artifactVerificationOptions !== undefined) {
-            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
-        }
-        if (message.bundle !== undefined) {
-            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
-        }
-        if (message.artifact !== undefined) {
-            obj.artifact = exports.Artifact.toJSON(message.artifact);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
deleted file mode 100644
index 10745efc39a1f..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2025 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
-__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
-__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
-__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index f87b2540fbf98..0000000000000
--- a/node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.5.0",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "exports": {
-    ".": "./dist/index.js",
-    "./rekor/v2": "./dist/rekor/v2/index.js"
-  },
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node18": "^18.2.4",
-    "@types/node": "^18.14.0",
-    "typescript": "^5.7.2"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  }
-}
diff --git a/node_modules/@tufjs/models/dist/base.js b/node_modules/@tufjs/models/dist/base.js
deleted file mode 100644
index 85e45d8fc1151..0000000000000
--- a/node_modules/@tufjs/models/dist/base.js
+++ /dev/null
@@ -1,92 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signed = exports.MetadataKind = void 0;
-exports.isMetadataKind = isMetadataKind;
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-const SPECIFICATION_VERSION = ['1', '0', '31'];
-var MetadataKind;
-(function (MetadataKind) {
-    MetadataKind["Root"] = "root";
-    MetadataKind["Timestamp"] = "timestamp";
-    MetadataKind["Snapshot"] = "snapshot";
-    MetadataKind["Targets"] = "targets";
-})(MetadataKind || (exports.MetadataKind = MetadataKind = {}));
-function isMetadataKind(value) {
-    return (typeof value === 'string' &&
-        Object.values(MetadataKind).includes(value));
-}
-/***
- * A base class for the signed part of TUF metadata.
- *
- * Objects with base class Signed are usually included in a ``Metadata`` object
- * on the signed attribute. This class provides attributes and methods that
- * are common for all TUF metadata types (roles).
- */
-class Signed {
-    constructor(options) {
-        this.specVersion = options.specVersion || SPECIFICATION_VERSION.join('.');
-        const specList = this.specVersion.split('.');
-        if (!(specList.length === 2 || specList.length === 3) ||
-            !specList.every((item) => isNumeric(item))) {
-            throw new error_1.ValueError('Failed to parse specVersion');
-        }
-        // major version must match
-        if (specList[0] != SPECIFICATION_VERSION[0]) {
-            throw new error_1.ValueError('Unsupported specVersion');
-        }
-        this.expires = options.expires;
-        this.version = options.version;
-        this.unrecognizedFields = options.unrecognizedFields || {};
-    }
-    equals(other) {
-        if (!(other instanceof Signed)) {
-            return false;
-        }
-        return (this.specVersion === other.specVersion &&
-            this.expires === other.expires &&
-            this.version === other.version &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    isExpired(referenceTime) {
-        if (!referenceTime) {
-            referenceTime = new Date();
-        }
-        return referenceTime >= new Date(this.expires);
-    }
-    static commonFieldsFromJSON(data) {
-        const { spec_version, expires, version, ...rest } = data;
-        if (!utils_1.guard.isDefined(spec_version)) {
-            throw new error_1.ValueError('spec_version is not defined');
-        }
-        else if (typeof spec_version !== 'string') {
-            throw new TypeError('spec_version must be a string');
-        }
-        if (!utils_1.guard.isDefined(expires)) {
-            throw new error_1.ValueError('expires is not defined');
-        }
-        else if (!(typeof expires === 'string')) {
-            throw new TypeError('expires must be a string');
-        }
-        if (!utils_1.guard.isDefined(version)) {
-            throw new error_1.ValueError('version is not defined');
-        }
-        else if (!(typeof version === 'number')) {
-            throw new TypeError('version must be a number');
-        }
-        return {
-            specVersion: spec_version,
-            expires,
-            version,
-            unrecognizedFields: rest,
-        };
-    }
-}
-exports.Signed = Signed;
-function isNumeric(str) {
-    return !isNaN(Number(str));
-}
diff --git a/node_modules/@tufjs/models/dist/delegations.js b/node_modules/@tufjs/models/dist/delegations.js
deleted file mode 100644
index 7165f1e244393..0000000000000
--- a/node_modules/@tufjs/models/dist/delegations.js
+++ /dev/null
@@ -1,115 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Delegations = void 0;
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const key_1 = require("./key");
-const role_1 = require("./role");
-const utils_1 = require("./utils");
-/**
- * A container object storing information about all delegations.
- *
- * Targets roles that are trusted to provide signed metadata files
- * describing targets with designated pathnames and/or further delegations.
- */
-class Delegations {
-    constructor(options) {
-        this.keys = options.keys;
-        this.unrecognizedFields = options.unrecognizedFields || {};
-        if (options.roles) {
-            if (Object.keys(options.roles).some((roleName) => role_1.TOP_LEVEL_ROLE_NAMES.includes(roleName))) {
-                throw new error_1.ValueError('Delegated role name conflicts with top-level role name');
-            }
-        }
-        this.succinctRoles = options.succinctRoles;
-        this.roles = options.roles;
-    }
-    equals(other) {
-        if (!(other instanceof Delegations)) {
-            return false;
-        }
-        return (util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
-            util_1.default.isDeepStrictEqual(this.roles, other.roles) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields) &&
-            util_1.default.isDeepStrictEqual(this.succinctRoles, other.succinctRoles));
-    }
-    *rolesForTarget(targetPath) {
-        if (this.roles) {
-            for (const role of Object.values(this.roles)) {
-                if (role.isDelegatedPath(targetPath)) {
-                    yield { role: role.name, terminating: role.terminating };
-                }
-            }
-        }
-        else if (this.succinctRoles) {
-            yield {
-                role: this.succinctRoles.getRoleForTarget(targetPath),
-                terminating: true,
-            };
-        }
-    }
-    toJSON() {
-        const json = {
-            keys: keysToJSON(this.keys),
-            ...this.unrecognizedFields,
-        };
-        if (this.roles) {
-            json.roles = rolesToJSON(this.roles);
-        }
-        else if (this.succinctRoles) {
-            json.succinct_roles = this.succinctRoles.toJSON();
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { keys, roles, succinct_roles, ...unrecognizedFields } = data;
-        let succinctRoles;
-        if (utils_1.guard.isObject(succinct_roles)) {
-            succinctRoles = role_1.SuccinctRoles.fromJSON(succinct_roles);
-        }
-        return new Delegations({
-            keys: keysFromJSON(keys),
-            roles: rolesFromJSON(roles),
-            unrecognizedFields,
-            succinctRoles,
-        });
-    }
-}
-exports.Delegations = Delegations;
-function keysToJSON(keys) {
-    return Object.entries(keys).reduce((acc, [keyId, key]) => ({
-        ...acc,
-        [keyId]: key.toJSON(),
-    }), {});
-}
-function rolesToJSON(roles) {
-    return Object.values(roles).map((role) => role.toJSON());
-}
-function keysFromJSON(data) {
-    if (!utils_1.guard.isObjectRecord(data)) {
-        throw new TypeError('keys is malformed');
-    }
-    return Object.entries(data).reduce((acc, [keyID, keyData]) => ({
-        ...acc,
-        [keyID]: key_1.Key.fromJSON(keyID, keyData),
-    }), {});
-}
-function rolesFromJSON(data) {
-    let roleMap;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectArray(data)) {
-            throw new TypeError('roles is malformed');
-        }
-        roleMap = data.reduce((acc, role) => {
-            const delegatedRole = role_1.DelegatedRole.fromJSON(role);
-            return {
-                ...acc,
-                [delegatedRole.name]: delegatedRole,
-            };
-        }, {});
-    }
-    return roleMap;
-}
diff --git a/node_modules/@tufjs/models/dist/file.js b/node_modules/@tufjs/models/dist/file.js
deleted file mode 100644
index b35fe5950bbb7..0000000000000
--- a/node_modules/@tufjs/models/dist/file.js
+++ /dev/null
@@ -1,183 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TargetFile = exports.MetaFile = void 0;
-const crypto_1 = __importDefault(require("crypto"));
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-// A container with information about a particular metadata file.
-//
-// This class is used for Timestamp and Snapshot metadata.
-class MetaFile {
-    constructor(opts) {
-        if (opts.version <= 0) {
-            throw new error_1.ValueError('Metafile version must be at least 1');
-        }
-        if (opts.length !== undefined) {
-            validateLength(opts.length);
-        }
-        this.version = opts.version;
-        this.length = opts.length;
-        this.hashes = opts.hashes;
-        this.unrecognizedFields = opts.unrecognizedFields || {};
-    }
-    equals(other) {
-        if (!(other instanceof MetaFile)) {
-            return false;
-        }
-        return (this.version === other.version &&
-            this.length === other.length &&
-            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    verify(data) {
-        // Verifies that the given data matches the expected length.
-        if (this.length !== undefined) {
-            if (data.length !== this.length) {
-                throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${data.length}`);
-            }
-        }
-        // Verifies that the given data matches the supplied hashes.
-        if (this.hashes) {
-            Object.entries(this.hashes).forEach(([key, value]) => {
-                let hash;
-                try {
-                    hash = crypto_1.default.createHash(key);
-                }
-                catch (e) {
-                    throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
-                }
-                const observedHash = hash.update(data).digest('hex');
-                if (observedHash !== value) {
-                    throw new error_1.LengthOrHashMismatchError(`Expected hash ${value} but got ${observedHash}`);
-                }
-            });
-        }
-    }
-    toJSON() {
-        const json = {
-            version: this.version,
-            ...this.unrecognizedFields,
-        };
-        if (this.length !== undefined) {
-            json.length = this.length;
-        }
-        if (this.hashes) {
-            json.hashes = this.hashes;
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { version, length, hashes, ...rest } = data;
-        if (typeof version !== 'number') {
-            throw new TypeError('version must be a number');
-        }
-        if (utils_1.guard.isDefined(length) && typeof length !== 'number') {
-            throw new TypeError('length must be a number');
-        }
-        if (utils_1.guard.isDefined(hashes) && !utils_1.guard.isStringRecord(hashes)) {
-            throw new TypeError('hashes must be string keys and values');
-        }
-        return new MetaFile({
-            version,
-            length,
-            hashes,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.MetaFile = MetaFile;
-// Container for info about a particular target file.
-//
-// This class is used for Target metadata.
-class TargetFile {
-    constructor(opts) {
-        validateLength(opts.length);
-        this.length = opts.length;
-        this.path = opts.path;
-        this.hashes = opts.hashes;
-        this.unrecognizedFields = opts.unrecognizedFields || {};
-    }
-    get custom() {
-        const custom = this.unrecognizedFields['custom'];
-        if (!custom || Array.isArray(custom) || !(typeof custom === 'object')) {
-            return {};
-        }
-        return custom;
-    }
-    equals(other) {
-        if (!(other instanceof TargetFile)) {
-            return false;
-        }
-        return (this.length === other.length &&
-            this.path === other.path &&
-            util_1.default.isDeepStrictEqual(this.hashes, other.hashes) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    async verify(stream) {
-        let observedLength = 0;
-        // Create a digest for each hash algorithm
-        const digests = Object.keys(this.hashes).reduce((acc, key) => {
-            try {
-                acc[key] = crypto_1.default.createHash(key);
-            }
-            catch (e) {
-                throw new error_1.LengthOrHashMismatchError(`Hash algorithm ${key} not supported`);
-            }
-            return acc;
-        }, {});
-        // Read stream chunk by chunk
-        for await (const chunk of stream) {
-            // Keep running tally of stream length
-            observedLength += chunk.length;
-            // Append chunk to each digest
-            Object.values(digests).forEach((digest) => {
-                digest.update(chunk);
-            });
-        }
-        // Verify length matches expected value
-        if (observedLength !== this.length) {
-            throw new error_1.LengthOrHashMismatchError(`Expected length ${this.length} but got ${observedLength}`);
-        }
-        // Verify each digest matches expected value
-        Object.entries(digests).forEach(([key, value]) => {
-            const expected = this.hashes[key];
-            const actual = value.digest('hex');
-            if (actual !== expected) {
-                throw new error_1.LengthOrHashMismatchError(`Expected hash ${expected} but got ${actual}`);
-            }
-        });
-    }
-    toJSON() {
-        return {
-            length: this.length,
-            hashes: this.hashes,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(path, data) {
-        const { length, hashes, ...rest } = data;
-        if (typeof length !== 'number') {
-            throw new TypeError('length must be a number');
-        }
-        if (!utils_1.guard.isStringRecord(hashes)) {
-            throw new TypeError('hashes must have string keys and values');
-        }
-        return new TargetFile({
-            length,
-            path,
-            hashes,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.TargetFile = TargetFile;
-// Check that the supplied length is valid
-function validateLength(length) {
-    if (length < 0) {
-        throw new error_1.ValueError('Length must be at least 0');
-    }
-}
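
For context, TargetFile.verify() above is the core client-side check: it streams the downloaded bytes once, keeping a running length and feeding every configured digest, then compares both against the recorded values. A minimal usage sketch, assuming @tufjs/models re-exports TargetFile from its entry point; the file name and contents are placeholders, not taken from this patch:

    const crypto = require('crypto');
    const { Readable } = require('stream');
    const { TargetFile } = require('@tufjs/models');

    async function checkDownload (bytes) {
      // In real usage, length and hashes come from signed targets metadata;
      // here they are computed locally just to exercise verify().
      const target = TargetFile.fromJSON('hello.txt', {
        length: bytes.length,
        hashes: { sha256: crypto.createHash('sha256').update(bytes).digest('hex') },
      });
      // verify() accepts any async-iterable stream and throws
      // LengthOrHashMismatchError if the length or any digest disagrees.
      await target.verify(Readable.from([bytes]));
    }

    checkDownload(Buffer.from('hello world')).then(() => console.log('verified'));
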
diff --git a/node_modules/@tufjs/models/dist/key.js b/node_modules/@tufjs/models/dist/key.js
deleted file mode 100644
index 5e55b09d7c6dd..0000000000000
--- a/node_modules/@tufjs/models/dist/key.js
+++ /dev/null
@@ -1,85 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Key = void 0;
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-const key_1 = require("./utils/key");
-// A container class representing the public portion of a Key.
-class Key {
-    constructor(options) {
-        const { keyID, keyType, scheme, keyVal, unrecognizedFields } = options;
-        this.keyID = keyID;
-        this.keyType = keyType;
-        this.scheme = scheme;
-        this.keyVal = keyVal;
-        this.unrecognizedFields = unrecognizedFields || {};
-    }
-    // Verifies that metadata.signatures contains a signature made with
-    // this key and that it is correctly signed.
-    verifySignature(metadata) {
-        const signature = metadata.signatures[this.keyID];
-        if (!signature)
-            throw new error_1.UnsignedMetadataError('no signature for key found in metadata');
-        if (!this.keyVal.public)
-            throw new error_1.UnsignedMetadataError('no public key found');
-        const publicKey = (0, key_1.getPublicKey)({
-            keyType: this.keyType,
-            scheme: this.scheme,
-            keyVal: this.keyVal.public,
-        });
-        const signedData = metadata.signed.toJSON();
-        try {
-            if (!utils_1.crypto.verifySignature(signedData, publicKey, signature.sig)) {
-                throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
-            }
-        }
-        catch (error) {
-            if (error instanceof error_1.UnsignedMetadataError) {
-                throw error;
-            }
-            throw new error_1.UnsignedMetadataError(`failed to verify ${this.keyID} signature`);
-        }
-    }
-    equals(other) {
-        if (!(other instanceof Key)) {
-            return false;
-        }
-        return (this.keyID === other.keyID &&
-            this.keyType === other.keyType &&
-            this.scheme === other.scheme &&
-            util_1.default.isDeepStrictEqual(this.keyVal, other.keyVal) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    toJSON() {
-        return {
-            keytype: this.keyType,
-            scheme: this.scheme,
-            keyval: this.keyVal,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(keyID, data) {
-        const { keytype, scheme, keyval, ...rest } = data;
-        if (typeof keytype !== 'string') {
-            throw new TypeError('keytype must be a string');
-        }
-        if (typeof scheme !== 'string') {
-            throw new TypeError('scheme must be a string');
-        }
-        if (!utils_1.guard.isStringRecord(keyval)) {
-            throw new TypeError('keyval must be a string record');
-        }
-        return new Key({
-            keyID,
-            keyType: keytype,
-            scheme,
-            keyVal: keyval,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Key = Key;
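
Key.fromJSON() maps the wire-format field names (keytype, scheme, keyval) onto the camelCase properties the class exposes, and verifySignature() is what Metadata.verifyDelegate() further down calls once per trusted key. A small sketch with placeholder key material (not a usable key), assuming the package entry point re-exports Key:

    const { Key } = require('@tufjs/models');

    const key = Key.fromJSON('abc123', {
      keytype: 'ed25519',
      scheme: 'ed25519',
      keyval: { public: 'deadbeef' },   // placeholder hex, not real key material
    });
    console.log(key.keyID, key.keyType, key.scheme);   // abc123 ed25519 ed25519
    // key.verifySignature(metadata) throws UnsignedMetadataError when
    // metadata.signatures has no entry for 'abc123' or the signature check fails.
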
diff --git a/node_modules/@tufjs/models/dist/metadata.js b/node_modules/@tufjs/models/dist/metadata.js
deleted file mode 100644
index 389d2504e0b53..0000000000000
--- a/node_modules/@tufjs/models/dist/metadata.js
+++ /dev/null
@@ -1,160 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Metadata = void 0;
-const canonical_json_1 = require("@tufjs/canonical-json");
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const error_1 = require("./error");
-const root_1 = require("./root");
-const signature_1 = require("./signature");
-const snapshot_1 = require("./snapshot");
-const targets_1 = require("./targets");
-const timestamp_1 = require("./timestamp");
-const utils_1 = require("./utils");
-/***
- * A container for signed TUF metadata.
- *
- * Provides methods to convert to and from JSON, and to create and verify
- * metadata signatures.
- *
- * ``Metadata[T]`` is a generic container type where T can be any one type of
- * [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this
- * is to allow static type checking of the signed attribute in code using
- * Metadata::
- *
- * root_md = Metadata[Root].fromJSON("root.json")
- * # root_md type is now Metadata[Root]. This means signed and its
- * # attributes like consistent_snapshot are now statically typed and the
- * # types can be verified by static type checkers and shown by IDEs
- *
- * Using a type constraint is not required but not doing so means T is not a
- * specific type so static typing cannot happen. Note that the type constraint
- * ``[Root]`` is not validated at runtime (as pure annotations are not available
- * then).
- *
- * Apart from ``expires`` all of the arguments to the inner constructors have
- * reasonable default values for new metadata.
- */
-class Metadata {
-    constructor(signed, signatures, unrecognizedFields) {
-        this.signed = signed;
-        this.signatures = signatures || {};
-        this.unrecognizedFields = unrecognizedFields || {};
-    }
-    sign(signer, append = true) {
-        const bytes = Buffer.from((0, canonical_json_1.canonicalize)(this.signed.toJSON()));
-        const signature = signer(bytes);
-        if (!append) {
-            this.signatures = {};
-        }
-        this.signatures[signature.keyID] = signature;
-    }
-    verifyDelegate(delegatedRole, delegatedMetadata) {
-        let role;
-        let keys = {};
-        switch (this.signed.type) {
-            case base_1.MetadataKind.Root:
-                keys = this.signed.keys;
-                role = this.signed.roles[delegatedRole];
-                break;
-            case base_1.MetadataKind.Targets:
-                if (!this.signed.delegations) {
-                    throw new error_1.ValueError(`No delegations found for ${delegatedRole}`);
-                }
-                keys = this.signed.delegations.keys;
-                if (this.signed.delegations.roles) {
-                    role = this.signed.delegations.roles[delegatedRole];
-                }
-                else if (this.signed.delegations.succinctRoles) {
-                    if (this.signed.delegations.succinctRoles.isDelegatedRole(delegatedRole)) {
-                        role = this.signed.delegations.succinctRoles;
-                    }
-                }
-                break;
-            default:
-                throw new TypeError('invalid metadata type');
-        }
-        if (!role) {
-            throw new error_1.ValueError(`no delegation found for ${delegatedRole}`);
-        }
-        const signingKeys = new Set();
-        role.keyIDs.forEach((keyID) => {
-            const key = keys[keyID];
-            // If we don't have the key, continue checking other keys
-            if (!key) {
-                return;
-            }
-            try {
-                key.verifySignature(delegatedMetadata);
-                signingKeys.add(key.keyID);
-            }
-            catch (error) {
-                // continue
-            }
-        });
-        if (signingKeys.size < role.threshold) {
-            throw new error_1.UnsignedMetadataError(`${delegatedRole} was signed by ${signingKeys.size}/${role.threshold} keys`);
-        }
-    }
-    equals(other) {
-        if (!(other instanceof Metadata)) {
-            return false;
-        }
-        return (this.signed.equals(other.signed) &&
-            util_1.default.isDeepStrictEqual(this.signatures, other.signatures) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    toJSON() {
-        const signatures = Object.values(this.signatures).map((signature) => {
-            return signature.toJSON();
-        });
-        return {
-            signatures,
-            signed: this.signed.toJSON(),
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(type, data) {
-        const { signed, signatures, ...rest } = data;
-        if (!utils_1.guard.isDefined(signed) || !utils_1.guard.isObject(signed)) {
-            throw new TypeError('signed is not defined');
-        }
-        if (type !== signed._type) {
-            throw new error_1.ValueError(`expected '${type}', got ${signed['_type']}`);
-        }
-        if (!utils_1.guard.isObjectArray(signatures)) {
-            throw new TypeError('signatures is not an array');
-        }
-        let signedObj;
-        switch (type) {
-            case base_1.MetadataKind.Root:
-                signedObj = root_1.Root.fromJSON(signed);
-                break;
-            case base_1.MetadataKind.Timestamp:
-                signedObj = timestamp_1.Timestamp.fromJSON(signed);
-                break;
-            case base_1.MetadataKind.Snapshot:
-                signedObj = snapshot_1.Snapshot.fromJSON(signed);
-                break;
-            case base_1.MetadataKind.Targets:
-                signedObj = targets_1.Targets.fromJSON(signed);
-                break;
-            default:
-                throw new TypeError('invalid metadata type');
-        }
-        const sigMap = {};
-        // Ensure that each signature is unique
-        signatures.forEach((sigData) => {
-            const sig = signature_1.Signature.fromJSON(sigData);
-            if (sigMap[sig.keyID]) {
-                throw new error_1.ValueError(`multiple signatures found for keyid: ${sig.keyID}`);
-            }
-            sigMap[sig.keyID] = sig;
-        });
-        return new Metadata(signedObj, sigMap, rest);
-    }
-}
-exports.Metadata = Metadata;
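
Metadata wraps one signed role plus its signatures, and verifyDelegate() enforces the delegating role's threshold. A rough sketch of the usual call pattern, passing the plain kind strings that MetadataKind resolves to, and assuming root.json and timestamp.json already exist on disk (file names are illustrative):

    const fs = require('fs');
    const { Metadata } = require('@tufjs/models');

    const root = Metadata.fromJSON('root',
      JSON.parse(fs.readFileSync('root.json', 'utf8')));
    const timestamp = Metadata.fromJSON('timestamp',
      JSON.parse(fs.readFileSync('timestamp.json', 'utf8')));

    // Looks up the 'timestamp' role in root.signed.roles and throws
    // UnsignedMetadataError unless at least `threshold` of that role's keys
    // produced a valid signature over timestamp.signed.
    root.verifyDelegate('timestamp', timestamp);
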
diff --git a/node_modules/@tufjs/models/dist/role.js b/node_modules/@tufjs/models/dist/role.js
deleted file mode 100644
index f7ddbc6fe3f38..0000000000000
--- a/node_modules/@tufjs/models/dist/role.js
+++ /dev/null
@@ -1,299 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SuccinctRoles = exports.DelegatedRole = exports.Role = exports.TOP_LEVEL_ROLE_NAMES = void 0;
-const crypto_1 = __importDefault(require("crypto"));
-const minimatch_1 = require("minimatch");
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const utils_1 = require("./utils");
-exports.TOP_LEVEL_ROLE_NAMES = [
-    'root',
-    'targets',
-    'snapshot',
-    'timestamp',
-];
-/**
- * Container that defines which keys are required to sign roles metadata.
- *
- * Role defines how many keys are required to successfully sign the roles
- * metadata, and which keys are accepted.
- */
-class Role {
-    constructor(options) {
-        const { keyIDs, threshold, unrecognizedFields } = options;
-        if (hasDuplicates(keyIDs)) {
-            throw new error_1.ValueError('duplicate key IDs found');
-        }
-        if (threshold < 1) {
-            throw new error_1.ValueError('threshold must be at least 1');
-        }
-        this.keyIDs = keyIDs;
-        this.threshold = threshold;
-        this.unrecognizedFields = unrecognizedFields || {};
-    }
-    equals(other) {
-        if (!(other instanceof Role)) {
-            return false;
-        }
-        return (this.threshold === other.threshold &&
-            util_1.default.isDeepStrictEqual(this.keyIDs, other.keyIDs) &&
-            util_1.default.isDeepStrictEqual(this.unrecognizedFields, other.unrecognizedFields));
-    }
-    toJSON() {
-        return {
-            keyids: this.keyIDs,
-            threshold: this.threshold,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { keyids, threshold, ...rest } = data;
-        if (!utils_1.guard.isStringArray(keyids)) {
-            throw new TypeError('keyids must be an array');
-        }
-        if (typeof threshold !== 'number') {
-            throw new TypeError('threshold must be a number');
-        }
-        return new Role({
-            keyIDs: keyids,
-            threshold,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Role = Role;
-function hasDuplicates(array) {
-    return new Set(array).size !== array.length;
-}
-/**
- * A container with information about a delegated role.
- *
- * A delegation can happen in two ways:
- *   - ``paths`` is set: delegates targets matching any path pattern in ``paths``
- *   - ``pathHashPrefixes`` is set: delegates targets whose target path hash
- *      starts with any of the prefixes in ``pathHashPrefixes``
- *
- *   ``paths`` and ``pathHashPrefixes`` are mutually exclusive: they cannot both be
- *   set, and at least one of them must be set.
- */
-class DelegatedRole extends Role {
-    constructor(opts) {
-        super(opts);
-        const { name, terminating, paths, pathHashPrefixes } = opts;
-        this.name = name;
-        this.terminating = terminating;
-        if (opts.paths && opts.pathHashPrefixes) {
-            throw new error_1.ValueError('paths and pathHashPrefixes are mutually exclusive');
-        }
-        this.paths = paths;
-        this.pathHashPrefixes = pathHashPrefixes;
-    }
-    equals(other) {
-        if (!(other instanceof DelegatedRole)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            this.name === other.name &&
-            this.terminating === other.terminating &&
-            util_1.default.isDeepStrictEqual(this.paths, other.paths) &&
-            util_1.default.isDeepStrictEqual(this.pathHashPrefixes, other.pathHashPrefixes));
-    }
-    isDelegatedPath(targetFilepath) {
-        if (this.paths) {
-            return this.paths.some((pathPattern) => isTargetInPathPattern(targetFilepath, pathPattern));
-        }
-        if (this.pathHashPrefixes) {
-            const hasher = crypto_1.default.createHash('sha256');
-            const pathHash = hasher.update(targetFilepath).digest('hex');
-            return this.pathHashPrefixes.some((pathHashPrefix) => pathHash.startsWith(pathHashPrefix));
-        }
-        return false;
-    }
-    toJSON() {
-        const json = {
-            ...super.toJSON(),
-            name: this.name,
-            terminating: this.terminating,
-        };
-        if (this.paths) {
-            json.paths = this.paths;
-        }
-        if (this.pathHashPrefixes) {
-            json.path_hash_prefixes = this.pathHashPrefixes;
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { keyids, threshold, name, terminating, paths, path_hash_prefixes, ...rest } = data;
-        if (!utils_1.guard.isStringArray(keyids)) {
-            throw new TypeError('keyids must be an array of strings');
-        }
-        if (typeof threshold !== 'number') {
-            throw new TypeError('threshold must be a number');
-        }
-        if (typeof name !== 'string') {
-            throw new TypeError('name must be a string');
-        }
-        if (typeof terminating !== 'boolean') {
-            throw new TypeError('terminating must be a boolean');
-        }
-        if (utils_1.guard.isDefined(paths) && !utils_1.guard.isStringArray(paths)) {
-            throw new TypeError('paths must be an array of strings');
-        }
-        if (utils_1.guard.isDefined(path_hash_prefixes) &&
-            !utils_1.guard.isStringArray(path_hash_prefixes)) {
-            throw new TypeError('path_hash_prefixes must be an array of strings');
-        }
-        return new DelegatedRole({
-            keyIDs: keyids,
-            threshold,
-            name,
-            terminating,
-            paths,
-            pathHashPrefixes: path_hash_prefixes,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.DelegatedRole = DelegatedRole;
-// JS version of Ruby's Array#zip
-const zip = (a, b) => a.map((k, i) => [k, b[i]]);
-function isTargetInPathPattern(target, pattern) {
-    const targetParts = target.split('/');
-    const patternParts = pattern.split('/');
-    if (patternParts.length != targetParts.length) {
-        return false;
-    }
-    return zip(targetParts, patternParts).every(([targetPart, patternPart]) => (0, minimatch_1.minimatch)(targetPart, patternPart));
-}
-/**
- * Succinctly defines a hash bin delegation graph.
- *
- * A ``SuccinctRoles`` object describes a delegation graph that covers all
- * targets, distributing them uniformly over the delegated roles (i.e. bins)
- * in the graph.
- *
- * The total number of bins is 2 to the power of the passed ``bit_length``.
- *
- * Bin names are the concatenation of the passed ``name_prefix`` and a
- * zero-padded hex representation of the bin index separated by a hyphen.
- *
- * The passed ``keyids`` and ``threshold`` are used for each bin, and each bin
- * is 'terminating'.
- *
- * For details: https://github.com/theupdateframework/taps/blob/master/tap15.md
- */
-class SuccinctRoles extends Role {
-    constructor(opts) {
-        super(opts);
-        const { bitLength, namePrefix } = opts;
-        if (bitLength <= 0 || bitLength > 32) {
-            throw new error_1.ValueError('bitLength must be between 1 and 32');
-        }
-        this.bitLength = bitLength;
-        this.namePrefix = namePrefix;
-        // Calculate the suffix_len value based on the total number of bins in
-        // hex. If bit_length = 10 then number_of_bins = 1024, so bin names will
-        // have a suffix between "000" and "3ff" in hex, and suffix_len will be 3,
-        // meaning the third bin will have a suffix of "003".
-        this.numberOfBins = Math.pow(2, bitLength);
-        // suffix_len is calculated based on "number_of_bins - 1" as the name
-        // of the last bin contains the number "number_of_bins -1" as a suffix.
-        this.suffixLen = (this.numberOfBins - 1).toString(16).length;
-    }
-    equals(other) {
-        if (!(other instanceof SuccinctRoles)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            this.bitLength === other.bitLength &&
-            this.namePrefix === other.namePrefix);
-    }
-    /***
-     * Calculates the name of the delegated role responsible for 'target_filepath'.
-     *
-     * The target at path 'target_filepath' is assigned to a bin by casting
-     * the left-most 'bit_length' bits of the file path hash digest to
-     * int, using it as bin index between 0 and '2**bit_length - 1'.
-     *
-     * Args:
-     *  target_filepath: URL path to a target file, relative to a base
-     *  targets URL.
-     */
-    getRoleForTarget(targetFilepath) {
-        const hasher = crypto_1.default.createHash('sha256');
-        const hasherBuffer = hasher.update(targetFilepath).digest();
-        // can't ever need more than 4 bytes (32 bits).
-        const hashBytes = hasherBuffer.subarray(0, 4);
-        // Right shift hash bytes, so that we only have the leftmost
-        // bit_length bits that we care about.
-        const shiftValue = 32 - this.bitLength;
-        const binNumber = hashBytes.readUInt32BE() >>> shiftValue;
-        // Add zero padding if necessary and cast to hex the suffix.
-        const suffix = binNumber.toString(16).padStart(this.suffixLen, '0');
-        return `${this.namePrefix}-${suffix}`;
-    }
-    *getRoles() {
-        for (let i = 0; i < this.numberOfBins; i++) {
-            const suffix = i.toString(16).padStart(this.suffixLen, '0');
-            yield `${this.namePrefix}-${suffix}`;
-        }
-    }
-    /***
-     * Determines whether the given ``role_name`` is in one of
-     * the delegated roles that ``SuccinctRoles`` represents.
-     *
-     * Args:
-     *  role_name: The name of the role to check against.
-     */
-    isDelegatedRole(roleName) {
-        const desiredPrefix = this.namePrefix + '-';
-        if (!roleName.startsWith(desiredPrefix)) {
-            return false;
-        }
-        const suffix = roleName.slice(desiredPrefix.length, roleName.length);
-        if (suffix.length != this.suffixLen) {
-            return false;
-        }
-        // make sure the suffix is a hex string
-        if (!suffix.match(/^[0-9a-fA-F]+$/)) {
-            return false;
-        }
-        const num = parseInt(suffix, 16);
-        return 0 <= num && num < this.numberOfBins;
-    }
-    toJSON() {
-        const json = {
-            ...super.toJSON(),
-            bit_length: this.bitLength,
-            name_prefix: this.namePrefix,
-        };
-        return json;
-    }
-    static fromJSON(data) {
-        const { keyids, threshold, bit_length, name_prefix, ...rest } = data;
-        if (!utils_1.guard.isStringArray(keyids)) {
-            throw new TypeError('keyids must be an array of strings');
-        }
-        if (typeof threshold !== 'number') {
-            throw new TypeError('threshold must be a number');
-        }
-        if (typeof bit_length !== 'number') {
-            throw new TypeError('bit_length must be a number');
-        }
-        if (typeof name_prefix !== 'string') {
-            throw new TypeError('name_prefix must be a string');
-        }
-        return new SuccinctRoles({
-            keyIDs: keyids,
-            threshold,
-            bitLength: bit_length,
-            namePrefix: name_prefix,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.SuccinctRoles = SuccinctRoles;
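
The SuccinctRoles comments above describe the TAP 15 hash-bin scheme: sha256 the target path, keep the left-most bitLength bits as the bin index, and hex-pad that index to suffixLen digits. A quick sketch that exercises the mapping, assuming SuccinctRoles is re-exported from the package entry point (the key ID is a placeholder):

    const { SuccinctRoles } = require('@tufjs/models');

    const bins = new SuccinctRoles({
      keyIDs: ['keyid-placeholder'],
      threshold: 1,
      bitLength: 10,        // 2 ** 10 = 1024 bins, so suffixes run 000..3ff
      namePrefix: 'bins',
    });

    console.log(bins.getRoleForTarget('packages/foo-1.0.0.tgz'));  // e.g. 'bins-2a7'
    console.log(bins.isDelegatedRole('bins-3ff'));  // true, the last bin
    console.log(bins.isDelegatedRole('bins-400'));  // false, past the last bin
    console.log([...bins.getRoles()].length);       // 1024
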
diff --git a/node_modules/@tufjs/models/dist/root.js b/node_modules/@tufjs/models/dist/root.js
deleted file mode 100644
index 36d0ef0f186d1..0000000000000
--- a/node_modules/@tufjs/models/dist/root.js
+++ /dev/null
@@ -1,116 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Root = void 0;
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const error_1 = require("./error");
-const key_1 = require("./key");
-const role_1 = require("./role");
-const utils_1 = require("./utils");
-/**
- * A container for the signed part of root metadata.
- *
- * The top-level role and metadata file signed by the root keys.
- * This role specifies trusted keys for all other top-level roles, which may further delegate trust.
- */
-class Root extends base_1.Signed {
-    constructor(options) {
-        super(options);
-        this.type = base_1.MetadataKind.Root;
-        this.keys = options.keys || {};
-        this.consistentSnapshot = options.consistentSnapshot ?? true;
-        if (!options.roles) {
-            this.roles = role_1.TOP_LEVEL_ROLE_NAMES.reduce((acc, role) => ({
-                ...acc,
-                [role]: new role_1.Role({ keyIDs: [], threshold: 1 }),
-            }), {});
-        }
-        else {
-            const roleNames = new Set(Object.keys(options.roles));
-            if (!role_1.TOP_LEVEL_ROLE_NAMES.every((role) => roleNames.has(role))) {
-                throw new error_1.ValueError('missing top-level role');
-            }
-            this.roles = options.roles;
-        }
-    }
-    addKey(key, role) {
-        if (!this.roles[role]) {
-            throw new error_1.ValueError(`role ${role} does not exist`);
-        }
-        if (!this.roles[role].keyIDs.includes(key.keyID)) {
-            this.roles[role].keyIDs.push(key.keyID);
-        }
-        this.keys[key.keyID] = key;
-    }
-    equals(other) {
-        if (!(other instanceof Root)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            this.consistentSnapshot === other.consistentSnapshot &&
-            util_1.default.isDeepStrictEqual(this.keys, other.keys) &&
-            util_1.default.isDeepStrictEqual(this.roles, other.roles));
-    }
-    toJSON() {
-        return {
-            _type: this.type,
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            keys: keysToJSON(this.keys),
-            roles: rolesToJSON(this.roles),
-            consistent_snapshot: this.consistentSnapshot,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { keys, roles, consistent_snapshot, ...rest } = unrecognizedFields;
-        if (typeof consistent_snapshot !== 'boolean') {
-            throw new TypeError('consistent_snapshot must be a boolean');
-        }
-        return new Root({
-            ...commonFields,
-            keys: keysFromJSON(keys),
-            roles: rolesFromJSON(roles),
-            consistentSnapshot: consistent_snapshot,
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Root = Root;
-function keysToJSON(keys) {
-    return Object.entries(keys).reduce((acc, [keyID, key]) => ({ ...acc, [keyID]: key.toJSON() }), {});
-}
-function rolesToJSON(roles) {
-    return Object.entries(roles).reduce((acc, [roleName, role]) => ({ ...acc, [roleName]: role.toJSON() }), {});
-}
-function keysFromJSON(data) {
-    let keys;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('keys must be an object');
-        }
-        keys = Object.entries(data).reduce((acc, [keyID, keyData]) => ({
-            ...acc,
-            [keyID]: key_1.Key.fromJSON(keyID, keyData),
-        }), {});
-    }
-    return keys;
-}
-function rolesFromJSON(data) {
-    let roles;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('roles must be an object');
-        }
-        roles = Object.entries(data).reduce((acc, [roleName, roleData]) => ({
-            ...acc,
-            [roleName]: role_1.Role.fromJSON(roleData),
-        }), {});
-    }
-    return roles;
-}
diff --git a/node_modules/@tufjs/models/dist/signature.js b/node_modules/@tufjs/models/dist/signature.js
deleted file mode 100644
index 33eb204eb0835..0000000000000
--- a/node_modules/@tufjs/models/dist/signature.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = void 0;
-/**
- * A container class containing information about a signature.
- *
- * Contains a signature and the keyid uniquely identifying the key used
- * to generate the signature.
- *
- * Provides a `fromJSON` method to create a Signature from a JSON object.
- */
-class Signature {
-    constructor(options) {
-        const { keyID, sig } = options;
-        this.keyID = keyID;
-        this.sig = sig;
-    }
-    toJSON() {
-        return {
-            keyid: this.keyID,
-            sig: this.sig,
-        };
-    }
-    static fromJSON(data) {
-        const { keyid, sig } = data;
-        if (typeof keyid !== 'string') {
-            throw new TypeError('keyid must be a string');
-        }
-        if (typeof sig !== 'string') {
-            throw new TypeError('sig must be a string');
-        }
-        return new Signature({
-            keyID: keyid,
-            sig: sig,
-        });
-    }
-}
-exports.Signature = Signature;
diff --git a/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/@tufjs/models/dist/snapshot.js
deleted file mode 100644
index e90ea8e729e4e..0000000000000
--- a/node_modules/@tufjs/models/dist/snapshot.js
+++ /dev/null
@@ -1,71 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Snapshot = void 0;
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const file_1 = require("./file");
-const utils_1 = require("./utils");
-/**
- * A container for the signed part of snapshot metadata.
- *
- * Snapshot contains information about all target Metadata files.
- * A top-level role that specifies the latest versions of all targets metadata files,
- * and hence the latest versions of all targets (including any dependencies between them) on the repository.
- */
-class Snapshot extends base_1.Signed {
-    constructor(opts) {
-        super(opts);
-        this.type = base_1.MetadataKind.Snapshot;
-        this.meta = opts.meta || { 'targets.json': new file_1.MetaFile({ version: 1 }) };
-    }
-    equals(other) {
-        if (!(other instanceof Snapshot)) {
-            return false;
-        }
-        return super.equals(other) && util_1.default.isDeepStrictEqual(this.meta, other.meta);
-    }
-    toJSON() {
-        return {
-            _type: this.type,
-            meta: metaToJSON(this.meta),
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { meta, ...rest } = unrecognizedFields;
-        return new Snapshot({
-            ...commonFields,
-            meta: metaFromJSON(meta),
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Snapshot = Snapshot;
-function metaToJSON(meta) {
-    return Object.entries(meta).reduce((acc, [path, metadata]) => ({
-        ...acc,
-        [path]: metadata.toJSON(),
-    }), {});
-}
-function metaFromJSON(data) {
-    let meta;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('meta field is malformed');
-        }
-        else {
-            meta = Object.entries(data).reduce((acc, [path, metadata]) => ({
-                ...acc,
-                [path]: file_1.MetaFile.fromJSON(metadata),
-            }), {});
-        }
-    }
-    return meta;
-}
diff --git a/node_modules/@tufjs/models/dist/targets.js b/node_modules/@tufjs/models/dist/targets.js
deleted file mode 100644
index 54bd8f8c554af..0000000000000
--- a/node_modules/@tufjs/models/dist/targets.js
+++ /dev/null
@@ -1,92 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Targets = void 0;
-const util_1 = __importDefault(require("util"));
-const base_1 = require("./base");
-const delegations_1 = require("./delegations");
-const file_1 = require("./file");
-const utils_1 = require("./utils");
-// Container for the signed part of targets metadata.
-//
-// Targets contains verifying information about target files and also delegates
-// responsibility to other Targets roles.
-class Targets extends base_1.Signed {
-    constructor(options) {
-        super(options);
-        this.type = base_1.MetadataKind.Targets;
-        this.targets = options.targets || {};
-        this.delegations = options.delegations;
-    }
-    addTarget(target) {
-        this.targets[target.path] = target;
-    }
-    equals(other) {
-        if (!(other instanceof Targets)) {
-            return false;
-        }
-        return (super.equals(other) &&
-            util_1.default.isDeepStrictEqual(this.targets, other.targets) &&
-            util_1.default.isDeepStrictEqual(this.delegations, other.delegations));
-    }
-    toJSON() {
-        const json = {
-            _type: this.type,
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            targets: targetsToJSON(this.targets),
-            ...this.unrecognizedFields,
-        };
-        if (this.delegations) {
-            json.delegations = this.delegations.toJSON();
-        }
-        return json;
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { targets, delegations, ...rest } = unrecognizedFields;
-        return new Targets({
-            ...commonFields,
-            targets: targetsFromJSON(targets),
-            delegations: delegationsFromJSON(delegations),
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Targets = Targets;
-function targetsToJSON(targets) {
-    return Object.entries(targets).reduce((acc, [path, target]) => ({
-        ...acc,
-        [path]: target.toJSON(),
-    }), {});
-}
-function targetsFromJSON(data) {
-    let targets;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObjectRecord(data)) {
-            throw new TypeError('targets must be an object');
-        }
-        else {
-            targets = Object.entries(data).reduce((acc, [path, target]) => ({
-                ...acc,
-                [path]: file_1.TargetFile.fromJSON(path, target),
-            }), {});
-        }
-    }
-    return targets;
-}
-function delegationsFromJSON(data) {
-    let delegations;
-    if (utils_1.guard.isDefined(data)) {
-        if (!utils_1.guard.isObject(data)) {
-            throw new TypeError('delegations must be an object');
-        }
-        else {
-            delegations = delegations_1.Delegations.fromJSON(data);
-        }
-    }
-    return delegations;
-}
diff --git a/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/@tufjs/models/dist/timestamp.js
deleted file mode 100644
index 9880c4c9fc254..0000000000000
--- a/node_modules/@tufjs/models/dist/timestamp.js
+++ /dev/null
@@ -1,58 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-const base_1 = require("./base");
-const file_1 = require("./file");
-const utils_1 = require("./utils");
-/**
- * A container for the signed part of timestamp metadata.
- *
- * A top-level role that specifies the latest version of the snapshot role metadata file,
- * and hence the latest versions of all metadata and targets on the repository.
- */
-class Timestamp extends base_1.Signed {
-    constructor(options) {
-        super(options);
-        this.type = base_1.MetadataKind.Timestamp;
-        this.snapshotMeta = options.snapshotMeta || new file_1.MetaFile({ version: 1 });
-    }
-    equals(other) {
-        if (!(other instanceof Timestamp)) {
-            return false;
-        }
-        return super.equals(other) && this.snapshotMeta.equals(other.snapshotMeta);
-    }
-    toJSON() {
-        return {
-            _type: this.type,
-            spec_version: this.specVersion,
-            version: this.version,
-            expires: this.expires,
-            meta: { 'snapshot.json': this.snapshotMeta.toJSON() },
-            ...this.unrecognizedFields,
-        };
-    }
-    static fromJSON(data) {
-        const { unrecognizedFields, ...commonFields } = base_1.Signed.commonFieldsFromJSON(data);
-        const { meta, ...rest } = unrecognizedFields;
-        return new Timestamp({
-            ...commonFields,
-            snapshotMeta: snapshotMetaFromJSON(meta),
-            unrecognizedFields: rest,
-        });
-    }
-}
-exports.Timestamp = Timestamp;
-function snapshotMetaFromJSON(data) {
-    let snapshotMeta;
-    if (utils_1.guard.isDefined(data)) {
-        const snapshotData = data['snapshot.json'];
-        if (!utils_1.guard.isDefined(snapshotData) || !utils_1.guard.isObject(snapshotData)) {
-            throw new TypeError('missing snapshot.json in meta');
-        }
-        else {
-            snapshotMeta = file_1.MetaFile.fromJSON(snapshotData);
-        }
-    }
-    return snapshotMeta;
-}
diff --git a/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/@tufjs/models/dist/utils/index.js
deleted file mode 100644
index 872aae28049c9..0000000000000
--- a/node_modules/@tufjs/models/dist/utils/index.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.crypto = exports.guard = void 0;
-exports.guard = __importStar(require("./guard"));
-exports.crypto = __importStar(require("./verify"));
diff --git a/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json
deleted file mode 100644
index 8e5132ddf1079..0000000000000
--- a/node_modules/@tufjs/models/package.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
-  "name": "@tufjs/models",
-  "version": "3.0.1",
-  "description": "TUF metadata models",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "build": "tsc --build",
-    "clean": "rm -rf dist && rm tsconfig.tsbuildinfo",
-    "test": "jest"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/theupdateframework/tuf-js.git"
-  },
-  "keywords": [
-    "tuf",
-    "security",
-    "update"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/theupdateframework/tuf-js/issues"
-  },
-  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/models#readme",
-  "dependencies": {
-    "@tufjs/canonical-json": "2.0.0",
-    "minimatch": "^9.0.5"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/LICENSE b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
deleted file mode 100644
index 5c4f37bfaf3fb..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/envelope.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: envelope.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Envelope = void 0;
-exports.Envelope = {
-    fromJSON(object) {
-        return {
-            payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0),
-            payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "",
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => exports.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payload.length !== 0) {
-            obj.payload = base64FromBytes(message.payload);
-        }
-        if (message.payloadType !== "") {
-            obj.payloadType = message.payloadType;
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => exports.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0),
-            keyid: isSet(object.keyid) ? globalThis.String(object.keyid) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.sig.length !== 0) {
-            obj.sig = base64FromBytes(message.sig);
-        }
-        if (message.keyid !== "") {
-            obj.keyid = message.keyid;
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
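
The generated codecs above shuttle DSSE envelopes between protobuf JSON (base64 strings) and Buffers. A short round-trip sketch with placeholder values, assuming the package entry point re-exports the generated Envelope:

    const { Envelope } = require('@sigstore/protobuf-specs');

    const env = Envelope.fromJSON({
      payload: Buffer.from('{"hello":"world"}').toString('base64'),
      payloadType: 'application/vnd.in-toto+json',
      signatures: [{ sig: Buffer.from('not-a-real-sig').toString('base64'), keyid: 'key-1' }],
    });
    console.log(env.payload.toString());        // {"hello":"world"}
    console.log(Envelope.toJSON(env).payload);  // back to a base64 string
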
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
deleted file mode 100644
index 6138fef5672fc..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/events.js
+++ /dev/null
@@ -1,174 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: events.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CloudEventBatch = exports.CloudEvent_CloudEventAttributeValue = exports.CloudEvent_AttributesEntry = exports.CloudEvent = void 0;
-/* eslint-disable */
-const any_1 = require("./google/protobuf/any");
-const timestamp_1 = require("./google/protobuf/timestamp");
-exports.CloudEvent = {
-    fromJSON(object) {
-        return {
-            id: isSet(object.id) ? globalThis.String(object.id) : "",
-            source: isSet(object.source) ? globalThis.String(object.source) : "",
-            specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            attributes: isObject(object.attributes)
-                ? Object.entries(object.attributes).reduce((acc, [key, value]) => {
-                    acc[key] = exports.CloudEvent_CloudEventAttributeValue.fromJSON(value);
-                    return acc;
-                }, {})
-                : {},
-            data: isSet(object.binaryData)
-                ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) }
-                : isSet(object.textData)
-                    ? { $case: "textData", textData: globalThis.String(object.textData) }
-                    : isSet(object.protoData)
-                        ? { $case: "protoData", protoData: any_1.Any.fromJSON(object.protoData) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id !== "") {
-            obj.id = message.id;
-        }
-        if (message.source !== "") {
-            obj.source = message.source;
-        }
-        if (message.specVersion !== "") {
-            obj.specVersion = message.specVersion;
-        }
-        if (message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.attributes) {
-            const entries = Object.entries(message.attributes);
-            if (entries.length > 0) {
-                obj.attributes = {};
-                entries.forEach(([k, v]) => {
-                    obj.attributes[k] = exports.CloudEvent_CloudEventAttributeValue.toJSON(v);
-                });
-            }
-        }
-        if (message.data?.$case === "binaryData") {
-            obj.binaryData = base64FromBytes(message.data.binaryData);
-        }
-        else if (message.data?.$case === "textData") {
-            obj.textData = message.data.textData;
-        }
-        else if (message.data?.$case === "protoData") {
-            obj.protoData = any_1.Any.toJSON(message.data.protoData);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_AttributesEntry = {
-    fromJSON(object) {
-        return {
-            key: isSet(object.key) ? globalThis.String(object.key) : "",
-            value: isSet(object.value) ? exports.CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.key !== "") {
-            obj.key = message.key;
-        }
-        if (message.value !== undefined) {
-            obj.value = exports.CloudEvent_CloudEventAttributeValue.toJSON(message.value);
-        }
-        return obj;
-    },
-};
-exports.CloudEvent_CloudEventAttributeValue = {
-    fromJSON(object) {
-        return {
-            attr: isSet(object.ceBoolean)
-                ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) }
-                : isSet(object.ceInteger)
-                    ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) }
-                    : isSet(object.ceString)
-                        ? { $case: "ceString", ceString: globalThis.String(object.ceString) }
-                        : isSet(object.ceBytes)
-                            ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) }
-                            : isSet(object.ceUri)
-                                ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) }
-                                : isSet(object.ceUriRef)
-                                    ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) }
-                                    : isSet(object.ceTimestamp)
-                                        ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) }
-                                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.attr?.$case === "ceBoolean") {
-            obj.ceBoolean = message.attr.ceBoolean;
-        }
-        else if (message.attr?.$case === "ceInteger") {
-            obj.ceInteger = Math.round(message.attr.ceInteger);
-        }
-        else if (message.attr?.$case === "ceString") {
-            obj.ceString = message.attr.ceString;
-        }
-        else if (message.attr?.$case === "ceBytes") {
-            obj.ceBytes = base64FromBytes(message.attr.ceBytes);
-        }
-        else if (message.attr?.$case === "ceUri") {
-            obj.ceUri = message.attr.ceUri;
-        }
-        else if (message.attr?.$case === "ceUriRef") {
-            obj.ceUriRef = message.attr.ceUriRef;
-        }
-        else if (message.attr?.$case === "ceTimestamp") {
-            obj.ceTimestamp = message.attr.ceTimestamp.toISOString();
-        }
-        return obj;
-    },
-};
-exports.CloudEventBatch = {
-    fromJSON(object) {
-        return {
-            events: globalThis.Array.isArray(object?.events) ? object.events.map((e) => exports.CloudEvent.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.events?.length) {
-            obj.events = message.events.map((e) => exports.CloudEvent.toJSON(e));
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isObject(value) {
-    return typeof value === "object" && value !== null;
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
deleted file mode 100644
index b4d9ccc781c2f..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/api/field_behavior.js
+++ /dev/null
@@ -1,141 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/api/field_behavior.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.FieldBehavior = void 0;
-exports.fieldBehaviorFromJSON = fieldBehaviorFromJSON;
-exports.fieldBehaviorToJSON = fieldBehaviorToJSON;
-/* eslint-disable */
-/**
- * An indicator of the behavior of a given field (for example, that a field
- * is required in requests, or given as output but ignored as input).
- * This **does not** change the behavior in protocol buffers itself; it only
- * denotes the behavior and may affect how API tooling handles the field.
- *
- * Note: This enum **may** receive new values in the future.
- */
-var FieldBehavior;
-(function (FieldBehavior) {
-    /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */
-    FieldBehavior[FieldBehavior["FIELD_BEHAVIOR_UNSPECIFIED"] = 0] = "FIELD_BEHAVIOR_UNSPECIFIED";
-    /**
-     * OPTIONAL - Specifically denotes a field as optional.
-     * While all fields in protocol buffers are optional, this may be specified
-     * for emphasis if appropriate.
-     */
-    FieldBehavior[FieldBehavior["OPTIONAL"] = 1] = "OPTIONAL";
-    /**
-     * REQUIRED - Denotes a field as required.
-     * This indicates that the field **must** be provided as part of the request,
-     * and failure to do so will cause an error (usually `INVALID_ARGUMENT`).
-     */
-    FieldBehavior[FieldBehavior["REQUIRED"] = 2] = "REQUIRED";
-    /**
-     * OUTPUT_ONLY - Denotes a field as output only.
-     * This indicates that the field is provided in responses, but including the
-     * field in a request does nothing (the server *must* ignore it and
-     * *must not* throw an error as a result of the field's presence).
-     */
-    FieldBehavior[FieldBehavior["OUTPUT_ONLY"] = 3] = "OUTPUT_ONLY";
-    /**
-     * INPUT_ONLY - Denotes a field as input only.
-     * This indicates that the field is provided in requests, and the
-     * corresponding field is not included in output.
-     */
-    FieldBehavior[FieldBehavior["INPUT_ONLY"] = 4] = "INPUT_ONLY";
-    /**
-     * IMMUTABLE - Denotes a field as immutable.
-     * This indicates that the field may be set once in a request to create a
-     * resource, but may not be changed thereafter.
-     */
-    FieldBehavior[FieldBehavior["IMMUTABLE"] = 5] = "IMMUTABLE";
-    /**
-     * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list.
-     * This indicates that the service may provide the elements of the list
-     * in any arbitrary  order, rather than the order the user originally
-     * provided. Additionally, the list's order may or may not be stable.
-     */
-    FieldBehavior[FieldBehavior["UNORDERED_LIST"] = 6] = "UNORDERED_LIST";
-    /**
-     * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set.
-     * This indicates that if the user provides the empty value in a request,
-     * a non-empty value will be returned. The user will not be aware of what
-     * non-empty value to expect.
-     */
-    FieldBehavior[FieldBehavior["NON_EMPTY_DEFAULT"] = 7] = "NON_EMPTY_DEFAULT";
-    /**
-     * IDENTIFIER - Denotes that the field in a resource (a message annotated with
-     * google.api.resource) is used in the resource name to uniquely identify the
-     * resource. For AIP-compliant APIs, this should only be applied to the
-     * `name` field on the resource.
-     *
-     * This behavior should not be applied to references to other resources within
-     * the message.
-     *
-     * The identifier field of resources often have different field behavior
-     * depending on the request it is embedded in (e.g. for Create methods name
-     * is optional and unused, while for Update methods it is required). Instead
-     * of method-specific annotations, only `IDENTIFIER` is required.
-     */
-    FieldBehavior[FieldBehavior["IDENTIFIER"] = 8] = "IDENTIFIER";
-})(FieldBehavior || (exports.FieldBehavior = FieldBehavior = {}));
-function fieldBehaviorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_BEHAVIOR_UNSPECIFIED":
-            return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED;
-        case 1:
-        case "OPTIONAL":
-            return FieldBehavior.OPTIONAL;
-        case 2:
-        case "REQUIRED":
-            return FieldBehavior.REQUIRED;
-        case 3:
-        case "OUTPUT_ONLY":
-            return FieldBehavior.OUTPUT_ONLY;
-        case 4:
-        case "INPUT_ONLY":
-            return FieldBehavior.INPUT_ONLY;
-        case 5:
-        case "IMMUTABLE":
-            return FieldBehavior.IMMUTABLE;
-        case 6:
-        case "UNORDERED_LIST":
-            return FieldBehavior.UNORDERED_LIST;
-        case 7:
-        case "NON_EMPTY_DEFAULT":
-            return FieldBehavior.NON_EMPTY_DEFAULT;
-        case 8:
-        case "IDENTIFIER":
-            return FieldBehavior.IDENTIFIER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
-function fieldBehaviorToJSON(object) {
-    switch (object) {
-        case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED:
-            return "FIELD_BEHAVIOR_UNSPECIFIED";
-        case FieldBehavior.OPTIONAL:
-            return "OPTIONAL";
-        case FieldBehavior.REQUIRED:
-            return "REQUIRED";
-        case FieldBehavior.OUTPUT_ONLY:
-            return "OUTPUT_ONLY";
-        case FieldBehavior.INPUT_ONLY:
-            return "INPUT_ONLY";
-        case FieldBehavior.IMMUTABLE:
-            return "IMMUTABLE";
-        case FieldBehavior.UNORDERED_LIST:
-            return "UNORDERED_LIST";
-        case FieldBehavior.NON_EMPTY_DEFAULT:
-            return "NON_EMPTY_DEFAULT";
-        case FieldBehavior.IDENTIFIER:
-            return "IDENTIFIER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior");
-    }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
deleted file mode 100644
index f0c8aab773e4c..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/any.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/any.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Any = void 0;
-exports.Any = {
-    fromJSON(object) {
-        return {
-            typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.typeUrl !== "") {
-            obj.typeUrl = message.typeUrl;
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
deleted file mode 100644
index d6f8ddddf799d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/descriptor.js
+++ /dev/null
@@ -1,2042 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/descriptor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GeneratedCodeInfo = exports.SourceCodeInfo_Location = exports.SourceCodeInfo = exports.FeatureSetDefaults_FeatureSetEditionDefault = exports.FeatureSetDefaults = exports.FeatureSet = exports.UninterpretedOption_NamePart = exports.UninterpretedOption = exports.MethodOptions = exports.ServiceOptions = exports.EnumValueOptions = exports.EnumOptions = exports.OneofOptions = exports.FieldOptions_FeatureSupport = exports.FieldOptions_EditionDefault = exports.FieldOptions = exports.MessageOptions = exports.FileOptions = exports.MethodDescriptorProto = exports.ServiceDescriptorProto = exports.EnumValueDescriptorProto = exports.EnumDescriptorProto_EnumReservedRange = exports.EnumDescriptorProto = exports.OneofDescriptorProto = exports.FieldDescriptorProto = exports.ExtensionRangeOptions_Declaration = exports.ExtensionRangeOptions = exports.DescriptorProto_ReservedRange = exports.DescriptorProto_ExtensionRange = exports.DescriptorProto = exports.FileDescriptorProto = exports.FileDescriptorSet = exports.GeneratedCodeInfo_Annotation_Semantic = exports.FeatureSet_EnforceNamingStyle = exports.FeatureSet_JsonFormat = exports.FeatureSet_MessageEncoding = exports.FeatureSet_Utf8Validation = exports.FeatureSet_RepeatedFieldEncoding = exports.FeatureSet_EnumType = exports.FeatureSet_FieldPresence = exports.MethodOptions_IdempotencyLevel = exports.FieldOptions_OptionTargetType = exports.FieldOptions_OptionRetention = exports.FieldOptions_JSType = exports.FieldOptions_CType = exports.FileOptions_OptimizeMode = exports.FieldDescriptorProto_Label = exports.FieldDescriptorProto_Type = exports.ExtensionRangeOptions_VerificationState = exports.Edition = void 0;
-exports.GeneratedCodeInfo_Annotation = void 0;
-exports.editionFromJSON = editionFromJSON;
-exports.editionToJSON = editionToJSON;
-exports.extensionRangeOptions_VerificationStateFromJSON = extensionRangeOptions_VerificationStateFromJSON;
-exports.extensionRangeOptions_VerificationStateToJSON = extensionRangeOptions_VerificationStateToJSON;
-exports.fieldDescriptorProto_TypeFromJSON = fieldDescriptorProto_TypeFromJSON;
-exports.fieldDescriptorProto_TypeToJSON = fieldDescriptorProto_TypeToJSON;
-exports.fieldDescriptorProto_LabelFromJSON = fieldDescriptorProto_LabelFromJSON;
-exports.fieldDescriptorProto_LabelToJSON = fieldDescriptorProto_LabelToJSON;
-exports.fileOptions_OptimizeModeFromJSON = fileOptions_OptimizeModeFromJSON;
-exports.fileOptions_OptimizeModeToJSON = fileOptions_OptimizeModeToJSON;
-exports.fieldOptions_CTypeFromJSON = fieldOptions_CTypeFromJSON;
-exports.fieldOptions_CTypeToJSON = fieldOptions_CTypeToJSON;
-exports.fieldOptions_JSTypeFromJSON = fieldOptions_JSTypeFromJSON;
-exports.fieldOptions_JSTypeToJSON = fieldOptions_JSTypeToJSON;
-exports.fieldOptions_OptionRetentionFromJSON = fieldOptions_OptionRetentionFromJSON;
-exports.fieldOptions_OptionRetentionToJSON = fieldOptions_OptionRetentionToJSON;
-exports.fieldOptions_OptionTargetTypeFromJSON = fieldOptions_OptionTargetTypeFromJSON;
-exports.fieldOptions_OptionTargetTypeToJSON = fieldOptions_OptionTargetTypeToJSON;
-exports.methodOptions_IdempotencyLevelFromJSON = methodOptions_IdempotencyLevelFromJSON;
-exports.methodOptions_IdempotencyLevelToJSON = methodOptions_IdempotencyLevelToJSON;
-exports.featureSet_FieldPresenceFromJSON = featureSet_FieldPresenceFromJSON;
-exports.featureSet_FieldPresenceToJSON = featureSet_FieldPresenceToJSON;
-exports.featureSet_EnumTypeFromJSON = featureSet_EnumTypeFromJSON;
-exports.featureSet_EnumTypeToJSON = featureSet_EnumTypeToJSON;
-exports.featureSet_RepeatedFieldEncodingFromJSON = featureSet_RepeatedFieldEncodingFromJSON;
-exports.featureSet_RepeatedFieldEncodingToJSON = featureSet_RepeatedFieldEncodingToJSON;
-exports.featureSet_Utf8ValidationFromJSON = featureSet_Utf8ValidationFromJSON;
-exports.featureSet_Utf8ValidationToJSON = featureSet_Utf8ValidationToJSON;
-exports.featureSet_MessageEncodingFromJSON = featureSet_MessageEncodingFromJSON;
-exports.featureSet_MessageEncodingToJSON = featureSet_MessageEncodingToJSON;
-exports.featureSet_JsonFormatFromJSON = featureSet_JsonFormatFromJSON;
-exports.featureSet_JsonFormatToJSON = featureSet_JsonFormatToJSON;
-exports.featureSet_EnforceNamingStyleFromJSON = featureSet_EnforceNamingStyleFromJSON;
-exports.featureSet_EnforceNamingStyleToJSON = featureSet_EnforceNamingStyleToJSON;
-exports.generatedCodeInfo_Annotation_SemanticFromJSON = generatedCodeInfo_Annotation_SemanticFromJSON;
-exports.generatedCodeInfo_Annotation_SemanticToJSON = generatedCodeInfo_Annotation_SemanticToJSON;
-/* eslint-disable */
-/** The full set of known editions. */
-var Edition;
-(function (Edition) {
-    /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */
-    Edition[Edition["EDITION_UNKNOWN"] = 0] = "EDITION_UNKNOWN";
-    /**
-     * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature
-     * was first introduced.  This is effectively an "infinite past".
-     */
-    Edition[Edition["EDITION_LEGACY"] = 900] = "EDITION_LEGACY";
-    /**
-     * EDITION_PROTO2 - Legacy syntax "editions".  These pre-date editions, but behave much like
-     * distinct editions.  These can't be used to specify the edition of proto
-     * files, but feature definitions must supply proto2/proto3 defaults for
-     * backwards compatibility.
-     */
-    Edition[Edition["EDITION_PROTO2"] = 998] = "EDITION_PROTO2";
-    Edition[Edition["EDITION_PROTO3"] = 999] = "EDITION_PROTO3";
-    /**
-     * EDITION_2023 - Editions that have been released.  The specific values are arbitrary and
-     * should not be depended on, but they will always be time-ordered for easy
-     * comparison.
-     */
-    Edition[Edition["EDITION_2023"] = 1000] = "EDITION_2023";
-    Edition[Edition["EDITION_2024"] = 1001] = "EDITION_2024";
-    /**
-     * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution.  These should not be
-     * used or relied on outside of tests.
-     */
-    Edition[Edition["EDITION_1_TEST_ONLY"] = 1] = "EDITION_1_TEST_ONLY";
-    Edition[Edition["EDITION_2_TEST_ONLY"] = 2] = "EDITION_2_TEST_ONLY";
-    Edition[Edition["EDITION_99997_TEST_ONLY"] = 99997] = "EDITION_99997_TEST_ONLY";
-    Edition[Edition["EDITION_99998_TEST_ONLY"] = 99998] = "EDITION_99998_TEST_ONLY";
-    Edition[Edition["EDITION_99999_TEST_ONLY"] = 99999] = "EDITION_99999_TEST_ONLY";
-    /**
-     * EDITION_MAX - Placeholder for specifying unbounded edition support.  This should only
-     * ever be used by plugins that can expect to never require any changes to
-     * support a new edition.
-     */
-    Edition[Edition["EDITION_MAX"] = 2147483647] = "EDITION_MAX";
-})(Edition || (exports.Edition = Edition = {}));
-function editionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "EDITION_UNKNOWN":
-            return Edition.EDITION_UNKNOWN;
-        case 900:
-        case "EDITION_LEGACY":
-            return Edition.EDITION_LEGACY;
-        case 998:
-        case "EDITION_PROTO2":
-            return Edition.EDITION_PROTO2;
-        case 999:
-        case "EDITION_PROTO3":
-            return Edition.EDITION_PROTO3;
-        case 1000:
-        case "EDITION_2023":
-            return Edition.EDITION_2023;
-        case 1001:
-        case "EDITION_2024":
-            return Edition.EDITION_2024;
-        case 1:
-        case "EDITION_1_TEST_ONLY":
-            return Edition.EDITION_1_TEST_ONLY;
-        case 2:
-        case "EDITION_2_TEST_ONLY":
-            return Edition.EDITION_2_TEST_ONLY;
-        case 99997:
-        case "EDITION_99997_TEST_ONLY":
-            return Edition.EDITION_99997_TEST_ONLY;
-        case 99998:
-        case "EDITION_99998_TEST_ONLY":
-            return Edition.EDITION_99998_TEST_ONLY;
-        case 99999:
-        case "EDITION_99999_TEST_ONLY":
-            return Edition.EDITION_99999_TEST_ONLY;
-        case 2147483647:
-        case "EDITION_MAX":
-            return Edition.EDITION_MAX;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-function editionToJSON(object) {
-    switch (object) {
-        case Edition.EDITION_UNKNOWN:
-            return "EDITION_UNKNOWN";
-        case Edition.EDITION_LEGACY:
-            return "EDITION_LEGACY";
-        case Edition.EDITION_PROTO2:
-            return "EDITION_PROTO2";
-        case Edition.EDITION_PROTO3:
-            return "EDITION_PROTO3";
-        case Edition.EDITION_2023:
-            return "EDITION_2023";
-        case Edition.EDITION_2024:
-            return "EDITION_2024";
-        case Edition.EDITION_1_TEST_ONLY:
-            return "EDITION_1_TEST_ONLY";
-        case Edition.EDITION_2_TEST_ONLY:
-            return "EDITION_2_TEST_ONLY";
-        case Edition.EDITION_99997_TEST_ONLY:
-            return "EDITION_99997_TEST_ONLY";
-        case Edition.EDITION_99998_TEST_ONLY:
-            return "EDITION_99998_TEST_ONLY";
-        case Edition.EDITION_99999_TEST_ONLY:
-            return "EDITION_99999_TEST_ONLY";
-        case Edition.EDITION_MAX:
-            return "EDITION_MAX";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition");
-    }
-}
-/** The verification state of the extension range. */
-var ExtensionRangeOptions_VerificationState;
-(function (ExtensionRangeOptions_VerificationState) {
-    /** DECLARATION - All the extensions of the range must be declared. */
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["DECLARATION"] = 0] = "DECLARATION";
-    ExtensionRangeOptions_VerificationState[ExtensionRangeOptions_VerificationState["UNVERIFIED"] = 1] = "UNVERIFIED";
-})(ExtensionRangeOptions_VerificationState || (exports.ExtensionRangeOptions_VerificationState = ExtensionRangeOptions_VerificationState = {}));
-function extensionRangeOptions_VerificationStateFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "DECLARATION":
-            return ExtensionRangeOptions_VerificationState.DECLARATION;
-        case 1:
-        case "UNVERIFIED":
-            return ExtensionRangeOptions_VerificationState.UNVERIFIED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-function extensionRangeOptions_VerificationStateToJSON(object) {
-    switch (object) {
-        case ExtensionRangeOptions_VerificationState.DECLARATION:
-            return "DECLARATION";
-        case ExtensionRangeOptions_VerificationState.UNVERIFIED:
-            return "UNVERIFIED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState");
-    }
-}
-var FieldDescriptorProto_Type;
-(function (FieldDescriptorProto_Type) {
-    /**
-     * TYPE_DOUBLE - 0 is reserved for errors.
-     * Order is weird for historical reasons.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_DOUBLE"] = 1] = "TYPE_DOUBLE";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FLOAT"] = 2] = "TYPE_FLOAT";
-    /**
-     * TYPE_INT64 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT64 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT64"] = 3] = "TYPE_INT64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT64"] = 4] = "TYPE_UINT64";
-    /**
-     * TYPE_INT32 - Not ZigZag encoded.  Negative numbers take 10 bytes.  Use TYPE_SINT32 if
-     * negative values are likely.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_INT32"] = 5] = "TYPE_INT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED64"] = 6] = "TYPE_FIXED64";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_FIXED32"] = 7] = "TYPE_FIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BOOL"] = 8] = "TYPE_BOOL";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_STRING"] = 9] = "TYPE_STRING";
-    /**
-     * TYPE_GROUP - Tag-delimited aggregate.
-     * Group type is deprecated and not supported after google.protobuf. However, Proto3
-     * implementations should still be able to parse the group wire format and
-     * treat group fields as unknown fields.  In Editions, the group wire format
-     * can be enabled via the `message_encoding` feature.
-     */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_GROUP"] = 10] = "TYPE_GROUP";
-    /** TYPE_MESSAGE - Length-delimited aggregate. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_MESSAGE"] = 11] = "TYPE_MESSAGE";
-    /** TYPE_BYTES - New in version 2. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_BYTES"] = 12] = "TYPE_BYTES";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_UINT32"] = 13] = "TYPE_UINT32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_ENUM"] = 14] = "TYPE_ENUM";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED32"] = 15] = "TYPE_SFIXED32";
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SFIXED64"] = 16] = "TYPE_SFIXED64";
-    /** TYPE_SINT32 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT32"] = 17] = "TYPE_SINT32";
-    /** TYPE_SINT64 - Uses ZigZag encoding. */
-    FieldDescriptorProto_Type[FieldDescriptorProto_Type["TYPE_SINT64"] = 18] = "TYPE_SINT64";
-})(FieldDescriptorProto_Type || (exports.FieldDescriptorProto_Type = FieldDescriptorProto_Type = {}));
-function fieldDescriptorProto_TypeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "TYPE_DOUBLE":
-            return FieldDescriptorProto_Type.TYPE_DOUBLE;
-        case 2:
-        case "TYPE_FLOAT":
-            return FieldDescriptorProto_Type.TYPE_FLOAT;
-        case 3:
-        case "TYPE_INT64":
-            return FieldDescriptorProto_Type.TYPE_INT64;
-        case 4:
-        case "TYPE_UINT64":
-            return FieldDescriptorProto_Type.TYPE_UINT64;
-        case 5:
-        case "TYPE_INT32":
-            return FieldDescriptorProto_Type.TYPE_INT32;
-        case 6:
-        case "TYPE_FIXED64":
-            return FieldDescriptorProto_Type.TYPE_FIXED64;
-        case 7:
-        case "TYPE_FIXED32":
-            return FieldDescriptorProto_Type.TYPE_FIXED32;
-        case 8:
-        case "TYPE_BOOL":
-            return FieldDescriptorProto_Type.TYPE_BOOL;
-        case 9:
-        case "TYPE_STRING":
-            return FieldDescriptorProto_Type.TYPE_STRING;
-        case 10:
-        case "TYPE_GROUP":
-            return FieldDescriptorProto_Type.TYPE_GROUP;
-        case 11:
-        case "TYPE_MESSAGE":
-            return FieldDescriptorProto_Type.TYPE_MESSAGE;
-        case 12:
-        case "TYPE_BYTES":
-            return FieldDescriptorProto_Type.TYPE_BYTES;
-        case 13:
-        case "TYPE_UINT32":
-            return FieldDescriptorProto_Type.TYPE_UINT32;
-        case 14:
-        case "TYPE_ENUM":
-            return FieldDescriptorProto_Type.TYPE_ENUM;
-        case 15:
-        case "TYPE_SFIXED32":
-            return FieldDescriptorProto_Type.TYPE_SFIXED32;
-        case 16:
-        case "TYPE_SFIXED64":
-            return FieldDescriptorProto_Type.TYPE_SFIXED64;
-        case 17:
-        case "TYPE_SINT32":
-            return FieldDescriptorProto_Type.TYPE_SINT32;
-        case 18:
-        case "TYPE_SINT64":
-            return FieldDescriptorProto_Type.TYPE_SINT64;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-function fieldDescriptorProto_TypeToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Type.TYPE_DOUBLE:
-            return "TYPE_DOUBLE";
-        case FieldDescriptorProto_Type.TYPE_FLOAT:
-            return "TYPE_FLOAT";
-        case FieldDescriptorProto_Type.TYPE_INT64:
-            return "TYPE_INT64";
-        case FieldDescriptorProto_Type.TYPE_UINT64:
-            return "TYPE_UINT64";
-        case FieldDescriptorProto_Type.TYPE_INT32:
-            return "TYPE_INT32";
-        case FieldDescriptorProto_Type.TYPE_FIXED64:
-            return "TYPE_FIXED64";
-        case FieldDescriptorProto_Type.TYPE_FIXED32:
-            return "TYPE_FIXED32";
-        case FieldDescriptorProto_Type.TYPE_BOOL:
-            return "TYPE_BOOL";
-        case FieldDescriptorProto_Type.TYPE_STRING:
-            return "TYPE_STRING";
-        case FieldDescriptorProto_Type.TYPE_GROUP:
-            return "TYPE_GROUP";
-        case FieldDescriptorProto_Type.TYPE_MESSAGE:
-            return "TYPE_MESSAGE";
-        case FieldDescriptorProto_Type.TYPE_BYTES:
-            return "TYPE_BYTES";
-        case FieldDescriptorProto_Type.TYPE_UINT32:
-            return "TYPE_UINT32";
-        case FieldDescriptorProto_Type.TYPE_ENUM:
-            return "TYPE_ENUM";
-        case FieldDescriptorProto_Type.TYPE_SFIXED32:
-            return "TYPE_SFIXED32";
-        case FieldDescriptorProto_Type.TYPE_SFIXED64:
-            return "TYPE_SFIXED64";
-        case FieldDescriptorProto_Type.TYPE_SINT32:
-            return "TYPE_SINT32";
-        case FieldDescriptorProto_Type.TYPE_SINT64:
-            return "TYPE_SINT64";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type");
-    }
-}
-var FieldDescriptorProto_Label;
-(function (FieldDescriptorProto_Label) {
-    /** LABEL_OPTIONAL - 0 is reserved for errors */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_OPTIONAL"] = 1] = "LABEL_OPTIONAL";
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REPEATED"] = 3] = "LABEL_REPEATED";
-    /**
-     * LABEL_REQUIRED - The required label is only allowed in google.protobuf.  In proto3 and Editions
-     * it's explicitly prohibited.  In Editions, the `field_presence` feature
-     * can be used to get this behavior.
-     */
-    FieldDescriptorProto_Label[FieldDescriptorProto_Label["LABEL_REQUIRED"] = 2] = "LABEL_REQUIRED";
-})(FieldDescriptorProto_Label || (exports.FieldDescriptorProto_Label = FieldDescriptorProto_Label = {}));
-function fieldDescriptorProto_LabelFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "LABEL_OPTIONAL":
-            return FieldDescriptorProto_Label.LABEL_OPTIONAL;
-        case 3:
-        case "LABEL_REPEATED":
-            return FieldDescriptorProto_Label.LABEL_REPEATED;
-        case 2:
-        case "LABEL_REQUIRED":
-            return FieldDescriptorProto_Label.LABEL_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-function fieldDescriptorProto_LabelToJSON(object) {
-    switch (object) {
-        case FieldDescriptorProto_Label.LABEL_OPTIONAL:
-            return "LABEL_OPTIONAL";
-        case FieldDescriptorProto_Label.LABEL_REPEATED:
-            return "LABEL_REPEATED";
-        case FieldDescriptorProto_Label.LABEL_REQUIRED:
-            return "LABEL_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label");
-    }
-}
-/** Generated classes can be optimized for speed or code size. */
-var FileOptions_OptimizeMode;
-(function (FileOptions_OptimizeMode) {
-    /** SPEED - Generate complete code for parsing, serialization, */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["SPEED"] = 1] = "SPEED";
-    /** CODE_SIZE - etc. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["CODE_SIZE"] = 2] = "CODE_SIZE";
-    /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */
-    FileOptions_OptimizeMode[FileOptions_OptimizeMode["LITE_RUNTIME"] = 3] = "LITE_RUNTIME";
-})(FileOptions_OptimizeMode || (exports.FileOptions_OptimizeMode = FileOptions_OptimizeMode = {}));
-function fileOptions_OptimizeModeFromJSON(object) {
-    switch (object) {
-        case 1:
-        case "SPEED":
-            return FileOptions_OptimizeMode.SPEED;
-        case 2:
-        case "CODE_SIZE":
-            return FileOptions_OptimizeMode.CODE_SIZE;
-        case 3:
-        case "LITE_RUNTIME":
-            return FileOptions_OptimizeMode.LITE_RUNTIME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-function fileOptions_OptimizeModeToJSON(object) {
-    switch (object) {
-        case FileOptions_OptimizeMode.SPEED:
-            return "SPEED";
-        case FileOptions_OptimizeMode.CODE_SIZE:
-            return "CODE_SIZE";
-        case FileOptions_OptimizeMode.LITE_RUNTIME:
-            return "LITE_RUNTIME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode");
-    }
-}
-var FieldOptions_CType;
-(function (FieldOptions_CType) {
-    /** STRING - Default mode. */
-    FieldOptions_CType[FieldOptions_CType["STRING"] = 0] = "STRING";
-    /**
-     * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type
-     * "bytes". It indicates that in C++, the data should be stored in a Cord
-     * instead of a string.  For very large strings, this may reduce memory
-     * fragmentation. It may also allow better performance when parsing from a
-     * Cord, or when parsing with aliasing enabled, as the parsed Cord may then
-     * alias the original buffer.
-     */
-    FieldOptions_CType[FieldOptions_CType["CORD"] = 1] = "CORD";
-    FieldOptions_CType[FieldOptions_CType["STRING_PIECE"] = 2] = "STRING_PIECE";
-})(FieldOptions_CType || (exports.FieldOptions_CType = FieldOptions_CType = {}));
-function fieldOptions_CTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "STRING":
-            return FieldOptions_CType.STRING;
-        case 1:
-        case "CORD":
-            return FieldOptions_CType.CORD;
-        case 2:
-        case "STRING_PIECE":
-            return FieldOptions_CType.STRING_PIECE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-function fieldOptions_CTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_CType.STRING:
-            return "STRING";
-        case FieldOptions_CType.CORD:
-            return "CORD";
-        case FieldOptions_CType.STRING_PIECE:
-            return "STRING_PIECE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType");
-    }
-}
-var FieldOptions_JSType;
-(function (FieldOptions_JSType) {
-    /** JS_NORMAL - Use the default type. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NORMAL"] = 0] = "JS_NORMAL";
-    /** JS_STRING - Use JavaScript strings. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_STRING"] = 1] = "JS_STRING";
-    /** JS_NUMBER - Use JavaScript numbers. */
-    FieldOptions_JSType[FieldOptions_JSType["JS_NUMBER"] = 2] = "JS_NUMBER";
-})(FieldOptions_JSType || (exports.FieldOptions_JSType = FieldOptions_JSType = {}));
-function fieldOptions_JSTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JS_NORMAL":
-            return FieldOptions_JSType.JS_NORMAL;
-        case 1:
-        case "JS_STRING":
-            return FieldOptions_JSType.JS_STRING;
-        case 2:
-        case "JS_NUMBER":
-            return FieldOptions_JSType.JS_NUMBER;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-function fieldOptions_JSTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_JSType.JS_NORMAL:
-            return "JS_NORMAL";
-        case FieldOptions_JSType.JS_STRING:
-            return "JS_STRING";
-        case FieldOptions_JSType.JS_NUMBER:
-            return "JS_NUMBER";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType");
-    }
-}
-/** If set to RETENTION_SOURCE, the option will be omitted from the binary. */
-var FieldOptions_OptionRetention;
-(function (FieldOptions_OptionRetention) {
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_UNKNOWN"] = 0] = "RETENTION_UNKNOWN";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_RUNTIME"] = 1] = "RETENTION_RUNTIME";
-    FieldOptions_OptionRetention[FieldOptions_OptionRetention["RETENTION_SOURCE"] = 2] = "RETENTION_SOURCE";
-})(FieldOptions_OptionRetention || (exports.FieldOptions_OptionRetention = FieldOptions_OptionRetention = {}));
-function fieldOptions_OptionRetentionFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "RETENTION_UNKNOWN":
-            return FieldOptions_OptionRetention.RETENTION_UNKNOWN;
-        case 1:
-        case "RETENTION_RUNTIME":
-            return FieldOptions_OptionRetention.RETENTION_RUNTIME;
-        case 2:
-        case "RETENTION_SOURCE":
-            return FieldOptions_OptionRetention.RETENTION_SOURCE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-function fieldOptions_OptionRetentionToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionRetention.RETENTION_UNKNOWN:
-            return "RETENTION_UNKNOWN";
-        case FieldOptions_OptionRetention.RETENTION_RUNTIME:
-            return "RETENTION_RUNTIME";
-        case FieldOptions_OptionRetention.RETENTION_SOURCE:
-            return "RETENTION_SOURCE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention");
-    }
-}
-/**
- * This indicates the types of entities that the field may apply to when used
- * as an option. If it is unset, then the field may be freely used as an
- * option on any kind of entity.
- */
-var FieldOptions_OptionTargetType;
-(function (FieldOptions_OptionTargetType) {
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_UNKNOWN"] = 0] = "TARGET_TYPE_UNKNOWN";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FILE"] = 1] = "TARGET_TYPE_FILE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_EXTENSION_RANGE"] = 2] = "TARGET_TYPE_EXTENSION_RANGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_MESSAGE"] = 3] = "TARGET_TYPE_MESSAGE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_FIELD"] = 4] = "TARGET_TYPE_FIELD";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ONEOF"] = 5] = "TARGET_TYPE_ONEOF";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM"] = 6] = "TARGET_TYPE_ENUM";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_ENUM_ENTRY"] = 7] = "TARGET_TYPE_ENUM_ENTRY";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_SERVICE"] = 8] = "TARGET_TYPE_SERVICE";
-    FieldOptions_OptionTargetType[FieldOptions_OptionTargetType["TARGET_TYPE_METHOD"] = 9] = "TARGET_TYPE_METHOD";
-})(FieldOptions_OptionTargetType || (exports.FieldOptions_OptionTargetType = FieldOptions_OptionTargetType = {}));
-function fieldOptions_OptionTargetTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "TARGET_TYPE_UNKNOWN":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN;
-        case 1:
-        case "TARGET_TYPE_FILE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FILE;
-        case 2:
-        case "TARGET_TYPE_EXTENSION_RANGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE;
-        case 3:
-        case "TARGET_TYPE_MESSAGE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE;
-        case 4:
-        case "TARGET_TYPE_FIELD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD;
-        case 5:
-        case "TARGET_TYPE_ONEOF":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF;
-        case 6:
-        case "TARGET_TYPE_ENUM":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM;
-        case 7:
-        case "TARGET_TYPE_ENUM_ENTRY":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY;
-        case 8:
-        case "TARGET_TYPE_SERVICE":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE;
-        case 9:
-        case "TARGET_TYPE_METHOD":
-            return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-function fieldOptions_OptionTargetTypeToJSON(object) {
-    switch (object) {
-        case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN:
-            return "TARGET_TYPE_UNKNOWN";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FILE:
-            return "TARGET_TYPE_FILE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE:
-            return "TARGET_TYPE_EXTENSION_RANGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE:
-            return "TARGET_TYPE_MESSAGE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD:
-            return "TARGET_TYPE_FIELD";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF:
-            return "TARGET_TYPE_ONEOF";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM:
-            return "TARGET_TYPE_ENUM";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY:
-            return "TARGET_TYPE_ENUM_ENTRY";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE:
-            return "TARGET_TYPE_SERVICE";
-        case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD:
-            return "TARGET_TYPE_METHOD";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType");
-    }
-}
-/**
- * Is this method side-effect-free (or safe in HTTP parlance), or idempotent,
- * or neither? HTTP based RPC implementation may choose GET verb for safe
- * methods, and PUT verb for idempotent methods instead of the default POST.
- */
-var MethodOptions_IdempotencyLevel;
-(function (MethodOptions_IdempotencyLevel) {
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENCY_UNKNOWN"] = 0] = "IDEMPOTENCY_UNKNOWN";
-    /** NO_SIDE_EFFECTS - implies idempotent */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["NO_SIDE_EFFECTS"] = 1] = "NO_SIDE_EFFECTS";
-    /** IDEMPOTENT - idempotent, but may have side effects */
-    MethodOptions_IdempotencyLevel[MethodOptions_IdempotencyLevel["IDEMPOTENT"] = 2] = "IDEMPOTENT";
-})(MethodOptions_IdempotencyLevel || (exports.MethodOptions_IdempotencyLevel = MethodOptions_IdempotencyLevel = {}));
-function methodOptions_IdempotencyLevelFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "IDEMPOTENCY_UNKNOWN":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN;
-        case 1:
-        case "NO_SIDE_EFFECTS":
-            return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS;
-        case 2:
-        case "IDEMPOTENT":
-            return MethodOptions_IdempotencyLevel.IDEMPOTENT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-function methodOptions_IdempotencyLevelToJSON(object) {
-    switch (object) {
-        case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN:
-            return "IDEMPOTENCY_UNKNOWN";
-        case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS:
-            return "NO_SIDE_EFFECTS";
-        case MethodOptions_IdempotencyLevel.IDEMPOTENT:
-            return "IDEMPOTENT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel");
-    }
-}
-var FeatureSet_FieldPresence;
-(function (FeatureSet_FieldPresence) {
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["FIELD_PRESENCE_UNKNOWN"] = 0] = "FIELD_PRESENCE_UNKNOWN";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["EXPLICIT"] = 1] = "EXPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["IMPLICIT"] = 2] = "IMPLICIT";
-    FeatureSet_FieldPresence[FeatureSet_FieldPresence["LEGACY_REQUIRED"] = 3] = "LEGACY_REQUIRED";
-})(FeatureSet_FieldPresence || (exports.FeatureSet_FieldPresence = FeatureSet_FieldPresence = {}));
-function featureSet_FieldPresenceFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "FIELD_PRESENCE_UNKNOWN":
-            return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN;
-        case 1:
-        case "EXPLICIT":
-            return FeatureSet_FieldPresence.EXPLICIT;
-        case 2:
-        case "IMPLICIT":
-            return FeatureSet_FieldPresence.IMPLICIT;
-        case 3:
-        case "LEGACY_REQUIRED":
-            return FeatureSet_FieldPresence.LEGACY_REQUIRED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-function featureSet_FieldPresenceToJSON(object) {
-    switch (object) {
-        case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN:
-            return "FIELD_PRESENCE_UNKNOWN";
-        case FeatureSet_FieldPresence.EXPLICIT:
-            return "EXPLICIT";
-        case FeatureSet_FieldPresence.IMPLICIT:
-            return "IMPLICIT";
-        case FeatureSet_FieldPresence.LEGACY_REQUIRED:
-            return "LEGACY_REQUIRED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence");
-    }
-}
-var FeatureSet_EnumType;
-(function (FeatureSet_EnumType) {
-    FeatureSet_EnumType[FeatureSet_EnumType["ENUM_TYPE_UNKNOWN"] = 0] = "ENUM_TYPE_UNKNOWN";
-    FeatureSet_EnumType[FeatureSet_EnumType["OPEN"] = 1] = "OPEN";
-    FeatureSet_EnumType[FeatureSet_EnumType["CLOSED"] = 2] = "CLOSED";
-})(FeatureSet_EnumType || (exports.FeatureSet_EnumType = FeatureSet_EnumType = {}));
-function featureSet_EnumTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENUM_TYPE_UNKNOWN":
-            return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN;
-        case 1:
-        case "OPEN":
-            return FeatureSet_EnumType.OPEN;
-        case 2:
-        case "CLOSED":
-            return FeatureSet_EnumType.CLOSED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-function featureSet_EnumTypeToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN:
-            return "ENUM_TYPE_UNKNOWN";
-        case FeatureSet_EnumType.OPEN:
-            return "OPEN";
-        case FeatureSet_EnumType.CLOSED:
-            return "CLOSED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType");
-    }
-}
-var FeatureSet_RepeatedFieldEncoding;
-(function (FeatureSet_RepeatedFieldEncoding) {
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["REPEATED_FIELD_ENCODING_UNKNOWN"] = 0] = "REPEATED_FIELD_ENCODING_UNKNOWN";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["PACKED"] = 1] = "PACKED";
-    FeatureSet_RepeatedFieldEncoding[FeatureSet_RepeatedFieldEncoding["EXPANDED"] = 2] = "EXPANDED";
-})(FeatureSet_RepeatedFieldEncoding || (exports.FeatureSet_RepeatedFieldEncoding = FeatureSet_RepeatedFieldEncoding = {}));
-function featureSet_RepeatedFieldEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "REPEATED_FIELD_ENCODING_UNKNOWN":
-            return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN;
-        case 1:
-        case "PACKED":
-            return FeatureSet_RepeatedFieldEncoding.PACKED;
-        case 2:
-        case "EXPANDED":
-            return FeatureSet_RepeatedFieldEncoding.EXPANDED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-function featureSet_RepeatedFieldEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN:
-            return "REPEATED_FIELD_ENCODING_UNKNOWN";
-        case FeatureSet_RepeatedFieldEncoding.PACKED:
-            return "PACKED";
-        case FeatureSet_RepeatedFieldEncoding.EXPANDED:
-            return "EXPANDED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding");
-    }
-}
-var FeatureSet_Utf8Validation;
-(function (FeatureSet_Utf8Validation) {
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["UTF8_VALIDATION_UNKNOWN"] = 0] = "UTF8_VALIDATION_UNKNOWN";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["VERIFY"] = 2] = "VERIFY";
-    FeatureSet_Utf8Validation[FeatureSet_Utf8Validation["NONE"] = 3] = "NONE";
-})(FeatureSet_Utf8Validation || (exports.FeatureSet_Utf8Validation = FeatureSet_Utf8Validation = {}));
-function featureSet_Utf8ValidationFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "UTF8_VALIDATION_UNKNOWN":
-            return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN;
-        case 2:
-        case "VERIFY":
-            return FeatureSet_Utf8Validation.VERIFY;
-        case 3:
-        case "NONE":
-            return FeatureSet_Utf8Validation.NONE;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-function featureSet_Utf8ValidationToJSON(object) {
-    switch (object) {
-        case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN:
-            return "UTF8_VALIDATION_UNKNOWN";
-        case FeatureSet_Utf8Validation.VERIFY:
-            return "VERIFY";
-        case FeatureSet_Utf8Validation.NONE:
-            return "NONE";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation");
-    }
-}
-var FeatureSet_MessageEncoding;
-(function (FeatureSet_MessageEncoding) {
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["MESSAGE_ENCODING_UNKNOWN"] = 0] = "MESSAGE_ENCODING_UNKNOWN";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["LENGTH_PREFIXED"] = 1] = "LENGTH_PREFIXED";
-    FeatureSet_MessageEncoding[FeatureSet_MessageEncoding["DELIMITED"] = 2] = "DELIMITED";
-})(FeatureSet_MessageEncoding || (exports.FeatureSet_MessageEncoding = FeatureSet_MessageEncoding = {}));
-function featureSet_MessageEncodingFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "MESSAGE_ENCODING_UNKNOWN":
-            return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN;
-        case 1:
-        case "LENGTH_PREFIXED":
-            return FeatureSet_MessageEncoding.LENGTH_PREFIXED;
-        case 2:
-        case "DELIMITED":
-            return FeatureSet_MessageEncoding.DELIMITED;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-function featureSet_MessageEncodingToJSON(object) {
-    switch (object) {
-        case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN:
-            return "MESSAGE_ENCODING_UNKNOWN";
-        case FeatureSet_MessageEncoding.LENGTH_PREFIXED:
-            return "LENGTH_PREFIXED";
-        case FeatureSet_MessageEncoding.DELIMITED:
-            return "DELIMITED";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding");
-    }
-}
-var FeatureSet_JsonFormat;
-(function (FeatureSet_JsonFormat) {
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["JSON_FORMAT_UNKNOWN"] = 0] = "JSON_FORMAT_UNKNOWN";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["ALLOW"] = 1] = "ALLOW";
-    FeatureSet_JsonFormat[FeatureSet_JsonFormat["LEGACY_BEST_EFFORT"] = 2] = "LEGACY_BEST_EFFORT";
-})(FeatureSet_JsonFormat || (exports.FeatureSet_JsonFormat = FeatureSet_JsonFormat = {}));
-function featureSet_JsonFormatFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "JSON_FORMAT_UNKNOWN":
-            return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN;
-        case 1:
-        case "ALLOW":
-            return FeatureSet_JsonFormat.ALLOW;
-        case 2:
-        case "LEGACY_BEST_EFFORT":
-            return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-function featureSet_JsonFormatToJSON(object) {
-    switch (object) {
-        case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN:
-            return "JSON_FORMAT_UNKNOWN";
-        case FeatureSet_JsonFormat.ALLOW:
-            return "ALLOW";
-        case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT:
-            return "LEGACY_BEST_EFFORT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat");
-    }
-}
-var FeatureSet_EnforceNamingStyle;
-(function (FeatureSet_EnforceNamingStyle) {
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["ENFORCE_NAMING_STYLE_UNKNOWN"] = 0] = "ENFORCE_NAMING_STYLE_UNKNOWN";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE2024"] = 1] = "STYLE2024";
-    FeatureSet_EnforceNamingStyle[FeatureSet_EnforceNamingStyle["STYLE_LEGACY"] = 2] = "STYLE_LEGACY";
-})(FeatureSet_EnforceNamingStyle || (exports.FeatureSet_EnforceNamingStyle = FeatureSet_EnforceNamingStyle = {}));
-function featureSet_EnforceNamingStyleFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "ENFORCE_NAMING_STYLE_UNKNOWN":
-            return FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN;
-        case 1:
-        case "STYLE2024":
-            return FeatureSet_EnforceNamingStyle.STYLE2024;
-        case 2:
-        case "STYLE_LEGACY":
-            return FeatureSet_EnforceNamingStyle.STYLE_LEGACY;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-function featureSet_EnforceNamingStyleToJSON(object) {
-    switch (object) {
-        case FeatureSet_EnforceNamingStyle.ENFORCE_NAMING_STYLE_UNKNOWN:
-            return "ENFORCE_NAMING_STYLE_UNKNOWN";
-        case FeatureSet_EnforceNamingStyle.STYLE2024:
-            return "STYLE2024";
-        case FeatureSet_EnforceNamingStyle.STYLE_LEGACY:
-            return "STYLE_LEGACY";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnforceNamingStyle");
-    }
-}
-/**
- * Represents the identified object's effect on the element in the original
- * .proto file.
- */
-var GeneratedCodeInfo_Annotation_Semantic;
-(function (GeneratedCodeInfo_Annotation_Semantic) {
-    /** NONE - There is no effect or the effect is indescribable. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["NONE"] = 0] = "NONE";
-    /** SET - The element is set or otherwise mutated. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["SET"] = 1] = "SET";
-    /** ALIAS - An alias to the element is returned. */
-    GeneratedCodeInfo_Annotation_Semantic[GeneratedCodeInfo_Annotation_Semantic["ALIAS"] = 2] = "ALIAS";
-})(GeneratedCodeInfo_Annotation_Semantic || (exports.GeneratedCodeInfo_Annotation_Semantic = GeneratedCodeInfo_Annotation_Semantic = {}));
-function generatedCodeInfo_Annotation_SemanticFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "NONE":
-            return GeneratedCodeInfo_Annotation_Semantic.NONE;
-        case 1:
-        case "SET":
-            return GeneratedCodeInfo_Annotation_Semantic.SET;
-        case 2:
-        case "ALIAS":
-            return GeneratedCodeInfo_Annotation_Semantic.ALIAS;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-function generatedCodeInfo_Annotation_SemanticToJSON(object) {
-    switch (object) {
-        case GeneratedCodeInfo_Annotation_Semantic.NONE:
-            return "NONE";
-        case GeneratedCodeInfo_Annotation_Semantic.SET:
-            return "SET";
-        case GeneratedCodeInfo_Annotation_Semantic.ALIAS:
-            return "ALIAS";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic");
-    }
-}
-exports.FileDescriptorSet = {
-    fromJSON(object) {
-        return {
-            file: globalThis.Array.isArray(object?.file) ? object.file.map((e) => exports.FileDescriptorProto.fromJSON(e)) : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.file?.length) {
-            obj.file = message.file.map((e) => exports.FileDescriptorProto.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FileDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            package: isSet(object.package) ? globalThis.String(object.package) : "",
-            dependency: globalThis.Array.isArray(object?.dependency)
-                ? object.dependency.map((e) => globalThis.String(e))
-                : [],
-            publicDependency: globalThis.Array.isArray(object?.publicDependency)
-                ? object.publicDependency.map((e) => globalThis.Number(e))
-                : [],
-            weakDependency: globalThis.Array.isArray(object?.weakDependency)
-                ? object.weakDependency.map((e) => globalThis.Number(e))
-                : [],
-            messageType: globalThis.Array.isArray(object?.messageType)
-                ? object.messageType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            service: globalThis.Array.isArray(object?.service)
-                ? object.service.map((e) => exports.ServiceDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.FileOptions.fromJSON(object.options) : undefined,
-            sourceCodeInfo: isSet(object.sourceCodeInfo) ? exports.SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined,
-            syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "",
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.package !== undefined && message.package !== "") {
-            obj.package = message.package;
-        }
-        if (message.dependency?.length) {
-            obj.dependency = message.dependency;
-        }
-        if (message.publicDependency?.length) {
-            obj.publicDependency = message.publicDependency.map((e) => Math.round(e));
-        }
-        if (message.weakDependency?.length) {
-            obj.weakDependency = message.weakDependency.map((e) => Math.round(e));
-        }
-        if (message.messageType?.length) {
-            obj.messageType = message.messageType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.service?.length) {
-            obj.service = message.service.map((e) => exports.ServiceDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FileOptions.toJSON(message.options);
-        }
-        if (message.sourceCodeInfo !== undefined) {
-            obj.sourceCodeInfo = exports.SourceCodeInfo.toJSON(message.sourceCodeInfo);
-        }
-        if (message.syntax !== undefined && message.syntax !== "") {
-            obj.syntax = message.syntax;
-        }
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            field: globalThis.Array.isArray(object?.field)
-                ? object.field.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            extension: globalThis.Array.isArray(object?.extension)
-                ? object.extension.map((e) => exports.FieldDescriptorProto.fromJSON(e))
-                : [],
-            nestedType: globalThis.Array.isArray(object?.nestedType)
-                ? object.nestedType.map((e) => exports.DescriptorProto.fromJSON(e))
-                : [],
-            enumType: globalThis.Array.isArray(object?.enumType)
-                ? object.enumType.map((e) => exports.EnumDescriptorProto.fromJSON(e))
-                : [],
-            extensionRange: globalThis.Array.isArray(object?.extensionRange)
-                ? object.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.fromJSON(e))
-                : [],
-            oneofDecl: globalThis.Array.isArray(object?.oneofDecl)
-                ? object.oneofDecl.map((e) => exports.OneofDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.MessageOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.field?.length) {
-            obj.field = message.field.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.extension?.length) {
-            obj.extension = message.extension.map((e) => exports.FieldDescriptorProto.toJSON(e));
-        }
-        if (message.nestedType?.length) {
-            obj.nestedType = message.nestedType.map((e) => exports.DescriptorProto.toJSON(e));
-        }
-        if (message.enumType?.length) {
-            obj.enumType = message.enumType.map((e) => exports.EnumDescriptorProto.toJSON(e));
-        }
-        if (message.extensionRange?.length) {
-            obj.extensionRange = message.extensionRange.map((e) => exports.DescriptorProto_ExtensionRange.toJSON(e));
-        }
-        if (message.oneofDecl?.length) {
-            obj.oneofDecl = message.oneofDecl.map((e) => exports.OneofDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MessageOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.DescriptorProto_ReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ExtensionRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            options: isSet(object.options) ? exports.ExtensionRangeOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ExtensionRangeOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.DescriptorProto_ReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions = {
-    fromJSON(object) {
-        return {
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-            declaration: globalThis.Array.isArray(object?.declaration)
-                ? object.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            verification: isSet(object.verification)
-                ? extensionRangeOptions_VerificationStateFromJSON(object.verification)
-                : 1,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        if (message.declaration?.length) {
-            obj.declaration = message.declaration.map((e) => exports.ExtensionRangeOptions_Declaration.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.verification !== undefined && message.verification !== 1) {
-            obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification);
-        }
-        return obj;
-    },
-};
-exports.ExtensionRangeOptions_Declaration = {
-    fromJSON(object) {
-        return {
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "",
-            type: isSet(object.type) ? globalThis.String(object.type) : "",
-            reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false,
-            repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.fullName !== undefined && message.fullName !== "") {
-            obj.fullName = message.fullName;
-        }
-        if (message.type !== undefined && message.type !== "") {
-            obj.type = message.type;
-        }
-        if (message.reserved !== undefined && message.reserved !== false) {
-            obj.reserved = message.reserved;
-        }
-        if (message.repeated !== undefined && message.repeated !== false) {
-            obj.repeated = message.repeated;
-        }
-        return obj;
-    },
-};
-exports.FieldDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1,
-            type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1,
-            typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "",
-            extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "",
-            defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "",
-            oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0,
-            jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "",
-            options: isSet(object.options) ? exports.FieldOptions.fromJSON(object.options) : undefined,
-            proto3Optional: isSet(object.proto3Optional) ? globalThis.Boolean(object.proto3Optional) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.label !== undefined && message.label !== 1) {
-            obj.label = fieldDescriptorProto_LabelToJSON(message.label);
-        }
-        if (message.type !== undefined && message.type !== 1) {
-            obj.type = fieldDescriptorProto_TypeToJSON(message.type);
-        }
-        if (message.typeName !== undefined && message.typeName !== "") {
-            obj.typeName = message.typeName;
-        }
-        if (message.extendee !== undefined && message.extendee !== "") {
-            obj.extendee = message.extendee;
-        }
-        if (message.defaultValue !== undefined && message.defaultValue !== "") {
-            obj.defaultValue = message.defaultValue;
-        }
-        if (message.oneofIndex !== undefined && message.oneofIndex !== 0) {
-            obj.oneofIndex = Math.round(message.oneofIndex);
-        }
-        if (message.jsonName !== undefined && message.jsonName !== "") {
-            obj.jsonName = message.jsonName;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.FieldOptions.toJSON(message.options);
-        }
-        if (message.proto3Optional !== undefined && message.proto3Optional !== false) {
-            obj.proto3Optional = message.proto3Optional;
-        }
-        return obj;
-    },
-};
-exports.OneofDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            options: isSet(object.options) ? exports.OneofOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.OneofOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            value: globalThis.Array.isArray(object?.value)
-                ? object.value.map((e) => exports.EnumValueDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.EnumOptions.fromJSON(object.options) : undefined,
-            reservedRange: globalThis.Array.isArray(object?.reservedRange)
-                ? object.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.fromJSON(e))
-                : [],
-            reservedName: globalThis.Array.isArray(object?.reservedName)
-                ? object.reservedName.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.value?.length) {
-            obj.value = message.value.map((e) => exports.EnumValueDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumOptions.toJSON(message.options);
-        }
-        if (message.reservedRange?.length) {
-            obj.reservedRange = message.reservedRange.map((e) => exports.EnumDescriptorProto_EnumReservedRange.toJSON(e));
-        }
-        if (message.reservedName?.length) {
-            obj.reservedName = message.reservedName;
-        }
-        return obj;
-    },
-};
-exports.EnumDescriptorProto_EnumReservedRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? globalThis.Number(object.start) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined && message.start !== 0) {
-            obj.start = Math.round(message.start);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        return obj;
-    },
-};
-exports.EnumValueDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            number: isSet(object.number) ? globalThis.Number(object.number) : 0,
-            options: isSet(object.options) ? exports.EnumValueOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.number !== undefined && message.number !== 0) {
-            obj.number = Math.round(message.number);
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.EnumValueOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.ServiceDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            method: globalThis.Array.isArray(object?.method)
-                ? object.method.map((e) => exports.MethodDescriptorProto.fromJSON(e))
-                : [],
-            options: isSet(object.options) ? exports.ServiceOptions.fromJSON(object.options) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.method?.length) {
-            obj.method = message.method.map((e) => exports.MethodDescriptorProto.toJSON(e));
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.ServiceOptions.toJSON(message.options);
-        }
-        return obj;
-    },
-};
-exports.MethodDescriptorProto = {
-    fromJSON(object) {
-        return {
-            name: isSet(object.name) ? globalThis.String(object.name) : "",
-            inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "",
-            outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "",
-            options: isSet(object.options) ? exports.MethodOptions.fromJSON(object.options) : undefined,
-            clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false,
-            serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name !== undefined && message.name !== "") {
-            obj.name = message.name;
-        }
-        if (message.inputType !== undefined && message.inputType !== "") {
-            obj.inputType = message.inputType;
-        }
-        if (message.outputType !== undefined && message.outputType !== "") {
-            obj.outputType = message.outputType;
-        }
-        if (message.options !== undefined) {
-            obj.options = exports.MethodOptions.toJSON(message.options);
-        }
-        if (message.clientStreaming !== undefined && message.clientStreaming !== false) {
-            obj.clientStreaming = message.clientStreaming;
-        }
-        if (message.serverStreaming !== undefined && message.serverStreaming !== false) {
-            obj.serverStreaming = message.serverStreaming;
-        }
-        return obj;
-    },
-};
-exports.FileOptions = {
-    fromJSON(object) {
-        return {
-            javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "",
-            javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "",
-            javaMultipleFiles: isSet(object.javaMultipleFiles) ? globalThis.Boolean(object.javaMultipleFiles) : false,
-            javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash)
-                ? globalThis.Boolean(object.javaGenerateEqualsAndHash)
-                : false,
-            javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false,
-            optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1,
-            goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "",
-            ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false,
-            javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false,
-            pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true,
-            objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "",
-            csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "",
-            swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "",
-            phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "",
-            phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "",
-            phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "",
-            rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "",
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.javaPackage !== undefined && message.javaPackage !== "") {
-            obj.javaPackage = message.javaPackage;
-        }
-        if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") {
-            obj.javaOuterClassname = message.javaOuterClassname;
-        }
-        if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) {
-            obj.javaMultipleFiles = message.javaMultipleFiles;
-        }
-        if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) {
-            obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash;
-        }
-        if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) {
-            obj.javaStringCheckUtf8 = message.javaStringCheckUtf8;
-        }
-        if (message.optimizeFor !== undefined && message.optimizeFor !== 1) {
-            obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor);
-        }
-        if (message.goPackage !== undefined && message.goPackage !== "") {
-            obj.goPackage = message.goPackage;
-        }
-        if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) {
-            obj.ccGenericServices = message.ccGenericServices;
-        }
-        if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) {
-            obj.javaGenericServices = message.javaGenericServices;
-        }
-        if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) {
-            obj.pyGenericServices = message.pyGenericServices;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) {
-            obj.ccEnableArenas = message.ccEnableArenas;
-        }
-        if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") {
-            obj.objcClassPrefix = message.objcClassPrefix;
-        }
-        if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") {
-            obj.csharpNamespace = message.csharpNamespace;
-        }
-        if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") {
-            obj.swiftPrefix = message.swiftPrefix;
-        }
-        if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") {
-            obj.phpClassPrefix = message.phpClassPrefix;
-        }
-        if (message.phpNamespace !== undefined && message.phpNamespace !== "") {
-            obj.phpNamespace = message.phpNamespace;
-        }
-        if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") {
-            obj.phpMetadataNamespace = message.phpMetadataNamespace;
-        }
-        if (message.rubyPackage !== undefined && message.rubyPackage !== "") {
-            obj.rubyPackage = message.rubyPackage;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MessageOptions = {
-    fromJSON(object) {
-        return {
-            messageSetWireFormat: isSet(object.messageSetWireFormat)
-                ? globalThis.Boolean(object.messageSetWireFormat)
-                : false,
-            noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor)
-                ? globalThis.Boolean(object.noStandardDescriptorAccessor)
-                : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) {
-            obj.messageSetWireFormat = message.messageSetWireFormat;
-        }
-        if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) {
-            obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.mapEntry !== undefined && message.mapEntry !== false) {
-            obj.mapEntry = message.mapEntry;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions = {
-    fromJSON(object) {
-        return {
-            ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0,
-            packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false,
-            jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0,
-            lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false,
-            unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0,
-            targets: globalThis.Array.isArray(object?.targets)
-                ? object.targets.map((e) => fieldOptions_OptionTargetTypeFromJSON(e))
-                : [],
-            editionDefaults: globalThis.Array.isArray(object?.editionDefaults)
-                ? object.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.fromJSON(e))
-                : [],
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.ctype !== undefined && message.ctype !== 0) {
-            obj.ctype = fieldOptions_CTypeToJSON(message.ctype);
-        }
-        if (message.packed !== undefined && message.packed !== false) {
-            obj.packed = message.packed;
-        }
-        if (message.jstype !== undefined && message.jstype !== 0) {
-            obj.jstype = fieldOptions_JSTypeToJSON(message.jstype);
-        }
-        if (message.lazy !== undefined && message.lazy !== false) {
-            obj.lazy = message.lazy;
-        }
-        if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) {
-            obj.unverifiedLazy = message.unverifiedLazy;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.weak !== undefined && message.weak !== false) {
-            obj.weak = message.weak;
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.retention !== undefined && message.retention !== 0) {
-            obj.retention = fieldOptions_OptionRetentionToJSON(message.retention);
-        }
-        if (message.targets?.length) {
-            obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e));
-        }
-        if (message.editionDefaults?.length) {
-            obj.editionDefaults = message.editionDefaults.map((e) => exports.FieldOptions_EditionDefault.toJSON(e));
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_EditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            value: isSet(object.value) ? globalThis.String(object.value) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.value !== undefined && message.value !== "") {
-            obj.value = message.value;
-        }
-        return obj;
-    },
-};
-exports.FieldOptions_FeatureSupport = {
-    fromJSON(object) {
-        return {
-            editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0,
-            editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0,
-            deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "",
-            editionRemoved: isSet(object.editionRemoved) ? editionFromJSON(object.editionRemoved) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) {
-            obj.editionIntroduced = editionToJSON(message.editionIntroduced);
-        }
-        if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) {
-            obj.editionDeprecated = editionToJSON(message.editionDeprecated);
-        }
-        if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") {
-            obj.deprecationWarning = message.deprecationWarning;
-        }
-        if (message.editionRemoved !== undefined && message.editionRemoved !== 0) {
-            obj.editionRemoved = editionToJSON(message.editionRemoved);
-        }
-        return obj;
-    },
-};
-exports.OneofOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumOptions = {
-    fromJSON(object) {
-        return {
-            allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts)
-                ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts)
-                : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.allowAlias !== undefined && message.allowAlias !== false) {
-            obj.allowAlias = message.allowAlias;
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false) {
-            obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.EnumValueOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false,
-            featureSupport: isSet(object.featureSupport)
-                ? exports.FieldOptions_FeatureSupport.fromJSON(object.featureSupport)
-                : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.debugRedact !== undefined && message.debugRedact !== false) {
-            obj.debugRedact = message.debugRedact;
-        }
-        if (message.featureSupport !== undefined) {
-            obj.featureSupport = exports.FieldOptions_FeatureSupport.toJSON(message.featureSupport);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ServiceOptions = {
-    fromJSON(object) {
-        return {
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.MethodOptions = {
-    fromJSON(object) {
-        return {
-            deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false,
-            idempotencyLevel: isSet(object.idempotencyLevel)
-                ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel)
-                : 0,
-            features: isSet(object.features) ? exports.FeatureSet.fromJSON(object.features) : undefined,
-            uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption)
-                ? object.uninterpretedOption.map((e) => exports.UninterpretedOption.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.deprecated !== undefined && message.deprecated !== false) {
-            obj.deprecated = message.deprecated;
-        }
-        if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) {
-            obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel);
-        }
-        if (message.features !== undefined) {
-            obj.features = exports.FeatureSet.toJSON(message.features);
-        }
-        if (message.uninterpretedOption?.length) {
-            obj.uninterpretedOption = message.uninterpretedOption.map((e) => exports.UninterpretedOption.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption = {
-    fromJSON(object) {
-        return {
-            name: globalThis.Array.isArray(object?.name)
-                ? object.name.map((e) => exports.UninterpretedOption_NamePart.fromJSON(e))
-                : [],
-            identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "",
-            positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0",
-            negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0",
-            doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0,
-            stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0),
-            aggregateValue: isSet(object.aggregateValue) ? globalThis.String(object.aggregateValue) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.name?.length) {
-            obj.name = message.name.map((e) => exports.UninterpretedOption_NamePart.toJSON(e));
-        }
-        if (message.identifierValue !== undefined && message.identifierValue !== "") {
-            obj.identifierValue = message.identifierValue;
-        }
-        if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") {
-            obj.positiveIntValue = message.positiveIntValue;
-        }
-        if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") {
-            obj.negativeIntValue = message.negativeIntValue;
-        }
-        if (message.doubleValue !== undefined && message.doubleValue !== 0) {
-            obj.doubleValue = message.doubleValue;
-        }
-        if (message.stringValue !== undefined && message.stringValue.length !== 0) {
-            obj.stringValue = base64FromBytes(message.stringValue);
-        }
-        if (message.aggregateValue !== undefined && message.aggregateValue !== "") {
-            obj.aggregateValue = message.aggregateValue;
-        }
-        return obj;
-    },
-};
-exports.UninterpretedOption_NamePart = {
-    fromJSON(object) {
-        return {
-            namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "",
-            isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.namePart !== "") {
-            obj.namePart = message.namePart;
-        }
-        if (message.isExtension !== false) {
-            obj.isExtension = message.isExtension;
-        }
-        return obj;
-    },
-};
-exports.FeatureSet = {
-    fromJSON(object) {
-        return {
-            fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0,
-            enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0,
-            repeatedFieldEncoding: isSet(object.repeatedFieldEncoding)
-                ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding)
-                : 0,
-            utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0,
-            messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0,
-            jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0,
-            enforceNamingStyle: isSet(object.enforceNamingStyle)
-                ? featureSet_EnforceNamingStyleFromJSON(object.enforceNamingStyle)
-                : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.fieldPresence !== undefined && message.fieldPresence !== 0) {
-            obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence);
-        }
-        if (message.enumType !== undefined && message.enumType !== 0) {
-            obj.enumType = featureSet_EnumTypeToJSON(message.enumType);
-        }
-        if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) {
-            obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding);
-        }
-        if (message.utf8Validation !== undefined && message.utf8Validation !== 0) {
-            obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation);
-        }
-        if (message.messageEncoding !== undefined && message.messageEncoding !== 0) {
-            obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding);
-        }
-        if (message.jsonFormat !== undefined && message.jsonFormat !== 0) {
-            obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat);
-        }
-        if (message.enforceNamingStyle !== undefined && message.enforceNamingStyle !== 0) {
-            obj.enforceNamingStyle = featureSet_EnforceNamingStyleToJSON(message.enforceNamingStyle);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults = {
-    fromJSON(object) {
-        return {
-            defaults: globalThis.Array.isArray(object?.defaults)
-                ? object.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e))
-                : [],
-            minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0,
-            maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.defaults?.length) {
-            obj.defaults = message.defaults.map((e) => exports.FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e));
-        }
-        if (message.minimumEdition !== undefined && message.minimumEdition !== 0) {
-            obj.minimumEdition = editionToJSON(message.minimumEdition);
-        }
-        if (message.maximumEdition !== undefined && message.maximumEdition !== 0) {
-            obj.maximumEdition = editionToJSON(message.maximumEdition);
-        }
-        return obj;
-    },
-};
-exports.FeatureSetDefaults_FeatureSetEditionDefault = {
-    fromJSON(object) {
-        return {
-            edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0,
-            overridableFeatures: isSet(object.overridableFeatures)
-                ? exports.FeatureSet.fromJSON(object.overridableFeatures)
-                : undefined,
-            fixedFeatures: isSet(object.fixedFeatures) ? exports.FeatureSet.fromJSON(object.fixedFeatures) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.edition !== undefined && message.edition !== 0) {
-            obj.edition = editionToJSON(message.edition);
-        }
-        if (message.overridableFeatures !== undefined) {
-            obj.overridableFeatures = exports.FeatureSet.toJSON(message.overridableFeatures);
-        }
-        if (message.fixedFeatures !== undefined) {
-            obj.fixedFeatures = exports.FeatureSet.toJSON(message.fixedFeatures);
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo = {
-    fromJSON(object) {
-        return {
-            location: globalThis.Array.isArray(object?.location)
-                ? object.location.map((e) => exports.SourceCodeInfo_Location.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.location?.length) {
-            obj.location = message.location.map((e) => exports.SourceCodeInfo_Location.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SourceCodeInfo_Location = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            span: globalThis.Array.isArray(object?.span) ? object.span.map((e) => globalThis.Number(e)) : [],
-            leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "",
-            trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "",
-            leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments)
-                ? object.leadingDetachedComments.map((e) => globalThis.String(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.span?.length) {
-            obj.span = message.span.map((e) => Math.round(e));
-        }
-        if (message.leadingComments !== undefined && message.leadingComments !== "") {
-            obj.leadingComments = message.leadingComments;
-        }
-        if (message.trailingComments !== undefined && message.trailingComments !== "") {
-            obj.trailingComments = message.trailingComments;
-        }
-        if (message.leadingDetachedComments?.length) {
-            obj.leadingDetachedComments = message.leadingDetachedComments;
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo = {
-    fromJSON(object) {
-        return {
-            annotation: globalThis.Array.isArray(object?.annotation)
-                ? object.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.annotation?.length) {
-            obj.annotation = message.annotation.map((e) => exports.GeneratedCodeInfo_Annotation.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.GeneratedCodeInfo_Annotation = {
-    fromJSON(object) {
-        return {
-            path: globalThis.Array.isArray(object?.path)
-                ? object.path.map((e) => globalThis.Number(e))
-                : [],
-            sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "",
-            begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0,
-            end: isSet(object.end) ? globalThis.Number(object.end) : 0,
-            semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.path?.length) {
-            obj.path = message.path.map((e) => Math.round(e));
-        }
-        if (message.sourceFile !== undefined && message.sourceFile !== "") {
-            obj.sourceFile = message.sourceFile;
-        }
-        if (message.begin !== undefined && message.begin !== 0) {
-            obj.begin = Math.round(message.begin);
-        }
-        if (message.end !== undefined && message.end !== 0) {
-            obj.end = Math.round(message.end);
-        }
-        if (message.semantic !== undefined && message.semantic !== 0) {
-            obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
deleted file mode 100644
index 9d24cbba10de9..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/google/protobuf/timestamp.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: google/protobuf/timestamp.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = void 0;
-exports.Timestamp = {
-    fromJSON(object) {
-        return {
-            seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0",
-            nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.seconds !== "0") {
-            obj.seconds = message.seconds;
-        }
-        if (message.nanos !== 0) {
-            obj.nanos = Math.round(message.nanos);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
deleted file mode 100644
index abc766bed3b88..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/dsse.js
+++ /dev/null
@@ -1,55 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/dsse.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DSSELogEntryV002 = exports.DSSERequestV002 = void 0;
-/* eslint-disable */
-const envelope_1 = require("../../envelope");
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.DSSERequestV002 = {
-    fromJSON(object) {
-        return {
-            envelope: isSet(object.envelope) ? envelope_1.Envelope.fromJSON(object.envelope) : undefined,
-            verifiers: globalThis.Array.isArray(object?.verifiers)
-                ? object.verifiers.map((e) => verifier_1.Verifier.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== undefined) {
-            obj.envelope = envelope_1.Envelope.toJSON(message.envelope);
-        }
-        if (message.verifiers?.length) {
-            obj.verifiers = message.verifiers.map((e) => verifier_1.Verifier.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.DSSELogEntryV002 = {
-    fromJSON(object) {
-        return {
-            payloadHash: isSet(object.payloadHash) ? sigstore_common_1.HashOutput.fromJSON(object.payloadHash) : undefined,
-            signatures: globalThis.Array.isArray(object?.signatures)
-                ? object.signatures.map((e) => verifier_1.Signature.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.payloadHash !== undefined) {
-            obj.payloadHash = sigstore_common_1.HashOutput.toJSON(message.payloadHash);
-        }
-        if (message.signatures?.length) {
-            obj.signatures = message.signatures.map((e) => verifier_1.Signature.toJSON(e));
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
deleted file mode 100644
index c5eccb10e0a68..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/entry.js
+++ /dev/null
@@ -1,81 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/entry.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CreateEntryRequest = exports.Spec = exports.Entry = void 0;
-/* eslint-disable */
-const dsse_1 = require("./dsse");
-const hashedrekord_1 = require("./hashedrekord");
-exports.Entry = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            apiVersion: isSet(object.apiVersion) ? globalThis.String(object.apiVersion) : "",
-            spec: isSet(object.spec) ? exports.Spec.fromJSON(object.spec) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.apiVersion !== "") {
-            obj.apiVersion = message.apiVersion;
-        }
-        if (message.spec !== undefined) {
-            obj.spec = exports.Spec.toJSON(message.spec);
-        }
-        return obj;
-    },
-};
-exports.Spec = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordV002)
-                ? { $case: "hashedRekordV002", hashedRekordV002: hashedrekord_1.HashedRekordLogEntryV002.fromJSON(object.hashedRekordV002) }
-                : isSet(object.dsseV002)
-                    ? { $case: "dsseV002", dsseV002: dsse_1.DSSELogEntryV002.fromJSON(object.dsseV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordV002") {
-            obj.hashedRekordV002 = hashedrekord_1.HashedRekordLogEntryV002.toJSON(message.spec.hashedRekordV002);
-        }
-        else if (message.spec?.$case === "dsseV002") {
-            obj.dsseV002 = dsse_1.DSSELogEntryV002.toJSON(message.spec.dsseV002);
-        }
-        return obj;
-    },
-};
-exports.CreateEntryRequest = {
-    fromJSON(object) {
-        return {
-            spec: isSet(object.hashedRekordRequestV002)
-                ? {
-                    $case: "hashedRekordRequestV002",
-                    hashedRekordRequestV002: hashedrekord_1.HashedRekordRequestV002.fromJSON(object.hashedRekordRequestV002),
-                }
-                : isSet(object.dsseRequestV002)
-                    ? { $case: "dsseRequestV002", dsseRequestV002: dsse_1.DSSERequestV002.fromJSON(object.dsseRequestV002) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.spec?.$case === "hashedRekordRequestV002") {
-            obj.hashedRekordRequestV002 = hashedrekord_1.HashedRekordRequestV002.toJSON(message.spec.hashedRekordRequestV002);
-        }
-        else if (message.spec?.$case === "dsseRequestV002") {
-            obj.dsseRequestV002 = dsse_1.DSSERequestV002.toJSON(message.spec.dsseRequestV002);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
deleted file mode 100644
index d3fd1af2483d1..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/hashedrekord.js
+++ /dev/null
@@ -1,56 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/hashedrekord.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.HashedRekordLogEntryV002 = exports.HashedRekordRequestV002 = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-const verifier_1 = require("./verifier");
-exports.HashedRekordRequestV002 = {
-    fromJSON(object) {
-        return {
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-exports.HashedRekordLogEntryV002 = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.data) ? sigstore_common_1.HashOutput.fromJSON(object.data) : undefined,
-            signature: isSet(object.signature) ? verifier_1.Signature.fromJSON(object.signature) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data !== undefined) {
-            obj.data = sigstore_common_1.HashOutput.toJSON(message.data);
-        }
-        if (message.signature !== undefined) {
-            obj.signature = verifier_1.Signature.toJSON(message.signature);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
deleted file mode 100644
index c437d5053a3cb..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/rekor/v2/verifier.js
+++ /dev/null
@@ -1,74 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: rekor/v2/verifier.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Signature = exports.Verifier = exports.PublicKey = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("../../sigstore_common");
-exports.PublicKey = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.Verifier = {
-    fromJSON(object) {
-        return {
-            verifier: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: exports.PublicKey.fromJSON(object.publicKey) }
-                : isSet(object.x509Certificate)
-                    ? { $case: "x509Certificate", x509Certificate: sigstore_common_1.X509Certificate.fromJSON(object.x509Certificate) }
-                    : undefined,
-            keyDetails: isSet(object.keyDetails) ? (0, sigstore_common_1.publicKeyDetailsFromJSON)(object.keyDetails) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.verifier?.$case === "publicKey") {
-            obj.publicKey = exports.PublicKey.toJSON(message.verifier.publicKey);
-        }
-        else if (message.verifier?.$case === "x509Certificate") {
-            obj.x509Certificate = sigstore_common_1.X509Certificate.toJSON(message.verifier.x509Certificate);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = (0, sigstore_common_1.publicKeyDetailsToJSON)(message.keyDetails);
-        }
-        return obj;
-    },
-};
-exports.Signature = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.content) ? Buffer.from(bytesFromBase64(object.content)) : Buffer.alloc(0),
-            verifier: isSet(object.verifier) ? exports.Verifier.fromJSON(object.verifier) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content.length !== 0) {
-            obj.content = base64FromBytes(message.content);
-        }
-        if (message.verifier !== undefined) {
-            obj.verifier = exports.Verifier.toJSON(message.verifier);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
deleted file mode 100644
index aed636f00e7cf..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_bundle.js
+++ /dev/null
@@ -1,103 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_bundle.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Bundle = exports.VerificationMaterial = exports.TimestampVerificationData = void 0;
-/* eslint-disable */
-const envelope_1 = require("./envelope");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_rekor_1 = require("./sigstore_rekor");
-exports.TimestampVerificationData = {
-    fromJSON(object) {
-        return {
-            rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps)
-                ? object.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rfc3161Timestamps?.length) {
-            obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => sigstore_common_1.RFC3161SignedTimestamp.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.VerificationMaterial = {
-    fromJSON(object) {
-        return {
-            content: isSet(object.publicKey)
-                ? { $case: "publicKey", publicKey: sigstore_common_1.PublicKeyIdentifier.fromJSON(object.publicKey) }
-                : isSet(object.x509CertificateChain)
-                    ? {
-                        $case: "x509CertificateChain",
-                        x509CertificateChain: sigstore_common_1.X509CertificateChain.fromJSON(object.x509CertificateChain),
-                    }
-                    : isSet(object.certificate)
-                        ? { $case: "certificate", certificate: sigstore_common_1.X509Certificate.fromJSON(object.certificate) }
-                        : undefined,
-            tlogEntries: globalThis.Array.isArray(object?.tlogEntries)
-                ? object.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.fromJSON(e))
-                : [],
-            timestampVerificationData: isSet(object.timestampVerificationData)
-                ? exports.TimestampVerificationData.fromJSON(object.timestampVerificationData)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.content?.$case === "publicKey") {
-            obj.publicKey = sigstore_common_1.PublicKeyIdentifier.toJSON(message.content.publicKey);
-        }
-        else if (message.content?.$case === "x509CertificateChain") {
-            obj.x509CertificateChain = sigstore_common_1.X509CertificateChain.toJSON(message.content.x509CertificateChain);
-        }
-        else if (message.content?.$case === "certificate") {
-            obj.certificate = sigstore_common_1.X509Certificate.toJSON(message.content.certificate);
-        }
-        if (message.tlogEntries?.length) {
-            obj.tlogEntries = message.tlogEntries.map((e) => sigstore_rekor_1.TransparencyLogEntry.toJSON(e));
-        }
-        if (message.timestampVerificationData !== undefined) {
-            obj.timestampVerificationData = exports.TimestampVerificationData.toJSON(message.timestampVerificationData);
-        }
-        return obj;
-    },
-};
-exports.Bundle = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            verificationMaterial: isSet(object.verificationMaterial)
-                ? exports.VerificationMaterial.fromJSON(object.verificationMaterial)
-                : undefined,
-            content: isSet(object.messageSignature)
-                ? { $case: "messageSignature", messageSignature: sigstore_common_1.MessageSignature.fromJSON(object.messageSignature) }
-                : isSet(object.dsseEnvelope)
-                    ? { $case: "dsseEnvelope", dsseEnvelope: envelope_1.Envelope.fromJSON(object.dsseEnvelope) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.verificationMaterial !== undefined) {
-            obj.verificationMaterial = exports.VerificationMaterial.toJSON(message.verificationMaterial);
-        }
-        if (message.content?.$case === "messageSignature") {
-            obj.messageSignature = sigstore_common_1.MessageSignature.toJSON(message.content.messageSignature);
-        }
-        else if (message.content?.$case === "dsseEnvelope") {
-            obj.dsseEnvelope = envelope_1.Envelope.toJSON(message.content.dsseEnvelope);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
deleted file mode 100644
index b900516ed3b55..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_common.js
+++ /dev/null
@@ -1,596 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_common.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TimeRange = exports.X509CertificateChain = exports.SubjectAlternativeName = exports.X509Certificate = exports.DistinguishedName = exports.ObjectIdentifierValuePair = exports.ObjectIdentifier = exports.PublicKeyIdentifier = exports.PublicKey = exports.RFC3161SignedTimestamp = exports.LogId = exports.MessageSignature = exports.HashOutput = exports.SubjectAlternativeNameType = exports.PublicKeyDetails = exports.HashAlgorithm = void 0;
-exports.hashAlgorithmFromJSON = hashAlgorithmFromJSON;
-exports.hashAlgorithmToJSON = hashAlgorithmToJSON;
-exports.publicKeyDetailsFromJSON = publicKeyDetailsFromJSON;
-exports.publicKeyDetailsToJSON = publicKeyDetailsToJSON;
-exports.subjectAlternativeNameTypeFromJSON = subjectAlternativeNameTypeFromJSON;
-exports.subjectAlternativeNameTypeToJSON = subjectAlternativeNameTypeToJSON;
-/* eslint-disable */
-const timestamp_1 = require("./google/protobuf/timestamp");
-/**
- * Only a subset of the secure hash standard algorithms are supported.
- * See  for more
- * details.
- * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force
- * any proto JSON serialization to emit the used hash algorithm, as default
- * option is to *omit* the default value of an enum (which is the first
- * value, represented by '0'.
- */
-var HashAlgorithm;
-(function (HashAlgorithm) {
-    HashAlgorithm[HashAlgorithm["HASH_ALGORITHM_UNSPECIFIED"] = 0] = "HASH_ALGORITHM_UNSPECIFIED";
-    HashAlgorithm[HashAlgorithm["SHA2_256"] = 1] = "SHA2_256";
-    HashAlgorithm[HashAlgorithm["SHA2_384"] = 2] = "SHA2_384";
-    HashAlgorithm[HashAlgorithm["SHA2_512"] = 3] = "SHA2_512";
-    HashAlgorithm[HashAlgorithm["SHA3_256"] = 4] = "SHA3_256";
-    HashAlgorithm[HashAlgorithm["SHA3_384"] = 5] = "SHA3_384";
-})(HashAlgorithm || (exports.HashAlgorithm = HashAlgorithm = {}));
-function hashAlgorithmFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "HASH_ALGORITHM_UNSPECIFIED":
-            return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED;
-        case 1:
-        case "SHA2_256":
-            return HashAlgorithm.SHA2_256;
-        case 2:
-        case "SHA2_384":
-            return HashAlgorithm.SHA2_384;
-        case 3:
-        case "SHA2_512":
-            return HashAlgorithm.SHA2_512;
-        case 4:
-        case "SHA3_256":
-            return HashAlgorithm.SHA3_256;
-        case 5:
-        case "SHA3_384":
-            return HashAlgorithm.SHA3_384;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-function hashAlgorithmToJSON(object) {
-    switch (object) {
-        case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED:
-            return "HASH_ALGORITHM_UNSPECIFIED";
-        case HashAlgorithm.SHA2_256:
-            return "SHA2_256";
-        case HashAlgorithm.SHA2_384:
-            return "SHA2_384";
-        case HashAlgorithm.SHA2_512:
-            return "SHA2_512";
-        case HashAlgorithm.SHA3_256:
-            return "SHA3_256";
-        case HashAlgorithm.SHA3_384:
-            return "SHA3_384";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm");
-    }
-}
-/**
- * Details of a specific public key, capturing the the key encoding method,
- * and signature algorithm.
- *
- * PublicKeyDetails captures the public key/hash algorithm combinations
- * recommended in the Sigstore ecosystem.
- *
- * This is modelled as a linear set as we want to provide a small number of
- * opinionated options instead of allowing every possible permutation.
- *
- * Any changes to this enum MUST be reflected in the algorithm registry.
- *
- * See: 
- *
- * To avoid the possibility of contradicting formats such as PKCS1 with
- * ED25519 the valid permutations are listed as a linear set instead of a
- * cartesian set (i.e one combined variable instead of two, one for encoding
- * and one for the signature algorithm).
- */
-var PublicKeyDetails;
-(function (PublicKeyDetails) {
-    PublicKeyDetails[PublicKeyDetails["PUBLIC_KEY_DETAILS_UNSPECIFIED"] = 0] = "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-    /**
-     * PKCS1_RSA_PKCS1V5 - RSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PKCS1V5"] = 1] = "PKCS1_RSA_PKCS1V5";
-    /**
-     * PKCS1_RSA_PSS - See RFC8017
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKCS1_RSA_PSS"] = 2] = "PKCS1_RSA_PSS";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V5"] = 3] = "PKIX_RSA_PKCS1V5";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS"] = 4] = "PKIX_RSA_PSS";
-    /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_2048_SHA256"] = 9] = "PKIX_RSA_PKCS1V15_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_3072_SHA256"] = 10] = "PKIX_RSA_PKCS1V15_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PKCS1V15_4096_SHA256"] = 11] = "PKIX_RSA_PKCS1V15_4096_SHA256";
-    /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_2048_SHA256"] = 16] = "PKIX_RSA_PSS_2048_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_3072_SHA256"] = 17] = "PKIX_RSA_PSS_3072_SHA256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_RSA_PSS_4096_SHA256"] = 18] = "PKIX_RSA_PSS_4096_SHA256";
-    /**
-     * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_HMAC_SHA_256"] = 6] = "PKIX_ECDSA_P256_HMAC_SHA_256";
-    /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P256_SHA_256"] = 5] = "PKIX_ECDSA_P256_SHA_256";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_384"] = 12] = "PKIX_ECDSA_P384_SHA_384";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_512"] = 13] = "PKIX_ECDSA_P521_SHA_512";
-    /** PKIX_ED25519 - Ed 25519 */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519"] = 7] = "PKIX_ED25519";
-    PublicKeyDetails[PublicKeyDetails["PKIX_ED25519_PH"] = 8] = "PKIX_ED25519_PH";
-    /**
-     * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they
-     * were/are being used by most Sigstore clients implementations.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P384_SHA_256"] = 19] = "PKIX_ECDSA_P384_SHA_256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["PKIX_ECDSA_P521_SHA_256"] = 20] = "PKIX_ECDSA_P521_SHA_256";
-    /**
-     * LMS_SHA256 - LMS and LM-OTS
-     *
-     * These algorithms are deprecated and should not be used.
-     * Keys and signatures MAY be used by private Sigstore
-     * deployments, but will not be supported by the public
-     * good instance.
-     *
-     * USER WARNING: LMS and LM-OTS are both stateful signature schemes.
-     * Using them correctly requires discretion and careful consideration
-     * to ensure that individual secret keys are not used more than once.
-     * In addition, LM-OTS is a single-use scheme, meaning that it
-     * MUST NOT be used for more than one signature per LM-OTS key.
-     * If you cannot maintain these invariants, you MUST NOT use these
-     * schemes.
-     *
-     * @deprecated
-     */
-    PublicKeyDetails[PublicKeyDetails["LMS_SHA256"] = 14] = "LMS_SHA256";
-    /** @deprecated */
-    PublicKeyDetails[PublicKeyDetails["LMOTS_SHA256"] = 15] = "LMOTS_SHA256";
-    /**
-     * ML_DSA_65 - ML-DSA
-     *
-     * These ML_DSA_65 and ML-DSA_87 algorithms are the pure variants that
-     * take data to sign rather than the prehash variants (HashML-DSA), which
-     * take digests.  While considered quantum-resistant, their usage
-     * involves tradeoffs in that signatures and keys are much larger, and
-     * this makes deployments more costly.
-     *
-     * USER WARNING: ML_DSA_65 and ML_DSA_87 are experimental algorithms.
-     * In the future they MAY be used by private Sigstore deployments, but
-     * they are not yet fully functional.  This warning will be removed when
-     * these algorithms are widely supported by Sigstore clients and servers,
-     * but care should still be taken for production environments.
-     */
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_65"] = 21] = "ML_DSA_65";
-    PublicKeyDetails[PublicKeyDetails["ML_DSA_87"] = 22] = "ML_DSA_87";
-})(PublicKeyDetails || (exports.PublicKeyDetails = PublicKeyDetails = {}));
-function publicKeyDetailsFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "PUBLIC_KEY_DETAILS_UNSPECIFIED":
-            return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED;
-        case 1:
-        case "PKCS1_RSA_PKCS1V5":
-            return PublicKeyDetails.PKCS1_RSA_PKCS1V5;
-        case 2:
-        case "PKCS1_RSA_PSS":
-            return PublicKeyDetails.PKCS1_RSA_PSS;
-        case 3:
-        case "PKIX_RSA_PKCS1V5":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V5;
-        case 4:
-        case "PKIX_RSA_PSS":
-            return PublicKeyDetails.PKIX_RSA_PSS;
-        case 9:
-        case "PKIX_RSA_PKCS1V15_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256;
-        case 10:
-        case "PKIX_RSA_PKCS1V15_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256;
-        case 11:
-        case "PKIX_RSA_PKCS1V15_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256;
-        case 16:
-        case "PKIX_RSA_PSS_2048_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256;
-        case 17:
-        case "PKIX_RSA_PSS_3072_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256;
-        case 18:
-        case "PKIX_RSA_PSS_4096_SHA256":
-            return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256;
-        case 6:
-        case "PKIX_ECDSA_P256_HMAC_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256;
-        case 5:
-        case "PKIX_ECDSA_P256_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256;
-        case 12:
-        case "PKIX_ECDSA_P384_SHA_384":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384;
-        case 13:
-        case "PKIX_ECDSA_P521_SHA_512":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512;
-        case 7:
-        case "PKIX_ED25519":
-            return PublicKeyDetails.PKIX_ED25519;
-        case 8:
-        case "PKIX_ED25519_PH":
-            return PublicKeyDetails.PKIX_ED25519_PH;
-        case 19:
-        case "PKIX_ECDSA_P384_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256;
-        case 20:
-        case "PKIX_ECDSA_P521_SHA_256":
-            return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256;
-        case 14:
-        case "LMS_SHA256":
-            return PublicKeyDetails.LMS_SHA256;
-        case 15:
-        case "LMOTS_SHA256":
-            return PublicKeyDetails.LMOTS_SHA256;
-        case 21:
-        case "ML_DSA_65":
-            return PublicKeyDetails.ML_DSA_65;
-        case 22:
-        case "ML_DSA_87":
-            return PublicKeyDetails.ML_DSA_87;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-function publicKeyDetailsToJSON(object) {
-    switch (object) {
-        case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED:
-            return "PUBLIC_KEY_DETAILS_UNSPECIFIED";
-        case PublicKeyDetails.PKCS1_RSA_PKCS1V5:
-            return "PKCS1_RSA_PKCS1V5";
-        case PublicKeyDetails.PKCS1_RSA_PSS:
-            return "PKCS1_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V5:
-            return "PKIX_RSA_PKCS1V5";
-        case PublicKeyDetails.PKIX_RSA_PSS:
-            return "PKIX_RSA_PSS";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256:
-            return "PKIX_RSA_PKCS1V15_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256:
-            return "PKIX_RSA_PKCS1V15_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256:
-            return "PKIX_RSA_PKCS1V15_4096_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256:
-            return "PKIX_RSA_PSS_2048_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256:
-            return "PKIX_RSA_PSS_3072_SHA256";
-        case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256:
-            return "PKIX_RSA_PSS_4096_SHA256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256:
-            return "PKIX_ECDSA_P256_HMAC_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256:
-            return "PKIX_ECDSA_P256_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384:
-            return "PKIX_ECDSA_P384_SHA_384";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512:
-            return "PKIX_ECDSA_P521_SHA_512";
-        case PublicKeyDetails.PKIX_ED25519:
-            return "PKIX_ED25519";
-        case PublicKeyDetails.PKIX_ED25519_PH:
-            return "PKIX_ED25519_PH";
-        case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256:
-            return "PKIX_ECDSA_P384_SHA_256";
-        case PublicKeyDetails.PKIX_ECDSA_P521_SHA_256:
-            return "PKIX_ECDSA_P521_SHA_256";
-        case PublicKeyDetails.LMS_SHA256:
-            return "LMS_SHA256";
-        case PublicKeyDetails.LMOTS_SHA256:
-            return "LMOTS_SHA256";
-        case PublicKeyDetails.ML_DSA_65:
-            return "ML_DSA_65";
-        case PublicKeyDetails.ML_DSA_87:
-            return "ML_DSA_87";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails");
-    }
-}
-var SubjectAlternativeNameType;
-(function (SubjectAlternativeNameType) {
-    SubjectAlternativeNameType[SubjectAlternativeNameType["SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"] = 0] = "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["EMAIL"] = 1] = "EMAIL";
-    SubjectAlternativeNameType[SubjectAlternativeNameType["URI"] = 2] = "URI";
-    /**
-     * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7
-     * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san
-     * for more details.
-     */
-    SubjectAlternativeNameType[SubjectAlternativeNameType["OTHER_NAME"] = 3] = "OTHER_NAME";
-})(SubjectAlternativeNameType || (exports.SubjectAlternativeNameType = SubjectAlternativeNameType = {}));
-function subjectAlternativeNameTypeFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED":
-            return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED;
-        case 1:
-        case "EMAIL":
-            return SubjectAlternativeNameType.EMAIL;
-        case 2:
-        case "URI":
-            return SubjectAlternativeNameType.URI;
-        case 3:
-        case "OTHER_NAME":
-            return SubjectAlternativeNameType.OTHER_NAME;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-function subjectAlternativeNameTypeToJSON(object) {
-    switch (object) {
-        case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED:
-            return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED";
-        case SubjectAlternativeNameType.EMAIL:
-            return "EMAIL";
-        case SubjectAlternativeNameType.URI:
-            return "URI";
-        case SubjectAlternativeNameType.OTHER_NAME:
-            return "OTHER_NAME";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType");
-    }
-}
-exports.HashOutput = {
-    fromJSON(object) {
-        return {
-            algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0,
-            digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.algorithm !== 0) {
-            obj.algorithm = hashAlgorithmToJSON(message.algorithm);
-        }
-        if (message.digest.length !== 0) {
-            obj.digest = base64FromBytes(message.digest);
-        }
-        return obj;
-    },
-};
-exports.MessageSignature = {
-    fromJSON(object) {
-        return {
-            messageDigest: isSet(object.messageDigest) ? exports.HashOutput.fromJSON(object.messageDigest) : undefined,
-            signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.messageDigest !== undefined) {
-            obj.messageDigest = exports.HashOutput.toJSON(message.messageDigest);
-        }
-        if (message.signature.length !== 0) {
-            obj.signature = base64FromBytes(message.signature);
-        }
-        return obj;
-    },
-};
-exports.LogId = {
-    fromJSON(object) {
-        return { keyId: isSet(object.keyId) ? Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.keyId.length !== 0) {
-            obj.keyId = base64FromBytes(message.keyId);
-        }
-        return obj;
-    },
-};
-exports.RFC3161SignedTimestamp = {
-    fromJSON(object) {
-        return {
-            signedTimestamp: isSet(object.signedTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedTimestamp.length !== 0) {
-            obj.signedTimestamp = base64FromBytes(message.signedTimestamp);
-        }
-        return obj;
-    },
-};
-exports.PublicKey = {
-    fromJSON(object) {
-        return {
-            rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined,
-            keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0,
-            validFor: isSet(object.validFor) ? exports.TimeRange.fromJSON(object.validFor) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes !== undefined) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        if (message.keyDetails !== 0) {
-            obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = exports.TimeRange.toJSON(message.validFor);
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentifier = {
-    fromJSON(object) {
-        return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.hint !== "") {
-            obj.hint = message.hint;
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifier = {
-    fromJSON(object) {
-        return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e) => globalThis.Number(e)) : [] };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.id?.length) {
-            obj.id = message.id.map((e) => Math.round(e));
-        }
-        return obj;
-    },
-};
-exports.ObjectIdentifierValuePair = {
-    fromJSON(object) {
-        return {
-            oid: isSet(object.oid) ? exports.ObjectIdentifier.fromJSON(object.oid) : undefined,
-            value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.oid !== undefined) {
-            obj.oid = exports.ObjectIdentifier.toJSON(message.oid);
-        }
-        if (message.value.length !== 0) {
-            obj.value = base64FromBytes(message.value);
-        }
-        return obj;
-    },
-};
-exports.DistinguishedName = {
-    fromJSON(object) {
-        return {
-            organization: isSet(object.organization) ? globalThis.String(object.organization) : "",
-            commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.organization !== "") {
-            obj.organization = message.organization;
-        }
-        if (message.commonName !== "") {
-            obj.commonName = message.commonName;
-        }
-        return obj;
-    },
-};
-exports.X509Certificate = {
-    fromJSON(object) {
-        return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.rawBytes.length !== 0) {
-            obj.rawBytes = base64FromBytes(message.rawBytes);
-        }
-        return obj;
-    },
-};
-exports.SubjectAlternativeName = {
-    fromJSON(object) {
-        return {
-            type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0,
-            identity: isSet(object.regexp)
-                ? { $case: "regexp", regexp: globalThis.String(object.regexp) }
-                : isSet(object.value)
-                    ? { $case: "value", value: globalThis.String(object.value) }
-                    : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.type !== 0) {
-            obj.type = subjectAlternativeNameTypeToJSON(message.type);
-        }
-        if (message.identity?.$case === "regexp") {
-            obj.regexp = message.identity.regexp;
-        }
-        else if (message.identity?.$case === "value") {
-            obj.value = message.identity.value;
-        }
-        return obj;
-    },
-};
-exports.X509CertificateChain = {
-    fromJSON(object) {
-        return {
-            certificates: globalThis.Array.isArray(object?.certificates)
-                ? object.certificates.map((e) => exports.X509Certificate.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.certificates?.length) {
-            obj.certificates = message.certificates.map((e) => exports.X509Certificate.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.TimeRange = {
-    fromJSON(object) {
-        return {
-            start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined,
-            end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.start !== undefined) {
-            obj.start = message.start.toISOString();
-        }
-        if (message.end !== undefined) {
-            obj.end = message.end.toISOString();
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function fromTimestamp(t) {
-    let millis = (globalThis.Number(t.seconds) || 0) * 1_000;
-    millis += (t.nanos || 0) / 1_000_000;
-    return new globalThis.Date(millis);
-}
-function fromJsonTimestamp(o) {
-    if (o instanceof globalThis.Date) {
-        return o;
-    }
-    else if (typeof o === "string") {
-        return new globalThis.Date(o);
-    }
-    else {
-        return fromTimestamp(timestamp_1.Timestamp.fromJSON(o));
-    }
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
deleted file mode 100644
index fd8ea8384664d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_rekor.js
+++ /dev/null
@@ -1,137 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_rekor.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TransparencyLogEntry = exports.InclusionPromise = exports.InclusionProof = exports.Checkpoint = exports.KindVersion = void 0;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-exports.KindVersion = {
-    fromJSON(object) {
-        return {
-            kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
-            version: isSet(object.version) ? globalThis.String(object.version) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.kind !== "") {
-            obj.kind = message.kind;
-        }
-        if (message.version !== "") {
-            obj.version = message.version;
-        }
-        return obj;
-    },
-};
-exports.Checkpoint = {
-    fromJSON(object) {
-        return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.envelope !== "") {
-            obj.envelope = message.envelope;
-        }
-        return obj;
-    },
-};
-exports.InclusionProof = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0),
-            treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0",
-            hashes: globalThis.Array.isArray(object?.hashes)
-                ? object.hashes.map((e) => Buffer.from(bytesFromBase64(e)))
-                : [],
-            checkpoint: isSet(object.checkpoint) ? exports.Checkpoint.fromJSON(object.checkpoint) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.rootHash.length !== 0) {
-            obj.rootHash = base64FromBytes(message.rootHash);
-        }
-        if (message.treeSize !== "0") {
-            obj.treeSize = message.treeSize;
-        }
-        if (message.hashes?.length) {
-            obj.hashes = message.hashes.map((e) => base64FromBytes(e));
-        }
-        if (message.checkpoint !== undefined) {
-            obj.checkpoint = exports.Checkpoint.toJSON(message.checkpoint);
-        }
-        return obj;
-    },
-};
-exports.InclusionPromise = {
-    fromJSON(object) {
-        return {
-            signedEntryTimestamp: isSet(object.signedEntryTimestamp)
-                ? Buffer.from(bytesFromBase64(object.signedEntryTimestamp))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signedEntryTimestamp.length !== 0) {
-            obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp);
-        }
-        return obj;
-    },
-};
-exports.TransparencyLogEntry = {
-    fromJSON(object) {
-        return {
-            logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0",
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            kindVersion: isSet(object.kindVersion) ? exports.KindVersion.fromJSON(object.kindVersion) : undefined,
-            integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0",
-            inclusionPromise: isSet(object.inclusionPromise) ? exports.InclusionPromise.fromJSON(object.inclusionPromise) : undefined,
-            inclusionProof: isSet(object.inclusionProof) ? exports.InclusionProof.fromJSON(object.inclusionProof) : undefined,
-            canonicalizedBody: isSet(object.canonicalizedBody)
-                ? Buffer.from(bytesFromBase64(object.canonicalizedBody))
-                : Buffer.alloc(0),
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.logIndex !== "0") {
-            obj.logIndex = message.logIndex;
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.kindVersion !== undefined) {
-            obj.kindVersion = exports.KindVersion.toJSON(message.kindVersion);
-        }
-        if (message.integratedTime !== "0") {
-            obj.integratedTime = message.integratedTime;
-        }
-        if (message.inclusionPromise !== undefined) {
-            obj.inclusionPromise = exports.InclusionPromise.toJSON(message.inclusionPromise);
-        }
-        if (message.inclusionProof !== undefined) {
-            obj.inclusionProof = exports.InclusionProof.toJSON(message.inclusionProof);
-        }
-        if (message.canonicalizedBody.length !== 0) {
-            obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
deleted file mode 100644
index 1b5492fb1a77e..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_trustroot.js
+++ /dev/null
@@ -1,284 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_trustroot.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ClientTrustConfig = exports.ServiceConfiguration = exports.Service = exports.SigningConfig = exports.TrustedRoot = exports.CertificateAuthority = exports.TransparencyLogInstance = exports.ServiceSelector = void 0;
-exports.serviceSelectorFromJSON = serviceSelectorFromJSON;
-exports.serviceSelectorToJSON = serviceSelectorToJSON;
-/* eslint-disable */
-const sigstore_common_1 = require("./sigstore_common");
-/**
- * ServiceSelector specifies how a client SHOULD select a set of
- * Services to connect to. A client SHOULD throw an error if
- * the value is SERVICE_SELECTOR_UNDEFINED.
- */
-var ServiceSelector;
-(function (ServiceSelector) {
-    ServiceSelector[ServiceSelector["SERVICE_SELECTOR_UNDEFINED"] = 0] = "SERVICE_SELECTOR_UNDEFINED";
-    /**
-     * ALL - Clients SHOULD select all Services based on supported API version
-     * and validity window.
-     */
-    ServiceSelector[ServiceSelector["ALL"] = 1] = "ALL";
-    /**
-     * ANY - Clients SHOULD select one Service based on supported API version
-     * and validity window. It is up to the client implementation to
-     * decide how to select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["ANY"] = 2] = "ANY";
-    /**
-     * EXACT - Clients SHOULD select a specific number of Services based on
-     * supported API version and validity window, using the provided
-     * `count`. It is up to the client implementation to decide how to
-     * select the Service, e.g. random or round-robin.
-     */
-    ServiceSelector[ServiceSelector["EXACT"] = 3] = "EXACT";
-})(ServiceSelector || (exports.ServiceSelector = ServiceSelector = {}));
-function serviceSelectorFromJSON(object) {
-    switch (object) {
-        case 0:
-        case "SERVICE_SELECTOR_UNDEFINED":
-            return ServiceSelector.SERVICE_SELECTOR_UNDEFINED;
-        case 1:
-        case "ALL":
-            return ServiceSelector.ALL;
-        case 2:
-        case "ANY":
-            return ServiceSelector.ANY;
-        case 3:
-        case "EXACT":
-            return ServiceSelector.EXACT;
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-function serviceSelectorToJSON(object) {
-    switch (object) {
-        case ServiceSelector.SERVICE_SELECTOR_UNDEFINED:
-            return "SERVICE_SELECTOR_UNDEFINED";
-        case ServiceSelector.ALL:
-            return "ALL";
-        case ServiceSelector.ANY:
-            return "ANY";
-        case ServiceSelector.EXACT:
-            return "EXACT";
-        default:
-            throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector");
-    }
-}
-exports.TransparencyLogInstance = {
-    fromJSON(object) {
-        return {
-            baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "",
-            hashAlgorithm: isSet(object.hashAlgorithm) ? (0, sigstore_common_1.hashAlgorithmFromJSON)(object.hashAlgorithm) : 0,
-            publicKey: isSet(object.publicKey) ? sigstore_common_1.PublicKey.fromJSON(object.publicKey) : undefined,
-            logId: isSet(object.logId) ? sigstore_common_1.LogId.fromJSON(object.logId) : undefined,
-            checkpointKeyId: isSet(object.checkpointKeyId) ? sigstore_common_1.LogId.fromJSON(object.checkpointKeyId) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.baseUrl !== "") {
-            obj.baseUrl = message.baseUrl;
-        }
-        if (message.hashAlgorithm !== 0) {
-            obj.hashAlgorithm = (0, sigstore_common_1.hashAlgorithmToJSON)(message.hashAlgorithm);
-        }
-        if (message.publicKey !== undefined) {
-            obj.publicKey = sigstore_common_1.PublicKey.toJSON(message.publicKey);
-        }
-        if (message.logId !== undefined) {
-            obj.logId = sigstore_common_1.LogId.toJSON(message.logId);
-        }
-        if (message.checkpointKeyId !== undefined) {
-            obj.checkpointKeyId = sigstore_common_1.LogId.toJSON(message.checkpointKeyId);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.CertificateAuthority = {
-    fromJSON(object) {
-        return {
-            subject: isSet(object.subject) ? sigstore_common_1.DistinguishedName.fromJSON(object.subject) : undefined,
-            uri: isSet(object.uri) ? globalThis.String(object.uri) : "",
-            certChain: isSet(object.certChain) ? sigstore_common_1.X509CertificateChain.fromJSON(object.certChain) : undefined,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.subject !== undefined) {
-            obj.subject = sigstore_common_1.DistinguishedName.toJSON(message.subject);
-        }
-        if (message.uri !== "") {
-            obj.uri = message.uri;
-        }
-        if (message.certChain !== undefined) {
-            obj.certChain = sigstore_common_1.X509CertificateChain.toJSON(message.certChain);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.TrustedRoot = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            tlogs: globalThis.Array.isArray(object?.tlogs)
-                ? object.tlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities)
-                ? object.certificateAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-            ctlogs: globalThis.Array.isArray(object?.ctlogs)
-                ? object.ctlogs.map((e) => exports.TransparencyLogInstance.fromJSON(e))
-                : [],
-            timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities)
-                ? object.timestampAuthorities.map((e) => exports.CertificateAuthority.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.tlogs?.length) {
-            obj.tlogs = message.tlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.certificateAuthorities?.length) {
-            obj.certificateAuthorities = message.certificateAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        if (message.ctlogs?.length) {
-            obj.ctlogs = message.ctlogs.map((e) => exports.TransparencyLogInstance.toJSON(e));
-        }
-        if (message.timestampAuthorities?.length) {
-            obj.timestampAuthorities = message.timestampAuthorities.map((e) => exports.CertificateAuthority.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.SigningConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls)
-                ? object.rekorTlogUrls.map((e) => exports.Service.fromJSON(e))
-                : [],
-            rekorTlogConfig: isSet(object.rekorTlogConfig)
-                ? exports.ServiceConfiguration.fromJSON(object.rekorTlogConfig)
-                : undefined,
-            tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e) => exports.Service.fromJSON(e)) : [],
-            tsaConfig: isSet(object.tsaConfig) ? exports.ServiceConfiguration.fromJSON(object.tsaConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.caUrls?.length) {
-            obj.caUrls = message.caUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.oidcUrls?.length) {
-            obj.oidcUrls = message.oidcUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogUrls?.length) {
-            obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.rekorTlogConfig !== undefined) {
-            obj.rekorTlogConfig = exports.ServiceConfiguration.toJSON(message.rekorTlogConfig);
-        }
-        if (message.tsaUrls?.length) {
-            obj.tsaUrls = message.tsaUrls.map((e) => exports.Service.toJSON(e));
-        }
-        if (message.tsaConfig !== undefined) {
-            obj.tsaConfig = exports.ServiceConfiguration.toJSON(message.tsaConfig);
-        }
-        return obj;
-    },
-};
-exports.Service = {
-    fromJSON(object) {
-        return {
-            url: isSet(object.url) ? globalThis.String(object.url) : "",
-            majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0,
-            validFor: isSet(object.validFor) ? sigstore_common_1.TimeRange.fromJSON(object.validFor) : undefined,
-            operator: isSet(object.operator) ? globalThis.String(object.operator) : "",
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.url !== "") {
-            obj.url = message.url;
-        }
-        if (message.majorApiVersion !== 0) {
-            obj.majorApiVersion = Math.round(message.majorApiVersion);
-        }
-        if (message.validFor !== undefined) {
-            obj.validFor = sigstore_common_1.TimeRange.toJSON(message.validFor);
-        }
-        if (message.operator !== "") {
-            obj.operator = message.operator;
-        }
-        return obj;
-    },
-};
-exports.ServiceConfiguration = {
-    fromJSON(object) {
-        return {
-            selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0,
-            count: isSet(object.count) ? globalThis.Number(object.count) : 0,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.selector !== 0) {
-            obj.selector = serviceSelectorToJSON(message.selector);
-        }
-        if (message.count !== 0) {
-            obj.count = Math.round(message.count);
-        }
-        return obj;
-    },
-};
-exports.ClientTrustConfig = {
-    fromJSON(object) {
-        return {
-            mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "",
-            trustedRoot: isSet(object.trustedRoot) ? exports.TrustedRoot.fromJSON(object.trustedRoot) : undefined,
-            signingConfig: isSet(object.signingConfig) ? exports.SigningConfig.fromJSON(object.signingConfig) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.mediaType !== "") {
-            obj.mediaType = message.mediaType;
-        }
-        if (message.trustedRoot !== undefined) {
-            obj.trustedRoot = exports.TrustedRoot.toJSON(message.trustedRoot);
-        }
-        if (message.signingConfig !== undefined) {
-            obj.signingConfig = exports.SigningConfig.toJSON(message.signingConfig);
-        }
-        return obj;
-    },
-};
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
deleted file mode 100644
index 876fe9cc1db1d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/__generated__/sigstore_verification.js
+++ /dev/null
@@ -1,281 +0,0 @@
-"use strict";
-// Code generated by protoc-gen-ts_proto. DO NOT EDIT.
-// versions:
-//   protoc-gen-ts_proto  v2.7.5
-//   protoc               v6.30.2
-// source: sigstore_verification.proto
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Input = exports.Artifact = exports.ArtifactVerificationOptions_ObserverTimestampOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions = exports.ArtifactVerificationOptions_CtlogOptions = exports.ArtifactVerificationOptions_TlogOptions = exports.ArtifactVerificationOptions = exports.PublicKeyIdentities = exports.CertificateIdentities = exports.CertificateIdentity = void 0;
-/* eslint-disable */
-const sigstore_bundle_1 = require("./sigstore_bundle");
-const sigstore_common_1 = require("./sigstore_common");
-const sigstore_trustroot_1 = require("./sigstore_trustroot");
-exports.CertificateIdentity = {
-    fromJSON(object) {
-        return {
-            issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "",
-            san: isSet(object.san) ? sigstore_common_1.SubjectAlternativeName.fromJSON(object.san) : undefined,
-            oids: globalThis.Array.isArray(object?.oids)
-                ? object.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.issuer !== "") {
-            obj.issuer = message.issuer;
-        }
-        if (message.san !== undefined) {
-            obj.san = sigstore_common_1.SubjectAlternativeName.toJSON(message.san);
-        }
-        if (message.oids?.length) {
-            obj.oids = message.oids.map((e) => sigstore_common_1.ObjectIdentifierValuePair.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.CertificateIdentities = {
-    fromJSON(object) {
-        return {
-            identities: globalThis.Array.isArray(object?.identities)
-                ? object.identities.map((e) => exports.CertificateIdentity.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.identities?.length) {
-            obj.identities = message.identities.map((e) => exports.CertificateIdentity.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.PublicKeyIdentities = {
-    fromJSON(object) {
-        return {
-            publicKeys: globalThis.Array.isArray(object?.publicKeys)
-                ? object.publicKeys.map((e) => sigstore_common_1.PublicKey.fromJSON(e))
-                : [],
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.publicKeys?.length) {
-            obj.publicKeys = message.publicKeys.map((e) => sigstore_common_1.PublicKey.toJSON(e));
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions = {
-    fromJSON(object) {
-        return {
-            signers: isSet(object.certificateIdentities)
-                ? {
-                    $case: "certificateIdentities",
-                    certificateIdentities: exports.CertificateIdentities.fromJSON(object.certificateIdentities),
-                }
-                : isSet(object.publicKeys)
-                    ? { $case: "publicKeys", publicKeys: exports.PublicKeyIdentities.fromJSON(object.publicKeys) }
-                    : undefined,
-            tlogOptions: isSet(object.tlogOptions)
-                ? exports.ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions)
-                : undefined,
-            ctlogOptions: isSet(object.ctlogOptions)
-                ? exports.ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions)
-                : undefined,
-            tsaOptions: isSet(object.tsaOptions)
-                ? exports.ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions)
-                : undefined,
-            integratedTsOptions: isSet(object.integratedTsOptions)
-                ? exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions)
-                : undefined,
-            observerOptions: isSet(object.observerOptions)
-                ? exports.ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions)
-                : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.signers?.$case === "certificateIdentities") {
-            obj.certificateIdentities = exports.CertificateIdentities.toJSON(message.signers.certificateIdentities);
-        }
-        else if (message.signers?.$case === "publicKeys") {
-            obj.publicKeys = exports.PublicKeyIdentities.toJSON(message.signers.publicKeys);
-        }
-        if (message.tlogOptions !== undefined) {
-            obj.tlogOptions = exports.ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions);
-        }
-        if (message.ctlogOptions !== undefined) {
-            obj.ctlogOptions = exports.ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions);
-        }
-        if (message.tsaOptions !== undefined) {
-            obj.tsaOptions = exports.ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions);
-        }
-        if (message.integratedTsOptions !== undefined) {
-            obj.integratedTsOptions = exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON(message.integratedTsOptions);
-        }
-        if (message.observerOptions !== undefined) {
-            obj.observerOptions = exports.ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions);
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            performOnlineVerification: isSet(object.performOnlineVerification)
-                ? globalThis.Boolean(object.performOnlineVerification)
-                : false,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.performOnlineVerification !== false) {
-            obj.performOnlineVerification = message.performOnlineVerification;
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_CtlogOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TimestampAuthorityOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_TlogIntegratedTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.ArtifactVerificationOptions_ObserverTimestampOptions = {
-    fromJSON(object) {
-        return {
-            threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0,
-            disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.threshold !== 0) {
-            obj.threshold = Math.round(message.threshold);
-        }
-        if (message.disable !== false) {
-            obj.disable = message.disable;
-        }
-        return obj;
-    },
-};
-exports.Artifact = {
-    fromJSON(object) {
-        return {
-            data: isSet(object.artifactUri)
-                ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) }
-                : isSet(object.artifact)
-                    ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) }
-                    : isSet(object.artifactDigest)
-                        ? { $case: "artifactDigest", artifactDigest: sigstore_common_1.HashOutput.fromJSON(object.artifactDigest) }
-                        : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.data?.$case === "artifactUri") {
-            obj.artifactUri = message.data.artifactUri;
-        }
-        else if (message.data?.$case === "artifact") {
-            obj.artifact = base64FromBytes(message.data.artifact);
-        }
-        else if (message.data?.$case === "artifactDigest") {
-            obj.artifactDigest = sigstore_common_1.HashOutput.toJSON(message.data.artifactDigest);
-        }
-        return obj;
-    },
-};
-exports.Input = {
-    fromJSON(object) {
-        return {
-            artifactTrustRoot: isSet(object.artifactTrustRoot) ? sigstore_trustroot_1.TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined,
-            artifactVerificationOptions: isSet(object.artifactVerificationOptions)
-                ? exports.ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions)
-                : undefined,
-            bundle: isSet(object.bundle) ? sigstore_bundle_1.Bundle.fromJSON(object.bundle) : undefined,
-            artifact: isSet(object.artifact) ? exports.Artifact.fromJSON(object.artifact) : undefined,
-        };
-    },
-    toJSON(message) {
-        const obj = {};
-        if (message.artifactTrustRoot !== undefined) {
-            obj.artifactTrustRoot = sigstore_trustroot_1.TrustedRoot.toJSON(message.artifactTrustRoot);
-        }
-        if (message.artifactVerificationOptions !== undefined) {
-            obj.artifactVerificationOptions = exports.ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions);
-        }
-        if (message.bundle !== undefined) {
-            obj.bundle = sigstore_bundle_1.Bundle.toJSON(message.bundle);
-        }
-        if (message.artifact !== undefined) {
-            obj.artifact = exports.Artifact.toJSON(message.artifact);
-        }
-        return obj;
-    },
-};
-function bytesFromBase64(b64) {
-    return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
-}
-function base64FromBytes(arr) {
-    return globalThis.Buffer.from(arr).toString("base64");
-}
-function isSet(value) {
-    return value !== null && value !== undefined;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js
deleted file mode 100644
index eafb768c48fca..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/index.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("./__generated__/envelope"), exports);
-__exportStar(require("./__generated__/sigstore_bundle"), exports);
-__exportStar(require("./__generated__/sigstore_common"), exports);
-__exportStar(require("./__generated__/sigstore_rekor"), exports);
-__exportStar(require("./__generated__/sigstore_trustroot"), exports);
-__exportStar(require("./__generated__/sigstore_verification"), exports);
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
deleted file mode 100644
index 10745efc39a1f..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/dist/rekor/v2/index.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-/*
-Copyright 2025 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-__exportStar(require("../../__generated__/rekor/v2/dsse"), exports);
-__exportStar(require("../../__generated__/rekor/v2/entry"), exports);
-__exportStar(require("../../__generated__/rekor/v2/hashedrekord"), exports);
-__exportStar(require("../../__generated__/rekor/v2/verifier"), exports);
diff --git a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json b/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json
deleted file mode 100644
index f87b2540fbf98..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/protobuf-specs/package.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
-  "name": "@sigstore/protobuf-specs",
-  "version": "0.5.0",
-  "description": "code-signing for npm packages",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "exports": {
-    ".": "./dist/index.js",
-    "./rekor/v2": "./dist/rekor/v2/index.js"
-  },
-  "scripts": {
-    "build": "tsc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/protobuf-specs.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "bugs": {
-    "url": "https://github.com/sigstore/protobuf-specs/issues"
-  },
-  "homepage": "https://github.com/sigstore/protobuf-specs#readme",
-  "devDependencies": {
-    "@tsconfig/node18": "^18.2.4",
-    "@types/node": "^18.14.0",
-    "typescript": "^5.7.2"
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE b/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
deleted file mode 100644
index e9e7c1679a09d..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright 2023 The Sigstore Authors
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
deleted file mode 100644
index 06a8143e70da2..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/appdata.js
+++ /dev/null
@@ -1,43 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.appDataPath = appDataPath;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const os_1 = __importDefault(require("os"));
-const path_1 = __importDefault(require("path"));
-function appDataPath(name) {
-    const homedir = os_1.default.homedir();
-    switch (process.platform) {
-        /* istanbul ignore next */
-        case 'darwin': {
-            const appSupport = path_1.default.join(homedir, 'Library', 'Application Support');
-            return path_1.default.join(appSupport, name);
-        }
-        /* istanbul ignore next */
-        case 'win32': {
-            const localAppData = process.env.LOCALAPPDATA || path_1.default.join(homedir, 'AppData', 'Local');
-            return path_1.default.join(localAppData, name, 'Data');
-        }
-        /* istanbul ignore next */
-        default: {
-            const localData = process.env.XDG_DATA_HOME || path_1.default.join(homedir, '.local', 'share');
-            return path_1.default.join(localData, name);
-        }
-    }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
deleted file mode 100644
index 2931a0a6b3ab5..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/client.js
+++ /dev/null
@@ -1,113 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFClient = void 0;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fs_1 = __importDefault(require("fs"));
-const path_1 = __importDefault(require("path"));
-const tuf_js_1 = require("tuf-js");
-const _1 = require(".");
-const target_1 = require("./target");
-const TARGETS_DIR_NAME = 'targets';
-class TUFClient {
-    constructor(options) {
-        const url = new URL(options.mirrorURL);
-        const repoName = encodeURIComponent(url.host + url.pathname.replace(/\/$/, ''));
-        const cachePath = path_1.default.join(options.cachePath, repoName);
-        initTufCache(cachePath);
-        seedCache({
-            cachePath,
-            mirrorURL: options.mirrorURL,
-            tufRootPath: options.rootPath,
-            forceInit: options.forceInit,
-        });
-        this.updater = initClient({
-            mirrorURL: options.mirrorURL,
-            cachePath,
-            forceCache: options.forceCache,
-            retry: options.retry,
-            timeout: options.timeout,
-        });
-    }
-    async refresh() {
-        return this.updater.refresh();
-    }
-    getTarget(targetName) {
-        return (0, target_1.readTarget)(this.updater, targetName);
-    }
-}
-exports.TUFClient = TUFClient;
-// Initializes the TUF cache directory structure including the initial
-// root.json file. If the cache directory does not exist, it will be
-// created. If the targets directory does not exist, it will be created.
-// If the root.json file does not exist, it will be copied from the
-// rootPath argument.
-function initTufCache(cachePath) {
-    const targetsPath = path_1.default.join(cachePath, TARGETS_DIR_NAME);
-    if (!fs_1.default.existsSync(cachePath)) {
-        fs_1.default.mkdirSync(cachePath, { recursive: true });
-    }
-    /* istanbul ignore else */
-    if (!fs_1.default.existsSync(targetsPath)) {
-        fs_1.default.mkdirSync(targetsPath);
-    }
-}
-// Populates the TUF cache with the initial root.json file. If the root.json
-// file does not exist (or we're forcing re-initialization), copy it from either
-// the rootPath argument or from one of the repo seeds.
-function seedCache({ cachePath, mirrorURL, tufRootPath, forceInit, }) {
-    const cachedRootPath = path_1.default.join(cachePath, 'root.json');
-    // If the root.json file does not exist (or we're forcing re-initialization),
-    // populate it either from the supplied rootPath or from one of the repo seeds.
-    /* istanbul ignore else */
-    if (!fs_1.default.existsSync(cachedRootPath) || forceInit) {
-        if (tufRootPath) {
-            fs_1.default.copyFileSync(tufRootPath, cachedRootPath);
-        }
-        else {
-            const seeds = require('../seeds.json');
-            const repoSeed = seeds[mirrorURL];
-            if (!repoSeed) {
-                throw new _1.TUFError({
-                    code: 'TUF_INIT_CACHE_ERROR',
-                    message: `No root.json found for mirror: ${mirrorURL}`,
-                });
-            }
-            fs_1.default.writeFileSync(cachedRootPath, Buffer.from(repoSeed['root.json'], 'base64'));
-            // Copy any seed targets into the cache
-            Object.entries(repoSeed.targets).forEach(([targetName, target]) => {
-                fs_1.default.writeFileSync(path_1.default.join(cachePath, TARGETS_DIR_NAME, targetName), Buffer.from(target, 'base64'));
-            });
-        }
-    }
-}
-function initClient(options) {
-    const config = {
-        fetchTimeout: options.timeout,
-        fetchRetry: options.retry,
-    };
-    return new tuf_js_1.Updater({
-        metadataBaseUrl: options.mirrorURL,
-        targetBaseUrl: `${options.mirrorURL}/targets`,
-        metadataDir: options.cachePath,
-        targetDir: path_1.default.join(options.cachePath, TARGETS_DIR_NAME),
-        forceCache: options.forceCache,
-        config,
-    });
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
deleted file mode 100644
index e13971b289ff2..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/error.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFError = void 0;
-class TUFError extends Error {
-    constructor({ code, message, cause, }) {
-        super(message);
-        this.code = code;
-        this.cause = cause;
-        this.name = this.constructor.name;
-    }
-}
-exports.TUFError = TUFError;
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
deleted file mode 100644
index 2af5de93ec5d2..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/index.js
+++ /dev/null
@@ -1,56 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TUFError = exports.DEFAULT_MIRROR_URL = void 0;
-exports.getTrustedRoot = getTrustedRoot;
-exports.initTUF = initTUF;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const protobuf_specs_1 = require("@sigstore/protobuf-specs");
-const appdata_1 = require("./appdata");
-const client_1 = require("./client");
-exports.DEFAULT_MIRROR_URL = 'https://tuf-repo-cdn.sigstore.dev';
-const DEFAULT_CACHE_DIR = 'sigstore-js';
-const DEFAULT_RETRY = { retries: 2 };
-const DEFAULT_TIMEOUT = 5000;
-const TRUSTED_ROOT_TARGET = 'trusted_root.json';
-async function getTrustedRoot(
-/* istanbul ignore next */
-options = {}) {
-    const client = createClient(options);
-    const trustedRoot = await client.getTarget(TRUSTED_ROOT_TARGET);
-    return protobuf_specs_1.TrustedRoot.fromJSON(JSON.parse(trustedRoot));
-}
-async function initTUF(
-/* istanbul ignore next */
-options = {}) {
-    const client = createClient(options);
-    return client.refresh().then(() => client);
-}
-// Create a TUF client with default options
-function createClient(options) {
-    /* istanbul ignore next */
-    return new client_1.TUFClient({
-        cachePath: options.cachePath || (0, appdata_1.appDataPath)(DEFAULT_CACHE_DIR),
-        rootPath: options.rootPath,
-        mirrorURL: options.mirrorURL || exports.DEFAULT_MIRROR_URL,
-        retry: options.retry ?? DEFAULT_RETRY,
-        timeout: options.timeout ?? DEFAULT_TIMEOUT,
-        forceCache: options.forceCache ?? false,
-        forceInit: options.forceInit ?? options.force ?? false,
-    });
-}
-var error_1 = require("./error");
-Object.defineProperty(exports, "TUFError", { enumerable: true, get: function () { return error_1.TUFError; } });
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js b/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
deleted file mode 100644
index 5c6675bdfbf5f..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/dist/target.js
+++ /dev/null
@@ -1,79 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.readTarget = readTarget;
-/*
-Copyright 2023 The Sigstore Authors.
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-    http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
-const fs_1 = __importDefault(require("fs"));
-const error_1 = require("./error");
-// Downloads and returns the specified target from the provided TUF Updater.
-async function readTarget(tuf, targetPath) {
-    const path = await getTargetPath(tuf, targetPath);
-    return new Promise((resolve, reject) => {
-        fs_1.default.readFile(path, 'utf-8', (err, data) => {
-            if (err) {
-                reject(new error_1.TUFError({
-                    code: 'TUF_READ_TARGET_ERROR',
-                    message: `error reading target ${path}`,
-                    cause: err,
-                }));
-            }
-            else {
-                resolve(data);
-            }
-        });
-    });
-}
-// Returns the local path to the specified target. If the target is not yet
-// cached locally, the provided TUF Updater will be used to download and
-// cache the target.
-async function getTargetPath(tuf, target) {
-    let targetInfo;
-    try {
-        targetInfo = await tuf.getTargetInfo(target);
-    }
-    catch (err) {
-        throw new error_1.TUFError({
-            code: 'TUF_REFRESH_METADATA_ERROR',
-            message: 'error refreshing TUF metadata',
-            cause: err,
-        });
-    }
-    if (!targetInfo) {
-        throw new error_1.TUFError({
-            code: 'TUF_FIND_TARGET_ERROR',
-            message: `target ${target} not found`,
-        });
-    }
-    let path = await tuf.findCachedTarget(targetInfo);
-    // An empty path here means the target has not been cached locally, or is
-    // out of date. In either case, we need to download it.
-    if (!path) {
-        try {
-            path = await tuf.downloadTarget(targetInfo);
-        }
-        catch (err) {
-            throw new error_1.TUFError({
-                code: 'TUF_DOWNLOAD_TARGET_ERROR',
-                message: `error downloading target ${path}`,
-                cause: err,
-            });
-        }
-    }
-    return path;
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/package.json b/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
deleted file mode 100644
index 42dad938c2808..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/package.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
-  "name": "@sigstore/tuf",
-  "version": "4.0.0",
-  "description": "Client for the Sigstore TUF repository",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "clean": "shx rm -rf dist *.tsbuildinfo",
-    "build": "tsc --build",
-    "test": "jest"
-  },
-  "files": [
-    "dist",
-    "seeds.json"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "Apache-2.0",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/sigstore/sigstore-js.git"
-  },
-  "bugs": {
-    "url": "https://github.com/sigstore/sigstore-js/issues"
-  },
-  "homepage": "https://github.com/sigstore/sigstore-js/tree/main/packages/tuf#readme",
-  "publishConfig": {
-    "provenance": true
-  },
-  "devDependencies": {
-    "@sigstore/jest": "^0.0.0",
-    "@tufjs/repo-mock": "^3.0.1",
-    "@types/make-fetch-happen": "^10.0.4"
-  },
-  "dependencies": {
-    "@sigstore/protobuf-specs": "^0.5.0",
-    "tuf-js": "^4.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/sigstore/node_modules/@sigstore/tuf/seeds.json b/node_modules/sigstore/node_modules/@sigstore/tuf/seeds.json
deleted file mode 100644
index 6d48f33afe700..0000000000000
--- a/node_modules/sigstore/node_modules/@sigstore/tuf/seeds.json
+++ /dev/null
@@ -1 +0,0 @@
-{"https://tuf-repo-cdn.sigstore.dev":{"root.json":"ewogInNpZ25hdHVyZXMiOiBbCiAgewogICAia2V5aWQiOiAiNmYyNjAwODlkNTkyM2RhZjIwMTY2Y2E2NTdjNTQzYWY2MTgzNDZhYjk3MTg4NGE5OTk2MmIwMTk4OGJiZTBjMyIsCiAgICJzaWciOiAiIgogIH0sCiAgewogICAia2V5aWQiOiAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICJzaWciOiAiMzA0NTAyMjEwMGJiZGRkNDY0ZjgwNjZjZWI4OGJhNzg3Mzc1YzEyY2Q2MzMwNjgwZTA4YzI5MTA3MDNlNjUzOGM3MWNjNzlhZDIwMjIwNTE5MGIwNmU0NTM3ZmU5NjFiM2VmODFmZTY4ZWRjZDAwODljMTlmOTE5YWZlZDQyM2I5YWFmZDcwMDY0MTE1MyIKICB9LAogIHsKICAgImtleWlkIjogIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAic2lnIjogIjMwNDQwMjIwNjkzMDZjZDUyNTdmNzMyYTc0MGMxYWZlNjBhOGU0MzNjNWRlNThlYWZlYWRiZTk5YzMzNmM5YzcxZDE5OGNmODAyMjAwZDc3Mzk1M2FlN2RiYzQ4ZDNlNWJhZDlhNmY2NGJhZmZmMTk2YjdlMmFkNGE1MmExOTUxOTM2N2Q0N2RjMDQyIgogIH0sCiAgewogICAia2V5aWQiOiAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICJzaWciOiAiMzA0NDAyMjA0ZDIxYTJlYzgwZGY2NmU2MWY2ZmUyOTEyOTUxZGM0N2RmODM2MDM2ZjhjMGFiMTA4MTZkMzc1ZTcxZGJmNzllMDIyMDU0N2FkY2UxYWZkZjA0ZTY3OTRlZmEyMDNkZDUyNjRjNmY3ZTBlZjc4ZTU3ZmU5MzRiMGQyNmNiOTk0ZWVjNzYiCiAgfSwKICB7CiAgICJrZXlpZCI6ICJhNjg3ZTViZjRmYWI4MmIwZWU1OGQ0NmUwNWM5NTM1MTQ1YTJjOWFmYjQ1OGY0M2Q0MmI0NWNhMGZkY2UyYTcwIiwKICAgInNpZyI6ICIzMDQ1MDIyMDYwODI2NDk2NTU3MTQ0ZWIxNjQ5ODkzZWQ1ZjZmNGVhNTQ1MzZmZWIwY2E4MmY4Yjg5YWU2NDFiZTM5NzQzZTUwMjIxMDBhZDcxMThiNWU5ZDQ4MzczMjYyMDZlNDEyZmM2ZGEyOTk5OTI1ZDExMDMyOGE3YzE2NmIwNmM2MjQzMzZjOTNmIgogIH0sCiAgewogICAia2V5aWQiOiAiMTgzZTY0ZjM3NjcwZGMxM2NhMGQyODk5NWEzMDUzZjM3NDA5NTRkZGNlNDQzMjFhNDFlNDY1MzRjZjQ0ZTYzMiIsCiAgICJzaWciOiAiMzA0NjAyMjEwMGQ4MTc5NDM5YzJlNzNlYjBjMTczM2FiZWU3ZmFmODMyZGNhZWE3MjYzZWRjYjQ5MTk4OTFjM2EyNDdmMDU5MjMwMjIxMDBlMWE0MzdlMDc5N2U4MDNmOWI3MmRjOWQyZDkyMTU1YjBhMjI3MGMyNGVmZGQ1ZjRiM2E1ZDhmMGIwZjQzMWE3IgogIH0KIF0sCiAic2lnbmVkIjogewogICJfdHlwZSI6ICJyb290IiwKICAiY29uc2lzdGVudF9zbmFwc2hvdCI6IHRydWUsCiAgImV4cGlyZXMiOiAiMjAyNi0wMS0yMlQxMzowNTo1OVoiLAogICJrZXlzIjogewogICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVdSaUdyNStqKzNKNVNzSCtadHI1bkUySDJ3TzdcbkJWK25PM3M5M2dMY2ExOHFUT3pIWTFvV3lBR0R5a01Tc0dUVUJTdDlEK0FuMEtmS3NEMm1mU000MlE9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1vbmxpbmUtdXJpIjogImdjcGttczpwcm9qZWN0cy9zaWdzdG9yZS1yb290LXNpZ25pbmcvbG9jYXRpb25zL2dsb2JhbC9rZXlSaW5ncy9yb290L2NyeXB0b0tleXMvdGltZXN0YW1wL2NyeXB0b0tleVZlcnNpb25zLzEiCiAgIH0sCiAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIjogewogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUVNeHBQT0pDSVo1b3RHNDEwNmZHSnNlRVFpM1Y5XG5wa01ZUTR1eVY5VGoxTTdXSFhJeUxHK2prZnZ1RzBnbFExSlpiUlpaQlYzZ0FSNHNvamRHSElTZW93PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGxhbmNlIgogICB9LAogICAiMjJmNGNhZWM2ZDhlNmY5NTU1YWY2NmIzZDRjM2NiMDZhM2JiMjNmZGM3ZTM5YzkxNmM2MWY0NjJlNmY1MmIwNiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1
CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRXpCelZPbUhDUG9qTVZMU0kzNjRXaWlWOE5QckRcbjZJZ1J4Vmxpc2t6L3YreTNKRVI1bWNWR2NPTmxpRGNXTUM1SjJsZkhtalBOUGhiNEg3eG04THpmU0E9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAc2FudGlhZ290b3JyZXMiCiAgIH0sCiAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIjogewogICAgImtleWlkX2hhc2hfYWxnb3JpdGhtcyI6IFsKICAgICAic2hhMjU2IiwKICAgICAic2hhNTEyIgogICAgXSwKICAgICJrZXl0eXBlIjogImVjZHNhIiwKICAgICJrZXl2YWwiOiB7CiAgICAgInB1YmxpYyI6ICItLS0tLUJFR0lOIFBVQkxJQyBLRVktLS0tLVxuTUZrd0V3WUhLb1pJemowQ0FRWUlLb1pJemowREFRY0RRZ0FFaW5pa1NzQVFtWWtOZUg1ZVlxL0NuSXpMYWFjT1xueGxTYWF3UURPd3FLeS90Q3F4cTV4eFBTSmMyMUs0V0loczlHeU9rS2Z6dWVZM0dJTHpjTUpaNGNXdz09XG4tLS0tLUVORCBQVUJMSUMgS0VZLS0tLS1cbiIKICAgIH0sCiAgICAic2NoZW1lIjogImVjZHNhLXNoYTItbmlzdHAyNTYiLAogICAgIngtdHVmLW9uLWNpLWtleW93bmVyIjogIkBib2JjYWxsYXdheSIKICAgfSwKICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiOiB7CiAgICAia2V5aWRfaGFzaF9hbGdvcml0aG1zIjogWwogICAgICJzaGEyNTYiLAogICAgICJzaGE1MTIiCiAgICBdLAogICAgImtleXR5cGUiOiAiZWNkc2EiLAogICAgImtleXZhbCI6IHsKICAgICAicHVibGljIjogIi0tLS0tQkVHSU4gUFVCTElDIEtFWS0tLS0tXG5NRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUwZ2hyaDkyTHcxWXIzaWRHVjVXcUN0TURCOEN4XG4rRDhoZEM0dzJaTE5JcGxWUm9WR0xza1lhM2doZU15T2ppSjhrUGkxNWFRMi8vN1Arb2o3VXZKUEd3PT1cbi0tLS0tRU5EIFBVQkxJQyBLRVktLS0tLVxuIgogICAgfSwKICAgICJzY2hlbWUiOiAiZWNkc2Etc2hhMi1uaXN0cDI1NiIsCiAgICAieC10dWYtb24tY2kta2V5b3duZXIiOiAiQGpvc2h1YWdsIgogICB9LAogICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiI6IHsKICAgICJrZXlpZF9oYXNoX2FsZ29yaXRobXMiOiBbCiAgICAgInNoYTI1NiIsCiAgICAgInNoYTUxMiIKICAgIF0sCiAgICAia2V5dHlwZSI6ICJlY2RzYSIsCiAgICAia2V5dmFsIjogewogICAgICJwdWJsaWMiOiAiLS0tLS1CRUdJTiBQVUJMSUMgS0VZLS0tLS1cbk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRUVYc3ozU1pYRmI4ak1WNDJqNnBKbHlqYmpSOEtcbk4zQndvY2V4cTZMTUliNXFzV0tPUXZMTjE2TlVlZkxjNEhzd09vdW1Sc1ZWYWFqU3BRUzZmb2JrUnc9PVxuLS0tLS1FTkQgUFVCTElDIEtFWS0tLS0tXG4iCiAgICB9LAogICAgInNjaGVtZSI6ICJlY2RzYS1zaGEyLW5pc3RwMjU2IiwKICAgICJ4LXR1Zi1vbi1jaS1rZXlvd25lciI6ICJAbW5tNjc4IgogICB9CiAgfSwKICAicm9sZXMiOiB7CiAgICJyb290IjogewogICAgImtleWlkcyI6IFsKICAgICAiZTcxYTU0ZDU0MzgzNWJhODZhZGFkOTQ2MDM3OWM3NjQxZmI4NzI2ZDE2NGVhNzY2ODAxYTFjNTIyYWJhN2VhMiIsCiAgICAgIjIyZjRjYWVjNmQ4ZTZmOTU1NWFmNjZiM2Q0YzNjYjA2YTNiYjIzZmRjN2UzOWM5MTZjNjFmNDYyZTZmNTJiMDYiLAogICAgICI2MTY0MzgzODEyNWI0NDBiNDBkYjY5NDJmNWNiNWEzMWMwZGMwNDM2ODMxNmViMmFhYTU4Yjk1OTA0YTU4MjIyIiwKICAgICAiYTY4N2U1YmY0ZmFiODJiMGVlNThkNDZlMDVjOTUzNTE0NWEyYzlhZmI0NThmNDNkNDJiNDVjYTBmZGNlMmE3MCIsCiAgICAgIjE4M2U2NGYzNzY3MGRjMTNjYTBkMjg5OTVhMzA1M2YzNzQwOTU0ZGRjZTQ0MzIxYTQxZTQ2NTM0Y2Y0NGU2MzIiCiAgICBdLAogICAgInRocmVzaG9sZCI6IDMKICAgfSwKICAgInNuYXBzaG90IjogewogICAgImtleWlkcyI6IFsKICAgICAiMGM4NzQzMmMzYmYwOWZkOTkxODlmZGMzMmZhNWVhZWRmNGU0YTVmYWM3YmFiNzNmYTA0YTJlMGZjNjRhZjZmNSIKICAgIF0sCiAgICAidGhyZXNob2xkIjogMSwKICAgICJ4LXR1Zi1vbi1jaS1leHBpcnktcGVyaW9kIjogMzY1MCwKICAgICJ4LXR1Zi1vbi1jaS1zaWduaW5nLXBlcmlvZCI6IDM2NQogICB9LAogICAidGFyZ2V0cyI6IHsKICAgICJrZXlpZHMiOiBbCiAgICAgImU3MWE1NGQ1NDM4MzViYTg2YWRhZDk0NjAzNzljNzY0MWZiODcyNmQxNjRlYTc2NjgwMWExYzUyMmFiYTdlYTIiLAogICAgICIyMmY0Y2FlYzZkOGU2Zjk1NTVhZjY2YjNkNGMzY2IwNmEzYmIyM2ZkYzdlMzljOTE2YzYxZjQ2MmU2ZjUyYjA2IiwKICAgICAiNjE2NDM4MzgxMjViNDQwYjQwZGI2OTQyZjVjYjVhMzFjMGRjMDQzNjgzMTZlYjJhYWE1OGI5NTkwNGE1ODIyMiIsCiAgICAgImE2ODdlNWJmNGZhYjgyYjBlZTU4ZDQ2ZTA1Yzk1MzUxNDVhMmM5YWZiNDU4ZjQzZDQyYjQ1Y2EwZmRjZTJhNzAiLA
ogICAgICIxODNlNjRmMzc2NzBkYzEzY2EwZDI4OTk1YTMwNTNmMzc0MDk1NGRkY2U0NDMyMWE0MWU0NjUzNGNmNDRlNjMyIgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAzCiAgIH0sCiAgICJ0aW1lc3RhbXAiOiB7CiAgICAia2V5aWRzIjogWwogICAgICIwYzg3NDMyYzNiZjA5ZmQ5OTE4OWZkYzMyZmE1ZWFlZGY0ZTRhNWZhYzdiYWI3M2ZhMDRhMmUwZmM2NGFmNmY1IgogICAgXSwKICAgICJ0aHJlc2hvbGQiOiAxLAogICAgIngtdHVmLW9uLWNpLWV4cGlyeS1wZXJpb2QiOiA3LAogICAgIngtdHVmLW9uLWNpLXNpZ25pbmctcGVyaW9kIjogNgogICB9CiAgfSwKICAic3BlY192ZXJzaW9uIjogIjEuMCIsCiAgInZlcnNpb24iOiAxMywKICAieC10dWYtb24tY2ktZXhwaXJ5LXBlcmlvZCI6IDE5NywKICAieC10dWYtb24tY2ktc2lnbmluZy1wZXJpb2QiOiA0NgogfQp9","targets":{"trusted_root.json":"{
  "mediaType": "application/vnd.dev.sigstore.trustedroot+json;version=0.1",
  "tlogs": [
    {
      "baseUrl": "https://rekor.sigstore.dev",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAE2G2Y+2tabdTV5BcGiBIx0a9fAFwrkBbmLSGtks4L3qX6yYY0zufBnhC8Ur/iy55GhWP/9A/bY2LhC30M9+RYtw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-01-12T11:53:27Z"
        }
      },
      "logId": {
        "keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="
      }
    }
  ],
  "certificateAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIIB+DCCAX6gAwIBAgITNVkDZoCiofPDsy7dfm6geLbuhzAKBggqhkjOPQQDAzAqMRUwEwYDVQQKEwxzaWdzdG9yZS5kZXYxETAPBgNVBAMTCHNpZ3N0b3JlMB4XDTIxMDMwNzAzMjAyOVoXDTMxMDIyMzAzMjAyOVowKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTB2MBAGByqGSM49AgEGBSuBBAAiA2IABLSyA7Ii5k+pNO8ZEWY0ylemWDowOkNa3kL+GZE5Z5GWehL9/A9bRNA3RbrsZ5i0JcastaRL7Sp5fp/jD5dxqc/UdTVnlvS16an+2Yfswe/QuLolRUCrcOE2+2iA5+tzd6NmMGQwDgYDVR0PAQH/BAQDAgEGMBIGA1UdEwEB/wQIMAYBAf8CAQEwHQYDVR0OBBYEFMjFHQBBmiQpMlEk6w2uSu1KBtPsMB8GA1UdIwQYMBaAFMjFHQBBmiQpMlEk6w2uSu1KBtPsMAoGCCqGSM49BAMDA2gAMGUCMH8liWJfMui6vXXBhjDgY4MwslmN/TJxVe/83WrFomwmNf056y1X48F9c4m3a3ozXAIxAKjRay5/aj/jsKKGIkmQatjI8uupHr/+CxFvaJWmpYqNkLDGRU+9orzh5hI2RrcuaQ=="
          }
        ]
      },
      "validFor": {
        "start": "2021-03-07T03:20:29Z",
        "end": "2022-12-31T23:59:59.999Z"
      }
    },
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore"
      },
      "uri": "https://fulcio.sigstore.dev",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICGjCCAaGgAwIBAgIUALnViVfnU0brJasmRkHrn/UnfaQwCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMjA0MTMyMDA2MTVaFw0zMTEwMDUxMzU2NThaMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2lnc3RvcmUtaW50ZXJtZWRpYXRlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE8RVS/ysH+NOvuDZyPIZtilgUF9NlarYpAd9HP1vBBH1U5CV77LSS7s0ZiH4nE7Hv7ptS6LvvR/STk798LVgMzLlJ4HeIfF3tHSaexLcYpSASr1kS0N/RgBJz/9jWCiXno3sweTAOBgNVHQ8BAf8EBAMCAQYwEwYDVR0lBAwwCgYIKwYBBQUHAwMwEgYDVR0TAQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU39Ppz1YkEZb5qNjpKFWixi4YZD8wHwYDVR0jBBgwFoAUWMAeX5FFpWapesyQoZMi0CrFxfowCgYIKoZIzj0EAwMDZwAwZAIwPCsQK4DYiZYDPIaDi5HFKnfxXx6ASSVmERfsynYBiX2X6SJRnZU84/9DZdnFvvxmAjBOt6QpBlc4J/0DxvkTCqpclvziL6BCCPnjdlIB3Pu3BxsPmygUY7Ii2zbdCdliiow="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUALZNAPFdxHPwjeDloDwyYChAO/4wCgYIKoZIzj0EAwMwKjEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MREwDwYDVQQDEwhzaWdzdG9yZTAeFw0yMTEwMDcxMzU2NTlaFw0zMTEwMDUxMzU2NThaMCoxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjERMA8GA1UEAxMIc2lnc3RvcmUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAT7XeFT4rb3PQGwS4IajtLk3/OlnpgangaBclYpsYBr5i+4ynB07ceb3LP0OIOZdxexX69c5iVuyJRQ+Hz05yi+UF3uBWAlHpiS5sh0+H2GHE7SXrk1EC5m1Tr19L9gg92jYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRYwB5fkUWlZql6zJChkyLQKsXF+jAfBgNVHSMEGDAWgBRYwB5fkUWlZql6zJChkyLQKsXF+jAKBggqhkjOPQQDAwNpADBmAjEAj1nHeXZp+13NWBNa+EDsDP8G1WWg1tCMWP/WHPqpaVo0jhsweNFZgSs0eE7wYI4qAjEA2WB9ot98sIkoF3vZYdd3/VtWB5b9TNMea7Ix/stJ5TfcLLeABLE4BNJOsQ4vnBHJ"
          }
        ]
      },
      "validFor": {
        "start": "2022-04-13T20:06:15Z"
      }
    }
  ],
  "ctlogs": [
    {
      "baseUrl": "https://ctfe.sigstore.dev/test",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEbfwR+RJudXscgRBRpKX1XFDy3PyudDxz/SfnRi1fT8ekpfBd2O1uoz7jr3Z8nKzxA69EUQ+eFCFI3zeubPWU7w==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2021-03-14T00:00:00Z",
          "end": "2022-10-31T23:59:59.999Z"
        }
      },
      "logId": {
        "keyId": "CGCS8ChS/2hF0dFrJ4ScRWcYrBY9wzjSbea8IgY2b3I="
      }
    },
    {
      "baseUrl": "https://ctfe.sigstore.dev/2022",
      "hashAlgorithm": "SHA2_256",
      "publicKey": {
        "rawBytes": "MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAEiPSlFi0CmFTfEjCUqF9HuCEcYXNKAaYalIJmBZ8yyezPjTqhxrKBpMnaocVtLJBI1eM3uXnQzQGAJdJ4gs9Fyw==",
        "keyDetails": "PKIX_ECDSA_P256_SHA_256",
        "validFor": {
          "start": "2022-10-20T00:00:00Z"
        }
      },
      "logId": {
        "keyId": "3T0wasbHETJjGR4cmWc3AqJKXrjePK3/h4pygC8p7o4="
      }
    }
  ],
  "timestampAuthorities": [
    {
      "subject": {
        "organization": "sigstore.dev",
        "commonName": "sigstore-tsa-selfsigned"
      },
      "uri": "https://timestamp.sigstore.dev/api/v1/timestamp",
      "certChain": {
        "certificates": [
          {
            "rawBytes": "MIICEDCCAZagAwIBAgIUOhNULwyQYe68wUMvy4qOiyojiwwwCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMC4xFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEVMBMGA1UEAxMMc2lnc3RvcmUtdHNhMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE4ra2Z8hKNig2T9kFjCAToGG30jky+WQv3BzL+mKvh1SKNR/UwuwsfNCg4sryoYAd8E6isovVA3M4aoNdm9QDi50Z8nTEyvqgfDPtTIwXItfiW/AFf1V7uwkbkAoj0xxco2owaDAOBgNVHQ8BAf8EBAMCB4AwHQYDVR0OBBYEFIn9eUOHz9BlRsMCRscsc1t9tOsDMB8GA1UdIwQYMBaAFJjsAe9/u1H/1JUeb4qImFMHic6/MBYGA1UdJQEB/wQMMAoGCCsGAQUFBwMIMAoGCCqGSM49BAMDA2gAMGUCMDtpsV/6KaO0qyF/UMsX2aSUXKQFdoGTptQGc0ftq1csulHPGG6dsmyMNd3JB+G3EQIxAOajvBcjpJmKb4Nv+2Taoj8Uc5+b6ih6FXCCKraSqupe07zqswMcXJTe1cExvHvvlw=="
          },
          {
            "rawBytes": "MIIB9zCCAXygAwIBAgIUV7f0GLDOoEzIh8LXSW80OJiUp14wCgYIKoZIzj0EAwMwOTEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MSAwHgYDVQQDExdzaWdzdG9yZS10c2Etc2VsZnNpZ25lZDAeFw0yNTA0MDgwNjU5NDNaFw0zNTA0MDYwNjU5NDNaMDkxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEgMB4GA1UEAxMXc2lnc3RvcmUtdHNhLXNlbGZzaWduZWQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQUQNtfRT/ou3YATa6wB/kKTe70cfJwyRIBovMnt8RcJph/COE82uyS6FmppLLL1VBPGcPfpQPYJNXzWwi8icwhKQ6W/Qe2h3oebBb2FHpwNJDqo+TMaC/tdfkv/ElJB72jRTBDMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBSY7AHvf7tR/9SVHm+KiJhTB4nOvzAKBggqhkjOPQQDAwNpADBmAjEAwGEGrfGZR1cen1R8/DTVMI943LssZmJRtDp/i7SfGHmGRP6gRbuj9vOK3b67Z0QQAjEAuT2H673LQEaHTcyQSZrkp4mX7WwkmF+sVbkYY5mXN+RMH13KUEHHOqASaemYWK/E"
          }
        ]
      },
      "validFor": {
        "start": "2025-07-04T00:00:00Z"
      }
    }
  ]
}
","registry.npmjs.org%2Fkeys.json":"ewogICAgImtleXMiOiBbCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OmpsM2J3c3d1ODBQampva0NnaDBvMnc1YzJVNExoUUFFNTdnajljejFrekEiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRTFPbGIzek1BRkZ4WEtIaUlrUU81Y0ozWWhsNWk2VVBwK0lodXRlQkpidUhjQTVVb2dLbzBFV3RsV3dXNktTYUtvVE5FWUw3SmxDUWlWbmtoQmt0VWdnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIxOTk5LTAxLTAxVDAwOjAwOjAwLjAwMFoiLAogICAgICAgICAgICAgICAgICAgICJlbmQiOiAiMjAyNS0wMS0yOVQwMDowMDowMC4wMDBaIgogICAgICAgICAgICAgICAgfQogICAgICAgICAgICB9CiAgICAgICAgfSwKICAgICAgICB7CiAgICAgICAgICAgICJrZXlJZCI6ICJTSEEyNTY6amwzYndzd3U4MFBqam9rQ2doMG8ydzVjMlU0TGhRQUU1N2dqOWN6MWt6QSIsCiAgICAgICAgICAgICJrZXlVc2FnZSI6ICJucG06YXR0ZXN0YXRpb25zIiwKICAgICAgICAgICAgInB1YmxpY0tleSI6IHsKICAgICAgICAgICAgICAgICJyYXdCeXRlcyI6ICJNRmt3RXdZSEtvWkl6ajBDQVFZSUtvWkl6ajBEQVFjRFFnQUUxT2xiM3pNQUZGeFhLSGlJa1FPNWNKM1lobDVpNlVQcCtJaHV0ZUJKYnVIY0E1VW9nS28wRVd0bFd3VzZLU2FLb1RORVlMN0psQ1FpVm5raEJrdFVnZz09IiwKICAgICAgICAgICAgICAgICJrZXlEZXRhaWxzIjogIlBLSVhfRUNEU0FfUDI1Nl9TSEFfMjU2IiwKICAgICAgICAgICAgICAgICJ2YWxpZEZvciI6IHsKICAgICAgICAgICAgICAgICAgICAic3RhcnQiOiAiMjAyMi0xMi0wMVQwMDowMDowMC4wMDBaIiwKICAgICAgICAgICAgICAgICAgICAiZW5kIjogIjIwMjUtMDEtMjlUMDA6MDA6MDAuMDAwWiIKICAgICAgICAgICAgICAgIH0KICAgICAgICAgICAgfQogICAgICAgIH0sCiAgICAgICAgewogICAgICAgICAgICAia2V5SWQiOiAiU0hBMjU2OkRoUTh3UjVBUEJ2RkhMRi8rVGMrQVl2UE9kVHBjSURxT2h4c0JIUndDN1UiLAogICAgICAgICAgICAia2V5VXNhZ2UiOiAibnBtOnNpZ25hdHVyZXMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9LAogICAgICAgIHsKICAgICAgICAgICAgImtleUlkIjogIlNIQTI1NjpEaFE4d1I1QVBCdkZITEYvK1RjK0FZdlBPZFRwY0lEcU9oeHNCSFJ3QzdVIiwKICAgICAgICAgICAgImtleVVzYWdlIjogIm5wbTphdHRlc3RhdGlvbnMiLAogICAgICAgICAgICAicHVibGljS2V5IjogewogICAgICAgICAgICAgICAgInJhd0J5dGVzIjogIk1Ga3dFd1lIS29aSXpqMENBUVlJS29aSXpqMERBUWNEUWdBRVk2WWE3VysrN2FVUHp2TVRyZXpINlljeDNjK0hPS1lDY05HeWJKWlNDSnEvZmQ3UWE4dXVBS3RkSWtVUXRRaUVLRVJoQW1FNWxNTUpoUDhPa0RPYTJnPT0iLAogICAgICAgICAgICAgICAgImtleURldGFpbHMiOiAiUEtJWF9FQ0RTQV9QMjU2X1NIQV8yNTYiLAogICAgICAgICAgICAgICAgInZhbGlkRm9yIjogewogICAgICAgICAgICAgICAgICAgICJzdGFydCI6ICIyMDI1LTAxLTEzVDAwOjAwOjAwLjAwMFoiCiAgICAgICAgICAgICAgICB9CiAgICAgICAgICAgIH0KICAgICAgICB9CiAgICBdCn0K"}}}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/LICENSE b/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
deleted file mode 100644
index 420700f5d3765..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 GitHub and the TUF Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
deleted file mode 100644
index ba80698747ba0..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/error.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UnsupportedAlgorithmError = exports.CryptoError = exports.LengthOrHashMismatchError = exports.UnsignedMetadataError = exports.RepositoryError = exports.ValueError = void 0;
-// An error about insufficient values
-class ValueError extends Error {
-}
-exports.ValueError = ValueError;
-// An error with a repository's state, such as a missing file.
-// It covers all exceptions that come from the repository side when
-// looking from the perspective of users of metadata API or ngclient.
-class RepositoryError extends Error {
-}
-exports.RepositoryError = RepositoryError;
-// An error about metadata object with insufficient threshold of signatures.
-class UnsignedMetadataError extends RepositoryError {
-}
-exports.UnsignedMetadataError = UnsignedMetadataError;
-// An error while checking the length and hash values of an object.
-class LengthOrHashMismatchError extends RepositoryError {
-}
-exports.LengthOrHashMismatchError = LengthOrHashMismatchError;
-class CryptoError extends Error {
-}
-exports.CryptoError = CryptoError;
-class UnsupportedAlgorithmError extends CryptoError {
-}
-exports.UnsupportedAlgorithmError = UnsupportedAlgorithmError;
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
deleted file mode 100644
index a4dc783659f04..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/index.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Timestamp = exports.Targets = exports.Snapshot = exports.Signature = exports.Root = exports.Metadata = exports.Key = exports.TargetFile = exports.MetaFile = exports.ValueError = exports.MetadataKind = void 0;
-var base_1 = require("./base");
-Object.defineProperty(exports, "MetadataKind", { enumerable: true, get: function () { return base_1.MetadataKind; } });
-var error_1 = require("./error");
-Object.defineProperty(exports, "ValueError", { enumerable: true, get: function () { return error_1.ValueError; } });
-var file_1 = require("./file");
-Object.defineProperty(exports, "MetaFile", { enumerable: true, get: function () { return file_1.MetaFile; } });
-Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return file_1.TargetFile; } });
-var key_1 = require("./key");
-Object.defineProperty(exports, "Key", { enumerable: true, get: function () { return key_1.Key; } });
-var metadata_1 = require("./metadata");
-Object.defineProperty(exports, "Metadata", { enumerable: true, get: function () { return metadata_1.Metadata; } });
-var root_1 = require("./root");
-Object.defineProperty(exports, "Root", { enumerable: true, get: function () { return root_1.Root; } });
-var signature_1 = require("./signature");
-Object.defineProperty(exports, "Signature", { enumerable: true, get: function () { return signature_1.Signature; } });
-var snapshot_1 = require("./snapshot");
-Object.defineProperty(exports, "Snapshot", { enumerable: true, get: function () { return snapshot_1.Snapshot; } });
-var targets_1 = require("./targets");
-Object.defineProperty(exports, "Targets", { enumerable: true, get: function () { return targets_1.Targets; } });
-var timestamp_1 = require("./timestamp");
-Object.defineProperty(exports, "Timestamp", { enumerable: true, get: function () { return timestamp_1.Timestamp; } });
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
deleted file mode 100644
index 911e8475986bb..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/guard.js
+++ /dev/null
@@ -1,32 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.isDefined = isDefined;
-exports.isObject = isObject;
-exports.isStringArray = isStringArray;
-exports.isObjectArray = isObjectArray;
-exports.isStringRecord = isStringRecord;
-exports.isObjectRecord = isObjectRecord;
-function isDefined(val) {
-    return val !== undefined;
-}
-function isObject(value) {
-    return typeof value === 'object' && value !== null;
-}
-function isStringArray(value) {
-    return Array.isArray(value) && value.every((v) => typeof v === 'string');
-}
-function isObjectArray(value) {
-    return Array.isArray(value) && value.every(isObject);
-}
-function isStringRecord(value) {
-    return (typeof value === 'object' &&
-        value !== null &&
-        Object.keys(value).every((k) => typeof k === 'string') &&
-        Object.values(value).every((v) => typeof v === 'string'));
-}
-function isObjectRecord(value) {
-    return (typeof value === 'object' &&
-        value !== null &&
-        Object.keys(value).every((k) => typeof k === 'string') &&
-        Object.values(value).every((v) => typeof v === 'object' && v !== null));
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
deleted file mode 100644
index 3c3ec07f1425a..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/key.js
+++ /dev/null
@@ -1,142 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getPublicKey = getPublicKey;
-const crypto_1 = __importDefault(require("crypto"));
-const error_1 = require("../error");
-const oid_1 = require("./oid");
-const ASN1_TAG_SEQUENCE = 0x30;
-const ANS1_TAG_BIT_STRING = 0x03;
-const NULL_BYTE = 0x00;
-const OID_EDDSA = '1.3.101.112';
-const OID_EC_PUBLIC_KEY = '1.2.840.10045.2.1';
-const OID_EC_CURVE_P256V1 = '1.2.840.10045.3.1.7';
-const PEM_HEADER = '-----BEGIN PUBLIC KEY-----';
-function getPublicKey(keyInfo) {
-    switch (keyInfo.keyType) {
-        case 'rsa':
-            return getRSAPublicKey(keyInfo);
-        case 'ed25519':
-            return getED25519PublicKey(keyInfo);
-        case 'ecdsa':
-        case 'ecdsa-sha2-nistp256':
-        case 'ecdsa-sha2-nistp384':
-            return getECDCSAPublicKey(keyInfo);
-        default:
-            throw new error_1.UnsupportedAlgorithmError(`Unsupported key type: ${keyInfo.keyType}`);
-    }
-}
-function getRSAPublicKey(keyInfo) {
-    // Only support PEM-encoded RSA keys
-    if (!keyInfo.keyVal.startsWith(PEM_HEADER)) {
-        throw new error_1.CryptoError('Invalid key format');
-    }
-    const key = crypto_1.default.createPublicKey(keyInfo.keyVal);
-    switch (keyInfo.scheme) {
-        case 'rsassa-pss-sha256':
-            return {
-                key: key,
-                padding: crypto_1.default.constants.RSA_PKCS1_PSS_PADDING,
-            };
-        default:
-            throw new error_1.UnsupportedAlgorithmError(`Unsupported RSA scheme: ${keyInfo.scheme}`);
-    }
-}
-function getED25519PublicKey(keyInfo) {
-    let key;
-    // If key is already PEM-encoded we can just parse it
-    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
-        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
-    }
-    else {
-        // If key is not PEM-encoded it had better be hex
-        if (!isHex(keyInfo.keyVal)) {
-            throw new error_1.CryptoError('Invalid key format');
-        }
-        key = crypto_1.default.createPublicKey({
-            key: ed25519.hexToDER(keyInfo.keyVal),
-            format: 'der',
-            type: 'spki',
-        });
-    }
-    return { key };
-}
-function getECDCSAPublicKey(keyInfo) {
-    let key;
-    // If key is already PEM-encoded we can just parse it
-    if (keyInfo.keyVal.startsWith(PEM_HEADER)) {
-        key = crypto_1.default.createPublicKey(keyInfo.keyVal);
-    }
-    else {
-        // If key is not PEM-encoded it had better be hex
-        if (!isHex(keyInfo.keyVal)) {
-            throw new error_1.CryptoError('Invalid key format');
-        }
-        key = crypto_1.default.createPublicKey({
-            key: ecdsa.hexToDER(keyInfo.keyVal),
-            format: 'der',
-            type: 'spki',
-        });
-    }
-    return { key };
-}
-const ed25519 = {
-    // Translates a hex key into a crypto KeyObject
-    // https://keygen.sh/blog/how-to-use-hexadecimal-ed25519-keys-in-node/
-    hexToDER: (hex) => {
-        const key = Buffer.from(hex, 'hex');
-        const oid = (0, oid_1.encodeOIDString)(OID_EDDSA);
-        // Create a byte sequence containing the OID and key
-        const elements = Buffer.concat([
-            Buffer.concat([
-                Buffer.from([ASN1_TAG_SEQUENCE]),
-                Buffer.from([oid.length]),
-                oid,
-            ]),
-            Buffer.concat([
-                Buffer.from([ANS1_TAG_BIT_STRING]),
-                Buffer.from([key.length + 1]),
-                Buffer.from([NULL_BYTE]),
-                key,
-            ]),
-        ]);
-        // Wrap up by creating a sequence of elements
-        const der = Buffer.concat([
-            Buffer.from([ASN1_TAG_SEQUENCE]),
-            Buffer.from([elements.length]),
-            elements,
-        ]);
-        return der;
-    },
-};
-const ecdsa = {
-    hexToDER: (hex) => {
-        const key = Buffer.from(hex, 'hex');
-        const bitString = Buffer.concat([
-            Buffer.from([ANS1_TAG_BIT_STRING]),
-            Buffer.from([key.length + 1]),
-            Buffer.from([NULL_BYTE]),
-            key,
-        ]);
-        const oids = Buffer.concat([
-            (0, oid_1.encodeOIDString)(OID_EC_PUBLIC_KEY),
-            (0, oid_1.encodeOIDString)(OID_EC_CURVE_P256V1),
-        ]);
-        const oidSequence = Buffer.concat([
-            Buffer.from([ASN1_TAG_SEQUENCE]),
-            Buffer.from([oids.length]),
-            oids,
-        ]);
-        // Wrap up by creating a sequence of elements
-        const der = Buffer.concat([
-            Buffer.from([ASN1_TAG_SEQUENCE]),
-            Buffer.from([oidSequence.length + bitString.length]),
-            oidSequence,
-            bitString,
-        ]);
-        return der;
-    },
-};
-const isHex = (key) => /^[0-9a-fA-F]+$/.test(key);
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
deleted file mode 100644
index 00b29c3030d1e..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/oid.js
+++ /dev/null
@@ -1,26 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.encodeOIDString = encodeOIDString;
-const ANS1_TAG_OID = 0x06;
-function encodeOIDString(oid) {
-    const parts = oid.split('.');
-    // The first two subidentifiers are encoded into the first byte
-    const first = parseInt(parts[0], 10) * 40 + parseInt(parts[1], 10);
-    const rest = [];
-    parts.slice(2).forEach((part) => {
-        const bytes = encodeVariableLengthInteger(parseInt(part, 10));
-        rest.push(...bytes);
-    });
-    const der = Buffer.from([first, ...rest]);
-    return Buffer.from([ANS1_TAG_OID, der.length, ...der]);
-}
-function encodeVariableLengthInteger(value) {
-    const bytes = [];
-    let mask = 0x00;
-    while (value > 0) {
-        bytes.unshift((value & 0x7f) | mask);
-        value >>= 7;
-        mask = 0x80;
-    }
-    return bytes;
-}
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/types.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
deleted file mode 100644
index 8232b6f6a97ab..0000000000000
--- a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/verify.js
+++ /dev/null
@@ -1,13 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.verifySignature = void 0;
-const canonical_json_1 = require("@tufjs/canonical-json");
-const crypto_1 = __importDefault(require("crypto"));
-const verifySignature = (metaDataSignedData, key, signature) => {
-    const canonicalData = Buffer.from((0, canonical_json_1.canonicalize)(metaDataSignedData));
-    return crypto_1.default.verify(undefined, canonicalData, key, Buffer.from(signature, 'hex'));
-};
-exports.verifySignature = verifySignature;
diff --git a/node_modules/sigstore/node_modules/tuf-js/LICENSE b/node_modules/sigstore/node_modules/tuf-js/LICENSE
deleted file mode 100644
index 420700f5d3765..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-MIT License
-
-Copyright (c) 2022 GitHub and the TUF Contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/config.js b/node_modules/sigstore/node_modules/tuf-js/dist/config.js
deleted file mode 100644
index c66d76af86b98..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/config.js
+++ /dev/null
@@ -1,15 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.defaultConfig = void 0;
-exports.defaultConfig = {
-    maxRootRotations: 256,
-    maxDelegations: 32,
-    rootMaxLength: 512000, //bytes
-    timestampMaxLength: 16384, // bytes
-    snapshotMaxLength: 2000000, // bytes
-    targetsMaxLength: 5000000, // bytes
-    prefixTargetsWithHash: true,
-    fetchTimeout: 100000, // milliseconds
-    fetchRetries: undefined,
-    fetchRetry: 2,
-};
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/error.js b/node_modules/sigstore/node_modules/tuf-js/dist/error.js
deleted file mode 100644
index 3a3c26a068a95..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/error.js
+++ /dev/null
@@ -1,49 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DownloadHTTPError = exports.DownloadLengthMismatchError = exports.DownloadError = exports.ExpiredMetadataError = exports.EqualVersionError = exports.BadVersionError = exports.RepositoryError = exports.PersistError = exports.RuntimeError = exports.ValueError = void 0;
-// An error about insufficient values
-class ValueError extends Error {
-}
-exports.ValueError = ValueError;
-class RuntimeError extends Error {
-}
-exports.RuntimeError = RuntimeError;
-class PersistError extends Error {
-}
-exports.PersistError = PersistError;
-// An error with a repository's state, such as a missing file.
-// It covers all exceptions that come from the repository side when
-// looking from the perspective of users of metadata API or ngclient.
-class RepositoryError extends Error {
-}
-exports.RepositoryError = RepositoryError;
-// An error for metadata that contains an invalid version number.
-class BadVersionError extends RepositoryError {
-}
-exports.BadVersionError = BadVersionError;
-// An error for metadata containing a previously verified version number.
-class EqualVersionError extends BadVersionError {
-}
-exports.EqualVersionError = EqualVersionError;
-// Indicate that a TUF Metadata file has expired.
-class ExpiredMetadataError extends RepositoryError {
-}
-exports.ExpiredMetadataError = ExpiredMetadataError;
-//----- Download Errors -------------------------------------------------------
-// An error occurred while attempting to download a file.
-class DownloadError extends Error {
-}
-exports.DownloadError = DownloadError;
-// Indicate that a mismatch of lengths was seen while downloading a file
-class DownloadLengthMismatchError extends DownloadError {
-}
-exports.DownloadLengthMismatchError = DownloadLengthMismatchError;
-// Returned by FetcherInterface implementations for HTTP errors.
-class DownloadHTTPError extends DownloadError {
-    statusCode;
-    constructor(message, statusCode) {
-        super(message);
-        this.statusCode = statusCode;
-    }
-}
-exports.DownloadHTTPError = DownloadHTTPError;
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js b/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
deleted file mode 100644
index b964135c7b008..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/fetcher.js
+++ /dev/null
@@ -1,86 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.DefaultFetcher = exports.BaseFetcher = void 0;
-const debug_1 = __importDefault(require("debug"));
-const fs_1 = __importDefault(require("fs"));
-const make_fetch_happen_1 = __importDefault(require("make-fetch-happen"));
-const util_1 = __importDefault(require("util"));
-const error_1 = require("./error");
-const tmpfile_1 = require("./utils/tmpfile");
-const log = (0, debug_1.default)('tuf:fetch');
-class BaseFetcher {
-    // Download file from given URL. The file is downloaded to a temporary
-    // location and then passed to the given handler. The handler is responsible
-    // for moving the file to its final location. The temporary file is deleted
-    // after the handler returns.
-    async downloadFile(url, maxLength, handler) {
-        return (0, tmpfile_1.withTempFile)(async (tmpFile) => {
-            const reader = await this.fetch(url);
-            let numberOfBytesReceived = 0;
-            const fileStream = fs_1.default.createWriteStream(tmpFile);
-            // Read the stream a chunk at a time so that we can check
-            // the length of the file as we go
-            try {
-                for await (const chunk of reader) {
-                    numberOfBytesReceived += chunk.length;
-                    if (numberOfBytesReceived > maxLength) {
-                        throw new error_1.DownloadLengthMismatchError('Max length reached');
-                    }
-                    await writeBufferToStream(fileStream, chunk);
-                }
-            }
-            finally {
-                // Make sure we always close the stream
-                // eslint-disable-next-line @typescript-eslint/unbound-method
-                await util_1.default.promisify(fileStream.close).bind(fileStream)();
-            }
-            return handler(tmpFile);
-        });
-    }
-    // Download bytes from given URL.
-    async downloadBytes(url, maxLength) {
-        return this.downloadFile(url, maxLength, async (file) => {
-            const stream = fs_1.default.createReadStream(file);
-            const chunks = [];
-            for await (const chunk of stream) {
-                chunks.push(chunk);
-            }
-            return Buffer.concat(chunks);
-        });
-    }
-}
-exports.BaseFetcher = BaseFetcher;
-class DefaultFetcher extends BaseFetcher {
-    timeout;
-    retry;
-    constructor(options = {}) {
-        super();
-        this.timeout = options.timeout;
-        this.retry = options.retry;
-    }
-    async fetch(url) {
-        log('GET %s', url);
-        const response = await (0, make_fetch_happen_1.default)(url, {
-            timeout: this.timeout,
-            retry: this.retry,
-        });
-        if (!response.ok || !response?.body) {
-            throw new error_1.DownloadHTTPError('Failed to download', response.status);
-        }
-        return response.body;
-    }
-}
-exports.DefaultFetcher = DefaultFetcher;
-const writeBufferToStream = async (stream, buffer) => {
-    return new Promise((resolve, reject) => {
-        stream.write(buffer, (err) => {
-            if (err) {
-                reject(err);
-            }
-            resolve(true);
-        });
-    });
-};
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/index.js b/node_modules/sigstore/node_modules/tuf-js/dist/index.js
deleted file mode 100644
index 5a83b91f355d8..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Updater = exports.BaseFetcher = exports.TargetFile = void 0;
-var models_1 = require("@tufjs/models");
-Object.defineProperty(exports, "TargetFile", { enumerable: true, get: function () { return models_1.TargetFile; } });
-var fetcher_1 = require("./fetcher");
-Object.defineProperty(exports, "BaseFetcher", { enumerable: true, get: function () { return fetcher_1.BaseFetcher; } });
-var updater_1 = require("./updater");
-Object.defineProperty(exports, "Updater", { enumerable: true, get: function () { return updater_1.Updater; } });
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/store.js b/node_modules/sigstore/node_modules/tuf-js/dist/store.js
deleted file mode 100644
index 1b1669029a8db..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/store.js
+++ /dev/null
@@ -1,219 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.TrustedMetadataStore = void 0;
-const models_1 = require("@tufjs/models");
-const error_1 = require("./error");
-class TrustedMetadataStore {
-    trustedSet = {};
-    referenceTime;
-    constructor(rootData) {
-        // Client workflow 5.1: record fixed update start time
-        this.referenceTime = new Date();
-        // Client workflow 5.2: load trusted root metadata
-        this.loadTrustedRoot(rootData);
-    }
-    get root() {
-        if (!this.trustedSet.root) {
-            throw new ReferenceError('No trusted root metadata');
-        }
-        return this.trustedSet.root;
-    }
-    get timestamp() {
-        return this.trustedSet.timestamp;
-    }
-    get snapshot() {
-        return this.trustedSet.snapshot;
-    }
-    get targets() {
-        return this.trustedSet.targets;
-    }
-    getRole(name) {
-        return this.trustedSet[name];
-    }
-    updateRoot(bytesBuffer) {
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-        const newRoot = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
-        if (newRoot.signed.type != models_1.MetadataKind.Root) {
-            throw new error_1.RepositoryError(`Expected 'root', got ${newRoot.signed.type}`);
-        }
-        // Client workflow 5.4: check for arbitrary software attack
-        this.root.verifyDelegate(models_1.MetadataKind.Root, newRoot);
-        // Client workflow 5.5: check for rollback attack
-        if (newRoot.signed.version != this.root.signed.version + 1) {
-            throw new error_1.BadVersionError(`Expected version ${this.root.signed.version + 1}, got ${newRoot.signed.version}`);
-        }
-        // Check that new root is signed by self
-        newRoot.verifyDelegate(models_1.MetadataKind.Root, newRoot);
-        // Client workflow 5.7: set new root as trusted root
-        this.trustedSet.root = newRoot;
-        return newRoot;
-    }
-    updateTimestamp(bytesBuffer) {
-        if (this.snapshot) {
-            throw new error_1.RuntimeError('Cannot update timestamp after snapshot');
-        }
-        if (this.root.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError('Final root.json is expired');
-        }
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-        const newTimestamp = models_1.Metadata.fromJSON(models_1.MetadataKind.Timestamp, data);
-        if (newTimestamp.signed.type != models_1.MetadataKind.Timestamp) {
-            throw new error_1.RepositoryError(`Expected 'timestamp', got ${newTimestamp.signed.type}`);
-        }
-        // Client workflow 5.4.2: check for arbitrary software attack
-        this.root.verifyDelegate(models_1.MetadataKind.Timestamp, newTimestamp);
-        if (this.timestamp) {
-            // Prevent rolling back timestamp version
-            // Client workflow 5.4.3.1: check for rollback attack
-            if (newTimestamp.signed.version < this.timestamp.signed.version) {
-                throw new error_1.BadVersionError(`New timestamp version ${newTimestamp.signed.version} is less than current version ${this.timestamp.signed.version}`);
-            }
-            //  Keep using old timestamp if versions are equal.
-            if (newTimestamp.signed.version === this.timestamp.signed.version) {
-                throw new error_1.EqualVersionError(`New timestamp version ${newTimestamp.signed.version} is equal to current version ${this.timestamp.signed.version}`);
-            }
-            // Prevent rolling back snapshot version
-            // Client workflow 5.4.3.2: check for rollback attack
-            const snapshotMeta = this.timestamp.signed.snapshotMeta;
-            const newSnapshotMeta = newTimestamp.signed.snapshotMeta;
-            if (newSnapshotMeta.version < snapshotMeta.version) {
-                throw new error_1.BadVersionError(`New snapshot version ${newSnapshotMeta.version} is less than current version ${snapshotMeta.version}`);
-            }
-        }
-        // expiry not checked to allow old timestamp to be used for rollback
-        // protection of new timestamp: expiry is checked in update_snapshot
-        this.trustedSet.timestamp = newTimestamp;
-        // Client workflow 5.4.4: check for freeze attack
-        this.checkFinalTimestamp();
-        return newTimestamp;
-    }
-    updateSnapshot(bytesBuffer, trusted = false) {
-        if (!this.timestamp) {
-            throw new error_1.RuntimeError('Cannot update snapshot before timestamp');
-        }
-        if (this.targets) {
-            throw new error_1.RuntimeError('Cannot update snapshot after targets');
-        }
-        // Snapshot cannot be loaded if final timestamp is expired
-        this.checkFinalTimestamp();
-        const snapshotMeta = this.timestamp.signed.snapshotMeta;
-        // Verify non-trusted data against the hashes in timestamp, if any.
-        // Trusted snapshot data has already been verified once.
-        // Client workflow 5.5.2: check against timestamp role's snapshot hash
-        if (!trusted) {
-            snapshotMeta.verify(bytesBuffer);
-        }
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-        const newSnapshot = models_1.Metadata.fromJSON(models_1.MetadataKind.Snapshot, data);
-        if (newSnapshot.signed.type != models_1.MetadataKind.Snapshot) {
-            throw new error_1.RepositoryError(`Expected 'snapshot', got ${newSnapshot.signed.type}`);
-        }
-        // Client workflow 5.5.3: check for arbitrary software attack
-        this.root.verifyDelegate(models_1.MetadataKind.Snapshot, newSnapshot);
-        // version check against meta version (5.5.4) is deferred to allow old
-        // snapshot to be used in rollback protection
-        // Client workflow 5.5.5: check for rollback attack
-        if (this.snapshot) {
-            Object.entries(this.snapshot.signed.meta).forEach(([fileName, fileInfo]) => {
-                const newFileInfo = newSnapshot.signed.meta[fileName];
-                if (!newFileInfo) {
-                    throw new error_1.RepositoryError(`Missing file ${fileName} in new snapshot`);
-                }
-                if (newFileInfo.version < fileInfo.version) {
-                    throw new error_1.BadVersionError(`New version ${newFileInfo.version} of ${fileName} is less than current version ${fileInfo.version}`);
-                }
-            });
-        }
-        this.trustedSet.snapshot = newSnapshot;
-        // snapshot is loaded, but we raise if it's not valid _final_ snapshot
-        // Client workflow 5.5.4 & 5.5.6
-        this.checkFinalSnapsnot();
-        return newSnapshot;
-    }
-    updateDelegatedTargets(bytesBuffer, roleName, delegatorName) {
-        if (!this.snapshot) {
-            throw new error_1.RuntimeError('Cannot update delegated targets before snapshot');
-        }
-        // Targets cannot be loaded if final snapshot is expired or its version
-        // does not match meta version in timestamp.
-        this.checkFinalSnapsnot();
-        const delegator = this.trustedSet[delegatorName];
-        if (!delegator) {
-            throw new error_1.RuntimeError(`No trusted ${delegatorName} metadata`);
-        }
-        // Extract metadata for the delegated role from snapshot
-        const meta = this.snapshot.signed.meta?.[`${roleName}.json`];
-        if (!meta) {
-            throw new error_1.RepositoryError(`Missing ${roleName}.json in snapshot`);
-        }
-        // Client workflow 5.6.2: check against snapshot role's targets hash
-        meta.verify(bytesBuffer);
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-        const newDelegate = models_1.Metadata.fromJSON(models_1.MetadataKind.Targets, data);
-        if (newDelegate.signed.type != models_1.MetadataKind.Targets) {
-            throw new error_1.RepositoryError(`Expected 'targets', got ${newDelegate.signed.type}`);
-        }
-        // Client workflow 5.6.3: check for arbitrary software attack
-        delegator.verifyDelegate(roleName, newDelegate);
-        // Client workflow 5.6.4: Check against snapshot role’s targets version
-        const version = newDelegate.signed.version;
-        if (version != meta.version) {
-            throw new error_1.BadVersionError(`Version ${version} of ${roleName} does not match snapshot version ${meta.version}`);
-        }
-        // Client workflow 5.6.5: check for a freeze attack
-        if (newDelegate.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError(`${roleName}.json is expired`);
-        }
-        this.trustedSet[roleName] = newDelegate;
-    }
-    // Verifies and loads data as trusted root metadata.
-    // Note that an expired initial root is still considered valid.
-    loadTrustedRoot(bytesBuffer) {
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment
-        const data = JSON.parse(bytesBuffer.toString('utf8'));
-        // eslint-disable-next-line @typescript-eslint/no-unsafe-argument
-        const root = models_1.Metadata.fromJSON(models_1.MetadataKind.Root, data);
-        if (root.signed.type != models_1.MetadataKind.Root) {
-            throw new error_1.RepositoryError(`Expected 'root', got ${root.signed.type}`);
-        }
-        root.verifyDelegate(models_1.MetadataKind.Root, root);
-        this.trustedSet['root'] = root;
-    }
-    checkFinalTimestamp() {
-        // Timestamp MUST be loaded
-        if (!this.timestamp) {
-            throw new ReferenceError('No trusted timestamp metadata');
-        }
-        // Client workflow 5.4.4: check for freeze attack
-        if (this.timestamp.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError('Final timestamp.json is expired');
-        }
-    }
-    checkFinalSnapsnot() {
-        // Snapshot and timestamp MUST be loaded
-        if (!this.snapshot) {
-            throw new ReferenceError('No trusted snapshot metadata');
-        }
-        if (!this.timestamp) {
-            throw new ReferenceError('No trusted timestamp metadata');
-        }
-        // Client workflow 5.5.6: check for freeze attack
-        if (this.snapshot.signed.isExpired(this.referenceTime)) {
-            throw new error_1.ExpiredMetadataError('snapshot.json is expired');
-        }
-        // Client workflow 5.5.4: check against timestamp role’s snapshot version
-        const snapshotMeta = this.timestamp.signed.snapshotMeta;
-        if (this.snapshot.signed.version !== snapshotMeta.version) {
-            throw new error_1.BadVersionError("Snapshot version doesn't match timestamp");
-        }
-    }
-}
-exports.TrustedMetadataStore = TrustedMetadataStore;
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/updater.js b/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
deleted file mode 100644
index 32046e4bec417..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/updater.js
+++ /dev/null
@@ -1,368 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Updater = void 0;
-const models_1 = require("@tufjs/models");
-const debug_1 = __importDefault(require("debug"));
-const fs = __importStar(require("fs"));
-const path = __importStar(require("path"));
-const config_1 = require("./config");
-const error_1 = require("./error");
-const fetcher_1 = require("./fetcher");
-const store_1 = require("./store");
-const url = __importStar(require("./utils/url"));
-const log = (0, debug_1.default)('tuf:cache');
-class Updater {
-    dir;
-    metadataBaseUrl;
-    targetDir;
-    targetBaseUrl;
-    forceCache;
-    trustedSet;
-    config;
-    fetcher;
-    constructor(options) {
-        const { metadataDir, metadataBaseUrl, targetDir, targetBaseUrl, fetcher, config, } = options;
-        this.dir = metadataDir;
-        this.metadataBaseUrl = metadataBaseUrl;
-        this.targetDir = targetDir;
-        this.targetBaseUrl = targetBaseUrl;
-        this.forceCache = options.forceCache ?? false;
-        const data = this.loadLocalMetadata(models_1.MetadataKind.Root);
-        this.trustedSet = new store_1.TrustedMetadataStore(data);
-        this.config = { ...config_1.defaultConfig, ...config };
-        this.fetcher =
-            fetcher ||
-                new fetcher_1.DefaultFetcher({
-                    timeout: this.config.fetchTimeout,
-                    retry: this.config.fetchRetries ?? this.config.fetchRetry,
-                });
-    }
-    // refresh and load the metadata before downloading the target
-    // refresh should be called once after the client is initialized
-    async refresh() {
-        // If forceCache is true, try to load the timestamp from local storage
-        // without fetching it from the remote. Otherwise, load the root and
-        // timestamp from the remote per the TUF spec.
-        if (this.forceCache) {
-            // If anything fails, load the root and timestamp from the remote. This
-            // should cover any situation where the local metadata is corrupted or
-            // expired.
-            try {
-                await this.loadTimestamp({ checkRemote: false });
-            }
-            catch (error) {
-                await this.loadRoot();
-                await this.loadTimestamp();
-            }
-        }
-        else {
-            await this.loadRoot();
-            await this.loadTimestamp();
-        }
-        await this.loadSnapshot();
-        await this.loadTargets(models_1.MetadataKind.Targets, models_1.MetadataKind.Root);
-    }
-    // Returns the TargetFile instance with information for the given target path.
-    //
-    // Implicitly calls refresh if it hasn't already been called.
-    async getTargetInfo(targetPath) {
-        if (!this.trustedSet.targets) {
-            await this.refresh();
-        }
-        return this.preorderDepthFirstWalk(targetPath);
-    }
-    async downloadTarget(targetInfo, filePath, targetBaseUrl) {
-        const targetPath = filePath || this.generateTargetPath(targetInfo);
-        if (!targetBaseUrl) {
-            if (!this.targetBaseUrl) {
-                throw new error_1.ValueError('Target base URL not set');
-            }
-            targetBaseUrl = this.targetBaseUrl;
-        }
-        let targetFilePath = targetInfo.path;
-        const consistentSnapshot = this.trustedSet.root.signed.consistentSnapshot;
-        if (consistentSnapshot && this.config.prefixTargetsWithHash) {
-            const hashes = Object.values(targetInfo.hashes);
-            const { dir, base } = path.parse(targetFilePath);
-            const filename = `${hashes[0]}.${base}`;
-            targetFilePath = dir ? `${dir}/${filename}` : filename;
-        }
-        const targetUrl = url.join(targetBaseUrl, targetFilePath);
-        // Client workflow 5.7.3: download target file
-        await this.fetcher.downloadFile(targetUrl, targetInfo.length, async (fileName) => {
-            // Verify hashes and length of downloaded file
-            await targetInfo.verify(fs.createReadStream(fileName));
-            // Copy file to target path
-            log('WRITE %s', targetPath);
-            fs.copyFileSync(fileName, targetPath);
-        });
-        return targetPath;
-    }
-    async findCachedTarget(targetInfo, filePath) {
-        if (!filePath) {
-            filePath = this.generateTargetPath(targetInfo);
-        }
-        try {
-            if (fs.existsSync(filePath)) {
-                await targetInfo.verify(fs.createReadStream(filePath));
-                return filePath;
-            }
-        }
-        catch (error) {
-            return; // File not found
-        }
-        return; // File not found
-    }
-    loadLocalMetadata(fileName) {
-        const filePath = path.join(this.dir, `${fileName}.json`);
-        log('READ %s', filePath);
-        return fs.readFileSync(filePath);
-    }
-    // Sequentially load and persist on local disk every newer root metadata
-    // version available on the remote.
-    // Client workflow 5.3: update root role
-    async loadRoot() {
-        // Client workflow 5.3.2: version of trusted root metadata file
-        const rootVersion = this.trustedSet.root.signed.version;
-        const lowerBound = rootVersion + 1;
-        const upperBound = lowerBound + this.config.maxRootRotations;
-        for (let version = lowerBound; version < upperBound; version++) {
-            const rootUrl = url.join(this.metadataBaseUrl, `${version}.root.json`);
-            try {
-                // Client workflow 5.3.3: download new root metadata file
-                const bytesData = await this.fetcher.downloadBytes(rootUrl, this.config.rootMaxLength);
-                // Client workflow 5.3.4 - 5.4.7
-                this.trustedSet.updateRoot(bytesData);
-                // Client workflow 5.3.8: persist root metadata file
-                this.persistMetadata(models_1.MetadataKind.Root, bytesData);
-            }
-            catch (error) {
-                if (error instanceof error_1.DownloadHTTPError) {
-                    //  404/403 means current root is newest available
-                    if ([403, 404].includes(error.statusCode)) {
-                        break;
-                    }
-                }
-                throw error;
-            }
-        }
-    }
-    // Load local and remote timestamp metadata.
-    // Client workflow 5.4: update timestamp role
-    async loadTimestamp({ checkRemote } = { checkRemote: true }) {
-        // Load local and remote timestamp metadata
-        try {
-            const data = this.loadLocalMetadata(models_1.MetadataKind.Timestamp);
-            this.trustedSet.updateTimestamp(data);
-            // If checkRemote is disabled, return here to avoid fetching the remote
-            // timestamp metadata.
-            if (!checkRemote) {
-                return;
-            }
-        }
-        catch (error) {
-            // continue
-        }
-        //Load from remote (whether local load succeeded or not)
-        const timestampUrl = url.join(this.metadataBaseUrl, 'timestamp.json');
-        // Client workflow 5.4.1: download timestamp metadata file
-        const bytesData = await this.fetcher.downloadBytes(timestampUrl, this.config.timestampMaxLength);
-        try {
-            // Client workflow 5.4.2 - 5.4.4
-            this.trustedSet.updateTimestamp(bytesData);
-        }
-        catch (error) {
-            // If new timestamp version is same as current, discard the new one.
-            // This is normal and should NOT raise an error.
-            if (error instanceof error_1.EqualVersionError) {
-                return;
-            }
-            // Re-raise any other error
-            throw error;
-        }
-        // Client workflow 5.4.5: persist timestamp metadata
-        this.persistMetadata(models_1.MetadataKind.Timestamp, bytesData);
-    }
-    // Load local and remote snapshot metadata.
-    // Client workflow 5.5: update snapshot role
-    async loadSnapshot() {
-        //Load local (and if needed remote) snapshot metadata
-        try {
-            const data = this.loadLocalMetadata(models_1.MetadataKind.Snapshot);
-            this.trustedSet.updateSnapshot(data, true);
-        }
-        catch (error) {
-            if (!this.trustedSet.timestamp) {
-                throw new ReferenceError('No timestamp metadata');
-            }
-            const snapshotMeta = this.trustedSet.timestamp.signed.snapshotMeta;
-            const maxLength = snapshotMeta.length || this.config.snapshotMaxLength;
-            const version = this.trustedSet.root.signed.consistentSnapshot
-                ? snapshotMeta.version
-                : undefined;
-            const snapshotUrl = url.join(this.metadataBaseUrl, version ? `${version}.snapshot.json` : 'snapshot.json');
-            try {
-                // Client workflow 5.5.1: download snapshot metadata file
-                const bytesData = await this.fetcher.downloadBytes(snapshotUrl, maxLength);
-                // Client workflow 5.5.2 - 5.5.6
-                this.trustedSet.updateSnapshot(bytesData);
-                // Client workflow 5.5.7: persist snapshot metadata file
-                this.persistMetadata(models_1.MetadataKind.Snapshot, bytesData);
-            }
-            catch (error) {
-                throw new error_1.RuntimeError(`Unable to load snapshot metadata error ${error}`);
-            }
-        }
-    }
-    // Load local and remote targets metadata.
-    // Client workflow 5.6: update targets role
-    async loadTargets(role, parentRole) {
-        if (this.trustedSet.getRole(role)) {
-            return this.trustedSet.getRole(role);
-        }
-        try {
-            const buffer = this.loadLocalMetadata(role);
-            this.trustedSet.updateDelegatedTargets(buffer, role, parentRole);
-        }
-        catch (error) {
-            // Local 'role' does not exist or is invalid: update from remote
-            if (!this.trustedSet.snapshot) {
-                throw new ReferenceError('No snapshot metadata');
-            }
-            const metaInfo = this.trustedSet.snapshot.signed.meta[`${role}.json`];
-            // TODO: use length for fetching
-            const maxLength = metaInfo.length || this.config.targetsMaxLength;
-            const version = this.trustedSet.root.signed.consistentSnapshot
-                ? metaInfo.version
-                : undefined;
-            const encodedRole = encodeURIComponent(role);
-            const metadataUrl = url.join(this.metadataBaseUrl, version ? `${version}.${encodedRole}.json` : `${encodedRole}.json`);
-            try {
-                // Client workflow 5.6.1: download targets metadata file
-                const bytesData = await this.fetcher.downloadBytes(metadataUrl, maxLength);
-                // Client workflow 5.6.2 - 5.6.6
-                this.trustedSet.updateDelegatedTargets(bytesData, role, parentRole);
-                // Client workflow 5.6.7: persist targets metadata file
-                this.persistMetadata(role, bytesData);
-            }
-            catch (error) {
-                throw new error_1.RuntimeError(`Unable to load targets error ${error}`);
-            }
-        }
-        return this.trustedSet.getRole(role);
-    }
-    async preorderDepthFirstWalk(targetPath) {
-        // Interrogates the tree of target delegations in order of appearance
-        // (which implicitly order trustworthiness), and returns the matching
-        // target found in the most trusted role.
-        // List of delegations to be interrogated. A (role, parent role) pair
-        // is needed to load and verify the delegated targets metadata.
-        const delegationsToVisit = [
-            {
-                roleName: models_1.MetadataKind.Targets,
-                parentRoleName: models_1.MetadataKind.Root,
-            },
-        ];
-        const visitedRoleNames = new Set();
-        // Client workflow 5.6.7: preorder depth-first traversal of the graph of
-        // target delegations
-        while (visitedRoleNames.size <= this.config.maxDelegations &&
-            delegationsToVisit.length > 0) {
-            //  Pop the role name from the top of the stack.
-            const { roleName, parentRoleName } = delegationsToVisit.pop();
-            // Skip any visited current role to prevent cycles.
-            // Client workflow 5.6.7.1: skip already-visited roles
-            if (visitedRoleNames.has(roleName)) {
-                continue;
-            }
-            // The metadata for 'role_name' must be downloaded/updated before
-            // its targets, delegations, and child roles can be inspected.
-            const targets = (await this.loadTargets(roleName, parentRoleName))
-                ?.signed;
-            if (!targets) {
-                continue;
-            }
-            const target = targets.targets?.[targetPath];
-            if (target) {
-                return target;
-            }
-            // After preorder check, add current role to set of visited roles.
-            visitedRoleNames.add(roleName);
-            if (targets.delegations) {
-                const childRolesToVisit = [];
-                // NOTE: This may be a slow operation if there are many delegated roles.
-                const rolesForTarget = targets.delegations.rolesForTarget(targetPath);
-                for (const { role: childName, terminating } of rolesForTarget) {
-                    childRolesToVisit.push({
-                        roleName: childName,
-                        parentRoleName: roleName,
-                    });
-                    // Client workflow 5.6.7.2.1
-                    if (terminating) {
-                        delegationsToVisit.splice(0); // empty the array
-                        break;
-                    }
-                }
-                childRolesToVisit.reverse();
-                delegationsToVisit.push(...childRolesToVisit);
-            }
-        }
-        return; // no matching target found
-    }
-    generateTargetPath(targetInfo) {
-        if (!this.targetDir) {
-            throw new error_1.ValueError('Target directory not set');
-        }
-        // URL encode target path
-        const filePath = encodeURIComponent(targetInfo.path);
-        return path.join(this.targetDir, filePath);
-    }
-    persistMetadata(metaDataName, bytesData) {
-        const encodedName = encodeURIComponent(metaDataName);
-        try {
-            const filePath = path.join(this.dir, `${encodedName}.json`);
-            log('WRITE %s', filePath);
-            fs.writeFileSync(filePath, bytesData.toString('utf8'));
-        }
-        catch (error) {
-            throw new error_1.PersistError(`Failed to persist metadata ${encodedName} error: ${error}`);
-        }
-    }
-}
-exports.Updater = Updater;
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
deleted file mode 100644
index 923eef6044bcc..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/utils/tmpfile.js
+++ /dev/null
@@ -1,25 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.withTempFile = void 0;
-const promises_1 = __importDefault(require("fs/promises"));
-const os_1 = __importDefault(require("os"));
-const path_1 = __importDefault(require("path"));
-// Invokes the given handler with the path to a temporary file. The file
-// is deleted after the handler returns.
-const withTempFile = async (handler) => withTempDir(async (dir) => handler(path_1.default.join(dir, 'tempfile')));
-exports.withTempFile = withTempFile;
-// Invokes the given handler with a temporary directory. The directory is
-// deleted after the handler returns.
-const withTempDir = async (handler) => {
-    const tmpDir = await promises_1.default.realpath(os_1.default.tmpdir());
-    const dir = await promises_1.default.mkdtemp(tmpDir + path_1.default.sep);
-    try {
-        return await handler(dir);
-    }
-    finally {
-        await promises_1.default.rm(dir, { force: true, recursive: true, maxRetries: 3 });
-    }
-};
diff --git a/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js b/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
deleted file mode 100644
index 359d1f3ef385b..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/dist/utils/url.js
+++ /dev/null
@@ -1,13 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.join = join;
-const url_1 = require("url");
-function join(base, path) {
-    return new url_1.URL(ensureTrailingSlash(base) + removeLeadingSlash(path)).toString();
-}
-function ensureTrailingSlash(path) {
-    return path.endsWith('/') ? path : path + '/';
-}
-function removeLeadingSlash(path) {
-    return path.startsWith('/') ? path.slice(1) : path;
-}
diff --git a/node_modules/sigstore/node_modules/tuf-js/package.json b/node_modules/sigstore/node_modules/tuf-js/package.json
deleted file mode 100644
index c7f53556ac152..0000000000000
--- a/node_modules/sigstore/node_modules/tuf-js/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-  "name": "tuf-js",
-  "version": "4.0.0",
-  "description": "JavaScript implementation of The Update Framework (TUF)",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
-  "scripts": {
-    "build": "tsc --build tsconfig.build.json",
-    "clean": "rm -rf dist && rm tsconfig.build.tsbuildinfo",
-    "test": "jest"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/theupdateframework/tuf-js.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "keywords": [
-    "tuf",
-    "security",
-    "update"
-  ],
-  "author": "bdehamer@github.com",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/theupdateframework/tuf-js/issues"
-  },
-  "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
-  "devDependencies": {
-    "@tufjs/repo-mock": "4.0.0",
-    "@types/debug": "^4.1.12",
-    "@types/make-fetch-happen": "^10.0.4"
-  },
-  "dependencies": {
-    "@tufjs/models": "4.0.0",
-    "debug": "^4.4.1",
-    "make-fetch-happen": "^15.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  }
-}
diff --git a/node_modules/@tufjs/models/LICENSE b/node_modules/tuf-js/node_modules/@tufjs/models/LICENSE
similarity index 100%
rename from node_modules/@tufjs/models/LICENSE
rename to node_modules/tuf-js/node_modules/@tufjs/models/LICENSE
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/base.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/base.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/delegations.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js
diff --git a/node_modules/@tufjs/models/dist/error.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/error.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/file.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/file.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js
diff --git a/node_modules/@tufjs/models/dist/index.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/index.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/key.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/key.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/metadata.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/role.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/role.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/root.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/root.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/signature.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/snapshot.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/targets.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/timestamp.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js
diff --git a/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/utils/guard.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/dist/utils/index.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js
diff --git a/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/utils/key.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js
diff --git a/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/utils/oid.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js
diff --git a/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/utils/types.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js
diff --git a/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js
similarity index 100%
rename from node_modules/@tufjs/models/dist/utils/verify.js
rename to node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js
diff --git a/node_modules/sigstore/node_modules/@tufjs/models/package.json b/node_modules/tuf-js/node_modules/@tufjs/models/package.json
similarity index 100%
rename from node_modules/sigstore/node_modules/@tufjs/models/package.json
rename to node_modules/tuf-js/node_modules/@tufjs/models/package.json
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/LICENSE b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/LICENSE
rename to node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/entry.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/errors.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/index.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/key.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/cache/policy.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/fetch.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/index.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/options.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/pipeline.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/lib/remote.js
rename to node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
diff --git a/node_modules/sigstore/node_modules/make-fetch-happen/package.json b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
similarity index 100%
rename from node_modules/sigstore/node_modules/make-fetch-happen/package.json
rename to node_modules/tuf-js/node_modules/make-fetch-happen/package.json
diff --git a/node_modules/sigstore/node_modules/negotiator/HISTORY.md b/node_modules/tuf-js/node_modules/negotiator/HISTORY.md
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/HISTORY.md
rename to node_modules/tuf-js/node_modules/negotiator/HISTORY.md
diff --git a/node_modules/sigstore/node_modules/negotiator/LICENSE b/node_modules/tuf-js/node_modules/negotiator/LICENSE
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/LICENSE
rename to node_modules/tuf-js/node_modules/negotiator/LICENSE
diff --git a/node_modules/sigstore/node_modules/negotiator/index.js b/node_modules/tuf-js/node_modules/negotiator/index.js
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/index.js
rename to node_modules/tuf-js/node_modules/negotiator/index.js
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/charset.js b/node_modules/tuf-js/node_modules/negotiator/lib/charset.js
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/lib/charset.js
rename to node_modules/tuf-js/node_modules/negotiator/lib/charset.js
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/encoding.js b/node_modules/tuf-js/node_modules/negotiator/lib/encoding.js
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/lib/encoding.js
rename to node_modules/tuf-js/node_modules/negotiator/lib/encoding.js
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/language.js b/node_modules/tuf-js/node_modules/negotiator/lib/language.js
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/lib/language.js
rename to node_modules/tuf-js/node_modules/negotiator/lib/language.js
diff --git a/node_modules/sigstore/node_modules/negotiator/lib/mediaType.js b/node_modules/tuf-js/node_modules/negotiator/lib/mediaType.js
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/lib/mediaType.js
rename to node_modules/tuf-js/node_modules/negotiator/lib/mediaType.js
diff --git a/node_modules/sigstore/node_modules/negotiator/package.json b/node_modules/tuf-js/node_modules/negotiator/package.json
similarity index 100%
rename from node_modules/sigstore/node_modules/negotiator/package.json
rename to node_modules/tuf-js/node_modules/negotiator/package.json
diff --git a/node_modules/tuf-js/package.json b/node_modules/tuf-js/package.json
index 8fc7f37779421..c7f53556ac152 100644
--- a/node_modules/tuf-js/package.json
+++ b/node_modules/tuf-js/package.json
@@ -1,6 +1,6 @@
 {
   "name": "tuf-js",
-  "version": "3.1.0",
+  "version": "4.0.0",
   "description": "JavaScript implementation of The Update Framework (TUF)",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
@@ -28,16 +28,16 @@
   },
   "homepage": "https://github.com/theupdateframework/tuf-js/tree/main/packages/client#readme",
   "devDependencies": {
-    "@tufjs/repo-mock": "3.0.1",
+    "@tufjs/repo-mock": "4.0.0",
     "@types/debug": "^4.1.12",
     "@types/make-fetch-happen": "^10.0.4"
   },
   "dependencies": {
-    "@tufjs/models": "3.0.1",
+    "@tufjs/models": "4.0.0",
     "debug": "^4.4.1",
-    "make-fetch-happen": "^14.0.3"
+    "make-fetch-happen": "^15.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index c67a540b65760..feec34299945e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -93,7 +93,7 @@
         "@npmcli/promise-spawn": "^8.0.2",
         "@npmcli/redact": "^3.2.2",
         "@npmcli/run-script": "^10.0.0",
-        "@sigstore/tuf": "^3.1.1",
+        "@sigstore/tuf": "^4.0.0",
         "abbrev": "^3.0.1",
         "archy": "~1.0.0",
         "cacache": "^20.0.1",
@@ -5221,16 +5221,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.5.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
-      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@sigstore/core": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.0.0.tgz",
@@ -5242,9 +5232,9 @@
       }
     },
     "node_modules/@sigstore/protobuf-specs": {
-      "version": "0.4.3",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.4.3.tgz",
-      "integrity": "sha512-fk2zjD9117RL9BjqEwF7fwv7Q/P9yGsMV4MUJZ/DocaQJ6+3pKr+syBq1owU5Q5qGw5CUbXzm+4yJ2JVRDQeSA==",
+      "version": "0.5.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
+      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
       "inBundle": true,
       "license": "Apache-2.0",
       "engines": {
@@ -5269,16 +5259,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@sigstore/sign/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.5.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
-      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@sigstore/sign/node_modules/make-fetch-happen": {
       "version": "15.0.1",
       "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
@@ -5313,17 +5293,17 @@
       }
     },
     "node_modules/@sigstore/tuf": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.1.1.tgz",
-      "integrity": "sha512-eFFvlcBIoGwVkkwmTi/vEQFSva3xs5Ot3WmBcjgjVdiaoelBLQaQ/ZBfhlG0MnG0cmTYScPpk7eDdGDWUcFUmg==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
+      "integrity": "sha512-0QFuWDHOQmz7t66gfpfNO6aEjoFrdhkJaej/AOqb4kqWZVbPWFZifXZzkxyQBB1OwTbkhdT3LNpMFxwkTvf+2w==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@sigstore/protobuf-specs": "^0.4.1",
-        "tuf-js": "^3.0.1"
+        "@sigstore/protobuf-specs": "^0.5.0",
+        "tuf-js": "^4.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@sigstore/verify": {
@@ -5341,16 +5321,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@sigstore/verify/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.5.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
-      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@tufjs/canonical-json": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz",
@@ -5365,7 +5335,7 @@
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz",
       "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==",
-      "inBundle": true,
+      "dev": true,
       "license": "MIT",
       "dependencies": {
         "@tufjs/canonical-json": "2.0.0",
@@ -15278,92 +15248,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/sigstore/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.5.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
-      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/@sigstore/tuf": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
-      "integrity": "sha512-0QFuWDHOQmz7t66gfpfNO6aEjoFrdhkJaej/AOqb4kqWZVbPWFZifXZzkxyQBB1OwTbkhdT3LNpMFxwkTvf+2w==",
-      "inBundle": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.5.0",
-        "tuf-js": "^4.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/@tufjs/models": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
-      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/canonical-json": "2.0.0",
-        "minimatch": "^9.0.5"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/make-fetch-happen": {
-      "version": "15.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
-      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^20.0.1",
-        "http-cache-semantics": "^4.1.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/sigstore/node_modules/negotiator": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
-    "node_modules/sigstore/node_modules/tuf-js": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.0.0.tgz",
-      "integrity": "sha512-Lq7ieeGvXDXwpoSmOSgLWVdsGGV9J4a77oDTAPe/Ltrqnnm/ETaRlBAQTH5JatEh8KXuE6sddf9qAv1Q2282Hg==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/models": "4.0.0",
-        "debug": "^4.4.1",
-        "make-fetch-happen": "^15.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/smart-buffer": {
       "version": "4.2.0",
       "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
@@ -18500,18 +18384,65 @@
       }
     },
     "node_modules/tuf-js": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.1.0.tgz",
-      "integrity": "sha512-3T3T04WzowbwV2FDiGXBbr81t64g1MUGGJRgT4x5o97N+8ArdhVCAF9IxFrxuSJmM3E5Asn7nKHkao0ibcZXAg==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.0.0.tgz",
+      "integrity": "sha512-Lq7ieeGvXDXwpoSmOSgLWVdsGGV9J4a77oDTAPe/Ltrqnnm/ETaRlBAQTH5JatEh8KXuE6sddf9qAv1Q2282Hg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
-        "@tufjs/models": "3.0.1",
+        "@tufjs/models": "4.0.0",
         "debug": "^4.4.1",
-        "make-fetch-happen": "^14.0.3"
+        "make-fetch-happen": "^15.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/tuf-js/node_modules/@tufjs/models": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
+      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "@tufjs/canonical-json": "2.0.0",
+        "minimatch": "^9.0.5"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/tuf-js/node_modules/make-fetch-happen": {
+      "version": "15.0.1",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
+      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/agent": "^3.0.0",
+        "cacache": "^20.0.1",
+        "http-cache-semantics": "^4.1.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^1.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "ssri": "^12.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/tuf-js/node_modules/negotiator": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
       }
     },
     "node_modules/tunnel": {
diff --git a/package.json b/package.json
index 149a573dca677..a624a0b51b64a 100644
--- a/package.json
+++ b/package.json
@@ -60,7 +60,7 @@
     "@npmcli/promise-spawn": "^8.0.2",
     "@npmcli/redact": "^3.2.2",
     "@npmcli/run-script": "^10.0.0",
-    "@sigstore/tuf": "^3.1.1",
+    "@sigstore/tuf": "^4.0.0",
     "abbrev": "^3.0.1",
     "archy": "~1.0.0",
     "cacache": "^20.0.1",

From 66f64eb1426beaad314321c22b5debff64b2357a Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:47:29 -0700
Subject: [PATCH 16/63] deps: make-fetch-happen@15.0.2

---
 node_modules/.gitignore                       |   35 +-
 .../node_modules/@npmcli/agent/lib/agents.js  |  206 +++
 .../node_modules/@npmcli/agent/lib/dns.js     |   53 +
 .../node_modules/@npmcli/agent/lib/errors.js  |   61 +
 .../node_modules/@npmcli/agent/lib/index.js   |   56 +
 .../node_modules/@npmcli/agent/lib/options.js |   86 +
 .../node_modules/@npmcli/agent/lib/proxy.js   |   88 +
 .../node_modules/@npmcli/agent/package.json   |   60 +
 .../node_modules/chownr/LICENSE.md            |   63 -
 .../chownr/dist/commonjs/index.js             |   93 -
 .../node_modules/chownr/dist/esm/index.js     |   85 -
 .../node_modules/chownr/package.json          |   69 -
 .../node_modules/{tar => lru-cache}/LICENSE   |    2 +-
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../dist/commonjs/package.json                |    0
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ++++++++++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../dist/esm/package.json                     |    0
 .../node_modules/lru-cache/package.json       |  113 ++
 .../node_modules/minizlib/LICENSE             |   26 -
 .../minizlib/dist/commonjs/constants.js       |  123 --
 .../minizlib/dist/commonjs/index.js           |  392 -----
 .../minizlib/dist/commonjs/package.json       |    3 -
 .../minizlib/dist/esm/constants.js            |  117 --
 .../node_modules/minizlib/dist/esm/index.js   |  340 ----
 .../minizlib/dist/esm/package.json            |    3 -
 .../node_modules/minizlib/package.json        |   80 -
 .../node_modules/mkdirp/LICENSE               |   21 -
 .../node_modules/mkdirp/dist/cjs/package.json |   91 -
 .../node_modules/mkdirp/dist/cjs/src/bin.js   |   80 -
 .../mkdirp/dist/cjs/src/find-made.js          |   35 -
 .../node_modules/mkdirp/dist/cjs/src/index.js |   53 -
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |   79 -
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |   50 -
 .../mkdirp/dist/cjs/src/opts-arg.js           |   38 -
 .../mkdirp/dist/cjs/src/path-arg.js           |   28 -
 .../mkdirp/dist/cjs/src/use-native.js         |   17 -
 .../node_modules/mkdirp/dist/mjs/find-made.js |   30 -
 .../node_modules/mkdirp/dist/mjs/index.js     |   43 -
 .../mkdirp/dist/mjs/mkdirp-manual.js          |   75 -
 .../mkdirp/dist/mjs/mkdirp-native.js          |   46 -
 .../node_modules/mkdirp/dist/mjs/opts-arg.js  |   34 -
 .../node_modules/mkdirp/dist/mjs/package.json |    3 -
 .../node_modules/mkdirp/dist/mjs/path-arg.js  |   24 -
 .../mkdirp/dist/mjs/use-native.js             |   14 -
 .../node_modules/mkdirp/package.json          |   91 -
 .../node_modules/tar/dist/commonjs/create.js  |   83 -
 .../tar/dist/commonjs/cwd-error.js            |   18 -
 .../node_modules/tar/dist/commonjs/extract.js |   78 -
 .../tar/dist/commonjs/get-write-flag.js       |   29 -
 .../node_modules/tar/dist/commonjs/header.js  |  306 ----
 .../node_modules/tar/dist/commonjs/index.js   |   54 -
 .../tar/dist/commonjs/large-numbers.js        |   99 --
 .../node_modules/tar/dist/commonjs/list.js    |  136 --
 .../tar/dist/commonjs/make-command.js         |   61 -
 .../node_modules/tar/dist/commonjs/mkdir.js   |  209 ---
 .../tar/dist/commonjs/mode-fix.js             |   29 -
 .../tar/dist/commonjs/normalize-unicode.js    |   17 -
 .../dist/commonjs/normalize-windows-path.js   |   12 -
 .../node_modules/tar/dist/commonjs/options.js |   66 -
 .../node_modules/tar/dist/commonjs/pack.js    |  477 -----
 .../tar/dist/commonjs/package.json            |    3 -
 .../node_modules/tar/dist/commonjs/parse.js   |  599 -------
 .../tar/dist/commonjs/path-reservations.js    |  170 --
 .../node_modules/tar/dist/commonjs/pax.js     |  158 --
 .../tar/dist/commonjs/read-entry.js           |  140 --
 .../node_modules/tar/dist/commonjs/replace.js |  231 ---
 .../tar/dist/commonjs/strip-absolute-path.js  |   29 -
 .../dist/commonjs/strip-trailing-slashes.js   |   18 -
 .../tar/dist/commonjs/symlink-error.js        |   19 -
 .../node_modules/tar/dist/commonjs/types.js   |   50 -
 .../node_modules/tar/dist/commonjs/unpack.js  |  919 ----------
 .../node_modules/tar/dist/commonjs/update.js  |   33 -
 .../tar/dist/commonjs/warn-method.js          |   31 -
 .../tar/dist/commonjs/winchars.js             |   14 -
 .../tar/dist/commonjs/write-entry.js          |  689 --------
 .../node_modules/tar/dist/esm/create.js       |   77 -
 .../node_modules/tar/dist/esm/cwd-error.js    |   14 -
 .../node_modules/tar/dist/esm/extract.js      |   49 -
 .../tar/dist/esm/get-write-flag.js            |   23 -
 .../node_modules/tar/dist/esm/header.js       |  279 ---
 .../node_modules/tar/dist/esm/index.js        |   20 -
 .../tar/dist/esm/large-numbers.js             |   94 -
 .../node_modules/tar/dist/esm/list.js         |  106 --
 .../node_modules/tar/dist/esm/make-command.js |   57 -
 .../node_modules/tar/dist/esm/mkdir.js        |  201 ---
 .../node_modules/tar/dist/esm/mode-fix.js     |   25 -
 .../tar/dist/esm/normalize-unicode.js         |   13 -
 .../tar/dist/esm/normalize-windows-path.js    |    9 -
 .../node_modules/tar/dist/esm/options.js      |   54 -
 .../node_modules/tar/dist/esm/pack.js         |  445 -----
 .../node_modules/tar/dist/esm/package.json    |    3 -
 .../node_modules/tar/dist/esm/parse.js        |  595 -------
 .../tar/dist/esm/path-reservations.js         |  166 --
 .../node_modules/tar/dist/esm/pax.js          |  154 --
 .../node_modules/tar/dist/esm/read-entry.js   |  136 --
 .../node_modules/tar/dist/esm/replace.js      |  225 ---
 .../tar/dist/esm/strip-absolute-path.js       |   25 -
 .../tar/dist/esm/strip-trailing-slashes.js    |   14 -
 .../tar/dist/esm/symlink-error.js             |   15 -
 .../node_modules/tar/dist/esm/types.js        |   45 -
 .../node_modules/tar/dist/esm/unpack.js       |  888 ----------
 .../node_modules/tar/dist/esm/update.js       |   30 -
 .../node_modules/tar/dist/esm/warn-method.js  |   27 -
 .../node_modules/tar/dist/esm/winchars.js     |    9 -
 .../node_modules/tar/dist/esm/write-entry.js  |  657 -------
 .../node_modules/tar/package.json             |  325 ----
 .../node_modules/yallist/LICENSE.md           |   63 -
 .../yallist/dist/commonjs/index.js            |  384 ----
 .../yallist/dist/commonjs/package.json        |    3 -
 .../node_modules/yallist/dist/esm/index.js    |  379 ----
 .../yallist/dist/esm/package.json             |    3 -
 .../node_modules/yallist/package.json         |   68 -
 node_modules/make-fetch-happen/package.json   |   12 +-
 .../node_modules/cacache/LICENSE.md           |    0
 .../node_modules/cacache/lib/content/path.js  |    0
 .../node_modules/cacache/lib/content/read.js  |    0
 .../node_modules/cacache/lib/content/rm.js    |    0
 .../node_modules/cacache/lib/content/write.js |    0
 .../node_modules/cacache/lib/entry-index.js   |    0
 .../node_modules/cacache/lib/get.js           |    0
 .../node_modules/cacache/lib/index.js         |    0
 .../node_modules/cacache/lib/memoization.js   |    0
 .../node_modules/cacache/lib/put.js           |    0
 .../node_modules/cacache/lib/rm.js            |    0
 .../node_modules/cacache/lib/util/glob.js     |    0
 .../cacache/lib/util/hash-to-segments.js      |    0
 .../node_modules/cacache/lib/util/tmp.js      |    0
 .../node_modules/cacache/lib/verify.js        |    0
 .../node_modules/cacache/package.json         |    0
 .../node_modules/make-fetch-happen/LICENSE    |    0
 .../make-fetch-happen/lib/cache/entry.js      |    0
 .../make-fetch-happen/lib/cache/errors.js     |    0
 .../make-fetch-happen/lib/cache/index.js      |    0
 .../make-fetch-happen/lib/cache/key.js        |    0
 .../make-fetch-happen/lib/cache/policy.js     |    0
 .../make-fetch-happen/lib/fetch.js            |    0
 .../make-fetch-happen/lib/index.js            |    0
 .../make-fetch-happen/lib/options.js          |    0
 .../make-fetch-happen/lib/pipeline.js         |    0
 .../make-fetch-happen/lib/remote.js           |    0
 .../make-fetch-happen/package.json            |   10 +-
 .../node_modules/negotiator/HISTORY.md        |    0
 .../node_modules/negotiator/LICENSE           |    0
 .../node_modules/negotiator/index.js          |    0
 .../node_modules/negotiator/lib/charset.js    |    0
 .../node_modules/negotiator/lib/encoding.js   |    0
 .../node_modules/negotiator/lib/language.js   |    0
 .../node_modules/negotiator/lib/mediaType.js  |    0
 .../node_modules/negotiator/package.json      |    0
 .../node_modules/make-fetch-happen/LICENSE    |   16 -
 .../make-fetch-happen/lib/cache/entry.js      |  471 -----
 .../make-fetch-happen/lib/cache/errors.js     |   11 -
 .../make-fetch-happen/lib/cache/index.js      |   49 -
 .../make-fetch-happen/lib/cache/key.js        |   17 -
 .../make-fetch-happen/lib/cache/policy.js     |  161 --
 .../make-fetch-happen/lib/fetch.js            |  118 --
 .../make-fetch-happen/lib/index.js            |   41 -
 .../make-fetch-happen/lib/options.js          |   59 -
 .../make-fetch-happen/lib/pipeline.js         |   41 -
 .../make-fetch-happen/lib/remote.js           |  132 --
 .../make-fetch-happen/package.json            |   74 -
 .../node_modules/negotiator/HISTORY.md        |  114 --
 .../node_modules/negotiator/LICENSE           |   24 -
 .../node_modules/negotiator/index.js          |   83 -
 .../node_modules/negotiator/lib/charset.js    |  169 --
 .../node_modules/negotiator/lib/encoding.js   |  205 ---
 .../node_modules/negotiator/lib/language.js   |  179 --
 .../node_modules/negotiator/lib/mediaType.js  |  294 ----
 .../node_modules/negotiator/package.json      |   43 -
 .../node_modules/make-fetch-happen/LICENSE    |   16 -
 .../make-fetch-happen/lib/cache/entry.js      |  471 -----
 .../make-fetch-happen/lib/cache/errors.js     |   11 -
 .../make-fetch-happen/lib/cache/index.js      |   49 -
 .../make-fetch-happen/lib/cache/key.js        |   17 -
 .../make-fetch-happen/lib/cache/policy.js     |  161 --
 .../make-fetch-happen/lib/fetch.js            |  118 --
 .../make-fetch-happen/lib/index.js            |   41 -
 .../make-fetch-happen/lib/options.js          |   59 -
 .../make-fetch-happen/lib/pipeline.js         |   41 -
 .../make-fetch-happen/lib/remote.js           |  132 --
 .../make-fetch-happen/package.json            |   74 -
 .../tuf-js/node_modules/negotiator/HISTORY.md |  114 --
 .../tuf-js/node_modules/negotiator/LICENSE    |   24 -
 .../tuf-js/node_modules/negotiator/index.js   |   83 -
 .../node_modules/negotiator/lib/charset.js    |  169 --
 .../node_modules/negotiator/lib/encoding.js   |  205 ---
 .../node_modules/negotiator/lib/language.js   |  179 --
 .../node_modules/negotiator/lib/mediaType.js  |  294 ----
 .../node_modules/negotiator/package.json      |   43 -
 package-lock.json                             |  266 +--
 package.json                                  |    2 +-
 193 files changed, 3951 insertions(+), 17532 deletions(-)
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md
 delete mode 100644 node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/chownr/package.json
 rename node_modules/make-fetch-happen/node_modules/{tar => lru-cache}/LICENSE (92%)
 create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js
 rename node_modules/make-fetch-happen/node_modules/{chownr => lru-cache}/dist/commonjs/package.json (100%)
 create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js
 rename node_modules/make-fetch-happen/node_modules/{chownr => lru-cache}/dist/esm/package.json (100%)
 create mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/minizlib/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json
 delete mode 100755 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/mkdirp/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/tar/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md
 delete mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/yallist/package.json
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/LICENSE.md (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/content/path.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/content/read.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/content/rm.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/content/write.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/entry-index.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/get.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/index.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/memoization.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/put.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/rm.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/util/glob.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/util/hash-to-segments.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/util/tmp.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/lib/verify.js (100%)
 rename node_modules/{make-fetch-happen => node-gyp}/node_modules/cacache/package.json (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/LICENSE (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/cache/entry.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/cache/errors.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/cache/index.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/cache/key.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/cache/policy.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/fetch.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/index.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/options.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/pipeline.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/lib/remote.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/make-fetch-happen/package.json (91%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/HISTORY.md (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/LICENSE (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/index.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/lib/charset.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/lib/encoding.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/lib/language.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/lib/mediaType.js (100%)
 rename node_modules/{@sigstore/sign => node-gyp}/node_modules/negotiator/package.json (100%)
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/HISTORY.md
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/LICENSE
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/lib/charset.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/lib/encoding.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/lib/language.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/lib/mediaType.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/negotiator/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
 delete mode 100644 node_modules/tuf-js/node_modules/make-fetch-happen/package.json
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/HISTORY.md
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/LICENSE
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/index.js
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/lib/charset.js
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/lib/encoding.js
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/lib/language.js
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/lib/mediaType.js
 delete mode 100644 node_modules/tuf-js/node_modules/negotiator/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 96b8e7707c35e..8898459263936 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -60,10 +60,6 @@
 !/@sigstore/core
 !/@sigstore/protobuf-specs
 !/@sigstore/sign
-!/@sigstore/sign/node_modules/
-/@sigstore/sign/node_modules/*
-!/@sigstore/sign/node_modules/make-fetch-happen
-!/@sigstore/sign/node_modules/negotiator
 !/@sigstore/tuf
 !/@sigstore/verify
 !/@tufjs/
@@ -143,13 +139,11 @@
 !/make-fetch-happen
 !/make-fetch-happen/node_modules/
 /make-fetch-happen/node_modules/*
-!/make-fetch-happen/node_modules/cacache
-!/make-fetch-happen/node_modules/chownr
-!/make-fetch-happen/node_modules/minizlib
-!/make-fetch-happen/node_modules/mkdirp
+!/make-fetch-happen/node_modules/@npmcli/
+/make-fetch-happen/node_modules/@npmcli/*
+!/make-fetch-happen/node_modules/@npmcli/agent
+!/make-fetch-happen/node_modules/lru-cache
 !/make-fetch-happen/node_modules/negotiator
-!/make-fetch-happen/node_modules/tar
-!/make-fetch-happen/node_modules/yallist
 !/minimatch
 !/minipass-collect
 !/minipass-fetch
@@ -179,9 +173,12 @@
 !/node-gyp
 !/node-gyp/node_modules/
 /node-gyp/node_modules/*
+!/node-gyp/node_modules/cacache
 !/node-gyp/node_modules/chownr
+!/node-gyp/node_modules/make-fetch-happen
 !/node-gyp/node_modules/minizlib
 !/node-gyp/node_modules/mkdirp
+!/node-gyp/node_modules/negotiator
 !/node-gyp/node_modules/tar
 !/node-gyp/node_modules/yallist
 !/nopt
@@ -203,9 +200,7 @@
 /npm-registry-fetch/node_modules/*
 !/npm-registry-fetch/node_modules/hosted-git-info
 !/npm-registry-fetch/node_modules/lru-cache
-!/npm-registry-fetch/node_modules/make-fetch-happen
 !/npm-registry-fetch/node_modules/minizlib
-!/npm-registry-fetch/node_modules/negotiator
 !/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
 !/p-map
@@ -216,18 +211,6 @@
 !/pacote/node_modules/@npmcli/
 /pacote/node_modules/@npmcli/*
 !/pacote/node_modules/@npmcli/git
-!/pacote/node_modules/@npmcli/run-script
-!/pacote/node_modules/@sigstore/
-/pacote/node_modules/@sigstore/*
-!/pacote/node_modules/@sigstore/bundle
-!/pacote/node_modules/@sigstore/core
-!/pacote/node_modules/@sigstore/protobuf-specs
-!/pacote/node_modules/@sigstore/sign
-!/pacote/node_modules/@sigstore/tuf
-!/pacote/node_modules/@sigstore/verify
-!/pacote/node_modules/@tufjs/
-/pacote/node_modules/@tufjs/*
-!/pacote/node_modules/@tufjs/models
 !/pacote/node_modules/chownr
 !/pacote/node_modules/hosted-git-info
 !/pacote/node_modules/lru-cache
@@ -235,8 +218,6 @@
 !/pacote/node_modules/mkdirp
 !/pacote/node_modules/npm-package-arg
 !/pacote/node_modules/npm-pick-manifest
-!/pacote/node_modules/npm-registry-fetch
-!/pacote/node_modules/sigstore
 !/pacote/node_modules/tar
 !/pacote/node_modules/yallist
 !/parse-conflict-json
@@ -298,8 +279,6 @@
 !/tuf-js/node_modules/@tufjs/
 /tuf-js/node_modules/@tufjs/*
 !/tuf-js/node_modules/@tufjs/models
-!/tuf-js/node_modules/make-fetch-happen
-!/tuf-js/node_modules/negotiator
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js
new file mode 100644
index 0000000000000..c541b93001517
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js
@@ -0,0 +1,206 @@
+'use strict'
+
+const net = require('net')
+const tls = require('tls')
+const { once } = require('events')
+const timers = require('timers/promises')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
+const Errors = require('./errors.js')
+const { Agent: AgentBase } = require('agent-base')
+
+module.exports = class Agent extends AgentBase {
+  #options
+  #timeouts
+  #proxy
+  #noProxy
+  #ProxyAgent
+
+  constructor (options = {}) {
+    const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
+
+    super(normalizedOptions)
+
+    this.#options = normalizedOptions
+    this.#timeouts = timeouts
+
+    if (proxy) {
+      this.#proxy = new URL(proxy)
+      this.#noProxy = noProxy
+      this.#ProxyAgent = getProxyAgent(proxy)
+    }
+  }
+
+  get proxy () {
+    return this.#proxy ? { url: this.#proxy } : {}
+  }
+
+  #getProxy (options) {
+    if (!this.#proxy) {
+      return
+    }
+
+    const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
+      proxy: this.#proxy,
+      noProxy: this.#noProxy,
+    })
+
+    if (!proxy) {
+      return
+    }
+
+    const cacheKey = cacheOptions({
+      ...options,
+      ...this.#options,
+      timeouts: this.#timeouts,
+      proxy,
+    })
+
+    if (proxyCache.has(cacheKey)) {
+      return proxyCache.get(cacheKey)
+    }
+
+    let ProxyAgent = this.#ProxyAgent
+    if (Array.isArray(ProxyAgent)) {
+      ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
+    }
+
+    const proxyAgent = new ProxyAgent(proxy, {
+      ...this.#options,
+      socketOptions: { family: this.#options.family },
+    })
+    proxyCache.set(cacheKey, proxyAgent)
+
+    return proxyAgent
+  }
+
+  // takes an array of promises and races them against the connection timeout
+  // which will throw the necessary error if it is hit. This will return the
+  // result of the promise race.
+  async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
+    if (timeout) {
+      const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
+        .then(() => {
+          throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
+        }).catch((err) => {
+          if (err.name === 'AbortError') {
+            return
+          }
+          throw err
+        })
+      promises.push(connectionTimeout)
+    }
+
+    let result
+    try {
+      result = await Promise.race(promises)
+      ac.abort()
+    } catch (err) {
+      ac.abort()
+      throw err
+    }
+    return result
+  }
+
+  async connect (request, options) {
+    // if the connection does not have its own lookup function
+    // set, then use the one from our options
+    options.lookup ??= this.#options.lookup
+
+    let socket
+    let timeout = this.#timeouts.connection
+    const isSecureEndpoint = this.isSecureEndpoint(options)
+
+    const proxy = this.#getProxy(options)
+    if (proxy) {
+      // some of the proxies will wait for the socket to fully connect before
+      // returning so we have to await this while also racing it against the
+      // connection timeout.
+      const start = Date.now()
+      socket = await this.#timeoutConnection({
+        options,
+        timeout,
+        promises: [proxy.connect(request, options)],
+      })
+      // see how much time proxy.connect took and subtract it from
+      // the timeout
+      if (timeout) {
+        timeout = timeout - (Date.now() - start)
+      }
+    } else {
+      socket = (isSecureEndpoint ? tls : net).connect(options)
+    }
+
+    socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
+    socket.setNoDelay(this.keepAlive)
+
+    const abortController = new AbortController()
+    const { signal } = abortController
+
+    const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
+      ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
+      : Promise.resolve()
+
+    await this.#timeoutConnection({
+      options,
+      timeout,
+      promises: [
+        connectPromise,
+        once(socket, 'error', { signal }).then((err) => {
+          throw err[0]
+        }),
+      ],
+    }, abortController)
+
+    if (this.#timeouts.idle) {
+      socket.setTimeout(this.#timeouts.idle, () => {
+        socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
+      })
+    }
+
+    return socket
+  }
+
+  addRequest (request, options) {
+    const proxy = this.#getProxy(options)
+    // it would be better to call proxy.addRequest here but this causes the
+    // http-proxy-agent to call its super.addRequest which causes the request
+    // to be added to the agent twice. since we only support 3 agents
+    // currently (see the required agents in proxy.js) we have manually
+    // checked that the only public methods we need to call are called in the
+    // next block. this could change in the future and presumably we would get
+    // failing tests until we have properly called the necessary methods on
+    // each of our proxy agents
+    if (proxy?.setRequestProps) {
+      proxy.setRequestProps(request, options)
+    }
+
+    request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
+
+    if (this.#timeouts.response) {
+      let responseTimeout
+      request.once('finish', () => {
+        setTimeout(() => {
+          request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
+        }, this.#timeouts.response)
+      })
+      request.once('response', () => {
+        clearTimeout(responseTimeout)
+      })
+    }
+
+    if (this.#timeouts.transfer) {
+      let transferTimeout
+      request.once('response', (res) => {
+        setTimeout(() => {
+          res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
+        }, this.#timeouts.transfer)
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      })
+    }
+
+    return super.addRequest(request, options)
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 0000000000000..3c6946c566d73
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const dns = require('dns')
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+  family = 0,
+  hints = dns.ADDRCONFIG,
+  all = false,
+  verbatim = undefined,
+  ttl = 5 * 60 * 1000,
+  lookup = dns.lookup,
+}) => ({
+  // hints and lookup are returned since both are top level properties to (net|tls).connect
+  hints,
+  lookup: (hostname, ...args) => {
+    const callback = args.pop() // callback is always last arg
+    const lookupOptions = args[0] ?? {}
+
+    const options = {
+      family,
+      hints,
+      all,
+      verbatim,
+      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
+    }
+
+    const key = JSON.stringify({ hostname, ...options })
+
+    if (cache.has(key)) {
+      const cached = cache.get(key)
+      return process.nextTick(callback, null, ...cached)
+    }
+
+    lookup(hostname, options, (err, ...result) => {
+      if (err) {
+        return callback(err)
+      }
+
+      cache.set(key, result, { ttl })
+      return callback(null, ...result)
+    })
+  },
+})
+
+module.exports = {
+  cache,
+  getOptions,
+}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 0000000000000..70475aec8eb35
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,61 @@
+'use strict'
+
+class InvalidProxyProtocolError extends Error {
+  constructor (url) {
+    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+    this.code = 'EINVALIDPROXY'
+    this.proxy = url
+  }
+}
+
+class ConnectionTimeoutError extends Error {
+  constructor (host) {
+    super(`Timeout connecting to host \`${host}\``)
+    this.code = 'ECONNECTIONTIMEOUT'
+    this.host = host
+  }
+}
+
+class IdleTimeoutError extends Error {
+  constructor (host) {
+    super(`Idle timeout reached for host \`${host}\``)
+    this.code = 'EIDLETIMEOUT'
+    this.host = host
+  }
+}
+
+class ResponseTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Response timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `connecting to host \`${request.host}\``
+    super(msg)
+    this.code = 'ERESPONSETIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+class TransferTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Transfer timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `for \`${request.host}\``
+    super(msg)
+    this.code = 'ETRANSFERTIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+module.exports = {
+  InvalidProxyProtocolError,
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js
new file mode 100644
index 0000000000000..b33d6eaef07a2
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, proxyCache } = require('./proxy.js')
+const dns = require('./dns.js')
+const Agent = require('./agents.js')
+
+const agentCache = new LRUCache({ max: 20 })
+
+const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
+  // false has meaning so this can't be a simple truthiness check
+  if (agent != null) {
+    return agent
+  }
+
+  url = new URL(url)
+
+  const proxyForUrl = getProxy(url, { proxy, noProxy })
+  const normalizedOptions = {
+    ...normalizeOptions(options),
+    proxy: proxyForUrl,
+  }
+
+  const cacheKey = cacheOptions({
+    ...normalizedOptions,
+    secureEndpoint: url.protocol === 'https:',
+  })
+
+  if (agentCache.has(cacheKey)) {
+    return agentCache.get(cacheKey)
+  }
+
+  const newAgent = new Agent(normalizedOptions)
+  agentCache.set(cacheKey, newAgent)
+
+  return newAgent
+}
+
+module.exports = {
+  getAgent,
+  Agent,
+  // these are exported for backwards compatibility
+  HttpAgent: Agent,
+  HttpsAgent: Agent,
+  cache: {
+    proxy: proxyCache,
+    agent: agentCache,
+    dns: dns.cache,
+    clear: () => {
+      proxyCache.clear()
+      agentCache.clear()
+      dns.cache.clear()
+    },
+  },
+}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js
new file mode 100644
index 0000000000000..0bf53f725f084
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js
@@ -0,0 +1,86 @@
+'use strict'
+
+const dns = require('./dns')
+
+const normalizeOptions = (opts) => {
+  const family = parseInt(opts.family ?? '0', 10)
+  const keepAlive = opts.keepAlive ?? true
+
+  const normalized = {
+    // nodejs http agent options. these are all the defaults
+    // but kept here to increase the likelihood of cache hits
+    // https://nodejs.org/api/http.html#new-agentoptions
+    keepAliveMsecs: keepAlive ? 1000 : undefined,
+    maxSockets: opts.maxSockets ?? 15,
+    maxTotalSockets: Infinity,
+    maxFreeSockets: keepAlive ? 256 : undefined,
+    scheduling: 'fifo',
+    // then spread the rest of the options
+    ...opts,
+    // we already set these to their defaults that we want
+    family,
+    keepAlive,
+    // our custom timeout options
+    timeouts: {
+      // the standard timeout option is mapped to our idle timeout
+      // and then deleted below
+      idle: opts.timeout ?? 0,
+      connection: 0,
+      response: 0,
+      transfer: 0,
+      ...opts.timeouts,
+    },
+    // get the dns options that go at the top level of socket connection
+    ...dns.getOptions({ family, ...opts.dns }),
+  }
+
+  // remove timeout since we already used it to set our own idle timeout
+  delete normalized.timeout
+
+  return normalized
+}
+
+const createKey = (obj) => {
+  let key = ''
+  const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
+  for (let [k, v] of sorted) {
+    if (v == null) {
+      v = 'null'
+    } else if (v instanceof URL) {
+      v = v.toString()
+    } else if (typeof v === 'object') {
+      v = createKey(v)
+    }
+    key += `${k}:${v}:`
+  }
+  return key
+}
+
+const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
+  secureEndpoint: !!secureEndpoint,
+  // socket connect options
+  family: options.family,
+  hints: options.hints,
+  localAddress: options.localAddress,
+  // tls specific connect options
+  strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
+  ca: secureEndpoint ? options.ca : null,
+  cert: secureEndpoint ? options.cert : null,
+  key: secureEndpoint ? options.key : null,
+  // http agent options
+  keepAlive: options.keepAlive,
+  keepAliveMsecs: options.keepAliveMsecs,
+  maxSockets: options.maxSockets,
+  maxTotalSockets: options.maxTotalSockets,
+  maxFreeSockets: options.maxFreeSockets,
+  scheduling: options.scheduling,
+  // timeout options
+  timeouts: options.timeouts,
+  // proxy
+  proxy: options.proxy,
+})
+
+module.exports = {
+  normalizeOptions,
+  cacheOptions,
+}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js
new file mode 100644
index 0000000000000..6272e929e57bc
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js
@@ -0,0 +1,88 @@
+'use strict'
+
+const { HttpProxyAgent } = require('http-proxy-agent')
+const { HttpsProxyAgent } = require('https-proxy-agent')
+const { SocksProxyAgent } = require('socks-proxy-agent')
+const { LRUCache } = require('lru-cache')
+const { InvalidProxyProtocolError } = require('./errors.js')
+
+const PROXY_CACHE = new LRUCache({ max: 20 })
+
+const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
+
+const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
+
+const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
+  key = key.toLowerCase()
+  if (PROXY_ENV_KEYS.has(key)) {
+    acc[key] = value
+  }
+  return acc
+}, {})
+
+const getProxyAgent = (url) => {
+  url = new URL(url)
+
+  const protocol = url.protocol.slice(0, -1)
+  if (SOCKS_PROTOCOLS.has(protocol)) {
+    return SocksProxyAgent
+  }
+  if (protocol === 'https' || protocol === 'http') {
+    return [HttpProxyAgent, HttpsProxyAgent]
+  }
+
+  throw new InvalidProxyProtocolError(url)
+}
+
+const isNoProxy = (url, noProxy) => {
+  if (typeof noProxy === 'string') {
+    noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
+  }
+
+  if (!noProxy || !noProxy.length) {
+    return false
+  }
+
+  const hostSegments = url.hostname.split('.').reverse()
+
+  return noProxy.some((no) => {
+    const noSegments = no.split('.').filter(Boolean).reverse()
+    if (!noSegments.length) {
+      return false
+    }
+
+    for (let i = 0; i < noSegments.length; i++) {
+      if (hostSegments[i] !== noSegments[i]) {
+        return false
+      }
+    }
+
+    return true
+  })
+}
+
+const getProxy = (url, { proxy, noProxy }) => {
+  url = new URL(url)
+
+  if (!proxy) {
+    proxy = url.protocol === 'https:'
+      ? PROXY_ENV.https_proxy
+      : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
+  }
+
+  if (!noProxy) {
+    noProxy = PROXY_ENV.no_proxy
+  }
+
+  if (!proxy || isNoProxy(url, noProxy)) {
+    return null
+  }
+
+  return new URL(proxy)
+}
+
+module.exports = {
+  getProxyAgent,
+  getProxy,
+  proxyCache: PROXY_CACHE,
+}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json
new file mode 100644
index 0000000000000..67670a0c1c484
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json
@@ -0,0 +1,60 @@
+{
+  "name": "@npmcli/agent",
+  "version": "4.0.0",
+  "description": "the http/https agent used by the npm cli",
+  "main": "lib/index.js",
+  "scripts": {
+    "gencerts": "bash scripts/create-cert.sh",
+    "test": "tap",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run eslint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/agent/issues"
+  },
+  "homepage": "https://github.com/npm/agent#readme",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^20.17.0 || >=22.9.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.25.0",
+    "publish": "true"
+  },
+  "dependencies": {
+    "agent-base": "^7.1.0",
+    "http-proxy-agent": "^7.0.0",
+    "https-proxy-agent": "^7.0.1",
+    "lru-cache": "^11.2.1",
+    "socks-proxy-agent": "^8.0.3"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.25.0",
+    "minipass-fetch": "^4.0.1",
+    "nock": "^14.0.3",
+    "socksv5": "^0.0.6",
+    "tap": "^16.3.0"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/agent.git"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md b/node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md
deleted file mode 100644
index 881248b6d7f0c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/chownr/LICENSE.md
+++ /dev/null
@@ -1,63 +0,0 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js
deleted file mode 100644
index 6a7b68d5eac26..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/index.js
+++ /dev/null
@@ -1,93 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.chownrSync = exports.chownr = void 0;
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const lchownSync = (path, uid, gid) => {
-    try {
-        return node_fs_1.default.lchownSync(path, uid, gid);
-    }
-    catch (er) {
-        if (er?.code !== 'ENOENT')
-            throw er;
-    }
-};
-const chown = (cpath, uid, gid, cb) => {
-    node_fs_1.default.lchown(cpath, uid, gid, er => {
-        // Skip ENOENT error
-        cb(er && er?.code !== 'ENOENT' ? er : null);
-    });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
-    if (child.isDirectory()) {
-        (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => {
-            if (er)
-                return cb(er);
-            const cpath = node_path_1.default.resolve(p, child.name);
-            chown(cpath, uid, gid, cb);
-        });
-    }
-    else {
-        const cpath = node_path_1.default.resolve(p, child.name);
-        chown(cpath, uid, gid, cb);
-    }
-};
-const chownr = (p, uid, gid, cb) => {
-    node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => {
-        // any error other than ENOTDIR or ENOTSUP means it's not readable,
-        // or doesn't exist.  give up.
-        if (er) {
-            if (er.code === 'ENOENT')
-                return cb();
-            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-                return cb(er);
-        }
-        if (er || !children.length)
-            return chown(p, uid, gid, cb);
-        let len = children.length;
-        let errState = null;
-        const then = (er) => {
-            /* c8 ignore start */
-            if (errState)
-                return;
-            /* c8 ignore stop */
-            if (er)
-                return cb((errState = er));
-            if (--len === 0)
-                return chown(p, uid, gid, cb);
-        };
-        for (const child of children) {
-            chownrKid(p, child, uid, gid, then);
-        }
-    });
-};
-exports.chownr = chownr;
-const chownrKidSync = (p, child, uid, gid) => {
-    if (child.isDirectory())
-        (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid);
-    lchownSync(node_path_1.default.resolve(p, child.name), uid, gid);
-};
-const chownrSync = (p, uid, gid) => {
-    let children;
-    try {
-        children = node_fs_1.default.readdirSync(p, { withFileTypes: true });
-    }
-    catch (er) {
-        const e = er;
-        if (e?.code === 'ENOENT')
-            return;
-        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
-            return lchownSync(p, uid, gid);
-        else
-            throw e;
-    }
-    for (const child of children) {
-        chownrKidSync(p, child, uid, gid);
-    }
-    return lchownSync(p, uid, gid);
-};
-exports.chownrSync = chownrSync;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js
deleted file mode 100644
index 5c2815297a67c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/index.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import fs from 'node:fs';
-import path from 'node:path';
-const lchownSync = (path, uid, gid) => {
-    try {
-        return fs.lchownSync(path, uid, gid);
-    }
-    catch (er) {
-        if (er?.code !== 'ENOENT')
-            throw er;
-    }
-};
-const chown = (cpath, uid, gid, cb) => {
-    fs.lchown(cpath, uid, gid, er => {
-        // Skip ENOENT error
-        cb(er && er?.code !== 'ENOENT' ? er : null);
-    });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
-    if (child.isDirectory()) {
-        chownr(path.resolve(p, child.name), uid, gid, (er) => {
-            if (er)
-                return cb(er);
-            const cpath = path.resolve(p, child.name);
-            chown(cpath, uid, gid, cb);
-        });
-    }
-    else {
-        const cpath = path.resolve(p, child.name);
-        chown(cpath, uid, gid, cb);
-    }
-};
-export const chownr = (p, uid, gid, cb) => {
-    fs.readdir(p, { withFileTypes: true }, (er, children) => {
-        // any error other than ENOTDIR or ENOTSUP means it's not readable,
-        // or doesn't exist.  give up.
-        if (er) {
-            if (er.code === 'ENOENT')
-                return cb();
-            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-                return cb(er);
-        }
-        if (er || !children.length)
-            return chown(p, uid, gid, cb);
-        let len = children.length;
-        let errState = null;
-        const then = (er) => {
-            /* c8 ignore start */
-            if (errState)
-                return;
-            /* c8 ignore stop */
-            if (er)
-                return cb((errState = er));
-            if (--len === 0)
-                return chown(p, uid, gid, cb);
-        };
-        for (const child of children) {
-            chownrKid(p, child, uid, gid, then);
-        }
-    });
-};
-const chownrKidSync = (p, child, uid, gid) => {
-    if (child.isDirectory())
-        chownrSync(path.resolve(p, child.name), uid, gid);
-    lchownSync(path.resolve(p, child.name), uid, gid);
-};
-export const chownrSync = (p, uid, gid) => {
-    let children;
-    try {
-        children = fs.readdirSync(p, { withFileTypes: true });
-    }
-    catch (er) {
-        const e = er;
-        if (e?.code === 'ENOENT')
-            return;
-        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
-            return lchownSync(p, uid, gid);
-        else
-            throw e;
-    }
-    for (const child of children) {
-        chownrKidSync(p, child, uid, gid);
-    }
-    return lchownSync(p, uid, gid);
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/package.json b/node_modules/make-fetch-happen/node_modules/chownr/package.json
deleted file mode 100644
index 09aa6b2e2e576..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/chownr/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "name": "chownr",
-  "description": "like `chown -R`",
-  "version": "3.0.0",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/chownr.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "@types/node": "^20.12.5",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.12"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "license": "BlueOak-1.0.0",
-  "engines": {
-    "node": ">=18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/LICENSE b/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE
similarity index 92%
rename from node_modules/make-fetch-happen/node_modules/tar/LICENSE
rename to node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE
index 19129e315fe59..f785757cd63f8 100644
--- a/node_modules/make-fetch-happen/node_modules/tar/LICENSE
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE
@@ -1,6 +1,6 @@
 The ISC License
 
-Copyright (c) Isaac Z. Schlueter and Contributors
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
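As a rough usage sketch of the iteration methods above (the cache contents here are hypothetical):

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 3 })
    cache.set('a', 1).set('b', 2).set('c', 3)
    for (const [key, value] of cache) {
      // most recently used first: ['c', 3], ['b', 2], ['a', 1]
      console.log(key, value)
    }
    const oldestFirst = [...cache.rkeys()] // least recently used first: ['a', 'b', 'c']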
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
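A small sketch of `find()` as documented above, with hypothetical entries; note that `find()` returns via `get()`, so the match's recency is updated:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 10 })
    cache.set('a', { n: 1 })
    cache.set('b', { n: 2 })
    const hit = cache.find((value, key) => value.n === 2) // { n: 2 }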
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
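A sketch of `forEach()` and `rforEach()`, which walk the entries without touching recency (keys and values hypothetical):

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 10 })
    cache.set('a', 1)
    cache.set('b', 2)
    cache.forEach((value, key) => console.log(key, value))  // 'b' 2, then 'a' 1
    cache.rforEach((value, key) => console.log(key, value)) // 'a' 1, then 'b' 2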
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
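`purgeStale()` in use, assuming a short hypothetical TTL:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 10, ttl: 50 })
    cache.set('a', 1)
    setTimeout(() => {
      console.log(cache.purgeStale()) // true: the expired entry was removed
      console.log(cache.size)         // 0
    }, 100)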
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
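For example, with a hypothetical TTL-bound cache, `info()` reports the remaining time to live rather than the stored start timestamp:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 10, ttl: 1000 })
    cache.set('a', 1)
    const entry = cache.info('a')
    // entry.value === 1
    // entry.ttl   -> remaining ms (negative once expired)
    // entry.start -> Date.now() at the moment info() was called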
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
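A round-trip sketch of `dump()` and `load()`, assuming both caches are constructed with the same hypothetical options:

    const { LRUCache } = require('lru-cache')
    const src = new LRUCache({ max: 10, ttl: 60_000 })
    src.set('a', 1)
    src.set('b', 2)
    const snapshot = src.dump() // array of [key, entry], least recently used first
    const copy = new LRUCache({ max: 10, ttl: 60_000 })
    copy.load(snapshot)         // entries and remaining TTLs are reconstructed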
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
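A sketch of a size-bounded `set()` call with a `status` object, using a hypothetical string cache:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({
      maxSize: 100,
      sizeCalculation: (value) => value.length,
    })
    const status = {}
    cache.set('greeting', 'hello world', { status })
    // status.set === 'add', status.entrySize === 11, status.totalCalculatedSize === 11
    cache.set('too-big', 'x'.repeat(1000), { status })
    // status.set === 'miss', status.maxEntrySizeExceeded === true
    // (maxEntrySize defaults to maxSize when not set explicitly)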
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set to `true` in either
+     * the options or the constructor.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
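`has()` with a `status` object, using hypothetical keys:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 10, ttl: 100 })
    cache.set('a', 1)
    const status = {}
    cache.has('a', { status }) // true,  status.has === 'hit'
    cache.has('b', { status }) // false, status.has === 'miss'
    // after the TTL elapses: cache.has('a', { status }) -> false, status.has === 'stale'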
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
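`peek()` leaves recency alone, so a peeked entry can still be the next eviction candidate; a small hypothetical sequence:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 2 })
    cache.set('a', 1)
    cache.set('b', 2)
    cache.peek('a')   // 1, recency unchanged
    cache.set('c', 3) // 'a' is still least recently used, so it is evicted
    cache.get('a')    // undefined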
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
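A sketch of `fetch()` wired to a hypothetical async loader; the URL and loader body are illustrative only:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({
      max: 100,
      ttl: 60_000,
      allowStale: true,
      fetchMethod: async (key, staleValue, { signal }) => {
        // the signal aborts if the entry is evicted, replaced, or deleted mid-flight
        const res = await fetch(`https://example.com/items/${key}`, { signal })
        return res.json()
      },
    })
    // miss -> dispatches fetchMethod; hit -> returns the cached value;
    // stale + allowStale -> returns the stale value while refreshing in the background
    cache.fetch('widget-1').then(item => console.log(item))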
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
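`memo()` with a hypothetical `memoMethod`; `expensiveComputation` is an assumed function, not part of this module:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({
      max: 100,
      memoMethod: (key, staleValue, { context }) => expensiveComputation(key, context),
    })
    const first = cache.memo('answer', { context: { depth: 2 } }) // computed and cached
    const again = cache.memo('answer')                            // served from cache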
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
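`get()` with `allowStale`, assuming a short hypothetical TTL:

    const { LRUCache } = require('lru-cache')
    const cache = new LRUCache({ max: 10, ttl: 50, allowStale: true })
    cache.set('a', 1)
    setTimeout(() => {
      cache.get('a') // 1: the stale value is returned once, then the entry is deleted
      cache.get('a') // undefined
    }, 100)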
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/chownr/dist/commonjs/package.json
rename to node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Will return false if the item is stale, even though it is technically
+     * in the cache.
+     *
+     * Check if a key is in the cache, without updating the recency of
+     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     *
+     * Will not update item age unless
+     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
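For reference, a minimal usage sketch of the LRUCache class whose compiled dist file is vendored above (assuming the standard lru-cache API visible in that file; the import specifier, keys, and values below are illustrative only and are not part of the vendored code or of this patch):

  // Illustrative sketch only; assumes the lru-cache API shown in the dist file above.
  import { LRUCache } from 'lru-cache'

  const cache = new LRUCache({
    max: 500,           // the constructor requires at least one of max, maxSize, or ttl
    ttl: 5 * 60 * 1000, // entries become stale after five minutes
  })

  cache.set('pkg:metadata', { fetchedAt: Date.now() })
  cache.get('pkg:metadata')             // returns the value and updates recency
  cache.getRemainingTTL('pkg:metadata') // ms of TTL left; Infinity if the entry has no TTL
  cache.has('missing-key')              // false; has() checks presence without updating recency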
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/make-fetch-happen/node_modules/chownr/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/chownr/dist/esm/package.json
rename to node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/LICENSE b/node_modules/make-fetch-happen/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc9..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index b4906d2783372..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,392 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const realZlib = __importStar(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            (0, assert_1.default)(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = buffer_1.Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        (0, assert_1.default)(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
-            (0, assert_1.default)(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index f33586a8ab0ec..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,340 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import * as realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            assert(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        assert(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-export class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
-        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants.Z_SYNC_FLUSH);
-            assert(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-export class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-export class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-export class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-export class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-export class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-export class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-export class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/minizlib/package.json b/node_modules/make-fetch-happen/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE b/node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 0a034db7a73b5..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json
deleted file mode 100644
index 9d04a66e16cd9..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-    "name": "mkdirp",
-    "description": "Recursively mkdir, like `mkdir -p`",
-    "version": "3.0.1",
-    "keywords": [
-        "mkdir",
-        "directory",
-        "make dir",
-        "make",
-        "dir",
-        "recursive",
-        "native"
-    ],
-    "bin": "./dist/cjs/src/bin.js",
-    "main": "./dist/cjs/src/index.js",
-    "module": "./dist/mjs/index.js",
-    "types": "./dist/mjs/index.d.ts",
-    "exports": {
-        ".": {
-            "import": {
-                "types": "./dist/mjs/index.d.ts",
-                "default": "./dist/mjs/index.js"
-            },
-            "require": {
-                "types": "./dist/cjs/src/index.d.ts",
-                "default": "./dist/cjs/src/index.js"
-            }
-        }
-    },
-    "files": [
-        "dist"
-    ],
-    "scripts": {
-        "preversion": "npm test",
-        "postversion": "npm publish",
-        "prepublishOnly": "git push origin --follow-tags",
-        "preprepare": "rm -rf dist",
-        "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-        "postprepare": "bash fixup.sh",
-        "pretest": "npm run prepare",
-        "presnap": "npm run prepare",
-        "test": "c8 tap",
-        "snap": "c8 tap",
-        "format": "prettier --write . --loglevel warn",
-        "benchmark": "node benchmark/index.js",
-        "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-    },
-    "prettier": {
-        "semi": false,
-        "printWidth": 80,
-        "tabWidth": 2,
-        "useTabs": false,
-        "singleQuote": true,
-        "jsxSingleQuote": false,
-        "bracketSameLine": true,
-        "arrowParens": "avoid",
-        "endOfLine": "lf"
-    },
-    "devDependencies": {
-        "@types/brace-expansion": "^1.1.0",
-        "@types/node": "^18.11.9",
-        "@types/tap": "^15.0.7",
-        "c8": "^7.12.0",
-        "eslint-config-prettier": "^8.6.0",
-        "prettier": "^2.8.2",
-        "tap": "^16.3.3",
-        "ts-node": "^10.9.1",
-        "typedoc": "^0.23.21",
-        "typescript": "^4.9.3"
-    },
-    "tap": {
-        "coverage": false,
-        "node-arg": [
-            "--no-warnings",
-            "--loader",
-            "ts-node/esm"
-        ],
-        "ts": false
-    },
-    "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-    },
-    "repository": {
-        "type": "git",
-        "url": "https://github.com/isaacs/node-mkdirp.git"
-    },
-    "license": "MIT",
-    "engines": {
-        "node": ">=10"
-    }
-}
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js
deleted file mode 100755
index 757aae1fd96cb..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/bin.js
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env node
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const package_json_1 = require("../package.json");
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
-  Create each supplied directory including any necessary parent directories
-  that don't yet exist.
-
-  If the directory already exists, do nothing.
-
-OPTIONS are:
-
-  -m       If a directory needs to be created, set the mode as an octal
-  --mode=  permission string.
-
-  -v --version   Print the mkdirp version number
-
-  -h --help      Print this helpful banner
-
-  -p --print     Print the first directories created for each path provided
-
-  --manual       Use manual implementation, even if native is available
-`;
-const dirs = [];
-const opts = {};
-let doPrint = false;
-let dashdash = false;
-let manual = false;
-for (const arg of process.argv.slice(2)) {
-    if (dashdash)
-        dirs.push(arg);
-    else if (arg === '--')
-        dashdash = true;
-    else if (arg === '--manual')
-        manual = true;
-    else if (/^-h/.test(arg) || /^--help/.test(arg)) {
-        console.log(usage());
-        process.exit(0);
-    }
-    else if (arg === '-v' || arg === '--version') {
-        console.log(package_json_1.version);
-        process.exit(0);
-    }
-    else if (arg === '-p' || arg === '--print') {
-        doPrint = true;
-    }
-    else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
-        // these don't get covered in CI, but work locally
-        // weird because the tests below show as passing in the output.
-        /* c8 ignore start */
-        const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
-        if (isNaN(mode)) {
-            console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
-            process.exit(1);
-        }
-        /* c8 ignore stop */
-        opts.mode = mode;
-    }
-    else
-        dirs.push(arg);
-}
-const index_js_1 = require("./index.js");
-const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
-if (dirs.length === 0) {
-    console.error(usage());
-}
-// these don't get covered in CI, but work locally
-/* c8 ignore start */
-Promise.all(dirs.map(dir => impl(dir, opts)))
-    .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
-    .catch(er => {
-    console.error(er.message);
-    if (er.code)
-        console.error('  code: ' + er.code);
-    process.exit(1);
-});
-/* c8 ignore stop */
-//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js
deleted file mode 100644
index e831ef27cadc1..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/find-made.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.findMadeSync = exports.findMade = void 0;
-const path_1 = require("path");
-const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    });
-};
-exports.findMade = findMade;
-const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    }
-};
-exports.findMadeSync = findMadeSync;
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js
deleted file mode 100644
index ab9dc62cddda3..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/index.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const mkdirp_native_js_1 = require("./mkdirp-native.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const path_arg_js_1 = require("./path-arg.js");
-const use_native_js_1 = require("./use-native.js");
-/* c8 ignore start */
-var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
-Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
-Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
-var mkdirp_native_js_2 = require("./mkdirp-native.js");
-Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
-Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
-var use_native_js_2 = require("./use-native.js");
-Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
-Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
-/* c8 ignore stop */
-const mkdirpSync = (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNativeSync)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
-};
-exports.mkdirpSync = mkdirpSync;
-exports.sync = exports.mkdirpSync;
-exports.manual = mkdirp_manual_js_1.mkdirpManual;
-exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
-exports.native = mkdirp_native_js_1.mkdirpNative;
-exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
-exports.mkdirp = Object.assign(async (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNative)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
-}, {
-    mkdirpSync: exports.mkdirpSync,
-    mkdirpNative: mkdirp_native_js_1.mkdirpNative,
-    mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
-    mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    sync: exports.mkdirpSync,
-    native: mkdirp_native_js_1.mkdirpNative,
-    nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    manual: mkdirp_manual_js_1.mkdirpManual,
-    manualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    useNative: use_native_js_1.useNative,
-    useNativeSync: use_native_js_1.useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
deleted file mode 100644
index d9bd1d8bb5a49..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
+++ /dev/null
@@ -1,79 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpManual = exports.mkdirpManualSync = void 0;
-const path_1 = require("path");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpManualSync = (path, options, made) => {
-    const parent = (0, path_1.dirname)(path);
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-exports.mkdirpManualSync = mkdirpManualSync;
-exports.mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = false;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: exports.mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
deleted file mode 100644
index 9f00567d7cc20..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
-const path_1 = require("path");
-const find_made_js_1 = require("./find-made.js");
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpNativeSync = (path, options) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = true;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = (0, find_made_js_1.findMadeSync)(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-exports.mkdirpNativeSync = mkdirpNativeSync;
-exports.mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: exports.mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js
deleted file mode 100644
index e8f486c090595..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/opts-arg.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.optsArg = void 0;
-const fs_1 = require("fs");
-const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || fs_1.stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
-    return resolved;
-};
-exports.optsArg = optsArg;
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js
deleted file mode 100644
index a6b457f6e23d5..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/path-arg.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.pathArg = void 0;
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-const path_1 = require("path");
-const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = (0, path_1.resolve)(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = (0, path_1.parse)(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-exports.pathArg = pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js
deleted file mode 100644
index 550b3452688ee..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/cjs/src/use-native.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.useNative = exports.useNativeSync = void 0;
-const fs_1 = require("fs");
-const opts_arg_js_1 = require("./opts-arg.js");
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-exports.useNativeSync = !hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
-exports.useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
-    sync: exports.useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js
deleted file mode 100644
index 3e72fd59a2c1f..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/find-made.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { dirname } from 'path';
-export const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMade(opts, dirname(parent), parent)
-            : undefined;
-    });
-};
-export const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMadeSync(opts, dirname(parent), parent)
-            : undefined;
-    }
-};
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js
deleted file mode 100644
index 0217ecc8cdd83..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-import { optsArg } from './opts-arg.js';
-import { pathArg } from './path-arg.js';
-import { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore start */
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore stop */
-export const mkdirpSync = (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNativeSync(resolved)
-        ? mkdirpNativeSync(path, resolved)
-        : mkdirpManualSync(path, resolved);
-};
-export const sync = mkdirpSync;
-export const manual = mkdirpManual;
-export const manualSync = mkdirpManualSync;
-export const native = mkdirpNative;
-export const nativeSync = mkdirpNativeSync;
-export const mkdirp = Object.assign(async (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNative(resolved)
-        ? mkdirpNative(path, resolved)
-        : mkdirpManual(path, resolved);
-}, {
-    mkdirpSync,
-    mkdirpNative,
-    mkdirpNativeSync,
-    mkdirpManual,
-    mkdirpManualSync,
-    sync: mkdirpSync,
-    native: mkdirpNative,
-    nativeSync: mkdirpNativeSync,
-    manual: mkdirpManual,
-    manualSync: mkdirpManualSync,
-    useNative,
-    useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
deleted file mode 100644
index a4d044e02d3bf..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { dirname } from 'path';
-import { optsArg } from './opts-arg.js';
-export const mkdirpManualSync = (path, options, made) => {
-    const parent = dirname(path);
-    const opts = { ...optsArg(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-export const mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = optsArg(options);
-    opts.recursive = false;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js
deleted file mode 100644
index 99d10a5425dad..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/mkdirp-native.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import { dirname } from 'path';
-import { findMade, findMadeSync } from './find-made.js';
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { optsArg } from './opts-arg.js';
-export const mkdirpNativeSync = (path, options) => {
-    const opts = optsArg(options);
-    opts.recursive = true;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = findMadeSync(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-export const mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...optsArg(options), recursive: true };
-    const parent = dirname(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return findMade(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js
deleted file mode 100644
index d47e2927fee4c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/opts-arg.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { mkdir, mkdirSync, stat, statSync, } from 'fs';
-export const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
-    return resolved;
-};
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js
deleted file mode 100644
index 03539cc5a94f9..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/path-arg.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-import { parse, resolve } from 'path';
-export const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = resolve(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = parse(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js
deleted file mode 100644
index ad2093867eb74..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/dist/mjs/use-native.js
+++ /dev/null
@@ -1,14 +0,0 @@
-import { mkdir, mkdirSync } from 'fs';
-import { optsArg } from './opts-arg.js';
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-export const useNativeSync = !hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdirSync === mkdirSync;
-export const useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdir === mkdir, {
-    sync: useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/mkdirp/package.json b/node_modules/make-fetch-happen/node_modules/mkdirp/package.json
deleted file mode 100644
index f31ac3314d6f6..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-  "name": "mkdirp",
-  "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "3.0.1",
-  "keywords": [
-    "mkdir",
-    "directory",
-    "make dir",
-    "make",
-    "dir",
-    "recursive",
-    "native"
-  ],
-  "bin": "./dist/cjs/src/bin.js",
-  "main": "./dist/cjs/src/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/mjs/index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/src/index.d.ts",
-        "default": "./dist/cjs/src/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.11.9",
-    "@types/tap": "^15.0.7",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
-    "prettier": "^2.8.2",
-    "tap": "^16.3.3",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
-  },
-  "license": "MIT",
-  "engines": {
-    "node": ">=10"
-  }
-}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js
deleted file mode 100644
index 3190afc48318f..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/create.js
+++ /dev/null
@@ -1,83 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.create = void 0;
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_path_1 = __importDefault(require("node:path"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const pack_js_1 = require("./pack.js");
-const createFileSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
-    const p = new pack_js_1.Pack(opt);
-    const stream = new fs_minipass_1.WriteStream(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    const promise = new Promise((res, rej) => {
-        stream.on('error', rej);
-        stream.on('close', res);
-        p.on('error', rej);
-    });
-    addFilesAsync(p, files);
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            (0, list_js_1.list)({
-                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await (0, list_js_1.list)({
-                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => {
-                    p.add(entry);
-                },
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-const createSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    addFilesSync(p, files);
-    return p;
-};
-const createAsync = (opt, files) => {
-    const p = new pack_js_1.Pack(opt);
-    addFilesAsync(p, files);
-    return p;
-};
-exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
-    if (!files?.length) {
-        throw new TypeError('no paths specified to add to archive');
-    }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js
deleted file mode 100644
index d703a7772be3a..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/cwd-error.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CwdError = void 0;
-class CwdError extends Error {
-    path;
-    code;
-    syscall = 'chdir';
-    constructor(path, code) {
-        super(`${code}: Cannot cd into '${path}'`);
-        this.path = path;
-        this.code = code;
-    }
-    get name() {
-        return 'CwdError';
-    }
-}
-exports.CwdError = CwdError;
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js
deleted file mode 100644
index f848cbcbf779e..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/extract.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extract = void 0;
-// tar -x
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const unpack_js_1 = require("./unpack.js");
-const extractFileSync = (opt) => {
-    const u = new unpack_js_1.UnpackSync(opt);
-    const file = opt.file;
-    const stat = node_fs_1.default.statSync(file);
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const stream = new fsm.ReadStreamSync(file, {
-        readSize: readSize,
-        size: stat.size,
-    });
-    stream.pipe(u);
-};
-const extractFile = (opt, _) => {
-    const u = new unpack_js_1.Unpack(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        u.on('error', reject);
-        u.on('close', resolve);
-        // This trades a zero-byte read() syscall for a stat
-        // However, it will usually result in less memory allocation
-        node_fs_1.default.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(u);
-            }
-        });
-    });
-    return p;
-};
-exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
-    if (files?.length)
-        (0, list_js_1.filesFilter)(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js
deleted file mode 100644
index 94add8f6b2231..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/get-write-flag.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getWriteFlag = void 0;
-const fs_1 = __importDefault(require("fs"));
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
-    fs_1.default.constants.UV_FS_O_FILEMAP ||
-    0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-exports.getWriteFlag = !fMapEnabled ?
-    () => 'w'
-    : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js
deleted file mode 100644
index b3a48037b849a..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/header.js
+++ /dev/null
@@ -1,306 +0,0 @@
-"use strict";
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Header = void 0;
-const node_path_1 = require("node:path");
-const large = __importStar(require("./large-numbers.js"));
-const types = __importStar(require("./types.js"));
-class Header {
-    cksumValid = false;
-    needPax = false;
-    nullBlock = false;
-    block;
-    path;
-    mode;
-    uid;
-    gid;
-    size;
-    cksum;
-    #type = 'Unsupported';
-    linkpath;
-    uname;
-    gname;
-    devmaj = 0;
-    devmin = 0;
-    atime;
-    ctime;
-    mtime;
-    charset;
-    comment;
-    constructor(data, off = 0, ex, gex) {
-        if (Buffer.isBuffer(data)) {
-            this.decode(data, off || 0, ex, gex);
-        }
-        else if (data) {
-            this.#slurp(data);
-        }
-    }
-    decode(buf, off, ex, gex) {
-        if (!off) {
-            off = 0;
-        }
-        if (!buf || !(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
-        this.cksum = decNumber(buf, off + 148, 12);
-        // if we have extended or global extended headers, apply them now
-        // See https://github.com/npm/node-tar/pull/187
-        // Apply global before local, so it overrides
-        if (gex)
-            this.#slurp(gex, true);
-        if (ex)
-            this.#slurp(ex);
-        // old tar versions marked dirs as a file with a trailing /
-        const t = decString(buf, off + 156, 1);
-        if (types.isCode(t)) {
-            this.#type = t || '0';
-        }
-        if (this.#type === '0' && this.path.slice(-1) === '/') {
-            this.#type = '5';
-        }
-        // tar implementations sometimes incorrectly put the stat(dir).size
-        // as the size in the tarball, even though Directory entries are
-        // not able to have any body at all.  In the very rare chance that
-        // it actually DOES have a body, we weren't going to do anything with
-        // it anyway, and it'll just be a warning about an invalid header.
-        if (this.#type === '5') {
-            this.size = 0;
-        }
-        this.linkpath = decString(buf, off + 157, 100);
-        if (buf.subarray(off + 257, off + 265).toString() ===
-            'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
-            /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
-            /* c8 ignore stop */
-            if (buf[off + 475] !== 0) {
-                // definitely a prefix, definitely >130 chars.
-                const prefix = decString(buf, off + 345, 155);
-                this.path = prefix + '/' + this.path;
-            }
-            else {
-                const prefix = decString(buf, off + 345, 130);
-                if (prefix) {
-                    this.path = prefix + '/' + this.path;
-                }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
-            }
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksumValid = sum === this.cksum;
-        if (this.cksum === undefined && sum === 8 * 0x20) {
-            this.nullBlock = true;
-        }
-    }
-    #slurp(ex, gex = false) {
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex) ||
-                (k === 'linkpath' && gex) ||
-                k === 'global');
-        })));
-    }
-    encode(buf, off = 0) {
-        if (!buf) {
-            buf = this.block = Buffer.alloc(512);
-        }
-        if (this.#type === 'Unsupported') {
-            this.#type = '0';
-        }
-        if (!(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        const prefixSize = this.ctime || this.atime ? 130 : 155;
-        const split = splitPrefix(this.path || '', prefixSize);
-        const path = split[0];
-        const prefix = split[1];
-        this.needPax = !!split[2];
-        this.needPax = encString(buf, off, 100, path) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 124, 12, this.size) || this.needPax;
-        this.needPax =
-            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
-        buf[off + 156] = this.#type.charCodeAt(0);
-        this.needPax =
-            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
-        buf.write('ustar\u000000', off + 257, 8);
-        this.needPax =
-            encString(buf, off + 265, 32, this.uname) || this.needPax;
-        this.needPax =
-            encString(buf, off + 297, 32, this.gname) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
-        this.needPax =
-            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
-        if (buf[off + 475] !== 0) {
-            this.needPax =
-                encString(buf, off + 345, 155, prefix) || this.needPax;
-        }
-        else {
-            this.needPax =
-                encString(buf, off + 345, 130, prefix) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 476, 12, this.atime) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksum = sum;
-        encNumber(buf, off + 148, 8, this.cksum);
-        this.cksumValid = true;
-        return this.needPax;
-    }
-    get type() {
-        return (this.#type === 'Unsupported' ?
-            this.#type
-            : types.name.get(this.#type));
-    }
-    get typeKey() {
-        return this.#type;
-    }
-    set type(type) {
-        const c = String(types.code.get(type));
-        if (types.isCode(c) || c === 'Unsupported') {
-            this.#type = c;
-        }
-        else if (types.isCode(type)) {
-            this.#type = type;
-        }
-        else {
-            throw new TypeError('invalid entry type: ' + type);
-        }
-    }
-}
-exports.Header = Header;
-const splitPrefix = (p, prefixSize) => {
-    const pathSize = 100;
-    let pp = p;
-    let prefix = '';
-    let ret = undefined;
-    const root = node_path_1.posix.parse(p).root || '.';
-    if (Buffer.byteLength(pp) < pathSize) {
-        ret = [pp, prefix, false];
-    }
-    else {
-        // first set prefix to the dir, and path to the base
-        prefix = node_path_1.posix.dirname(pp);
-        pp = node_path_1.posix.basename(pp);
-        do {
-            if (Buffer.byteLength(pp) <= pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // both fit!
-                ret = [pp, prefix, false];
-            }
-            else if (Buffer.byteLength(pp) > pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // prefix fits in prefix, but path doesn't fit in path
-                ret = [pp.slice(0, pathSize - 1), prefix, true];
-            }
-            else {
-                // make path take a bit from prefix
-                pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp);
-                prefix = node_path_1.posix.dirname(prefix);
-            }
-        } while (prefix !== root && ret === undefined);
-        // at this point, found no resolution, just truncate
-        if (!ret) {
-            ret = [p.slice(0, pathSize - 1), '', true];
-        }
-    }
-    return ret;
-};
-const decString = (buf, off, size) => buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
-    large.parse(buf.subarray(off, off + size))
-    : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*$/, '')
-    .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-    12: 0o77777777777,
-    8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
-    : num > MAXNUM[size] || num < 0 ?
-        (large.encode(num, buf.subarray(off, off + size)), true)
-        : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
-    str
-    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
-    str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js
deleted file mode 100644
index e93ed5ad54aa6..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/index.js
+++ /dev/null
@@ -1,54 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
-__exportStar(require("./create.js"), exports);
-var create_js_1 = require("./create.js");
-Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } });
-__exportStar(require("./extract.js"), exports);
-var extract_js_1 = require("./extract.js");
-Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } });
-__exportStar(require("./header.js"), exports);
-__exportStar(require("./list.js"), exports);
-var list_js_1 = require("./list.js");
-Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } });
-// classes
-__exportStar(require("./pack.js"), exports);
-__exportStar(require("./parse.js"), exports);
-__exportStar(require("./pax.js"), exports);
-__exportStar(require("./read-entry.js"), exports);
-__exportStar(require("./replace.js"), exports);
-var replace_js_1 = require("./replace.js");
-Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } });
-exports.types = __importStar(require("./types.js"));
-__exportStar(require("./unpack.js"), exports);
-__exportStar(require("./update.js"), exports);
-var update_js_1 = require("./update.js");
-Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } });
-__exportStar(require("./write-entry.js"), exports);
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js
deleted file mode 100644
index 5b07aa7f71b48..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/large-numbers.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parse = exports.encode = void 0;
-const encode = (num, buf) => {
-    if (!Number.isSafeInteger(num)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('cannot encode number outside of javascript safe integer range');
-    }
-    else if (num < 0) {
-        encodeNegative(num, buf);
-    }
-    else {
-        encodePositive(num, buf);
-    }
-    return buf;
-};
-exports.encode = encode;
-const encodePositive = (num, buf) => {
-    buf[0] = 0x80;
-    for (var i = buf.length; i > 1; i--) {
-        buf[i - 1] = num & 0xff;
-        num = Math.floor(num / 0x100);
-    }
-};
-const encodeNegative = (num, buf) => {
-    buf[0] = 0xff;
-    var flipped = false;
-    num = num * -1;
-    for (var i = buf.length; i > 1; i--) {
-        var byte = num & 0xff;
-        num = Math.floor(num / 0x100);
-        if (flipped) {
-            buf[i - 1] = onesComp(byte);
-        }
-        else if (byte === 0) {
-            buf[i - 1] = 0;
-        }
-        else {
-            flipped = true;
-            buf[i - 1] = twosComp(byte);
-        }
-    }
-};
-const parse = (buf) => {
-    const pre = buf[0];
-    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
-        : pre === 0xff ? twos(buf)
-            : null;
-    if (value === null) {
-        throw Error('invalid base256 encoding');
-    }
-    if (!Number.isSafeInteger(value)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('parsed number outside of javascript safe integer range');
-    }
-    return value;
-};
-exports.parse = parse;
-const twos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    var flipped = false;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        var f;
-        if (flipped) {
-            f = onesComp(byte);
-        }
-        else if (byte === 0) {
-            f = byte;
-        }
-        else {
-            flipped = true;
-            f = twosComp(byte);
-        }
-        if (f !== 0) {
-            sum -= f * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const pos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        if (byte !== 0) {
-            sum += byte * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js
deleted file mode 100644
index 3cd34bb4bad48..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/list.js
+++ /dev/null
@@ -1,136 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.list = exports.filesFilter = void 0;
-// tar -t
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const path_1 = require("path");
-const make_command_js_1 = require("./make-command.js");
-const parse_js_1 = require("./parse.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const onReadEntryFunction = (opt) => {
-    const onReadEntry = opt.onReadEntry;
-    opt.onReadEntry =
-        onReadEntry ?
-            e => {
-                onReadEntry(e);
-                e.resume();
-            }
-            : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
-    const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true]));
-    const filter = opt.filter;
-    const mapHas = (file, r = '') => {
-        const root = r || (0, path_1.parse)(file).root || '.';
-        let ret;
-        if (file === root)
-            ret = false;
-        else {
-            const m = map.get(file);
-            if (m !== undefined) {
-                ret = m;
-            }
-            else {
-                ret = mapHas((0, path_1.dirname)(file), root);
-            }
-        }
-        map.set(file, ret);
-        return ret;
-    };
-    opt.filter =
-        filter ?
-            (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file))
-            : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file));
-};
-exports.filesFilter = filesFilter;
-const listFileSync = (opt) => {
-    const p = new parse_js_1.Parser(opt);
-    const file = opt.file;
-    let fd;
-    try {
-        const stat = node_fs_1.default.statSync(file);
-        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-        if (stat.size < readSize) {
-            p.end(node_fs_1.default.readFileSync(file));
-        }
-        else {
-            let pos = 0;
-            const buf = Buffer.allocUnsafe(readSize);
-            fd = node_fs_1.default.openSync(file, 'r');
-            while (pos < stat.size) {
-                const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
-                pos += bytesRead;
-                p.write(buf.subarray(0, bytesRead));
-            }
-            p.end();
-        }
-    }
-    finally {
-        if (typeof fd === 'number') {
-            try {
-                node_fs_1.default.closeSync(fd);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-    }
-};
-const listFile = (opt, _files) => {
-    const parse = new parse_js_1.Parser(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        parse.on('error', reject);
-        parse.on('end', resolve);
-        node_fs_1.default.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(parse);
-            }
-        });
-    });
-    return p;
-};
-exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => {
-    if (files?.length)
-        (0, exports.filesFilter)(opt, files);
-    if (!opt.noResume)
-        onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js
deleted file mode 100644
index 1814319e78bc6..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/make-command.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeCommand = void 0;
-const options_js_1 = require("./options.js");
-const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
-    return Object.assign((opt_ = [], entries, cb) => {
-        if (Array.isArray(opt_)) {
-            entries = opt_;
-            opt_ = {};
-        }
-        if (typeof entries === 'function') {
-            cb = entries;
-            entries = undefined;
-        }
-        if (!entries) {
-            entries = [];
-        }
-        else {
-            entries = Array.from(entries);
-        }
-        const opt = (0, options_js_1.dealias)(opt_);
-        validate?.(opt, entries);
-        if ((0, options_js_1.isSyncFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncFile(opt, entries);
-        }
-        else if ((0, options_js_1.isAsyncFile)(opt)) {
-            const p = asyncFile(opt, entries);
-            // weirdness to make TS happy
-            const c = cb ? cb : undefined;
-            return c ? p.then(() => c(), c) : p;
-        }
-        else if ((0, options_js_1.isSyncNoFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncNoFile(opt, entries);
-        }
-        else if ((0, options_js_1.isAsyncNoFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback only supported with file option');
-            }
-            return asyncNoFile(opt, entries);
-            /* c8 ignore start */
-        }
-        else {
-            throw new Error('impossible options??');
-        }
-        /* c8 ignore stop */
-    }, {
-        syncFile,
-        asyncFile,
-        syncNoFile,
-        asyncNoFile,
-        validate,
-    });
-};
-exports.makeCommand = makeCommand;
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js
deleted file mode 100644
index 2b13ecbab6723..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mkdir.js
+++ /dev/null
@@ -1,209 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirSync = exports.mkdir = void 0;
-const chownr_1 = require("chownr");
-const fs_1 = __importDefault(require("fs"));
-const mkdirp_1 = require("mkdirp");
-const node_path_1 = __importDefault(require("node:path"));
-const cwd_error_js_1 = require("./cwd-error.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const symlink_error_js_1 = require("./symlink-error.js");
-const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
-const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
-const checkCwd = (dir, cb) => {
-    fs_1.default.stat(dir, (er, st) => {
-        if (er || !st.isDirectory()) {
-            er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
-        }
-        cb(er);
-    });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-const mkdir = (dir, opt, cb) => {
-    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o0700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
-    const done = (er, created) => {
-        if (er) {
-            cb(er);
-        }
-        else {
-            cSet(cache, dir, true);
-            if (created && doChown) {
-                (0, chownr_1.chownr)(created, uid, gid, er => done(er));
-            }
-            else if (needChmod) {
-                fs_1.default.chmod(dir, mode, cb);
-            }
-            else {
-                cb();
-            }
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        return checkCwd(dir, done);
-    }
-    if (preserve) {
-        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
-        done);
-    }
-    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
-    const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-exports.mkdir = mkdir;
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-    if (!parts.length) {
-        return cb(null, created);
-    }
-    const p = parts.shift();
-    const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
-    if (er) {
-        fs_1.default.lstat(part, (statEr, st) => {
-            if (statEr) {
-                statEr.path =
-                    statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
-                cb(statEr);
-            }
-            else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-            }
-            else if (unlink) {
-                fs_1.default.unlink(part, er => {
-                    if (er) {
-                        return cb(er);
-                    }
-                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-                });
-            }
-            else if (st.isSymbolicLink()) {
-                return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
-            }
-            else {
-                cb(er);
-            }
-        });
-    }
-    else {
-        created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-};
-const checkCwdSync = (dir) => {
-    let ok = false;
-    let code = undefined;
-    try {
-        ok = fs_1.default.statSync(dir).isDirectory();
-    }
-    catch (er) {
-        code = er?.code;
-    }
-    finally {
-        if (!ok) {
-            throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
-        }
-    }
-};
-const mkdirSync = (dir, opt) => {
-    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
-    const done = (created) => {
-        cSet(cache, dir, true);
-        if (created && doChown) {
-            (0, chownr_1.chownrSync)(created, uid, gid);
-        }
-        if (needChmod) {
-            fs_1.default.chmodSync(dir, mode);
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        checkCwdSync(cwd);
-        return done();
-    }
-    if (preserve) {
-        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
-    }
-    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
-    const parts = sub.split('/');
-    let created = undefined;
-    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
-        part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
-        try {
-            fs_1.default.mkdirSync(part, mode);
-            created = created || part;
-            cSet(cache, part, true);
-        }
-        catch (er) {
-            const st = fs_1.default.lstatSync(part);
-            if (st.isDirectory()) {
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (unlink) {
-                fs_1.default.unlinkSync(part);
-                fs_1.default.mkdirSync(part, mode);
-                created = created || part;
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (st.isSymbolicLink()) {
-                return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
-            }
-        }
-    }
-    return done(created);
-};
-exports.mkdirSync = mkdirSync;
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js
deleted file mode 100644
index 49dd727961d29..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/mode-fix.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.modeFix = void 0;
-const modeFix = (mode, isDir, portable) => {
-    mode &= 0o7777;
-    // in portable mode, use the minimum reasonable umask
-    // if this system creates files with 0o664 by default
-    // (as some linux distros do), then we'll write the
-    // archive with 0o644 instead.  Also, don't ever create
-    // a file that is not readable/writable by the owner.
-    if (portable) {
-        mode = (mode | 0o600) & ~0o22;
-    }
-    // if dirs are readable, then they should be listable
-    if (isDir) {
-        if (mode & 0o400) {
-            mode |= 0o100;
-        }
-        if (mode & 0o40) {
-            mode |= 0o10;
-        }
-        if (mode & 0o4) {
-            mode |= 0o1;
-        }
-    }
-    return mode;
-};
-exports.modeFix = modeFix;
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js
deleted file mode 100644
index 2f08ce46d98c4..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-unicode.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeUnicode = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-exports.normalizeUnicode = normalizeUnicode;
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js
deleted file mode 100644
index b0c7aaa9f2d17..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/normalize-windows-path.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeWindowsPath = void 0;
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-exports.normalizeWindowsPath = platform !== 'win32' ?
-    (p) => p
-    : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js
deleted file mode 100644
index 4cd06505bc72b..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/options.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
-const argmap = new Map([
-    ['C', 'cwd'],
-    ['f', 'file'],
-    ['z', 'gzip'],
-    ['P', 'preservePaths'],
-    ['U', 'unlink'],
-    ['strip-components', 'strip'],
-    ['stripComponents', 'strip'],
-    ['keep-newer', 'newer'],
-    ['keepNewer', 'newer'],
-    ['keep-newer-files', 'newer'],
-    ['keepNewerFiles', 'newer'],
-    ['k', 'keep'],
-    ['keep-existing', 'keep'],
-    ['keepExisting', 'keep'],
-    ['m', 'noMtime'],
-    ['no-mtime', 'noMtime'],
-    ['p', 'preserveOwner'],
-    ['L', 'follow'],
-    ['h', 'follow'],
-    ['onentry', 'onReadEntry'],
-]);
-const isSyncFile = (o) => !!o.sync && !!o.file;
-exports.isSyncFile = isSyncFile;
-const isAsyncFile = (o) => !o.sync && !!o.file;
-exports.isAsyncFile = isAsyncFile;
-const isSyncNoFile = (o) => !!o.sync && !o.file;
-exports.isSyncNoFile = isSyncNoFile;
-const isAsyncNoFile = (o) => !o.sync && !o.file;
-exports.isAsyncNoFile = isAsyncNoFile;
-const isSync = (o) => !!o.sync;
-exports.isSync = isSync;
-const isAsync = (o) => !o.sync;
-exports.isAsync = isAsync;
-const isFile = (o) => !!o.file;
-exports.isFile = isFile;
-const isNoFile = (o) => !o.file;
-exports.isNoFile = isNoFile;
-const dealiasKey = (k) => {
-    const d = argmap.get(k);
-    if (d)
-        return d;
-    return k;
-};
-const dealias = (opt = {}) => {
-    if (!opt)
-        return {};
-    const result = {};
-    for (const [key, v] of Object.entries(opt)) {
-        // TS doesn't know that aliases are going to always be the same type
-        const k = dealiasKey(key);
-        result[k] = v;
-    }
-    // affordance for deprecated noChmod -> chmod
-    if (result.chmod === undefined && result.noChmod === false) {
-        result.chmod = true;
-    }
-    delete result.noChmod;
-    return result;
-};
-exports.dealias = dealias;
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js
deleted file mode 100644
index 303e93063c2db..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pack.js
+++ /dev/null
@@ -1,477 +0,0 @@
-"use strict";
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PackSync = exports.Pack = exports.PackJob = void 0;
-const fs_1 = __importDefault(require("fs"));
-const write_entry_js_1 = require("./write-entry.js");
-class PackJob {
-    path;
-    absolute;
-    entry;
-    stat;
-    readdir;
-    pending = false;
-    ignore = false;
-    piped = false;
-    constructor(path, absolute) {
-        this.path = path || './';
-        this.absolute = absolute;
-    }
-}
-exports.PackJob = PackJob;
-const minipass_1 = require("minipass");
-const zlib = __importStar(require("minizlib"));
-const yallist_1 = require("yallist");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-const path_1 = __importDefault(require("path"));
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class Pack extends minipass_1.Minipass {
-    opt;
-    cwd;
-    maxReadSize;
-    preservePaths;
-    strict;
-    noPax;
-    prefix;
-    linkCache;
-    statCache;
-    file;
-    portable;
-    zip;
-    readdirCache;
-    noDirRecurse;
-    follow;
-    noMtime;
-    mtime;
-    filter;
-    jobs;
-    [WRITEENTRYCLASS];
-    onWriteEntry;
-    [QUEUE];
-    [JOBS] = 0;
-    [PROCESSING] = false;
-    [ENDED] = false;
-    constructor(opt = {}) {
-        //@ts-ignore
-        super();
-        this.opt = opt;
-        this.file = opt.file || '';
-        this.cwd = opt.cwd || process.cwd();
-        this.maxReadSize = opt.maxReadSize;
-        this.preservePaths = !!opt.preservePaths;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.readdirCache = opt.readdirCache || new Map();
-        this.onWriteEntry = opt.onWriteEntry;
-        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
-            }
-            if (opt.gzip) {
-                if (typeof opt.gzip !== 'object') {
-                    opt.gzip = {};
-                }
-                if (this.portable) {
-                    opt.gzip.portable = true;
-                }
-                this.zip = new zlib.Gzip(opt.gzip);
-            }
-            if (opt.brotli) {
-                if (typeof opt.brotli !== 'object') {
-                    opt.brotli = {};
-                }
-                this.zip = new zlib.BrotliCompress(opt.brotli);
-            }
-            /* c8 ignore next */
-            if (!this.zip)
-                throw new Error('impossible');
-            const zip = this.zip;
-            zip.on('data', chunk => super.write(chunk));
-            zip.on('end', () => super.end());
-            zip.on('drain', () => this[ONDRAIN]());
-            this.on('resume', () => zip.resume());
-        }
-        else {
-            this.on('drain', this[ONDRAIN]);
-        }
-        this.noDirRecurse = !!opt.noDirRecurse;
-        this.follow = !!opt.follow;
-        this.noMtime = !!opt.noMtime;
-        if (opt.mtime)
-            this.mtime = opt.mtime;
-        this.filter =
-            typeof opt.filter === 'function' ? opt.filter : () => true;
-        this[QUEUE] = new yallist_1.Yallist();
-        this[JOBS] = 0;
-        this.jobs = Number(opt.jobs) || 4;
-        this[PROCESSING] = false;
-        this[ENDED] = false;
-    }
-    [WRITE](chunk) {
-        return super.write(chunk);
-    }
-    add(path) {
-        this.write(path);
-        return this;
-    }
-    end(path, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof path === 'function') {
-            cb = path;
-            path = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (path) {
-            this.add(path);
-        }
-        this[ENDED] = true;
-        this[PROCESS]();
-        /* c8 ignore next */
-        if (cb)
-            cb();
-        return this;
-    }
-    write(path) {
-        if (this[ENDED]) {
-            throw new Error('write after end');
-        }
-        if (path instanceof read_entry_js_1.ReadEntry) {
-            this[ADDTARENTRY](path);
-        }
-        else {
-            this[ADDFSENTRY](path);
-        }
-        return this.flowing;
-    }
-    [ADDTARENTRY](p) {
-        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
-        // in this case, we don't have to wait for the stat
-        if (!this.filter(p.path, p)) {
-            p.resume();
-        }
-        else {
-            const job = new PackJob(p.path, absolute);
-            job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
-            job.entry.on('end', () => this[JOBDONE](job));
-            this[JOBS] += 1;
-            this[QUEUE].push(job);
-        }
-        this[PROCESS]();
-    }
-    [ADDFSENTRY](p) {
-        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
-        this[QUEUE].push(new PackJob(p, absolute));
-        this[PROCESS]();
-    }
-    [STAT](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        const stat = this.follow ? 'stat' : 'lstat';
-        fs_1.default[stat](job.absolute, (er, stat) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                this.emit('error', er);
-            }
-            else {
-                this[ONSTAT](job, stat);
-            }
-        });
-    }
-    [ONSTAT](job, stat) {
-        this.statCache.set(job.absolute, stat);
-        job.stat = stat;
-        // now we have the stat, we can filter it.
-        if (!this.filter(job.path, stat)) {
-            job.ignore = true;
-        }
-        this[PROCESS]();
-    }
-    [READDIR](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        fs_1.default.readdir(job.absolute, (er, entries) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADDIR](job, entries);
-        });
-    }
-    [ONREADDIR](job, entries) {
-        this.readdirCache.set(job.absolute, entries);
-        job.readdir = entries;
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        if (this[PROCESSING]) {
-            return;
-        }
-        this[PROCESSING] = true;
-        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
-            this[PROCESSJOB](w.value);
-            if (w.value.ignore) {
-                const p = w.next;
-                this[QUEUE].removeNode(w);
-                w.next = p;
-            }
-        }
-        this[PROCESSING] = false;
-        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-            if (this.zip) {
-                this.zip.end(EOF);
-            }
-            else {
-                super.write(EOF);
-                super.end();
-            }
-        }
-    }
-    get [CURRENT]() {
-        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
-    }
-    [JOBDONE](_job) {
-        this[QUEUE].shift();
-        this[JOBS] -= 1;
-        this[PROCESS]();
-    }
-    [PROCESSJOB](job) {
-        if (job.pending) {
-            return;
-        }
-        if (job.entry) {
-            if (job === this[CURRENT] && !job.piped) {
-                this[PIPE](job);
-            }
-            return;
-        }
-        if (!job.stat) {
-            const sc = this.statCache.get(job.absolute);
-            if (sc) {
-                this[ONSTAT](job, sc);
-            }
-            else {
-                this[STAT](job);
-            }
-        }
-        if (!job.stat) {
-            return;
-        }
-        // filtered out!
-        if (job.ignore) {
-            return;
-        }
-        if (!this.noDirRecurse &&
-            job.stat.isDirectory() &&
-            !job.readdir) {
-            const rc = this.readdirCache.get(job.absolute);
-            if (rc) {
-                this[ONREADDIR](job, rc);
-            }
-            else {
-                this[READDIR](job);
-            }
-            if (!job.readdir) {
-                return;
-            }
-        }
-        // we know it doesn't have an entry, because that got checked above
-        job.entry = this[ENTRY](job);
-        if (!job.entry) {
-            job.ignore = true;
-            return;
-        }
-        if (job === this[CURRENT] && !job.piped) {
-            this[PIPE](job);
-        }
-    }
-    [ENTRYOPT](job) {
-        return {
-            onwarn: (code, msg, data) => this.warn(code, msg, data),
-            noPax: this.noPax,
-            cwd: this.cwd,
-            absolute: job.absolute,
-            preservePaths: this.preservePaths,
-            maxReadSize: this.maxReadSize,
-            strict: this.strict,
-            portable: this.portable,
-            linkCache: this.linkCache,
-            statCache: this.statCache,
-            noMtime: this.noMtime,
-            mtime: this.mtime,
-            prefix: this.prefix,
-            onWriteEntry: this.onWriteEntry,
-        };
-    }
-    [ENTRY](job) {
-        this[JOBS] += 1;
-        try {
-            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
-            return e
-                .on('end', () => this[JOBDONE](job))
-                .on('error', er => this.emit('error', er));
-        }
-        catch (er) {
-            this.emit('error', er);
-        }
-    }
-    [ONDRAIN]() {
-        if (this[CURRENT] && this[CURRENT].entry) {
-            this[CURRENT].entry.resume();
-        }
-    }
-    // like .pipe() but using super, because our write() is special
-    [PIPE](job) {
-        job.piped = true;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        const source = job.entry;
-        const zip = this.zip;
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                if (!zip.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                if (!super.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-    }
-    pause() {
-        if (this.zip) {
-            this.zip.pause();
-        }
-        return super.pause();
-    }
-    warn(code, message, data = {}) {
-        (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-}
-exports.Pack = Pack;
-class PackSync extends Pack {
-    sync = true;
-    constructor(opt) {
-        super(opt);
-        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
-    }
-    // pause/resume are no-ops in sync streams.
-    pause() { }
-    resume() { }
-    [STAT](job) {
-        const stat = this.follow ? 'statSync' : 'lstatSync';
-        this[ONSTAT](job, fs_1.default[stat](job.absolute));
-    }
-    [READDIR](job) {
-        this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
-    }
-    // gotta get it all in this tick
-    [PIPE](job) {
-        const source = job.entry;
-        const zip = this.zip;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('Cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                zip.write(chunk);
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                super[WRITE](chunk);
-            });
-        }
-    }
-}
-exports.PackSync = PackSync;
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js
deleted file mode 100644
index 9746a25899e6e..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/parse.js
+++ /dev/null
@@ -1,599 +0,0 @@
-"use strict";
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Parser = void 0;
-const events_1 = require("events");
-const minizlib_1 = require("minizlib");
-const yallist_1 = require("yallist");
-const header_js_1 = require("./header.js");
-const pax_js_1 = require("./pax.js");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-class Parser extends events_1.EventEmitter {
-    file;
-    strict;
-    maxMetaEntrySize;
-    filter;
-    brotli;
-    writable = true;
-    readable = false;
-    [QUEUE] = new yallist_1.Yallist();
-    [BUFFER];
-    [READENTRY];
-    [WRITEENTRY];
-    [STATE] = 'begin';
-    [META] = '';
-    [EX];
-    [GEX];
-    [ENDED] = false;
-    [UNZIP];
-    [ABORTED] = false;
-    [SAW_VALID_ENTRY];
-    [SAW_NULL_BLOCK] = false;
-    [SAW_EOF] = false;
-    [WRITING] = false;
-    [CONSUMING] = false;
-    [EMITTEDEND] = false;
-    constructor(opt = {}) {
-        super();
-        this.file = opt.file || '';
-        // these BADARCHIVE errors can't be detected early. listen on DONE.
-        this.on(DONE, () => {
-            if (this[STATE] === 'begin' ||
-                this[SAW_VALID_ENTRY] === false) {
-                // either less than 1 block of data, or all entries were invalid.
-                // Either way, probably not even a tarball.
-                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
-            }
-        });
-        if (opt.ondone) {
-            this.on(DONE, opt.ondone);
-        }
-        else {
-            this.on(DONE, () => {
-                this.emit('prefinish');
-                this.emit('finish');
-                this.emit('end');
-            });
-        }
-        this.strict = !!opt.strict;
-        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
-        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
-        // Unlike gzip, brotli doesn't have any magic bytes to identify it
-        // Users need to explicitly tell us they're extracting a brotli file
-        // Or we infer from the file extension
-        const isTBR = opt.file &&
-            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
-        // if it's a tbr file it MIGHT be brotli, but we don't know until
-        // we look at it and verify it's not a valid tar file.
-        this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
-                : isTBR ? undefined
-                    : false;
-        // have to set this so that streams are ok piping into it
-        this.on('end', () => this[CLOSESTREAM]());
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        if (typeof opt.onReadEntry === 'function') {
-            this.on('entry', opt.onReadEntry);
-        }
-    }
-    warn(code, message, data = {}) {
-        (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    [CONSUMEHEADER](chunk, position) {
-        if (this[SAW_VALID_ENTRY] === undefined) {
-            this[SAW_VALID_ENTRY] = false;
-        }
-        let header;
-        try {
-            header = new header_js_1.Header(chunk, position, this[EX], this[GEX]);
-        }
-        catch (er) {
-            return this.warn('TAR_ENTRY_INVALID', er);
-        }
-        if (header.nullBlock) {
-            if (this[SAW_NULL_BLOCK]) {
-                this[SAW_EOF] = true;
-                // ending an archive with no entries.  pointless, but legal.
-                if (this[STATE] === 'begin') {
-                    this[STATE] = 'header';
-                }
-                this[EMIT]('eof');
-            }
-            else {
-                this[SAW_NULL_BLOCK] = true;
-                this[EMIT]('nullBlock');
-            }
-        }
-        else {
-            this[SAW_NULL_BLOCK] = false;
-            if (!header.cksumValid) {
-                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
-            }
-            else if (!header.path) {
-                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
-            }
-            else {
-                const type = header.type;
-                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
-                        header,
-                    });
-                }
-                else if (!/^(Symbolic)?Link$/.test(type) &&
-                    !/^(Global)?ExtendedHeader$/.test(type) &&
-                    header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
-                        header,
-                    });
-                }
-                else {
-                    const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX]));
-                    // we do this for meta & ignored entries as well, because they
-                    // are still valid tar, or else we wouldn't know to ignore them
-                    if (!this[SAW_VALID_ENTRY]) {
-                        if (entry.remain) {
-                            // this might be the one!
-                            const onend = () => {
-                                if (!entry.invalid) {
-                                    this[SAW_VALID_ENTRY] = true;
-                                }
-                            };
-                            entry.on('end', onend);
-                        }
-                        else {
-                            this[SAW_VALID_ENTRY] = true;
-                        }
-                    }
-                    if (entry.meta) {
-                        if (entry.size > this.maxMetaEntrySize) {
-                            entry.ignore = true;
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = 'ignore';
-                            entry.resume();
-                        }
-                        else if (entry.size > 0) {
-                            this[META] = '';
-                            entry.on('data', c => (this[META] += c));
-                            this[STATE] = 'meta';
-                        }
-                    }
-                    else {
-                        this[EX] = undefined;
-                        entry.ignore =
-                            entry.ignore || !this.filter(entry.path, entry);
-                        if (entry.ignore) {
-                            // probably valid, just not something we care about
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = entry.remain ? 'ignore' : 'header';
-                            entry.resume();
-                        }
-                        else {
-                            if (entry.remain) {
-                                this[STATE] = 'body';
-                            }
-                            else {
-                                this[STATE] = 'header';
-                                entry.end();
-                            }
-                            if (!this[READENTRY]) {
-                                this[QUEUE].push(entry);
-                                this[NEXTENTRY]();
-                            }
-                            else {
-                                this[QUEUE].push(entry);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    [CLOSESTREAM]() {
-        queueMicrotask(() => this.emit('close'));
-    }
-    [PROCESSENTRY](entry) {
-        let go = true;
-        if (!entry) {
-            this[READENTRY] = undefined;
-            go = false;
-        }
-        else if (Array.isArray(entry)) {
-            const [ev, ...args] = entry;
-            this.emit(ev, ...args);
-        }
-        else {
-            this[READENTRY] = entry;
-            this.emit('entry', entry);
-            if (!entry.emittedEnd) {
-                entry.on('end', () => this[NEXTENTRY]());
-                go = false;
-            }
-        }
-        return go;
-    }
-    [NEXTENTRY]() {
-        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
-        if (!this[QUEUE].length) {
-            // At this point, there's nothing in the queue, but we may have an
-            // entry which is being consumed (readEntry).
-            // If we don't, then we definitely can handle more data.
-            // If we do, and either it's flowing, or it has never had any data
-            // written to it, then it needs more.
-            // The only other possibility is that it has returned false from a
-            // write() call, so we wait for the next drain to continue.
-            const re = this[READENTRY];
-            const drainNow = !re || re.flowing || re.size === re.remain;
-            if (drainNow) {
-                if (!this[WRITING]) {
-                    this.emit('drain');
-                }
-            }
-            else {
-                re.once('drain', () => this.emit('drain'));
-            }
-        }
-    }
-    [CONSUMEBODY](chunk, position) {
-        // write up to but no more than writeEntry.blockRemain
-        const entry = this[WRITEENTRY];
-        /* c8 ignore start */
-        if (!entry) {
-            throw new Error('attempt to consume body without entry??');
-        }
-        const br = entry.blockRemain ?? 0;
-        /* c8 ignore stop */
-        const c = br >= chunk.length && position === 0 ?
-            chunk
-            : chunk.subarray(position, position + br);
-        entry.write(c);
-        if (!entry.blockRemain) {
-            this[STATE] = 'header';
-            this[WRITEENTRY] = undefined;
-            entry.end();
-        }
-        return c.length;
-    }
-    [CONSUMEMETA](chunk, position) {
-        const entry = this[WRITEENTRY];
-        const ret = this[CONSUMEBODY](chunk, position);
-        // if we finished, then the entry is reset
-        if (!this[WRITEENTRY] && entry) {
-            this[EMITMETA](entry);
-        }
-        return ret;
-    }
-    [EMIT](ev, data, extra) {
-        if (!this[QUEUE].length && !this[READENTRY]) {
-            this.emit(ev, data, extra);
-        }
-        else {
-            this[QUEUE].push([ev, data, extra]);
-        }
-    }
-    [EMITMETA](entry) {
-        this[EMIT]('meta', this[META]);
-        switch (entry.type) {
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false);
-                break;
-            case 'GlobalExtendedHeader':
-                this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true);
-                break;
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath': {
-                const ex = this[EX] ?? Object.create(null);
-                this[EX] = ex;
-                ex.path = this[META].replace(/\0.*/, '');
-                break;
-            }
-            case 'NextFileHasLongLinkpath': {
-                const ex = this[EX] || Object.create(null);
-                this[EX] = ex;
-                ex.linkpath = this[META].replace(/\0.*/, '');
-                break;
-            }
-            /* c8 ignore start */
-            default:
-                throw new Error('unknown meta: ' + entry.type);
-            /* c8 ignore stop */
-        }
-    }
-    abort(error) {
-        this[ABORTED] = true;
-        this.emit('abort', error);
-        // always throws, even in non-strict mode
-        this.warn('TAR_ABORT', error, { recoverable: false });
-    }
-    write(chunk, encoding, cb) {
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, 
-            /* c8 ignore next */
-            typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        if (this[ABORTED]) {
-            /* c8 ignore next */
-            cb?.();
-            return false;
-        }
-        // first write, might be gzipped
-        const needSniff = this[UNZIP] === undefined ||
-            (this.brotli === undefined && this[UNZIP] === false);
-        if (needSniff && chunk) {
-            if (this[BUFFER]) {
-                chunk = Buffer.concat([this[BUFFER], chunk]);
-                this[BUFFER] = undefined;
-            }
-            if (chunk.length < gzipHeader.length) {
-                this[BUFFER] = chunk;
-                /* c8 ignore next */
-                cb?.();
-                return true;
-            }
-            // look for gzip header
-            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
-                if (chunk[i] !== gzipHeader[i]) {
-                    this[UNZIP] = false;
-                }
-            }
-            const maybeBrotli = this.brotli === undefined;
-            if (this[UNZIP] === false && maybeBrotli) {
-                // read the first header to see if it's a valid tar file. If so,
-                // we can safely assume that it's not actually brotli, despite the
-                // .tbr or .tar.br file extension.
-                // if we ended before getting a full chunk, yes, def brotli
-                if (chunk.length < 512) {
-                    if (this[ENDED]) {
-                        this.brotli = true;
-                    }
-                    else {
-                        this[BUFFER] = chunk;
-                        /* c8 ignore next */
-                        cb?.();
-                        return true;
-                    }
-                }
-                else {
-                    // if it's tar, it's pretty reliably not brotli, chances of
-                    // that happening are astronomical.
-                    try {
-                        new header_js_1.Header(chunk.subarray(0, 512));
-                        this.brotli = false;
-                    }
-                    catch (_) {
-                        this.brotli = true;
-                    }
-                }
-            }
-            if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
-                const ended = this[ENDED];
-                this[ENDED] = false;
-                this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new minizlib_1.Unzip({})
-                        : new minizlib_1.BrotliDecompress({});
-                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
-                this[UNZIP].on('error', er => this.abort(er));
-                this[UNZIP].on('end', () => {
-                    this[ENDED] = true;
-                    this[CONSUMECHUNK]();
-                });
-                this[WRITING] = true;
-                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
-                this[WRITING] = false;
-                cb?.();
-                return ret;
-            }
-        }
-        this[WRITING] = true;
-        if (this[UNZIP]) {
-            this[UNZIP].write(chunk);
-        }
-        else {
-            this[CONSUMECHUNK](chunk);
-        }
-        this[WRITING] = false;
-        // return false if there's a queue, or if the current entry isn't flowing
-        const ret = this[QUEUE].length ? false
-            : this[READENTRY] ? this[READENTRY].flowing
-                : true;
-        // if we have no queue, then that means a clogged READENTRY
-        if (!ret && !this[QUEUE].length) {
-            this[READENTRY]?.once('drain', () => this.emit('drain'));
-        }
-        /* c8 ignore next */
-        cb?.();
-        return ret;
-    }
-    [BUFFERCONCAT](c) {
-        if (c && !this[ABORTED]) {
-            this[BUFFER] =
-                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
-        }
-    }
-    [MAYBEEND]() {
-        if (this[ENDED] &&
-            !this[EMITTEDEND] &&
-            !this[ABORTED] &&
-            !this[CONSUMING]) {
-            this[EMITTEDEND] = true;
-            const entry = this[WRITEENTRY];
-            if (entry && entry.blockRemain) {
-                // truncated, likely a damaged file
-                const have = this[BUFFER] ? this[BUFFER].length : 0;
-                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
-                if (this[BUFFER]) {
-                    entry.write(this[BUFFER]);
-                }
-                entry.end();
-            }
-            this[EMIT](DONE);
-        }
-    }
-    [CONSUMECHUNK](chunk) {
-        if (this[CONSUMING] && chunk) {
-            this[BUFFERCONCAT](chunk);
-        }
-        else if (!chunk && !this[BUFFER]) {
-            this[MAYBEEND]();
-        }
-        else if (chunk) {
-            this[CONSUMING] = true;
-            if (this[BUFFER]) {
-                this[BUFFERCONCAT](chunk);
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            else {
-                this[CONSUMECHUNKSUB](chunk);
-            }
-            while (this[BUFFER] &&
-                this[BUFFER]?.length >= 512 &&
-                !this[ABORTED] &&
-                !this[SAW_EOF]) {
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            this[CONSUMING] = false;
-        }
-        if (!this[BUFFER] || this[ENDED]) {
-            this[MAYBEEND]();
-        }
-    }
-    [CONSUMECHUNKSUB](chunk) {
-        // we know that we are in CONSUMING mode, so anything written goes into
-        // the buffer.  Advance the position and put any remainder in the buffer.
-        let position = 0;
-        const length = chunk.length;
-        while (position + 512 <= length &&
-            !this[ABORTED] &&
-            !this[SAW_EOF]) {
-            switch (this[STATE]) {
-                case 'begin':
-                case 'header':
-                    this[CONSUMEHEADER](chunk, position);
-                    position += 512;
-                    break;
-                case 'ignore':
-                case 'body':
-                    position += this[CONSUMEBODY](chunk, position);
-                    break;
-                case 'meta':
-                    position += this[CONSUMEMETA](chunk, position);
-                    break;
-                /* c8 ignore start */
-                default:
-                    throw new Error('invalid state: ' + this[STATE]);
-                /* c8 ignore stop */
-            }
-        }
-        if (position < length) {
-            if (this[BUFFER]) {
-                this[BUFFER] = Buffer.concat([
-                    chunk.subarray(position),
-                    this[BUFFER],
-                ]);
-            }
-            else {
-                this[BUFFER] = chunk.subarray(position);
-            }
-        }
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (cb)
-            this.once('finish', cb);
-        if (!this[ABORTED]) {
-            if (this[UNZIP]) {
-                /* c8 ignore start */
-                if (chunk)
-                    this[UNZIP].write(chunk);
-                /* c8 ignore stop */
-                this[UNZIP].end();
-            }
-            else {
-                this[ENDED] = true;
-                if (this.brotli === undefined)
-                    chunk = chunk || Buffer.alloc(0);
-                if (chunk)
-                    this.write(chunk);
-                this[MAYBEEND]();
-            }
-        }
-        return this;
-    }
-}
-exports.Parser = Parser;
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
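
For context on the sniffing logic in the deleted parse.js above: gzip input is recognized by its two magic bytes, while brotli has no magic bytes, so the parser assumes brotli only when the first 512 bytes do not parse as a valid tar header. The following standalone sketch restates that heuristic; the simplified checksum test is an assumption standing in for the full Header validation used in the real code.

```js
// Illustrative sketch only; simplified stand-in for the Header validation above.
const zlib = require('node:zlib');

const GZIP_MAGIC = Buffer.from([0x1f, 0x8b]);

// A ustar header stores an octal checksum at bytes 148-155, computed over the
// 512-byte block with the checksum field treated as eight spaces.
const looksLikeTarHeader = (block) => {
  if (block.length < 512) {
    return false;
  }
  const stored = parseInt(
    block.subarray(148, 156).toString('ascii').replace(/[\0 ]/g, ''), 8);
  if (!Number.isFinite(stored)) {
    return false;
  }
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += i >= 148 && i < 156 ? 0x20 : block[i];
  }
  return sum === stored;
};

const sniff = (firstChunk) => {
  if (firstChunk.subarray(0, GZIP_MAGIC.length).equals(GZIP_MAGIC)) {
    return 'gzip';
  }
  // brotli has no magic bytes: if the data already parses as a tar header,
  // assume plain tar, otherwise assume brotli (the same heuristic as above)
  return looksLikeTarHeader(firstChunk) ? 'tar' : 'brotli';
};

// route the input through the matching decompressor, or none for plain tar
const decompressorFor = (kind) =>
  kind === 'gzip' ? zlib.createGunzip()
  : kind === 'brotli' ? zlib.createBrotliDecompress()
  : null;
```

This is only a heuristic: input that is neither gzip nor valid tar ends up routed to the brotli decompressor and fails there.
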
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js
deleted file mode 100644
index 9ff391c44092c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/path-reservations.js
+++ /dev/null
@@ -1,170 +0,0 @@
-"use strict";
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PathReservations = void 0;
-const node_path_1 = require("node:path");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-const getDirs = (path) => {
-    const dirs = path
-        .split('/')
-        .slice(0, -1)
-        .reduce((set, path) => {
-        const s = set[set.length - 1];
-        if (s !== undefined) {
-            path = (0, node_path_1.join)(s, path);
-        }
-        set.push(path || '/');
-        return set;
-    }, []);
-    return dirs;
-};
-class PathReservations {
-    // path => [function or Set]
-    // A Set object means a directory reservation
-    // A fn is a direct reservation on that path
-    #queues = new Map();
-    // fn => {paths:[path,...], dirs:[path, ...]}
-    #reservations = new Map();
-    // functions currently running
-    #running = new Set();
-    reserve(paths, fn) {
-        paths =
-            isWindows ?
-                ['win32 parallelization disabled']
-                : paths.map(p => {
-                    // don't need normPath, because we skip this entirely for windows
-                    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase();
-                });
-        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
-        this.#reservations.set(fn, { dirs, paths });
-        for (const p of paths) {
-            const q = this.#queues.get(p);
-            if (!q) {
-                this.#queues.set(p, [fn]);
-            }
-            else {
-                q.push(fn);
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            if (!q) {
-                this.#queues.set(dir, [new Set([fn])]);
-            }
-            else {
-                const l = q[q.length - 1];
-                if (l instanceof Set) {
-                    l.add(fn);
-                }
-                else {
-                    q.push(new Set([fn]));
-                }
-            }
-        }
-        return this.#run(fn);
-    }
-    // return the queues for each path the function cares about
-    // fn => {paths, dirs}
-    #getQueues(fn) {
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('function does not have any path reservations');
-        }
-        /* c8 ignore stop */
-        return {
-            paths: res.paths.map((path) => this.#queues.get(path)),
-            dirs: [...res.dirs].map(path => this.#queues.get(path)),
-        };
-    }
-    // check if fn is first in line for all its paths, and is
-    // included in the first set for all its dir queues
-    check(fn) {
-        const { paths, dirs } = this.#getQueues(fn);
-        return (paths.every(q => q && q[0] === fn) &&
-            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
-    }
-    // run the function if it's first in line and not already running
-    #run(fn) {
-        if (this.#running.has(fn) || !this.check(fn)) {
-            return false;
-        }
-        this.#running.add(fn);
-        fn(() => this.#clear(fn));
-        return true;
-    }
-    #clear(fn) {
-        if (!this.#running.has(fn)) {
-            return false;
-        }
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('invalid reservation');
-        }
-        /* c8 ignore stop */
-        const { paths, dirs } = res;
-        const next = new Set();
-        for (const path of paths) {
-            const q = this.#queues.get(path);
-            /* c8 ignore start */
-            if (!q || q?.[0] !== fn) {
-                continue;
-            }
-            /* c8 ignore stop */
-            const q0 = q[1];
-            if (!q0) {
-                this.#queues.delete(path);
-                continue;
-            }
-            q.shift();
-            if (typeof q0 === 'function') {
-                next.add(q0);
-            }
-            else {
-                for (const f of q0) {
-                    next.add(f);
-                }
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            const q0 = q?.[0];
-            /* c8 ignore next - type safety only */
-            if (!q || !(q0 instanceof Set))
-                continue;
-            if (q0.size === 1 && q.length === 1) {
-                this.#queues.delete(dir);
-                continue;
-            }
-            else if (q0.size === 1) {
-                q.shift();
-                // next one must be a function,
-                // or else the Set would've been reused
-                const n = q[0];
-                if (typeof n === 'function') {
-                    next.add(n);
-                }
-            }
-            else {
-                q0.delete(fn);
-            }
-        }
-        this.#running.delete(fn);
-        next.forEach(fn => this.#run(fn));
-        return true;
-    }
-}
-exports.PathReservations = PathReservations;
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
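
The PathReservations queue removed above serializes filesystem operations whose paths overlap. A usage sketch, assuming the class as defined in that file is in scope: a direct reservation on a directory blocks any later job targeting a path under it, while jobs on disjoint paths start immediately.

```js
// Illustrative sketch only; assumes the PathReservations class from the
// deleted file above is in scope.
const reservations = new PathReservations();

const job = (paths, label, ms) =>
  new Promise(resolve => {
    reservations.reserve(paths, done => {
      setTimeout(() => {
        console.log('finished', label);
        done();   // clears the reservation and starts the next queued job
        resolve();
      }, ms);
    });
  });

job(['a/b'], 'mkdir a/b', 50);           // direct reservation on the directory
job(['a/b/file'], 'write a/b/file', 0);  // queued: its parent dir a/b is reserved above
job(['c/d'], 'write c/d', 0);            // disjoint paths, runs immediately
```

On Windows, reserve() collapses every reservation onto a single key, so all jobs run strictly one after another.
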
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js
deleted file mode 100644
index d30c0f3efbe9e..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/pax.js
+++ /dev/null
@@ -1,158 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Pax = void 0;
-const node_path_1 = require("node:path");
-const header_js_1 = require("./header.js");
-class Pax {
-    atime;
-    mtime;
-    ctime;
-    charset;
-    comment;
-    gid;
-    uid;
-    gname;
-    uname;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    path;
-    size;
-    mode;
-    global;
-    constructor(obj, global = false) {
-        this.atime = obj.atime;
-        this.charset = obj.charset;
-        this.comment = obj.comment;
-        this.ctime = obj.ctime;
-        this.dev = obj.dev;
-        this.gid = obj.gid;
-        this.global = global;
-        this.gname = obj.gname;
-        this.ino = obj.ino;
-        this.linkpath = obj.linkpath;
-        this.mtime = obj.mtime;
-        this.nlink = obj.nlink;
-        this.path = obj.path;
-        this.size = obj.size;
-        this.uid = obj.uid;
-        this.uname = obj.uname;
-    }
-    encode() {
-        const body = this.encodeBody();
-        if (body === '') {
-            return Buffer.allocUnsafe(0);
-        }
-        const bodyLen = Buffer.byteLength(body);
-        // round up to 512 bytes
-        // add 512 for header
-        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
-        const buf = Buffer.allocUnsafe(bufLen);
-        // 0-fill the header section, it might not hit every field
-        for (let i = 0; i < 512; i++) {
-            buf[i] = 0;
-        }
-        new header_js_1.Header({
-            // XXX split the path
-            // then the path should be PaxHeader + basename, but less than 99,
-            // prepend with the dirname
-            /* c8 ignore start */
-            path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
-            /* c8 ignore stop */
-            mode: this.mode || 0o644,
-            uid: this.uid,
-            gid: this.gid,
-            size: bodyLen,
-            mtime: this.mtime,
-            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-            linkpath: '',
-            uname: this.uname || '',
-            gname: this.gname || '',
-            devmaj: 0,
-            devmin: 0,
-            atime: this.atime,
-            ctime: this.ctime,
-        }).encode(buf);
-        buf.write(body, 512, bodyLen, 'utf8');
-        // null pad after the body
-        for (let i = bodyLen + 512; i < buf.length; i++) {
-            buf[i] = 0;
-        }
-        return buf;
-    }
-    encodeBody() {
-        return (this.encodeField('path') +
-            this.encodeField('ctime') +
-            this.encodeField('atime') +
-            this.encodeField('dev') +
-            this.encodeField('ino') +
-            this.encodeField('nlink') +
-            this.encodeField('charset') +
-            this.encodeField('comment') +
-            this.encodeField('gid') +
-            this.encodeField('gname') +
-            this.encodeField('linkpath') +
-            this.encodeField('mtime') +
-            this.encodeField('size') +
-            this.encodeField('uid') +
-            this.encodeField('uname'));
-    }
-    encodeField(field) {
-        if (this[field] === undefined) {
-            return '';
-        }
-        const r = this[field];
-        const v = r instanceof Date ? r.getTime() / 1000 : r;
-        const s = ' ' +
-            (field === 'dev' || field === 'ino' || field === 'nlink' ?
-                'SCHILY.'
-                : '') +
-            field +
-            '=' +
-            v +
-            '\n';
-        const byteLen = Buffer.byteLength(s);
-        // the digits includes the length of the digits in ascii base-10
-        // so if it's 9 characters, then adding 1 for the 9 makes it 10
-        // which makes it 11 chars.
-        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
-        if (byteLen + digits >= Math.pow(10, digits)) {
-            digits += 1;
-        }
-        const len = digits + byteLen;
-        return len + s;
-    }
-    static parse(str, ex, g = false) {
-        return new Pax(merge(parseKV(str), ex), g);
-    }
-}
-exports.Pax = Pax;
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
-    const n = parseInt(line, 10);
-    // XXX Values with \n in them will fail this.
-    // Refactor to not be a naive line-by-line parse.
-    if (n !== Buffer.byteLength(line) + 1) {
-        return set;
-    }
-    line = line.slice((n + ' ').length);
-    const kv = line.split('=');
-    const r = kv.shift();
-    if (!r) {
-        return set;
-    }
-    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
-    const v = kv.join('=');
-    set[k] =
-        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
-            new Date(Number(v) * 1000)
-            : /^[0-9]+$/.test(v) ? +v
-                : v;
-    return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
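
The pax extended-header records produced by encodeField and consumed by parseKVLine above have the shape `<total-length> <key>=<value>\n`, where the decimal length counts every byte of the record including its own digits. A minimal sketch of that encoding and decoding (illustrative only, and like the original it does not handle values containing newlines):

```js
// Illustrative sketch only.
const encodeRecord = (key, value) => {
  const body = ` ${key}=${value}\n`;
  const bodyLen = Buffer.byteLength(body);
  let digits = Math.floor(Math.log10(bodyLen)) + 1;
  // if writing the digits pushes the total across a power of ten, one more
  // digit is needed (e.g. a 98-byte body needs the 3-digit prefix "101")
  if (bodyLen + digits >= Math.pow(10, digits)) {
    digits += 1;
  }
  return String(digits + bodyLen) + body;
};

const parseRecord = (line) => {
  const len = parseInt(line, 10);                    // leading decimal length
  const rest = line.slice(String(len).length + 1);   // skip "<len> "
  const eq = rest.indexOf('=');
  return {
    key: rest.slice(0, eq),
    value: rest.slice(eq + 1).replace(/\n$/, ''),
  };
};

console.log(JSON.stringify(encodeRecord('path', 'some/long/file/name.txt')));
// "32 path=some/long/file/name.txt\n"
console.log(parseRecord('12 path=foo\n'));
// { key: 'path', value: 'foo' }
```
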
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js
deleted file mode 100644
index 15e2d55c938a4..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/read-entry.js
+++ /dev/null
@@ -1,140 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ReadEntry = void 0;
-const minipass_1 = require("minipass");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class ReadEntry extends minipass_1.Minipass {
-    extended;
-    globalExtended;
-    header;
-    startBlockSize;
-    blockRemain;
-    remain;
-    type;
-    meta = false;
-    ignore = false;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    size = 0;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    invalid = false;
-    absolute;
-    unsupported = false;
-    constructor(header, ex, gex) {
-        super({});
-        // read entries always start life paused.  this is to avoid the
-        // situation where Minipass's auto-ending empty streams results
-        // in an entry ending before we're ready for it.
-        this.pause();
-        this.extended = ex;
-        this.globalExtended = gex;
-        this.header = header;
-        /* c8 ignore start */
-        this.remain = header.size ?? 0;
-        /* c8 ignore stop */
-        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
-        this.blockRemain = this.startBlockSize;
-        this.type = header.type;
-        switch (this.type) {
-            case 'File':
-            case 'OldFile':
-            case 'Link':
-            case 'SymbolicLink':
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'Directory':
-            case 'FIFO':
-            case 'ContiguousFile':
-            case 'GNUDumpDir':
-                break;
-            case 'NextFileHasLongLinkpath':
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath':
-            case 'GlobalExtendedHeader':
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this.meta = true;
-                break;
-            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-            // it may be worth doing the same, but with a warning.
-            default:
-                this.ignore = true;
-        }
-        /* c8 ignore start */
-        if (!header.path) {
-            throw new Error('no path provided for tar.ReadEntry');
-        }
-        /* c8 ignore stop */
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
-        this.mode = header.mode;
-        if (this.mode) {
-            this.mode = this.mode & 0o7777;
-        }
-        this.uid = header.uid;
-        this.gid = header.gid;
-        this.uname = header.uname;
-        this.gname = header.gname;
-        this.size = this.remain;
-        this.mtime = header.mtime;
-        this.atime = header.atime;
-        this.ctime = header.ctime;
-        /* c8 ignore start */
-        this.linkpath =
-            header.linkpath ?
-                (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
-                : undefined;
-        /* c8 ignore stop */
-        this.uname = header.uname;
-        this.gname = header.gname;
-        if (ex) {
-            this.#slurp(ex);
-        }
-        if (gex) {
-            this.#slurp(gex, true);
-        }
-    }
-    write(data) {
-        const writeLen = data.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        const r = this.remain;
-        const br = this.blockRemain;
-        this.remain = Math.max(0, r - writeLen);
-        this.blockRemain = Math.max(0, br - writeLen);
-        if (this.ignore) {
-            return true;
-        }
-        if (r >= writeLen) {
-            return super.write(data);
-        }
-        // r < writeLen
-        return super.write(data.subarray(0, r));
-    }
-    #slurp(ex, gex = false) {
-        if (ex.path)
-            ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
-        if (ex.linkpath)
-            ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex));
-        })));
-    }
-}
-exports.ReadEntry = ReadEntry;
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
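
ReadEntry above tracks two counters: remain, the real bytes left in the entry, and blockRemain, the bytes left including the zero padding that rounds the entry up to whole 512-byte blocks. A small sketch of that accounting:

```js
// Illustrative sketch only.
const blockSize = (size) => 512 * Math.ceil(size / 512);

const makeCounters = (size) => ({ remain: size, blockRemain: blockSize(size) });

const consume = (state, chunk) => {
  // only the first `remain` bytes of the incoming data are real entry
  // content; anything beyond that is block padding and is dropped
  const data = chunk.subarray(0, Math.min(state.remain, chunk.length));
  state.remain = Math.max(0, state.remain - chunk.length);
  state.blockRemain = Math.max(0, state.blockRemain - chunk.length);
  return data;
};

const st = makeCounters(700);                        // 700-byte file -> 1024 archive bytes
console.log(st.blockRemain);                         // 1024
console.log(consume(st, Buffer.alloc(512)).length);  // 512 (all data)
console.log(consume(st, Buffer.alloc(512)).length);  // 188 (rest is padding)
console.log(st.blockRemain);                         // 0 -> the entry is complete
```
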
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js
deleted file mode 100644
index 262deecd12f9f..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/replace.js
+++ /dev/null
@@ -1,231 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.replace = void 0;
-// tar -r
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const header_js_1 = require("./header.js");
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const options_js_1 = require("./options.js");
-const pack_js_1 = require("./pack.js");
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    let threw = true;
-    let fd;
-    let position;
-    try {
-        try {
-            fd = node_fs_1.default.openSync(opt.file, 'r+');
-        }
-        catch (er) {
-            if (er?.code === 'ENOENT') {
-                fd = node_fs_1.default.openSync(opt.file, 'w+');
-            }
-            else {
-                throw er;
-            }
-        }
-        const st = node_fs_1.default.fstatSync(fd);
-        const headBuf = Buffer.alloc(512);
-        POSITION: for (position = 0; position < st.size; position += 512) {
-            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
-                if (position === 0 &&
-                    headBuf[0] === 0x1f &&
-                    headBuf[1] === 0x8b) {
-                    throw new Error('cannot append to compressed archives');
-                }
-                if (!bytes) {
-                    break POSITION;
-                }
-            }
-            const h = new header_js_1.Header(headBuf);
-            if (!h.cksumValid) {
-                break;
-            }
-            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
-            if (position + entryBlockSize + 512 > st.size) {
-                break;
-            }
-            // the 512 for the header we just parsed will be added as well
-            // also jump ahead all the blocks for the body
-            position += entryBlockSize;
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-        }
-        threw = false;
-        streamSync(opt, p, position, fd, files);
-    }
-    finally {
-        if (threw) {
-            try {
-                node_fs_1.default.closeSync(fd);
-            }
-            catch (er) { }
-        }
-    }
-};
-const streamSync = (opt, p, position, fd, files) => {
-    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
-        fd: fd,
-        start: position,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
-    files = Array.from(files);
-    const p = new pack_js_1.Pack(opt);
-    const getPos = (fd, size, cb_) => {
-        const cb = (er, pos) => {
-            if (er) {
-                node_fs_1.default.close(fd, _ => cb_(er));
-            }
-            else {
-                cb_(null, pos);
-            }
-        };
-        let position = 0;
-        if (size === 0) {
-            return cb(null, 0);
-        }
-        let bufPos = 0;
-        const headBuf = Buffer.alloc(512);
-        const onread = (er, bytes) => {
-            if (er || typeof bytes === 'undefined') {
-                return cb(er);
-            }
-            bufPos += bytes;
-            if (bufPos < 512 && bytes) {
-                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
-            }
-            if (position === 0 &&
-                headBuf[0] === 0x1f &&
-                headBuf[1] === 0x8b) {
-                return cb(new Error('cannot append to compressed archives'));
-            }
-            // truncated header
-            if (bufPos < 512) {
-                return cb(null, position);
-            }
-            const h = new header_js_1.Header(headBuf);
-            if (!h.cksumValid) {
-                return cb(null, position);
-            }
-            /* c8 ignore next */
-            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
-            if (position + entryBlockSize + 512 > size) {
-                return cb(null, position);
-            }
-            position += entryBlockSize + 512;
-            if (position >= size) {
-                return cb(null, position);
-            }
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-            bufPos = 0;
-            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
-        };
-        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
-    };
-    const promise = new Promise((resolve, reject) => {
-        p.on('error', reject);
-        let flag = 'r+';
-        const onopen = (er, fd) => {
-            if (er && er.code === 'ENOENT' && flag === 'r+') {
-                flag = 'w+';
-                return node_fs_1.default.open(opt.file, flag, onopen);
-            }
-            if (er || !fd) {
-                return reject(er);
-            }
-            node_fs_1.default.fstat(fd, (er, st) => {
-                if (er) {
-                    return node_fs_1.default.close(fd, () => reject(er));
-                }
-                getPos(fd, st.size, (er, position) => {
-                    if (er) {
-                        return reject(er);
-                    }
-                    const stream = new fs_minipass_1.WriteStream(opt.file, {
-                        fd: fd,
-                        start: position,
-                    });
-                    p.pipe(stream);
-                    stream.on('error', reject);
-                    stream.on('close', resolve);
-                    addFilesAsync(p, files);
-                });
-            });
-        };
-        node_fs_1.default.open(opt.file, flag, onopen);
-    });
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            (0, list_js_1.list)({
-                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await (0, list_js_1.list)({
-                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, 
-/* c8 ignore start */
-() => {
-    throw new TypeError('file is required');
-}, () => {
-    throw new TypeError('file is required');
-}, 
-/* c8 ignore stop */
-(opt, entries) => {
-    if (!(0, options_js_1.isFile)(opt)) {
-        throw new TypeError('file is required');
-    }
-    if (opt.gzip ||
-        opt.brotli ||
-        opt.file.endsWith('.br') ||
-        opt.file.endsWith('.tbr')) {
-        throw new TypeError('cannot append to compressed archives');
-    }
-    if (!entries?.length) {
-        throw new TypeError('no paths specified to add/replace');
-    }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
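
replaceSync above finds the append position by walking header blocks and skipping each entry's data until it reaches a block that is not a valid header, normally the zero-filled end-of-archive marker. The sketch below restates that scan with simplified stand-ins for the Header class; it only understands plain octal size fields and omits the compressed-archive check.

```js
// Illustrative sketch only; simplified stand-ins for the Header parsing used
// by the deleted replace.js above.
const fs = require('node:fs');

const octalAt = (block, offset, length) => {
  const raw = block.subarray(offset, offset + length)
    .toString('ascii').replace(/[\0 ]/g, '');
  return raw === '' ? NaN : parseInt(raw, 8);
};

// ustar stores the entry size at bytes 124-135 and the checksum at 148-155;
// the checksum is computed with the checksum field treated as spaces
const isValidHeader = (block) => {
  const stored = octalAt(block, 148, 8);
  if (!Number.isFinite(stored)) {
    return false;
  }
  let sum = 0;
  for (let i = 0; i < 512; i++) {
    sum += i >= 148 && i < 156 ? 0x20 : block[i];
  }
  return sum === stored;
};

const findAppendPosition = (file) => {
  const fd = fs.openSync(file, 'r');
  try {
    const st = fs.fstatSync(fd);
    const buf = Buffer.alloc(512);
    let position = 0;
    while (position + 512 <= st.size) {
      fs.readSync(fd, buf, 0, 512, position);
      if (!isValidHeader(buf)) {
        break;                       // end-of-archive marker or corruption
      }
      const size = octalAt(buf, 124, 12) || 0;
      position += 512 + 512 * Math.ceil(size / 512); // header + data blocks
    }
    return position;                 // new entries get written from here
  } finally {
    fs.closeSync(fd);
  }
};
```
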
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js
deleted file mode 100644
index bb7639c35a110..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-absolute-path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripAbsolutePath = void 0;
-// unix absolute paths are also absolute on win32, so we use this for both
-const node_path_1 = require("node:path");
-const { isAbsolute, parse } = node_path_1.win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-const stripAbsolutePath = (path) => {
-    let r = '';
-    let parsed = parse(path);
-    while (isAbsolute(path) || parsed.root) {
-        // windows will think that //x/y/z has a "root" of //x/y/
-        // but strip the //?/C:/ off of //?/C:/path
-        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
-            '/'
-            : parsed.root;
-        path = path.slice(root.length);
-        r += root;
-        parsed = parse(path);
-    }
-    return [r, path];
-};
-exports.stripAbsolutePath = stripAbsolutePath;
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
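
stripAbsolutePath above applies the win32 path rules on every platform so that archives extract the same way everywhere. Representative inputs and outputs, assuming the function is in scope:

```js
// Illustrative sketch only; assumes the stripAbsolutePath function from the
// deleted file above is in scope.
console.log(stripAbsolutePath('/etc/passwd'));   // [ '/', 'etc/passwd' ]
console.log(stripAbsolutePath('c:/foo/bar'));    // [ 'c:/', 'foo/bar' ]
console.log(stripAbsolutePath('c:../foo'));      // [ 'c:', '../foo' ]
console.log(stripAbsolutePath('//x/y/z/a'));     // [ '//', 'x/y/z/a' ]
console.log(stripAbsolutePath('relative/path')); // [ '', 'relative/path' ]
```
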
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
deleted file mode 100644
index 6fa74ad6a4ac9..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripTrailingSlashes = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const stripTrailingSlashes = (str) => {
-    let i = str.length - 1;
-    let slashesStart = -1;
-    while (i > -1 && str.charAt(i) === '/') {
-        slashesStart = i;
-        i--;
-    }
-    return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-exports.stripTrailingSlashes = stripTrailingSlashes;
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
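
stripTrailingSlashes above scans backwards in a single pass instead of using a regex, since it sits on a hot path during npm install. Its behavior, assuming the function is in scope:

```js
// Illustrative sketch only; assumes the stripTrailingSlashes function from
// the deleted file above is in scope.
console.log(stripTrailingSlashes('foo/bar///')); // 'foo/bar'
console.log(stripTrailingSlashes('foo/bar'));    // 'foo/bar' (unchanged)
console.log(stripTrailingSlashes('///'));        // '' (nothing but slashes)
```
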
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js
deleted file mode 100644
index cc19ac1a2e3c6..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/symlink-error.js
+++ /dev/null
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SymlinkError = void 0;
-class SymlinkError extends Error {
-    path;
-    symlink;
-    syscall = 'symlink';
-    code = 'TAR_SYMLINK_ERROR';
-    constructor(symlink, path) {
-        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
-        this.symlink = symlink;
-        this.path = path;
-    }
-    get name() {
-        return 'SymlinkError';
-    }
-}
-exports.SymlinkError = SymlinkError;
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js
deleted file mode 100644
index cb9b684e843b7..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/types.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.code = exports.name = exports.isName = exports.isCode = void 0;
-const isCode = (c) => exports.name.has(c);
-exports.isCode = isCode;
-const isName = (c) => exports.code.has(c);
-exports.isName = isName;
-// map types from key to human-friendly name
-exports.name = new Map([
-    ['0', 'File'],
-    // same as File
-    ['', 'OldFile'],
-    ['1', 'Link'],
-    ['2', 'SymbolicLink'],
-    // Devices and FIFOs aren't fully supported
-    // they are parsed, but skipped when unpacking
-    ['3', 'CharacterDevice'],
-    ['4', 'BlockDevice'],
-    ['5', 'Directory'],
-    ['6', 'FIFO'],
-    // same as File
-    ['7', 'ContiguousFile'],
-    // pax headers
-    ['g', 'GlobalExtendedHeader'],
-    ['x', 'ExtendedHeader'],
-    // vendor-specific stuff
-    // skip
-    ['A', 'SolarisACL'],
-    // like 5, but with data, which should be skipped
-    ['D', 'GNUDumpDir'],
-    // metadata only, skip
-    ['I', 'Inode'],
-    // data = link path of next file
-    ['K', 'NextFileHasLongLinkpath'],
-    // data = path of next file
-    ['L', 'NextFileHasLongPath'],
-    // skip
-    ['M', 'ContinuationFile'],
-    // like L
-    ['N', 'OldGnuLongPath'],
-    // skip
-    ['S', 'SparseFile'],
-    // skip
-    ['V', 'TapeVolumeHeader'],
-    // like x
-    ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
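
The name and code maps above translate between the single-character type flag stored in a tar header and a readable name; isCode and isName are membership checks against those maps. For example, assuming the file's exports are in scope:

```js
// Illustrative sketch only; assumes the exports of the deleted types.js
// above are in scope.
console.log(name.get('5'));             // 'Directory'
console.log(code.get('SymbolicLink'));  // '2'
console.log(isCode('x'));               // true  ('x' marks a pax ExtendedHeader)
console.log(isName('NotAThing'));       // false
```
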
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js
deleted file mode 100644
index edf8acbb18c40..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/unpack.js
+++ /dev/null
@@ -1,919 +0,0 @@
-"use strict";
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UnpackSync = exports.Unpack = void 0;
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_assert_1 = __importDefault(require("node:assert"));
-const node_crypto_1 = require("node:crypto");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const get_write_flag_js_1 = require("./get-write-flag.js");
-const mkdir_js_1 = require("./mkdir.js");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const parse_js_1 = require("./parse.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const wc = __importStar(require("./winchars.js"));
-const path_reservations_js_1 = require("./path-reservations.js");
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
-    if (!isWindows) {
-        return node_fs_1.default.unlink(path, cb);
-    }
-    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
-    node_fs_1.default.rename(path, name, er => {
-        if (er) {
-            return cb(er);
-        }
-        node_fs_1.default.unlink(name, cb);
-    });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
-    if (!isWindows) {
-        return node_fs_1.default.unlinkSync(path);
-    }
-    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
-    node_fs_1.default.renameSync(path, name);
-    node_fs_1.default.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
-    : b !== undefined && b === b >>> 0 ? b
-        : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
-class Unpack extends parse_js_1.Parser {
-    [ENDED] = false;
-    [CHECKED_CWD] = false;
-    [PENDING] = 0;
-    reservations = new path_reservations_js_1.PathReservations();
-    transform;
-    writable = true;
-    readable = false;
-    dirCache;
-    uid;
-    gid;
-    setOwner;
-    preserveOwner;
-    processGid;
-    processUid;
-    maxDepth;
-    forceChown;
-    win32;
-    newer;
-    keep;
-    noMtime;
-    preservePaths;
-    unlink;
-    cwd;
-    strip;
-    processUmask;
-    umask;
-    dmode;
-    fmode;
-    chmod;
-    constructor(opt = {}) {
-        opt.ondone = () => {
-            this[ENDED] = true;
-            this[MAYBECLOSE]();
-        };
-        super(opt);
-        this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
-        this.chmod = !!opt.chmod;
-        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-            // need both or neither
-            if (typeof opt.uid !== 'number' ||
-                typeof opt.gid !== 'number') {
-                throw new TypeError('cannot set owner without number uid and gid');
-            }
-            if (opt.preserveOwner) {
-                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
-            }
-            this.uid = opt.uid;
-            this.gid = opt.gid;
-            this.setOwner = true;
-        }
-        else {
-            this.uid = undefined;
-            this.gid = undefined;
-            this.setOwner = false;
-        }
-        // default true for root
-        if (opt.preserveOwner === undefined &&
-            typeof opt.uid !== 'number') {
-            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
-        }
-        else {
-            this.preserveOwner = !!opt.preserveOwner;
-        }
-        this.processUid =
-            (this.preserveOwner || this.setOwner) && process.getuid ?
-                process.getuid()
-                : undefined;
-        this.processGid =
-            (this.preserveOwner || this.setOwner) && process.getgid ?
-                process.getgid()
-                : undefined;
-        // prevent excessively deep nesting of subfolders
-        // set to `Infinity` to remove this restriction
-        this.maxDepth =
-            typeof opt.maxDepth === 'number' ?
-                opt.maxDepth
-                : DEFAULT_MAX_DEPTH;
-        // mostly just for testing, but useful in some cases.
-        // Forcibly trigger a chown on every entry, no matter what
-        this.forceChown = opt.forceChown === true;
-        // turn ><?| in filenames into 0xf000-higher encoded forms
-        this.win32 = !!opt.win32 || isWindows;
-        this.newer = !!opt.newer;
-        this.keep = !!opt.keep;
-        this.noMtime = !!opt.noMtime;
-        this.preservePaths = !!opt.preservePaths;
-        this.unlink = !!opt.unlink;
-        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(opt.cwd || process.cwd()));
-        this.strip = Number(opt.strip) || 0;
-        this.processUmask = opt.processUmask ?? process.umask();
-        this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask;
-        // default mode for dirs created as parents
-        this.dmode = opt.dmode || 0o0777 & ~this.umask;
-        this.fmode = opt.fmode || 0o0666 & ~this.umask;
-        this.on('entry', entry => this[ONENTRY](entry));
-    }
-    // a bad or damaged archive is a warning for Parser, but an error
-    // when extracting.  Mark those errors as unrecoverable, because
-    // the Unpack contract cannot be met.
-    warn(code, msg, data = {}) {
-        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-            data.recoverable = false;
-        }
-        return super.warn(code, msg, data);
-    }
-    [MAYBECLOSE]() {
-        if (this[ENDED] && this[PENDING] === 0) {
-            this.emit('prefinish');
-            this.emit('finish');
-            this.emit('end');
-        }
-    }
-    [CHECKPATH](entry) {
-        const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
-        const parts = p.split('/');
-        if (this.strip) {
-            if (parts.length < this.strip) {
-                return false;
-            }
-            if (entry.type === 'Link') {
-                const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
-                if (linkparts.length >= this.strip) {
-                    entry.linkpath = linkparts.slice(this.strip).join('/');
-                }
-                else {
-                    return false;
-                }
-            }
-            parts.splice(0, this.strip);
-            entry.path = parts.join('/');
-        }
-        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-                entry,
-                path: p,
-                depth: parts.length,
-                maxDepth: this.maxDepth,
-            });
-            return false;
-        }
-        if (!this.preservePaths) {
-            if (parts.includes('..') ||
-                /* c8 ignore next */
-                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
-                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-                    entry,
-                    path: p,
-                });
-                return false;
-            }
-            // strip off the root
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
-            if (root) {
-                entry.path = String(stripped);
-                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-                    entry,
-                    path: p,
-                });
-            }
-        }
-        if (node_path_1.default.isAbsolute(entry.path)) {
-            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
-        }
-        else {
-            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
-        }
-        // if we somehow ended up with a path that escapes the cwd, and we are
-        // not in preservePaths mode, then something is fishy!  This should have
-        // been prevented above, so ignore this for coverage.
-        /* c8 ignore start - defense in depth */
-        if (!this.preservePaths &&
-            typeof entry.absolute === 'string' &&
-            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-            entry.absolute !== this.cwd) {
-            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-                entry,
-                path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
-                resolvedPath: entry.absolute,
-                cwd: this.cwd,
-            });
-            return false;
-        }
-        /* c8 ignore stop */
-        // an archive can set properties on the extraction directory, but it
-        // may not replace the cwd with a different kind of thing entirely.
-        if (entry.absolute === this.cwd &&
-            entry.type !== 'Directory' &&
-            entry.type !== 'GNUDumpDir') {
-            return false;
-        }
-        // only encode : chars that aren't drive letter indicators
-        if (this.win32) {
-            const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
-            entry.absolute =
-                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
-            const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
-            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
-        }
-        return true;
-    }
-    [ONENTRY](entry) {
-        if (!this[CHECKPATH](entry)) {
-            return entry.resume();
-        }
-        node_assert_1.default.equal(typeof entry.absolute, 'string');
-        switch (entry.type) {
-            case 'Directory':
-            case 'GNUDumpDir':
-                if (entry.mode) {
-                    entry.mode = entry.mode | 0o700;
-                }
-            // eslint-disable-next-line no-fallthrough
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-            case 'Link':
-            case 'SymbolicLink':
-                return this[CHECKFS](entry);
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'FIFO':
-            default:
-                return this[UNSUPPORTED](entry);
-        }
-    }
-    [ONERROR](er, entry) {
-        // Cwd has to exist, or else nothing works. That's serious.
-        // Other errors are warnings, which raise the error in strict
-        // mode, but otherwise continue on.
-        if (er.name === 'CwdError') {
-            this.emit('error', er);
-        }
-        else {
-            this.warn('TAR_ENTRY_ERROR', er, { entry });
-            this[UNPEND]();
-            entry.resume();
-        }
-    }
-    [MKDIR](dir, mode, cb) {
-        (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
-            uid: this.uid,
-            gid: this.gid,
-            processUid: this.processUid,
-            processGid: this.processGid,
-            umask: this.processUmask,
-            preserve: this.preservePaths,
-            unlink: this.unlink,
-            cache: this.dirCache,
-            cwd: this.cwd,
-            mode: mode,
-        }, cb);
-    }
-    [DOCHOWN](entry) {
-        // in preserve owner mode, chown if the entry doesn't match process
-        // in set owner mode, chown if setting doesn't match process
-        return (this.forceChown ||
-            (this.preserveOwner &&
-                ((typeof entry.uid === 'number' &&
-                    entry.uid !== this.processUid) ||
-                    (typeof entry.gid === 'number' &&
-                        entry.gid !== this.processGid))) ||
-            (typeof this.uid === 'number' &&
-                this.uid !== this.processUid) ||
-            (typeof this.gid === 'number' && this.gid !== this.processGid));
-    }
-    [UID](entry) {
-        return uint32(this.uid, entry.uid, this.processUid);
-    }
-    [GID](entry) {
-        return uint32(this.gid, entry.gid, this.processGid);
-    }
-    [FILE](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const stream = new fsm.WriteStream(String(entry.absolute), {
-            // slight lie, but it can be numeric flags
-            flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
-            mode: mode,
-            autoClose: false,
-        });
-        stream.on('error', (er) => {
-            if (stream.fd) {
-                node_fs_1.default.close(stream.fd, () => { });
-            }
-            // flush all the data out so that we aren't left hanging
-            // if the error wasn't actually fatal.  otherwise the parse
-            // is blocked, and we never proceed.
-            stream.write = () => true;
-            this[ONERROR](er, entry);
-            fullyDone();
-        });
-        let actions = 1;
-        const done = (er) => {
-            if (er) {
-                /* c8 ignore start - we should always have a fd by now */
-                if (stream.fd) {
-                    node_fs_1.default.close(stream.fd, () => { });
-                }
-                /* c8 ignore stop */
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            if (--actions === 0) {
-                if (stream.fd !== undefined) {
-                    node_fs_1.default.close(stream.fd, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                        }
-                        else {
-                            this[UNPEND]();
-                        }
-                        fullyDone();
-                    });
-                }
-            }
-        };
-        stream.on('finish', () => {
-            // if futimes fails, try utimes
-            // if utimes fails, fail with the original error
-            // same for fchown/chown
-            const abs = String(entry.absolute);
-            const fd = stream.fd;
-            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
-                actions++;
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                node_fs_1.default.futimes(fd, atime, mtime, er => er ?
-                    node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
-                    : done());
-            }
-            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
-                actions++;
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                if (typeof uid === 'number' && typeof gid === 'number') {
-                    node_fs_1.default.fchown(fd, uid, gid, er => er ?
-                        node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
-                        : done());
-                }
-            }
-            done();
-        });
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => {
-                this[ONERROR](er, entry);
-                fullyDone();
-            });
-            entry.pipe(tx);
-        }
-        tx.pipe(stream);
-    }
-    [DIRECTORY](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        this[MKDIR](String(entry.absolute), mode, er => {
-            if (er) {
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            let actions = 1;
-            const done = () => {
-                if (--actions === 0) {
-                    fullyDone();
-                    this[UNPEND]();
-                    entry.resume();
-                }
-            };
-            if (entry.mtime && !this.noMtime) {
-                actions++;
-                node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
-            }
-            if (this[DOCHOWN](entry)) {
-                actions++;
-                node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
-            }
-            done();
-        });
-    }
-    [UNSUPPORTED](entry) {
-        entry.unsupported = true;
-        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
-        entry.resume();
-    }
-    [SYMLINK](entry, done) {
-        this[LINK](entry, String(entry.linkpath), 'symlink', done);
-    }
-    [HARDLINK](entry, done) {
-        const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
-        this[LINK](entry, linkpath, 'link', done);
-    }
-    [PEND]() {
-        this[PENDING]++;
-    }
-    [UNPEND]() {
-        this[PENDING]--;
-        this[MAYBECLOSE]();
-    }
-    [SKIP](entry) {
-        this[UNPEND]();
-        entry.resume();
-    }
-    // Check if we can reuse an existing filesystem entry safely and
-    // overwrite it, rather than unlinking and recreating
-    // Windows doesn't report a useful nlink, so we just never reuse entries
-    [ISREUSABLE](entry, st) {
-        return (entry.type === 'File' &&
-            !this.unlink &&
-            st.isFile() &&
-            st.nlink <= 1 &&
-            !isWindows);
-    }
-    // check if a thing is there, and if so, try to clobber it
-    [CHECKFS](entry) {
-        this[PEND]();
-        const paths = [entry.path];
-        if (entry.linkpath) {
-            paths.push(entry.linkpath);
-        }
-        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
-    }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
-    [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
-        const done = (er) => {
-            this[PRUNECACHE](entry);
-            fullyDone(er);
-        };
-        const checkCwd = () => {
-            this[MKDIR](this.cwd, this.dmode, er => {
-                if (er) {
-                    this[ONERROR](er, entry);
-                    done();
-                    return;
-                }
-                this[CHECKED_CWD] = true;
-                start();
-            });
-        };
-        const start = () => {
-            if (entry.absolute !== this.cwd) {
-                const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
-                if (parent !== this.cwd) {
-                    return this[MKDIR](parent, this.dmode, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                            done();
-                            return;
-                        }
-                        afterMakeParent();
-                    });
-                }
-            }
-            afterMakeParent();
-        };
-        const afterMakeParent = () => {
-            node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
-                if (st &&
-                    (this.keep ||
-                        /* c8 ignore next */
-                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-                    this[SKIP](entry);
-                    done();
-                    return;
-                }
-                if (lstatEr || this[ISREUSABLE](entry, st)) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                if (st.isDirectory()) {
-                    if (entry.type === 'Directory') {
-                        const needChmod = this.chmod &&
-                            entry.mode &&
-                            (st.mode & 0o7777) !== entry.mode;
-                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
-                        if (!needChmod) {
-                            return afterChmod();
-                        }
-                        return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
-                    }
-                    // Not a dir entry, have to remove it.
-                    // NB: the only way to end up with an entry that is the cwd
-                    // itself, in such a way that == does not detect, is a
-                    // tricky windows absolute path with UNC or 8.3 parts (and
-                    // preservePaths:true, or else it will have been stripped).
-                    // In that case, the user has opted out of path protections
-                    // explicitly, so if they blow away the cwd, c'est la vie.
-                    if (entry.absolute !== this.cwd) {
-                        return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
-                    }
-                }
-                // not a dir, and not reusable
-                // don't remove if the cwd, we want that error
-                if (entry.absolute === this.cwd) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
-            });
-        };
-        if (this[CHECKED_CWD]) {
-            start();
-        }
-        else {
-            checkCwd();
-        }
-    }
-    [MAKEFS](er, entry, done) {
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        switch (entry.type) {
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-                return this[FILE](entry, done);
-            case 'Link':
-                return this[HARDLINK](entry, done);
-            case 'SymbolicLink':
-                return this[SYMLINK](entry, done);
-            case 'Directory':
-            case 'GNUDumpDir':
-                return this[DIRECTORY](entry, done);
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        // XXX: get the type ('symlink' or 'junction') for windows
-        node_fs_1.default[link](linkpath, String(entry.absolute), er => {
-            if (er) {
-                this[ONERROR](er, entry);
-            }
-            else {
-                this[UNPEND]();
-                entry.resume();
-            }
-            done();
-        });
-    }
-}
-exports.Unpack = Unpack;
-const callSync = (fn) => {
-    try {
-        return [null, fn()];
-    }
-    catch (er) {
-        return [er, null];
-    }
-};
-class UnpackSync extends Unpack {
-    sync = true;
-    [MAKEFS](er, entry) {
-        return super[MAKEFS](er, entry, () => { });
-    }
-    [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
-        if (!this[CHECKED_CWD]) {
-            const er = this[MKDIR](this.cwd, this.dmode);
-            if (er) {
-                return this[ONERROR](er, entry);
-            }
-            this[CHECKED_CWD] = true;
-        }
-        // don't bother to make the parent if the current entry is the cwd,
-        // we've already checked it.
-        if (entry.absolute !== this.cwd) {
-            const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
-            if (parent !== this.cwd) {
-                const mkParent = this[MKDIR](parent, this.dmode);
-                if (mkParent) {
-                    return this[ONERROR](mkParent, entry);
-                }
-            }
-        }
-        const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
-        if (st &&
-            (this.keep ||
-                /* c8 ignore next */
-                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-            return this[SKIP](entry);
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-            return this[MAKEFS](null, entry);
-        }
-        if (st.isDirectory()) {
-            if (entry.type === 'Directory') {
-                const needChmod = this.chmod &&
-                    entry.mode &&
-                    (st.mode & 0o7777) !== entry.mode;
-                const [er] = needChmod ?
-                    callSync(() => {
-                        node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
-                    })
-                    : [];
-                return this[MAKEFS](er, entry);
-            }
-            // not a dir entry, have to remove it
-            const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
-            this[MAKEFS](er, entry);
-        }
-        // not a dir, and not reusable.
-        // don't remove if it's the cwd, since we want that error.
-        const [er] = entry.absolute === this.cwd ?
-            []
-            : callSync(() => unlinkFileSync(String(entry.absolute)));
-        this[MAKEFS](er, entry);
-    }
-    [FILE](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const oner = (er) => {
-            let closeError;
-            try {
-                node_fs_1.default.closeSync(fd);
-            }
-            catch (e) {
-                closeError = e;
-            }
-            if (er || closeError) {
-                this[ONERROR](er || closeError, entry);
-            }
-            done();
-        };
-        let fd;
-        try {
-            fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
-        }
-        catch (er) {
-            return oner(er);
-        }
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => this[ONERROR](er, entry));
-            entry.pipe(tx);
-        }
-        tx.on('data', (chunk) => {
-            try {
-                node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
-            }
-            catch (er) {
-                oner(er);
-            }
-        });
-        tx.on('end', () => {
-            let er = null;
-            // try both, falling futimes back to utimes
-            // if either fails, handle the first error
-            if (entry.mtime && !this.noMtime) {
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                try {
-                    node_fs_1.default.futimesSync(fd, atime, mtime);
-                }
-                catch (futimeser) {
-                    try {
-                        node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
-                    }
-                    catch (utimeser) {
-                        er = futimeser;
-                    }
-                }
-            }
-            if (this[DOCHOWN](entry)) {
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                try {
-                    node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
-                }
-                catch (fchowner) {
-                    try {
-                        node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
-                    }
-                    catch (chowner) {
-                        er = er || fchowner;
-                    }
-                }
-            }
-            oner(er);
-        });
-    }
-    [DIRECTORY](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        const er = this[MKDIR](String(entry.absolute), mode);
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        if (entry.mtime && !this.noMtime) {
-            try {
-                node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-        if (this[DOCHOWN](entry)) {
-            try {
-                node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
-            }
-            catch (er) { }
-        }
-        done();
-        entry.resume();
-    }
-    [MKDIR](dir, mode) {
-        try {
-            return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
-                uid: this.uid,
-                gid: this.gid,
-                processUid: this.processUid,
-                processGid: this.processGid,
-                umask: this.processUmask,
-                preserve: this.preservePaths,
-                unlink: this.unlink,
-                cache: this.dirCache,
-                cwd: this.cwd,
-                mode: mode,
-            });
-        }
-        catch (er) {
-            return er;
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        const ls = `${link}Sync`;
-        try {
-            node_fs_1.default[ls](linkpath, String(entry.absolute));
-            done();
-            entry.resume();
-        }
-        catch (er) {
-            return this[ONERROR](er, entry);
-        }
-    }
-}
-exports.UnpackSync = UnpackSync;
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js
deleted file mode 100644
index 7687896f4bfee..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/update.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-// tar -u
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.update = void 0;
-const make_command_js_1 = require("./make-command.js");
-const replace_js_1 = require("./replace.js");
-// just call tar.r with the filter and mtimeCache
-exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => {
-    replace_js_1.replace.validate?.(opt, entries);
-    mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
-    const filter = opt.filter;
-    if (!opt.mtimeCache) {
-        opt.mtimeCache = new Map();
-    }
-    opt.filter =
-        filter ?
-            (path, stat) => filter(path, stat) &&
-                !(
-                /* c8 ignore start */
-                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                    (stat.mtime ?? 0))
-                /* c8 ignore stop */
-                )
-            : (path, stat) => !(
-            /* c8 ignore start */
-            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                (stat.mtime ?? 0))
-            /* c8 ignore stop */
-            );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js
deleted file mode 100644
index f25502776e36a..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/warn-method.js
+++ /dev/null
@@ -1,31 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.warnMethod = void 0;
-const warnMethod = (self, code, message, data = {}) => {
-    if (self.file) {
-        data.file = self.file;
-    }
-    if (self.cwd) {
-        data.cwd = self.cwd;
-    }
-    data.code =
-        (message instanceof Error &&
-            message.code) ||
-            code;
-    data.tarCode = code;
-    if (!self.strict && data.recoverable !== false) {
-        if (message instanceof Error) {
-            data = Object.assign(message, data);
-            message = message.message;
-        }
-        self.emit('warn', code, message, data);
-    }
-    else if (message instanceof Error) {
-        self.emit('error', Object.assign(message, data));
-    }
-    else {
-        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
-    }
-};
-exports.warnMethod = warnMethod;
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js
deleted file mode 100644
index c0a4405812929..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/winchars.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.decode = exports.encode = void 0;
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-exports.encode = encode;
-const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-exports.decode = decode;
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js
deleted file mode 100644
index 45b7efeb79502..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/commonjs/write-entry.js
+++ /dev/null
@@ -1,689 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0;
-const fs_1 = __importDefault(require("fs"));
-const minipass_1 = require("minipass");
-const path_1 = __importDefault(require("path"));
-const header_js_1 = require("./header.js");
-const mode_fix_js_1 = require("./mode-fix.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const options_js_1 = require("./options.js");
-const pax_js_1 = require("./pax.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const warn_method_js_1 = require("./warn-method.js");
-const winchars = __importStar(require("./winchars.js"));
-const prefixPath = (path, prefix) => {
-    if (!prefix) {
-        return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path);
-    }
-    path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, '');
-    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-class WriteEntry extends minipass_1.Minipass {
-    path;
-    portable;
-    myuid = (process.getuid && process.getuid()) || 0;
-    // until node has builtin pwnam functions, this'll have to do
-    myuser = process.env.USER || '';
-    maxReadSize;
-    linkCache;
-    statCache;
-    preservePaths;
-    cwd;
-    strict;
-    mtime;
-    noPax;
-    noMtime;
-    prefix;
-    fd;
-    blockLen = 0;
-    blockRemain = 0;
-    buf;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    offset = 0;
-    win32;
-    absolute;
-    header;
-    type;
-    linkpath;
-    stat;
-    onWriteEntry;
-    #hadError = false;
-    constructor(p, opt_ = {}) {
-        const opt = (0, options_js_1.dealias)(opt_);
-        super();
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p);
-        // suppress atime, ctime, uid, gid, uname, gname
-        this.portable = !!opt.portable;
-        this.maxReadSize = opt.maxReadSize || maxReadSize;
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.preservePaths = !!opt.preservePaths;
-        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd());
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.mtime = opt.mtime;
-        this.prefix =
-            opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined;
-        this.onWriteEntry = opt.onWriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.win32 = !!opt.win32 || process.platform === 'win32';
-        if (this.win32) {
-            // force the \ to / normalization, since we might not *actually*
-            // be on windows, but want \ to be considered a path separator.
-            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
-            p = p.replace(/\\/g, '/');
-        }
-        this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p));
-        if (this.path === '') {
-            this.path = './';
-        }
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        const cs = this.statCache.get(this.absolute);
-        if (cs) {
-            this[ONLSTAT](cs);
-        }
-        else {
-            this[LSTAT]();
-        }
-    }
-    warn(code, message, data = {}) {
-        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    emit(ev, ...data) {
-        if (ev === 'error') {
-            this.#hadError = true;
-        }
-        return super.emit(ev, ...data);
-    }
-    [LSTAT]() {
-        fs_1.default.lstat(this.absolute, (er, stat) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONLSTAT](stat);
-        });
-    }
-    [ONLSTAT](stat) {
-        this.statCache.set(this.absolute, stat);
-        this.stat = stat;
-        if (!stat.isFile()) {
-            stat.size = 0;
-        }
-        this.type = getType(stat);
-        this.emit('stat', stat);
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        switch (this.type) {
-            case 'File':
-                return this[FILE]();
-            case 'Directory':
-                return this[DIRECTORY]();
-            case 'SymbolicLink':
-                return this[SYMLINK]();
-            // unsupported types are ignored.
-            default:
-                return this.end();
-        }
-    }
-    [MODE](mode) {
-        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [HEADER]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot write header before stat');
-        }
-        /* c8 ignore stop */
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.onWriteEntry?.(this);
-        this.header = new header_js_1.Header({
-            path: this[PREFIX](this.path),
-            // only apply the prefix to hard links.
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this[MODE](this.stat.mode),
-            uid: this.portable ? undefined : this.stat.uid,
-            gid: this.portable ? undefined : this.stat.gid,
-            size: this.stat.size,
-            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
-            /* c8 ignore next */
-            type: this.type === 'Unsupported' ? undefined : this.type,
-            uname: this.portable ? undefined
-                : this.stat.uid === this.myuid ? this.myuser
-                    : '',
-            atime: this.portable ? undefined : this.stat.atime,
-            ctime: this.portable ? undefined : this.stat.ctime,
-        });
-        if (this.header.encode() && !this.noPax) {
-            super.write(new pax_js_1.Pax({
-                atime: this.portable ? undefined : this.header.atime,
-                ctime: this.portable ? undefined : this.header.ctime,
-                gid: this.portable ? undefined : this.header.gid,
-                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.header.size,
-                uid: this.portable ? undefined : this.header.uid,
-                uname: this.portable ? undefined : this.header.uname,
-                dev: this.portable ? undefined : this.stat.dev,
-                ino: this.portable ? undefined : this.stat.ino,
-                nlink: this.portable ? undefined : this.stat.nlink,
-            }).encode());
-        }
-        const block = this.header?.block;
-        /* c8 ignore start */
-        if (!block) {
-            throw new Error('failed to encode header');
-        }
-        /* c8 ignore stop */
-        super.write(block);
-    }
-    [DIRECTORY]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create directory entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.path.slice(-1) !== '/') {
-            this.path += '/';
-        }
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [SYMLINK]() {
-        fs_1.default.readlink(this.absolute, (er, linkpath) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADLINK](linkpath);
-        });
-    }
-    [ONREADLINK](linkpath) {
-        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath);
-        this[HEADER]();
-        this.end();
-    }
-    [HARDLINK](linkpath) {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create link entry without stat');
-        }
-        /* c8 ignore stop */
-        this.type = 'Link';
-        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath));
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [FILE]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create file entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.stat.nlink > 1) {
-            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
-            const linkpath = this.linkCache.get(linkKey);
-            if (linkpath?.indexOf(this.cwd) === 0) {
-                return this[HARDLINK](linkpath);
-            }
-            this.linkCache.set(linkKey, this.absolute);
-        }
-        this[HEADER]();
-        if (this.stat.size === 0) {
-            return this.end();
-        }
-        this[OPENFILE]();
-    }
-    [OPENFILE]() {
-        fs_1.default.open(this.absolute, 'r', (er, fd) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONOPENFILE](fd);
-        });
-    }
-    [ONOPENFILE](fd) {
-        this.fd = fd;
-        if (this.#hadError) {
-            return this[CLOSE]();
-        }
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('should stat before calling onopenfile');
-        }
-        /* c8 ignore stop */
-        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
-        this.blockRemain = this.blockLen;
-        const bufLen = Math.min(this.blockLen, this.maxReadSize);
-        this.buf = Buffer.allocUnsafe(bufLen);
-        this.offset = 0;
-        this.pos = 0;
-        this.remain = this.stat.size;
-        this.length = this.buf.length;
-        this[READ]();
-    }
-    [READ]() {
-        const { fd, buf, offset, length, pos } = this;
-        if (fd === undefined || buf === undefined) {
-            throw new Error('cannot read file without first opening');
-        }
-        fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-            if (er) {
-                // ignoring the error from close(2) is a bad practice, but at
-                // this point we already have an error, don't need another one
-                return this[CLOSE](() => this.emit('error', er));
-            }
-            this[ONREAD](bytesRead);
-        });
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs_1.default.close(this.fd, cb);
-    }
-    [ONREAD](bytesRead) {
-        if (bytesRead <= 0 && this.remain > 0) {
-            const er = Object.assign(new Error('encountered unexpected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        if (bytesRead > this.remain) {
-            const er = Object.assign(new Error('did not encounter expected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('should have created buffer prior to reading');
-        }
-        /* c8 ignore stop */
-        // null out the rest of the buffer, if we could fit the block padding
-        // at the end of this loop, we've incremented bytesRead and this.remain
-        // to be incremented up to the blockRemain level, as if we had expected
-        // to get a null-padded file, and read it until the end.  then we will
-        // decrement both remain and blockRemain by bytesRead, and know that we
-        // reached the expected EOF, without any null buffer to append.
-        if (bytesRead === this.remain) {
-            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-                this.buf[i + this.offset] = 0;
-                bytesRead++;
-                this.remain++;
-            }
-        }
-        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
-            this.buf
-            : this.buf.subarray(this.offset, this.offset + bytesRead);
-        const flushed = this.write(chunk);
-        if (!flushed) {
-            this[AWAITDRAIN](() => this[ONDRAIN]());
-        }
-        else {
-            this[ONDRAIN]();
-        }
-    }
-    [AWAITDRAIN](cb) {
-        this.once('drain', cb);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        if (this.blockRemain < chunk.length) {
-            const er = Object.assign(new Error('writing more data than expected'), {
-                path: this.absolute,
-            });
-            return this.emit('error', er);
-        }
-        this.remain -= chunk.length;
-        this.blockRemain -= chunk.length;
-        this.pos += chunk.length;
-        this.offset += chunk.length;
-        return super.write(chunk, null, cb);
-    }
-    [ONDRAIN]() {
-        if (!this.remain) {
-            if (this.blockRemain) {
-                super.write(Buffer.alloc(this.blockRemain));
-            }
-            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('buffer lost somehow in ONDRAIN');
-        }
-        /* c8 ignore stop */
-        if (this.offset >= this.length) {
-            // if we only have a smaller bit left to read, alloc a smaller buffer
-            // otherwise, keep it the same length it was before.
-            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
-            this.offset = 0;
-        }
-        this.length = this.buf.length - this.offset;
-        this[READ]();
-    }
-}
-exports.WriteEntry = WriteEntry;
-class WriteEntrySync extends WriteEntry {
-    sync = true;
-    [LSTAT]() {
-        this[ONLSTAT](fs_1.default.lstatSync(this.absolute));
-    }
-    [SYMLINK]() {
-        this[ONREADLINK](fs_1.default.readlinkSync(this.absolute));
-    }
-    [OPENFILE]() {
-        this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r'));
-    }
-    [READ]() {
-        let threw = true;
-        try {
-            const { fd, buf, offset, length, pos } = this;
-            /* c8 ignore start */
-            if (fd === undefined || buf === undefined) {
-                throw new Error('fd and buf must be set in READ method');
-            }
-            /* c8 ignore stop */
-            const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos);
-            this[ONREAD](bytesRead);
-            threw = false;
-        }
-        finally {
-            // ignoring the error from close(2) is a bad practice, but at
-            // this point we already have an error, don't need another one
-            if (threw) {
-                try {
-                    this[CLOSE](() => { });
-                }
-                catch (er) { }
-            }
-        }
-    }
-    [AWAITDRAIN](cb) {
-        cb();
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs_1.default.closeSync(this.fd);
-        cb();
-    }
-}
-exports.WriteEntrySync = WriteEntrySync;
-class WriteEntryTar extends minipass_1.Minipass {
-    blockLen = 0;
-    blockRemain = 0;
-    buf = 0;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    preservePaths;
-    portable;
-    strict;
-    noPax;
-    noMtime;
-    readEntry;
-    type;
-    prefix;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    header;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    size;
-    onWriteEntry;
-    warn(code, message, data = {}) {
-        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    constructor(readEntry, opt_ = {}) {
-        const opt = (0, options_js_1.dealias)(opt_);
-        super();
-        this.preservePaths = !!opt.preservePaths;
-        this.portable = !!opt.portable;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.onWriteEntry = opt.onWriteEntry;
-        this.readEntry = readEntry;
-        const { type } = readEntry;
-        /* c8 ignore start */
-        if (type === 'Unsupported') {
-            throw new Error('writing entry that should be ignored');
-        }
-        /* c8 ignore stop */
-        this.type = type;
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.prefix = opt.prefix;
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path);
-        this.mode =
-            readEntry.mode !== undefined ?
-                this[MODE](readEntry.mode)
-                : undefined;
-        this.uid = this.portable ? undefined : readEntry.uid;
-        this.gid = this.portable ? undefined : readEntry.gid;
-        this.uname = this.portable ? undefined : readEntry.uname;
-        this.gname = this.portable ? undefined : readEntry.gname;
-        this.size = readEntry.size;
-        this.mtime =
-            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
-        this.atime = this.portable ? undefined : readEntry.atime;
-        this.ctime = this.portable ? undefined : readEntry.ctime;
-        this.linkpath =
-            readEntry.linkpath !== undefined ?
-                (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath)
-                : undefined;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.remain = readEntry.size;
-        this.blockRemain = readEntry.startBlockSize;
-        this.onWriteEntry?.(this);
-        this.header = new header_js_1.Header({
-            path: this[PREFIX](this.path),
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this.mode,
-            uid: this.portable ? undefined : this.uid,
-            gid: this.portable ? undefined : this.gid,
-            size: this.size,
-            mtime: this.noMtime ? undefined : this.mtime,
-            type: this.type,
-            uname: this.portable ? undefined : this.uname,
-            atime: this.portable ? undefined : this.atime,
-            ctime: this.portable ? undefined : this.ctime,
-        });
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        if (this.header.encode() && !this.noPax) {
-            super.write(new pax_js_1.Pax({
-                atime: this.portable ? undefined : this.atime,
-                ctime: this.portable ? undefined : this.ctime,
-                gid: this.portable ? undefined : this.gid,
-                mtime: this.noMtime ? undefined : this.mtime,
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.size,
-                uid: this.portable ? undefined : this.uid,
-                uname: this.portable ? undefined : this.uname,
-                dev: this.portable ? undefined : this.readEntry.dev,
-                ino: this.portable ? undefined : this.readEntry.ino,
-                nlink: this.portable ? undefined : this.readEntry.nlink,
-            }).encode());
-        }
-        const b = this.header?.block;
-        /* c8 ignore start */
-        if (!b)
-            throw new Error('failed to encode header');
-        /* c8 ignore stop */
-        super.write(b);
-        readEntry.pipe(this);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [MODE](mode) {
-        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        const writeLen = chunk.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        this.blockRemain -= writeLen;
-        return super.write(chunk, cb);
-    }
-    end(chunk, encoding, cb) {
-        if (this.blockRemain) {
-            super.write(Buffer.alloc(this.blockRemain));
-        }
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding ?? 'utf8');
-        }
-        if (cb)
-            this.once('finish', cb);
-        chunk ? super.end(chunk, cb) : super.end(cb);
-        /* c8 ignore stop */
-        return this;
-    }
-}
-exports.WriteEntryTar = WriteEntryTar;
-const getType = (stat) => stat.isFile() ? 'File'
-    : stat.isDirectory() ? 'Directory'
-        : stat.isSymbolicLink() ? 'SymbolicLink'
-            : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js
deleted file mode 100644
index 512a9911d70d5..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/create.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import path from 'node:path';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Pack, PackSync } from './pack.js';
-const createFileSync = (opt, files) => {
-    const p = new PackSync(opt);
-    const stream = new WriteStreamSync(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
-    const p = new Pack(opt);
-    const stream = new WriteStream(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    const promise = new Promise((res, rej) => {
-        stream.on('error', rej);
-        stream.on('close', res);
-        p.on('error', rej);
-    });
-    addFilesAsync(p, files);
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            list({
-                file: path.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await list({
-                file: path.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => {
-                    p.add(entry);
-                },
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-const createSync = (opt, files) => {
-    const p = new PackSync(opt);
-    addFilesSync(p, files);
-    return p;
-};
-const createAsync = (opt, files) => {
-    const p = new Pack(opt);
-    addFilesAsync(p, files);
-    return p;
-};
-export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
-    if (!files?.length) {
-        throw new TypeError('no paths specified to add to archive');
-    }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js
deleted file mode 100644
index 289a066b8e031..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/cwd-error.js
+++ /dev/null
@@ -1,14 +0,0 @@
-export class CwdError extends Error {
-    path;
-    code;
-    syscall = 'chdir';
-    constructor(path, code) {
-        super(`${code}: Cannot cd into '${path}'`);
-        this.path = path;
-        this.code = code;
-    }
-    get name() {
-        return 'CwdError';
-    }
-}
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js
deleted file mode 100644
index 2274feef26e78..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/extract.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// tar -x
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { filesFilter } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Unpack, UnpackSync } from './unpack.js';
-const extractFileSync = (opt) => {
-    const u = new UnpackSync(opt);
-    const file = opt.file;
-    const stat = fs.statSync(file);
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const stream = new fsm.ReadStreamSync(file, {
-        readSize: readSize,
-        size: stat.size,
-    });
-    stream.pipe(u);
-};
-const extractFile = (opt, _) => {
-    const u = new Unpack(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        u.on('error', reject);
-        u.on('close', resolve);
-        // This trades a zero-byte read() syscall for a stat
-        // However, it will usually result in less memory allocation
-        fs.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(u);
-            }
-        });
-    });
-    return p;
-};
-export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => {
-    if (files?.length)
-        filesFilter(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js
deleted file mode 100644
index 2c7f3e8b28fda..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/get-write-flag.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-import fs from 'fs';
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
-    fs.constants.UV_FS_O_FILEMAP ||
-    0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-export const getWriteFlag = !fMapEnabled ?
-    () => 'w'
-    : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js
deleted file mode 100644
index e15192b14b16e..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/header.js
+++ /dev/null
@@ -1,279 +0,0 @@
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-import { posix as pathModule } from 'node:path';
-import * as large from './large-numbers.js';
-import * as types from './types.js';
-export class Header {
-    cksumValid = false;
-    needPax = false;
-    nullBlock = false;
-    block;
-    path;
-    mode;
-    uid;
-    gid;
-    size;
-    cksum;
-    #type = 'Unsupported';
-    linkpath;
-    uname;
-    gname;
-    devmaj = 0;
-    devmin = 0;
-    atime;
-    ctime;
-    mtime;
-    charset;
-    comment;
-    constructor(data, off = 0, ex, gex) {
-        if (Buffer.isBuffer(data)) {
-            this.decode(data, off || 0, ex, gex);
-        }
-        else if (data) {
-            this.#slurp(data);
-        }
-    }
-    decode(buf, off, ex, gex) {
-        if (!off) {
-            off = 0;
-        }
-        if (!buf || !(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
-        this.cksum = decNumber(buf, off + 148, 12);
-        // if we have extended or global extended headers, apply them now
-        // See https://github.com/npm/node-tar/pull/187
-        // Apply global before local, so it overrides
-        if (gex)
-            this.#slurp(gex, true);
-        if (ex)
-            this.#slurp(ex);
-        // old tar versions marked dirs as a file with a trailing /
-        const t = decString(buf, off + 156, 1);
-        if (types.isCode(t)) {
-            this.#type = t || '0';
-        }
-        if (this.#type === '0' && this.path.slice(-1) === '/') {
-            this.#type = '5';
-        }
-        // tar implementations sometimes incorrectly put the stat(dir).size
-        // as the size in the tarball, even though Directory entries are
-        // not able to have any body at all.  In the very rare chance that
-        // it actually DOES have a body, we weren't going to do anything with
-        // it anyway, and it'll just be a warning about an invalid header.
-        if (this.#type === '5') {
-            this.size = 0;
-        }
-        this.linkpath = decString(buf, off + 157, 100);
-        if (buf.subarray(off + 257, off + 265).toString() ===
-            'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
-            /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
-            /* c8 ignore stop */
-            if (buf[off + 475] !== 0) {
-                // definitely a prefix, definitely >130 chars.
-                const prefix = decString(buf, off + 345, 155);
-                this.path = prefix + '/' + this.path;
-            }
-            else {
-                const prefix = decString(buf, off + 345, 130);
-                if (prefix) {
-                    this.path = prefix + '/' + this.path;
-                }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
-            }
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksumValid = sum === this.cksum;
-        if (this.cksum === undefined && sum === 8 * 0x20) {
-            this.nullBlock = true;
-        }
-    }
-    #slurp(ex, gex = false) {
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex) ||
-                (k === 'linkpath' && gex) ||
-                k === 'global');
-        })));
-    }
-    encode(buf, off = 0) {
-        if (!buf) {
-            buf = this.block = Buffer.alloc(512);
-        }
-        if (this.#type === 'Unsupported') {
-            this.#type = '0';
-        }
-        if (!(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        const prefixSize = this.ctime || this.atime ? 130 : 155;
-        const split = splitPrefix(this.path || '', prefixSize);
-        const path = split[0];
-        const prefix = split[1];
-        this.needPax = !!split[2];
-        this.needPax = encString(buf, off, 100, path) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 124, 12, this.size) || this.needPax;
-        this.needPax =
-            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
-        buf[off + 156] = this.#type.charCodeAt(0);
-        this.needPax =
-            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
-        buf.write('ustar\u000000', off + 257, 8);
-        this.needPax =
-            encString(buf, off + 265, 32, this.uname) || this.needPax;
-        this.needPax =
-            encString(buf, off + 297, 32, this.gname) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
-        this.needPax =
-            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
-        if (buf[off + 475] !== 0) {
-            this.needPax =
-                encString(buf, off + 345, 155, prefix) || this.needPax;
-        }
-        else {
-            this.needPax =
-                encString(buf, off + 345, 130, prefix) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 476, 12, this.atime) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksum = sum;
-        encNumber(buf, off + 148, 8, this.cksum);
-        this.cksumValid = true;
-        return this.needPax;
-    }
-    get type() {
-        return (this.#type === 'Unsupported' ?
-            this.#type
-            : types.name.get(this.#type));
-    }
-    get typeKey() {
-        return this.#type;
-    }
-    set type(type) {
-        const c = String(types.code.get(type));
-        if (types.isCode(c) || c === 'Unsupported') {
-            this.#type = c;
-        }
-        else if (types.isCode(type)) {
-            this.#type = type;
-        }
-        else {
-            throw new TypeError('invalid entry type: ' + type);
-        }
-    }
-}
-const splitPrefix = (p, prefixSize) => {
-    const pathSize = 100;
-    let pp = p;
-    let prefix = '';
-    let ret = undefined;
-    const root = pathModule.parse(p).root || '.';
-    if (Buffer.byteLength(pp) < pathSize) {
-        ret = [pp, prefix, false];
-    }
-    else {
-        // first set prefix to the dir, and path to the base
-        prefix = pathModule.dirname(pp);
-        pp = pathModule.basename(pp);
-        do {
-            if (Buffer.byteLength(pp) <= pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // both fit!
-                ret = [pp, prefix, false];
-            }
-            else if (Buffer.byteLength(pp) > pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // prefix fits in prefix, but path doesn't fit in path
-                ret = [pp.slice(0, pathSize - 1), prefix, true];
-            }
-            else {
-                // make path take a bit from prefix
-                pp = pathModule.join(pathModule.basename(prefix), pp);
-                prefix = pathModule.dirname(prefix);
-            }
-        } while (prefix !== root && ret === undefined);
-        // at this point, found no resolution, just truncate
-        if (!ret) {
-            ret = [p.slice(0, pathSize - 1), '', true];
-        }
-    }
-    return ret;
-};
-const decString = (buf, off, size) => buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
-    large.parse(buf.subarray(off, off + size))
-    : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*$/, '')
-    .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-    12: 0o77777777777,
-    8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
-    : num > MAXNUM[size] || num < 0 ?
-        (large.encode(num, buf.subarray(off, off + size)), true)
-        : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
-    str
-    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
-    str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js
deleted file mode 100644
index 1bac6415c8d73..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-export * from './create.js';
-export { create as c } from './create.js';
-export * from './extract.js';
-export { extract as x } from './extract.js';
-export * from './header.js';
-export * from './list.js';
-export { list as t } from './list.js';
-// classes
-export * from './pack.js';
-export * from './parse.js';
-export * from './pax.js';
-export * from './read-entry.js';
-export * from './replace.js';
-export { replace as r } from './replace.js';
-export * as types from './types.js';
-export * from './unpack.js';
-export * from './update.js';
-export { update as u } from './update.js';
-export * from './write-entry.js';
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js
deleted file mode 100644
index 4f2f7e5f14fc1..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/large-numbers.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-export const encode = (num, buf) => {
-    if (!Number.isSafeInteger(num)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('cannot encode number outside of javascript safe integer range');
-    }
-    else if (num < 0) {
-        encodeNegative(num, buf);
-    }
-    else {
-        encodePositive(num, buf);
-    }
-    return buf;
-};
-const encodePositive = (num, buf) => {
-    buf[0] = 0x80;
-    for (var i = buf.length; i > 1; i--) {
-        buf[i - 1] = num & 0xff;
-        num = Math.floor(num / 0x100);
-    }
-};
-const encodeNegative = (num, buf) => {
-    buf[0] = 0xff;
-    var flipped = false;
-    num = num * -1;
-    for (var i = buf.length; i > 1; i--) {
-        var byte = num & 0xff;
-        num = Math.floor(num / 0x100);
-        if (flipped) {
-            buf[i - 1] = onesComp(byte);
-        }
-        else if (byte === 0) {
-            buf[i - 1] = 0;
-        }
-        else {
-            flipped = true;
-            buf[i - 1] = twosComp(byte);
-        }
-    }
-};
-export const parse = (buf) => {
-    const pre = buf[0];
-    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
-        : pre === 0xff ? twos(buf)
-            : null;
-    if (value === null) {
-        throw Error('invalid base256 encoding');
-    }
-    if (!Number.isSafeInteger(value)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('parsed number outside of javascript safe integer range');
-    }
-    return value;
-};
-const twos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    var flipped = false;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        var f;
-        if (flipped) {
-            f = onesComp(byte);
-        }
-        else if (byte === 0) {
-            f = byte;
-        }
-        else {
-            flipped = true;
-            f = twosComp(byte);
-        }
-        if (f !== 0) {
-            sum -= f * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const pos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        if (byte !== 0) {
-            sum += byte * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js
deleted file mode 100644
index f49068400b6c9..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/list.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// tar -t
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { dirname, parse } from 'path';
-import { makeCommand } from './make-command.js';
-import { Parser } from './parse.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const onReadEntryFunction = (opt) => {
-    const onReadEntry = opt.onReadEntry;
-    opt.onReadEntry =
-        onReadEntry ?
-            e => {
-                onReadEntry(e);
-                e.resume();
-            }
-            : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-export const filesFilter = (opt, files) => {
-    const map = new Map(files.map(f => [stripTrailingSlashes(f), true]));
-    const filter = opt.filter;
-    const mapHas = (file, r = '') => {
-        const root = r || parse(file).root || '.';
-        let ret;
-        if (file === root)
-            ret = false;
-        else {
-            const m = map.get(file);
-            if (m !== undefined) {
-                ret = m;
-            }
-            else {
-                ret = mapHas(dirname(file), root);
-            }
-        }
-        map.set(file, ret);
-        return ret;
-    };
-    opt.filter =
-        filter ?
-            (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file))
-            : file => mapHas(stripTrailingSlashes(file));
-};
-const listFileSync = (opt) => {
-    const p = new Parser(opt);
-    const file = opt.file;
-    let fd;
-    try {
-        const stat = fs.statSync(file);
-        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-        if (stat.size < readSize) {
-            p.end(fs.readFileSync(file));
-        }
-        else {
-            let pos = 0;
-            const buf = Buffer.allocUnsafe(readSize);
-            fd = fs.openSync(file, 'r');
-            while (pos < stat.size) {
-                const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
-                pos += bytesRead;
-                p.write(buf.subarray(0, bytesRead));
-            }
-            p.end();
-        }
-    }
-    finally {
-        if (typeof fd === 'number') {
-            try {
-                fs.closeSync(fd);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-    }
-};
-const listFile = (opt, _files) => {
-    const parse = new Parser(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        parse.on('error', reject);
-        parse.on('end', resolve);
-        fs.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(parse);
-            }
-        });
-    });
-    return p;
-};
-export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => {
-    if (files?.length)
-        filesFilter(opt, files);
-    if (!opt.noResume)
-        onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js
deleted file mode 100644
index f2f737bca78fd..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/make-command.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js';
-export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
-    return Object.assign((opt_ = [], entries, cb) => {
-        if (Array.isArray(opt_)) {
-            entries = opt_;
-            opt_ = {};
-        }
-        if (typeof entries === 'function') {
-            cb = entries;
-            entries = undefined;
-        }
-        if (!entries) {
-            entries = [];
-        }
-        else {
-            entries = Array.from(entries);
-        }
-        const opt = dealias(opt_);
-        validate?.(opt, entries);
-        if (isSyncFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncFile(opt, entries);
-        }
-        else if (isAsyncFile(opt)) {
-            const p = asyncFile(opt, entries);
-            // weirdness to make TS happy
-            const c = cb ? cb : undefined;
-            return c ? p.then(() => c(), c) : p;
-        }
-        else if (isSyncNoFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncNoFile(opt, entries);
-        }
-        else if (isAsyncNoFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback only supported with file option');
-            }
-            return asyncNoFile(opt, entries);
-            /* c8 ignore start */
-        }
-        else {
-            throw new Error('impossible options??');
-        }
-        /* c8 ignore stop */
-    }, {
-        syncFile,
-        asyncFile,
-        syncNoFile,
-        asyncNoFile,
-        validate,
-    });
-};
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js
deleted file mode 100644
index 13498ef0082f0..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mkdir.js
+++ /dev/null
@@ -1,201 +0,0 @@
-import { chownr, chownrSync } from 'chownr';
-import fs from 'fs';
-import { mkdirp, mkdirpSync } from 'mkdirp';
-import path from 'node:path';
-import { CwdError } from './cwd-error.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { SymlinkError } from './symlink-error.js';
-const cGet = (cache, key) => cache.get(normalizeWindowsPath(key));
-const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val);
-const checkCwd = (dir, cb) => {
-    fs.stat(dir, (er, st) => {
-        if (er || !st.isDirectory()) {
-            er = new CwdError(dir, er?.code || 'ENOTDIR');
-        }
-        cb(er);
-    });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-export const mkdir = (dir, opt, cb) => {
-    dir = normalizeWindowsPath(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o0700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = normalizeWindowsPath(opt.cwd);
-    const done = (er, created) => {
-        if (er) {
-            cb(er);
-        }
-        else {
-            cSet(cache, dir, true);
-            if (created && doChown) {
-                chownr(created, uid, gid, er => done(er));
-            }
-            else if (needChmod) {
-                fs.chmod(dir, mode, cb);
-            }
-            else {
-                cb();
-            }
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        return checkCwd(dir, done);
-    }
-    if (preserve) {
-        return mkdirp(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
-        done);
-    }
-    const sub = normalizeWindowsPath(path.relative(cwd, dir));
-    const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-    if (!parts.length) {
-        return cb(null, created);
-    }
-    const p = parts.shift();
-    const part = normalizeWindowsPath(path.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
-    if (er) {
-        fs.lstat(part, (statEr, st) => {
-            if (statEr) {
-                statEr.path =
-                    statEr.path && normalizeWindowsPath(statEr.path);
-                cb(statEr);
-            }
-            else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-            }
-            else if (unlink) {
-                fs.unlink(part, er => {
-                    if (er) {
-                        return cb(er);
-                    }
-                    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-                });
-            }
-            else if (st.isSymbolicLink()) {
-                return cb(new SymlinkError(part, part + '/' + parts.join('/')));
-            }
-            else {
-                cb(er);
-            }
-        });
-    }
-    else {
-        created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-};
-const checkCwdSync = (dir) => {
-    let ok = false;
-    let code = undefined;
-    try {
-        ok = fs.statSync(dir).isDirectory();
-    }
-    catch (er) {
-        code = er?.code;
-    }
-    finally {
-        if (!ok) {
-            throw new CwdError(dir, code ?? 'ENOTDIR');
-        }
-    }
-};
-export const mkdirSync = (dir, opt) => {
-    dir = normalizeWindowsPath(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = normalizeWindowsPath(opt.cwd);
-    const done = (created) => {
-        cSet(cache, dir, true);
-        if (created && doChown) {
-            chownrSync(created, uid, gid);
-        }
-        if (needChmod) {
-            fs.chmodSync(dir, mode);
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        checkCwdSync(cwd);
-        return done();
-    }
-    if (preserve) {
-        return done(mkdirpSync(dir, mode) ?? undefined);
-    }
-    const sub = normalizeWindowsPath(path.relative(cwd, dir));
-    const parts = sub.split('/');
-    let created = undefined;
-    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
-        part = normalizeWindowsPath(path.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
-        try {
-            fs.mkdirSync(part, mode);
-            created = created || part;
-            cSet(cache, part, true);
-        }
-        catch (er) {
-            const st = fs.lstatSync(part);
-            if (st.isDirectory()) {
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (unlink) {
-                fs.unlinkSync(part);
-                fs.mkdirSync(part, mode);
-                created = created || part;
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (st.isSymbolicLink()) {
-                return new SymlinkError(part, part + '/' + parts.join('/'));
-            }
-        }
-    }
-    return done(created);
-};
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js
deleted file mode 100644
index 5fd3bb88c1cb2..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/mode-fix.js
+++ /dev/null
@@ -1,25 +0,0 @@
-export const modeFix = (mode, isDir, portable) => {
-    mode &= 0o7777;
-    // in portable mode, use the minimum reasonable umask
-    // if this system creates files with 0o664 by default
-    // (as some linux distros do), then we'll write the
-    // archive with 0o644 instead.  Also, don't ever create
-    // a file that is not readable/writable by the owner.
-    if (portable) {
-        mode = (mode | 0o600) & ~0o22;
-    }
-    // if dirs are readable, then they should be listable
-    if (isDir) {
-        if (mode & 0o400) {
-            mode |= 0o100;
-        }
-        if (mode & 0o40) {
-            mode |= 0o10;
-        }
-        if (mode & 0o4) {
-            mode |= 0o1;
-        }
-    }
-    return mode;
-};
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js
deleted file mode 100644
index 94e5095476d6e..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-unicode.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-export const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js
deleted file mode 100644
index 2d97d2b884e62..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/normalize-windows-path.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-export const normalizeWindowsPath = platform !== 'win32' ?
-    (p) => p
-    : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js
deleted file mode 100644
index a006d36c23c92..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-const argmap = new Map([
-    ['C', 'cwd'],
-    ['f', 'file'],
-    ['z', 'gzip'],
-    ['P', 'preservePaths'],
-    ['U', 'unlink'],
-    ['strip-components', 'strip'],
-    ['stripComponents', 'strip'],
-    ['keep-newer', 'newer'],
-    ['keepNewer', 'newer'],
-    ['keep-newer-files', 'newer'],
-    ['keepNewerFiles', 'newer'],
-    ['k', 'keep'],
-    ['keep-existing', 'keep'],
-    ['keepExisting', 'keep'],
-    ['m', 'noMtime'],
-    ['no-mtime', 'noMtime'],
-    ['p', 'preserveOwner'],
-    ['L', 'follow'],
-    ['h', 'follow'],
-    ['onentry', 'onReadEntry'],
-]);
-export const isSyncFile = (o) => !!o.sync && !!o.file;
-export const isAsyncFile = (o) => !o.sync && !!o.file;
-export const isSyncNoFile = (o) => !!o.sync && !o.file;
-export const isAsyncNoFile = (o) => !o.sync && !o.file;
-export const isSync = (o) => !!o.sync;
-export const isAsync = (o) => !o.sync;
-export const isFile = (o) => !!o.file;
-export const isNoFile = (o) => !o.file;
-const dealiasKey = (k) => {
-    const d = argmap.get(k);
-    if (d)
-        return d;
-    return k;
-};
-export const dealias = (opt = {}) => {
-    if (!opt)
-        return {};
-    const result = {};
-    for (const [key, v] of Object.entries(opt)) {
-        // TS doesn't know that aliases are going to always be the same type
-        const k = dealiasKey(key);
-        result[k] = v;
-    }
-    // affordance for deprecated noChmod -> chmod
-    if (result.chmod === undefined && result.noChmod === false) {
-        result.chmod = true;
-    }
-    delete result.noChmod;
-    return result;
-};
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js
deleted file mode 100644
index f59f32f94201f..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pack.js
+++ /dev/null
@@ -1,445 +0,0 @@
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-import fs from 'fs';
-import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js';
-export class PackJob {
-    path;
-    absolute;
-    entry;
-    stat;
-    readdir;
-    pending = false;
-    ignore = false;
-    piped = false;
-    constructor(path, absolute) {
-        this.path = path || './';
-        this.absolute = absolute;
-    }
-}
-import { Minipass } from 'minipass';
-import * as zlib from 'minizlib';
-import { Yallist } from 'yallist';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-import path from 'path';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class Pack extends Minipass {
-    opt;
-    cwd;
-    maxReadSize;
-    preservePaths;
-    strict;
-    noPax;
-    prefix;
-    linkCache;
-    statCache;
-    file;
-    portable;
-    zip;
-    readdirCache;
-    noDirRecurse;
-    follow;
-    noMtime;
-    mtime;
-    filter;
-    jobs;
-    [WRITEENTRYCLASS];
-    onWriteEntry;
-    [QUEUE];
-    [JOBS] = 0;
-    [PROCESSING] = false;
-    [ENDED] = false;
-    constructor(opt = {}) {
-        //@ts-ignore
-        super();
-        this.opt = opt;
-        this.file = opt.file || '';
-        this.cwd = opt.cwd || process.cwd();
-        this.maxReadSize = opt.maxReadSize;
-        this.preservePaths = !!opt.preservePaths;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.prefix = normalizeWindowsPath(opt.prefix || '');
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.readdirCache = opt.readdirCache || new Map();
-        this.onWriteEntry = opt.onWriteEntry;
-        this[WRITEENTRYCLASS] = WriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
-            }
-            if (opt.gzip) {
-                if (typeof opt.gzip !== 'object') {
-                    opt.gzip = {};
-                }
-                if (this.portable) {
-                    opt.gzip.portable = true;
-                }
-                this.zip = new zlib.Gzip(opt.gzip);
-            }
-            if (opt.brotli) {
-                if (typeof opt.brotli !== 'object') {
-                    opt.brotli = {};
-                }
-                this.zip = new zlib.BrotliCompress(opt.brotli);
-            }
-            /* c8 ignore next */
-            if (!this.zip)
-                throw new Error('impossible');
-            const zip = this.zip;
-            zip.on('data', chunk => super.write(chunk));
-            zip.on('end', () => super.end());
-            zip.on('drain', () => this[ONDRAIN]());
-            this.on('resume', () => zip.resume());
-        }
-        else {
-            this.on('drain', this[ONDRAIN]);
-        }
-        this.noDirRecurse = !!opt.noDirRecurse;
-        this.follow = !!opt.follow;
-        this.noMtime = !!opt.noMtime;
-        if (opt.mtime)
-            this.mtime = opt.mtime;
-        this.filter =
-            typeof opt.filter === 'function' ? opt.filter : () => true;
-        this[QUEUE] = new Yallist();
-        this[JOBS] = 0;
-        this.jobs = Number(opt.jobs) || 4;
-        this[PROCESSING] = false;
-        this[ENDED] = false;
-    }
-    [WRITE](chunk) {
-        return super.write(chunk);
-    }
-    add(path) {
-        this.write(path);
-        return this;
-    }
-    end(path, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof path === 'function') {
-            cb = path;
-            path = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (path) {
-            this.add(path);
-        }
-        this[ENDED] = true;
-        this[PROCESS]();
-        /* c8 ignore next */
-        if (cb)
-            cb();
-        return this;
-    }
-    write(path) {
-        if (this[ENDED]) {
-            throw new Error('write after end');
-        }
-        if (path instanceof ReadEntry) {
-            this[ADDTARENTRY](path);
-        }
-        else {
-            this[ADDFSENTRY](path);
-        }
-        return this.flowing;
-    }
-    [ADDTARENTRY](p) {
-        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path));
-        // in this case, we don't have to wait for the stat
-        if (!this.filter(p.path, p)) {
-            p.resume();
-        }
-        else {
-            const job = new PackJob(p.path, absolute);
-            job.entry = new WriteEntryTar(p, this[ENTRYOPT](job));
-            job.entry.on('end', () => this[JOBDONE](job));
-            this[JOBS] += 1;
-            this[QUEUE].push(job);
-        }
-        this[PROCESS]();
-    }
-    [ADDFSENTRY](p) {
-        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p));
-        this[QUEUE].push(new PackJob(p, absolute));
-        this[PROCESS]();
-    }
-    [STAT](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        const stat = this.follow ? 'stat' : 'lstat';
-        fs[stat](job.absolute, (er, stat) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                this.emit('error', er);
-            }
-            else {
-                this[ONSTAT](job, stat);
-            }
-        });
-    }
-    [ONSTAT](job, stat) {
-        this.statCache.set(job.absolute, stat);
-        job.stat = stat;
-        // now we have the stat, we can filter it.
-        if (!this.filter(job.path, stat)) {
-            job.ignore = true;
-        }
-        this[PROCESS]();
-    }
-    [READDIR](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        fs.readdir(job.absolute, (er, entries) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADDIR](job, entries);
-        });
-    }
-    [ONREADDIR](job, entries) {
-        this.readdirCache.set(job.absolute, entries);
-        job.readdir = entries;
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        if (this[PROCESSING]) {
-            return;
-        }
-        this[PROCESSING] = true;
-        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
-            this[PROCESSJOB](w.value);
-            if (w.value.ignore) {
-                const p = w.next;
-                this[QUEUE].removeNode(w);
-                w.next = p;
-            }
-        }
-        this[PROCESSING] = false;
-        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-            if (this.zip) {
-                this.zip.end(EOF);
-            }
-            else {
-                super.write(EOF);
-                super.end();
-            }
-        }
-    }
-    get [CURRENT]() {
-        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
-    }
-    [JOBDONE](_job) {
-        this[QUEUE].shift();
-        this[JOBS] -= 1;
-        this[PROCESS]();
-    }
-    [PROCESSJOB](job) {
-        if (job.pending) {
-            return;
-        }
-        if (job.entry) {
-            if (job === this[CURRENT] && !job.piped) {
-                this[PIPE](job);
-            }
-            return;
-        }
-        if (!job.stat) {
-            const sc = this.statCache.get(job.absolute);
-            if (sc) {
-                this[ONSTAT](job, sc);
-            }
-            else {
-                this[STAT](job);
-            }
-        }
-        if (!job.stat) {
-            return;
-        }
-        // filtered out!
-        if (job.ignore) {
-            return;
-        }
-        if (!this.noDirRecurse &&
-            job.stat.isDirectory() &&
-            !job.readdir) {
-            const rc = this.readdirCache.get(job.absolute);
-            if (rc) {
-                this[ONREADDIR](job, rc);
-            }
-            else {
-                this[READDIR](job);
-            }
-            if (!job.readdir) {
-                return;
-            }
-        }
-        // we know it doesn't have an entry, because that got checked above
-        job.entry = this[ENTRY](job);
-        if (!job.entry) {
-            job.ignore = true;
-            return;
-        }
-        if (job === this[CURRENT] && !job.piped) {
-            this[PIPE](job);
-        }
-    }
-    [ENTRYOPT](job) {
-        return {
-            onwarn: (code, msg, data) => this.warn(code, msg, data),
-            noPax: this.noPax,
-            cwd: this.cwd,
-            absolute: job.absolute,
-            preservePaths: this.preservePaths,
-            maxReadSize: this.maxReadSize,
-            strict: this.strict,
-            portable: this.portable,
-            linkCache: this.linkCache,
-            statCache: this.statCache,
-            noMtime: this.noMtime,
-            mtime: this.mtime,
-            prefix: this.prefix,
-            onWriteEntry: this.onWriteEntry,
-        };
-    }
-    [ENTRY](job) {
-        this[JOBS] += 1;
-        try {
-            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
-            return e
-                .on('end', () => this[JOBDONE](job))
-                .on('error', er => this.emit('error', er));
-        }
-        catch (er) {
-            this.emit('error', er);
-        }
-    }
-    [ONDRAIN]() {
-        if (this[CURRENT] && this[CURRENT].entry) {
-            this[CURRENT].entry.resume();
-        }
-    }
-    // like .pipe() but using super, because our write() is special
-    [PIPE](job) {
-        job.piped = true;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        const source = job.entry;
-        const zip = this.zip;
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                if (!zip.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                if (!super.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-    }
-    pause() {
-        if (this.zip) {
-            this.zip.pause();
-        }
-        return super.pause();
-    }
-    warn(code, message, data = {}) {
-        warnMethod(this, code, message, data);
-    }
-}
-export class PackSync extends Pack {
-    sync = true;
-    constructor(opt) {
-        super(opt);
-        this[WRITEENTRYCLASS] = WriteEntrySync;
-    }
-    // pause/resume are no-ops in sync streams.
-    pause() { }
-    resume() { }
-    [STAT](job) {
-        const stat = this.follow ? 'statSync' : 'lstatSync';
-        this[ONSTAT](job, fs[stat](job.absolute));
-    }
-    [READDIR](job) {
-        this[ONREADDIR](job, fs.readdirSync(job.absolute));
-    }
-    // gotta get it all in this tick
-    [PIPE](job) {
-        const source = job.entry;
-        const zip = this.zip;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('Cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                zip.write(chunk);
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                super[WRITE](chunk);
-            });
-        }
-    }
-}
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js
deleted file mode 100644
index cce430479cd0c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/parse.js
+++ /dev/null
@@ -1,595 +0,0 @@
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-import { EventEmitter as EE } from 'events';
-import { BrotliDecompress, Unzip } from 'minizlib';
-import { Yallist } from 'yallist';
-import { Header } from './header.js';
-import { Pax } from './pax.js';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-export class Parser extends EE {
-    file;
-    strict;
-    maxMetaEntrySize;
-    filter;
-    brotli;
-    writable = true;
-    readable = false;
-    [QUEUE] = new Yallist();
-    [BUFFER];
-    [READENTRY];
-    [WRITEENTRY];
-    [STATE] = 'begin';
-    [META] = '';
-    [EX];
-    [GEX];
-    [ENDED] = false;
-    [UNZIP];
-    [ABORTED] = false;
-    [SAW_VALID_ENTRY];
-    [SAW_NULL_BLOCK] = false;
-    [SAW_EOF] = false;
-    [WRITING] = false;
-    [CONSUMING] = false;
-    [EMITTEDEND] = false;
-    constructor(opt = {}) {
-        super();
-        this.file = opt.file || '';
-        // these BADARCHIVE errors can't be detected early. listen on DONE.
-        this.on(DONE, () => {
-            if (this[STATE] === 'begin' ||
-                this[SAW_VALID_ENTRY] === false) {
-                // either less than 1 block of data, or all entries were invalid.
-                // Either way, probably not even a tarball.
-                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
-            }
-        });
-        if (opt.ondone) {
-            this.on(DONE, opt.ondone);
-        }
-        else {
-            this.on(DONE, () => {
-                this.emit('prefinish');
-                this.emit('finish');
-                this.emit('end');
-            });
-        }
-        this.strict = !!opt.strict;
-        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
-        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
-        // Unlike gzip, brotli doesn't have any magic bytes to identify it
-        // Users need to explicitly tell us they're extracting a brotli file
-        // Or we infer from the file extension
-        const isTBR = opt.file &&
-            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
-        // if it's a tbr file it MIGHT be brotli, but we don't know until
-        // we look at it and verify it's not a valid tar file.
-        this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
-                : isTBR ? undefined
-                    : false;
-        // have to set this so that streams are ok piping into it
-        this.on('end', () => this[CLOSESTREAM]());
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        if (typeof opt.onReadEntry === 'function') {
-            this.on('entry', opt.onReadEntry);
-        }
-    }
-    warn(code, message, data = {}) {
-        warnMethod(this, code, message, data);
-    }
-    [CONSUMEHEADER](chunk, position) {
-        if (this[SAW_VALID_ENTRY] === undefined) {
-            this[SAW_VALID_ENTRY] = false;
-        }
-        let header;
-        try {
-            header = new Header(chunk, position, this[EX], this[GEX]);
-        }
-        catch (er) {
-            return this.warn('TAR_ENTRY_INVALID', er);
-        }
-        if (header.nullBlock) {
-            if (this[SAW_NULL_BLOCK]) {
-                this[SAW_EOF] = true;
-                // ending an archive with no entries.  pointless, but legal.
-                if (this[STATE] === 'begin') {
-                    this[STATE] = 'header';
-                }
-                this[EMIT]('eof');
-            }
-            else {
-                this[SAW_NULL_BLOCK] = true;
-                this[EMIT]('nullBlock');
-            }
-        }
-        else {
-            this[SAW_NULL_BLOCK] = false;
-            if (!header.cksumValid) {
-                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
-            }
-            else if (!header.path) {
-                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
-            }
-            else {
-                const type = header.type;
-                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
-                        header,
-                    });
-                }
-                else if (!/^(Symbolic)?Link$/.test(type) &&
-                    !/^(Global)?ExtendedHeader$/.test(type) &&
-                    header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
-                        header,
-                    });
-                }
-                else {
-                    const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX]));
-                    // we do this for meta & ignored entries as well, because they
-                    // are still valid tar, or else we wouldn't know to ignore them
-                    if (!this[SAW_VALID_ENTRY]) {
-                        if (entry.remain) {
-                            // this might be the one!
-                            const onend = () => {
-                                if (!entry.invalid) {
-                                    this[SAW_VALID_ENTRY] = true;
-                                }
-                            };
-                            entry.on('end', onend);
-                        }
-                        else {
-                            this[SAW_VALID_ENTRY] = true;
-                        }
-                    }
-                    if (entry.meta) {
-                        if (entry.size > this.maxMetaEntrySize) {
-                            entry.ignore = true;
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = 'ignore';
-                            entry.resume();
-                        }
-                        else if (entry.size > 0) {
-                            this[META] = '';
-                            entry.on('data', c => (this[META] += c));
-                            this[STATE] = 'meta';
-                        }
-                    }
-                    else {
-                        this[EX] = undefined;
-                        entry.ignore =
-                            entry.ignore || !this.filter(entry.path, entry);
-                        if (entry.ignore) {
-                            // probably valid, just not something we care about
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = entry.remain ? 'ignore' : 'header';
-                            entry.resume();
-                        }
-                        else {
-                            if (entry.remain) {
-                                this[STATE] = 'body';
-                            }
-                            else {
-                                this[STATE] = 'header';
-                                entry.end();
-                            }
-                            if (!this[READENTRY]) {
-                                this[QUEUE].push(entry);
-                                this[NEXTENTRY]();
-                            }
-                            else {
-                                this[QUEUE].push(entry);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    [CLOSESTREAM]() {
-        queueMicrotask(() => this.emit('close'));
-    }
-    [PROCESSENTRY](entry) {
-        let go = true;
-        if (!entry) {
-            this[READENTRY] = undefined;
-            go = false;
-        }
-        else if (Array.isArray(entry)) {
-            const [ev, ...args] = entry;
-            this.emit(ev, ...args);
-        }
-        else {
-            this[READENTRY] = entry;
-            this.emit('entry', entry);
-            if (!entry.emittedEnd) {
-                entry.on('end', () => this[NEXTENTRY]());
-                go = false;
-            }
-        }
-        return go;
-    }
-    [NEXTENTRY]() {
-        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
-        if (!this[QUEUE].length) {
-            // At this point, there's nothing in the queue, but we may have an
-            // entry which is being consumed (readEntry).
-            // If we don't, then we definitely can handle more data.
-            // If we do, and either it's flowing, or it has never had any data
-            // written to it, then it needs more.
-            // The only other possibility is that it has returned false from a
-            // write() call, so we wait for the next drain to continue.
-            const re = this[READENTRY];
-            const drainNow = !re || re.flowing || re.size === re.remain;
-            if (drainNow) {
-                if (!this[WRITING]) {
-                    this.emit('drain');
-                }
-            }
-            else {
-                re.once('drain', () => this.emit('drain'));
-            }
-        }
-    }
-    [CONSUMEBODY](chunk, position) {
-        // write up to but no more than writeEntry.blockRemain
-        const entry = this[WRITEENTRY];
-        /* c8 ignore start */
-        if (!entry) {
-            throw new Error('attempt to consume body without entry??');
-        }
-        const br = entry.blockRemain ?? 0;
-        /* c8 ignore stop */
-        const c = br >= chunk.length && position === 0 ?
-            chunk
-            : chunk.subarray(position, position + br);
-        entry.write(c);
-        if (!entry.blockRemain) {
-            this[STATE] = 'header';
-            this[WRITEENTRY] = undefined;
-            entry.end();
-        }
-        return c.length;
-    }
-    [CONSUMEMETA](chunk, position) {
-        const entry = this[WRITEENTRY];
-        const ret = this[CONSUMEBODY](chunk, position);
-        // if we finished, then the entry is reset
-        if (!this[WRITEENTRY] && entry) {
-            this[EMITMETA](entry);
-        }
-        return ret;
-    }
-    [EMIT](ev, data, extra) {
-        if (!this[QUEUE].length && !this[READENTRY]) {
-            this.emit(ev, data, extra);
-        }
-        else {
-            this[QUEUE].push([ev, data, extra]);
-        }
-    }
-    [EMITMETA](entry) {
-        this[EMIT]('meta', this[META]);
-        switch (entry.type) {
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this[EX] = Pax.parse(this[META], this[EX], false);
-                break;
-            case 'GlobalExtendedHeader':
-                this[GEX] = Pax.parse(this[META], this[GEX], true);
-                break;
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath': {
-                const ex = this[EX] ?? Object.create(null);
-                this[EX] = ex;
-                ex.path = this[META].replace(/\0.*/, '');
-                break;
-            }
-            case 'NextFileHasLongLinkpath': {
-                const ex = this[EX] || Object.create(null);
-                this[EX] = ex;
-                ex.linkpath = this[META].replace(/\0.*/, '');
-                break;
-            }
-            /* c8 ignore start */
-            default:
-                throw new Error('unknown meta: ' + entry.type);
-            /* c8 ignore stop */
-        }
-    }
-    abort(error) {
-        this[ABORTED] = true;
-        this.emit('abort', error);
-        // always throws, even in non-strict mode
-        this.warn('TAR_ABORT', error, { recoverable: false });
-    }
-    write(chunk, encoding, cb) {
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, 
-            /* c8 ignore next */
-            typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        if (this[ABORTED]) {
-            /* c8 ignore next */
-            cb?.();
-            return false;
-        }
-        // first write, might be gzipped
-        const needSniff = this[UNZIP] === undefined ||
-            (this.brotli === undefined && this[UNZIP] === false);
-        if (needSniff && chunk) {
-            if (this[BUFFER]) {
-                chunk = Buffer.concat([this[BUFFER], chunk]);
-                this[BUFFER] = undefined;
-            }
-            if (chunk.length < gzipHeader.length) {
-                this[BUFFER] = chunk;
-                /* c8 ignore next */
-                cb?.();
-                return true;
-            }
-            // look for gzip header
-            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
-                if (chunk[i] !== gzipHeader[i]) {
-                    this[UNZIP] = false;
-                }
-            }
-            const maybeBrotli = this.brotli === undefined;
-            if (this[UNZIP] === false && maybeBrotli) {
-                // read the first header to see if it's a valid tar file. If so,
-                // we can safely assume that it's not actually brotli, despite the
-                // .tbr or .tar.br file extension.
-                // if we ended before getting a full chunk, yes, def brotli
-                if (chunk.length < 512) {
-                    if (this[ENDED]) {
-                        this.brotli = true;
-                    }
-                    else {
-                        this[BUFFER] = chunk;
-                        /* c8 ignore next */
-                        cb?.();
-                        return true;
-                    }
-                }
-                else {
-                    // if it's tar, it's pretty reliably not brotli, chances of
-                    // that happening are astronomical.
-                    try {
-                        new Header(chunk.subarray(0, 512));
-                        this.brotli = false;
-                    }
-                    catch (_) {
-                        this.brotli = true;
-                    }
-                }
-            }
-            if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
-                const ended = this[ENDED];
-                this[ENDED] = false;
-                this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new Unzip({})
-                        : new BrotliDecompress({});
-                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
-                this[UNZIP].on('error', er => this.abort(er));
-                this[UNZIP].on('end', () => {
-                    this[ENDED] = true;
-                    this[CONSUMECHUNK]();
-                });
-                this[WRITING] = true;
-                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
-                this[WRITING] = false;
-                cb?.();
-                return ret;
-            }
-        }
-        this[WRITING] = true;
-        if (this[UNZIP]) {
-            this[UNZIP].write(chunk);
-        }
-        else {
-            this[CONSUMECHUNK](chunk);
-        }
-        this[WRITING] = false;
-        // return false if there's a queue, or if the current entry isn't flowing
-        const ret = this[QUEUE].length ? false
-            : this[READENTRY] ? this[READENTRY].flowing
-                : true;
-        // if we have no queue, then that means a clogged READENTRY
-        if (!ret && !this[QUEUE].length) {
-            this[READENTRY]?.once('drain', () => this.emit('drain'));
-        }
-        /* c8 ignore next */
-        cb?.();
-        return ret;
-    }
-    [BUFFERCONCAT](c) {
-        if (c && !this[ABORTED]) {
-            this[BUFFER] =
-                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
-        }
-    }
-    [MAYBEEND]() {
-        if (this[ENDED] &&
-            !this[EMITTEDEND] &&
-            !this[ABORTED] &&
-            !this[CONSUMING]) {
-            this[EMITTEDEND] = true;
-            const entry = this[WRITEENTRY];
-            if (entry && entry.blockRemain) {
-                // truncated, likely a damaged file
-                const have = this[BUFFER] ? this[BUFFER].length : 0;
-                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
-                if (this[BUFFER]) {
-                    entry.write(this[BUFFER]);
-                }
-                entry.end();
-            }
-            this[EMIT](DONE);
-        }
-    }
-    [CONSUMECHUNK](chunk) {
-        if (this[CONSUMING] && chunk) {
-            this[BUFFERCONCAT](chunk);
-        }
-        else if (!chunk && !this[BUFFER]) {
-            this[MAYBEEND]();
-        }
-        else if (chunk) {
-            this[CONSUMING] = true;
-            if (this[BUFFER]) {
-                this[BUFFERCONCAT](chunk);
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            else {
-                this[CONSUMECHUNKSUB](chunk);
-            }
-            while (this[BUFFER] &&
-                this[BUFFER]?.length >= 512 &&
-                !this[ABORTED] &&
-                !this[SAW_EOF]) {
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            this[CONSUMING] = false;
-        }
-        if (!this[BUFFER] || this[ENDED]) {
-            this[MAYBEEND]();
-        }
-    }
-    [CONSUMECHUNKSUB](chunk) {
-        // we know that we are in CONSUMING mode, so anything written goes into
-        // the buffer.  Advance the position and put any remainder in the buffer.
-        let position = 0;
-        const length = chunk.length;
-        while (position + 512 <= length &&
-            !this[ABORTED] &&
-            !this[SAW_EOF]) {
-            switch (this[STATE]) {
-                case 'begin':
-                case 'header':
-                    this[CONSUMEHEADER](chunk, position);
-                    position += 512;
-                    break;
-                case 'ignore':
-                case 'body':
-                    position += this[CONSUMEBODY](chunk, position);
-                    break;
-                case 'meta':
-                    position += this[CONSUMEMETA](chunk, position);
-                    break;
-                /* c8 ignore start */
-                default:
-                    throw new Error('invalid state: ' + this[STATE]);
-                /* c8 ignore stop */
-            }
-        }
-        if (position < length) {
-            if (this[BUFFER]) {
-                this[BUFFER] = Buffer.concat([
-                    chunk.subarray(position),
-                    this[BUFFER],
-                ]);
-            }
-            else {
-                this[BUFFER] = chunk.subarray(position);
-            }
-        }
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (cb)
-            this.once('finish', cb);
-        if (!this[ABORTED]) {
-            if (this[UNZIP]) {
-                /* c8 ignore start */
-                if (chunk)
-                    this[UNZIP].write(chunk);
-                /* c8 ignore stop */
-                this[UNZIP].end();
-            }
-            else {
-                this[ENDED] = true;
-                if (this.brotli === undefined)
-                    chunk = chunk || Buffer.alloc(0);
-                if (chunk)
-                    this.write(chunk);
-                this[MAYBEEND]();
-            }
-        }
-        return this;
-    }
-}
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
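By way of illustration, a minimal sketch of driving the Parser removed above: it accepts raw (optionally gzip- or brotli-compressed) tar bytes through write()/end() and hands each entry out as a paused ReadEntry stream. The package-level `tar` export and the archive name are assumptions here, not taken from this patch:

    import { createReadStream } from 'node:fs'
    import { Parser } from 'tar'

    const parser = new Parser()

    // each 'entry' is a ReadEntry; consume it or resume() it so the
    // parser can advance to the next header
    parser.on('entry', entry => {
      console.log(entry.type, entry.path, entry.size)
      entry.resume()
    })

    // Parser implements write()/end() and emits 'drain', so piping works
    createReadStream('archive.tar').pipe(parser)
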
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js
deleted file mode 100644
index e63b9c91e9a80..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/path-reservations.js
+++ /dev/null
@@ -1,166 +0,0 @@
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-import { join } from 'node:path';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c']
-const getDirs = (path) => {
-    const dirs = path
-        .split('/')
-        .slice(0, -1)
-        .reduce((set, path) => {
-        const s = set[set.length - 1];
-        if (s !== undefined) {
-            path = join(s, path);
-        }
-        set.push(path || '/');
-        return set;
-    }, []);
-    return dirs;
-};
-export class PathReservations {
-    // path => [function or Set]
-    // A Set object means a directory reservation
-    // A fn is a direct reservation on that path
-    #queues = new Map();
-    // fn => {paths:[path,...], dirs:[path, ...]}
-    #reservations = new Map();
-    // functions currently running
-    #running = new Set();
-    reserve(paths, fn) {
-        paths =
-            isWindows ?
-                ['win32 parallelization disabled']
-                : paths.map(p => {
-                    // don't need normPath, because we skip this entirely for windows
-                    return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase();
-                });
-        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
-        this.#reservations.set(fn, { dirs, paths });
-        for (const p of paths) {
-            const q = this.#queues.get(p);
-            if (!q) {
-                this.#queues.set(p, [fn]);
-            }
-            else {
-                q.push(fn);
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            if (!q) {
-                this.#queues.set(dir, [new Set([fn])]);
-            }
-            else {
-                const l = q[q.length - 1];
-                if (l instanceof Set) {
-                    l.add(fn);
-                }
-                else {
-                    q.push(new Set([fn]));
-                }
-            }
-        }
-        return this.#run(fn);
-    }
-    // return the queues for each path the function cares about
-    // fn => {paths, dirs}
-    #getQueues(fn) {
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('function does not have any path reservations');
-        }
-        /* c8 ignore stop */
-        return {
-            paths: res.paths.map((path) => this.#queues.get(path)),
-            dirs: [...res.dirs].map(path => this.#queues.get(path)),
-        };
-    }
-    // check if fn is first in line for all its paths, and is
-    // included in the first set for all its dir queues
-    check(fn) {
-        const { paths, dirs } = this.#getQueues(fn);
-        return (paths.every(q => q && q[0] === fn) &&
-            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
-    }
-    // run the function if it's first in line and not already running
-    #run(fn) {
-        if (this.#running.has(fn) || !this.check(fn)) {
-            return false;
-        }
-        this.#running.add(fn);
-        fn(() => this.#clear(fn));
-        return true;
-    }
-    #clear(fn) {
-        if (!this.#running.has(fn)) {
-            return false;
-        }
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('invalid reservation');
-        }
-        /* c8 ignore stop */
-        const { paths, dirs } = res;
-        const next = new Set();
-        for (const path of paths) {
-            const q = this.#queues.get(path);
-            /* c8 ignore start */
-            if (!q || q?.[0] !== fn) {
-                continue;
-            }
-            /* c8 ignore stop */
-            const q0 = q[1];
-            if (!q0) {
-                this.#queues.delete(path);
-                continue;
-            }
-            q.shift();
-            if (typeof q0 === 'function') {
-                next.add(q0);
-            }
-            else {
-                for (const f of q0) {
-                    next.add(f);
-                }
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            const q0 = q?.[0];
-            /* c8 ignore next - type safety only */
-            if (!q || !(q0 instanceof Set))
-                continue;
-            if (q0.size === 1 && q.length === 1) {
-                this.#queues.delete(dir);
-                continue;
-            }
-            else if (q0.size === 1) {
-                q.shift();
-                // next one must be a function,
-                // or else the Set would've been reused
-                const n = q[0];
-                if (typeof n === 'function') {
-                    next.add(n);
-                }
-            }
-            else {
-                q0.delete(fn);
-            }
-        }
-        this.#running.delete(fn);
-        next.forEach(fn => this.#run(fn));
-        return true;
-    }
-}
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
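As a quick illustration of the reservation contract documented at the top of this file: a job runs only once it is first in line for every path (and parent directory) it reserved, and the callback it receives releases those reservations for the next job. The relative import path and the example paths are assumptions:

    import { PathReservations } from './path-reservations.js'

    const reservations = new PathReservations()

    // job A holds /pkg/mod/index.js; job B needs the same path plus one more,
    // so it stays queued until job A calls its release callback
    reservations.reserve(['/pkg/mod/index.js'], release => {
      console.log('job A running')
      setTimeout(release, 50)
    })

    reservations.reserve(['/pkg/mod/index.js', '/pkg/other.js'], release => {
      console.log('job B running') // logs only after job A releases
      release()
    })
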
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js
deleted file mode 100644
index 832808f344da5..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/pax.js
+++ /dev/null
@@ -1,154 +0,0 @@
-import { basename } from 'node:path';
-import { Header } from './header.js';
-export class Pax {
-    atime;
-    mtime;
-    ctime;
-    charset;
-    comment;
-    gid;
-    uid;
-    gname;
-    uname;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    path;
-    size;
-    mode;
-    global;
-    constructor(obj, global = false) {
-        this.atime = obj.atime;
-        this.charset = obj.charset;
-        this.comment = obj.comment;
-        this.ctime = obj.ctime;
-        this.dev = obj.dev;
-        this.gid = obj.gid;
-        this.global = global;
-        this.gname = obj.gname;
-        this.ino = obj.ino;
-        this.linkpath = obj.linkpath;
-        this.mtime = obj.mtime;
-        this.nlink = obj.nlink;
-        this.path = obj.path;
-        this.size = obj.size;
-        this.uid = obj.uid;
-        this.uname = obj.uname;
-    }
-    encode() {
-        const body = this.encodeBody();
-        if (body === '') {
-            return Buffer.allocUnsafe(0);
-        }
-        const bodyLen = Buffer.byteLength(body);
-        // round up to 512 bytes
-        // add 512 for header
-        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
-        const buf = Buffer.allocUnsafe(bufLen);
-        // 0-fill the header section, it might not hit every field
-        for (let i = 0; i < 512; i++) {
-            buf[i] = 0;
-        }
-        new Header({
-            // XXX split the path
-            // then the path should be PaxHeader + basename, but less than 99,
-            // prepend with the dirname
-            /* c8 ignore start */
-            path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99),
-            /* c8 ignore stop */
-            mode: this.mode || 0o644,
-            uid: this.uid,
-            gid: this.gid,
-            size: bodyLen,
-            mtime: this.mtime,
-            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-            linkpath: '',
-            uname: this.uname || '',
-            gname: this.gname || '',
-            devmaj: 0,
-            devmin: 0,
-            atime: this.atime,
-            ctime: this.ctime,
-        }).encode(buf);
-        buf.write(body, 512, bodyLen, 'utf8');
-        // null pad after the body
-        for (let i = bodyLen + 512; i < buf.length; i++) {
-            buf[i] = 0;
-        }
-        return buf;
-    }
-    encodeBody() {
-        return (this.encodeField('path') +
-            this.encodeField('ctime') +
-            this.encodeField('atime') +
-            this.encodeField('dev') +
-            this.encodeField('ino') +
-            this.encodeField('nlink') +
-            this.encodeField('charset') +
-            this.encodeField('comment') +
-            this.encodeField('gid') +
-            this.encodeField('gname') +
-            this.encodeField('linkpath') +
-            this.encodeField('mtime') +
-            this.encodeField('size') +
-            this.encodeField('uid') +
-            this.encodeField('uname'));
-    }
-    encodeField(field) {
-        if (this[field] === undefined) {
-            return '';
-        }
-        const r = this[field];
-        const v = r instanceof Date ? r.getTime() / 1000 : r;
-        const s = ' ' +
-            (field === 'dev' || field === 'ino' || field === 'nlink' ?
-                'SCHILY.'
-                : '') +
-            field +
-            '=' +
-            v +
-            '\n';
-        const byteLen = Buffer.byteLength(s);
-        // the length prefix counts its own digits in ascii base-10:
-        // if the body is 9 characters, adding 1 digit for the "9" makes 10,
-        // which needs 2 digits, so the record ends up 11 chars.
-        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
-        if (byteLen + digits >= Math.pow(10, digits)) {
-            digits += 1;
-        }
-        const len = digits + byteLen;
-        return len + s;
-    }
-    static parse(str, ex, g = false) {
-        return new Pax(merge(parseKV(str), ex), g);
-    }
-}
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
-    const n = parseInt(line, 10);
-    // XXX Values with \n in them will fail this.
-    // Refactor to not be a naive line-by-line parse.
-    if (n !== Buffer.byteLength(line) + 1) {
-        return set;
-    }
-    line = line.slice((n + ' ').length);
-    const kv = line.split('=');
-    const r = kv.shift();
-    if (!r) {
-        return set;
-    }
-    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
-    const v = kv.join('=');
-    set[k] =
-        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
-            new Date(Number(v) * 1000)
-            : /^[0-9]+$/.test(v) ? +v
-                : v;
-    return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
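For reference, a worked example of the self-referential length prefix computed in encodeField() above: the prefix counts its own digits, so a 9-byte body needs a 2-digit prefix. A standalone Node.js check; the record contents are illustrative:

    // pax record body: leading space, key=value, trailing newline
    const body = ' path=xy\n' // 9 bytes
    const byteLen = Buffer.byteLength(body)

    // same digit logic as encodeField(): 9 + 1 digit = 10, which no longer
    // fits in one digit, so the prefix becomes "11" and the record is 11 bytes
    let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
    if (byteLen + digits >= Math.pow(10, digits)) {
      digits += 1
    }
    console.log(String(digits + byteLen) + body) // "11 path=xy\n"
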
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js
deleted file mode 100644
index 23cc673e61087..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/read-entry.js
+++ /dev/null
@@ -1,136 +0,0 @@
-import { Minipass } from 'minipass';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class ReadEntry extends Minipass {
-    extended;
-    globalExtended;
-    header;
-    startBlockSize;
-    blockRemain;
-    remain;
-    type;
-    meta = false;
-    ignore = false;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    size = 0;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    invalid = false;
-    absolute;
-    unsupported = false;
-    constructor(header, ex, gex) {
-        super({});
-        // read entries always start life paused.  this is to avoid the
-        // situation where Minipass's auto-ending empty streams results
-        // in an entry ending before we're ready for it.
-        this.pause();
-        this.extended = ex;
-        this.globalExtended = gex;
-        this.header = header;
-        /* c8 ignore start */
-        this.remain = header.size ?? 0;
-        /* c8 ignore stop */
-        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
-        this.blockRemain = this.startBlockSize;
-        this.type = header.type;
-        switch (this.type) {
-            case 'File':
-            case 'OldFile':
-            case 'Link':
-            case 'SymbolicLink':
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'Directory':
-            case 'FIFO':
-            case 'ContiguousFile':
-            case 'GNUDumpDir':
-                break;
-            case 'NextFileHasLongLinkpath':
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath':
-            case 'GlobalExtendedHeader':
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this.meta = true;
-                break;
-            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-            // it may be worth doing the same, but with a warning.
-            default:
-                this.ignore = true;
-        }
-        /* c8 ignore start */
-        if (!header.path) {
-            throw new Error('no path provided for tar.ReadEntry');
-        }
-        /* c8 ignore stop */
-        this.path = normalizeWindowsPath(header.path);
-        this.mode = header.mode;
-        if (this.mode) {
-            this.mode = this.mode & 0o7777;
-        }
-        this.uid = header.uid;
-        this.gid = header.gid;
-        this.uname = header.uname;
-        this.gname = header.gname;
-        this.size = this.remain;
-        this.mtime = header.mtime;
-        this.atime = header.atime;
-        this.ctime = header.ctime;
-        /* c8 ignore start */
-        this.linkpath =
-            header.linkpath ?
-                normalizeWindowsPath(header.linkpath)
-                : undefined;
-        /* c8 ignore stop */
-        this.uname = header.uname;
-        this.gname = header.gname;
-        if (ex) {
-            this.#slurp(ex);
-        }
-        if (gex) {
-            this.#slurp(gex, true);
-        }
-    }
-    write(data) {
-        const writeLen = data.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        const r = this.remain;
-        const br = this.blockRemain;
-        this.remain = Math.max(0, r - writeLen);
-        this.blockRemain = Math.max(0, br - writeLen);
-        if (this.ignore) {
-            return true;
-        }
-        if (r >= writeLen) {
-            return super.write(data);
-        }
-        // r < writeLen
-        return super.write(data.subarray(0, r));
-    }
-    #slurp(ex, gex = false) {
-        if (ex.path)
-            ex.path = normalizeWindowsPath(ex.path);
-        if (ex.linkpath)
-            ex.linkpath = normalizeWindowsPath(ex.linkpath);
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex));
-        })));
-    }
-}
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js
deleted file mode 100644
index bab622bfdf1f1..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/replace.js
+++ /dev/null
@@ -1,225 +0,0 @@
-// tar -r
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import path from 'node:path';
-import { Header } from './header.js';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { isFile, } from './options.js';
-import { Pack, PackSync } from './pack.js';
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing.
-// If it is valid, jump forward by the entry's size (rounded up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
-    const p = new PackSync(opt);
-    let threw = true;
-    let fd;
-    let position;
-    try {
-        try {
-            fd = fs.openSync(opt.file, 'r+');
-        }
-        catch (er) {
-            if (er?.code === 'ENOENT') {
-                fd = fs.openSync(opt.file, 'w+');
-            }
-            else {
-                throw er;
-            }
-        }
-        const st = fs.fstatSync(fd);
-        const headBuf = Buffer.alloc(512);
-        POSITION: for (position = 0; position < st.size; position += 512) {
-            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-                bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
-                if (position === 0 &&
-                    headBuf[0] === 0x1f &&
-                    headBuf[1] === 0x8b) {
-                    throw new Error('cannot append to compressed archives');
-                }
-                if (!bytes) {
-                    break POSITION;
-                }
-            }
-            const h = new Header(headBuf);
-            if (!h.cksumValid) {
-                break;
-            }
-            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
-            if (position + entryBlockSize + 512 > st.size) {
-                break;
-            }
-            // the 512 for the header we just parsed will be added as well
-            // also jump ahead all the blocks for the body
-            position += entryBlockSize;
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-        }
-        threw = false;
-        streamSync(opt, p, position, fd, files);
-    }
-    finally {
-        if (threw) {
-            try {
-                fs.closeSync(fd);
-            }
-            catch (er) { }
-        }
-    }
-};
-const streamSync = (opt, p, position, fd, files) => {
-    const stream = new WriteStreamSync(opt.file, {
-        fd: fd,
-        start: position,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
-    files = Array.from(files);
-    const p = new Pack(opt);
-    const getPos = (fd, size, cb_) => {
-        const cb = (er, pos) => {
-            if (er) {
-                fs.close(fd, _ => cb_(er));
-            }
-            else {
-                cb_(null, pos);
-            }
-        };
-        let position = 0;
-        if (size === 0) {
-            return cb(null, 0);
-        }
-        let bufPos = 0;
-        const headBuf = Buffer.alloc(512);
-        const onread = (er, bytes) => {
-            if (er || typeof bytes === 'undefined') {
-                return cb(er);
-            }
-            bufPos += bytes;
-            if (bufPos < 512 && bytes) {
-                return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
-            }
-            if (position === 0 &&
-                headBuf[0] === 0x1f &&
-                headBuf[1] === 0x8b) {
-                return cb(new Error('cannot append to compressed archives'));
-            }
-            // truncated header
-            if (bufPos < 512) {
-                return cb(null, position);
-            }
-            const h = new Header(headBuf);
-            if (!h.cksumValid) {
-                return cb(null, position);
-            }
-            /* c8 ignore next */
-            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
-            if (position + entryBlockSize + 512 > size) {
-                return cb(null, position);
-            }
-            position += entryBlockSize + 512;
-            if (position >= size) {
-                return cb(null, position);
-            }
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-            bufPos = 0;
-            fs.read(fd, headBuf, 0, 512, position, onread);
-        };
-        fs.read(fd, headBuf, 0, 512, position, onread);
-    };
-    const promise = new Promise((resolve, reject) => {
-        p.on('error', reject);
-        let flag = 'r+';
-        const onopen = (er, fd) => {
-            if (er && er.code === 'ENOENT' && flag === 'r+') {
-                flag = 'w+';
-                return fs.open(opt.file, flag, onopen);
-            }
-            if (er || !fd) {
-                return reject(er);
-            }
-            fs.fstat(fd, (er, st) => {
-                if (er) {
-                    return fs.close(fd, () => reject(er));
-                }
-                getPos(fd, st.size, (er, position) => {
-                    if (er) {
-                        return reject(er);
-                    }
-                    const stream = new WriteStream(opt.file, {
-                        fd: fd,
-                        start: position,
-                    });
-                    p.pipe(stream);
-                    stream.on('error', reject);
-                    stream.on('close', resolve);
-                    addFilesAsync(p, files);
-                });
-            });
-        };
-        fs.open(opt.file, flag, onopen);
-    });
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            list({
-                file: path.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await list({
-                file: path.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-export const replace = makeCommand(replaceSync, replaceAsync, 
-/* c8 ignore start */
-() => {
-    throw new TypeError('file is required');
-}, () => {
-    throw new TypeError('file is required');
-}, 
-/* c8 ignore stop */
-(opt, entries) => {
-    if (!isFile(opt)) {
-        throw new TypeError('file is required');
-    }
-    if (opt.gzip ||
-        opt.brotli ||
-        opt.file.endsWith('.br') ||
-        opt.file.endsWith('.tbr')) {
-        throw new TypeError('cannot append to compressed archives');
-    }
-    if (!entries?.length) {
-        throw new TypeError('no paths specified to add/replace');
-    }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
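For illustration, a minimal usage sketch of the replace command assembled above: it appends entries to an existing, uncompressed archive after scanning headers to find the end of valid data. The package-level export and the file names are assumptions, not taken from this patch:

    import { replace } from 'tar'

    // async form: resolves once the new entries have been written
    await replace({ file: 'archive.tar' }, ['added.txt'])

    // a list item starting with '@' pulls entries out of another tar file
    await replace({ file: 'archive.tar' }, ['@other.tar'])
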
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js
deleted file mode 100644
index cce5ff80b00db..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-absolute-path.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// unix absolute paths are also absolute on win32, so we use this for both
-import { win32 } from 'node:path';
-const { isAbsolute, parse } = win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-export const stripAbsolutePath = (path) => {
-    let r = '';
-    let parsed = parse(path);
-    while (isAbsolute(path) || parsed.root) {
-        // windows will think that //x/y/z has a "root" of //x/y/
-        // but strip the //?/C:/ off of //?/C:/path
-        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
-            '/'
-            : parsed.root;
-        path = path.slice(root.length);
-        r += root;
-        parsed = parse(path);
-    }
-    return [r, path];
-};
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
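A few concrete inputs and outputs for stripAbsolutePath as described in the comments above (accumulated root on the left, sanitized remainder on the right); the relative import path is an assumption:

    import { stripAbsolutePath } from './strip-absolute-path.js'

    console.log(stripAbsolutePath('/foo/bar'))  // [ '/', 'foo/bar' ]
    console.log(stripAbsolutePath('c:../foo'))  // [ 'c:', '../foo' ]
    console.log(stripAbsolutePath('//x/y/z/a')) // [ '//', 'x/y/z/a' ]
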
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js
deleted file mode 100644
index ace4218a7547b..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/strip-trailing-slashes.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-export const stripTrailingSlashes = (str) => {
-    let i = str.length - 1;
-    let slashesStart = -1;
-    while (i > -1 && str.charAt(i) === '/') {
-        slashesStart = i;
-        i--;
-    }
-    return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js
deleted file mode 100644
index d31766e2e0afa..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/symlink-error.js
+++ /dev/null
@@ -1,15 +0,0 @@
-export class SymlinkError extends Error {
-    path;
-    symlink;
-    syscall = 'symlink';
-    code = 'TAR_SYMLINK_ERROR';
-    constructor(symlink, path) {
-        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
-        this.symlink = symlink;
-        this.path = path;
-    }
-    get name() {
-        return 'SymlinkError';
-    }
-}
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js
deleted file mode 100644
index 27b982ae1e092..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/types.js
+++ /dev/null
@@ -1,45 +0,0 @@
-export const isCode = (c) => name.has(c);
-export const isName = (c) => code.has(c);
-// map types from key to human-friendly name
-export const name = new Map([
-    ['0', 'File'],
-    // same as File
-    ['', 'OldFile'],
-    ['1', 'Link'],
-    ['2', 'SymbolicLink'],
-    // Devices and FIFOs aren't fully supported
-    // they are parsed, but skipped when unpacking
-    ['3', 'CharacterDevice'],
-    ['4', 'BlockDevice'],
-    ['5', 'Directory'],
-    ['6', 'FIFO'],
-    // same as File
-    ['7', 'ContiguousFile'],
-    // pax headers
-    ['g', 'GlobalExtendedHeader'],
-    ['x', 'ExtendedHeader'],
-    // vendor-specific stuff
-    // skip
-    ['A', 'SolarisACL'],
-    // like 5, but with data, which should be skipped
-    ['D', 'GNUDumpDir'],
-    // metadata only, skip
-    ['I', 'Inode'],
-    // data = link path of next file
-    ['K', 'NextFileHasLongLinkpath'],
-    // data = path of next file
-    ['L', 'NextFileHasLongPath'],
-    // skip
-    ['M', 'ContinuationFile'],
-    // like L
-    ['N', 'OldGnuLongPath'],
-    // skip
-    ['S', 'SparseFile'],
-    // skip
-    ['V', 'TapeVolumeHeader'],
-    // like x
-    ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
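A quick sketch of the type maps above, which translate between the single-character flag stored in a tar header and a readable name; the relative import path is an assumption:

    import { name, code, isCode, isName } from './types.js'

    console.log(name.get('5'))            // 'Directory'
    console.log(code.get('SymbolicLink')) // '2'
    console.log(isCode('x'))              // true, 'x' is the ExtendedHeader flag
    console.log(isName('FIFO'))           // true
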
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js
deleted file mode 100644
index 6e744cfc1a6f9..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/unpack.js
+++ /dev/null
@@ -1,888 +0,0 @@
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-import * as fsm from '@isaacs/fs-minipass';
-import assert from 'node:assert';
-import { randomBytes } from 'node:crypto';
-import fs from 'node:fs';
-import path from 'node:path';
-import { getWriteFlag } from './get-write-flag.js';
-import { mkdir, mkdirSync } from './mkdir.js';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { Parser } from './parse.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import * as wc from './winchars.js';
-import { PathReservations } from './path-reservations.js';
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
-    if (!isWindows) {
-        return fs.unlink(path, cb);
-    }
-    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
-    fs.rename(path, name, er => {
-        if (er) {
-            return cb(er);
-        }
-        fs.unlink(name, cb);
-    });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
-    if (!isWindows) {
-        return fs.unlinkSync(path);
-    }
-    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
-    fs.renameSync(path, name);
-    fs.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
-    : b !== undefined && b === b >>> 0 ? b
-        : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
-export class Unpack extends Parser {
-    [ENDED] = false;
-    [CHECKED_CWD] = false;
-    [PENDING] = 0;
-    reservations = new PathReservations();
-    transform;
-    writable = true;
-    readable = false;
-    dirCache;
-    uid;
-    gid;
-    setOwner;
-    preserveOwner;
-    processGid;
-    processUid;
-    maxDepth;
-    forceChown;
-    win32;
-    newer;
-    keep;
-    noMtime;
-    preservePaths;
-    unlink;
-    cwd;
-    strip;
-    processUmask;
-    umask;
-    dmode;
-    fmode;
-    chmod;
-    constructor(opt = {}) {
-        opt.ondone = () => {
-            this[ENDED] = true;
-            this[MAYBECLOSE]();
-        };
-        super(opt);
-        this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
-        this.chmod = !!opt.chmod;
-        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-            // need both or neither
-            if (typeof opt.uid !== 'number' ||
-                typeof opt.gid !== 'number') {
-                throw new TypeError('cannot set owner without number uid and gid');
-            }
-            if (opt.preserveOwner) {
-                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
-            }
-            this.uid = opt.uid;
-            this.gid = opt.gid;
-            this.setOwner = true;
-        }
-        else {
-            this.uid = undefined;
-            this.gid = undefined;
-            this.setOwner = false;
-        }
-        // default true for root
-        if (opt.preserveOwner === undefined &&
-            typeof opt.uid !== 'number') {
-            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
-        }
-        else {
-            this.preserveOwner = !!opt.preserveOwner;
-        }
-        this.processUid =
-            (this.preserveOwner || this.setOwner) && process.getuid ?
-                process.getuid()
-                : undefined;
-        this.processGid =
-            (this.preserveOwner || this.setOwner) && process.getgid ?
-                process.getgid()
-                : undefined;
-        // prevent excessively deep nesting of subfolders
-        // set to `Infinity` to remove this restriction
-        this.maxDepth =
-            typeof opt.maxDepth === 'number' ?
-                opt.maxDepth
-                : DEFAULT_MAX_DEPTH;
-        // mostly just for testing, but useful in some cases.
-        // Forcibly trigger a chown on every entry, no matter what
-        this.forceChown = opt.forceChown === true;
-        this.on('entry', entry => this[ONENTRY](entry));
-    }
-    // a bad or damaged archive is a warning for Parser, but an error
-    // when extracting.  Mark those errors as unrecoverable, because
-    // the Unpack contract cannot be met.
-    warn(code, msg, data = {}) {
-        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-            data.recoverable = false;
-        }
-        return super.warn(code, msg, data);
-    }
-    [MAYBECLOSE]() {
-        if (this[ENDED] && this[PENDING] === 0) {
-            this.emit('prefinish');
-            this.emit('finish');
-            this.emit('end');
-        }
-    }
-    [CHECKPATH](entry) {
-        const p = normalizeWindowsPath(entry.path);
-        const parts = p.split('/');
-        if (this.strip) {
-            if (parts.length < this.strip) {
-                return false;
-            }
-            if (entry.type === 'Link') {
-                const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/');
-                if (linkparts.length >= this.strip) {
-                    entry.linkpath = linkparts.slice(this.strip).join('/');
-                }
-                else {
-                    return false;
-                }
-            }
-            parts.splice(0, this.strip);
-            entry.path = parts.join('/');
-        }
-        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-                entry,
-                path: p,
-                depth: parts.length,
-                maxDepth: this.maxDepth,
-            });
-            return false;
-        }
-        if (!this.preservePaths) {
-            if (parts.includes('..') ||
-                /* c8 ignore next */
-                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
-                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-                    entry,
-                    path: p,
-                });
-                return false;
-            }
-            // strip off the root
-            const [root, stripped] = stripAbsolutePath(p);
-            if (root) {
-                entry.path = String(stripped);
-                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-                    entry,
-                    path: p,
-                });
-            }
-        }
-        if (path.isAbsolute(entry.path)) {
-            entry.absolute = normalizeWindowsPath(path.resolve(entry.path));
-        }
-        else {
-            entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path));
-        }
-        // if we somehow ended up with a path that escapes the cwd, and we are
-        // not in preservePaths mode, then something is fishy!  This should have
-        // been prevented above, so ignore this for coverage.
-        /* c8 ignore start - defense in depth */
-        if (!this.preservePaths &&
-            typeof entry.absolute === 'string' &&
-            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-            entry.absolute !== this.cwd) {
-            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-                entry,
-                path: normalizeWindowsPath(entry.path),
-                resolvedPath: entry.absolute,
-                cwd: this.cwd,
-            });
-            return false;
-        }
-        /* c8 ignore stop */
-        // an archive can set properties on the extraction directory, but it
-        // may not replace the cwd with a different kind of thing entirely.
-        if (entry.absolute === this.cwd &&
-            entry.type !== 'Directory' &&
-            entry.type !== 'GNUDumpDir') {
-            return false;
-        }
-        // only encode : chars that aren't drive letter indicators
-        if (this.win32) {
-            const { root: aRoot } = path.win32.parse(String(entry.absolute));
-            entry.absolute =
-                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
-            const { root: pRoot } = path.win32.parse(entry.path);
-            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
-        }
-        return true;
-    }
-    [ONENTRY](entry) {
-        if (!this[CHECKPATH](entry)) {
-            return entry.resume();
-        }
-        assert.equal(typeof entry.absolute, 'string');
-        switch (entry.type) {
-            case 'Directory':
-            case 'GNUDumpDir':
-                if (entry.mode) {
-                    entry.mode = entry.mode | 0o700;
-                }
-            // eslint-disable-next-line no-fallthrough
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-            case 'Link':
-            case 'SymbolicLink':
-                return this[CHECKFS](entry);
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'FIFO':
-            default:
-                return this[UNSUPPORTED](entry);
-        }
-    }
-    [ONERROR](er, entry) {
-        // Cwd has to exist, or else nothing works. That's serious.
-        // Other errors are warnings, which raise the error in strict
-        // mode, but otherwise continue on.
-        if (er.name === 'CwdError') {
-            this.emit('error', er);
-        }
-        else {
-            this.warn('TAR_ENTRY_ERROR', er, { entry });
-            this[UNPEND]();
-            entry.resume();
-        }
-    }
-    [MKDIR](dir, mode, cb) {
-        mkdir(normalizeWindowsPath(dir), {
-            uid: this.uid,
-            gid: this.gid,
-            processUid: this.processUid,
-            processGid: this.processGid,
-            umask: this.processUmask,
-            preserve: this.preservePaths,
-            unlink: this.unlink,
-            cache: this.dirCache,
-            cwd: this.cwd,
-            mode: mode,
-        }, cb);
-    }
-    [DOCHOWN](entry) {
-        // in preserve owner mode, chown if the entry doesn't match process
-        // in set owner mode, chown if setting doesn't match process
-        return (this.forceChown ||
-            (this.preserveOwner &&
-                ((typeof entry.uid === 'number' &&
-                    entry.uid !== this.processUid) ||
-                    (typeof entry.gid === 'number' &&
-                        entry.gid !== this.processGid))) ||
-            (typeof this.uid === 'number' &&
-                this.uid !== this.processUid) ||
-            (typeof this.gid === 'number' && this.gid !== this.processGid));
-    }
-    [UID](entry) {
-        return uint32(this.uid, entry.uid, this.processUid);
-    }
-    [GID](entry) {
-        return uint32(this.gid, entry.gid, this.processGid);
-    }
-    [FILE](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const stream = new fsm.WriteStream(String(entry.absolute), {
-            // slight lie, but it can be numeric flags
-            flags: getWriteFlag(entry.size),
-            mode: mode,
-            autoClose: false,
-        });
-        stream.on('error', (er) => {
-            if (stream.fd) {
-                fs.close(stream.fd, () => { });
-            }
-            // flush all the data out so that we aren't left hanging
-            // if the error wasn't actually fatal.  otherwise the parse
-            // is blocked, and we never proceed.
-            stream.write = () => true;
-            this[ONERROR](er, entry);
-            fullyDone();
-        });
-        let actions = 1;
-        const done = (er) => {
-            if (er) {
-                /* c8 ignore start - we should always have a fd by now */
-                if (stream.fd) {
-                    fs.close(stream.fd, () => { });
-                }
-                /* c8 ignore stop */
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            if (--actions === 0) {
-                if (stream.fd !== undefined) {
-                    fs.close(stream.fd, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                        }
-                        else {
-                            this[UNPEND]();
-                        }
-                        fullyDone();
-                    });
-                }
-            }
-        };
-        stream.on('finish', () => {
-            // if futimes fails, try utimes
-            // if utimes fails, fail with the original error
-            // same for fchown/chown
-            const abs = String(entry.absolute);
-            const fd = stream.fd;
-            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
-                actions++;
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                fs.futimes(fd, atime, mtime, er => er ?
-                    fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
-                    : done());
-            }
-            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
-                actions++;
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                if (typeof uid === 'number' && typeof gid === 'number') {
-                    fs.fchown(fd, uid, gid, er => er ?
-                        fs.chown(abs, uid, gid, er2 => done(er2 && er))
-                        : done());
-                }
-            }
-            done();
-        });
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => {
-                this[ONERROR](er, entry);
-                fullyDone();
-            });
-            entry.pipe(tx);
-        }
-        tx.pipe(stream);
-    }
-    [DIRECTORY](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        this[MKDIR](String(entry.absolute), mode, er => {
-            if (er) {
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            let actions = 1;
-            const done = () => {
-                if (--actions === 0) {
-                    fullyDone();
-                    this[UNPEND]();
-                    entry.resume();
-                }
-            };
-            if (entry.mtime && !this.noMtime) {
-                actions++;
-                fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
-            }
-            if (this[DOCHOWN](entry)) {
-                actions++;
-                fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
-            }
-            done();
-        });
-    }
-    [UNSUPPORTED](entry) {
-        entry.unsupported = true;
-        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
-        entry.resume();
-    }
-    [SYMLINK](entry, done) {
-        this[LINK](entry, String(entry.linkpath), 'symlink', done);
-    }
-    [HARDLINK](entry, done) {
-        const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath)));
-        this[LINK](entry, linkpath, 'link', done);
-    }
-    [PEND]() {
-        this[PENDING]++;
-    }
-    [UNPEND]() {
-        this[PENDING]--;
-        this[MAYBECLOSE]();
-    }
-    [SKIP](entry) {
-        this[UNPEND]();
-        entry.resume();
-    }
-    // Check if we can reuse an existing filesystem entry safely and
-    // overwrite it, rather than unlinking and recreating
-    // Windows doesn't report a useful nlink, so we just never reuse entries
-    [ISREUSABLE](entry, st) {
-        return (entry.type === 'File' &&
-            !this.unlink &&
-            st.isFile() &&
-            st.nlink <= 1 &&
-            !isWindows);
-    }
-    // check if a thing is there, and if so, try to clobber it
-    [CHECKFS](entry) {
-        this[PEND]();
-        const paths = [entry.path];
-        if (entry.linkpath) {
-            paths.push(entry.linkpath);
-        }
-        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
-    }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
-    [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
-        const done = (er) => {
-            this[PRUNECACHE](entry);
-            fullyDone(er);
-        };
-        const checkCwd = () => {
-            this[MKDIR](this.cwd, this.dmode, er => {
-                if (er) {
-                    this[ONERROR](er, entry);
-                    done();
-                    return;
-                }
-                this[CHECKED_CWD] = true;
-                start();
-            });
-        };
-        const start = () => {
-            if (entry.absolute !== this.cwd) {
-                const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
-                if (parent !== this.cwd) {
-                    return this[MKDIR](parent, this.dmode, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                            done();
-                            return;
-                        }
-                        afterMakeParent();
-                    });
-                }
-            }
-            afterMakeParent();
-        };
-        const afterMakeParent = () => {
-            fs.lstat(String(entry.absolute), (lstatEr, st) => {
-                if (st &&
-                    (this.keep ||
-                        /* c8 ignore next */
-                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-                    this[SKIP](entry);
-                    done();
-                    return;
-                }
-                if (lstatEr || this[ISREUSABLE](entry, st)) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                if (st.isDirectory()) {
-                    if (entry.type === 'Directory') {
-                        const needChmod = this.chmod &&
-                            entry.mode &&
-                            (st.mode & 0o7777) !== entry.mode;
-                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
-                        if (!needChmod) {
-                            return afterChmod();
-                        }
-                        return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
-                    }
-                    // Not a dir entry, have to remove it.
-                    // NB: the only way to end up with an entry that is the cwd
-                    // itself, in such a way that == does not detect, is a
-                    // tricky windows absolute path with UNC or 8.3 parts (and
-                    // preservePaths:true, or else it will have been stripped).
-                    // In that case, the user has opted out of path protections
-                    // explicitly, so if they blow away the cwd, c'est la vie.
-                    if (entry.absolute !== this.cwd) {
-                        return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
-                    }
-                }
-                // not a dir, and not reusable
-                // don't remove if the cwd, we want that error
-                if (entry.absolute === this.cwd) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
-            });
-        };
-        if (this[CHECKED_CWD]) {
-            start();
-        }
-        else {
-            checkCwd();
-        }
-    }
-    [MAKEFS](er, entry, done) {
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        switch (entry.type) {
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-                return this[FILE](entry, done);
-            case 'Link':
-                return this[HARDLINK](entry, done);
-            case 'SymbolicLink':
-                return this[SYMLINK](entry, done);
-            case 'Directory':
-            case 'GNUDumpDir':
-                return this[DIRECTORY](entry, done);
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        // XXX: get the type ('symlink' or 'junction') for windows
-        fs[link](linkpath, String(entry.absolute), er => {
-            if (er) {
-                this[ONERROR](er, entry);
-            }
-            else {
-                this[UNPEND]();
-                entry.resume();
-            }
-            done();
-        });
-    }
-}
-const callSync = (fn) => {
-    try {
-        return [null, fn()];
-    }
-    catch (er) {
-        return [er, null];
-    }
-};
-export class UnpackSync extends Unpack {
-    sync = true;
-    [MAKEFS](er, entry) {
-        return super[MAKEFS](er, entry, () => { });
-    }
-    [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
-        if (!this[CHECKED_CWD]) {
-            const er = this[MKDIR](this.cwd, this.dmode);
-            if (er) {
-                return this[ONERROR](er, entry);
-            }
-            this[CHECKED_CWD] = true;
-        }
-        // don't bother to make the parent if the current entry is the cwd,
-        // we've already checked it.
-        if (entry.absolute !== this.cwd) {
-            const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
-            if (parent !== this.cwd) {
-                const mkParent = this[MKDIR](parent, this.dmode);
-                if (mkParent) {
-                    return this[ONERROR](mkParent, entry);
-                }
-            }
-        }
-        const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute)));
-        if (st &&
-            (this.keep ||
-                /* c8 ignore next */
-                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-            return this[SKIP](entry);
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-            return this[MAKEFS](null, entry);
-        }
-        if (st.isDirectory()) {
-            if (entry.type === 'Directory') {
-                const needChmod = this.chmod &&
-                    entry.mode &&
-                    (st.mode & 0o7777) !== entry.mode;
-                const [er] = needChmod ?
-                    callSync(() => {
-                        fs.chmodSync(String(entry.absolute), Number(entry.mode));
-                    })
-                    : [];
-                return this[MAKEFS](er, entry);
-            }
-            // not a dir entry, have to remove it
-            const [er] = callSync(() => fs.rmdirSync(String(entry.absolute)));
-            this[MAKEFS](er, entry);
-        }
-        // not a dir, and not reusable.
-        // don't remove if it's the cwd, since we want that error.
-        const [er] = entry.absolute === this.cwd ?
-            []
-            : callSync(() => unlinkFileSync(String(entry.absolute)));
-        this[MAKEFS](er, entry);
-    }
-    [FILE](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const oner = (er) => {
-            let closeError;
-            try {
-                fs.closeSync(fd);
-            }
-            catch (e) {
-                closeError = e;
-            }
-            if (er || closeError) {
-                this[ONERROR](er || closeError, entry);
-            }
-            done();
-        };
-        let fd;
-        try {
-            fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
-        }
-        catch (er) {
-            return oner(er);
-        }
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => this[ONERROR](er, entry));
-            entry.pipe(tx);
-        }
-        tx.on('data', (chunk) => {
-            try {
-                fs.writeSync(fd, chunk, 0, chunk.length);
-            }
-            catch (er) {
-                oner(er);
-            }
-        });
-        tx.on('end', () => {
-            let er = null;
-            // try both, falling futimes back to utimes
-            // if either fails, handle the first error
-            if (entry.mtime && !this.noMtime) {
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                try {
-                    fs.futimesSync(fd, atime, mtime);
-                }
-                catch (futimeser) {
-                    try {
-                        fs.utimesSync(String(entry.absolute), atime, mtime);
-                    }
-                    catch (utimeser) {
-                        er = futimeser;
-                    }
-                }
-            }
-            if (this[DOCHOWN](entry)) {
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                try {
-                    fs.fchownSync(fd, Number(uid), Number(gid));
-                }
-                catch (fchowner) {
-                    try {
-                        fs.chownSync(String(entry.absolute), Number(uid), Number(gid));
-                    }
-                    catch (chowner) {
-                        er = er || fchowner;
-                    }
-                }
-            }
-            oner(er);
-        });
-    }
-    [DIRECTORY](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        const er = this[MKDIR](String(entry.absolute), mode);
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        if (entry.mtime && !this.noMtime) {
-            try {
-                fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-        if (this[DOCHOWN](entry)) {
-            try {
-                fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
-            }
-            catch (er) { }
-        }
-        done();
-        entry.resume();
-    }
-    [MKDIR](dir, mode) {
-        try {
-            return mkdirSync(normalizeWindowsPath(dir), {
-                uid: this.uid,
-                gid: this.gid,
-                processUid: this.processUid,
-                processGid: this.processGid,
-                umask: this.processUmask,
-                preserve: this.preservePaths,
-                unlink: this.unlink,
-                cache: this.dirCache,
-                cwd: this.cwd,
-                mode: mode,
-            });
-        }
-        catch (er) {
-            return er;
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        const ls = `${link}Sync`;
-        try {
-            fs[ls](linkpath, String(entry.absolute));
-            done();
-            entry.resume();
-        }
-        catch (er) {
-            return this[ONERROR](er, entry);
-        }
-    }
-}
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
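
The removed unpack.js above is the engine behind tar extraction: ONENTRY routes each entry by type, CHECKFS reserves the target paths and clears anything in the way, and the FILE/DIRECTORY/LINK handlers do the actual writes with mode, mtime and ownership fix-ups. A minimal usage sketch of that machinery through node-tar's public extract API (the archive name and destination directory here are hypothetical):

import * as tar from 'tar'

// Extract into ./out, keeping any on-disk file that is newer than the archive copy;
// the `newer` option corresponds to the mtime comparison in CHECKFS2 above.
await tar.x({ file: 'release.tar.gz', cwd: 'out', newer: true })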
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js
deleted file mode 100644
index 21398e9766663..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/update.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// tar -u
-import { makeCommand } from './make-command.js';
-import { replace as r } from './replace.js';
-// just call tar.r with the filter and mtimeCache
-export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => {
-    r.validate?.(opt, entries);
-    mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
-    const filter = opt.filter;
-    if (!opt.mtimeCache) {
-        opt.mtimeCache = new Map();
-    }
-    opt.filter =
-        filter ?
-            (path, stat) => filter(path, stat) &&
-                !(
-                /* c8 ignore start */
-                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                    (stat.mtime ?? 0))
-                /* c8 ignore stop */
-                )
-            : (path, stat) => !(
-            /* c8 ignore start */
-            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                (stat.mtime ?? 0))
-            /* c8 ignore stop */
-            );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
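
The removed update.js above implements `tar -u` by delegating to the replace command with an mtime filter, so a file is only appended when it is not older than the copy already recorded in the archive's mtimeCache. A short usage sketch, assuming node-tar's public `u` and `r` exports and hypothetical paths:

import * as tar from 'tar'

// u: append src/index.js only if its mtime is at least as new as the entry already in backup.tar
await tar.u({ file: 'backup.tar', cwd: '.' }, ['src/index.js'])

// r: append/replace unconditionally; u is r plus the mtime filter shown above
await tar.r({ file: 'backup.tar', cwd: '.' }, ['src/index.js'])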
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js
deleted file mode 100644
index 13e798afefc85..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/warn-method.js
+++ /dev/null
@@ -1,27 +0,0 @@
-export const warnMethod = (self, code, message, data = {}) => {
-    if (self.file) {
-        data.file = self.file;
-    }
-    if (self.cwd) {
-        data.cwd = self.cwd;
-    }
-    data.code =
-        (message instanceof Error &&
-            message.code) ||
-            code;
-    data.tarCode = code;
-    if (!self.strict && data.recoverable !== false) {
-        if (message instanceof Error) {
-            data = Object.assign(message, data);
-            message = message.message;
-        }
-        self.emit('warn', code, message, data);
-    }
-    else if (message instanceof Error) {
-        self.emit('error', Object.assign(message, data));
-    }
-    else {
-        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
-    }
-};
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js
deleted file mode 100644
index c41eb86d69a4b..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/winchars.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
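
The removed winchars.js above maps the characters that are not allowed in Windows file names (| < > ? :) onto 0xf000-offset private-use codepoints so archived paths survive a round trip through the filesystem. A self-contained sketch of that translation, reusing the same mapping as the removed module (these identifiers are illustrative, not a public API):

const raw = ['|', '<', '>', '?', ':']
const toWin = new Map(raw.map(c => [c, String.fromCharCode(0xf000 + c.charCodeAt(0))]))
const toRaw = new Map([...toWin].map(([r, w]) => [w, r]))
const encode = s => raw.reduce((acc, c) => acc.split(c).join(toWin.get(c)), s)
const decode = s => [...toRaw.keys()].reduce((acc, w) => acc.split(w).join(toRaw.get(w)), s)

const name = 'report?v2:final.txt'
console.log(encode(name))          // ':' and '?' become U+F03A and U+F03F
console.log(decode(encode(name)))  // 'report?v2:final.txt' – decodes back to the original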
diff --git a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js b/node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js
deleted file mode 100644
index 9028cd676b4cd..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/dist/esm/write-entry.js
+++ /dev/null
@@ -1,657 +0,0 @@
-import fs from 'fs';
-import { Minipass } from 'minipass';
-import path from 'path';
-import { Header } from './header.js';
-import { modeFix } from './mode-fix.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { dealias, } from './options.js';
-import { Pax } from './pax.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import { warnMethod, } from './warn-method.js';
-import * as winchars from './winchars.js';
-const prefixPath = (path, prefix) => {
-    if (!prefix) {
-        return normalizeWindowsPath(path);
-    }
-    path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '');
-    return stripTrailingSlashes(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-export class WriteEntry extends Minipass {
-    path;
-    portable;
-    myuid = (process.getuid && process.getuid()) || 0;
-    // until node has builtin pwnam functions, this'll have to do
-    myuser = process.env.USER || '';
-    maxReadSize;
-    linkCache;
-    statCache;
-    preservePaths;
-    cwd;
-    strict;
-    mtime;
-    noPax;
-    noMtime;
-    prefix;
-    fd;
-    blockLen = 0;
-    blockRemain = 0;
-    buf;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    offset = 0;
-    win32;
-    absolute;
-    header;
-    type;
-    linkpath;
-    stat;
-    onWriteEntry;
-    #hadError = false;
-    constructor(p, opt_ = {}) {
-        const opt = dealias(opt_);
-        super();
-        this.path = normalizeWindowsPath(p);
-        // suppress atime, ctime, uid, gid, uname, gname
-        this.portable = !!opt.portable;
-        this.maxReadSize = opt.maxReadSize || maxReadSize;
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.preservePaths = !!opt.preservePaths;
-        this.cwd = normalizeWindowsPath(opt.cwd || process.cwd());
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.mtime = opt.mtime;
-        this.prefix =
-            opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined;
-        this.onWriteEntry = opt.onWriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = stripAbsolutePath(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.win32 = !!opt.win32 || process.platform === 'win32';
-        if (this.win32) {
-            // force the \ to / normalization, since we might not *actually*
-            // be on windows, but want \ to be considered a path separator.
-            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
-            p = p.replace(/\\/g, '/');
-        }
-        this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p));
-        if (this.path === '') {
-            this.path = './';
-        }
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        const cs = this.statCache.get(this.absolute);
-        if (cs) {
-            this[ONLSTAT](cs);
-        }
-        else {
-            this[LSTAT]();
-        }
-    }
-    warn(code, message, data = {}) {
-        return warnMethod(this, code, message, data);
-    }
-    emit(ev, ...data) {
-        if (ev === 'error') {
-            this.#hadError = true;
-        }
-        return super.emit(ev, ...data);
-    }
-    [LSTAT]() {
-        fs.lstat(this.absolute, (er, stat) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONLSTAT](stat);
-        });
-    }
-    [ONLSTAT](stat) {
-        this.statCache.set(this.absolute, stat);
-        this.stat = stat;
-        if (!stat.isFile()) {
-            stat.size = 0;
-        }
-        this.type = getType(stat);
-        this.emit('stat', stat);
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        switch (this.type) {
-            case 'File':
-                return this[FILE]();
-            case 'Directory':
-                return this[DIRECTORY]();
-            case 'SymbolicLink':
-                return this[SYMLINK]();
-            // unsupported types are ignored.
-            default:
-                return this.end();
-        }
-    }
-    [MODE](mode) {
-        return modeFix(mode, this.type === 'Directory', this.portable);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [HEADER]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot write header before stat');
-        }
-        /* c8 ignore stop */
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.onWriteEntry?.(this);
-        this.header = new Header({
-            path: this[PREFIX](this.path),
-            // only apply the prefix to hard links.
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this[MODE](this.stat.mode),
-            uid: this.portable ? undefined : this.stat.uid,
-            gid: this.portable ? undefined : this.stat.gid,
-            size: this.stat.size,
-            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
-            /* c8 ignore next */
-            type: this.type === 'Unsupported' ? undefined : this.type,
-            uname: this.portable ? undefined
-                : this.stat.uid === this.myuid ? this.myuser
-                    : '',
-            atime: this.portable ? undefined : this.stat.atime,
-            ctime: this.portable ? undefined : this.stat.ctime,
-        });
-        if (this.header.encode() && !this.noPax) {
-            super.write(new Pax({
-                atime: this.portable ? undefined : this.header.atime,
-                ctime: this.portable ? undefined : this.header.ctime,
-                gid: this.portable ? undefined : this.header.gid,
-                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.header.size,
-                uid: this.portable ? undefined : this.header.uid,
-                uname: this.portable ? undefined : this.header.uname,
-                dev: this.portable ? undefined : this.stat.dev,
-                ino: this.portable ? undefined : this.stat.ino,
-                nlink: this.portable ? undefined : this.stat.nlink,
-            }).encode());
-        }
-        const block = this.header?.block;
-        /* c8 ignore start */
-        if (!block) {
-            throw new Error('failed to encode header');
-        }
-        /* c8 ignore stop */
-        super.write(block);
-    }
-    [DIRECTORY]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create directory entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.path.slice(-1) !== '/') {
-            this.path += '/';
-        }
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [SYMLINK]() {
-        fs.readlink(this.absolute, (er, linkpath) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADLINK](linkpath);
-        });
-    }
-    [ONREADLINK](linkpath) {
-        this.linkpath = normalizeWindowsPath(linkpath);
-        this[HEADER]();
-        this.end();
-    }
-    [HARDLINK](linkpath) {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create link entry without stat');
-        }
-        /* c8 ignore stop */
-        this.type = 'Link';
-        this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [FILE]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create file entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.stat.nlink > 1) {
-            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
-            const linkpath = this.linkCache.get(linkKey);
-            if (linkpath?.indexOf(this.cwd) === 0) {
-                return this[HARDLINK](linkpath);
-            }
-            this.linkCache.set(linkKey, this.absolute);
-        }
-        this[HEADER]();
-        if (this.stat.size === 0) {
-            return this.end();
-        }
-        this[OPENFILE]();
-    }
-    [OPENFILE]() {
-        fs.open(this.absolute, 'r', (er, fd) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONOPENFILE](fd);
-        });
-    }
-    [ONOPENFILE](fd) {
-        this.fd = fd;
-        if (this.#hadError) {
-            return this[CLOSE]();
-        }
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('should stat before calling onopenfile');
-        }
-        /* c8 ignore start */
-        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
-        this.blockRemain = this.blockLen;
-        const bufLen = Math.min(this.blockLen, this.maxReadSize);
-        this.buf = Buffer.allocUnsafe(bufLen);
-        this.offset = 0;
-        this.pos = 0;
-        this.remain = this.stat.size;
-        this.length = this.buf.length;
-        this[READ]();
-    }
-    [READ]() {
-        const { fd, buf, offset, length, pos } = this;
-        if (fd === undefined || buf === undefined) {
-            throw new Error('cannot read file without first opening');
-        }
-        fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-            if (er) {
-                // ignoring the error from close(2) is a bad practice, but at
-                // this point we already have an error, don't need another one
-                return this[CLOSE](() => this.emit('error', er));
-            }
-            this[ONREAD](bytesRead);
-        });
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs.close(this.fd, cb);
-    }
-    [ONREAD](bytesRead) {
-        if (bytesRead <= 0 && this.remain > 0) {
-            const er = Object.assign(new Error('encountered unexpected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        if (bytesRead > this.remain) {
-            const er = Object.assign(new Error('did not encounter expected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('should have created buffer prior to reading');
-        }
-        /* c8 ignore stop */
-        // null out the rest of the buffer, if we could fit the block padding
-        // at the end of this loop, we've incremented bytesRead and this.remain
-        // to be incremented up to the blockRemain level, as if we had expected
-        // to get a null-padded file, and read it until the end.  then we will
-        // decrement both remain and blockRemain by bytesRead, and know that we
-        // reached the expected EOF, without any null buffer to append.
-        if (bytesRead === this.remain) {
-            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-                this.buf[i + this.offset] = 0;
-                bytesRead++;
-                this.remain++;
-            }
-        }
-        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
-            this.buf
-            : this.buf.subarray(this.offset, this.offset + bytesRead);
-        const flushed = this.write(chunk);
-        if (!flushed) {
-            this[AWAITDRAIN](() => this[ONDRAIN]());
-        }
-        else {
-            this[ONDRAIN]();
-        }
-    }
-    [AWAITDRAIN](cb) {
-        this.once('drain', cb);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        if (this.blockRemain < chunk.length) {
-            const er = Object.assign(new Error('writing more data than expected'), {
-                path: this.absolute,
-            });
-            return this.emit('error', er);
-        }
-        this.remain -= chunk.length;
-        this.blockRemain -= chunk.length;
-        this.pos += chunk.length;
-        this.offset += chunk.length;
-        return super.write(chunk, null, cb);
-    }
-    [ONDRAIN]() {
-        if (!this.remain) {
-            if (this.blockRemain) {
-                super.write(Buffer.alloc(this.blockRemain));
-            }
-            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('buffer lost somehow in ONDRAIN');
-        }
-        /* c8 ignore stop */
-        if (this.offset >= this.length) {
-            // if we only have a smaller bit left to read, alloc a smaller buffer
-            // otherwise, keep it the same length it was before.
-            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
-            this.offset = 0;
-        }
-        this.length = this.buf.length - this.offset;
-        this[READ]();
-    }
-}
-export class WriteEntrySync extends WriteEntry {
-    sync = true;
-    [LSTAT]() {
-        this[ONLSTAT](fs.lstatSync(this.absolute));
-    }
-    [SYMLINK]() {
-        this[ONREADLINK](fs.readlinkSync(this.absolute));
-    }
-    [OPENFILE]() {
-        this[ONOPENFILE](fs.openSync(this.absolute, 'r'));
-    }
-    [READ]() {
-        let threw = true;
-        try {
-            const { fd, buf, offset, length, pos } = this;
-            /* c8 ignore start */
-            if (fd === undefined || buf === undefined) {
-                throw new Error('fd and buf must be set in READ method');
-            }
-            /* c8 ignore stop */
-            const bytesRead = fs.readSync(fd, buf, offset, length, pos);
-            this[ONREAD](bytesRead);
-            threw = false;
-        }
-        finally {
-            // ignoring the error from close(2) is a bad practice, but at
-            // this point we already have an error, don't need another one
-            if (threw) {
-                try {
-                    this[CLOSE](() => { });
-                }
-                catch (er) { }
-            }
-        }
-    }
-    [AWAITDRAIN](cb) {
-        cb();
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs.closeSync(this.fd);
-        cb();
-    }
-}
-export class WriteEntryTar extends Minipass {
-    blockLen = 0;
-    blockRemain = 0;
-    buf = 0;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    preservePaths;
-    portable;
-    strict;
-    noPax;
-    noMtime;
-    readEntry;
-    type;
-    prefix;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    header;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    size;
-    onWriteEntry;
-    warn(code, message, data = {}) {
-        return warnMethod(this, code, message, data);
-    }
-    constructor(readEntry, opt_ = {}) {
-        const opt = dealias(opt_);
-        super();
-        this.preservePaths = !!opt.preservePaths;
-        this.portable = !!opt.portable;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.onWriteEntry = opt.onWriteEntry;
-        this.readEntry = readEntry;
-        const { type } = readEntry;
-        /* c8 ignore start */
-        if (type === 'Unsupported') {
-            throw new Error('writing entry that should be ignored');
-        }
-        /* c8 ignore stop */
-        this.type = type;
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.prefix = opt.prefix;
-        this.path = normalizeWindowsPath(readEntry.path);
-        this.mode =
-            readEntry.mode !== undefined ?
-                this[MODE](readEntry.mode)
-                : undefined;
-        this.uid = this.portable ? undefined : readEntry.uid;
-        this.gid = this.portable ? undefined : readEntry.gid;
-        this.uname = this.portable ? undefined : readEntry.uname;
-        this.gname = this.portable ? undefined : readEntry.gname;
-        this.size = readEntry.size;
-        this.mtime =
-            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
-        this.atime = this.portable ? undefined : readEntry.atime;
-        this.ctime = this.portable ? undefined : readEntry.ctime;
-        this.linkpath =
-            readEntry.linkpath !== undefined ?
-                normalizeWindowsPath(readEntry.linkpath)
-                : undefined;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = stripAbsolutePath(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.remain = readEntry.size;
-        this.blockRemain = readEntry.startBlockSize;
-        this.onWriteEntry?.(this);
-        this.header = new Header({
-            path: this[PREFIX](this.path),
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this.mode,
-            uid: this.portable ? undefined : this.uid,
-            gid: this.portable ? undefined : this.gid,
-            size: this.size,
-            mtime: this.noMtime ? undefined : this.mtime,
-            type: this.type,
-            uname: this.portable ? undefined : this.uname,
-            atime: this.portable ? undefined : this.atime,
-            ctime: this.portable ? undefined : this.ctime,
-        });
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        if (this.header.encode() && !this.noPax) {
-            super.write(new Pax({
-                atime: this.portable ? undefined : this.atime,
-                ctime: this.portable ? undefined : this.ctime,
-                gid: this.portable ? undefined : this.gid,
-                mtime: this.noMtime ? undefined : this.mtime,
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.size,
-                uid: this.portable ? undefined : this.uid,
-                uname: this.portable ? undefined : this.uname,
-                dev: this.portable ? undefined : this.readEntry.dev,
-                ino: this.portable ? undefined : this.readEntry.ino,
-                nlink: this.portable ? undefined : this.readEntry.nlink,
-            }).encode());
-        }
-        const b = this.header?.block;
-        /* c8 ignore start */
-        if (!b)
-            throw new Error('failed to encode header');
-        /* c8 ignore stop */
-        super.write(b);
-        readEntry.pipe(this);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [MODE](mode) {
-        return modeFix(mode, this.type === 'Directory', this.portable);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        const writeLen = chunk.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        this.blockRemain -= writeLen;
-        return super.write(chunk, cb);
-    }
-    end(chunk, encoding, cb) {
-        if (this.blockRemain) {
-            super.write(Buffer.alloc(this.blockRemain));
-        }
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding ?? 'utf8');
-        }
-        if (cb)
-            this.once('finish', cb);
-        chunk ? super.end(chunk, cb) : super.end(cb);
-        /* c8 ignore stop */
-        return this;
-    }
-}
-const getType = (stat) => stat.isFile() ? 'File'
-    : stat.isDirectory() ? 'Directory'
-        : stat.isSymbolicLink() ? 'SymbolicLink'
-            : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
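
One technique worth noting from the removed write-entry.js: the FILE handler detects hard links by keying linkCache on `${dev}:${ino}`, so the first path seen for an inode is archived as a regular File and every later path becomes a zero-size Link entry pointing back at it. A rough sketch of the idea (simplified: the real code also requires the cached path to sit under cwd; the two paths below are hypothetical hard links of each other):

import fs from 'fs'

const linkCache = new Map()
const classify = p => {
  const st = fs.lstatSync(p)
  if (st.nlink > 1) {
    const key = `${st.dev}:${st.ino}`
    const seen = linkCache.get(key)
    if (seen) return { type: 'Link', linkpath: seen } // later occurrence: emit a hard link entry
    linkCache.set(key, p)                             // first occurrence: remember where it lives
  }
  return { type: 'File' }
}

console.log(classify('a.txt')) // { type: 'File' }
console.log(classify('b.txt')) // { type: 'Link', linkpath: 'a.txt' } when b.txt hard-links a.txt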
diff --git a/node_modules/make-fetch-happen/node_modules/tar/package.json b/node_modules/make-fetch-happen/node_modules/tar/package.json
deleted file mode 100644
index 0283103ee9eaf..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/tar/package.json
+++ /dev/null
@@ -1,325 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter",
-  "name": "tar",
-  "description": "tar for node",
-  "version": "7.4.3",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-tar.git"
-  },
-  "scripts": {
-    "genparse": "node scripts/generate-parse-fixtures.js",
-    "snap": "tap",
-    "test": "tap",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "tshy",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "dependencies": {
-    "@isaacs/fs-minipass": "^4.0.0",
-    "chownr": "^3.0.0",
-    "minipass": "^7.1.2",
-    "minizlib": "^3.0.1",
-    "mkdirp": "^3.0.1",
-    "yallist": "^5.0.0"
-  },
-  "devDependencies": {
-    "chmodr": "^1.2.0",
-    "end-of-stream": "^1.4.3",
-    "events-to-array": "^2.0.3",
-    "mutate-fs": "^2.1.1",
-    "nock": "^13.5.4",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "license": "ISC",
-  "engines": {
-    "node": ">=18"
-  },
-  "files": [
-    "dist"
-  ],
-  "tap": {
-    "coverage-map": "map.js",
-    "timeout": 0,
-    "typecheck": true
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts",
-      "./c": "./src/create.ts",
-      "./create": "./src/create.ts",
-      "./replace": "./src/create.ts",
-      "./r": "./src/create.ts",
-      "./list": "./src/list.ts",
-      "./t": "./src/list.ts",
-      "./update": "./src/update.ts",
-      "./u": "./src/update.ts",
-      "./extract": "./src/extract.ts",
-      "./x": "./src/extract.ts",
-      "./pack": "./src/pack.ts",
-      "./unpack": "./src/unpack.ts",
-      "./parse": "./src/parse.ts",
-      "./read-entry": "./src/read-entry.ts",
-      "./write-entry": "./src/write-entry.ts",
-      "./header": "./src/header.ts",
-      "./pax": "./src/pax.ts",
-      "./types": "./src/types.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "source": "./src/index.ts",
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "source": "./src/index.ts",
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./c": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./create": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./replace": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./r": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./list": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./t": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./update": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./u": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./extract": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./x": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./pack": {
-      "import": {
-        "source": "./src/pack.ts",
-        "types": "./dist/esm/pack.d.ts",
-        "default": "./dist/esm/pack.js"
-      },
-      "require": {
-        "source": "./src/pack.ts",
-        "types": "./dist/commonjs/pack.d.ts",
-        "default": "./dist/commonjs/pack.js"
-      }
-    },
-    "./unpack": {
-      "import": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/esm/unpack.d.ts",
-        "default": "./dist/esm/unpack.js"
-      },
-      "require": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/commonjs/unpack.d.ts",
-        "default": "./dist/commonjs/unpack.js"
-      }
-    },
-    "./parse": {
-      "import": {
-        "source": "./src/parse.ts",
-        "types": "./dist/esm/parse.d.ts",
-        "default": "./dist/esm/parse.js"
-      },
-      "require": {
-        "source": "./src/parse.ts",
-        "types": "./dist/commonjs/parse.d.ts",
-        "default": "./dist/commonjs/parse.js"
-      }
-    },
-    "./read-entry": {
-      "import": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/esm/read-entry.d.ts",
-        "default": "./dist/esm/read-entry.js"
-      },
-      "require": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/commonjs/read-entry.d.ts",
-        "default": "./dist/commonjs/read-entry.js"
-      }
-    },
-    "./write-entry": {
-      "import": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/esm/write-entry.d.ts",
-        "default": "./dist/esm/write-entry.js"
-      },
-      "require": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/commonjs/write-entry.d.ts",
-        "default": "./dist/commonjs/write-entry.js"
-      }
-    },
-    "./header": {
-      "import": {
-        "source": "./src/header.ts",
-        "types": "./dist/esm/header.d.ts",
-        "default": "./dist/esm/header.js"
-      },
-      "require": {
-        "source": "./src/header.ts",
-        "types": "./dist/commonjs/header.d.ts",
-        "default": "./dist/commonjs/header.js"
-      }
-    },
-    "./pax": {
-      "import": {
-        "source": "./src/pax.ts",
-        "types": "./dist/esm/pax.d.ts",
-        "default": "./dist/esm/pax.js"
-      },
-      "require": {
-        "source": "./src/pax.ts",
-        "types": "./dist/commonjs/pax.d.ts",
-        "default": "./dist/commonjs/pax.js"
-      }
-    },
-    "./types": {
-      "import": {
-        "source": "./src/types.ts",
-        "types": "./dist/esm/types.d.ts",
-        "default": "./dist/esm/types.js"
-      },
-      "require": {
-        "source": "./src/types.ts",
-        "types": "./dist/commonjs/types.d.ts",
-        "default": "./dist/commonjs/types.js"
-      }
-    }
-  },
-  "type": "module",
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md b/node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md
deleted file mode 100644
index 881248b6d7f0c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/yallist/LICENSE.md
+++ /dev/null
@@ -1,63 +0,0 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js
deleted file mode 100644
index c1e1e4741689d..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/index.js
+++ /dev/null
@@ -1,384 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Node = exports.Yallist = void 0;
-class Yallist {
-    tail;
-    head;
-    length = 0;
-    static create(list = []) {
-        return new Yallist(list);
-    }
-    constructor(list = []) {
-        for (const item of list) {
-            this.push(item);
-        }
-    }
-    *[Symbol.iterator]() {
-        for (let walker = this.head; walker; walker = walker.next) {
-            yield walker.value;
-        }
-    }
-    removeNode(node) {
-        if (node.list !== this) {
-            throw new Error('removing node which does not belong to this list');
-        }
-        const next = node.next;
-        const prev = node.prev;
-        if (next) {
-            next.prev = prev;
-        }
-        if (prev) {
-            prev.next = next;
-        }
-        if (node === this.head) {
-            this.head = next;
-        }
-        if (node === this.tail) {
-            this.tail = prev;
-        }
-        this.length--;
-        node.next = undefined;
-        node.prev = undefined;
-        node.list = undefined;
-        return next;
-    }
-    unshiftNode(node) {
-        if (node === this.head) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const head = this.head;
-        node.list = this;
-        node.next = head;
-        if (head) {
-            head.prev = node;
-        }
-        this.head = node;
-        if (!this.tail) {
-            this.tail = node;
-        }
-        this.length++;
-    }
-    pushNode(node) {
-        if (node === this.tail) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const tail = this.tail;
-        node.list = this;
-        node.prev = tail;
-        if (tail) {
-            tail.next = node;
-        }
-        this.tail = node;
-        if (!this.head) {
-            this.head = node;
-        }
-        this.length++;
-    }
-    push(...args) {
-        for (let i = 0, l = args.length; i < l; i++) {
-            push(this, args[i]);
-        }
-        return this.length;
-    }
-    unshift(...args) {
-        for (var i = 0, l = args.length; i < l; i++) {
-            unshift(this, args[i]);
-        }
-        return this.length;
-    }
-    pop() {
-        if (!this.tail) {
-            return undefined;
-        }
-        const res = this.tail.value;
-        const t = this.tail;
-        this.tail = this.tail.prev;
-        if (this.tail) {
-            this.tail.next = undefined;
-        }
-        else {
-            this.head = undefined;
-        }
-        t.list = undefined;
-        this.length--;
-        return res;
-    }
-    shift() {
-        if (!this.head) {
-            return undefined;
-        }
-        const res = this.head.value;
-        const h = this.head;
-        this.head = this.head.next;
-        if (this.head) {
-            this.head.prev = undefined;
-        }
-        else {
-            this.tail = undefined;
-        }
-        h.list = undefined;
-        this.length--;
-        return res;
-    }
-    forEach(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.head, i = 0; !!walker; i++) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.next;
-        }
-    }
-    forEachReverse(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.prev;
-        }
-    }
-    get(n) {
-        let i = 0;
-        let walker = this.head;
-        for (; !!walker && i < n; i++) {
-            walker = walker.next;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    getReverse(n) {
-        let i = 0;
-        let walker = this.tail;
-        for (; !!walker && i < n; i++) {
-            // abort out of the list early if we hit a cycle
-            walker = walker.prev;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    map(fn, thisp) {
-        thisp = thisp || this;
-        const res = new Yallist();
-        for (let walker = this.head; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.next;
-        }
-        return res;
-    }
-    mapReverse(fn, thisp) {
-        thisp = thisp || this;
-        var res = new Yallist();
-        for (let walker = this.tail; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.prev;
-        }
-        return res;
-    }
-    reduce(fn, initial) {
-        let acc;
-        let walker = this.head;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.head) {
-            walker = this.head.next;
-            acc = this.head.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (var i = 0; !!walker; i++) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.next;
-        }
-        return acc;
-    }
-    reduceReverse(fn, initial) {
-        let acc;
-        let walker = this.tail;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.tail) {
-            walker = this.tail.prev;
-            acc = this.tail.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (let i = this.length - 1; !!walker; i--) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.prev;
-        }
-        return acc;
-    }
-    toArray() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.head; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.next;
-        }
-        return arr;
-    }
-    toArrayReverse() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.tail; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.prev;
-        }
-        return arr;
-    }
-    slice(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let walker = this.head;
-        let i = 0;
-        for (i = 0; !!walker && i < from; i++) {
-            walker = walker.next;
-        }
-        for (; !!walker && i < to; i++, walker = walker.next) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    sliceReverse(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let i = this.length;
-        let walker = this.tail;
-        for (; !!walker && i > to; i--) {
-            walker = walker.prev;
-        }
-        for (; !!walker && i > from; i--, walker = walker.prev) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    splice(start, deleteCount = 0, ...nodes) {
-        if (start > this.length) {
-            start = this.length - 1;
-        }
-        if (start < 0) {
-            start = this.length + start;
-        }
-        let walker = this.head;
-        for (let i = 0; !!walker && i < start; i++) {
-            walker = walker.next;
-        }
-        const ret = [];
-        for (let i = 0; !!walker && i < deleteCount; i++) {
-            ret.push(walker.value);
-            walker = this.removeNode(walker);
-        }
-        if (!walker) {
-            walker = this.tail;
-        }
-        else if (walker !== this.tail) {
-            walker = walker.prev;
-        }
-        for (const v of nodes) {
-            walker = insertAfter(this, walker, v);
-        }
-        return ret;
-    }
-    reverse() {
-        const head = this.head;
-        const tail = this.tail;
-        for (let walker = head; !!walker; walker = walker.prev) {
-            const p = walker.prev;
-            walker.prev = walker.next;
-            walker.next = p;
-        }
-        this.head = tail;
-        this.tail = head;
-        return this;
-    }
-}
-exports.Yallist = Yallist;
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
-    const prev = node;
-    const next = node ? node.next : self.head;
-    const inserted = new Node(value, prev, next, self);
-    if (inserted.next === undefined) {
-        self.tail = inserted;
-    }
-    if (inserted.prev === undefined) {
-        self.head = inserted;
-    }
-    self.length++;
-    return inserted;
-}
-function push(self, item) {
-    self.tail = new Node(item, self.tail, undefined, self);
-    if (!self.head) {
-        self.head = self.tail;
-    }
-    self.length++;
-}
-function unshift(self, item) {
-    self.head = new Node(item, undefined, self.head, self);
-    if (!self.tail) {
-        self.tail = self.head;
-    }
-    self.length++;
-}
-class Node {
-    list;
-    next;
-    prev;
-    value;
-    constructor(value, prev, next, list) {
-        this.list = list;
-        this.value = value;
-        if (prev) {
-            prev.next = this;
-            this.prev = prev;
-        }
-        else {
-            this.prev = undefined;
-        }
-        if (next) {
-            next.prev = this;
-            this.next = next;
-        }
-        else {
-            this.next = undefined;
-        }
-    }
-}
-exports.Node = Node;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/yallist/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js
deleted file mode 100644
index 3d81c5113b93a..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/index.js
+++ /dev/null
@@ -1,379 +0,0 @@
-export class Yallist {
-    tail;
-    head;
-    length = 0;
-    static create(list = []) {
-        return new Yallist(list);
-    }
-    constructor(list = []) {
-        for (const item of list) {
-            this.push(item);
-        }
-    }
-    *[Symbol.iterator]() {
-        for (let walker = this.head; walker; walker = walker.next) {
-            yield walker.value;
-        }
-    }
-    removeNode(node) {
-        if (node.list !== this) {
-            throw new Error('removing node which does not belong to this list');
-        }
-        const next = node.next;
-        const prev = node.prev;
-        if (next) {
-            next.prev = prev;
-        }
-        if (prev) {
-            prev.next = next;
-        }
-        if (node === this.head) {
-            this.head = next;
-        }
-        if (node === this.tail) {
-            this.tail = prev;
-        }
-        this.length--;
-        node.next = undefined;
-        node.prev = undefined;
-        node.list = undefined;
-        return next;
-    }
-    unshiftNode(node) {
-        if (node === this.head) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const head = this.head;
-        node.list = this;
-        node.next = head;
-        if (head) {
-            head.prev = node;
-        }
-        this.head = node;
-        if (!this.tail) {
-            this.tail = node;
-        }
-        this.length++;
-    }
-    pushNode(node) {
-        if (node === this.tail) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const tail = this.tail;
-        node.list = this;
-        node.prev = tail;
-        if (tail) {
-            tail.next = node;
-        }
-        this.tail = node;
-        if (!this.head) {
-            this.head = node;
-        }
-        this.length++;
-    }
-    push(...args) {
-        for (let i = 0, l = args.length; i < l; i++) {
-            push(this, args[i]);
-        }
-        return this.length;
-    }
-    unshift(...args) {
-        for (var i = 0, l = args.length; i < l; i++) {
-            unshift(this, args[i]);
-        }
-        return this.length;
-    }
-    pop() {
-        if (!this.tail) {
-            return undefined;
-        }
-        const res = this.tail.value;
-        const t = this.tail;
-        this.tail = this.tail.prev;
-        if (this.tail) {
-            this.tail.next = undefined;
-        }
-        else {
-            this.head = undefined;
-        }
-        t.list = undefined;
-        this.length--;
-        return res;
-    }
-    shift() {
-        if (!this.head) {
-            return undefined;
-        }
-        const res = this.head.value;
-        const h = this.head;
-        this.head = this.head.next;
-        if (this.head) {
-            this.head.prev = undefined;
-        }
-        else {
-            this.tail = undefined;
-        }
-        h.list = undefined;
-        this.length--;
-        return res;
-    }
-    forEach(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.head, i = 0; !!walker; i++) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.next;
-        }
-    }
-    forEachReverse(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.prev;
-        }
-    }
-    get(n) {
-        let i = 0;
-        let walker = this.head;
-        for (; !!walker && i < n; i++) {
-            walker = walker.next;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    getReverse(n) {
-        let i = 0;
-        let walker = this.tail;
-        for (; !!walker && i < n; i++) {
-            // abort out of the list early if we hit a cycle
-            walker = walker.prev;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    map(fn, thisp) {
-        thisp = thisp || this;
-        const res = new Yallist();
-        for (let walker = this.head; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.next;
-        }
-        return res;
-    }
-    mapReverse(fn, thisp) {
-        thisp = thisp || this;
-        var res = new Yallist();
-        for (let walker = this.tail; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.prev;
-        }
-        return res;
-    }
-    reduce(fn, initial) {
-        let acc;
-        let walker = this.head;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.head) {
-            walker = this.head.next;
-            acc = this.head.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (var i = 0; !!walker; i++) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.next;
-        }
-        return acc;
-    }
-    reduceReverse(fn, initial) {
-        let acc;
-        let walker = this.tail;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.tail) {
-            walker = this.tail.prev;
-            acc = this.tail.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (let i = this.length - 1; !!walker; i--) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.prev;
-        }
-        return acc;
-    }
-    toArray() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.head; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.next;
-        }
-        return arr;
-    }
-    toArrayReverse() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.tail; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.prev;
-        }
-        return arr;
-    }
-    slice(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let walker = this.head;
-        let i = 0;
-        for (i = 0; !!walker && i < from; i++) {
-            walker = walker.next;
-        }
-        for (; !!walker && i < to; i++, walker = walker.next) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    sliceReverse(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let i = this.length;
-        let walker = this.tail;
-        for (; !!walker && i > to; i--) {
-            walker = walker.prev;
-        }
-        for (; !!walker && i > from; i--, walker = walker.prev) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    splice(start, deleteCount = 0, ...nodes) {
-        if (start > this.length) {
-            start = this.length - 1;
-        }
-        if (start < 0) {
-            start = this.length + start;
-        }
-        let walker = this.head;
-        for (let i = 0; !!walker && i < start; i++) {
-            walker = walker.next;
-        }
-        const ret = [];
-        for (let i = 0; !!walker && i < deleteCount; i++) {
-            ret.push(walker.value);
-            walker = this.removeNode(walker);
-        }
-        if (!walker) {
-            walker = this.tail;
-        }
-        else if (walker !== this.tail) {
-            walker = walker.prev;
-        }
-        for (const v of nodes) {
-            walker = insertAfter(this, walker, v);
-        }
-        return ret;
-    }
-    reverse() {
-        const head = this.head;
-        const tail = this.tail;
-        for (let walker = head; !!walker; walker = walker.prev) {
-            const p = walker.prev;
-            walker.prev = walker.next;
-            walker.next = p;
-        }
-        this.head = tail;
-        this.tail = head;
-        return this;
-    }
-}
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
-    const prev = node;
-    const next = node ? node.next : self.head;
-    const inserted = new Node(value, prev, next, self);
-    if (inserted.next === undefined) {
-        self.tail = inserted;
-    }
-    if (inserted.prev === undefined) {
-        self.head = inserted;
-    }
-    self.length++;
-    return inserted;
-}
-function push(self, item) {
-    self.tail = new Node(item, self.tail, undefined, self);
-    if (!self.head) {
-        self.head = self.tail;
-    }
-    self.length++;
-}
-function unshift(self, item) {
-    self.head = new Node(item, undefined, self.head, self);
-    if (!self.tail) {
-        self.tail = self.head;
-    }
-    self.length++;
-}
-export class Node {
-    list;
-    next;
-    prev;
-    value;
-    constructor(value, prev, next, list) {
-        this.list = list;
-        this.value = value;
-        if (prev) {
-            prev.next = this;
-            this.prev = prev;
-        }
-        else {
-            this.prev = undefined;
-        }
-        if (next) {
-            next.prev = this;
-            this.next = next;
-        }
-        else {
-            this.next = undefined;
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/yallist/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/yallist/package.json b/node_modules/make-fetch-happen/node_modules/yallist/package.json
deleted file mode 100644
index 2f5247808bbea..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/yallist/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "name": "yallist",
-  "version": "5.0.0",
-  "description": "Yet Another Linked List",
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "prettier": "^3.2.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
-    "typedoc": "typedoc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/yallist.git"
-  },
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "BlueOak-1.0.0",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "engines": {
-    "node": ">=18"
-  }
-}
diff --git a/node_modules/make-fetch-happen/package.json b/node_modules/make-fetch-happen/package.json
index 054fe841f13b7..41815ec3c8f11 100644
--- a/node_modules/make-fetch-happen/package.json
+++ b/node_modules/make-fetch-happen/package.json
@@ -1,6 +1,6 @@
 {
   "name": "make-fetch-happen",
-  "version": "14.0.3",
+  "version": "15.0.2",
   "description": "Opinionated, caching, retrying fetch client",
   "main": "lib/index.js",
   "files": [
@@ -33,8 +33,8 @@
   "author": "GitHub Inc.",
   "license": "ISC",
   "dependencies": {
-    "@npmcli/agent": "^3.0.0",
-    "cacache": "^19.0.1",
+    "@npmcli/agent": "^4.0.0",
+    "cacache": "^20.0.1",
     "http-cache-semantics": "^4.1.1",
     "minipass": "^7.0.2",
     "minipass-fetch": "^4.0.0",
@@ -47,14 +47,14 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.4",
+    "@npmcli/template-oss": "4.25.0",
     "nock": "^13.2.4",
     "safe-buffer": "^5.2.1",
     "standard-version": "^9.3.2",
     "tap": "^16.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "tap": {
     "color": 1,
@@ -68,7 +68,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.25.0",
     "publish": "true"
   }
 }
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md b/node_modules/node-gyp/node_modules/cacache/LICENSE.md
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/LICENSE.md
rename to node_modules/node-gyp/node_modules/cacache/LICENSE.md
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js b/node_modules/node-gyp/node_modules/cacache/lib/content/path.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/path.js
rename to node_modules/node-gyp/node_modules/cacache/lib/content/path.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js b/node_modules/node-gyp/node_modules/cacache/lib/content/read.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/read.js
rename to node_modules/node-gyp/node_modules/cacache/lib/content/read.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/rm.js
rename to node_modules/node-gyp/node_modules/cacache/lib/content/rm.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js b/node_modules/node-gyp/node_modules/cacache/lib/content/write.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/content/write.js
rename to node_modules/node-gyp/node_modules/cacache/lib/content/write.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js b/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/entry-index.js
rename to node_modules/node-gyp/node_modules/cacache/lib/entry-index.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/get.js b/node_modules/node-gyp/node_modules/cacache/lib/get.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/get.js
rename to node_modules/node-gyp/node_modules/cacache/lib/get.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/index.js b/node_modules/node-gyp/node_modules/cacache/lib/index.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/index.js
rename to node_modules/node-gyp/node_modules/cacache/lib/index.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js b/node_modules/node-gyp/node_modules/cacache/lib/memoization.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/memoization.js
rename to node_modules/node-gyp/node_modules/cacache/lib/memoization.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/put.js b/node_modules/node-gyp/node_modules/cacache/lib/put.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/put.js
rename to node_modules/node-gyp/node_modules/cacache/lib/put.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js b/node_modules/node-gyp/node_modules/cacache/lib/rm.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/rm.js
rename to node_modules/node-gyp/node_modules/cacache/lib/rm.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js b/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/util/glob.js
rename to node_modules/node-gyp/node_modules/cacache/lib/util/glob.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js b/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/util/hash-to-segments.js
rename to node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js b/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/util/tmp.js
rename to node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js b/node_modules/node-gyp/node_modules/cacache/lib/verify.js
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/lib/verify.js
rename to node_modules/node-gyp/node_modules/cacache/lib/verify.js
diff --git a/node_modules/make-fetch-happen/node_modules/cacache/package.json b/node_modules/node-gyp/node_modules/cacache/package.json
similarity index 100%
rename from node_modules/make-fetch-happen/node_modules/cacache/package.json
rename to node_modules/node-gyp/node_modules/cacache/package.json
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE b/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE
rename to node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js b/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js
rename to node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js
diff --git a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
similarity index 91%
rename from node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
rename to node_modules/node-gyp/node_modules/make-fetch-happen/package.json
index 1e27d4ee8a70e..054fe841f13b7 100644
--- a/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json
+++ b/node_modules/node-gyp/node_modules/make-fetch-happen/package.json
@@ -1,6 +1,6 @@
 {
   "name": "make-fetch-happen",
-  "version": "15.0.1",
+  "version": "14.0.3",
   "description": "Opinionated, caching, retrying fetch client",
   "main": "lib/index.js",
   "files": [
@@ -34,7 +34,7 @@
   "license": "ISC",
   "dependencies": {
     "@npmcli/agent": "^3.0.0",
-    "cacache": "^20.0.1",
+    "cacache": "^19.0.1",
     "http-cache-semantics": "^4.1.1",
     "minipass": "^7.0.2",
     "minipass-fetch": "^4.0.0",
@@ -47,14 +47,14 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.23.4",
     "nock": "^13.2.4",
     "safe-buffer": "^5.2.1",
     "standard-version": "^9.3.2",
     "tap": "^16.0.0"
   },
   "engines": {
-    "node": "^20.17.0 || >=22.9.0"
+    "node": "^18.17.0 || >=20.5.0"
   },
   "tap": {
     "color": 1,
@@ -68,7 +68,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.23.4",
     "publish": "true"
   }
 }
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/HISTORY.md b/node_modules/node-gyp/node_modules/negotiator/HISTORY.md
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/HISTORY.md
rename to node_modules/node-gyp/node_modules/negotiator/HISTORY.md
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/LICENSE b/node_modules/node-gyp/node_modules/negotiator/LICENSE
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/LICENSE
rename to node_modules/node-gyp/node_modules/negotiator/LICENSE
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/index.js b/node_modules/node-gyp/node_modules/negotiator/index.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/index.js
rename to node_modules/node-gyp/node_modules/negotiator/index.js
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/lib/charset.js b/node_modules/node-gyp/node_modules/negotiator/lib/charset.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/lib/charset.js
rename to node_modules/node-gyp/node_modules/negotiator/lib/charset.js
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/lib/encoding.js b/node_modules/node-gyp/node_modules/negotiator/lib/encoding.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/lib/encoding.js
rename to node_modules/node-gyp/node_modules/negotiator/lib/encoding.js
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/lib/language.js b/node_modules/node-gyp/node_modules/negotiator/lib/language.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/lib/language.js
rename to node_modules/node-gyp/node_modules/negotiator/lib/language.js
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/lib/mediaType.js b/node_modules/node-gyp/node_modules/negotiator/lib/mediaType.js
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/lib/mediaType.js
rename to node_modules/node-gyp/node_modules/negotiator/lib/mediaType.js
diff --git a/node_modules/@sigstore/sign/node_modules/negotiator/package.json b/node_modules/node-gyp/node_modules/negotiator/package.json
similarity index 100%
rename from node_modules/@sigstore/sign/node_modules/negotiator/package.json
rename to node_modules/node-gyp/node_modules/negotiator/package.json
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index bfcfacbcc95e1..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,471 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
-  'accept-charset',
-  'accept-encoding',
-  'accept-language',
-  'accept',
-  'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
-  'cache-control',
-  'content-encoding',
-  'content-language',
-  'content-type',
-  'date',
-  'etag',
-  'expires',
-  'last-modified',
-  'link',
-  'location',
-  'pragma',
-  'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
-  const metadata = {
-    time: Date.now(),
-    url: request.url,
-    reqHeaders: {},
-    resHeaders: {},
-
-    // options on which we must match the request and vary the response
-    options: {
-      compress: options.compress != null ? options.compress : request.compress,
-    },
-  }
-
-  // only save the status if it's not a 200 or 304
-  if (response.status !== 200 && response.status !== 304) {
-    metadata.status = response.status
-  }
-
-  for (const name of KEEP_REQUEST_HEADERS) {
-    if (request.headers.has(name)) {
-      metadata.reqHeaders[name] = request.headers.get(name)
-    }
-  }
-
-  // if the request's host header differs from the host in the url
-  // we need to keep it, otherwise it's just noise and we ignore it
-  const host = request.headers.get('host')
-  const parsedUrl = new url.URL(request.url)
-  if (host && parsedUrl.host !== host) {
-    metadata.reqHeaders.host = host
-  }
-
-  // if the response has a vary header, make sure
-  // we store the relevant request headers too
-  if (response.headers.has('vary')) {
-    const vary = response.headers.get('vary')
-    // a vary of "*" means every header causes a different response.
-    // in that scenario, we do not include any additional headers
-    // as the freshness check will always fail anyway and we don't
-    // want to bloat the cache indexes
-    if (vary !== '*') {
-      // copy any other request headers that will vary the response
-      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
-      for (const name of varyHeaders) {
-        if (request.headers.has(name)) {
-          metadata.reqHeaders[name] = request.headers.get(name)
-        }
-      }
-    }
-  }
-
-  for (const name of KEEP_RESPONSE_HEADERS) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  for (const name of options.cacheAdditionalHeaders) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  return metadata
-}
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
-  constructor ({ entry, request, response, options }) {
-    if (entry) {
-      this.key = entry.key
-      this.entry = entry
-      // previous versions of this module didn't write an explicit timestamp in
-      // the metadata, so fall back to the entry's timestamp. we can't use the
-      // entry timestamp to determine staleness because cacache will update it
-      // when it verifies its data
-      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
-    } else {
-      this.key = cacheKey(request)
-    }
-
-    this.options = options
-
-    // these properties are behind getters that lazily evaluate
-    this[_request] = request
-    this[_response] = response
-    this[_policy] = null
-  }
-
-  // returns a CacheEntry instance that satisfies the given request
-  // or undefined if no existing entry satisfies
-  static async find (request, options) {
-    try {
-      // compacts the index and returns an array of unique entries
-      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
-        const entryA = new CacheEntry({ entry: A, options })
-        const entryB = new CacheEntry({ entry: B, options })
-        return entryA.policy.satisfies(entryB.request)
-      }, {
-        validateEntry: (entry) => {
-          // clean out entries with a buggy content-encoding value
-          if (entry.metadata &&
-              entry.metadata.resHeaders &&
-              entry.metadata.resHeaders['content-encoding'] === null) {
-            return false
-          }
-
-          // if an integrity is null, it needs to have a status specified
-          if (entry.integrity === null) {
-            return !!(entry.metadata && entry.metadata.status)
-          }
-
-          return true
-        },
-      })
-    } catch (err) {
-      // if the compact request fails, ignore the error and return
-      return
-    }
-
-    // a cache mode of 'reload' means to behave as though we have no cache
-    // on the way to the network. return undefined to allow cacheFetch to
-    // create a brand new request no matter what.
-    if (options.cache === 'reload') {
-      return
-    }
-
-    // find the specific entry that satisfies the request
-    let match
-    for (const entry of matches) {
-      const _entry = new CacheEntry({
-        entry,
-        options,
-      })
-
-      if (_entry.policy.satisfies(request)) {
-        match = _entry
-        break
-      }
-    }
-
-    return match
-  }
-
-  // if the user made a PUT/POST/PATCH then we invalidate our
-  // cache for the same url by deleting the index entirely
-  static async invalidate (request, options) {
-    const key = cacheKey(request)
-    try {
-      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
-    } catch (err) {
-      // ignore errors
-    }
-  }
-
-  get request () {
-    if (!this[_request]) {
-      this[_request] = new Request(this.entry.metadata.url, {
-        method: 'GET',
-        headers: this.entry.metadata.reqHeaders,
-        ...this.entry.metadata.options,
-      })
-    }
-
-    return this[_request]
-  }
-
-  get response () {
-    if (!this[_response]) {
-      this[_response] = new Response(null, {
-        url: this.entry.metadata.url,
-        counter: this.options.counter,
-        status: this.entry.metadata.status || 200,
-        headers: {
-          ...this.entry.metadata.resHeaders,
-          'content-length': this.entry.size,
-        },
-      })
-    }
-
-    return this[_response]
-  }
-
-  get policy () {
-    if (!this[_policy]) {
-      this[_policy] = new CachePolicy({
-        entry: this.entry,
-        request: this.request,
-        response: this.response,
-        options: this.options,
-      })
-    }
-
-    return this[_policy]
-  }
-
-  // wraps the response in a pipeline that stores the data
-  // in the cache while the user consumes it
-  async store (status) {
-    // if we got a status other than 200, 301, or 308,
-    // or the CachePolicy forbid storage, append the
-    // cache status header and return it untouched
-    if (
-      this.request.method !== 'GET' ||
-      ![200, 301, 308].includes(this.response.status) ||
-      !this.policy.storable()
-    ) {
-      this.response.headers.set('x-local-cache-status', 'skip')
-      return this.response
-    }
-
-    const size = this.response.headers.get('content-length')
-    const cacheOpts = {
-      algorithms: this.options.algorithms,
-      metadata: getMetadata(this.request, this.response, this.options),
-      size,
-      integrity: this.options.integrity,
-      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
-    }
-
-    let body = null
-    // we only set a body if the status is a 200, redirects are
-    // stored as metadata only
-    if (this.response.status === 200) {
-      let cacheWriteResolve, cacheWriteReject
-      const cacheWritePromise = new Promise((resolve, reject) => {
-        cacheWriteResolve = resolve
-        cacheWriteReject = reject
-      }).catch((err) => {
-        body.emit('error', err)
-      })
-
-      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
-        flush () {
-          return cacheWritePromise
-        },
-      }))
-      // this is always true since if we aren't reusing the one from the remote fetch, we
-      // are using the one from cacache
-      body.hasIntegrityEmitter = true
-
-      const onResume = () => {
-        const tee = new Minipass()
-        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
-        // re-emit the integrity and size events on our new response body so they can be reused
-        cacheStream.on('integrity', i => body.emit('integrity', i))
-        cacheStream.on('size', s => body.emit('size', s))
-        // stick a flag on here so downstream users will know if they can expect integrity events
-        tee.pipe(cacheStream)
-        // TODO if the cache write fails, log a warning but return the response anyway
-        // eslint-disable-next-line promise/catch-or-return
-        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
-        body.unshift(tee)
-        body.unshift(this.response.body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-    } else {
-      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
-    }
-
-    // note: we do not set the x-local-cache-hash header because we do not know
-    // the hash value until after the write to the cache completes, which doesn't
-    // happen until after the response has been sent and it's too late to write
-    // the header anyway
-    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    this.response.headers.set('x-local-cache-mode', 'stream')
-    this.response.headers.set('x-local-cache-status', status)
-    this.response.headers.set('x-local-cache-time', new Date().toISOString())
-    const newResponse = new Response(body, {
-      url: this.response.url,
-      status: this.response.status,
-      headers: this.response.headers,
-      counter: this.options.counter,
-    })
-    return newResponse
-  }
-
-  // use the cached data to create a response and return it
-  async respond (method, options, status) {
-    let response
-    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
-      // if the request is a HEAD, or the response is a redirect,
-      // then the metadata in the entry already includes everything
-      // we need to build a response
-      response = this.response
-    } else {
-      // we're responding with a full cached response, so create a body
-      // that reads from cacache and attach it to a new Response
-      const body = new Minipass()
-      const headers = { ...this.policy.responseHeaders() }
-
-      const onResume = () => {
-        const cacheStream = cacache.get.stream.byDigest(
-          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-        )
-        cacheStream.on('error', async (err) => {
-          cacheStream.pause()
-          if (err.code === 'EINTEGRITY') {
-            await cacache.rm.content(
-              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-            )
-          }
-          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
-            await CacheEntry.invalidate(this.request, this.options)
-          }
-          body.emit('error', err)
-          cacheStream.resume()
-        })
-        // emit the integrity and size events based on our metadata so we're consistent
-        body.emit('integrity', this.entry.integrity)
-        body.emit('size', Number(headers['content-length']))
-        cacheStream.pipe(body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-      response = new Response(body, {
-        url: this.entry.metadata.url,
-        counter: options.counter,
-        status: 200,
-        headers,
-      })
-    }
-
-    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
-    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    response.headers.set('x-local-cache-mode', 'stream')
-    response.headers.set('x-local-cache-status', status)
-    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
-    return response
-  }
-
-  // use the provided request along with this cache entry to
-  // revalidate the stored response. returns a response, either
-  // from the cache or from the update
-  async revalidate (request, options) {
-    const revalidateRequest = new Request(request, {
-      headers: this.policy.revalidationHeaders(request),
-    })
-
-    try {
-      // NOTE: be sure to remove the headers property from the
-      // user supplied options, since we have already defined
-      // them on the new request object. if they're still in the
-      // options then those will overwrite the ones from the policy
-      var response = await remote(revalidateRequest, {
-        ...options,
-        headers: undefined,
-      })
-    } catch (err) {
-      // if the network fetch fails, return the stale
-      // cached response unless it has a cache-control
-      // of 'must-revalidate'
-      if (!this.policy.mustRevalidate) {
-        return this.respond(request.method, options, 'stale')
-      }
-
-      throw err
-    }
-
-    if (this.policy.revalidated(revalidateRequest, response)) {
-      // we got a 304, write a new index to the cache and respond from cache
-      const metadata = getMetadata(request, response, options)
-      // 304 responses do not include headers that are specific to the response data
-      // since they do not include a body, so we copy values for headers that were
-      // in the old cache entry to the new one, if the new metadata does not already
-      // include that header
-      for (const name of KEEP_RESPONSE_HEADERS) {
-        if (
-          !hasOwnProperty(metadata.resHeaders, name) &&
-          hasOwnProperty(this.entry.metadata.resHeaders, name)
-        ) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-      }
-
-      for (const name of options.cacheAdditionalHeaders) {
-        const inMeta = hasOwnProperty(metadata.resHeaders, name)
-        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
-        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
-        // if the header is in the existing entry, but it is not in the metadata
-        // then we need to write it to the metadata as this will refresh the on-disk cache
-        if (!inMeta && inEntry) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-        // if the header is in the metadata, but not in the policy, then we need to set
-        // it in the policy so that it's included in the immediate response. future
-        // responses will load a new cache entry, so we don't need to change that
-        if (!inPolicy && inMeta) {
-          this.policy.response.headers[name] = metadata.resHeaders[name]
-        }
-      }
-
-      try {
-        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
-          size: this.entry.size,
-          metadata,
-        })
-      } catch (err) {
-        // if updating the cache index fails, we ignore it and
-        // respond anyway
-      }
-      return this.respond(request.method, options, 'revalidated')
-    }
-
-    // if we got a modified response, create a new entry based on it
-    const newEntry = new CacheEntry({
-      request,
-      response,
-      options,
-    })
-
-    // respond with the new entry while writing it to the cache
-    return newEntry.store('updated')
-  }
-}
-
-module.exports = CacheEntry
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe6..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
-  constructor (url) {
-    /* eslint-disable-next-line max-len */
-    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
-    this.code = 'ENOTCACHED'
-  }
-}
-
-module.exports = {
-  NotCachedError,
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb933..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
-  // try to find a cached entry that satisfies this request
-  const entry = await CacheEntry.find(request, options)
-  if (!entry) {
-    // no cached result, if the cache mode is 'only-if-cached' that's a failure
-    if (options.cache === 'only-if-cached') {
-      throw new NotCachedError(request.url)
-    }
-
-    // otherwise, we make a request, store it and return it
-    const response = await remote(request, options)
-    const newEntry = new CacheEntry({ request, response, options })
-    return newEntry.store('miss')
-  }
-
-  // we have a cached response that satisfies this request, however if the cache
-  // mode is 'no-cache' then we send the revalidation request no matter what
-  if (options.cache === 'no-cache') {
-    return entry.revalidate(request, options)
-  }
-
-  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
-  // 'only-if-cached' we can respond with the cached entry. set the status
-  // based on the result of needsRevalidation and respond
-  const _needsRevalidation = entry.policy.needsRevalidation(request)
-  if (options.cache === 'force-cache' ||
-      options.cache === 'only-if-cached' ||
-      !_needsRevalidation) {
-    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
-  }
-
-  // if we got here, the cache entry is stale so revalidate it
-  return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
-  if (!options.cachePath) {
-    return
-  }
-
-  return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fa..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
-  auth: false,
-  fragment: false,
-  search: true,
-  unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
-  const parsed = new URL(request.url)
-  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae9..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
-  shared: false,
-  ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
-  const _obj = {
-    method: request.method,
-    url: request.url,
-    headers: {},
-    compress: request.compress,
-  }
-
-  request.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
-  const _obj = {
-    status: response.status,
-    headers: {},
-  }
-
-  response.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-class CachePolicy {
-  constructor ({ entry, request, response, options }) {
-    this.entry = entry
-    this.request = requestObject(request)
-    this.response = responseObject(response)
-    this.options = options
-    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
-    if (this.entry) {
-      // if we have an entry, copy the timestamp to the _responseTime
-      // this is necessary because the CacheSemantics constructor forces
-      // the value to Date.now() which means a policy created from a
-      // cache entry is likely to always identify itself as stale
-      this.policy._responseTime = this.entry.metadata.time
-    }
-  }
-
-  // static method to quickly determine if a request alone is storable
-  static storable (request, options) {
-    // no cachePath means no caching
-    if (!options.cachePath) {
-      return false
-    }
-
-    // user explicitly asked not to cache
-    if (options.cache === 'no-store') {
-      return false
-    }
-
-    // we only cache GET and HEAD requests
-    if (!['GET', 'HEAD'].includes(request.method)) {
-      return false
-    }
-
-    // otherwise, let http-cache-semantics make the decision
-    // based on the request's headers
-    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
-    return policy.storable()
-  }
-
-  // returns true if the policy satisfies the request
-  satisfies (request) {
-    const _req = requestObject(request)
-    if (this.request.headers.host !== _req.headers.host) {
-      return false
-    }
-
-    if (this.request.compress !== _req.compress) {
-      return false
-    }
-
-    const negotiatorA = new Negotiator(this.request)
-    const negotiatorB = new Negotiator(_req)
-
-    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
-      return false
-    }
-
-    if (this.options.integrity) {
-      return ssri.parse(this.options.integrity).match(this.entry.integrity)
-    }
-
-    return true
-  }
-
-  // returns true if the request and response allow caching
-  storable () {
-    return this.policy.storable()
-  }
-
-  // NOTE: this is a hack to avoid parsing the cache-control
-  // header ourselves, it returns true if the response's
-  // cache-control contains must-revalidate
-  get mustRevalidate () {
-    return !!this.policy._rescc['must-revalidate']
-  }
-
-  // returns true if the cached response requires revalidation
-  // for the given request
-  needsRevalidation (request) {
-    const _req = requestObject(request)
-    // force method to GET because we only cache GETs
-    // but can serve a HEAD from a cached GET
-    _req.method = 'GET'
-    return !this.policy.satisfiesWithoutRevalidation(_req)
-  }
-
-  responseHeaders () {
-    return this.policy.responseHeaders()
-  }
-
-  // returns a new object containing the appropriate headers
-  // to send a revalidation request
-  revalidationHeaders (request) {
-    const _req = requestObject(request)
-    return this.policy.revalidationHeaders(_req)
-  }
-
-  // returns true if the request/response was revalidated
-  // successfully. returns false if a new response was received
-  revalidated (request, response) {
-    const _req = requestObject(request)
-    const _res = responseObject(response)
-    const policy = this.policy.revalidatedPolicy(_req, _res)
-    return !policy.modified
-  }
-}
-
-module.exports = CachePolicy
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e16550..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
-  if (!isRedirect(response.status)) {
-    return false
-  }
-
-  if (options.redirect === 'manual') {
-    return false
-  }
-
-  if (options.redirect === 'error') {
-    throw new FetchError(`redirect mode is set to error: ${request.url}`,
-      'no-redirect', { code: 'ENOREDIRECT' })
-  }
-
-  if (!response.headers.has('location')) {
-    throw new FetchError(`redirect location header missing for: ${request.url}`,
-      'no-location', { code: 'EINVALIDREDIRECT' })
-  }
-
-  if (request.counter >= request.follow) {
-    throw new FetchError(`maximum redirect reached at: ${request.url}`,
-      'max-redirect', { code: 'EMAXREDIRECT' })
-  }
-
-  return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
-  const _opts = { ...options }
-  const location = response.headers.get('location')
-  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
-  // Comment below is used under the following license:
-  /**
-   * @license
-   * Copyright (c) 2010-2012 Mikeal Rogers
-   * Licensed under the Apache License, Version 2.0 (the "License");
-   * you may not use this file except in compliance with the License.
-   * You may obtain a copy of the License at
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * Unless required by applicable law or agreed to in writing,
-   * software distributed under the License is distributed on an "AS
-   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-   * express or implied. See the License for the specific language
-   * governing permissions and limitations under the License.
-   */
-
-  // Remove authorization if changing hostnames (but not if just
-  // changing ports or protocols).  This matches the behavior of request:
-  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
-  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
-    request.headers.delete('authorization')
-    request.headers.delete('cookie')
-  }
-
-  // for POST request with 301/302 response, or any request with 303 response,
-  // use GET when following redirect
-  if (
-    response.status === 303 ||
-    (request.method === 'POST' && [301, 302].includes(response.status))
-  ) {
-    _opts.method = 'GET'
-    _opts.body = null
-    request.headers.delete('content-length')
-  }
-
-  _opts.headers = {}
-  request.headers.forEach((value, key) => {
-    _opts.headers[key] = value
-  })
-
-  _opts.counter = ++request.counter
-  const redirectReq = new Request(url.format(redirectUrl), _opts)
-  return {
-    request: redirectReq,
-    options: _opts,
-  }
-}
-
-const fetch = async (request, options) => {
-  const response = CachePolicy.storable(request, options)
-    ? await cache(request, options)
-    : await remote(request, options)
-
-  // if the request wasn't a GET or HEAD, and the response
-  // status is between 200 and 399 inclusive, invalidate the
-  // request url
-  if (!['GET', 'HEAD'].includes(request.method) &&
-      response.status >= 200 &&
-      response.status <= 399) {
-    await cache.invalidate(request, options)
-  }
-
-  if (!canFollowRedirect(request, response, options)) {
-    return response
-  }
-
-  const redirect = getRedirect(request, response, options)
-  return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b6113..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
-  const options = configureOptions(opts)
-
-  const request = new Request(url, options)
-  return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
-  if (typeof defaultUrl === 'object') {
-    defaultOptions = defaultUrl
-    defaultUrl = null
-  }
-
-  const defaultedFetch = (url, options = {}) => {
-    const finalUrl = url || defaultUrl
-    const finalOptions = {
-      ...defaultOptions,
-      ...options,
-      headers: {
-        ...defaultOptions.headers,
-        ...options.headers,
-      },
-    }
-    return wrappedFetch(finalUrl, finalOptions)
-  }
-
-  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
-    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
-  return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index db51cc6324817..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
-  'if-modified-since',
-  'if-none-match',
-  'if-unmodified-since',
-  'if-match',
-  'if-range',
-]
-
-const configureOptions = (opts) => {
-  const { strictSSL, ...options } = { ...opts }
-  options.method = options.method ? options.method.toUpperCase() : 'GET'
-
-  if (strictSSL === undefined || strictSSL === null) {
-    options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
-  } else {
-    options.rejectUnauthorized = strictSSL !== false
-  }
-
-  if (!options.retry) {
-    options.retry = { retries: 0 }
-  } else if (typeof options.retry === 'string') {
-    const retries = parseInt(options.retry, 10)
-    if (isFinite(retries)) {
-      options.retry = { retries }
-    } else {
-      options.retry = { retries: 0 }
-    }
-  } else if (typeof options.retry === 'number') {
-    options.retry = { retries: options.retry }
-  } else {
-    options.retry = { retries: 0, ...options.retry }
-  }
-
-  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
-  options.cache = options.cache || 'default'
-  if (options.cache === 'default') {
-    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
-      return conditionalHeaders.includes(name.toLowerCase())
-    })
-    if (hasConditionalHeader) {
-      options.cache = 'no-store'
-    }
-  }
-
-  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
-  // cacheManager is deprecated, but if it's set and
-  // cachePath is not we should copy it to the new field
-  if (options.cacheManager && !options.cachePath) {
-    options.cachePath = options.cacheManager
-  }
-
-  return options
-}
-
-module.exports = configureOptions
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce3..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
-  #events = []
-  #data = new Map()
-
-  constructor (opts, ...streams) {
-    // CRITICAL: do NOT pass the streams to the call to super(), this will start
-    // the flow of data and potentially cause the events we need to catch to emit
-    // before we've finished our own setup. instead we call super() with no args,
-    // finish our setup, and then push the streams into ourselves to start the
-    // data flow
-    super()
-    this.#events = opts.events
-
-    /* istanbul ignore next - coverage disabled because this is pointless to test here */
-    if (streams.length) {
-      this.push(...streams)
-    }
-  }
-
-  on (event, handler) {
-    if (this.#events.includes(event) && this.#data.has(event)) {
-      return handler(...this.#data.get(event))
-    }
-
-    return super.on(event, handler)
-  }
-
-  emit (event, ...data) {
-    if (this.#events.includes(event)) {
-      this.#data.set(event, data)
-    }
-
-    return super.emit(event, ...data)
-  }
-}
-
-module.exports = CachingMinipassPipeline
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index 1d640e5380baa..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,132 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-const { log } = require('proc-log')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const { getAgent } = require('@npmcli/agent')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
-  'ECONNRESET', // remote socket closed on us
-  'ECONNREFUSED', // remote host refused to open connection
-  'EADDRINUSE', // failed to bind to a local port (proxy?)
-  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
-  // from @npmcli/agent
-  'ECONNECTIONTIMEOUT',
-  'EIDLETIMEOUT',
-  'ERESPONSETIMEOUT',
-  'ETRANSFERTIMEOUT',
-  // Known codes we do NOT retry on:
-  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-  // EINVALIDPROXY // invalid protocol from @npmcli/agent
-  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
-]
-
-const RETRY_TYPES = [
-  'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
-  // options.signal is intended for the fetch itself, not the agent.  Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one.
-  const agent = getAgent(request.url, { ...options, signal: undefined })
-  if (!request.headers.has('connection')) {
-    request.headers.set('connection', agent ? 'keep-alive' : 'close')
-  }
-
-  if (!request.headers.has('user-agent')) {
-    request.headers.set('user-agent', USER_AGENT)
-  }
-
-  // keep our own options since we're overriding the agent
-  // and the redirect mode
-  const _opts = {
-    ...options,
-    agent,
-    redirect: 'manual',
-  }
-
-  return promiseRetry(async (retryHandler, attemptNum) => {
-    const req = new fetch.Request(request, _opts)
-    try {
-      let res = await fetch(req, _opts)
-      if (_opts.integrity && res.status === 200) {
-        // we got a 200 response and the user has specified an expected
-        // integrity value, so wrap the response in an ssri stream to verify it
-        const integrityStream = ssri.integrityStream({
-          algorithms: _opts.algorithms,
-          integrity: _opts.integrity,
-          size: _opts.size,
-        })
-        const pipeline = new CachingMinipassPipeline({
-          events: ['integrity', 'size'],
-        }, res.body, integrityStream)
-        // we also propagate the integrity and size events out to the pipeline so we can use
-        // this new response body as an integrityEmitter for cacache
-        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
-        integrityStream.on('size', s => pipeline.emit('size', s))
-        res = new fetch.Response(pipeline, res)
-        // set an explicit flag so we know if our response body will emit integrity and size
-        res.body.hasIntegrityEmitter = true
-      }
-
-      res.headers.set('x-fetch-attempts', attemptNum)
-
-      // do not retry POST requests, or requests with a streaming body
-      // do retry requests with a 408, 420, 429 or 500+ status in the response
-      const isStream = Minipass.isStream(req.body)
-      const isRetriable = req.method !== 'POST' &&
-          !isStream &&
-          ([408, 420, 429].includes(res.status) || res.status >= 500)
-
-      if (isRetriable) {
-        if (typeof options.onRetry === 'function') {
-          options.onRetry(res)
-        }
-
-        /* eslint-disable-next-line max-len */
-        log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
-        return retryHandler(res)
-      }
-
-      return res
-    } catch (err) {
-      const code = (err.code === 'EPROMISERETRY')
-        ? err.retried.code
-        : err.code
-
-      // err.retried will be the thing that was thrown from above
-      // if it's a response, we just got a bad status code and we
-      // can re-throw to allow the retry
-      const isRetryError = err.retried instanceof fetch.Response ||
-        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
-      if (req.method === 'POST' || isRetryError) {
-        throw err
-      }
-
-      if (typeof options.onRetry === 'function') {
-        options.onRetry(err)
-      }
-
-      log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
-      return retryHandler(err)
-    }
-  }, options.retry).catch((err) => {
-    // don't reject for http errors, just return them
-    if (err.status >= 400 && err.type !== 'system') {
-      return err
-    }
-
-    throw err
-  })
-}
-
-module.exports = remoteFetch
diff --git a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json b/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index 1e27d4ee8a70e..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,74 +0,0 @@
-{
-  "name": "make-fetch-happen",
-  "version": "15.0.1",
-  "description": "Opinionated, caching, retrying fetch client",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/make-fetch-happen.git"
-  },
-  "keywords": [
-    "http",
-    "request",
-    "fetch",
-    "mean girls",
-    "caching",
-    "cache",
-    "subresource integrity"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/agent": "^3.0.0",
-    "cacache": "^20.0.1",
-    "http-cache-semantics": "^4.1.1",
-    "minipass": "^7.0.2",
-    "minipass-fetch": "^4.0.0",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "negotiator": "^1.0.0",
-    "proc-log": "^5.0.0",
-    "promise-retry": "^2.0.1",
-    "ssri": "^12.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "nock": "^13.2.4",
-    "safe-buffer": "^5.2.1",
-    "standard-version": "^9.3.2",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "color": 1,
-    "files": "test/*.js",
-    "check-coverage": true,
-    "timeout": 60,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/HISTORY.md b/node_modules/npm-registry-fetch/node_modules/negotiator/HISTORY.md
deleted file mode 100644
index 63d537d3f6811..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/HISTORY.md
+++ /dev/null
@@ -1,114 +0,0 @@
-1.0.0 / 2024-08-31
-==================
-
-  * Drop support for node <18
-  * Added an option preferred encodings array #59
-
-0.6.3 / 2022-01-22
-==================
-
-  * Revert "Lazy-load modules from main entry point"
-
-0.6.2 / 2019-04-29
-==================
-
-  * Fix sorting charset, encoding, and language with extra parameters
-
-0.6.1 / 2016-05-02
-==================
-
-  * perf: improve `Accept` parsing speed
-  * perf: improve `Accept-Charset` parsing speed
-  * perf: improve `Accept-Encoding` parsing speed
-  * perf: improve `Accept-Language` parsing speed
-
-0.6.0 / 2015-09-29
-==================
-
-  * Fix including type extensions in parameters in `Accept` parsing
-  * Fix parsing `Accept` parameters with quoted equals
-  * Fix parsing `Accept` parameters with quoted semicolons
-  * Lazy-load modules from main entry point
-  * perf: delay type concatenation until needed
-  * perf: enable strict mode
-  * perf: hoist regular expressions
-  * perf: remove closures getting spec properties
-  * perf: remove a closure from media type parsing
-  * perf: remove property delete from media type parsing
-
-0.5.3 / 2015-05-10
-==================
-
-  * Fix media type parameter matching to be case-insensitive
-
-0.5.2 / 2015-05-06
-==================
-
-  * Fix comparing media types with quoted values
-  * Fix splitting media types with quoted commas
-
-0.5.1 / 2015-02-14
-==================
-
-  * Fix preference sorting to be stable for long acceptable lists
-
-0.5.0 / 2014-12-18
-==================
-
-  * Fix list return order when large accepted list
-  * Fix missing identity encoding when q=0 exists
-  * Remove dynamic building of Negotiator class
-
-0.4.9 / 2014-10-14
-==================
-
-  * Fix error when media type has invalid parameter
-
-0.4.8 / 2014-09-28
-==================
-
-  * Fix all negotiations to be case-insensitive
-  * Stable sort preferences of same quality according to client order
-  * Support Node.js 0.6
-
-0.4.7 / 2014-06-24
-==================
-
-  * Handle invalid provided languages
-  * Handle invalid provided media types
-
-0.4.6 / 2014-06-11
-==================
-
-  *  Order by specificity when quality is the same
-
-0.4.5 / 2014-05-29
-==================
-
-  * Fix regression in empty header handling
-
-0.4.4 / 2014-05-29
-==================
-
-  * Fix behaviors when headers are not present
-
-0.4.3 / 2014-04-16
-==================
-
-  * Handle slashes on media params correctly
-
-0.4.2 / 2014-02-28
-==================
-
-  * Fix media type sorting
-  * Handle media types params strictly
-
-0.4.1 / 2014-01-16
-==================
-
-  * Use most specific matches
-
-0.4.0 / 2014-01-09
-==================
-
-  * Remove preferred prefix from methods
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/LICENSE b/node_modules/npm-registry-fetch/node_modules/negotiator/LICENSE
deleted file mode 100644
index ea6b9e2e9ac25..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2012-2014 Federico Romero
-Copyright (c) 2012-2014 Isaac Z. Schlueter
-Copyright (c) 2014-2015 Douglas Christopher Wilson
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/index.js b/node_modules/npm-registry-fetch/node_modules/negotiator/index.js
deleted file mode 100644
index 4f51315d6af4b..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/*!
- * negotiator
- * Copyright(c) 2012 Federico Romero
- * Copyright(c) 2012-2014 Isaac Z. Schlueter
- * Copyright(c) 2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-var preferredCharsets = require('./lib/charset')
-var preferredEncodings = require('./lib/encoding')
-var preferredLanguages = require('./lib/language')
-var preferredMediaTypes = require('./lib/mediaType')
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = Negotiator;
-module.exports.Negotiator = Negotiator;
-
-/**
- * Create a Negotiator instance from a request.
- * @param {object} request
- * @public
- */
-
-function Negotiator(request) {
-  if (!(this instanceof Negotiator)) {
-    return new Negotiator(request);
-  }
-
-  this.request = request;
-}
-
-Negotiator.prototype.charset = function charset(available) {
-  var set = this.charsets(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.charsets = function charsets(available) {
-  return preferredCharsets(this.request.headers['accept-charset'], available);
-};
-
-Negotiator.prototype.encoding = function encoding(available, opts) {
-  var set = this.encodings(available, opts);
-  return set && set[0];
-};
-
-Negotiator.prototype.encodings = function encodings(available, options) {
-  var opts = options || {};
-  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
-};
-
-Negotiator.prototype.language = function language(available) {
-  var set = this.languages(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.languages = function languages(available) {
-  return preferredLanguages(this.request.headers['accept-language'], available);
-};
-
-Negotiator.prototype.mediaType = function mediaType(available) {
-  var set = this.mediaTypes(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.mediaTypes = function mediaTypes(available) {
-  return preferredMediaTypes(this.request.headers.accept, available);
-};
-
-// Backwards compatibility
-Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
-Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
-Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
-Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
-Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
-Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
-Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
-Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/charset.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/charset.js
deleted file mode 100644
index cdd014803474a..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/charset.js
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredCharsets;
-module.exports.preferredCharsets = preferredCharsets;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Charset header.
- * @private
- */
-
-function parseAcceptCharset(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var charset = parseCharset(accepts[i].trim(), i);
-
-    if (charset) {
-      accepts[j++] = charset;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a charset from the Accept-Charset header.
- * @private
- */
-
-function parseCharset(str, i) {
-  var match = simpleCharsetRegExp.exec(str);
-  if (!match) return null;
-
-  var charset = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    charset: charset,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a charset.
- * @private
- */
-
-function getCharsetPriority(charset, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(charset, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the charset.
- * @private
- */
-
-function specify(charset, spec, index) {
-  var s = 0;
-  if(spec.charset.toLowerCase() === charset.toLowerCase()){
-    s |= 1;
-  } else if (spec.charset !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-}
-
-/**
- * Get the preferred charsets from an Accept-Charset header.
- * @public
- */
-
-function preferredCharsets(accept, provided) {
-  // RFC 2616 sec 14.2: no header = *
-  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all charsets
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullCharset);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getCharsetPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted charsets
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full charset string.
- * @private
- */
-
-function getFullCharset(spec) {
-  return spec.charset;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/encoding.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/encoding.js
deleted file mode 100644
index 9ebb633d67743..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/encoding.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredEncodings;
-module.exports.preferredEncodings = preferredEncodings;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Encoding header.
- * @private
- */
-
-function parseAcceptEncoding(accept) {
-  var accepts = accept.split(',');
-  var hasIdentity = false;
-  var minQuality = 1;
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var encoding = parseEncoding(accepts[i].trim(), i);
-
-    if (encoding) {
-      accepts[j++] = encoding;
-      hasIdentity = hasIdentity || specify('identity', encoding);
-      minQuality = Math.min(minQuality, encoding.q || 1);
-    }
-  }
-
-  if (!hasIdentity) {
-    /*
-     * If identity doesn't explicitly appear in the accept-encoding header,
-     * it's added to the list of acceptable encoding with the lowest q
-     */
-    accepts[j++] = {
-      encoding: 'identity',
-      q: minQuality,
-      i: i
-    };
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse an encoding from the Accept-Encoding header.
- * @private
- */
-
-function parseEncoding(str, i) {
-  var match = simpleEncodingRegExp.exec(str);
-  if (!match) return null;
-
-  var encoding = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';');
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    encoding: encoding,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of an encoding.
- * @private
- */
-
-function getEncodingPriority(encoding, accepted, index) {
-  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(encoding, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the encoding.
- * @private
- */
-
-function specify(encoding, spec, index) {
-  var s = 0;
-  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
-    s |= 1;
-  } else if (spec.encoding !== '*' ) {
-    return null
-  }
-
-  return {
-    encoding: encoding,
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred encodings from an Accept-Encoding header.
- * @public
- */
-
-function preferredEncodings(accept, provided, preferred) {
-  var accepts = parseAcceptEncoding(accept || '');
-
-  var comparator = preferred ? function comparator (a, b) {
-    if (a.q !== b.q) {
-      return b.q - a.q // higher quality first
-    }
-
-    var aPreferred = preferred.indexOf(a.encoding)
-    var bPreferred = preferred.indexOf(b.encoding)
-
-    if (aPreferred === -1 && bPreferred === -1) {
-      // consider the original specifity/order
-      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
-    }
-
-    if (aPreferred !== -1 && bPreferred !== -1) {
-      return aPreferred - bPreferred // consider the preferred order
-    }
-
-    return aPreferred === -1 ? 1 : -1 // preferred first
-  } : compareSpecs;
-
-  if (!provided) {
-    // sorted list of all encodings
-    return accepts
-      .filter(isQuality)
-      .sort(comparator)
-      .map(getFullEncoding);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getEncodingPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted encodings
-  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
-}
-
-/**
- * Get full encoding string.
- * @private
- */
-
-function getFullEncoding(spec) {
-  return spec.encoding;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/language.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/language.js
deleted file mode 100644
index a23167252719b..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/language.js
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredLanguages;
-module.exports.preferredLanguages = preferredLanguages;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Language header.
- * @private
- */
-
-function parseAcceptLanguage(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var language = parseLanguage(accepts[i].trim(), i);
-
-    if (language) {
-      accepts[j++] = language;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a language from the Accept-Language header.
- * @private
- */
-
-function parseLanguage(str, i) {
-  var match = simpleLanguageRegExp.exec(str);
-  if (!match) return null;
-
-  var prefix = match[1]
-  var suffix = match[2]
-  var full = prefix
-
-  if (suffix) full += "-" + suffix;
-
-  var q = 1;
-  if (match[3]) {
-    var params = match[3].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].split('=');
-      if (p[0] === 'q') q = parseFloat(p[1]);
-    }
-  }
-
-  return {
-    prefix: prefix,
-    suffix: suffix,
-    q: q,
-    i: i,
-    full: full
-  };
-}
-
-/**
- * Get the priority of a language.
- * @private
- */
-
-function getLanguagePriority(language, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(language, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the language.
- * @private
- */
-
-function specify(language, spec, index) {
-  var p = parseLanguage(language)
-  if (!p) return null;
-  var s = 0;
-  if(spec.full.toLowerCase() === p.full.toLowerCase()){
-    s |= 4;
-  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
-    s |= 2;
-  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
-    s |= 1;
-  } else if (spec.full !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred languages from an Accept-Language header.
- * @public
- */
-
-function preferredLanguages(accept, provided) {
-  // RFC 2616 sec 14.4: no header = *
-  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all languages
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullLanguage);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getLanguagePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted languages
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full language string.
- * @private
- */
-
-function getFullLanguage(spec) {
-  return spec.full;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/mediaType.js b/node_modules/npm-registry-fetch/node_modules/negotiator/lib/mediaType.js
deleted file mode 100644
index 8e402ea88394c..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/lib/mediaType.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredMediaTypes;
-module.exports.preferredMediaTypes = preferredMediaTypes;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept header.
- * @private
- */
-
-function parseAccept(accept) {
-  var accepts = splitMediaTypes(accept);
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var mediaType = parseMediaType(accepts[i].trim(), i);
-
-    if (mediaType) {
-      accepts[j++] = mediaType;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a media type from the Accept header.
- * @private
- */
-
-function parseMediaType(str, i) {
-  var match = simpleMediaTypeRegExp.exec(str);
-  if (!match) return null;
-
-  var params = Object.create(null);
-  var q = 1;
-  var subtype = match[2];
-  var type = match[1];
-
-  if (match[3]) {
-    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
-
-    for (var j = 0; j < kvps.length; j++) {
-      var pair = kvps[j];
-      var key = pair[0].toLowerCase();
-      var val = pair[1];
-
-      // get the value, unwrapping quotes
-      var value = val && val[0] === '"' && val[val.length - 1] === '"'
-        ? val.slice(1, -1)
-        : val;
-
-      if (key === 'q') {
-        q = parseFloat(value);
-        break;
-      }
-
-      // store parameter
-      params[key] = value;
-    }
-  }
-
-  return {
-    type: type,
-    subtype: subtype,
-    params: params,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a media type.
- * @private
- */
-
-function getMediaTypePriority(type, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(type, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the media type.
- * @private
- */
-
-function specify(type, spec, index) {
-  var p = parseMediaType(type);
-  var s = 0;
-
-  if (!p) {
-    return null;
-  }
-
-  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
-    s |= 4
-  } else if(spec.type != '*') {
-    return null;
-  }
-
-  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
-    s |= 2
-  } else if(spec.subtype != '*') {
-    return null;
-  }
-
-  var keys = Object.keys(spec.params);
-  if (keys.length > 0) {
-    if (keys.every(function (k) {
-      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
-    })) {
-      s |= 1
-    } else {
-      return null
-    }
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s,
-  }
-}
-
-/**
- * Get the preferred media types from an Accept header.
- * @public
- */
-
-function preferredMediaTypes(accept, provided) {
-  // RFC 2616 sec 14.2: no header = */*
-  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all types
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullType);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getMediaTypePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted types
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full type string.
- * @private
- */
-
-function getFullType(spec) {
-  return spec.type + '/' + spec.subtype;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
-
-/**
- * Count the number of quotes in a string.
- * @private
- */
-
-function quoteCount(string) {
-  var count = 0;
-  var index = 0;
-
-  while ((index = string.indexOf('"', index)) !== -1) {
-    count++;
-    index++;
-  }
-
-  return count;
-}
-
-/**
- * Split a key value pair.
- * @private
- */
-
-function splitKeyValuePair(str) {
-  var index = str.indexOf('=');
-  var key;
-  var val;
-
-  if (index === -1) {
-    key = str;
-  } else {
-    key = str.slice(0, index);
-    val = str.slice(index + 1);
-  }
-
-  return [key, val];
-}
-
-/**
- * Split an Accept header into media types.
- * @private
- */
-
-function splitMediaTypes(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 1, j = 0; i < accepts.length; i++) {
-    if (quoteCount(accepts[j]) % 2 == 0) {
-      accepts[++j] = accepts[i];
-    } else {
-      accepts[j] += ',' + accepts[i];
-    }
-  }
-
-  // trim accepts
-  accepts.length = j + 1;
-
-  return accepts;
-}
-
-/**
- * Split a string of parameters.
- * @private
- */
-
-function splitParameters(str) {
-  var parameters = str.split(';');
-
-  for (var i = 1, j = 0; i < parameters.length; i++) {
-    if (quoteCount(parameters[j]) % 2 == 0) {
-      parameters[++j] = parameters[i];
-    } else {
-      parameters[j] += ';' + parameters[i];
-    }
-  }
-
-  // trim parameters
-  parameters.length = j + 1;
-
-  for (var i = 0; i < parameters.length; i++) {
-    parameters[i] = parameters[i].trim();
-  }
-
-  return parameters;
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/negotiator/package.json b/node_modules/npm-registry-fetch/node_modules/negotiator/package.json
deleted file mode 100644
index e4bdc1ef4f748..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/negotiator/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-  "name": "negotiator",
-  "description": "HTTP content negotiation",
-  "version": "1.0.0",
-  "contributors": [
-    "Douglas Christopher Wilson ",
-    "Federico Romero ",
-    "Isaac Z. Schlueter  (http://blog.izs.me/)"
-  ],
-  "license": "MIT",
-  "keywords": [
-    "http",
-    "content negotiation",
-    "accept",
-    "accept-language",
-    "accept-encoding",
-    "accept-charset"
-  ],
-  "repository": "jshttp/negotiator",
-  "devDependencies": {
-    "eslint": "7.32.0",
-    "eslint-plugin-markdown": "2.2.1",
-    "mocha": "9.1.3",
-    "nyc": "15.1.0"
-  },
-  "files": [
-    "lib/",
-    "HISTORY.md",
-    "LICENSE",
-    "index.js",
-    "README.md"
-  ],
-  "engines": {
-    "node": ">= 0.6"
-  },
-  "scripts": {
-    "lint": "eslint .",
-    "test": "mocha --reporter spec --check-leaks --bail test/",
-    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
-    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
-    "test-cov": "nyc --reporter=html --reporter=text npm test"
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
deleted file mode 100644
index 1808eb2844231..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright 2017-2022 (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
deleted file mode 100644
index bfcfacbcc95e1..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js
+++ /dev/null
@@ -1,471 +0,0 @@
-const { Request, Response } = require('minipass-fetch')
-const { Minipass } = require('minipass')
-const MinipassFlush = require('minipass-flush')
-const cacache = require('cacache')
-const url = require('url')
-
-const CachingMinipassPipeline = require('../pipeline.js')
-const CachePolicy = require('./policy.js')
-const cacheKey = require('./key.js')
-const remote = require('../remote.js')
-
-const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop)
-
-// allow list for request headers that will be written to the cache index
-// note: we will also store any request headers
-// that are named in a response's vary header
-const KEEP_REQUEST_HEADERS = [
-  'accept-charset',
-  'accept-encoding',
-  'accept-language',
-  'accept',
-  'cache-control',
-]
-
-// allow list for response headers that will be written to the cache index
-// note: we must not store the real response's age header, or when we load
-// a cache policy based on the metadata it will think the cached response
-// is always stale
-const KEEP_RESPONSE_HEADERS = [
-  'cache-control',
-  'content-encoding',
-  'content-language',
-  'content-type',
-  'date',
-  'etag',
-  'expires',
-  'last-modified',
-  'link',
-  'location',
-  'pragma',
-  'vary',
-]
-
-// return an object containing all metadata to be written to the index
-const getMetadata = (request, response, options) => {
-  const metadata = {
-    time: Date.now(),
-    url: request.url,
-    reqHeaders: {},
-    resHeaders: {},
-
-    // options on which we must match the request and vary the response
-    options: {
-      compress: options.compress != null ? options.compress : request.compress,
-    },
-  }
-
-  // only save the status if it's not a 200 or 304
-  if (response.status !== 200 && response.status !== 304) {
-    metadata.status = response.status
-  }
-
-  for (const name of KEEP_REQUEST_HEADERS) {
-    if (request.headers.has(name)) {
-      metadata.reqHeaders[name] = request.headers.get(name)
-    }
-  }
-
-  // if the request's host header differs from the host in the url
-  // we need to keep it, otherwise it's just noise and we ignore it
-  const host = request.headers.get('host')
-  const parsedUrl = new url.URL(request.url)
-  if (host && parsedUrl.host !== host) {
-    metadata.reqHeaders.host = host
-  }
-
-  // if the response has a vary header, make sure
-  // we store the relevant request headers too
-  if (response.headers.has('vary')) {
-    const vary = response.headers.get('vary')
-    // a vary of "*" means every header causes a different response.
-    // in that scenario, we do not include any additional headers
-    // as the freshness check will always fail anyway and we don't
-    // want to bloat the cache indexes
-    if (vary !== '*') {
-      // copy any other request headers that will vary the response
-      const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/)
-      for (const name of varyHeaders) {
-        if (request.headers.has(name)) {
-          metadata.reqHeaders[name] = request.headers.get(name)
-        }
-      }
-    }
-  }
-
-  for (const name of KEEP_RESPONSE_HEADERS) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  for (const name of options.cacheAdditionalHeaders) {
-    if (response.headers.has(name)) {
-      metadata.resHeaders[name] = response.headers.get(name)
-    }
-  }
-
-  return metadata
-}
-
-// symbols used to hide objects that may be lazily evaluated in a getter
-const _request = Symbol('request')
-const _response = Symbol('response')
-const _policy = Symbol('policy')
-
-class CacheEntry {
-  constructor ({ entry, request, response, options }) {
-    if (entry) {
-      this.key = entry.key
-      this.entry = entry
-      // previous versions of this module didn't write an explicit timestamp in
-      // the metadata, so fall back to the entry's timestamp. we can't use the
-      // entry timestamp to determine staleness because cacache will update it
-      // when it verifies its data
-      this.entry.metadata.time = this.entry.metadata.time || this.entry.time
-    } else {
-      this.key = cacheKey(request)
-    }
-
-    this.options = options
-
-    // these properties are behind getters that lazily evaluate
-    this[_request] = request
-    this[_response] = response
-    this[_policy] = null
-  }
-
-  // returns a CacheEntry instance that satisfies the given request
-  // or undefined if no existing entry satisfies
-  static async find (request, options) {
-    try {
-      // compacts the index and returns an array of unique entries
-      var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => {
-        const entryA = new CacheEntry({ entry: A, options })
-        const entryB = new CacheEntry({ entry: B, options })
-        return entryA.policy.satisfies(entryB.request)
-      }, {
-        validateEntry: (entry) => {
-          // clean out entries with a buggy content-encoding value
-          if (entry.metadata &&
-              entry.metadata.resHeaders &&
-              entry.metadata.resHeaders['content-encoding'] === null) {
-            return false
-          }
-
-          // if an integrity is null, it needs to have a status specified
-          if (entry.integrity === null) {
-            return !!(entry.metadata && entry.metadata.status)
-          }
-
-          return true
-        },
-      })
-    } catch (err) {
-      // if the compact request fails, ignore the error and return
-      return
-    }
-
-    // a cache mode of 'reload' means to behave as though we have no cache
-    // on the way to the network. return undefined to allow cacheFetch to
-    // create a brand new request no matter what.
-    if (options.cache === 'reload') {
-      return
-    }
-
-    // find the specific entry that satisfies the request
-    let match
-    for (const entry of matches) {
-      const _entry = new CacheEntry({
-        entry,
-        options,
-      })
-
-      if (_entry.policy.satisfies(request)) {
-        match = _entry
-        break
-      }
-    }
-
-    return match
-  }
-
-  // if the user made a PUT/POST/PATCH then we invalidate our
-  // cache for the same url by deleting the index entirely
-  static async invalidate (request, options) {
-    const key = cacheKey(request)
-    try {
-      await cacache.rm.entry(options.cachePath, key, { removeFully: true })
-    } catch (err) {
-      // ignore errors
-    }
-  }
-
-  get request () {
-    if (!this[_request]) {
-      this[_request] = new Request(this.entry.metadata.url, {
-        method: 'GET',
-        headers: this.entry.metadata.reqHeaders,
-        ...this.entry.metadata.options,
-      })
-    }
-
-    return this[_request]
-  }
-
-  get response () {
-    if (!this[_response]) {
-      this[_response] = new Response(null, {
-        url: this.entry.metadata.url,
-        counter: this.options.counter,
-        status: this.entry.metadata.status || 200,
-        headers: {
-          ...this.entry.metadata.resHeaders,
-          'content-length': this.entry.size,
-        },
-      })
-    }
-
-    return this[_response]
-  }
-
-  get policy () {
-    if (!this[_policy]) {
-      this[_policy] = new CachePolicy({
-        entry: this.entry,
-        request: this.request,
-        response: this.response,
-        options: this.options,
-      })
-    }
-
-    return this[_policy]
-  }
-
-  // wraps the response in a pipeline that stores the data
-  // in the cache while the user consumes it
-  async store (status) {
-    // if the request is not a GET, the status is not 200, 301, or 308,
-    // or the CachePolicy forbids storage, append the
-    // cache status header and return the response untouched
-    if (
-      this.request.method !== 'GET' ||
-      ![200, 301, 308].includes(this.response.status) ||
-      !this.policy.storable()
-    ) {
-      this.response.headers.set('x-local-cache-status', 'skip')
-      return this.response
-    }
-
-    const size = this.response.headers.get('content-length')
-    const cacheOpts = {
-      algorithms: this.options.algorithms,
-      metadata: getMetadata(this.request, this.response, this.options),
-      size,
-      integrity: this.options.integrity,
-      integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body,
-    }
-
-    let body = null
-    // we only set a body if the status is a 200, redirects are
-    // stored as metadata only
-    if (this.response.status === 200) {
-      let cacheWriteResolve, cacheWriteReject
-      const cacheWritePromise = new Promise((resolve, reject) => {
-        cacheWriteResolve = resolve
-        cacheWriteReject = reject
-      }).catch((err) => {
-        body.emit('error', err)
-      })
-
-      body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({
-        flush () {
-          return cacheWritePromise
-        },
-      }))
-      // this is always true since if we aren't reusing the one from the remote fetch, we
-      // are using the one from cacache
-      body.hasIntegrityEmitter = true
-
-      const onResume = () => {
-        const tee = new Minipass()
-        const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts)
-        // re-emit the integrity and size events on our new response body so they can be reused
-        cacheStream.on('integrity', i => body.emit('integrity', i))
-        cacheStream.on('size', s => body.emit('size', s))
-        // stick a flag on here so downstream users will know if they can expect integrity events
-        tee.pipe(cacheStream)
-        // TODO if the cache write fails, log a warning but return the response anyway
-        // eslint-disable-next-line promise/catch-or-return
-        cacheStream.promise().then(cacheWriteResolve, cacheWriteReject)
-        body.unshift(tee)
-        body.unshift(this.response.body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-    } else {
-      await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts)
-    }
-
-    // note: we do not set the x-local-cache-hash header because we do not know
-    // the hash value until after the write to the cache completes, which doesn't
-    // happen until after the response has been sent and it's too late to write
-    // the header anyway
-    this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    this.response.headers.set('x-local-cache-mode', 'stream')
-    this.response.headers.set('x-local-cache-status', status)
-    this.response.headers.set('x-local-cache-time', new Date().toISOString())
-    const newResponse = new Response(body, {
-      url: this.response.url,
-      status: this.response.status,
-      headers: this.response.headers,
-      counter: this.options.counter,
-    })
-    return newResponse
-  }
-
-  // use the cached data to create a response and return it
-  async respond (method, options, status) {
-    let response
-    if (method === 'HEAD' || [301, 308].includes(this.response.status)) {
-      // if the request is a HEAD, or the response is a redirect,
-      // then the metadata in the entry already includes everything
-      // we need to build a response
-      response = this.response
-    } else {
-      // we're responding with a full cached response, so create a body
-      // that reads from cacache and attach it to a new Response
-      const body = new Minipass()
-      const headers = { ...this.policy.responseHeaders() }
-
-      const onResume = () => {
-        const cacheStream = cacache.get.stream.byDigest(
-          this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-        )
-        cacheStream.on('error', async (err) => {
-          cacheStream.pause()
-          if (err.code === 'EINTEGRITY') {
-            await cacache.rm.content(
-              this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize }
-            )
-          }
-          if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') {
-            await CacheEntry.invalidate(this.request, this.options)
-          }
-          body.emit('error', err)
-          cacheStream.resume()
-        })
-        // emit the integrity and size events based on our metadata so we're consistent
-        body.emit('integrity', this.entry.integrity)
-        body.emit('size', Number(headers['content-length']))
-        cacheStream.pipe(body)
-      }
-
-      body.once('resume', onResume)
-      body.once('end', () => body.removeListener('resume', onResume))
-      response = new Response(body, {
-        url: this.entry.metadata.url,
-        counter: options.counter,
-        status: 200,
-        headers,
-      })
-    }
-
-    response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath))
-    response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity))
-    response.headers.set('x-local-cache-key', encodeURIComponent(this.key))
-    response.headers.set('x-local-cache-mode', 'stream')
-    response.headers.set('x-local-cache-status', status)
-    response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString())
-    return response
-  }
-
-  // use the provided request along with this cache entry to
-  // revalidate the stored response. returns a response, either
-  // from the cache or from the update
-  async revalidate (request, options) {
-    const revalidateRequest = new Request(request, {
-      headers: this.policy.revalidationHeaders(request),
-    })
-
-    try {
-      // NOTE: be sure to remove the headers property from the
-      // user supplied options, since we have already defined
-      // them on the new request object. if they're still in the
-      // options then those will overwrite the ones from the policy
-      var response = await remote(revalidateRequest, {
-        ...options,
-        headers: undefined,
-      })
-    } catch (err) {
-      // if the network fetch fails, return the stale
-      // cached response unless it has a cache-control
-      // of 'must-revalidate'
-      if (!this.policy.mustRevalidate) {
-        return this.respond(request.method, options, 'stale')
-      }
-
-      throw err
-    }
-
-    if (this.policy.revalidated(revalidateRequest, response)) {
-      // we got a 304, write a new index to the cache and respond from cache
-      const metadata = getMetadata(request, response, options)
-      // 304 responses do not include headers that are specific to the response data
-      // since they do not include a body, so we copy values for headers that were
-      // in the old cache entry to the new one, if the new metadata does not already
-      // include that header
-      for (const name of KEEP_RESPONSE_HEADERS) {
-        if (
-          !hasOwnProperty(metadata.resHeaders, name) &&
-          hasOwnProperty(this.entry.metadata.resHeaders, name)
-        ) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-      }
-
-      for (const name of options.cacheAdditionalHeaders) {
-        const inMeta = hasOwnProperty(metadata.resHeaders, name)
-        const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name)
-        const inPolicy = hasOwnProperty(this.policy.response.headers, name)
-
-        // if the header is in the existing entry, but it is not in the metadata
-        // then we need to write it to the metadata as this will refresh the on-disk cache
-        if (!inMeta && inEntry) {
-          metadata.resHeaders[name] = this.entry.metadata.resHeaders[name]
-        }
-        // if the header is in the metadata, but not in the policy, then we need to set
-        // it in the policy so that it's included in the immediate response. future
-        // responses will load a new cache entry, so we don't need to change that
-        if (!inPolicy && inMeta) {
-          this.policy.response.headers[name] = metadata.resHeaders[name]
-        }
-      }
-
-      try {
-        await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, {
-          size: this.entry.size,
-          metadata,
-        })
-      } catch (err) {
-        // if updating the cache index fails, we ignore it and
-        // respond anyway
-      }
-      return this.respond(request.method, options, 'revalidated')
-    }
-
-    // if we got a modified response, create a new entry based on it
-    const newEntry = new CacheEntry({
-      request,
-      response,
-      options,
-    })
-
-    // respond with the new entry while writing it to the cache
-    return newEntry.store('updated')
-  }
-}
-
-module.exports = CacheEntry
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
deleted file mode 100644
index 67a66573bebe6..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js
+++ /dev/null
@@ -1,11 +0,0 @@
-class NotCachedError extends Error {
-  constructor (url) {
-    /* eslint-disable-next-line max-len */
-    super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`)
-    this.code = 'ENOTCACHED'
-  }
-}
-
-module.exports = {
-  NotCachedError,
-}
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
deleted file mode 100644
index 0de49d23fb933..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js
+++ /dev/null
@@ -1,49 +0,0 @@
-const { NotCachedError } = require('./errors.js')
-const CacheEntry = require('./entry.js')
-const remote = require('../remote.js')
-
-// do whatever is necessary to get a Response and return it
-const cacheFetch = async (request, options) => {
-  // try to find a cached entry that satisfies this request
-  const entry = await CacheEntry.find(request, options)
-  if (!entry) {
-    // no cached result, if the cache mode is 'only-if-cached' that's a failure
-    if (options.cache === 'only-if-cached') {
-      throw new NotCachedError(request.url)
-    }
-
-    // otherwise, we make a request, store it and return it
-    const response = await remote(request, options)
-    const newEntry = new CacheEntry({ request, response, options })
-    return newEntry.store('miss')
-  }
-
-  // we have a cached response that satisfies this request, however if the cache
-  // mode is 'no-cache' then we send the revalidation request no matter what
-  if (options.cache === 'no-cache') {
-    return entry.revalidate(request, options)
-  }
-
-  // if the cached entry is not stale, or if the cache mode is 'force-cache' or
-  // 'only-if-cached' we can respond with the cached entry. set the status
-  // based on the result of needsRevalidation and respond
-  const _needsRevalidation = entry.policy.needsRevalidation(request)
-  if (options.cache === 'force-cache' ||
-      options.cache === 'only-if-cached' ||
-      !_needsRevalidation) {
-    return entry.respond(request.method, options, _needsRevalidation ? 'stale' : 'hit')
-  }
-
-  // if we got here, the cache entry is stale so revalidate it
-  return entry.revalidate(request, options)
-}
-
-cacheFetch.invalidate = async (request, options) => {
-  if (!options.cachePath) {
-    return
-  }
-
-  return CacheEntry.invalidate(request, options)
-}
-
-module.exports = cacheFetch
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
deleted file mode 100644
index f7684d562b7fa..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js
+++ /dev/null
@@ -1,17 +0,0 @@
-const { URL, format } = require('url')
-
-// options passed to url.format() when generating a key
-const formatOptions = {
-  auth: false,
-  fragment: false,
-  search: true,
-  unicode: false,
-}
-
-// returns a string to be used as the cache key for the Request
-const cacheKey = (request) => {
-  const parsed = new URL(request.url)
-  return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}`
-}
-
-module.exports = cacheKey
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
deleted file mode 100644
index ada3c8600dae9..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const CacheSemantics = require('http-cache-semantics')
-const Negotiator = require('negotiator')
-const ssri = require('ssri')
-
-// options passed to http-cache-semantics constructor
-const policyOptions = {
-  shared: false,
-  ignoreCargoCult: true,
-}
-
-// a fake empty response, used when only testing the
-// request for storability
-const emptyResponse = { status: 200, headers: {} }
-
-// returns a plain object representation of the Request
-const requestObject = (request) => {
-  const _obj = {
-    method: request.method,
-    url: request.url,
-    headers: {},
-    compress: request.compress,
-  }
-
-  request.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-// returns a plain object representation of the Response
-const responseObject = (response) => {
-  const _obj = {
-    status: response.status,
-    headers: {},
-  }
-
-  response.headers.forEach((value, key) => {
-    _obj.headers[key] = value
-  })
-
-  return _obj
-}
-
-class CachePolicy {
-  constructor ({ entry, request, response, options }) {
-    this.entry = entry
-    this.request = requestObject(request)
-    this.response = responseObject(response)
-    this.options = options
-    this.policy = new CacheSemantics(this.request, this.response, policyOptions)
-
-    if (this.entry) {
-      // if we have an entry, copy the timestamp to the _responseTime
-      // this is necessary because the CacheSemantics constructor forces
-      // the value to Date.now() which means a policy created from a
-      // cache entry is likely to always identify itself as stale
-      this.policy._responseTime = this.entry.metadata.time
-    }
-  }
-
-  // static method to quickly determine if a request alone is storable
-  static storable (request, options) {
-    // no cachePath means no caching
-    if (!options.cachePath) {
-      return false
-    }
-
-    // user explicitly asked not to cache
-    if (options.cache === 'no-store') {
-      return false
-    }
-
-    // we only cache GET and HEAD requests
-    if (!['GET', 'HEAD'].includes(request.method)) {
-      return false
-    }
-
-    // otherwise, let http-cache-semantics make the decision
-    // based on the request's headers
-    const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions)
-    return policy.storable()
-  }
-
-  // returns true if the policy satisfies the request
-  satisfies (request) {
-    const _req = requestObject(request)
-    if (this.request.headers.host !== _req.headers.host) {
-      return false
-    }
-
-    if (this.request.compress !== _req.compress) {
-      return false
-    }
-
-    const negotiatorA = new Negotiator(this.request)
-    const negotiatorB = new Negotiator(_req)
-
-    if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) {
-      return false
-    }
-
-    if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) {
-      return false
-    }
-
-    if (this.options.integrity) {
-      return ssri.parse(this.options.integrity).match(this.entry.integrity)
-    }
-
-    return true
-  }
-
-  // returns true if the request and response allow caching
-  storable () {
-    return this.policy.storable()
-  }
-
-  // NOTE: this is a hack to avoid parsing the cache-control
-  // header ourselves; it returns true if the response's
-  // cache-control contains must-revalidate
-  get mustRevalidate () {
-    return !!this.policy._rescc['must-revalidate']
-  }
-
-  // returns true if the cached response requires revalidation
-  // for the given request
-  needsRevalidation (request) {
-    const _req = requestObject(request)
-    // force method to GET because we only cache GETs
-    // but can serve a HEAD from a cached GET
-    _req.method = 'GET'
-    return !this.policy.satisfiesWithoutRevalidation(_req)
-  }
-
-  responseHeaders () {
-    return this.policy.responseHeaders()
-  }
-
-  // returns a new object containing the appropriate headers
-  // to send a revalidation request
-  revalidationHeaders (request) {
-    const _req = requestObject(request)
-    return this.policy.revalidationHeaders(_req)
-  }
-
-  // returns true if the request/response was revalidated
-  // successfully. returns false if a new response was received
-  revalidated (request, response) {
-    const _req = requestObject(request)
-    const _res = responseObject(response)
-    const policy = this.policy.revalidatedPolicy(_req, _res)
-    return !policy.modified
-  }
-}
-
-module.exports = CachePolicy
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
deleted file mode 100644
index 233ba67e16550..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js
+++ /dev/null
@@ -1,118 +0,0 @@
-'use strict'
-
-const { FetchError, Request, isRedirect } = require('minipass-fetch')
-const url = require('url')
-
-const CachePolicy = require('./cache/policy.js')
-const cache = require('./cache/index.js')
-const remote = require('./remote.js')
-
-// given a Request, a Response and user options
-// return true if the response is a redirect that
-// can be followed. we throw errors that will result
-// in the fetch being rejected if the redirect is
-// possible but invalid for some reason
-const canFollowRedirect = (request, response, options) => {
-  if (!isRedirect(response.status)) {
-    return false
-  }
-
-  if (options.redirect === 'manual') {
-    return false
-  }
-
-  if (options.redirect === 'error') {
-    throw new FetchError(`redirect mode is set to error: ${request.url}`,
-      'no-redirect', { code: 'ENOREDIRECT' })
-  }
-
-  if (!response.headers.has('location')) {
-    throw new FetchError(`redirect location header missing for: ${request.url}`,
-      'no-location', { code: 'EINVALIDREDIRECT' })
-  }
-
-  if (request.counter >= request.follow) {
-    throw new FetchError(`maximum redirect reached at: ${request.url}`,
-      'max-redirect', { code: 'EMAXREDIRECT' })
-  }
-
-  return true
-}
-
-// given a Request, a Response, and the user's options return an object
-// with a new Request and a new options object that will be used for
-// following the redirect
-const getRedirect = (request, response, options) => {
-  const _opts = { ...options }
-  const location = response.headers.get('location')
-  const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url)
-  // Comment below is used under the following license:
-  /**
-   * @license
-   * Copyright (c) 2010-2012 Mikeal Rogers
-   * Licensed under the Apache License, Version 2.0 (the "License");
-   * you may not use this file except in compliance with the License.
-   * You may obtain a copy of the License at
-   * http://www.apache.org/licenses/LICENSE-2.0
-   * Unless required by applicable law or agreed to in writing,
-   * software distributed under the License is distributed on an "AS
-   * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
-   * express or implied. See the License for the specific language
-   * governing permissions and limitations under the License.
-   */
-
-  // Remove authorization if changing hostnames (but not if just
-  // changing ports or protocols).  This matches the behavior of request:
-  // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138
-  if (new url.URL(request.url).hostname !== redirectUrl.hostname) {
-    request.headers.delete('authorization')
-    request.headers.delete('cookie')
-  }
-
-  // for POST request with 301/302 response, or any request with 303 response,
-  // use GET when following redirect
-  if (
-    response.status === 303 ||
-    (request.method === 'POST' && [301, 302].includes(response.status))
-  ) {
-    _opts.method = 'GET'
-    _opts.body = null
-    request.headers.delete('content-length')
-  }
-
-  _opts.headers = {}
-  request.headers.forEach((value, key) => {
-    _opts.headers[key] = value
-  })
-
-  _opts.counter = ++request.counter
-  const redirectReq = new Request(url.format(redirectUrl), _opts)
-  return {
-    request: redirectReq,
-    options: _opts,
-  }
-}
-
-const fetch = async (request, options) => {
-  const response = CachePolicy.storable(request, options)
-    ? await cache(request, options)
-    : await remote(request, options)
-
-  // if the request wasn't a GET or HEAD, and the response
-  // status is between 200 and 399 inclusive, invalidate the
-  // request url
-  if (!['GET', 'HEAD'].includes(request.method) &&
-      response.status >= 200 &&
-      response.status <= 399) {
-    await cache.invalidate(request, options)
-  }
-
-  if (!canFollowRedirect(request, response, options)) {
-    return response
-  }
-
-  const redirect = getRedirect(request, response, options)
-  return fetch(redirect.request, redirect.options)
-}
-
-module.exports = fetch
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
deleted file mode 100644
index 2f12e8e1b6113..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js
+++ /dev/null
@@ -1,41 +0,0 @@
-const { FetchError, Headers, Request, Response } = require('minipass-fetch')
-
-const configureOptions = require('./options.js')
-const fetch = require('./fetch.js')
-
-const makeFetchHappen = (url, opts) => {
-  const options = configureOptions(opts)
-
-  const request = new Request(url, options)
-  return fetch(request, options)
-}
-
-makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => {
-  if (typeof defaultUrl === 'object') {
-    defaultOptions = defaultUrl
-    defaultUrl = null
-  }
-
-  const defaultedFetch = (url, options = {}) => {
-    const finalUrl = url || defaultUrl
-    const finalOptions = {
-      ...defaultOptions,
-      ...options,
-      headers: {
-        ...defaultOptions.headers,
-        ...options.headers,
-      },
-    }
-    return wrappedFetch(finalUrl, finalOptions)
-  }
-
-  defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) =>
-    makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch)
-  return defaultedFetch
-}
-
-module.exports = makeFetchHappen
-module.exports.FetchError = FetchError
-module.exports.Headers = Headers
-module.exports.Request = Request
-module.exports.Response = Response
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
deleted file mode 100644
index db51cc6324817..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js
+++ /dev/null
@@ -1,59 +0,0 @@
-const dns = require('dns')
-
-const conditionalHeaders = [
-  'if-modified-since',
-  'if-none-match',
-  'if-unmodified-since',
-  'if-match',
-  'if-range',
-]
-
-const configureOptions = (opts) => {
-  const { strictSSL, ...options } = { ...opts }
-  options.method = options.method ? options.method.toUpperCase() : 'GET'
-
-  if (strictSSL === undefined || strictSSL === null) {
-    options.rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0'
-  } else {
-    options.rejectUnauthorized = strictSSL !== false
-  }
-
-  if (!options.retry) {
-    options.retry = { retries: 0 }
-  } else if (typeof options.retry === 'string') {
-    const retries = parseInt(options.retry, 10)
-    if (isFinite(retries)) {
-      options.retry = { retries }
-    } else {
-      options.retry = { retries: 0 }
-    }
-  } else if (typeof options.retry === 'number') {
-    options.retry = { retries: options.retry }
-  } else {
-    options.retry = { retries: 0, ...options.retry }
-  }
-
-  options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns }
-
-  options.cache = options.cache || 'default'
-  if (options.cache === 'default') {
-    const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => {
-      return conditionalHeaders.includes(name.toLowerCase())
-    })
-    if (hasConditionalHeader) {
-      options.cache = 'no-store'
-    }
-  }
-
-  options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || []
-
-  // cacheManager is deprecated, but if it's set and
-  // cachePath is not we should copy it to the new field
-  if (options.cacheManager && !options.cachePath) {
-    options.cachePath = options.cacheManager
-  }
-
-  return options
-}
-
-module.exports = configureOptions
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
deleted file mode 100644
index b1d221b2d0ce3..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js
+++ /dev/null
@@ -1,41 +0,0 @@
-'use strict'
-
-const MinipassPipeline = require('minipass-pipeline')
-
-class CachingMinipassPipeline extends MinipassPipeline {
-  #events = []
-  #data = new Map()
-
-  constructor (opts, ...streams) {
-    // CRITICAL: do NOT pass the streams to the call to super(), this will start
-    // the flow of data and potentially cause the events we need to catch to emit
-    // before we've finished our own setup. instead we call super() with no args,
-    // finish our setup, and then push the streams into ourselves to start the
-    // data flow
-    super()
-    this.#events = opts.events
-
-    /* istanbul ignore next - coverage disabled because this is pointless to test here */
-    if (streams.length) {
-      this.push(...streams)
-    }
-  }
-
-  on (event, handler) {
-    if (this.#events.includes(event) && this.#data.has(event)) {
-      return handler(...this.#data.get(event))
-    }
-
-    return super.on(event, handler)
-  }
-
-  emit (event, ...data) {
-    if (this.#events.includes(event)) {
-      this.#data.set(event, data)
-    }
-
-    return super.emit(event, ...data)
-  }
-}
-
-module.exports = CachingMinipassPipeline
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
deleted file mode 100644
index 1d640e5380baa..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js
+++ /dev/null
@@ -1,132 +0,0 @@
-const { Minipass } = require('minipass')
-const fetch = require('minipass-fetch')
-const promiseRetry = require('promise-retry')
-const ssri = require('ssri')
-const { log } = require('proc-log')
-
-const CachingMinipassPipeline = require('./pipeline.js')
-const { getAgent } = require('@npmcli/agent')
-const pkg = require('../package.json')
-
-const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})`
-
-const RETRY_ERRORS = [
-  'ECONNRESET', // remote socket closed on us
-  'ECONNREFUSED', // remote host refused to open connection
-  'EADDRINUSE', // failed to bind to a local port (proxy?)
-  'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW
-  // from @npmcli/agent
-  'ECONNECTIONTIMEOUT',
-  'EIDLETIMEOUT',
-  'ERESPONSETIMEOUT',
-  'ETRANSFERTIMEOUT',
-  // Known codes we do NOT retry on:
-  // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline)
-  // EINVALIDPROXY // invalid protocol from @npmcli/agent
-  // EINVALIDRESPONSE // invalid status code from @npmcli/agent
-]
-
-const RETRY_TYPES = [
-  'request-timeout',
-]
-
-// make a request directly to the remote source,
-// retrying certain classes of errors as well as
-// following redirects (through the cache if necessary)
-// and verifying response integrity
-const remoteFetch = (request, options) => {
-  // options.signal is intended for the fetch itself, not the agent.  Attaching it to the agent will re-use that signal across multiple requests, which prevents any connections beyond the first one.
-  const agent = getAgent(request.url, { ...options, signal: undefined })
-  if (!request.headers.has('connection')) {
-    request.headers.set('connection', agent ? 'keep-alive' : 'close')
-  }
-
-  if (!request.headers.has('user-agent')) {
-    request.headers.set('user-agent', USER_AGENT)
-  }
-
-  // keep our own options since we're overriding the agent
-  // and the redirect mode
-  const _opts = {
-    ...options,
-    agent,
-    redirect: 'manual',
-  }
-
-  return promiseRetry(async (retryHandler, attemptNum) => {
-    const req = new fetch.Request(request, _opts)
-    try {
-      let res = await fetch(req, _opts)
-      if (_opts.integrity && res.status === 200) {
-        // we got a 200 response and the user has specified an expected
-        // integrity value, so wrap the response in an ssri stream to verify it
-        const integrityStream = ssri.integrityStream({
-          algorithms: _opts.algorithms,
-          integrity: _opts.integrity,
-          size: _opts.size,
-        })
-        const pipeline = new CachingMinipassPipeline({
-          events: ['integrity', 'size'],
-        }, res.body, integrityStream)
-        // we also propagate the integrity and size events out to the pipeline so we can use
-        // this new response body as an integrityEmitter for cacache
-        integrityStream.on('integrity', i => pipeline.emit('integrity', i))
-        integrityStream.on('size', s => pipeline.emit('size', s))
-        res = new fetch.Response(pipeline, res)
-        // set an explicit flag so we know if our response body will emit integrity and size
-        res.body.hasIntegrityEmitter = true
-      }
-
-      res.headers.set('x-fetch-attempts', attemptNum)
-
-      // do not retry POST requests, or requests with a streaming body
-      // do retry requests with a 408, 420, 429 or 500+ status in the response
-      const isStream = Minipass.isStream(req.body)
-      const isRetriable = req.method !== 'POST' &&
-          !isStream &&
-          ([408, 420, 429].includes(res.status) || res.status >= 500)
-
-      if (isRetriable) {
-        if (typeof options.onRetry === 'function') {
-          options.onRetry(res)
-        }
-
-        /* eslint-disable-next-line max-len */
-        log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`)
-        return retryHandler(res)
-      }
-
-      return res
-    } catch (err) {
-      const code = (err.code === 'EPROMISERETRY')
-        ? err.retried.code
-        : err.code
-
-      // err.retried will be the thing that was thrown from above
-      // if it's a response, we just got a bad status code and we
-      // can re-throw to allow the retry
-      const isRetryError = err.retried instanceof fetch.Response ||
-        (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type))
-
-      if (req.method === 'POST' || isRetryError) {
-        throw err
-      }
-
-      if (typeof options.onRetry === 'function') {
-        options.onRetry(err)
-      }
-
-      log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`)
-      return retryHandler(err)
-    }
-  }, options.retry).catch((err) => {
-    // don't reject for http errors, just return them
-    if (err.status >= 400 && err.type !== 'system') {
-      return err
-    }
-
-    throw err
-  })
-}
-
-module.exports = remoteFetch
diff --git a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
deleted file mode 100644
index 1e27d4ee8a70e..0000000000000
--- a/node_modules/tuf-js/node_modules/make-fetch-happen/package.json
+++ /dev/null
@@ -1,74 +0,0 @@
-{
-  "name": "make-fetch-happen",
-  "version": "15.0.1",
-  "description": "Opinionated, caching, retrying fetch client",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "test": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "postlint": "template-oss-check",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/make-fetch-happen.git"
-  },
-  "keywords": [
-    "http",
-    "request",
-    "fetch",
-    "mean girls",
-    "caching",
-    "cache",
-    "subresource integrity"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "@npmcli/agent": "^3.0.0",
-    "cacache": "^20.0.1",
-    "http-cache-semantics": "^4.1.1",
-    "minipass": "^7.0.2",
-    "minipass-fetch": "^4.0.0",
-    "minipass-flush": "^1.0.5",
-    "minipass-pipeline": "^1.2.4",
-    "negotiator": "^1.0.0",
-    "proc-log": "^5.0.0",
-    "promise-retry": "^2.0.1",
-    "ssri": "^12.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "nock": "^13.2.4",
-    "safe-buffer": "^5.2.1",
-    "standard-version": "^9.3.2",
-    "tap": "^16.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "color": 1,
-    "files": "test/*.js",
-    "check-coverage": true,
-    "timeout": 60,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/tuf-js/node_modules/negotiator/HISTORY.md b/node_modules/tuf-js/node_modules/negotiator/HISTORY.md
deleted file mode 100644
index 63d537d3f6811..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/HISTORY.md
+++ /dev/null
@@ -1,114 +0,0 @@
-1.0.0 / 2024-08-31
-==================
-
-  * Drop support for node <18
-  * Added an option preferred encodings array #59
-
-0.6.3 / 2022-01-22
-==================
-
-  * Revert "Lazy-load modules from main entry point"
-
-0.6.2 / 2019-04-29
-==================
-
-  * Fix sorting charset, encoding, and language with extra parameters
-
-0.6.1 / 2016-05-02
-==================
-
-  * perf: improve `Accept` parsing speed
-  * perf: improve `Accept-Charset` parsing speed
-  * perf: improve `Accept-Encoding` parsing speed
-  * perf: improve `Accept-Language` parsing speed
-
-0.6.0 / 2015-09-29
-==================
-
-  * Fix including type extensions in parameters in `Accept` parsing
-  * Fix parsing `Accept` parameters with quoted equals
-  * Fix parsing `Accept` parameters with quoted semicolons
-  * Lazy-load modules from main entry point
-  * perf: delay type concatenation until needed
-  * perf: enable strict mode
-  * perf: hoist regular expressions
-  * perf: remove closures getting spec properties
-  * perf: remove a closure from media type parsing
-  * perf: remove property delete from media type parsing
-
-0.5.3 / 2015-05-10
-==================
-
-  * Fix media type parameter matching to be case-insensitive
-
-0.5.2 / 2015-05-06
-==================
-
-  * Fix comparing media types with quoted values
-  * Fix splitting media types with quoted commas
-
-0.5.1 / 2015-02-14
-==================
-
-  * Fix preference sorting to be stable for long acceptable lists
-
-0.5.0 / 2014-12-18
-==================
-
-  * Fix list return order when large accepted list
-  * Fix missing identity encoding when q=0 exists
-  * Remove dynamic building of Negotiator class
-
-0.4.9 / 2014-10-14
-==================
-
-  * Fix error when media type has invalid parameter
-
-0.4.8 / 2014-09-28
-==================
-
-  * Fix all negotiations to be case-insensitive
-  * Stable sort preferences of same quality according to client order
-  * Support Node.js 0.6
-
-0.4.7 / 2014-06-24
-==================
-
-  * Handle invalid provided languages
-  * Handle invalid provided media types
-
-0.4.6 / 2014-06-11
-==================
-
-  *  Order by specificity when quality is the same
-
-0.4.5 / 2014-05-29
-==================
-
-  * Fix regression in empty header handling
-
-0.4.4 / 2014-05-29
-==================
-
-  * Fix behaviors when headers are not present
-
-0.4.3 / 2014-04-16
-==================
-
-  * Handle slashes on media params correctly
-
-0.4.2 / 2014-02-28
-==================
-
-  * Fix media type sorting
-  * Handle media types params strictly
-
-0.4.1 / 2014-01-16
-==================
-
-  * Use most specific matches
-
-0.4.0 / 2014-01-09
-==================
-
-  * Remove preferred prefix from methods
diff --git a/node_modules/tuf-js/node_modules/negotiator/LICENSE b/node_modules/tuf-js/node_modules/negotiator/LICENSE
deleted file mode 100644
index ea6b9e2e9ac25..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2012-2014 Federico Romero
-Copyright (c) 2012-2014 Isaac Z. Schlueter
-Copyright (c) 2014-2015 Douglas Christopher Wilson
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/tuf-js/node_modules/negotiator/index.js b/node_modules/tuf-js/node_modules/negotiator/index.js
deleted file mode 100644
index 4f51315d6af4b..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/*!
- * negotiator
- * Copyright(c) 2012 Federico Romero
- * Copyright(c) 2012-2014 Isaac Z. Schlueter
- * Copyright(c) 2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-var preferredCharsets = require('./lib/charset')
-var preferredEncodings = require('./lib/encoding')
-var preferredLanguages = require('./lib/language')
-var preferredMediaTypes = require('./lib/mediaType')
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = Negotiator;
-module.exports.Negotiator = Negotiator;
-
-/**
- * Create a Negotiator instance from a request.
- * @param {object} request
- * @public
- */
-
-function Negotiator(request) {
-  if (!(this instanceof Negotiator)) {
-    return new Negotiator(request);
-  }
-
-  this.request = request;
-}
-
-Negotiator.prototype.charset = function charset(available) {
-  var set = this.charsets(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.charsets = function charsets(available) {
-  return preferredCharsets(this.request.headers['accept-charset'], available);
-};
-
-Negotiator.prototype.encoding = function encoding(available, opts) {
-  var set = this.encodings(available, opts);
-  return set && set[0];
-};
-
-Negotiator.prototype.encodings = function encodings(available, options) {
-  var opts = options || {};
-  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
-};
-
-Negotiator.prototype.language = function language(available) {
-  var set = this.languages(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.languages = function languages(available) {
-  return preferredLanguages(this.request.headers['accept-language'], available);
-};
-
-Negotiator.prototype.mediaType = function mediaType(available) {
-  var set = this.mediaTypes(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.mediaTypes = function mediaTypes(available) {
-  return preferredMediaTypes(this.request.headers.accept, available);
-};
-
-// Backwards compatibility
-Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
-Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
-Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
-Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
-Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
-Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
-Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
-Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/tuf-js/node_modules/negotiator/lib/charset.js b/node_modules/tuf-js/node_modules/negotiator/lib/charset.js
deleted file mode 100644
index cdd014803474a..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/lib/charset.js
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredCharsets;
-module.exports.preferredCharsets = preferredCharsets;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Charset header.
- * @private
- */
-
-function parseAcceptCharset(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var charset = parseCharset(accepts[i].trim(), i);
-
-    if (charset) {
-      accepts[j++] = charset;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a charset from the Accept-Charset header.
- * @private
- */
-
-function parseCharset(str, i) {
-  var match = simpleCharsetRegExp.exec(str);
-  if (!match) return null;
-
-  var charset = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    charset: charset,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a charset.
- * @private
- */
-
-function getCharsetPriority(charset, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(charset, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the charset.
- * @private
- */
-
-function specify(charset, spec, index) {
-  var s = 0;
-  if(spec.charset.toLowerCase() === charset.toLowerCase()){
-    s |= 1;
-  } else if (spec.charset !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-}
-
-/**
- * Get the preferred charsets from an Accept-Charset header.
- * @public
- */
-
-function preferredCharsets(accept, provided) {
-  // RFC 2616 sec 14.2: no header = *
-  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all charsets
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullCharset);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getCharsetPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted charsets
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full charset string.
- * @private
- */
-
-function getFullCharset(spec) {
-  return spec.charset;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/tuf-js/node_modules/negotiator/lib/encoding.js b/node_modules/tuf-js/node_modules/negotiator/lib/encoding.js
deleted file mode 100644
index 9ebb633d67743..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/lib/encoding.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredEncodings;
-module.exports.preferredEncodings = preferredEncodings;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Encoding header.
- * @private
- */
-
-function parseAcceptEncoding(accept) {
-  var accepts = accept.split(',');
-  var hasIdentity = false;
-  var minQuality = 1;
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var encoding = parseEncoding(accepts[i].trim(), i);
-
-    if (encoding) {
-      accepts[j++] = encoding;
-      hasIdentity = hasIdentity || specify('identity', encoding);
-      minQuality = Math.min(minQuality, encoding.q || 1);
-    }
-  }
-
-  if (!hasIdentity) {
-    /*
-     * If identity doesn't explicitly appear in the accept-encoding header,
-     * it's added to the list of acceptable encoding with the lowest q
-     */
-    accepts[j++] = {
-      encoding: 'identity',
-      q: minQuality,
-      i: i
-    };
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse an encoding from the Accept-Encoding header.
- * @private
- */
-
-function parseEncoding(str, i) {
-  var match = simpleEncodingRegExp.exec(str);
-  if (!match) return null;
-
-  var encoding = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';');
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    encoding: encoding,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of an encoding.
- * @private
- */
-
-function getEncodingPriority(encoding, accepted, index) {
-  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(encoding, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the encoding.
- * @private
- */
-
-function specify(encoding, spec, index) {
-  var s = 0;
-  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
-    s |= 1;
-  } else if (spec.encoding !== '*' ) {
-    return null
-  }
-
-  return {
-    encoding: encoding,
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred encodings from an Accept-Encoding header.
- * @public
- */
-
-function preferredEncodings(accept, provided, preferred) {
-  var accepts = parseAcceptEncoding(accept || '');
-
-  var comparator = preferred ? function comparator (a, b) {
-    if (a.q !== b.q) {
-      return b.q - a.q // higher quality first
-    }
-
-    var aPreferred = preferred.indexOf(a.encoding)
-    var bPreferred = preferred.indexOf(b.encoding)
-
-    if (aPreferred === -1 && bPreferred === -1) {
-      // consider the original specificity/order
-      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
-    }
-
-    if (aPreferred !== -1 && bPreferred !== -1) {
-      return aPreferred - bPreferred // consider the preferred order
-    }
-
-    return aPreferred === -1 ? 1 : -1 // preferred first
-  } : compareSpecs;
-
-  if (!provided) {
-    // sorted list of all encodings
-    return accepts
-      .filter(isQuality)
-      .sort(comparator)
-      .map(getFullEncoding);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getEncodingPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted encodings
-  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
-}
-
-/**
- * Get full encoding string.
- * @private
- */
-
-function getFullEncoding(spec) {
-  return spec.encoding;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/tuf-js/node_modules/negotiator/lib/language.js b/node_modules/tuf-js/node_modules/negotiator/lib/language.js
deleted file mode 100644
index a23167252719b..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/lib/language.js
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredLanguages;
-module.exports.preferredLanguages = preferredLanguages;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Language header.
- * @private
- */
-
-function parseAcceptLanguage(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var language = parseLanguage(accepts[i].trim(), i);
-
-    if (language) {
-      accepts[j++] = language;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a language from the Accept-Language header.
- * @private
- */
-
-function parseLanguage(str, i) {
-  var match = simpleLanguageRegExp.exec(str);
-  if (!match) return null;
-
-  var prefix = match[1]
-  var suffix = match[2]
-  var full = prefix
-
-  if (suffix) full += "-" + suffix;
-
-  var q = 1;
-  if (match[3]) {
-    var params = match[3].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].split('=');
-      if (p[0] === 'q') q = parseFloat(p[1]);
-    }
-  }
-
-  return {
-    prefix: prefix,
-    suffix: suffix,
-    q: q,
-    i: i,
-    full: full
-  };
-}
-
-/**
- * Get the priority of a language.
- * @private
- */
-
-function getLanguagePriority(language, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(language, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the language.
- * @private
- */
-
-function specify(language, spec, index) {
-  var p = parseLanguage(language)
-  if (!p) return null;
-  var s = 0;
-  if(spec.full.toLowerCase() === p.full.toLowerCase()){
-    s |= 4;
-  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
-    s |= 2;
-  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
-    s |= 1;
-  } else if (spec.full !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred languages from an Accept-Language header.
- * @public
- */
-
-function preferredLanguages(accept, provided) {
-  // RFC 2616 sec 14.4: no header = *
-  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all languages
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullLanguage);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getLanguagePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted languages
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full language string.
- * @private
- */
-
-function getFullLanguage(spec) {
-  return spec.full;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/tuf-js/node_modules/negotiator/lib/mediaType.js b/node_modules/tuf-js/node_modules/negotiator/lib/mediaType.js
deleted file mode 100644
index 8e402ea88394c..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/lib/mediaType.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredMediaTypes;
-module.exports.preferredMediaTypes = preferredMediaTypes;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept header.
- * @private
- */
-
-function parseAccept(accept) {
-  var accepts = splitMediaTypes(accept);
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var mediaType = parseMediaType(accepts[i].trim(), i);
-
-    if (mediaType) {
-      accepts[j++] = mediaType;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a media type from the Accept header.
- * @private
- */
-
-function parseMediaType(str, i) {
-  var match = simpleMediaTypeRegExp.exec(str);
-  if (!match) return null;
-
-  var params = Object.create(null);
-  var q = 1;
-  var subtype = match[2];
-  var type = match[1];
-
-  if (match[3]) {
-    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
-
-    for (var j = 0; j < kvps.length; j++) {
-      var pair = kvps[j];
-      var key = pair[0].toLowerCase();
-      var val = pair[1];
-
-      // get the value, unwrapping quotes
-      var value = val && val[0] === '"' && val[val.length - 1] === '"'
-        ? val.slice(1, -1)
-        : val;
-
-      if (key === 'q') {
-        q = parseFloat(value);
-        break;
-      }
-
-      // store parameter
-      params[key] = value;
-    }
-  }
-
-  return {
-    type: type,
-    subtype: subtype,
-    params: params,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a media type.
- * @private
- */
-
-function getMediaTypePriority(type, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(type, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the media type.
- * @private
- */
-
-function specify(type, spec, index) {
-  var p = parseMediaType(type);
-  var s = 0;
-
-  if (!p) {
-    return null;
-  }
-
-  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
-    s |= 4
-  } else if(spec.type != '*') {
-    return null;
-  }
-
-  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
-    s |= 2
-  } else if(spec.subtype != '*') {
-    return null;
-  }
-
-  var keys = Object.keys(spec.params);
-  if (keys.length > 0) {
-    if (keys.every(function (k) {
-      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
-    })) {
-      s |= 1
-    } else {
-      return null
-    }
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s,
-  }
-}
-
-/**
- * Get the preferred media types from an Accept header.
- * @public
- */
-
-function preferredMediaTypes(accept, provided) {
-  // RFC 2616 sec 14.2: no header = */*
-  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all types
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullType);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getMediaTypePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted types
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full type string.
- * @private
- */
-
-function getFullType(spec) {
-  return spec.type + '/' + spec.subtype;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
-
-/**
- * Count the number of quotes in a string.
- * @private
- */
-
-function quoteCount(string) {
-  var count = 0;
-  var index = 0;
-
-  while ((index = string.indexOf('"', index)) !== -1) {
-    count++;
-    index++;
-  }
-
-  return count;
-}
-
-/**
- * Split a key value pair.
- * @private
- */
-
-function splitKeyValuePair(str) {
-  var index = str.indexOf('=');
-  var key;
-  var val;
-
-  if (index === -1) {
-    key = str;
-  } else {
-    key = str.slice(0, index);
-    val = str.slice(index + 1);
-  }
-
-  return [key, val];
-}
-
-/**
- * Split an Accept header into media types.
- * @private
- */
-
-function splitMediaTypes(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 1, j = 0; i < accepts.length; i++) {
-    if (quoteCount(accepts[j]) % 2 == 0) {
-      accepts[++j] = accepts[i];
-    } else {
-      accepts[j] += ',' + accepts[i];
-    }
-  }
-
-  // trim accepts
-  accepts.length = j + 1;
-
-  return accepts;
-}
-
-/**
- * Split a string of parameters.
- * @private
- */
-
-function splitParameters(str) {
-  var parameters = str.split(';');
-
-  for (var i = 1, j = 0; i < parameters.length; i++) {
-    if (quoteCount(parameters[j]) % 2 == 0) {
-      parameters[++j] = parameters[i];
-    } else {
-      parameters[j] += ';' + parameters[i];
-    }
-  }
-
-  // trim parameters
-  parameters.length = j + 1;
-
-  for (var i = 0; i < parameters.length; i++) {
-    parameters[i] = parameters[i].trim();
-  }
-
-  return parameters;
-}
diff --git a/node_modules/tuf-js/node_modules/negotiator/package.json b/node_modules/tuf-js/node_modules/negotiator/package.json
deleted file mode 100644
index e4bdc1ef4f748..0000000000000
--- a/node_modules/tuf-js/node_modules/negotiator/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-  "name": "negotiator",
-  "description": "HTTP content negotiation",
-  "version": "1.0.0",
-  "contributors": [
-    "Douglas Christopher Wilson ",
-    "Federico Romero ",
-    "Isaac Z. Schlueter  (http://blog.izs.me/)"
-  ],
-  "license": "MIT",
-  "keywords": [
-    "http",
-    "content negotiation",
-    "accept",
-    "accept-language",
-    "accept-encoding",
-    "accept-charset"
-  ],
-  "repository": "jshttp/negotiator",
-  "devDependencies": {
-    "eslint": "7.32.0",
-    "eslint-plugin-markdown": "2.2.1",
-    "mocha": "9.1.3",
-    "nyc": "15.1.0"
-  },
-  "files": [
-    "lib/",
-    "HISTORY.md",
-    "LICENSE",
-    "index.js",
-    "README.md"
-  ],
-  "engines": {
-    "node": ">= 0.6"
-  },
-  "scripts": {
-    "lint": "eslint .",
-    "test": "mocha --reporter spec --check-leaks --bail test/",
-    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
-    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
-    "test-cov": "nyc --reporter=html --reporter=text npm test"
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index feec34299945e..384e05815bbb6 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -119,7 +119,7 @@
         "libnpmsearch": "^9.0.0",
         "libnpmteam": "^8.0.1",
         "libnpmversion": "^8.0.1",
-        "make-fetch-happen": "^14.0.3",
+        "make-fetch-happen": "^15.0.2",
         "minimatch": "^9.0.5",
         "minipass": "^7.1.1",
         "minipass-pipeline": "^1.2.4",
@@ -5259,39 +5259,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@sigstore/sign/node_modules/make-fetch-happen": {
-      "version": "15.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
-      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^20.0.1",
-        "http-cache-semantics": "^4.1.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/@sigstore/sign/node_modules/negotiator": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/@sigstore/tuf": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
@@ -11208,14 +11175,14 @@
       }
     },
     "node_modules/make-fetch-happen": {
-      "version": "14.0.3",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz",
-      "integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==",
+      "version": "15.0.2",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.2.tgz",
+      "integrity": "sha512-sI1NY4lWlXBAfjmCtVWIIpBypbBdhHtcjnwnv+gtCnsaOffyFil3aidszGC8hgzJe+fT1qix05sWxmD/Bmf/oQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^19.0.1",
+        "@npmcli/agent": "^4.0.0",
+        "cacache": "^20.0.1",
         "http-cache-semantics": "^4.1.1",
         "minipass": "^7.0.2",
         "minipass-fetch": "^4.0.0",
@@ -11227,70 +11194,34 @@
         "ssri": "^12.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/make-fetch-happen/node_modules/cacache": {
-      "version": "19.0.1",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
-      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
+    "node_modules/make-fetch-happen/node_modules/@npmcli/agent": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-4.0.0.tgz",
+      "integrity": "sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/fs": "^4.0.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^10.0.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^2.0.1",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^7.0.2",
-        "ssri": "^12.0.0",
-        "tar": "^7.4.3",
-        "unique-filename": "^4.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/make-fetch-happen/node_modules/chownr": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
-      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/make-fetch-happen/node_modules/minizlib": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
+        "agent-base": "^7.1.0",
+        "http-proxy-agent": "^7.0.0",
+        "https-proxy-agent": "^7.0.1",
+        "lru-cache": "^11.2.1",
+        "socks-proxy-agent": "^8.0.3"
       },
       "engines": {
-        "node": ">= 18"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/make-fetch-happen/node_modules/mkdirp": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
-      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
+    "node_modules/make-fetch-happen/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
       "inBundle": true,
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "dist/cjs/src/bin.js"
-      },
+      "license": "ISC",
       "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
+        "node": "20 || >=22"
       }
     },
     "node_modules/make-fetch-happen/node_modules/negotiator": {
@@ -11303,34 +11234,6 @@
         "node": ">= 0.6"
       }
     },
-    "node_modules/make-fetch-happen/node_modules/tar": {
-      "version": "7.4.3",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
-      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/fs-minipass": "^4.0.0",
-        "chownr": "^3.0.0",
-        "minipass": "^7.1.2",
-        "minizlib": "^3.0.1",
-        "mkdirp": "^3.0.1",
-        "yallist": "^5.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/make-fetch-happen/node_modules/yallist": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
-      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "node_modules/map-obj": {
       "version": "4.3.0",
       "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
@@ -12648,6 +12551,30 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/node-gyp/node_modules/cacache": {
+      "version": "19.0.1",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
+      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/fs": "^4.0.0",
+        "fs-minipass": "^3.0.0",
+        "glob": "^10.2.2",
+        "lru-cache": "^10.0.1",
+        "minipass": "^7.0.3",
+        "minipass-collect": "^2.0.1",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "p-map": "^7.0.2",
+        "ssri": "^12.0.0",
+        "tar": "^7.4.3",
+        "unique-filename": "^4.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/node-gyp/node_modules/chownr": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
@@ -12658,6 +12585,29 @@
         "node": ">=18"
       }
     },
+    "node_modules/node-gyp/node_modules/make-fetch-happen": {
+      "version": "14.0.3",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz",
+      "integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/agent": "^3.0.0",
+        "cacache": "^19.0.1",
+        "http-cache-semantics": "^4.1.1",
+        "minipass": "^7.0.2",
+        "minipass-fetch": "^4.0.0",
+        "minipass-flush": "^1.0.5",
+        "minipass-pipeline": "^1.2.4",
+        "negotiator": "^1.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "ssri": "^12.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/node-gyp/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -12687,6 +12637,16 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/node-gyp/node_modules/negotiator": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
     "node_modules/node-gyp/node_modules/tar": {
       "version": "7.4.3",
       "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
@@ -12964,29 +12924,6 @@
         "node": "20 || >=22"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/make-fetch-happen": {
-      "version": "15.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
-      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^20.0.1",
-        "http-cache-semantics": "^4.1.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/npm-registry-fetch/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -13000,16 +12937,6 @@
         "node": ">= 18"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/negotiator": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
       "version": "13.0.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
@@ -18412,39 +18339,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/tuf-js/node_modules/make-fetch-happen": {
-      "version": "15.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.1.tgz",
-      "integrity": "sha512-9GjpQcaUXO2xmre8JfALl8Oji8Jpo+SyY2HpqFFPHVczOld/I+JFRx9FkP/uedZzkJlI9uM5t/j6dGJv4BScQw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^3.0.0",
-        "cacache": "^20.0.1",
-        "http-cache-semantics": "^4.1.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^4.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^1.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^12.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/tuf-js/node_modules/negotiator": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/tunnel": {
       "version": "0.0.6",
       "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
diff --git a/package.json b/package.json
index a624a0b51b64a..6f41dbbbd3b99 100644
--- a/package.json
+++ b/package.json
@@ -86,7 +86,7 @@
     "libnpmsearch": "^9.0.0",
     "libnpmteam": "^8.0.1",
     "libnpmversion": "^8.0.1",
-    "make-fetch-happen": "^14.0.3",
+    "make-fetch-happen": "^15.0.2",
     "minimatch": "^9.0.5",
     "minipass": "^7.1.1",
     "minipass-pipeline": "^1.2.4",

From 633c4ed76ea13b8dfb5837a397e984e44cccb820 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:49:40 -0700
Subject: [PATCH 17/63] deps: hosted-git-info@9.0.0

---
 node_modules/.gitignore                       |   16 +-
 .../node_modules/lru-cache/LICENSE            |    0
 .../lru-cache/dist/commonjs/index.js          |    0
 .../lru-cache/dist/commonjs/index.min.js      |    0
 .../lru-cache/dist/commonjs/package.json      |    0
 .../node_modules/lru-cache/dist/esm/index.js  |    0
 .../lru-cache/dist/esm/index.min.js           |    0
 .../lru-cache/dist/esm/package.json           |    0
 .../node_modules/lru-cache/package.json       |    0
 node_modules/hosted-git-info/package.json     |   10 +-
 .../node_modules/hosted-git-info/LICENSE      |    0
 .../hosted-git-info/lib/from-url.js           |    0
 .../node_modules/hosted-git-info/lib/hosts.js |    0
 .../node_modules/hosted-git-info/lib/index.js |    0
 .../hosted-git-info/lib/parse-url.js          |    0
 .../node_modules/hosted-git-info/package.json |   10 +-
 .../node_modules/hosted-git-info/LICENSE      |    0
 .../hosted-git-info/lib/from-url.js           |    0
 .../node_modules/hosted-git-info/lib/hosts.js |    0
 .../node_modules/hosted-git-info/lib/index.js |    0
 .../hosted-git-info/lib/parse-url.js          |    0
 .../node_modules/hosted-git-info/package.json |   10 +-
 .../node_modules/hosted-git-info/LICENSE      |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  231 ---
 .../node_modules/hosted-git-info/lib/index.js |  227 ---
 .../hosted-git-info/lib/parse-url.js          |   78 -
 .../node_modules/hosted-git-info/package.json |   61 -
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1564 -----------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ----------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  113 --
 .../node_modules/hosted-git-info/LICENSE      |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 --
 .../node_modules/hosted-git-info/lib/hosts.js |  231 ---
 .../node_modules/hosted-git-info/lib/index.js |  227 ---
 .../hosted-git-info/lib/parse-url.js          |   78 -
 .../node_modules/hosted-git-info/package.json |   61 -
 .../pacote/node_modules/lru-cache/LICENSE     |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1564 -----------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ----------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  113 --
 package-lock.json                             |  218 +--
 package.json                                  |    2 +-
 workspaces/arborist/package.json              |    2 +-
 53 files changed, 138 insertions(+), 8118 deletions(-)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/dist/commonjs/index.js (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/dist/commonjs/index.min.js (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/dist/commonjs/package.json (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/dist/esm/index.js (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/dist/esm/index.min.js (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/dist/esm/package.json (100%)
 rename node_modules/{init-package-json => hosted-git-info}/node_modules/lru-cache/package.json (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{@npmcli/package-json => normalize-package-data}/node_modules/hosted-git-info/package.json (90%)
 rename node_modules/{init-package-json => npm-package-arg}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{init-package-json => npm-package-arg}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{init-package-json => npm-package-arg}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{init-package-json => npm-package-arg}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{init-package-json => npm-package-arg}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{init-package-json => npm-package-arg}/node_modules/hosted-git-info/package.json (90%)
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/pacote/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/pacote/node_modules/hosted-git-info/package.json
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/pacote/node_modules/lru-cache/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 8898459263936..a525ff73d66e0 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -40,7 +40,6 @@
 /@npmcli/package-json/node_modules/@npmcli/*
 !/@npmcli/package-json/node_modules/@npmcli/git
 !/@npmcli/package-json/node_modules/glob
-!/@npmcli/package-json/node_modules/hosted-git-info
 !/@npmcli/package-json/node_modules/jackspeak
 !/@npmcli/package-json/node_modules/lru-cache
 !/@npmcli/package-json/node_modules/minimatch
@@ -111,6 +110,9 @@
 !/glob
 !/graceful-fs
 !/hosted-git-info
+!/hosted-git-info/node_modules/
+/hosted-git-info/node_modules/*
+!/hosted-git-info/node_modules/lru-cache
 !/http-cache-semantics
 !/http-proxy-agent
 !/https-proxy-agent
@@ -120,8 +122,6 @@
 !/init-package-json
 !/init-package-json/node_modules/
 /init-package-json/node_modules/*
-!/init-package-json/node_modules/hosted-git-info
-!/init-package-json/node_modules/lru-cache
 !/init-package-json/node_modules/npm-package-arg
 !/ip-address
 !/ip-regex
@@ -183,11 +183,17 @@
 !/node-gyp/node_modules/yallist
 !/nopt
 !/normalize-package-data
+!/normalize-package-data/node_modules/
+/normalize-package-data/node_modules/*
+!/normalize-package-data/node_modules/hosted-git-info
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
 !/npm-normalize-package-bin
 !/npm-package-arg
+!/npm-package-arg/node_modules/
+/npm-package-arg/node_modules/*
+!/npm-package-arg/node_modules/hosted-git-info
 !/npm-packlist
 !/npm-packlist/node_modules/
 /npm-packlist/node_modules/*
@@ -198,8 +204,6 @@
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
 /npm-registry-fetch/node_modules/*
-!/npm-registry-fetch/node_modules/hosted-git-info
-!/npm-registry-fetch/node_modules/lru-cache
 !/npm-registry-fetch/node_modules/minizlib
 !/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
@@ -212,8 +216,6 @@
 /pacote/node_modules/@npmcli/*
 !/pacote/node_modules/@npmcli/git
 !/pacote/node_modules/chownr
-!/pacote/node_modules/hosted-git-info
-!/pacote/node_modules/lru-cache
 !/pacote/node_modules/minizlib
 !/pacote/node_modules/mkdirp
 !/pacote/node_modules/npm-package-arg
diff --git a/node_modules/init-package-json/node_modules/lru-cache/LICENSE b/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/LICENSE
rename to node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.js
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.min.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/index.min.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.min.js
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/dist/commonjs/package.json
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.js
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.min.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/dist/esm/index.min.js
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.min.js
diff --git a/node_modules/init-package-json/node_modules/lru-cache/dist/esm/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/dist/esm/package.json
rename to node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/init-package-json/node_modules/lru-cache/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/package.json
similarity index 100%
rename from node_modules/init-package-json/node_modules/lru-cache/package.json
rename to node_modules/hosted-git-info/node_modules/lru-cache/package.json
diff --git a/node_modules/hosted-git-info/package.json b/node_modules/hosted-git-info/package.json
index a9bb26be4a704..5883a7d308d79 100644
--- a/node_modules/hosted-git-info/package.json
+++ b/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
 {
   "name": "hosted-git-info",
-  "version": "8.1.0",
+  "version": "9.0.0",
   "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
   "main": "./lib/index.js",
   "repository": {
@@ -31,11 +31,11 @@
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
   "dependencies": {
-    "lru-cache": "^10.0.1"
+    "lru-cache": "^11.1.0"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.24.3",
+    "@npmcli/template-oss": "4.25.0",
     "tap": "^16.0.1"
   },
   "files": [
@@ -43,7 +43,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "tap": {
     "color": 1,
@@ -55,7 +55,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.3",
+    "version": "4.25.0",
     "publish": "true"
   }
 }
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE b/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/LICENSE
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/index.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
similarity index 90%
rename from node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json
rename to node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
index 5883a7d308d79..a9bb26be4a704 100644
--- a/node_modules/@npmcli/package-json/node_modules/hosted-git-info/package.json
+++ b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
 {
   "name": "hosted-git-info",
-  "version": "9.0.0",
+  "version": "8.1.0",
   "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
   "main": "./lib/index.js",
   "repository": {
@@ -31,11 +31,11 @@
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
   "dependencies": {
-    "lru-cache": "^11.1.0"
+    "lru-cache": "^10.0.1"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.24.3",
     "tap": "^16.0.1"
   },
   "files": [
@@ -43,7 +43,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^20.17.0 || >=22.9.0"
+    "node": "^18.17.0 || >=20.5.0"
   },
   "tap": {
     "color": 1,
@@ -55,7 +55,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.24.3",
     "publish": "true"
   }
 }
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/LICENSE b/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/init-package-json/node_modules/hosted-git-info/LICENSE
rename to node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/hosted-git-info/lib/index.js
rename to node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/init-package-json/node_modules/hosted-git-info/package.json b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
similarity index 90%
rename from node_modules/init-package-json/node_modules/hosted-git-info/package.json
rename to node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
index 5883a7d308d79..a9bb26be4a704 100644
--- a/node_modules/init-package-json/node_modules/hosted-git-info/package.json
+++ b/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
@@ -1,6 +1,6 @@
 {
   "name": "hosted-git-info",
-  "version": "9.0.0",
+  "version": "8.1.0",
   "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
   "main": "./lib/index.js",
   "repository": {
@@ -31,11 +31,11 @@
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
   "dependencies": {
-    "lru-cache": "^11.1.0"
+    "lru-cache": "^10.0.1"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.24.3",
     "tap": "^16.0.1"
   },
   "files": [
@@ -43,7 +43,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^20.17.0 || >=22.9.0"
+    "node": "^18.17.0 || >=20.5.0"
   },
   "tap": {
     "color": 1,
@@ -55,7 +55,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.24.3",
     "publish": "true"
   }
 }
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 2a88e95927772..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase()
-  .replace(/^\W+/g, '') // strip leading non-characters
-  .replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: () => null,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
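For reference (not part of the patch): the gitlab entry above keeps subgroup segments in the user field when extracting a repo path. A minimal sketch of the same steps, assuming a made-up URL:

    // illustration only; mirrors hosts.gitlab.extract() above, URL is made up
    const url = new URL('https://gitlab.com/group/subgroup/project.git#v2.0.0')
    const segments = url.pathname.slice(1).split('/')
    const project = segments.pop().replace(/\.git$/, '')
    const user = segments.join('/')
    console.log({ user, project, committish: url.hash.slice(1) })
    // -> { user: 'group/subgroup', project: 'project', committish: 'v2.0.0' }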
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index 2a7100dcee6e7..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRUCache({ max: 1000 })
-
-function unknownHostedUrl (url) {
-  try {
-    const {
-      protocol,
-      hostname,
-      pathname,
-    } = new URL(url)
-
-    if (!hostname) {
-      return null
-    }
-
-    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
-    const path = pathname.replace(/\.git$/, '')
-    return `${proto}//${hostname}${path}`
-  } catch {
-    return null
-  }
-}
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static fromManifest (manifest, opts = {}) {
-    if (!manifest || typeof manifest !== 'object') {
-      return
-    }
-
-    const r = manifest.repository
-    // TODO: look into also checking the `bugs`/`homepage` URLs
-
-    const rurl = r && (
-      typeof r === 'string'
-        ? r
-        : typeof r === 'object' && typeof r.url === 'string'
-          ? r.url
-          : null
-    )
-
-    if (!rurl) {
-      throw new Error('no repository')
-    }
-
-    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
-    if (info) {
-      return info
-    }
-    const unk = unknownHostedUrl(rurl)
-    return GitHost.fromUrl(unk, opts) || unk
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
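For context (not part of the patch): the class above is the package's entry point, and the hosts.js templates drive its instance methods. A minimal usage sketch, assuming the published hosted-git-info API and a made-up repo:

    // minimal sketch of the public API the deleted copy exposed; repo name is made up
    const hostedGit = require('hosted-git-info')
    const info = hostedGit.fromUrl('gitlab:some-group/some-project#v1.2.3')
    console.log(info.type)      // 'gitlab'
    console.log(info.https())   // 'git+https://gitlab.com/some-group/some-project.git#v1.2.3'
    console.log(info.tarball()) // 'https://gitlab.com/some-group/some-project/repository/archive.tar.gz?ref=v1.2.3'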
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ that come after the first hash since that denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
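As a reading aid (not part of the patch): the correction above exists so scp-style git addresses survive new URL(). A rough sketch of the rewrite it performs, with a made-up repo:

    // illustration of what correctUrl() above effectively does to one scp-style input;
    // the repo below is made up. 'git@github.com:npm-example/repo.git#v1.0.0' itself
    // throws in new URL(), so the last ':' before the '#' becomes '/', and with no
    // protocol left, 'git+ssh://' is prepended:
    const corrected = 'git+ssh://git@github.com/npm-example/repo.git#v1.0.0'
    const parsed = new URL(corrected)
    console.log(parsed.username, parsed.hostname, parsed.pathname, parsed.hash)
    // -> git github.com /npm-example/repo.git #v1.0.0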
diff --git a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json b/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 5883a7d308d79..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "9.0.0",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "npm run eslint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "dependencies": {
-    "lru-cache": "^11.1.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE b/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 921b8f10f71b1..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1564 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 8fd8fc5f31507..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1560 +0,0 @@
-/**
- * @module LRUCache
- */
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json b/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
deleted file mode 100644
index 4953bdf4a7a35..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^24.3.0",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
-    "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/pacote/node_modules/hosted-git-info/LICENSE b/node_modules/pacote/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/pacote/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js b/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/pacote/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js b/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 2a88e95927772..0000000000000
--- a/node_modules/pacote/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase()
-  .replace(/^\W+/g, '') // strip leading non-characters
-  .replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: () => null,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js b/node_modules/pacote/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index 2a7100dcee6e7..0000000000000
--- a/node_modules/pacote/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRUCache({ max: 1000 })
-
-function unknownHostedUrl (url) {
-  try {
-    const {
-      protocol,
-      hostname,
-      pathname,
-    } = new URL(url)
-
-    if (!hostname) {
-      return null
-    }
-
-    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
-    const path = pathname.replace(/\.git$/, '')
-    return `${proto}//${hostname}${path}`
-  } catch {
-    return null
-  }
-}
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static fromManifest (manifest, opts = {}) {
-    if (!manifest || typeof manifest !== 'object') {
-      return
-    }
-
-    const r = manifest.repository
-    // TODO: look into also checking the `bugs`/`homepage` URLs
-
-    const rurl = r && (
-      typeof r === 'string'
-        ? r
-        : typeof r === 'object' && typeof r.url === 'string'
-          ? r.url
-          : null
-    )
-
-    if (!rurl) {
-      throw new Error('no repository')
-    }
-
-    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
-    if (info) {
-      return info
-    }
-    const unk = unknownHostedUrl(rurl)
-    return GitHost.fromUrl(unk, opts) || unk
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
diff --git a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/pacote/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ that come after the first hash since the denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
diff --git a/node_modules/pacote/node_modules/hosted-git-info/package.json b/node_modules/pacote/node_modules/hosted-git-info/package.json
deleted file mode 100644
index 5883a7d308d79..0000000000000
--- a/node_modules/pacote/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "9.0.0",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "npm run eslint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "dependencies": {
-    "lru-cache": "^11.1.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/pacote/node_modules/lru-cache/LICENSE b/node_modules/pacote/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 921b8f10f71b1..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1564 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/esm/index.js b/node_modules/pacote/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 8fd8fc5f31507..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1560 +0,0 @@
-/**
- * @module LRUCache
- */
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/pacote/node_modules/lru-cache/dist/esm/package.json b/node_modules/pacote/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/lru-cache/package.json b/node_modules/pacote/node_modules/lru-cache/package.json
deleted file mode 100644
index 4953bdf4a7a35..0000000000000
--- a/node_modules/pacote/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^24.3.0",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
-    "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/package-lock.json b/package-lock.json
index 384e05815bbb6..926637ac7e9c2 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -104,7 +104,7 @@
         "fs-minipass": "^3.0.3",
         "glob": "^10.4.5",
         "graceful-fs": "^4.2.11",
-        "hosted-git-info": "^8.1.0",
+        "hosted-git-info": "^9.0.0",
         "ini": "^5.0.0",
         "init-package-json": "^8.2.2",
         "is-cidr": "^5.1.1",
@@ -3661,19 +3661,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/hosted-git-info": {
-      "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
-      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^11.1.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/@npmcli/package-json/node_modules/jackspeak": {
       "version": "4.1.1",
       "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
@@ -3973,6 +3960,19 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
@@ -4139,19 +4139,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/hosted-git-info": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/ini": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz",
@@ -4336,6 +4323,19 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/bundle": {
       "version": "2.3.2",
       "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz",
@@ -4491,16 +4491,16 @@
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
+      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
         "lru-cache": "^10.0.1"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": "^18.17.0 || >=20.5.0"
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/ignore-walk": {
@@ -4644,6 +4644,19 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/normalize-package-data/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-bundled": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz",
@@ -4696,6 +4709,19 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-package-arg/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-packlist": {
       "version": "8.0.2",
       "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz",
@@ -4812,6 +4838,19 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz",
@@ -9557,16 +9596,26 @@
       }
     },
     "node_modules/hosted-git-info": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
+      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "lru-cache": "^10.0.1"
+        "lru-cache": "^11.1.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/hosted-git-info/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
       }
     },
     "node_modules/html-encoding-sniffer": {
@@ -9767,29 +9816,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/init-package-json/node_modules/hosted-git-info": {
-      "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
-      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^11.1.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/init-package-json/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/init-package-json/node_modules/npm-package-arg": {
       "version": "13.0.0",
       "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
@@ -12737,6 +12763,19 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/normalize-package-data/node_modules/hosted-git-info": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
+      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/normalize-path": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
@@ -12809,6 +12848,19 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/npm-package-arg/node_modules/hosted-git-info": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
+      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/npm-packlist": {
       "version": "10.0.1",
       "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.1.tgz",
@@ -12901,29 +12953,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
-      "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
-      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^11.1.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/npm-registry-fetch/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/npm-registry-fetch/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -13585,29 +13614,6 @@
         "node": ">=18"
       }
     },
-    "node_modules/pacote/node_modules/hosted-git-info": {
-      "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
-      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^11.1.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/pacote/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/pacote/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -19494,7 +19500,7 @@
         "bin-links": "^5.0.0",
         "cacache": "^20.0.1",
         "common-ancestor-path": "^1.0.1",
-        "hosted-git-info": "^8.0.0",
+        "hosted-git-info": "^9.0.0",
         "json-stringify-nice": "^1.1.4",
         "lru-cache": "^10.2.2",
         "minimatch": "^9.0.4",
diff --git a/package.json b/package.json
index 6f41dbbbd3b99..35e5313ee9704 100644
--- a/package.json
+++ b/package.json
@@ -71,7 +71,7 @@
     "fs-minipass": "^3.0.3",
     "glob": "^10.4.5",
     "graceful-fs": "^4.2.11",
-    "hosted-git-info": "^8.1.0",
+    "hosted-git-info": "^9.0.0",
     "ini": "^5.0.0",
     "init-package-json": "^8.2.2",
     "is-cidr": "^5.1.1",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 70e8775747b1c..40a08b66a7ff1 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -17,7 +17,7 @@
     "bin-links": "^5.0.0",
     "cacache": "^20.0.1",
     "common-ancestor-path": "^1.0.1",
-    "hosted-git-info": "^8.0.0",
+    "hosted-git-info": "^9.0.0",
     "json-stringify-nice": "^1.1.4",
     "lru-cache": "^10.2.2",
     "minimatch": "^9.0.4",

From 0082083fe4f52d3ef40241e9d8b991f7ed4a60dc Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 10:52:44 -0700
Subject: [PATCH 18/63] deps: normalize-package-data@8.0.0

---
 node_modules/.gitignore                       |   3 -
 .../node_modules/hosted-git-info/LICENSE      |  13 -
 .../hosted-git-info/lib/from-url.js           | 122 ---------
 .../node_modules/hosted-git-info/lib/hosts.js | 231 ------------------
 .../node_modules/hosted-git-info/lib/index.js | 227 -----------------
 .../hosted-git-info/lib/parse-url.js          |  78 ------
 .../node_modules/hosted-git-info/package.json |  61 -----
 .../normalize-package-data/package.json       |   6 +-
 package-lock.json                             |  25 +-
 package.json                                  |   2 +-
 10 files changed, 10 insertions(+), 758 deletions(-)
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/normalize-package-data/node_modules/hosted-git-info/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index a525ff73d66e0..21f380a400c8c 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -183,9 +183,6 @@
 !/node-gyp/node_modules/yallist
 !/nopt
 !/normalize-package-data
-!/normalize-package-data/node_modules/
-/normalize-package-data/node_modules/*
-!/normalize-package-data/node_modules/hosted-git-info
 !/npm-audit-report
 !/npm-bundled
 !/npm-install-checks
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE b/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 2a88e95927772..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase()
-  .replace(/^\W+/g, '') // strip leading non-characters
-  .replace(/(?<!\W)\W+$/, '') // strip trailing non-characters
-  .replace(/\W+/g, '-') // replace remaining non-characters with '-'
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: () => null,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index 2a7100dcee6e7..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRUCache({ max: 1000 })
-
-function unknownHostedUrl (url) {
-  try {
-    const {
-      protocol,
-      hostname,
-      pathname,
-    } = new URL(url)
-
-    if (!hostname) {
-      return null
-    }
-
-    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
-    const path = pathname.replace(/\.git$/, '')
-    return `${proto}//${hostname}${path}`
-  } catch {
-    return null
-  }
-}
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static fromManifest (manifest, opts = {}) {
-    if (!manifest || typeof manifest !== 'object') {
-      return
-    }
-
-    const r = manifest.repository
-    // TODO: look into also checking the `bugs`/`homepage` URLs
-
-    const rurl = r && (
-      typeof r === 'string'
-        ? r
-        : typeof r === 'object' && typeof r.url === 'string'
-          ? r.url
-          : null
-    )
-
-    if (!rurl) {
-      throw new Error('no repository')
-    }
-
-    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
-    if (info) {
-      return info
-    }
-    const unk = unknownHostedUrl(rurl)
-    return GitHost.fromUrl(unk, opts) || unk
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ that come after the first hash since the denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
diff --git a/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json b/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
deleted file mode 100644
index a9bb26be4a704..0000000000000
--- a/node_modules/normalize-package-data/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "8.1.0",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "npm run eslint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "dependencies": {
-    "lru-cache": "^10.0.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.24.3",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.3",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/normalize-package-data/package.json b/node_modules/normalize-package-data/package.json
index bf9b20f19d623..e4fbdddce4d61 100644
--- a/node_modules/normalize-package-data/package.json
+++ b/node_modules/normalize-package-data/package.json
@@ -1,6 +1,6 @@
 {
   "name": "normalize-package-data",
-  "version": "7.0.1",
+  "version": "8.0.0",
   "author": "GitHub Inc.",
   "description": "Normalizes data that can be found in package.json files.",
   "license": "BSD-2-Clause",
@@ -22,7 +22,7 @@
     "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
   },
   "dependencies": {
-    "hosted-git-info": "^8.0.0",
+    "hosted-git-info": "^9.0.0",
     "semver": "^7.3.5",
     "validate-npm-package-license": "^3.0.4"
   },
@@ -36,7 +36,7 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
diff --git a/package-lock.json b/package-lock.json
index 926637ac7e9c2..7afa0d91ab556 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -126,7 +126,7 @@
         "ms": "^2.1.2",
         "node-gyp": "^11.2.0",
         "nopt": "^8.1.0",
-        "normalize-package-data": "^7.0.1",
+        "normalize-package-data": "^8.0.0",
         "npm-audit-report": "^6.0.0",
         "npm-install-checks": "^7.1.1",
         "npm-package-arg": "^12.0.2",
@@ -12749,31 +12749,18 @@
       }
     },
     "node_modules/normalize-package-data": {
-      "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-7.0.1.tgz",
-      "integrity": "sha512-linxNAT6M0ebEYZOx2tO6vBEFsVgnPpv+AVjk0wJHfaUIbq31Jm3T6vvZaarnOeWDh8ShnwXuaAyM7WT3RzErA==",
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-8.0.0.tgz",
+      "integrity": "sha512-RWk+PI433eESQ7ounYxIp67CYuVsS1uYSonX3kA6ps/3LWfjVQa/ptEg6Y3T6uAMq1mWpX9PQ+qx+QaHpsc7gQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
-        "hosted-git-info": "^8.0.0",
+        "hosted-git-info": "^9.0.0",
         "semver": "^7.3.5",
         "validate-npm-package-license": "^3.0.4"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/normalize-package-data/node_modules/hosted-git-info": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/normalize-path": {
diff --git a/package.json b/package.json
index 35e5313ee9704..e33b015270a88 100644
--- a/package.json
+++ b/package.json
@@ -93,7 +93,7 @@
     "ms": "^2.1.2",
     "node-gyp": "^11.2.0",
     "nopt": "^8.1.0",
-    "normalize-package-data": "^7.0.1",
+    "normalize-package-data": "^8.0.0",
     "npm-audit-report": "^6.0.0",
     "npm-install-checks": "^7.1.1",
     "npm-package-arg": "^12.0.2",

From 9392488d6036dfc9696e29cc8d463335517974ca Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 14:17:01 -0700
Subject: [PATCH 19/63] deps: npm-pick-manifest@11.0.1

---
 node_modules/.gitignore                       |   6 +
 .../node_modules/npm-pick-manifest/LICENSE.md |   0
 .../npm-pick-manifest/lib/index.js            |  11 +-
 .../npm-pick-manifest/package.json            |  10 +-
 .../npm-pick-manifest/package.json            |  58 -----
 node_modules/npm-pick-manifest/lib/index.js   |  11 +-
 .../node_modules/npm-package-arg/LICENSE      |   0
 .../node_modules/npm-package-arg/lib/npa.js   |   0
 .../node_modules/npm-package-arg/package.json |   0
 node_modules/npm-pick-manifest/package.json   |  10 +-
 .../node_modules/npm-pick-manifest/LICENSE.md |  16 --
 .../npm-pick-manifest/lib/index.js            | 219 ------------------
 package-lock.json                             | 103 ++++----
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 15 files changed, 77 insertions(+), 371 deletions(-)
 rename node_modules/@npmcli/{package-json => git}/node_modules/npm-pick-manifest/LICENSE.md (100%)
 rename node_modules/@npmcli/{package-json => git}/node_modules/npm-pick-manifest/lib/index.js (96%)
 rename node_modules/{pacote => @npmcli/git}/node_modules/npm-pick-manifest/package.json (89%)
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json
 rename node_modules/{@npmcli/package-json => npm-pick-manifest}/node_modules/npm-package-arg/LICENSE (100%)
 rename node_modules/{@npmcli/package-json => npm-pick-manifest}/node_modules/npm-package-arg/lib/npa.js (100%)
 rename node_modules/{@npmcli/package-json => npm-pick-manifest}/node_modules/npm-package-arg/package.json (100%)
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
 delete mode 100644 node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 21f380a400c8c..5fd17f3d4245b 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -21,6 +21,9 @@
 !/@npmcli/agent
 !/@npmcli/fs
 !/@npmcli/git
+!/@npmcli/git/node_modules/
+/@npmcli/git/node_modules/*
+!/@npmcli/git/node_modules/npm-pick-manifest
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/map-workspaces/node_modules/
@@ -197,6 +200,9 @@
 !/npm-packlist/node_modules/ignore-walk
 !/npm-packlist/node_modules/minimatch
 !/npm-pick-manifest
+!/npm-pick-manifest/node_modules/
+/npm-pick-manifest/node_modules/*
+!/npm-pick-manifest/node_modules/npm-package-arg
 !/npm-profile
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/LICENSE.md
rename to node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
similarity index 96%
rename from node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js
rename to node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
index 985c78df7a9bf..82807971844bf 100644
--- a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/lib/index.js
+++ b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
@@ -93,10 +93,13 @@ const pickManifest = (packument, wanted, opts) => {
     throw new Error('Only tag, version, and range are supported')
   }
 
-  // if the type is 'tag', and not just the implicit default, then it must be that exactly, or nothing else will do.
+  // if the type is 'tag', and not just the implicit default, then it must
+  // be that exactly, or nothing else will do.
   if (wanted && type === 'tag') {
     const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then we use that. Otherwise, we get the highest precedence version prior to the dist-tag.
+    // if the version in the dist-tags is before the before date, then
+    // we use that.  Otherwise, we get the highest precedence version
+    // prior to the dist-tag.
     if (isBefore(verTimes, ver, time)) {
       return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
     } else {
@@ -114,7 +117,9 @@ const pickManifest = (packument, wanted, opts) => {
   // ok, sort based on our heuristics, and pick the best fit
   const range = type === 'range' ? wanted : '*'
 
-  // if the range is *, then we prefer the 'latest' if available but skip this if it should be avoided, in that case we have to try a little harder.
+  // if the range is *, then we prefer the 'latest' if available
+  // but skip this if it should be avoided, in that case we have
+  // to try a little harder.
   const defaultVer = distTags[defaultTag]
   if (defaultVer &&
       (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
similarity index 89%
rename from node_modules/pacote/node_modules/npm-pick-manifest/package.json
rename to node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
index f1ca18ed32108..5763088c250b6 100644
--- a/node_modules/pacote/node_modules/npm-pick-manifest/package.json
+++ b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-pick-manifest",
-  "version": "11.0.1",
+  "version": "10.0.0",
   "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
   "main": "./lib",
   "files": [
@@ -32,12 +32,12 @@
   "dependencies": {
     "npm-install-checks": "^7.1.0",
     "npm-normalize-package-bin": "^4.0.0",
-    "npm-package-arg": "^13.0.0",
+    "npm-package-arg": "^12.0.0",
     "semver": "^7.3.5"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
+    "@npmcli/template-oss": "4.23.3",
     "tap": "^16.0.1"
   },
   "tap": {
@@ -48,11 +48,11 @@
     ]
   },
   "engines": {
-    "node": "^20.17.0 || >=22.9.0"
+    "node": "^18.17.0 || >=20.5.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
+    "version": "4.23.3",
     "publish": true
   }
 }
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json
deleted file mode 100644
index f1ca18ed32108..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest/package.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "name": "npm-pick-manifest",
-  "version": "11.0.1",
-  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
-  "main": "./lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "coverage": "tap",
-    "lint": "npm run eslint",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-pick-manifest.git"
-  },
-  "keywords": [
-    "npm",
-    "semver",
-    "package manager"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "npm-install-checks": "^7.1.0",
-    "npm-normalize-package-bin": "^4.0.0",
-    "npm-package-arg": "^13.0.0",
-    "semver": "^7.3.5"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": true
-  }
-}
diff --git a/node_modules/npm-pick-manifest/lib/index.js b/node_modules/npm-pick-manifest/lib/index.js
index 82807971844bf..985c78df7a9bf 100644
--- a/node_modules/npm-pick-manifest/lib/index.js
+++ b/node_modules/npm-pick-manifest/lib/index.js
@@ -93,13 +93,10 @@ const pickManifest = (packument, wanted, opts) => {
     throw new Error('Only tag, version, and range are supported')
   }
 
-  // if the type is 'tag', and not just the implicit default, then it must
-  // be that exactly, or nothing else will do.
+  // if the type is 'tag', and not just the implicit default, then it must be that exactly, or nothing else will do.
   if (wanted && type === 'tag') {
     const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then
-    // we use that.  Otherwise, we get the highest precedence version
-    // prior to the dist-tag.
+    // if the version in the dist-tags is before the before date, then we use that. Otherwise, we get the highest precedence version prior to the dist-tag.
     if (isBefore(verTimes, ver, time)) {
       return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
     } else {
@@ -117,9 +114,7 @@ const pickManifest = (packument, wanted, opts) => {
   // ok, sort based on our heuristics, and pick the best fit
   const range = type === 'range' ? wanted : '*'
 
-  // if the range is *, then we prefer the 'latest' if available
-  // but skip this if it should be avoided, in that case we have
-  // to try a little harder.
+  // if the range is *, then we prefer the 'latest' if available but skip this if it should be avoided, in that case we have to try a little harder.
   const defaultVer = distTags[defaultTag]
   if (defaultVer &&
       (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-package-arg/LICENSE b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/npm-package-arg/LICENSE
rename to node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/@npmcli/package-json/node_modules/npm-package-arg/package.json b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/npm-package-arg/package.json
rename to node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
diff --git a/node_modules/npm-pick-manifest/package.json b/node_modules/npm-pick-manifest/package.json
index 5763088c250b6..f1ca18ed32108 100644
--- a/node_modules/npm-pick-manifest/package.json
+++ b/node_modules/npm-pick-manifest/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-pick-manifest",
-  "version": "10.0.0",
+  "version": "11.0.1",
   "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
   "main": "./lib",
   "files": [
@@ -32,12 +32,12 @@
   "dependencies": {
     "npm-install-checks": "^7.1.0",
     "npm-normalize-package-bin": "^4.0.0",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "semver": "^7.3.5"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
+    "@npmcli/template-oss": "4.25.0",
     "tap": "^16.0.1"
   },
   "tap": {
@@ -48,11 +48,11 @@
     ]
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
+    "version": "4.25.0",
     "publish": true
   }
 }
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js b/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
deleted file mode 100644
index 985c78df7a9bf..0000000000000
--- a/node_modules/pacote/node_modules/npm-pick-manifest/lib/index.js
+++ /dev/null
@@ -1,219 +0,0 @@
-'use strict'
-
-const npa = require('npm-package-arg')
-const semver = require('semver')
-const { checkEngine } = require('npm-install-checks')
-const normalizeBin = require('npm-normalize-package-bin')
-
-const engineOk = (manifest, npmVersion, nodeVersion) => {
-  try {
-    checkEngine(manifest, npmVersion, nodeVersion)
-    return true
-  } catch (_) {
-    return false
-  }
-}
-
-const isBefore = (verTimes, ver, time) =>
-  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
-
-const avoidSemverOpt = { includePrerelease: true, loose: true }
-const shouldAvoid = (ver, avoid) =>
-  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
-
-const decorateAvoid = (result, avoid) =>
-  result && shouldAvoid(result.version, avoid)
-    ? { ...result, _shouldAvoid: true }
-    : result
-
-const pickManifest = (packument, wanted, opts) => {
-  const {
-    defaultTag = 'latest',
-    before = null,
-    nodeVersion = process.version,
-    npmVersion = null,
-    includeStaged = false,
-    avoid = null,
-    avoidStrict = false,
-  } = opts
-
-  const { name, time: verTimes } = packument
-  const versions = packument.versions || {}
-
-  if (avoidStrict) {
-    const looseOpts = {
-      ...opts,
-      avoidStrict: false,
-    }
-
-    const result = pickManifest(packument, wanted, looseOpts)
-    if (!result || !result._shouldAvoid) {
-      return result
-    }
-
-    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
-    if (!caret || !caret._shouldAvoid) {
-      return {
-        ...caret,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: false,
-      }
-    }
-
-    const star = pickManifest(packument, '*', looseOpts)
-    if (!star || !star._shouldAvoid) {
-      return {
-        ...star,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: true,
-      }
-    }
-
-    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
-      code: 'ETARGET',
-      name,
-      wanted,
-      avoid,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const staged = (includeStaged && packument.stagedVersions &&
-    packument.stagedVersions.versions) || {}
-  const restricted = (packument.policyRestrictions &&
-    packument.policyRestrictions.versions) || {}
-
-  const time = before && verTimes ? +(new Date(before)) : Infinity
-  const spec = npa.resolve(name, wanted || defaultTag)
-  const type = spec.type
-  const distTags = packument['dist-tags'] || {}
-
-  if (type !== 'tag' && type !== 'version' && type !== 'range') {
-    throw new Error('Only tag, version, and range are supported')
-  }
-
-  // if the type is 'tag', and not just the implicit default, then it must be that exactly, or nothing else will do.
-  if (wanted && type === 'tag') {
-    const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then we use that. Otherwise, we get the highest precedence version prior to the dist-tag.
-    if (isBefore(verTimes, ver, time)) {
-      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
-    } else {
-      return pickManifest(packument, `<=${ver}`, opts)
-    }
-  }
-
-  // similarly, if a specific version, then only that version will do
-  if (wanted && type === 'version') {
-    const ver = semver.clean(wanted, { loose: true })
-    const mani = versions[ver] || staged[ver] || restricted[ver]
-    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
-  }
-
-  // ok, sort based on our heuristics, and pick the best fit
-  const range = type === 'range' ? wanted : '*'
-
-  // if the range is *, then we prefer the 'latest' if available but skip this if it should be avoided, in that case we have to try a little harder.
-  const defaultVer = distTags[defaultTag]
-  if (defaultVer &&
-      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
-      !restricted[defaultVer] &&
-      !shouldAvoid(defaultVer, avoid)) {
-    const mani = versions[defaultVer]
-    const ok = mani &&
-      isBefore(verTimes, defaultVer, time) &&
-      engineOk(mani, npmVersion, nodeVersion) &&
-      !mani.deprecated &&
-      !staged[defaultVer]
-    if (ok) {
-      return mani
-    }
-  }
-
-  // ok, actually have to sort the list and take the winner
-  const allEntries = Object.entries(versions)
-    .concat(Object.entries(staged))
-    .concat(Object.entries(restricted))
-    .filter(([ver]) => isBefore(verTimes, ver, time))
-
-  if (!allEntries.length) {
-    throw Object.assign(new Error(`No versions available for ${name}`), {
-      code: 'ENOVERSIONS',
-      name,
-      type,
-      wanted,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const sortSemverOpt = { loose: true }
-  const entries = allEntries.filter(([ver]) =>
-    semver.satisfies(ver, range, { loose: true }))
-    .sort((a, b) => {
-      const [vera, mania] = a
-      const [verb, manib] = b
-      const notavoida = !shouldAvoid(vera, avoid)
-      const notavoidb = !shouldAvoid(verb, avoid)
-      const notrestra = !restricted[vera]
-      const notrestrb = !restricted[verb]
-      const notstagea = !staged[vera]
-      const notstageb = !staged[verb]
-      const notdepra = !mania.deprecated
-      const notdeprb = !manib.deprecated
-      const enginea = engineOk(mania, npmVersion, nodeVersion)
-      const engineb = engineOk(manib, npmVersion, nodeVersion)
-      // sort by:
-      // - not an avoided version
-      // - not restricted
-      // - not staged
-      // - not deprecated and engine ok
-      // - engine ok
-      // - not deprecated
-      // - semver
-      return (notavoidb - notavoida) ||
-        (notrestrb - notrestra) ||
-        (notstageb - notstagea) ||
-        ((notdeprb && engineb) - (notdepra && enginea)) ||
-        (engineb - enginea) ||
-        (notdeprb - notdepra) ||
-        semver.rcompare(vera, verb, sortSemverOpt)
-    })
-
-  return decorateAvoid(entries[0] && entries[0][1], avoid)
-}
-
-module.exports = (packument, wanted, opts = {}) => {
-  const mani = pickManifest(packument, wanted, opts)
-  const picked = mani && normalizeBin(mani)
-  const policyRestrictions = packument.policyRestrictions
-  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
-
-  if (picked && !restricted[picked.version]) {
-    return picked
-  }
-
-  const { before = null, defaultTag = 'latest' } = opts
-  const bstr = before ? new Date(before).toLocaleString() : ''
-  const { name } = packument
-  const pckg = `${name}@${wanted}` +
-    (before ? ` with a date before ${bstr}` : '')
-
-  const isForbidden = picked && !!restricted[picked.version]
-  const polMsg = isForbidden ? policyRestrictions.message : ''
-
-  const msg = !isForbidden ? `No matching version found for ${pckg}.`
-    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
-
-  const code = isForbidden ? 'E403' : 'ETARGET'
-  throw Object.assign(new Error(msg), {
-    code,
-    type: npa.resolve(packument.name, wanted).type,
-    wanted,
-    versions: Object.keys(packument.versions ?? {}),
-    name,
-    distTags: packument['dist-tags'],
-    defaultTag,
-  })
-}
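
The file removed above is pacote's nested copy of npm-pick-manifest@11.0.1; it goes away because the top-level node_modules/npm-pick-manifest is bumped to that same 11.0.1 release elsewhere in this patch. For reference, a minimal, hypothetical usage sketch of the `pickManifest(packument, wanted, opts)` API shown in that listing; the package name, versions, and date below are illustrative and not taken from this patch:

```js
// Hypothetical sketch of calling npm-pick-manifest, per the implementation listed above.
const pickManifest = require('npm-pick-manifest')

const packument = {
  name: 'example-pkg',
  'dist-tags': { latest: '2.0.0' },
  versions: {
    '1.0.0': { name: 'example-pkg', version: '1.0.0' },
    '2.0.0': { name: 'example-pkg', version: '2.0.0' },
  },
}

// '2.0.0' is the 'latest' dist-tag and satisfies the range, so the fast path returns it
console.log(pickManifest(packument, '^2.0.0').version) // 2.0.0

// with no packument.time data, a `before` date filters nothing out:
// isBefore() treats versions without timestamps as eligible
console.log(pickManifest(packument, '*', { before: '2020-01-01' }).version) // 2.0.0
```
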
diff --git a/package-lock.json b/package-lock.json
index 7afa0d91ab556..dbdfb7625d944 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -130,7 +130,7 @@
         "npm-audit-report": "^6.0.0",
         "npm-install-checks": "^7.1.1",
         "npm-package-arg": "^12.0.2",
-        "npm-pick-manifest": "^10.0.0",
+        "npm-pick-manifest": "^11.0.1",
         "npm-profile": "^12.0.0",
         "npm-registry-fetch": "^19.0.0",
         "npm-user-validate": "^3.0.0",
@@ -3438,6 +3438,21 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
+      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
+      "license": "ISC",
+      "dependencies": {
+        "npm-install-checks": "^7.1.0",
+        "npm-normalize-package-bin": "^4.0.0",
+        "npm-package-arg": "^12.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz",
@@ -3703,38 +3718,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/npm-package-arg": {
-      "version": "13.0.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
-      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^9.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/@npmcli/package-json/node_modules/npm-pick-manifest": {
-      "version": "11.0.1",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.1.tgz",
-      "integrity": "sha512-HnU7FYSWbo7dTVHtK0G+BXbZ0aIfxz/aUCVLN0979Ec6rGUX5cJ6RbgVx5fqb5G31ufz+BVFA7y1SkRTPVNoVQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^13.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/@npmcli/package-json/node_modules/path-scurry": {
       "version": "2.0.0",
       "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
@@ -12891,19 +12874,35 @@
       }
     },
     "node_modules/npm-pick-manifest": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
-      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
+      "version": "11.0.1",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.1.tgz",
+      "integrity": "sha512-HnU7FYSWbo7dTVHtK0G+BXbZ0aIfxz/aUCVLN0979Ec6rGUX5cJ6RbgVx5fqb5G31ufz+BVFA7y1SkRTPVNoVQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "npm-install-checks": "^7.1.0",
         "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "semver": "^7.3.5"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
+      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^9.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
+      },
+      "engines": {
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-profile": {
@@ -13601,6 +13600,16 @@
         "node": ">=18"
       }
     },
+    "node_modules/pacote/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
+      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
+      "inBundle": true,
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
+    },
     "node_modules/pacote/node_modules/minizlib": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
@@ -13646,22 +13655,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/npm-pick-manifest": {
-      "version": "11.0.1",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.1.tgz",
-      "integrity": "sha512-HnU7FYSWbo7dTVHtK0G+BXbZ0aIfxz/aUCVLN0979Ec6rGUX5cJ6RbgVx5fqb5G31ufz+BVFA7y1SkRTPVNoVQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^13.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/tar": {
       "version": "7.4.3",
       "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
@@ -19494,7 +19487,7 @@
         "nopt": "^8.0.0",
         "npm-install-checks": "^7.1.0",
         "npm-package-arg": "^12.0.0",
-        "npm-pick-manifest": "^10.0.0",
+        "npm-pick-manifest": "^11.0.1",
         "npm-registry-fetch": "^19.0.0",
         "pacote": "^21.0.2",
         "parse-conflict-json": "^4.0.0",
diff --git a/package.json b/package.json
index e33b015270a88..ed53c0852d12c 100644
--- a/package.json
+++ b/package.json
@@ -97,7 +97,7 @@
     "npm-audit-report": "^6.0.0",
     "npm-install-checks": "^7.1.1",
     "npm-package-arg": "^12.0.2",
-    "npm-pick-manifest": "^10.0.0",
+    "npm-pick-manifest": "^11.0.1",
     "npm-profile": "^12.0.0",
     "npm-registry-fetch": "^19.0.0",
     "npm-user-validate": "^3.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 40a08b66a7ff1..142c62a65a5c3 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -24,7 +24,7 @@
     "nopt": "^8.0.0",
     "npm-install-checks": "^7.1.0",
     "npm-package-arg": "^12.0.0",
-    "npm-pick-manifest": "^10.0.0",
+    "npm-pick-manifest": "^11.0.1",
     "npm-registry-fetch": "^19.0.0",
     "pacote": "^21.0.2",
     "parse-conflict-json": "^4.0.0",

From bf6b6862731e03002cc6fa3b86b6f090df46b009 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Wed, 17 Sep 2025 14:18:40 -0700
Subject: [PATCH 20/63] deps: npm-package-arg@13.0.0

---
 mock-registry/package.json                    |   2 +-
 node_modules/.gitignore                       |  12 +-
 .../git}/node_modules/hosted-git-info/LICENSE |   0
 .../hosted-git-info/lib/from-url.js           |   0
 .../node_modules/hosted-git-info/lib/hosts.js |   0
 .../node_modules/hosted-git-info/lib/index.js |   0
 .../hosted-git-info/lib/parse-url.js          |   0
 .../node_modules/hosted-git-info/package.json |   0
 .../git}/node_modules/npm-package-arg/LICENSE |   0
 .../node_modules/npm-package-arg/lib/npa.js   |   0
 .../node_modules/npm-package-arg/package.json |   6 +-
 node_modules/npm-package-arg/package.json     |   6 +-
 .../node_modules/npm-package-arg/LICENSE      |  15 -
 .../node_modules/npm-package-arg/lib/npa.js   | 481 ------------------
 .../node_modules/npm-package-arg/package.json |  61 ---
 .../node_modules/npm-package-arg/LICENSE      |  15 -
 .../node_modules/npm-package-arg/lib/npa.js   | 481 ------------------
 .../node_modules/npm-package-arg/package.json |  61 ---
 .../node_modules/npm-package-arg/LICENSE      |  15 -
 .../node_modules/npm-package-arg/lib/npa.js   | 481 ------------------
 .../node_modules/npm-package-arg/package.json |  61 ---
 package-lock.json                             | 323 +++++++-----
 package.json                                  |   2 +-
 workspaces/arborist/package.json              |   2 +-
 workspaces/libnpmaccess/package.json          |   2 +-
 workspaces/libnpmdiff/package.json            |   2 +-
 workspaces/libnpmexec/package.json            |   2 +-
 workspaces/libnpmpack/package.json            |   2 +-
 workspaces/libnpmpublish/package.json         |   2 +-
 29 files changed, 198 insertions(+), 1836 deletions(-)
 rename node_modules/{npm-package-arg => @npmcli/git}/node_modules/hosted-git-info/LICENSE (100%)
 rename node_modules/{npm-package-arg => @npmcli/git}/node_modules/hosted-git-info/lib/from-url.js (100%)
 rename node_modules/{npm-package-arg => @npmcli/git}/node_modules/hosted-git-info/lib/hosts.js (100%)
 rename node_modules/{npm-package-arg => @npmcli/git}/node_modules/hosted-git-info/lib/index.js (100%)
 rename node_modules/{npm-package-arg => @npmcli/git}/node_modules/hosted-git-info/lib/parse-url.js (100%)
 rename node_modules/{npm-package-arg => @npmcli/git}/node_modules/hosted-git-info/package.json (100%)
 rename node_modules/{init-package-json => @npmcli/git}/node_modules/npm-package-arg/LICENSE (100%)
 rename node_modules/{init-package-json => @npmcli/git}/node_modules/npm-package-arg/lib/npa.js (100%)
 rename node_modules/{init-package-json => @npmcli/git}/node_modules/npm-package-arg/package.json (94%)
 delete mode 100644 node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
 delete mode 100644 node_modules/pacote/node_modules/npm-package-arg/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/pacote/node_modules/npm-package-arg/package.json

diff --git a/mock-registry/package.json b/mock-registry/package.json
index 5e854daa47ff9..4db2bda9ee0dd 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -51,7 +51,7 @@
     "@npmcli/template-oss": "4.24.4",
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.3",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",
     "tap": "^16.3.8"
   }
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 5fd17f3d4245b..d3ea3a40edd1a 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -23,6 +23,8 @@
 !/@npmcli/git
 !/@npmcli/git/node_modules/
 /@npmcli/git/node_modules/*
+!/@npmcli/git/node_modules/hosted-git-info
+!/@npmcli/git/node_modules/npm-package-arg
 !/@npmcli/git/node_modules/npm-pick-manifest
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
@@ -123,9 +125,6 @@
 !/imurmurhash
 !/ini
 !/init-package-json
-!/init-package-json/node_modules/
-/init-package-json/node_modules/*
-!/init-package-json/node_modules/npm-package-arg
 !/ip-address
 !/ip-regex
 !/is-cidr
@@ -191,24 +190,17 @@
 !/npm-install-checks
 !/npm-normalize-package-bin
 !/npm-package-arg
-!/npm-package-arg/node_modules/
-/npm-package-arg/node_modules/*
-!/npm-package-arg/node_modules/hosted-git-info
 !/npm-packlist
 !/npm-packlist/node_modules/
 /npm-packlist/node_modules/*
 !/npm-packlist/node_modules/ignore-walk
 !/npm-packlist/node_modules/minimatch
 !/npm-pick-manifest
-!/npm-pick-manifest/node_modules/
-/npm-pick-manifest/node_modules/*
-!/npm-pick-manifest/node_modules/npm-package-arg
 !/npm-profile
 !/npm-registry-fetch
 !/npm-registry-fetch/node_modules/
 /npm-registry-fetch/node_modules/*
 !/npm-registry-fetch/node_modules/minizlib
-!/npm-registry-fetch/node_modules/npm-package-arg
 !/npm-user-validate
 !/p-map
 !/package-json-from-dist
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/LICENSE
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/from-url.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/hosts.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/index.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/lib/parse-url.js
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
diff --git a/node_modules/npm-package-arg/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
similarity index 100%
rename from node_modules/npm-package-arg/node_modules/hosted-git-info/package.json
rename to node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
similarity index 100%
rename from node_modules/init-package-json/node_modules/npm-package-arg/LICENSE
rename to node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
similarity index 100%
rename from node_modules/init-package-json/node_modules/npm-package-arg/lib/npa.js
rename to node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
diff --git a/node_modules/init-package-json/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
similarity index 94%
rename from node_modules/init-package-json/node_modules/npm-package-arg/package.json
rename to node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
index db6ce9074cfa2..58920fe240e5f 100644
--- a/node_modules/init-package-json/node_modules/npm-package-arg/package.json
+++ b/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-package-arg",
-  "version": "13.0.0",
+  "version": "12.0.2",
   "description": "Parse the things that can be arguments to `npm install`",
   "main": "./lib/npa.js",
   "directories": {
@@ -11,7 +11,7 @@
     "lib/"
   ],
   "dependencies": {
-    "hosted-git-info": "^9.0.0",
+    "hosted-git-info": "^8.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.3.5",
     "validate-npm-package-name": "^6.0.0"
@@ -44,7 +44,7 @@
   },
   "homepage": "https://github.com/npm/npm-package-arg",
   "engines": {
-    "node": "^20.17.0 || >=22.9.0"
+    "node": "^18.17.0 || >=20.5.0"
   },
   "tap": {
     "branches": 97,
diff --git a/node_modules/npm-package-arg/package.json b/node_modules/npm-package-arg/package.json
index 58920fe240e5f..db6ce9074cfa2 100644
--- a/node_modules/npm-package-arg/package.json
+++ b/node_modules/npm-package-arg/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-package-arg",
-  "version": "12.0.2",
+  "version": "13.0.0",
   "description": "Parse the things that can be arguments to `npm install`",
   "main": "./lib/npa.js",
   "directories": {
@@ -11,7 +11,7 @@
     "lib/"
   ],
   "dependencies": {
-    "hosted-git-info": "^8.0.0",
+    "hosted-git-info": "^9.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.3.5",
     "validate-npm-package-name": "^6.0.0"
@@ -44,7 +44,7 @@
   },
   "homepage": "https://github.com/npm/npm-package-arg",
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "tap": {
     "branches": 97,
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index d409b7f1becfc..0000000000000
--- a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,481 +0,0 @@
-'use strict'
-
-const isWindows = process.platform === 'win32'
-
-const { URL } = require('node:url')
-// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
-const path = isWindows ? require('node:path/win32') : require('node:path')
-const { homedir } = require('node:os')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const validatePackageName = require('validate-npm-package-name')
-const { log } = require('proc-log')
-
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFileType = /[.](?:tgz|tar.gz|tar)$/i
-const isPortNumber = /:[0-9]+(\/|$)/i
-const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
-const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  // eslint-disable-next-line max-len
-  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-function isFileSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  if (spec.toLowerCase().startsWith('file:')) {
-    return true
-  }
-  if (isWindows) {
-    return isWindowsFile.test(spec)
-  }
-  // We never hit this in windows tests, obviously
-  /* istanbul ignore next */
-  return isPosixFile.test(spec)
-}
-
-function isAliasSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  return spec.toLowerCase().startsWith('npm:')
-}
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.name = name
-  }
-
-  if (!where) {
-    where = process.cwd()
-  }
-
-  if (isFileSpec(spec)) {
-    return fromFile(res, where)
-  } else if (isAliasSpec(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-class Result {
-  constructor (opts) {
-    this.type = opts.type
-    this.registry = opts.registry
-    this.where = opts.where
-    if (opts.raw == null) {
-      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
-    } else {
-      this.raw = opts.raw
-    }
-    this.name = undefined
-    this.escapedName = undefined
-    this.scope = undefined
-    this.rawSpec = opts.rawSpec || ''
-    this.saveSpec = opts.saveSpec
-    this.fetchSpec = opts.fetchSpec
-    if (opts.name) {
-      this.setName(opts.name)
-    }
-    this.gitRange = opts.gitRange
-    this.gitCommittish = opts.gitCommittish
-    this.gitSubdir = opts.gitSubdir
-    this.hosted = opts.hosted
-  }
-
-  // TODO move this to a getter/setter in a semver major
-  setName (name) {
-    const valid = validatePackageName(name)
-    if (!valid.validForOldPackages) {
-      throw invalidPackageName(name, valid, this.raw)
-    }
-
-    this.name = name
-    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-    this.escapedName = name.replace('/', '%2f')
-    return this
-  }
-
-  toString () {
-    const full = []
-    if (this.name != null && this.name !== '') {
-      full.push(this.name)
-    }
-    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-    if (spec != null && spec !== '') {
-      full.push(spec)
-    }
-    return full.length ? full.join('@') : this.raw
-  }
-
-  toJSON () {
-    const result = Object.assign({}, this)
-    delete result.hosted
-    return result
-  }
-}
-
-// sets res.gitCommittish, res.gitRange, and res.gitSubdir
-function setGitAttrs (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-}
-
-// Taken from: EncodePathChars and lookup_table in src/node_url.cc
-// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
-// encodeURI mangles windows paths. We can't use it to encode paths.
-// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
-// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
-const encodedPathChars = new Map([
-  ['\0', '%00'],
-  ['\t', '%09'],
-  ['\n', '%0A'],
-  ['\r', '%0D'],
-  [' ', '%20'],
-  ['"', '%22'],
-  ['#', '%23'],
-  ['%', '%25'],
-  ['?', '%3F'],
-  ['[', '%5B'],
-  ['\\', isWindows ? '/' : '%5C'],
-  [']', '%5D'],
-  ['^', '%5E'],
-  ['|', '%7C'],
-  ['~', '%7E'],
-])
-
-function pathToFileURL (str) {
-  let result = ''
-  for (let i = 0; i < str.length; i++) {
-    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
-  }
-  if (result.startsWith('file:')) {
-    return result
-  }
-  return `file:${result}`
-}
-
-function fromFile (res, where) {
-  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  let rawSpec = pathToFileURL(res.rawSpec)
-
-  if (rawSpec.startsWith('file:/')) {
-    // XXX backwards compatibility lack of compliance with RFC 8089
-
-    // turn file://path into file:/path
-    if (/^file:\/\/[^/]/.test(rawSpec)) {
-      rawSpec = `file:/${rawSpec.slice(5)}`
-    }
-
-    // turn file:/../path into file:../path
-    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
-      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
-    }
-  }
-
-  let resolvedUrl
-  let specUrl
-  try {
-    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
-    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
-    specUrl = new URL(rawSpec)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8089')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawSpec.slice(5))) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
-  res.saveSpec = res.saveSpec.split('\\').join('/')
-  // Ignoring because this only happens in windows
-  /* istanbul ignore next */
-  if (res.saveSpec.startsWith('file://')) {
-    // normalization of \\win32\root paths can cause a double / which we don't want
-    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
-  }
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  setGitAttrs(res, hosted.committish)
-  return res
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function fromURL (res) {
-  let rawSpec = res.rawSpec
-  res.saveSpec = rawSpec
-  if (rawSpec.startsWith('git+ssh:')) {
-    // git ssh specifiers are overloaded to also use scp-style git
-    // specifiers, so we have to parse those out and treat them special.
-    // They are NOT true URIs, so we can't hand them to URL.
-
-    // This regex looks for things that look like:
-    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-    // ...and various combinations. The username in the beginning is *required*.
-    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-    // Filter out all-number "usernames" which are really port numbers
-    // They can either be :1234 :1234/ or :1234/path but not :12abc
-    if (matched && !matched[1].match(isPortNumber)) {
-      res.type = 'git'
-      setGitAttrs(res, matched[2])
-      res.fetchSpec = matched[1]
-      return res
-    }
-  } else if (rawSpec.startsWith('git+file://')) {
-    // URL can't handle windows paths
-    rawSpec = rawSpec.replace(/\\/g, '/')
-  }
-  const parsedUrl = new URL(rawSpec)
-  // check the protocol, and then see if it's git or not
-  switch (parsedUrl.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:':
-      res.type = 'git'
-      setGitAttrs(res, parsedUrl.hash.slice(1))
-      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
-        // URL can't handle drive letters on windows file paths, the host can't contain a :
-        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
-      } else {
-        parsedUrl.hash = ''
-        res.fetchSpec = parsedUrl.toString()
-      }
-      if (res.fetchSpec.startsWith('git+')) {
-        res.fetchSpec = res.fetchSpec.slice(4)
-      }
-      break
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  if (!subSpec.name) {
-    throw new Error('aliases must have a name')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components as we save based on the fetched
-  // version, not on the argument so this can't compute that.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
-
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
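
The npa.js deleted above is the copy nested under npm-pick-manifest; after this patch the v13 implementation lives only at the top-level node_modules/npm-package-arg (bumped to 13.0.0 earlier in this diff), with a nested v12 copy remaining under @npmcli/git. As orientation, a small, hypothetical sketch of the parser API that listing exports (`npa()` and `npa.resolve()`); the specifiers are illustrative, not taken from this patch:

```js
// Hypothetical usage sketch of npm-package-arg, matching the npa.js listing above.
const npa = require('npm-package-arg')

// registry range: type 'range', name 'foo', fetchSpec '^1.2.3', registry true
console.log(npa('foo@^1.2.3'))

// alias: type 'alias', with a subSpec describing the real registry target
console.log(npa('my-alias@npm:foo@2.x'))

// hosted git shortcut with a semver committish: type 'git', gitRange '^10'
console.log(npa('npm/cli#semver:^10'))

// resolve() takes name and spec separately, plus a 'where' directory used to
// resolve file: and directory specs into an absolute fetchSpec
console.log(npa.resolve('bar', 'file:../bar', '/tmp/project').fetchSpec)
```
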
diff --git a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json b/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
deleted file mode 100644
index db6ce9074cfa2..0000000000000
--- a/node_modules/npm-pick-manifest/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "13.0.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^9.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^6.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.5",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.5",
-    "publish": true
-  }
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index d409b7f1becfc..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,481 +0,0 @@
-'use strict'
-
-const isWindows = process.platform === 'win32'
-
-const { URL } = require('node:url')
-// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
-const path = isWindows ? require('node:path/win32') : require('node:path')
-const { homedir } = require('node:os')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const validatePackageName = require('validate-npm-package-name')
-const { log } = require('proc-log')
-
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFileType = /[.](?:tgz|tar.gz|tar)$/i
-const isPortNumber = /:[0-9]+(\/|$)/i
-const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
-const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  // eslint-disable-next-line max-len
-  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-function isFileSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  if (spec.toLowerCase().startsWith('file:')) {
-    return true
-  }
-  if (isWindows) {
-    return isWindowsFile.test(spec)
-  }
-  // We never hit this in windows tests, obviously
-  /* istanbul ignore next */
-  return isPosixFile.test(spec)
-}
-
-function isAliasSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  return spec.toLowerCase().startsWith('npm:')
-}
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.name = name
-  }
-
-  if (!where) {
-    where = process.cwd()
-  }
-
-  if (isFileSpec(spec)) {
-    return fromFile(res, where)
-  } else if (isAliasSpec(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-class Result {
-  constructor (opts) {
-    this.type = opts.type
-    this.registry = opts.registry
-    this.where = opts.where
-    if (opts.raw == null) {
-      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
-    } else {
-      this.raw = opts.raw
-    }
-    this.name = undefined
-    this.escapedName = undefined
-    this.scope = undefined
-    this.rawSpec = opts.rawSpec || ''
-    this.saveSpec = opts.saveSpec
-    this.fetchSpec = opts.fetchSpec
-    if (opts.name) {
-      this.setName(opts.name)
-    }
-    this.gitRange = opts.gitRange
-    this.gitCommittish = opts.gitCommittish
-    this.gitSubdir = opts.gitSubdir
-    this.hosted = opts.hosted
-  }
-
-  // TODO move this to a getter/setter in a semver major
-  setName (name) {
-    const valid = validatePackageName(name)
-    if (!valid.validForOldPackages) {
-      throw invalidPackageName(name, valid, this.raw)
-    }
-
-    this.name = name
-    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-    this.escapedName = name.replace('/', '%2f')
-    return this
-  }
-
-  toString () {
-    const full = []
-    if (this.name != null && this.name !== '') {
-      full.push(this.name)
-    }
-    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-    if (spec != null && spec !== '') {
-      full.push(spec)
-    }
-    return full.length ? full.join('@') : this.raw
-  }
-
-  toJSON () {
-    const result = Object.assign({}, this)
-    delete result.hosted
-    return result
-  }
-}
-
-// sets res.gitCommittish, res.gitRange, and res.gitSubdir
-function setGitAttrs (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-}
-
-// Taken from: EncodePathChars and lookup_table in src/node_url.cc
-// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
-// encodeURI mangles windows paths. We can't use it to encode paths.
-// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
-// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
-const encodedPathChars = new Map([
-  ['\0', '%00'],
-  ['\t', '%09'],
-  ['\n', '%0A'],
-  ['\r', '%0D'],
-  [' ', '%20'],
-  ['"', '%22'],
-  ['#', '%23'],
-  ['%', '%25'],
-  ['?', '%3F'],
-  ['[', '%5B'],
-  ['\\', isWindows ? '/' : '%5C'],
-  [']', '%5D'],
-  ['^', '%5E'],
-  ['|', '%7C'],
-  ['~', '%7E'],
-])
-
-function pathToFileURL (str) {
-  let result = ''
-  for (let i = 0; i < str.length; i++) {
-    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
-  }
-  if (result.startsWith('file:')) {
-    return result
-  }
-  return `file:${result}`
-}
-
-function fromFile (res, where) {
-  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  let rawSpec = pathToFileURL(res.rawSpec)
-
-  if (rawSpec.startsWith('file:/')) {
-    // XXX backwards compatibility lack of compliance with RFC 8089
-
-    // turn file://path into file:/path
-    if (/^file:\/\/[^/]/.test(rawSpec)) {
-      rawSpec = `file:/${rawSpec.slice(5)}`
-    }
-
-    // turn file:/../path into file:../path
-    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
-      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
-    }
-  }
-
-  let resolvedUrl
-  let specUrl
-  try {
-    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
-    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
-    specUrl = new URL(rawSpec)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8089')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawSpec.slice(5))) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
-  res.saveSpec = res.saveSpec.split('\\').join('/')
-  // Ignoring because this only happens in windows
-  /* istanbul ignore next */
-  if (res.saveSpec.startsWith('file://')) {
-    // normalization of \\win32\root paths can cause a double / which we don't want
-    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
-  }
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  setGitAttrs(res, hosted.committish)
-  return res
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function fromURL (res) {
-  let rawSpec = res.rawSpec
-  res.saveSpec = rawSpec
-  if (rawSpec.startsWith('git+ssh:')) {
-    // git ssh specifiers are overloaded to also use scp-style git
-    // specifiers, so we have to parse those out and treat them special.
-    // They are NOT true URIs, so we can't hand them to URL.
-
-    // This regex looks for things that look like:
-    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-    // ...and various combinations. The username in the beginning is *required*.
-    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-    // Filter out all-number "usernames" which are really port numbers
-    // They can either be :1234 :1234/ or :1234/path but not :12abc
-    if (matched && !matched[1].match(isPortNumber)) {
-      res.type = 'git'
-      setGitAttrs(res, matched[2])
-      res.fetchSpec = matched[1]
-      return res
-    }
-  } else if (rawSpec.startsWith('git+file://')) {
-    // URL can't handle windows paths
-    rawSpec = rawSpec.replace(/\\/g, '/')
-  }
-  const parsedUrl = new URL(rawSpec)
-  // check the protocol, and then see if it's git or not
-  switch (parsedUrl.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:':
-      res.type = 'git'
-      setGitAttrs(res, parsedUrl.hash.slice(1))
-      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
-        // URL can't handle drive letters on windows file paths, the host can't contain a :
-        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
-      } else {
-        parsedUrl.hash = ''
-        res.fetchSpec = parsedUrl.toString()
-      }
-      if (res.fetchSpec.startsWith('git+')) {
-        res.fetchSpec = res.fetchSpec.slice(4)
-      }
-      break
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  if (!subSpec.name) {
-    throw new Error('aliases must have a name')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components as we save based on the fetched
-  // version, not on the argument so this can't compute that.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
-
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
diff --git a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json b/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
deleted file mode 100644
index db6ce9074cfa2..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "13.0.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^9.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^6.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.5",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.5",
-    "publish": true
-  }
-}
diff --git a/node_modules/pacote/node_modules/npm-package-arg/LICENSE b/node_modules/pacote/node_modules/npm-package-arg/LICENSE
deleted file mode 100644
index 19cec97b18468..0000000000000
--- a/node_modules/pacote/node_modules/npm-package-arg/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js b/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index d409b7f1becfc..0000000000000
--- a/node_modules/pacote/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,481 +0,0 @@
-'use strict'
-
-const isWindows = process.platform === 'win32'
-
-const { URL } = require('node:url')
-// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
-const path = isWindows ? require('node:path/win32') : require('node:path')
-const { homedir } = require('node:os')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const validatePackageName = require('validate-npm-package-name')
-const { log } = require('proc-log')
-
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFileType = /[.](?:tgz|tar.gz|tar)$/i
-const isPortNumber = /:[0-9]+(\/|$)/i
-const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
-const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  // eslint-disable-next-line max-len
-  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-function isFileSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  if (spec.toLowerCase().startsWith('file:')) {
-    return true
-  }
-  if (isWindows) {
-    return isWindowsFile.test(spec)
-  }
-  // We never hit this in windows tests, obviously
-  /* istanbul ignore next */
-  return isPosixFile.test(spec)
-}
-
-function isAliasSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  return spec.toLowerCase().startsWith('npm:')
-}
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.name = name
-  }
-
-  if (!where) {
-    where = process.cwd()
-  }
-
-  if (isFileSpec(spec)) {
-    return fromFile(res, where)
-  } else if (isAliasSpec(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-class Result {
-  constructor (opts) {
-    this.type = opts.type
-    this.registry = opts.registry
-    this.where = opts.where
-    if (opts.raw == null) {
-      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
-    } else {
-      this.raw = opts.raw
-    }
-    this.name = undefined
-    this.escapedName = undefined
-    this.scope = undefined
-    this.rawSpec = opts.rawSpec || ''
-    this.saveSpec = opts.saveSpec
-    this.fetchSpec = opts.fetchSpec
-    if (opts.name) {
-      this.setName(opts.name)
-    }
-    this.gitRange = opts.gitRange
-    this.gitCommittish = opts.gitCommittish
-    this.gitSubdir = opts.gitSubdir
-    this.hosted = opts.hosted
-  }
-
-  // TODO move this to a getter/setter in a semver major
-  setName (name) {
-    const valid = validatePackageName(name)
-    if (!valid.validForOldPackages) {
-      throw invalidPackageName(name, valid, this.raw)
-    }
-
-    this.name = name
-    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-    this.escapedName = name.replace('/', '%2f')
-    return this
-  }
-
-  toString () {
-    const full = []
-    if (this.name != null && this.name !== '') {
-      full.push(this.name)
-    }
-    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-    if (spec != null && spec !== '') {
-      full.push(spec)
-    }
-    return full.length ? full.join('@') : this.raw
-  }
-
-  toJSON () {
-    const result = Object.assign({}, this)
-    delete result.hosted
-    return result
-  }
-}
-
-// sets res.gitCommittish, res.gitRange, and res.gitSubdir
-function setGitAttrs (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : then it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-}
-
-// Taken from: EncodePathChars and lookup_table in src/node_url.cc
-// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
-// encodeURI mangles windows paths. We can't use it to encode paths.
-// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
-// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
-const encodedPathChars = new Map([
-  ['\0', '%00'],
-  ['\t', '%09'],
-  ['\n', '%0A'],
-  ['\r', '%0D'],
-  [' ', '%20'],
-  ['"', '%22'],
-  ['#', '%23'],
-  ['%', '%25'],
-  ['?', '%3F'],
-  ['[', '%5B'],
-  ['\\', isWindows ? '/' : '%5C'],
-  [']', '%5D'],
-  ['^', '%5E'],
-  ['|', '%7C'],
-  ['~', '%7E'],
-])
-
-function pathToFileURL (str) {
-  let result = ''
-  for (let i = 0; i < str.length; i++) {
-    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
-  }
-  if (result.startsWith('file:')) {
-    return result
-  }
-  return `file:${result}`
-}
-
-function fromFile (res, where) {
-  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  let rawSpec = pathToFileURL(res.rawSpec)
-
-  if (rawSpec.startsWith('file:/')) {
-    // XXX backwards compatibility lack of compliance with RFC 8089
-
-    // turn file://path into file:/path
-    if (/^file:\/\/[^/]/.test(rawSpec)) {
-      rawSpec = `file:/${rawSpec.slice(5)}`
-    }
-
-    // turn file:/../path into file:../path
-    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
-      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
-    }
-  }
-
-  let resolvedUrl
-  let specUrl
-  try {
-    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
-    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
-    specUrl = new URL(rawSpec)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8089')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawSpec.slice(5))) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
-  res.saveSpec = res.saveSpec.split('\\').join('/')
-  // Ignoring because this only happens in windows
-  /* istanbul ignore next */
-  if (res.saveSpec.startsWith('file://')) {
-    // normalization of \\win32\root paths can cause a double / which we don't want
-    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
-  }
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  setGitAttrs(res, hosted.committish)
-  return res
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function fromURL (res) {
-  let rawSpec = res.rawSpec
-  res.saveSpec = rawSpec
-  if (rawSpec.startsWith('git+ssh:')) {
-    // git ssh specifiers are overloaded to also use scp-style git
-    // specifiers, so we have to parse those out and treat them special.
-    // They are NOT true URIs, so we can't hand them to URL.
-
-    // This regex looks for things that look like:
-    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-    // ...and various combinations. The username in the beginning is *required*.
-    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-    // Filter out all-number "usernames" which are really port numbers
-    // They can either be :1234 :1234/ or :1234/path but not :12abc
-    if (matched && !matched[1].match(isPortNumber)) {
-      res.type = 'git'
-      setGitAttrs(res, matched[2])
-      res.fetchSpec = matched[1]
-      return res
-    }
-  } else if (rawSpec.startsWith('git+file://')) {
-    // URL can't handle windows paths
-    rawSpec = rawSpec.replace(/\\/g, '/')
-  }
-  const parsedUrl = new URL(rawSpec)
-  // check the protocol, and then see if it's git or not
-  switch (parsedUrl.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:':
-      res.type = 'git'
-      setGitAttrs(res, parsedUrl.hash.slice(1))
-      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
-        // URL can't handle drive letters on windows file paths, the host can't contain a :
-        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
-      } else {
-        parsedUrl.hash = ''
-        res.fetchSpec = parsedUrl.toString()
-      }
-      if (res.fetchSpec.startsWith('git+')) {
-        res.fetchSpec = res.fetchSpec.slice(4)
-      }
-      break
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  if (!subSpec.name) {
-    throw new Error('aliases must have a name')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components as we save based on the fetched
-  // version, not on the argument so this can't compute that.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
-
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
diff --git a/node_modules/pacote/node_modules/npm-package-arg/package.json b/node_modules/pacote/node_modules/npm-package-arg/package.json
deleted file mode 100644
index db6ce9074cfa2..0000000000000
--- a/node_modules/pacote/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "13.0.0",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^9.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^6.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.5",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.5",
-    "publish": true
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index dbdfb7625d944..11a0c28d8d0ac 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -129,7 +129,7 @@
         "normalize-package-data": "^8.0.0",
         "npm-audit-report": "^6.0.0",
         "npm-install-checks": "^7.1.1",
-        "npm-package-arg": "^12.0.2",
+        "npm-package-arg": "^13.0.0",
         "npm-pick-manifest": "^11.0.1",
         "npm-profile": "^12.0.0",
         "npm-registry-fetch": "^19.0.0",
@@ -2012,7 +2012,7 @@
         "@npmcli/template-oss": "4.24.4",
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.3",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
         "tap": "^16.3.8"
       },
@@ -3438,6 +3438,33 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@npmcli/git/node_modules/hosted-git-info": {
+      "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
+      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/git/node_modules/npm-package-arg": {
+      "version": "12.0.2",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
+      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^8.0.0",
+        "proc-log": "^5.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^6.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
       "version": "10.0.0",
       "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
@@ -3956,6 +3983,32 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/npm-package-arg": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^7.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/validate-npm-package-name": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
@@ -4165,22 +4218,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-package-arg": {
-      "version": "12.0.2",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
-      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^8.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest": {
       "version": "10.0.0",
       "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
@@ -4207,16 +4244,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/validate-npm-package-name": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-6.0.2.tgz",
-      "integrity": "sha512-IUoow1YUtvoBBC06dXs8bR8B9vuA3aJfmQNKMoaPG/OFsPmoQvw8xh+6Ye25Gx9DQhoEom3Pcu9MKHerm/NpUQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/which": {
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
@@ -4677,32 +4704,29 @@
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
-      "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
+      "version": "12.0.2",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
+      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "proc-log": "^4.0.0",
+        "hosted-git-info": "^8.0.0",
+        "proc-log": "^5.0.0",
         "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
+        "validate-npm-package-name": "^6.0.0"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/npm-package-arg/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+    "node_modules/@npmcli/template-oss/node_modules/npm-package-arg/node_modules/proc-log": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
+      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
       "dev": true,
       "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": "^18.17.0 || >=20.5.0"
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-packlist": {
@@ -4734,6 +4758,45 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^7.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/validate-npm-package-name": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch": {
       "version": "17.1.0",
       "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz",
@@ -4754,6 +4817,45 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "lru-cache": "^10.0.1"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^7.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/validate-npm-package-name": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/p-map": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
@@ -4834,6 +4936,32 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/npm-package-arg": {
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "hosted-git-info": "^7.0.0",
+        "proc-log": "^4.0.0",
+        "semver": "^7.3.5",
+        "validate-npm-package-name": "^5.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/validate-npm-package-name": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz",
@@ -4979,16 +5107,6 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/validate-npm-package-name": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/walk-up-path": {
       "version": "3.0.1",
       "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-3.0.1.tgz",
@@ -9799,22 +9917,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/init-package-json/node_modules/npm-package-arg": {
-      "version": "13.0.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
-      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^9.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/internal-slot": {
       "version": "1.1.0",
       "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz",
@@ -12803,32 +12905,19 @@
       }
     },
     "node_modules/npm-package-arg": {
-      "version": "12.0.2",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
-      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
+      "version": "13.0.0",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
+      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "hosted-git-info": "^8.0.0",
+        "hosted-git-info": "^9.0.0",
         "proc-log": "^5.0.0",
         "semver": "^7.3.5",
         "validate-npm-package-name": "^6.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/npm-package-arg/node_modules/hosted-git-info": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/npm-packlist": {
@@ -12889,22 +12978,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
-      "version": "13.0.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
-      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^9.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/npm-profile": {
       "version": "12.0.0",
       "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-12.0.0.tgz",
@@ -12952,22 +13025,6 @@
         "node": ">= 18"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
-      "version": "13.0.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
-      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^9.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/npm-user-validate": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/npm-user-validate/-/npm-user-validate-3.0.0.tgz",
@@ -13639,22 +13696,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/pacote/node_modules/npm-package-arg": {
-      "version": "13.0.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
-      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^9.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/tar": {
       "version": "7.4.3",
       "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
@@ -19486,7 +19527,7 @@
         "minimatch": "^9.0.4",
         "nopt": "^8.0.0",
         "npm-install-checks": "^7.1.0",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "npm-pick-manifest": "^11.0.1",
         "npm-registry-fetch": "^19.0.0",
         "pacote": "^21.0.2",
@@ -19546,7 +19587,7 @@
       "version": "10.0.1",
       "license": "ISC",
       "dependencies": {
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "npm-registry-fetch": "^19.0.0"
       },
       "devDependencies": {
@@ -19568,7 +19609,7 @@
         "binary-extensions": "^3.0.0",
         "diff": "^7.0.0",
         "minimatch": "^9.0.4",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
         "tar": "^6.2.1"
       },
@@ -19589,7 +19630,7 @@
         "@npmcli/package-json": "^7.0.0",
         "@npmcli/run-script": "^10.0.0",
         "ci-info": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
         "proc-log": "^5.0.0",
         "promise-retry": "^2.0.1",
@@ -19651,7 +19692,7 @@
       "dependencies": {
         "@npmcli/arborist": "^9.1.4",
         "@npmcli/run-script": "^10.0.0",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2"
       },
       "devDependencies": {
@@ -19671,7 +19712,7 @@
       "dependencies": {
         "@npmcli/package-json": "^7.0.0",
         "ci-info": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
+        "npm-package-arg": "^13.0.0",
         "npm-registry-fetch": "^19.0.0",
         "proc-log": "^5.0.0",
         "semver": "^7.3.7",
diff --git a/package.json b/package.json
index ed53c0852d12c..4f00629e1949d 100644
--- a/package.json
+++ b/package.json
@@ -96,7 +96,7 @@
     "normalize-package-data": "^8.0.0",
     "npm-audit-report": "^6.0.0",
     "npm-install-checks": "^7.1.1",
-    "npm-package-arg": "^12.0.2",
+    "npm-package-arg": "^13.0.0",
     "npm-pick-manifest": "^11.0.1",
     "npm-profile": "^12.0.0",
     "npm-registry-fetch": "^19.0.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 142c62a65a5c3..8a23dedfa2dd8 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -23,7 +23,7 @@
     "minimatch": "^9.0.4",
     "nopt": "^8.0.0",
     "npm-install-checks": "^7.1.0",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "npm-pick-manifest": "^11.0.1",
     "npm-registry-fetch": "^19.0.0",
     "pacote": "^21.0.2",
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index 9c3c446045b6f..c4f81159c6e0d 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -29,7 +29,7 @@
   "bugs": "https://github.com/npm/libnpmaccess/issues",
   "homepage": "https://npmjs.com/package/libnpmaccess",
   "dependencies": {
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "npm-registry-fetch": "^19.0.0"
   },
   "engines": {
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 13c7f7cc7dd6f..f1ef61ca4cc62 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -52,7 +52,7 @@
     "binary-extensions": "^3.0.0",
     "diff": "^7.0.0",
     "minimatch": "^9.0.4",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",
     "tar": "^6.2.1"
   },
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 687b02f7dc126..2acf608ef3858 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -64,7 +64,7 @@
     "@npmcli/package-json": "^7.0.0",
     "@npmcli/run-script": "^10.0.0",
     "ci-info": "^4.0.0",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",
     "proc-log": "^5.0.0",
     "promise-retry": "^2.0.1",
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index 5fd5f945f2a39..29c3fe93375a5 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -39,7 +39,7 @@
   "dependencies": {
     "@npmcli/arborist": "^9.1.4",
     "@npmcli/run-script": "^10.0.0",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2"
   },
   "engines": {
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index 68b2997649a77..d789a3cbabe01 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -40,7 +40,7 @@
   "dependencies": {
     "@npmcli/package-json": "^7.0.0",
     "ci-info": "^4.0.0",
-    "npm-package-arg": "^12.0.0",
+    "npm-package-arg": "^13.0.0",
     "npm-registry-fetch": "^19.0.0",
     "proc-log": "^5.0.0",
     "semver": "^7.3.7",

From 521823bc398de0eb85135a3ef09e217db93ed1ce Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 09:00:05 -0700
Subject: [PATCH 21/63] deps: @npmcli/git@7.0.0

---
 node_modules/.gitignore                       |   12 +-
 .../git/node_modules/hosted-git-info/LICENSE  |   13 -
 .../hosted-git-info/lib/from-url.js           |  122 -
 .../node_modules/hosted-git-info/lib/hosts.js |  231 --
 .../node_modules/hosted-git-info/lib/index.js |  227 --
 .../hosted-git-info/lib/parse-url.js          |   78 -
 .../node_modules/hosted-git-info/package.json |   61 -
 .../{npm-package-arg => lru-cache}/LICENSE    |    2 +-
 .../lru-cache/dist/commonjs/index.js          | 1564 +++++++++
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    3 +
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 +++++++++
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    3 +
 .../git/node_modules/lru-cache/package.json   |  113 +
 .../node_modules/npm-package-arg/lib/npa.js   |  481 ---
 .../node_modules/npm-package-arg/package.json |   61 -
 .../node_modules/npm-pick-manifest/LICENSE.md |   16 -
 .../npm-pick-manifest/lib/index.js            |  224 --
 .../npm-pick-manifest/package.json            |   58 -
 node_modules/@npmcli/git/package.json         |   10 +-
 .../node_modules/@npmcli/git/LICENSE          |   15 -
 .../node_modules/@npmcli/git/lib/clone.js     |  172 -
 .../node_modules/@npmcli/git/lib/errors.js    |   36 -
 .../node_modules/@npmcli/git/lib/find.js      |   15 -
 .../node_modules/@npmcli/git/lib/index.js     |    9 -
 .../node_modules/@npmcli/git/lib/is-clean.js  |    6 -
 .../node_modules/@npmcli/git/lib/is.js        |    4 -
 .../@npmcli/git/lib/lines-to-revs.js          |  147 -
 .../@npmcli/git/lib/make-error.js             |   33 -
 .../node_modules/@npmcli/git/lib/opts.js      |   57 -
 .../node_modules/@npmcli/git/lib/revs.js      |   22 -
 .../node_modules/@npmcli/git/lib/spawn.js     |   44 -
 .../node_modules/@npmcli/git/lib/utils.js     |    3 -
 .../node_modules/@npmcli/git/lib/which.js     |   18 -
 .../node_modules/@npmcli/git/package.json     |   58 -
 .../pacote/node_modules/@npmcli/git/LICENSE   |   15 -
 .../node_modules/@npmcli/git/lib/clone.js     |  172 -
 .../node_modules/@npmcli/git/lib/errors.js    |   36 -
 .../node_modules/@npmcli/git/lib/find.js      |   15 -
 .../node_modules/@npmcli/git/lib/index.js     |    9 -
 .../node_modules/@npmcli/git/lib/is-clean.js  |    6 -
 .../pacote/node_modules/@npmcli/git/lib/is.js |    4 -
 .../@npmcli/git/lib/lines-to-revs.js          |  147 -
 .../@npmcli/git/lib/make-error.js             |   33 -
 .../node_modules/@npmcli/git/lib/opts.js      |   57 -
 .../node_modules/@npmcli/git/lib/revs.js      |   22 -
 .../node_modules/@npmcli/git/lib/spawn.js     |   44 -
 .../node_modules/@npmcli/git/lib/utils.js     |    3 -
 .../node_modules/@npmcli/git/lib/which.js     |   18 -
 .../node_modules/@npmcli/git/package.json     |   58 -
 package-lock.json                             | 2874 ++---------------
 package.json                                  |    2 +-
 workspaces/libnpmversion/package.json         |    2 +-
 54 files changed, 3448 insertions(+), 5551 deletions(-)
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
 rename node_modules/@npmcli/git/node_modules/{npm-package-arg => lru-cache}/LICENSE (92%)
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/package.json
 create mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/find.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/index.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/is.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/lib/which.js
 delete mode 100644 node_modules/pacote/node_modules/@npmcli/git/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index d3ea3a40edd1a..26bf0a2939aef 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -23,9 +23,7 @@
 !/@npmcli/git
 !/@npmcli/git/node_modules/
 /@npmcli/git/node_modules/*
-!/@npmcli/git/node_modules/hosted-git-info
-!/@npmcli/git/node_modules/npm-package-arg
-!/@npmcli/git/node_modules/npm-pick-manifest
+!/@npmcli/git/node_modules/lru-cache
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/map-workspaces/node_modules/
@@ -41,15 +39,10 @@
 !/@npmcli/package-json
 !/@npmcli/package-json/node_modules/
 /@npmcli/package-json/node_modules/*
-!/@npmcli/package-json/node_modules/@npmcli/
-/@npmcli/package-json/node_modules/@npmcli/*
-!/@npmcli/package-json/node_modules/@npmcli/git
 !/@npmcli/package-json/node_modules/glob
 !/@npmcli/package-json/node_modules/jackspeak
 !/@npmcli/package-json/node_modules/lru-cache
 !/@npmcli/package-json/node_modules/minimatch
-!/@npmcli/package-json/node_modules/npm-package-arg
-!/@npmcli/package-json/node_modules/npm-pick-manifest
 !/@npmcli/package-json/node_modules/path-scurry
 !/@npmcli/promise-spawn
 !/@npmcli/query
@@ -207,9 +200,6 @@
 !/pacote
 !/pacote/node_modules/
 /pacote/node_modules/*
-!/pacote/node_modules/@npmcli/
-/pacote/node_modules/@npmcli/*
-!/pacote/node_modules/@npmcli/git
 !/pacote/node_modules/chownr
 !/pacote/node_modules/minizlib
 !/pacote/node_modules/mkdirp
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE b/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
deleted file mode 100644
index 45055763dc838..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/LICENSE
+++ /dev/null
@@ -1,13 +0,0 @@
-Copyright (c) 2015, Rebecca Turner
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
-INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
-OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
deleted file mode 100644
index efc1247d59d12..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/from-url.js
+++ /dev/null
@@ -1,122 +0,0 @@
-'use strict'
-
-const parseUrl = require('./parse-url')
-
-// look for github shorthand inputs, such as npm/cli
-const isGitHubShorthand = (arg) => {
-  // it cannot contain whitespace before the first #
-  // it cannot start with a / because that's probably an absolute file path
-  // but it must include a slash since repos are username/repository
-  // it cannot start with a . because that's probably a relative file path
-  // it cannot start with an @ because that's a scoped package if it passes the other tests
-  // it cannot contain a : before a # because that tells us that there's a protocol
-  // a second / may not exist before a #
-  const firstHash = arg.indexOf('#')
-  const firstSlash = arg.indexOf('/')
-  const secondSlash = arg.indexOf('/', firstSlash + 1)
-  const firstColon = arg.indexOf(':')
-  const firstSpace = /\s/.exec(arg)
-  const firstAt = arg.indexOf('@')
-
-  const spaceOnlyAfterHash = !firstSpace || (firstHash > -1 && firstSpace.index > firstHash)
-  const atOnlyAfterHash = firstAt === -1 || (firstHash > -1 && firstAt > firstHash)
-  const colonOnlyAfterHash = firstColon === -1 || (firstHash > -1 && firstColon > firstHash)
-  const secondSlashOnlyAfterHash = secondSlash === -1 || (firstHash > -1 && secondSlash > firstHash)
-  const hasSlash = firstSlash > 0
-  // if a # is found, what we really want to know is that the character
-  // immediately before # is not a /
-  const doesNotEndWithSlash = firstHash > -1 ? arg[firstHash - 1] !== '/' : !arg.endsWith('/')
-  const doesNotStartWithDot = !arg.startsWith('.')
-
-  return spaceOnlyAfterHash && hasSlash && doesNotEndWithSlash &&
-    doesNotStartWithDot && atOnlyAfterHash && colonOnlyAfterHash &&
-    secondSlashOnlyAfterHash
-}
-
-module.exports = (giturl, opts, { gitHosts, protocols }) => {
-  if (!giturl) {
-    return
-  }
-
-  const correctedUrl = isGitHubShorthand(giturl) ? `github:${giturl}` : giturl
-  const parsed = parseUrl(correctedUrl, protocols)
-  if (!parsed) {
-    return
-  }
-
-  const gitHostShortcut = gitHosts.byShortcut[parsed.protocol]
-  const gitHostDomain = gitHosts.byDomain[parsed.hostname.startsWith('www.')
-    ? parsed.hostname.slice(4)
-    : parsed.hostname]
-  const gitHostName = gitHostShortcut || gitHostDomain
-  if (!gitHostName) {
-    return
-  }
-
-  const gitHostInfo = gitHosts[gitHostShortcut || gitHostDomain]
-  let auth = null
-  if (protocols[parsed.protocol]?.auth && (parsed.username || parsed.password)) {
-    auth = `${parsed.username}${parsed.password ? ':' + parsed.password : ''}`
-  }
-
-  let committish = null
-  let user = null
-  let project = null
-  let defaultRepresentation = null
-
-  try {
-    if (gitHostShortcut) {
-      let pathname = parsed.pathname.startsWith('/') ? parsed.pathname.slice(1) : parsed.pathname
-      const firstAt = pathname.indexOf('@')
-      // we ignore auth for shortcuts, so just trim it out
-      if (firstAt > -1) {
-        pathname = pathname.slice(firstAt + 1)
-      }
-
-      const lastSlash = pathname.lastIndexOf('/')
-      if (lastSlash > -1) {
-        user = decodeURIComponent(pathname.slice(0, lastSlash))
-        // we want nulls only, never empty strings
-        if (!user) {
-          user = null
-        }
-        project = decodeURIComponent(pathname.slice(lastSlash + 1))
-      } else {
-        project = decodeURIComponent(pathname)
-      }
-
-      if (project.endsWith('.git')) {
-        project = project.slice(0, -4)
-      }
-
-      if (parsed.hash) {
-        committish = decodeURIComponent(parsed.hash.slice(1))
-      }
-
-      defaultRepresentation = 'shortcut'
-    } else {
-      if (!gitHostInfo.protocols.includes(parsed.protocol)) {
-        return
-      }
-
-      const segments = gitHostInfo.extract(parsed)
-      if (!segments) {
-        return
-      }
-
-      user = segments.user && decodeURIComponent(segments.user)
-      project = decodeURIComponent(segments.project)
-      committish = decodeURIComponent(segments.committish)
-      defaultRepresentation = protocols[parsed.protocol]?.name || parsed.protocol.slice(0, -1)
-    }
-  } catch (err) {
-    /* istanbul ignore else */
-    if (err instanceof URIError) {
-      return
-    } else {
-      throw err
-    }
-  }
-
-  return [gitHostName, user, auth, project, committish, defaultRepresentation, opts]
-}
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
deleted file mode 100644
index 2a88e95927772..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/hosts.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/* eslint-disable max-len */
-
-'use strict'
-
-const maybeJoin = (...args) => args.every(arg => arg) ? args.join('') : ''
-const maybeEncode = (arg) => arg ? encodeURIComponent(arg) : ''
-const formatHashFragment = (f) => f.toLowerCase()
-  .replace(/^\W+/g, '') // strip leading non-characters
-  .replace(/(?<!\w)\W+$/g, '') // strip trailing non-characters
-
-const defaults = {
-  sshtemplate: ({ domain, user, project, committish }) =>
-    `git@${domain}:${user}/${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, user, project, committish }) =>
-    `git+ssh://git@${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath, path }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', editpath, '/', maybeEncode(committish || 'HEAD'), '/', path)}`,
-  browsetemplate: ({ domain, user, project, committish, treepath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, user, project, committish, treepath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${treepath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  browseblobtemplate: ({ domain, user, project, committish, blobpath, path, fragment, hashformat }) =>
-    `https://${domain}/${user}/${project}/${blobpath}/${maybeEncode(committish || 'HEAD')}/${path}${maybeJoin('#', hashformat(fragment || ''))}`,
-  docstemplate: ({ domain, user, project, treepath, committish }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish))}#readme`,
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/raw/${maybeEncode(committish || 'HEAD')}/${path}`,
-  shortcuttemplate: ({ type, user, project, committish }) =>
-    `${type}:${user}/${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ user, project, committish }) =>
-    `${user}/${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, user, project }) =>
-    `https://${domain}/${user}/${project}/issues`,
-  hashformat: formatHashFragment,
-}
-
-const hosts = {}
-hosts.github = {
-  // First two are insecure and generally shouldn't be used any more, but
-  // they are still supported.
-  protocols: ['git:', 'http:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'github.com',
-  treepath: 'tree',
-  blobpath: 'blob',
-  editpath: 'edit',
-  filetemplate: ({ auth, user, project, committish, path }) =>
-    `https://${maybeJoin(auth, '@')}raw.githubusercontent.com/${user}/${project}/${maybeEncode(committish || 'HEAD')}/${path}`,
-  gittemplate: ({ auth, domain, user, project, committish }) =>
-    `git://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://codeload.${domain}/${user}/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, type, committish] = url.pathname.split('/', 5)
-    if (type && type !== 'tree') {
-      return
-    }
-
-    if (!type) {
-      committish = url.hash.slice(1)
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish }
-  },
-}
-
-hosts.bitbucket = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'bitbucket.org',
-  treepath: 'src',
-  blobpath: 'src',
-  editpath: '?mode=edit',
-  edittemplate: ({ domain, user, project, committish, treepath, path, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', treepath, '/', maybeEncode(committish || 'HEAD'), '/', path, editpath)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/get/${maybeEncode(committish || 'HEAD')}.tar.gz`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (['get'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gitlab = {
-  protocols: ['git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gitlab.com',
-  treepath: 'tree',
-  blobpath: 'tree',
-  editpath: '-/edit',
-  httpstemplate: ({ auth, domain, user, project, committish }) =>
-    `git+https://${maybeJoin(auth, '@')}${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/repository/archive.tar.gz?ref=${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    const path = url.pathname.slice(1)
-    if (path.includes('/-/') || path.includes('/archive.tar.gz')) {
-      return
-    }
-
-    const segments = path.split('/')
-    let project = segments.pop()
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    const user = segments.join('/')
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-hosts.gist = {
-  protocols: ['git:', 'git+ssh:', 'git+https:', 'ssh:', 'https:'],
-  domain: 'gist.github.com',
-  editpath: 'edit',
-  sshtemplate: ({ domain, project, committish }) =>
-    `git@${domain}:${project}.git${maybeJoin('#', committish)}`,
-  sshurltemplate: ({ domain, project, committish }) =>
-    `git+ssh://git@${domain}/${project}.git${maybeJoin('#', committish)}`,
-  edittemplate: ({ domain, user, project, committish, editpath }) =>
-    `https://${domain}/${user}/${project}${maybeJoin('/', maybeEncode(committish))}/${editpath}`,
-  browsetemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  browsetreetemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  browseblobtemplate: ({ domain, project, committish, path, hashformat }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}${maybeJoin('#', hashformat(path))}`,
-  docstemplate: ({ domain, project, committish }) =>
-    `https://${domain}/${project}${maybeJoin('/', maybeEncode(committish))}`,
-  httpstemplate: ({ domain, project, committish }) =>
-    `git+https://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  filetemplate: ({ user, project, committish, path }) =>
-    `https://gist.githubusercontent.com/${user}/${project}/raw${maybeJoin('/', maybeEncode(committish))}/${path}`,
-  shortcuttemplate: ({ type, project, committish }) =>
-    `${type}:${project}${maybeJoin('#', committish)}`,
-  pathtemplate: ({ project, committish }) =>
-    `${project}${maybeJoin('#', committish)}`,
-  bugstemplate: ({ domain, project }) =>
-    `https://${domain}/${project}`,
-  gittemplate: ({ domain, project, committish }) =>
-    `git://${domain}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ project, committish }) =>
-    `https://codeload.github.com/gist/${project}/tar.gz/${maybeEncode(committish || 'HEAD')}`,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-    if (aux === 'raw') {
-      return
-    }
-
-    if (!project) {
-      if (!user) {
-        return
-      }
-
-      project = user
-      user = null
-    }
-
-    if (project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-  hashformat: function (fragment) {
-    return fragment && 'file-' + formatHashFragment(fragment)
-  },
-}
-
-hosts.sourcehut = {
-  protocols: ['git+ssh:', 'https:'],
-  domain: 'git.sr.ht',
-  treepath: 'tree',
-  blobpath: 'tree',
-  filetemplate: ({ domain, user, project, committish, path }) =>
-    `https://${domain}/${user}/${project}/blob/${maybeEncode(committish) || 'HEAD'}/${path}`,
-  httpstemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}.git${maybeJoin('#', committish)}`,
-  tarballtemplate: ({ domain, user, project, committish }) =>
-    `https://${domain}/${user}/${project}/archive/${maybeEncode(committish) || 'HEAD'}.tar.gz`,
-  bugstemplate: () => null,
-  extract: (url) => {
-    let [, user, project, aux] = url.pathname.split('/', 4)
-
-    // tarball url
-    if (['archive'].includes(aux)) {
-      return
-    }
-
-    if (project && project.endsWith('.git')) {
-      project = project.slice(0, -4)
-    }
-
-    if (!user || !project) {
-      return
-    }
-
-    return { user, project, committish: url.hash.slice(1) }
-  },
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  hosts[name] = Object.assign({}, defaults, host)
-}
-
-module.exports = hosts
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
deleted file mode 100644
index 2a7100dcee6e7..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/index.js
+++ /dev/null
@@ -1,227 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const hosts = require('./hosts.js')
-const fromUrl = require('./from-url.js')
-const parseUrl = require('./parse-url.js')
-
-const cache = new LRUCache({ max: 1000 })
-
-function unknownHostedUrl (url) {
-  try {
-    const {
-      protocol,
-      hostname,
-      pathname,
-    } = new URL(url)
-
-    if (!hostname) {
-      return null
-    }
-
-    const proto = /(?:git\+)http:$/.test(protocol) ? 'http:' : 'https:'
-    const path = pathname.replace(/\.git$/, '')
-    return `${proto}//${hostname}${path}`
-  } catch {
-    return null
-  }
-}
-
-class GitHost {
-  constructor (type, user, auth, project, committish, defaultRepresentation, opts = {}) {
-    Object.assign(this, GitHost.#gitHosts[type], {
-      type,
-      user,
-      auth,
-      project,
-      committish,
-      default: defaultRepresentation,
-      opts,
-    })
-  }
-
-  static #gitHosts = { byShortcut: {}, byDomain: {} }
-  static #protocols = {
-    'git+ssh:': { name: 'sshurl' },
-    'ssh:': { name: 'sshurl' },
-    'git+https:': { name: 'https', auth: true },
-    'git:': { auth: true },
-    'http:': { auth: true },
-    'https:': { auth: true },
-    'git+http:': { auth: true },
-  }
-
-  static addHost (name, host) {
-    GitHost.#gitHosts[name] = host
-    GitHost.#gitHosts.byDomain[host.domain] = name
-    GitHost.#gitHosts.byShortcut[`${name}:`] = name
-    GitHost.#protocols[`${name}:`] = { name }
-  }
-
-  static fromUrl (giturl, opts) {
-    if (typeof giturl !== 'string') {
-      return
-    }
-
-    const key = giturl + JSON.stringify(opts || {})
-
-    if (!cache.has(key)) {
-      const hostArgs = fromUrl(giturl, opts, {
-        gitHosts: GitHost.#gitHosts,
-        protocols: GitHost.#protocols,
-      })
-      cache.set(key, hostArgs ? new GitHost(...hostArgs) : undefined)
-    }
-
-    return cache.get(key)
-  }
-
-  static fromManifest (manifest, opts = {}) {
-    if (!manifest || typeof manifest !== 'object') {
-      return
-    }
-
-    const r = manifest.repository
-    // TODO: look into also checking the `bugs`/`homepage` URLs
-
-    const rurl = r && (
-      typeof r === 'string'
-        ? r
-        : typeof r === 'object' && typeof r.url === 'string'
-          ? r.url
-          : null
-    )
-
-    if (!rurl) {
-      throw new Error('no repository')
-    }
-
-    const info = (rurl && GitHost.fromUrl(rurl.replace(/^git\+/, ''), opts)) || null
-    if (info) {
-      return info
-    }
-    const unk = unknownHostedUrl(rurl)
-    return GitHost.fromUrl(unk, opts) || unk
-  }
-
-  static parseUrl (url) {
-    return parseUrl(url)
-  }
-
-  #fill (template, opts) {
-    if (typeof template !== 'function') {
-      return null
-    }
-
-    const options = { ...this, ...this.opts, ...opts }
-
-    // the path should always be set so we don't end up with 'undefined' in urls
-    if (!options.path) {
-      options.path = ''
-    }
-
-    // template functions will insert the leading slash themselves
-    if (options.path.startsWith('/')) {
-      options.path = options.path.slice(1)
-    }
-
-    if (options.noCommittish) {
-      options.committish = null
-    }
-
-    const result = template(options)
-    return options.noGitPlus && result.startsWith('git+') ? result.slice(4) : result
-  }
-
-  hash () {
-    return this.committish ? `#${this.committish}` : ''
-  }
-
-  ssh (opts) {
-    return this.#fill(this.sshtemplate, opts)
-  }
-
-  sshurl (opts) {
-    return this.#fill(this.sshurltemplate, opts)
-  }
-
-  browse (path, ...args) {
-    // not a string, treat path as opts
-    if (typeof path !== 'string') {
-      return this.#fill(this.browsetemplate, path)
-    }
-
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browsetreetemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browsetreetemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  // If the path is known to be a file, then browseFile should be used. For some hosts
-  // the url is the same as browse, but for others like GitHub a file can use both `/tree/`
-  // and `/blob/` in the path. When using a default committish of `HEAD` then the `/tree/`
-  // path will redirect to a specific commit. Using the `/blob/` path avoids this and
-  // does not redirect to a different commit.
-  browseFile (path, ...args) {
-    if (typeof args[0] !== 'string') {
-      return this.#fill(this.browseblobtemplate, { ...args[0], path })
-    }
-
-    return this.#fill(this.browseblobtemplate, { ...args[1], fragment: args[0], path })
-  }
-
-  docs (opts) {
-    return this.#fill(this.docstemplate, opts)
-  }
-
-  bugs (opts) {
-    return this.#fill(this.bugstemplate, opts)
-  }
-
-  https (opts) {
-    return this.#fill(this.httpstemplate, opts)
-  }
-
-  git (opts) {
-    return this.#fill(this.gittemplate, opts)
-  }
-
-  shortcut (opts) {
-    return this.#fill(this.shortcuttemplate, opts)
-  }
-
-  path (opts) {
-    return this.#fill(this.pathtemplate, opts)
-  }
-
-  tarball (opts) {
-    return this.#fill(this.tarballtemplate, { ...opts, noCommittish: false })
-  }
-
-  file (path, opts) {
-    return this.#fill(this.filetemplate, { ...opts, path })
-  }
-
-  edit (path, opts) {
-    return this.#fill(this.edittemplate, { ...opts, path })
-  }
-
-  getDefaultRepresentation () {
-    return this.default
-  }
-
-  toString (opts) {
-    if (this.default && typeof this[this.default] === 'function') {
-      return this[this.default](opts)
-    }
-
-    return this.sshurl(opts)
-  }
-}
-
-for (const [name, host] of Object.entries(hosts)) {
-  GitHost.addHost(name, host)
-}
-
-module.exports = GitHost
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js b/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
deleted file mode 100644
index 7d5489c008ab4..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/lib/parse-url.js
+++ /dev/null
@@ -1,78 +0,0 @@
-const url = require('url')
-
-const lastIndexOfBefore = (str, char, beforeChar) => {
-  const startPosition = str.indexOf(beforeChar)
-  return str.lastIndexOf(char, startPosition > -1 ? startPosition : Infinity)
-}
-
-const safeUrl = (u) => {
-  try {
-    return new url.URL(u)
-  } catch {
-    // this fn should never throw
-  }
-}
-
-// accepts input like git:github.com:user/repo and inserts the // after the first :
-const correctProtocol = (arg, protocols) => {
-  const firstColon = arg.indexOf(':')
-  const proto = arg.slice(0, firstColon + 1)
-  if (Object.prototype.hasOwnProperty.call(protocols, proto)) {
-    return arg
-  }
-
-  const firstAt = arg.indexOf('@')
-  if (firstAt > -1) {
-    if (firstAt > firstColon) {
-      return `git+ssh://${arg}`
-    } else {
-      return arg
-    }
-  }
-
-  const doubleSlash = arg.indexOf('//')
-  if (doubleSlash === firstColon + 1) {
-    return arg
-  }
-
-  return `${arg.slice(0, firstColon + 1)}//${arg.slice(firstColon + 1)}`
-}
-
-// attempt to correct an scp style url so that it will parse with `new URL()`
-const correctUrl = (giturl) => {
-  // ignore @ that come after the first hash since the denotes the start
-  // of a committish which can contain @ characters
-  const firstAt = lastIndexOfBefore(giturl, '@', '#')
-  // ignore colons that come after the hash since that could include colons such as:
-  // git@github.com:user/package-2#semver:^1.0.0
-  const lastColonBeforeHash = lastIndexOfBefore(giturl, ':', '#')
-
-  if (lastColonBeforeHash > firstAt) {
-    // the last : comes after the first @ (or there is no @)
-    // like it would in:
-    // proto://hostname.com:user/repo
-    // username@hostname.com:user/repo
-    // :password@hostname.com:user/repo
-    // username:password@hostname.com:user/repo
-    // proto://username@hostname.com:user/repo
-    // proto://:password@hostname.com:user/repo
-    // proto://username:password@hostname.com:user/repo
-    // then we replace the last : with a / to create a valid path
-    giturl = giturl.slice(0, lastColonBeforeHash) + '/' + giturl.slice(lastColonBeforeHash + 1)
-  }
-
-  if (lastIndexOfBefore(giturl, ':', '#') === -1 && giturl.indexOf('//') === -1) {
-    // we have no : at all
-    // as it would be in:
-    // username@hostname.com/user/repo
-    // then we prepend a protocol
-    giturl = `git+ssh://${giturl}`
-  }
-
-  return giturl
-}
-
-module.exports = (giturl, protocols) => {
-  const withProtocol = protocols ? correctProtocol(giturl, protocols) : giturl
-  return safeUrl(withProtocol) || safeUrl(correctUrl(withProtocol))
-}
diff --git a/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json b/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
deleted file mode 100644
index a9bb26be4a704..0000000000000
--- a/node_modules/@npmcli/git/node_modules/hosted-git-info/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "hosted-git-info",
-  "version": "8.1.0",
-  "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab",
-  "main": "./lib/index.js",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/hosted-git-info.git"
-  },
-  "keywords": [
-    "git",
-    "github",
-    "bitbucket",
-    "gitlab"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/hosted-git-info/issues"
-  },
-  "homepage": "https://github.com/npm/hosted-git-info",
-  "scripts": {
-    "posttest": "npm run lint",
-    "snap": "tap",
-    "test": "tap",
-    "test:coverage": "tap --coverage-report=html",
-    "lint": "npm run eslint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "dependencies": {
-    "lru-cache": "^10.0.1"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.24.3",
-    "tap": "^16.0.1"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "tap": {
-    "color": 1,
-    "coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.3",
-    "publish": "true"
-  }
-}
diff --git a/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE b/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
similarity index 92%
rename from node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
rename to node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
index 19cec97b18468..f785757cd63f8 100644
--- a/node_modules/@npmcli/git/node_modules/npm-package-arg/LICENSE
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
@@ -1,6 +1,6 @@
 The ISC License
 
-Copyright (c) npm, Inc.
+Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
new file mode 100644
index 0000000000000..921b8f10f71b1
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
@@ -0,0 +1,1564 @@
+"use strict";
+/**
+ * @module LRUCache
+ */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.LRUCache = void 0;
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to 1s so we're not hitting
+        // that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
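+    // Illustrative usage sketch: forEach()/rforEach() visit entries without
+    // changing their recency or age:
+    //
+    //   cache.forEach((value, key) => console.log(key, value))   // newest first
+    //   cache.rforEach((value, key) => console.log(key, value))  // oldest first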
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
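+    // Illustrative usage sketch: info() reports the current value plus the
+    // remaining TTL (negative once expired) and size, when those are tracked:
+    //
+    //   const meta = cache.info('some-key')
+    //   if (meta && typeof meta.ttl === 'number' && meta.ttl < 0) { /* expired */ }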
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the provided entries, in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
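+    // Illustrative usage sketch: dump()/load() round-trip a cache through JSON,
+    // with each entry's `start` stored as a portable Date.now()-based timestamp
+    // (assumes the second cache is constructed with the same options):
+    //
+    //   const snapshot = JSON.stringify(cache.dump())
+    //   const copy = new LRUCache({ max: cache.max })
+    //   copy.load(JSON.parse(snapshot))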
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     *
+     * If the value is `undefined`, then this is an alias for
+     * `cache.delete(key)`. `undefined` is never stored in the cache.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
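+    // Illustrative usage sketch of per-call set() options overriding the
+    // constructor defaults for a single entry (ttl in milliseconds; `size`
+    // only meaningful on caches created with maxSize/maxEntrySize):
+    //
+    //   cache.set('token', value, { ttl: 60_000 })
+    //   cache.set('blob', buf, { size: buf.length, noUpdateTTL: true })
+    //   cache.set('gone', undefined)   // alias for cache.delete('gone')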
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Age is updated only if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by passing a
+     * `status` object and inspecting its `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
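+    // Illustrative usage sketch: has() can distinguish "stale but present" from
+    // a true miss via an optional status object, without promoting the entry:
+    //
+    //   const status = {}
+    //   cache.has('key', { status })   // status.has is 'hit', 'stale', or 'miss'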
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
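+    // Illustrative usage sketch: peek() reads a value without promoting the
+    // entry or deleting it when stale:
+    //
+    //   const maybe = cache.peek('key', { allowStale: true })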
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
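+    // Illustrative usage sketch (assumes the cache was constructed with a
+    // `fetchMethod`): fetch() returns a cached value or resolves it via
+    // fetchMethod, deduplicating concurrent requests for the same key:
+    //
+    //   const cache = new LRUCache({
+    //     max: 100,
+    //     fetchMethod: async (key, staleValue, { signal }) =>
+    //       (await fetch(`https://example.invalid/${key}`, { signal })).json(),
+    //   })
+    //   const data = await cache.fetch('some-key')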
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
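+    // Illustrative usage sketch (assumes a `memoMethod` was provided to the
+    // constructor): memo() returns the cached value on a hit and otherwise
+    // computes, caches, and returns it synchronously:
+    //
+    //   const value = cache.memo('key', { context: { anyExtraInput: true } })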
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
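+    // Illustrative usage sketch: get() promotes the entry to most recently used;
+    // stale entries are returned only when allowStale is set:
+    //
+    //   const v = cache.get('key', { allowStale: true })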
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+exports.LRUCache = LRUCache;
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ef5027b91650d
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json
new file mode 100644
index 0000000000000..5bbefffbabee3
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "commonjs"
+}
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js
new file mode 100644
index 0000000000000..8fd8fc5f31507
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js
@@ -0,0 +1,1560 @@
+/**
+ * @module LRUCache
+ */
+const defaultPerf = (typeof performance === 'object' &&
+    performance &&
+    typeof performance.now === 'function') ?
+    performance
+    : Date;
+const warned = new Set();
+/* c8 ignore start */
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
+/* c8 ignore start */
+const emitWarning = (msg, type, code, fn) => {
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
+        : console.error(`[${code}] ${type}: ${msg}`);
+};
+let AC = globalThis.AbortController;
+let AS = globalThis.AbortSignal;
+/* c8 ignore start */
+if (typeof AC === 'undefined') {
+    //@ts-ignore
+    AS = class AbortSignal {
+        onabort;
+        _onabort = [];
+        reason;
+        aborted = false;
+        addEventListener(_, fn) {
+            this._onabort.push(fn);
+        }
+    };
+    //@ts-ignore
+    AC = class AbortController {
+        constructor() {
+            warnACPolyfill();
+        }
+        signal = new AS();
+        abort(reason) {
+            if (this.signal.aborted)
+                return;
+            //@ts-ignore
+            this.signal.reason = reason;
+            //@ts-ignore
+            this.signal.aborted = true;
+            //@ts-ignore
+            for (const fn of this.signal._onabort) {
+                fn(reason);
+            }
+            this.signal.onabort?.(reason);
+        }
+    };
+    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
+    const warnACPolyfill = () => {
+        if (!printACPolyfillWarning)
+            return;
+        printACPolyfillWarning = false;
+        emitWarning('AbortController is not defined. If using lru-cache in ' +
+            'node 14, load an AbortController polyfill from the ' +
+            '`node-abort-controller` package. A minimal polyfill is ' +
+            'provided for use by LRUCache.fetch(), but it should not be ' +
+            'relied upon in other contexts (eg, passing it to other APIs that ' +
+            'use AbortController/AbortSignal might have undesirable effects). ' +
+            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
+    };
+}
+/* c8 ignore stop */
+const shouldWarn = (code) => !warned.has(code);
+const TYPE = Symbol('type');
+const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
+/* c8 ignore start */
+// This is a little bit ridiculous, tbh.
+// The maximum array length is 2^32-1 or thereabouts on most JS impls.
+// And well before that point, you're caching the entire world, I mean,
+// that's ~32GB of just integers for the next/prev links, plus whatever
+// else to hold that many keys and values.  Just filling the memory with
+// zeroes at init time is brutal when you get that big.
+// But why not be complete?
+// Maybe in the future, these limits will have expanded.
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+                    : null;
+/* c8 ignore stop */
+class ZeroArray extends Array {
+    constructor(size) {
+        super(size);
+        this.fill(0);
+    }
+}
+class Stack {
+    heap;
+    length;
+    // private constructor
+    static #constructing = false;
+    static create(max) {
+        const HeapCls = getUintArray(max);
+        if (!HeapCls)
+            return [];
+        Stack.#constructing = true;
+        const s = new Stack(max, HeapCls);
+        Stack.#constructing = false;
+        return s;
+    }
+    constructor(max, HeapCls) {
+        /* c8 ignore start */
+        if (!Stack.#constructing) {
+            throw new TypeError('instantiate Stack using Stack.create(n)');
+        }
+        /* c8 ignore stop */
+        this.heap = new HeapCls(max);
+        this.length = 0;
+    }
+    push(n) {
+        this.heap[this.length++] = n;
+    }
+    pop() {
+        return this.heap[--this.length];
+    }
+}
+/**
+ * Default export, the thing you're using this module to get.
+ *
+ * The `K` and `V` types define the key and value types, respectively. The
+ * optional `FC` type defines the type of the `context` object passed to
+ * `cache.fetch()` and `cache.memo()`.
+ *
+ * Keys and values **must not** be `null` or `undefined`.
+ *
+ * All properties from the options object (with the exception of `max`,
+ * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
+ * added as normal public members. (The listed options are read-only getters.)
+ *
+ * Changing any of these will alter the defaults for subsequent method calls.
+ */
+export class LRUCache {
+    // options that cannot be changed without disaster
+    #max;
+    #maxSize;
+    #dispose;
+    #onInsert;
+    #disposeAfter;
+    #fetchMethod;
+    #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.ttl}
+     */
+    ttl;
+    /**
+     * {@link LRUCache.OptionsBase.ttlResolution}
+     */
+    ttlResolution;
+    /**
+     * {@link LRUCache.OptionsBase.ttlAutopurge}
+     */
+    ttlAutopurge;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnGet}
+     */
+    updateAgeOnGet;
+    /**
+     * {@link LRUCache.OptionsBase.updateAgeOnHas}
+     */
+    updateAgeOnHas;
+    /**
+     * {@link LRUCache.OptionsBase.allowStale}
+     */
+    allowStale;
+    /**
+     * {@link LRUCache.OptionsBase.noDisposeOnSet}
+     */
+    noDisposeOnSet;
+    /**
+     * {@link LRUCache.OptionsBase.noUpdateTTL}
+     */
+    noUpdateTTL;
+    /**
+     * {@link LRUCache.OptionsBase.maxEntrySize}
+     */
+    maxEntrySize;
+    /**
+     * {@link LRUCache.OptionsBase.sizeCalculation}
+     */
+    sizeCalculation;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
+     */
+    noDeleteOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
+     */
+    noDeleteOnStaleGet;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
+     */
+    allowStaleOnFetchAbort;
+    /**
+     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
+     */
+    allowStaleOnFetchRejection;
+    /**
+     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
+     */
+    ignoreFetchAbort;
+    // computed properties
+    #size;
+    #calculatedSize;
+    #keyMap;
+    #keyList;
+    #valList;
+    #next;
+    #prev;
+    #head;
+    #tail;
+    #free;
+    #disposed;
+    #sizes;
+    #starts;
+    #ttls;
+    #hasDispose;
+    #hasFetchMethod;
+    #hasDisposeAfter;
+    #hasOnInsert;
+    /**
+     * Do not call this method unless you need to inspect the
+     * inner workings of the cache.  If anything returned by this
+     * object is modified in any way, strange breakage may occur.
+     *
+     * These fields are private for a reason!
+     *
+     * @internal
+     */
+    static unsafeExposeInternals(c) {
+        return {
+            // properties
+            starts: c.#starts,
+            ttls: c.#ttls,
+            sizes: c.#sizes,
+            keyMap: c.#keyMap,
+            keyList: c.#keyList,
+            valList: c.#valList,
+            next: c.#next,
+            prev: c.#prev,
+            get head() {
+                return c.#head;
+            },
+            get tail() {
+                return c.#tail;
+            },
+            free: c.#free,
+            // methods
+            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
+            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
+            moveToTail: (index) => c.#moveToTail(index),
+            indexes: (options) => c.#indexes(options),
+            rindexes: (options) => c.#rindexes(options),
+            isStale: (index) => c.#isStale(index),
+        };
+    }
+    // Protected read-only members
+    /**
+     * {@link LRUCache.OptionsBase.max} (read-only)
+     */
+    get max() {
+        return this.#max;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.maxSize} (read-only)
+     */
+    get maxSize() {
+        return this.#maxSize;
+    }
+    /**
+     * The total computed size of items in the cache (read-only)
+     */
+    get calculatedSize() {
+        return this.#calculatedSize;
+    }
+    /**
+     * The number of items stored in the cache (read-only)
+     */
+    get size() {
+        return this.#size;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
+     */
+    get fetchMethod() {
+        return this.#fetchMethod;
+    }
+    get memoMethod() {
+        return this.#memoMethod;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.dispose} (read-only)
+     */
+    get dispose() {
+        return this.#dispose;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
+    /**
+     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
+     */
+    get disposeAfter() {
+        return this.#disposeAfter;
+    }
+    constructor(options) {
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
+        if (max !== 0 && !isPosInt(max)) {
+            throw new TypeError('max option must be a nonnegative integer');
+        }
+        const UintArray = max ? getUintArray(max) : Array;
+        if (!UintArray) {
+            throw new Error('invalid max value: ' + max);
+        }
+        this.#max = max;
+        this.#maxSize = maxSize;
+        this.maxEntrySize = maxEntrySize || this.#maxSize;
+        this.sizeCalculation = sizeCalculation;
+        if (this.sizeCalculation) {
+            if (!this.#maxSize && !this.maxEntrySize) {
+                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
+            }
+            if (typeof this.sizeCalculation !== 'function') {
+                throw new TypeError('sizeCalculation set to non-function');
+            }
+        }
+        if (memoMethod !== undefined &&
+            typeof memoMethod !== 'function') {
+            throw new TypeError('memoMethod must be a function if defined');
+        }
+        this.#memoMethod = memoMethod;
+        if (fetchMethod !== undefined &&
+            typeof fetchMethod !== 'function') {
+            throw new TypeError('fetchMethod must be a function if specified');
+        }
+        this.#fetchMethod = fetchMethod;
+        this.#hasFetchMethod = !!fetchMethod;
+        this.#keyMap = new Map();
+        this.#keyList = new Array(max).fill(undefined);
+        this.#valList = new Array(max).fill(undefined);
+        this.#next = new UintArray(max);
+        this.#prev = new UintArray(max);
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free = Stack.create(max);
+        this.#size = 0;
+        this.#calculatedSize = 0;
+        if (typeof dispose === 'function') {
+            this.#dispose = dispose;
+        }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
+        if (typeof disposeAfter === 'function') {
+            this.#disposeAfter = disposeAfter;
+            this.#disposed = [];
+        }
+        else {
+            this.#disposeAfter = undefined;
+            this.#disposed = undefined;
+        }
+        this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
+        this.#hasDisposeAfter = !!this.#disposeAfter;
+        this.noDisposeOnSet = !!noDisposeOnSet;
+        this.noUpdateTTL = !!noUpdateTTL;
+        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
+        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
+        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
+        this.ignoreFetchAbort = !!ignoreFetchAbort;
+        // NB: maxEntrySize is set to maxSize if it's set
+        if (this.maxEntrySize !== 0) {
+            if (this.#maxSize !== 0) {
+                if (!isPosInt(this.#maxSize)) {
+                    throw new TypeError('maxSize must be a positive integer if specified');
+                }
+            }
+            if (!isPosInt(this.maxEntrySize)) {
+                throw new TypeError('maxEntrySize must be a positive integer if specified');
+            }
+            this.#initializeSizeTracking();
+        }
+        this.allowStale = !!allowStale;
+        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
+        this.updateAgeOnGet = !!updateAgeOnGet;
+        this.updateAgeOnHas = !!updateAgeOnHas;
+        this.ttlResolution =
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
+                : 1;
+        this.ttlAutopurge = !!ttlAutopurge;
+        this.ttl = ttl || 0;
+        if (this.ttl) {
+            if (!isPosInt(this.ttl)) {
+                throw new TypeError('ttl must be a positive integer if specified');
+            }
+            this.#initializeTTLTracking();
+        }
+        // do not allow completely unbounded caches
+        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
+            throw new TypeError('At least one of max, maxSize, or ttl is required');
+        }
+        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
+            const code = 'LRU_CACHE_UNBOUNDED';
+            if (shouldWarn(code)) {
+                warned.add(code);
+                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
+                    'result in unbounded memory consumption.';
+                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
+            }
+        }
+    }
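+    // A minimal construction sketch based on the options validated above; the
+    // numbers are illustrative, not defaults. At least one of `max`, `maxSize`,
+    // or `ttl` must be provided, and a `maxSize` cache needs either a per-set
+    // `size` or a `sizeCalculation` function.
+    //
+    //   const cache = new LRUCache({ max: 500 })
+    //   const ttlCache = new LRUCache({ max: 500, ttl: 1000 * 60 * 5 })
+    //   const sizedCache = new LRUCache({
+    //     maxSize: 5000,
+    //     sizeCalculation: (value, key) => JSON.stringify(value).length,
+    //   })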
+    /**
+     * Return the number of ms left in the item's TTL. If item is not in cache,
+     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
+     */
+    getRemainingTTL(key) {
+        return this.#keyMap.has(key) ? Infinity : 0;
+    }
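+    // Illustrative sketch of the semantics documented above (key and variable
+    // names are made up; `noTtlCache` stands for a cache created without any
+    // TTL tracking):
+    //
+    //   cache.set('session', token, { ttl: 1000 })
+    //   cache.getRemainingTTL('session')   // ~1000, counting down
+    //   cache.getRemainingTTL('missing')   // 0 (not in cache)
+    //   noTtlCache.set('k', 'v')
+    //   noTtlCache.getRemainingTTL('k')    // Infinity (present, no TTL defined)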
+    #initializeTTLTracking() {
+        const ttls = new ZeroArray(this.#max);
+        const starts = new ZeroArray(this.#max);
+        this.#ttls = ttls;
+        this.#starts = starts;
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+            starts[index] = ttl !== 0 ? start : 0;
+            ttls[index] = ttl;
+            if (ttl !== 0 && this.ttlAutopurge) {
+                const t = setTimeout(() => {
+                    if (this.#isStale(index)) {
+                        this.#delete(this.#keyList[index], 'expire');
+                    }
+                }, ttl + 1);
+                // unref() not supported on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+        };
+        this.#updateItemAge = index => {
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+        };
+        this.#statusTTL = (status, index) => {
+            if (ttls[index]) {
+                const ttl = ttls[index];
+                const start = starts[index];
+                /* c8 ignore next */
+                if (!ttl || !start)
+                    return;
+                status.ttl = ttl;
+                status.start = start;
+                status.now = cachedNow || getNow();
+                const age = status.now - start;
+                status.remainingTTL = ttl - age;
+            }
+        };
+        // debounce calls to perf.now() to the ttlResolution interval so
+        // we're not hitting that costly call repeatedly.
+        let cachedNow = 0;
+        const getNow = () => {
+            const n = this.#perf.now();
+            if (this.ttlResolution > 0) {
+                cachedNow = n;
+                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
+                // not available on all platforms
+                /* c8 ignore start */
+                if (t.unref) {
+                    t.unref();
+                }
+                /* c8 ignore stop */
+            }
+            return n;
+        };
+        this.getRemainingTTL = key => {
+            const index = this.#keyMap.get(key);
+            if (index === undefined) {
+                return 0;
+            }
+            const ttl = ttls[index];
+            const start = starts[index];
+            if (!ttl || !start) {
+                return Infinity;
+            }
+            const age = (cachedNow || getNow()) - start;
+            return ttl - age;
+        };
+        this.#isStale = index => {
+            const s = starts[index];
+            const t = ttls[index];
+            return !!t && !!s && (cachedNow || getNow()) - s > t;
+        };
+    }
+    // conditionally set private methods related to TTL
+    #updateItemAge = () => { };
+    #statusTTL = () => { };
+    #setItemTTL = () => { };
+    /* c8 ignore stop */
+    #isStale = () => false;
+    #initializeSizeTracking() {
+        const sizes = new ZeroArray(this.#max);
+        this.#calculatedSize = 0;
+        this.#sizes = sizes;
+        this.#removeItemSize = index => {
+            this.#calculatedSize -= sizes[index];
+            sizes[index] = 0;
+        };
+        this.#requireSize = (k, v, size, sizeCalculation) => {
+            // provisionally accept background fetches.
+            // actual value size will be checked when they return.
+            if (this.#isBackgroundFetch(v)) {
+                return 0;
+            }
+            if (!isPosInt(size)) {
+                if (sizeCalculation) {
+                    if (typeof sizeCalculation !== 'function') {
+                        throw new TypeError('sizeCalculation must be a function');
+                    }
+                    size = sizeCalculation(v, k);
+                    if (!isPosInt(size)) {
+                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
+                    }
+                }
+                else {
+                    throw new TypeError('invalid size value (must be positive integer). ' +
+                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
+                        'or size must be set.');
+                }
+            }
+            return size;
+        };
+        this.#addItemSize = (index, size, status) => {
+            sizes[index] = size;
+            if (this.#maxSize) {
+                const maxSize = this.#maxSize - sizes[index];
+                while (this.#calculatedSize > maxSize) {
+                    this.#evict(true);
+                }
+            }
+            this.#calculatedSize += sizes[index];
+            if (status) {
+                status.entrySize = size;
+                status.totalCalculatedSize = this.#calculatedSize;
+            }
+        };
+    }
+    #removeItemSize = _i => { };
+    #addItemSize = (_i, _s, _st) => { };
+    #requireSize = (_k, _v, size, sizeCalculation) => {
+        if (size || sizeCalculation) {
+            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
+        }
+        return 0;
+    };
+    *#indexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#tail; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#head) {
+                    break;
+                }
+                else {
+                    i = this.#prev[i];
+                }
+            }
+        }
+    }
+    *#rindexes({ allowStale = this.allowStale } = {}) {
+        if (this.#size) {
+            for (let i = this.#head; true;) {
+                if (!this.#isValidIndex(i)) {
+                    break;
+                }
+                if (allowStale || !this.#isStale(i)) {
+                    yield i;
+                }
+                if (i === this.#tail) {
+                    break;
+                }
+                else {
+                    i = this.#next[i];
+                }
+            }
+        }
+    }
+    #isValidIndex(index) {
+        return (index !== undefined &&
+            this.#keyMap.get(this.#keyList[index]) === index);
+    }
+    /**
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from most recently used to least recently used.
+     */
+    *entries() {
+        for (const i of this.#indexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.entries}
+     *
+     * Return a generator yielding `[key, value]` pairs,
+     * in order from least recently used to most recently used.
+     */
+    *rentries() {
+        for (const i of this.#rindexes()) {
+            if (this.#valList[i] !== undefined &&
+                this.#keyList[i] !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield [this.#keyList[i], this.#valList[i]];
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the keys in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *keys() {
+        for (const i of this.#indexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.keys}
+     *
+     * Return a generator yielding the keys in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rkeys() {
+        for (const i of this.#rindexes()) {
+            const k = this.#keyList[i];
+            if (k !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield k;
+            }
+        }
+    }
+    /**
+     * Return a generator yielding the values in the cache,
+     * in order from most recently used to least recently used.
+     */
+    *values() {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Inverse order version of {@link LRUCache.values}
+     *
+     * Return a generator yielding the values in the cache,
+     * in order from least recently used to most recently used.
+     */
+    *rvalues() {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            if (v !== undefined &&
+                !this.#isBackgroundFetch(this.#valList[i])) {
+                yield this.#valList[i];
+            }
+        }
+    }
+    /**
+     * Iterating over the cache itself yields the same results as
+     * {@link LRUCache.entries}
+     */
+    [Symbol.iterator]() {
+        return this.entries();
+    }
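+    // Iteration sketch: iterating the cache itself behaves like entries(),
+    // most recently used first; keys()/values() and their r-prefixed variants
+    // follow the same pattern. Key and value names are illustrative.
+    //
+    //   for (const [key, value] of cache) {
+    //     console.log(key, value)
+    //   }
+    //   const newestFirstKeys = [...cache.keys()]
+    //   const oldestFirstKeys = [...cache.rkeys()]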
+    /**
+     * A String value that is used in the creation of the default string
+     * description of an object. Called by the built-in method
+     * `Object.prototype.toString`.
+     */
+    [Symbol.toStringTag] = 'LRUCache';
+    /**
+     * Find a value for which the supplied fn method returns a truthy value,
+     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
+     */
+    find(fn, getOptions = {}) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            if (fn(value, this.#keyList[i], this)) {
+                return this.get(this.#keyList[i], getOptions);
+            }
+        }
+    }
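+    // Sketch of find(), per the doc above: the callback receives
+    // (value, key, cache) and the first truthy match is returned via get(),
+    // so it also updates recency. The predicate here is illustrative.
+    //
+    //   const firstBigEntry = cache.find((value, key) => value.size > 1024)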
+    /**
+     * Call the supplied function on each item in the cache, in order from most
+     * recently used to least recently used.
+     *
+     * `fn` is called as `fn(value, key, cache)`.
+     *
+     * If `thisp` is provided, function will be called in the `this`-context of
+     * the provided object, or the cache if no `thisp` object is provided.
+     *
+     * Does not update age or recency of use, or iterate over stale values.
+     */
+    forEach(fn, thisp = this) {
+        for (const i of this.#indexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * The same as {@link LRUCache.forEach} but items are iterated over in
+     * reverse order.  (ie, less recently used items are iterated over first.)
+     */
+    rforEach(fn, thisp = this) {
+        for (const i of this.#rindexes()) {
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined)
+                continue;
+            fn.call(thisp, value, this.#keyList[i], this);
+        }
+    }
+    /**
+     * Delete any stale entries. Returns true if anything was removed,
+     * false otherwise.
+     */
+    purgeStale() {
+        let deleted = false;
+        for (const i of this.#rindexes({ allowStale: true })) {
+            if (this.#isStale(i)) {
+                this.#delete(this.#keyList[i], 'expire');
+                deleted = true;
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Get the extended info about a given entry, to get its value, size, and
+     * TTL info simultaneously. Returns `undefined` if the key is not present.
+     *
+     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
+     * serialization, the `start` value is always the current timestamp, and the
+     * `ttl` is a calculated remaining time to live (negative if expired).
+     *
+     * Always returns stale values, if their info is found in the cache, so be
+     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
+     * if relevant.
+     */
+    info(key) {
+        const i = this.#keyMap.get(key);
+        if (i === undefined)
+            return undefined;
+        const v = this.#valList[i];
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        if (value === undefined)
+            return undefined;
+        /* c8 ignore end */
+        const entry = { value };
+        if (this.#ttls && this.#starts) {
+            const ttl = this.#ttls[i];
+            const start = this.#starts[i];
+            if (ttl && start) {
+                const remain = ttl - (this.#perf.now() - start);
+                entry.ttl = remain;
+                entry.start = Date.now();
+            }
+        }
+        if (this.#sizes) {
+            entry.size = this.#sizes[i];
+        }
+        return entry;
+    }
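+    // Sketch of info(), per the doc above: unlike dump(), the returned `start`
+    // is the current timestamp and `ttl` is the remaining time to live, which
+    // goes negative once expired. The key name is illustrative.
+    //
+    //   const entry = cache.info('user:1')
+    //   if (entry && entry.ttl !== undefined && entry.ttl < 0) {
+    //     // present in the cache, but already expired
+    //   }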
+    /**
+     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
+     * passed to {@link LRUCache#load}.
+     *
+     * The `start` fields are calculated relative to a portable `Date.now()`
+     * timestamp, even if `performance.now()` is available.
+     *
+     * Stale entries are always included in the `dump`, even if
+     * {@link LRUCache.OptionsBase.allowStale} is false.
+     *
+     * Note: this returns an actual array, not a generator, so it can be more
+     * easily passed around.
+     */
+    dump() {
+        const arr = [];
+        for (const i of this.#indexes({ allowStale: true })) {
+            const key = this.#keyList[i];
+            const v = this.#valList[i];
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            if (value === undefined || key === undefined)
+                continue;
+            const entry = { value };
+            if (this.#ttls && this.#starts) {
+                entry.ttl = this.#ttls[i];
+                // always dump the start relative to a portable timestamp
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = this.#perf.now() - this.#starts[i];
+                entry.start = Math.floor(Date.now() - age);
+            }
+            if (this.#sizes) {
+                entry.size = this.#sizes[i];
+            }
+            arr.unshift([key, entry]);
+        }
+        return arr;
+    }
+    /**
+     * Reset the cache and load in the items in entries in the order listed.
+     *
+     * The shape of the resulting cache may be different if the same options are
+     * not used in both caches.
+     *
+     * The `start` fields are assumed to be calculated relative to a portable
+     * `Date.now()` timestamp, even if `performance.now()` is available.
+     */
+    load(arr) {
+        this.clear();
+        for (const [key, entry] of arr) {
+            if (entry.start) {
+                // entry.start is a portable timestamp, but we may be using
+                // node's performance.now(), so calculate the offset, so that
+                // we get the intended remaining TTL, no matter how long it's
+                // been on ice.
+                //
+                // it's ok for this to be a bit slow, it's a rare operation.
+                const age = Date.now() - entry.start;
+                entry.start = this.#perf.now() - age;
+            }
+            this.set(key, entry.value, entry);
+        }
+    }
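+    // dump()/load() round-trip sketch, assuming both caches were created with
+    // compatible options and the values are JSON-serializable. The `start`
+    // fields are kept portable via Date.now(), so the snapshot survives
+    // serialization across processes.
+    //
+    //   const snapshot = JSON.stringify(cache.dump())
+    //   // ...later, possibly in another process...
+    //   const restored = new LRUCache({ max: cache.max })
+    //   restored.load(JSON.parse(snapshot))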
+    /**
+     * Add a value to the cache.
+     *
+     * Note: if `undefined` is specified as a value, this is an alias for
+     * {@link LRUCache#delete}; `undefined` is never stored in the cache.
+     *
+     * Fields on the {@link LRUCache.SetOptions} options param will override
+     * their corresponding values in the constructor options for the scope
+     * of this single `set()` operation.
+     *
+     * If `start` is provided, then that will set the effective start
+     * time for the TTL calculation. Note that this must be a previous
+     * value of `performance.now()` if supported, or a previous value of
+     * `Date.now()` if not.
+     *
+     * Options object may also include `size`, which will prevent
+     * calling the `sizeCalculation` function and just use the specified
+     * number if it is a positive integer, and `noDisposeOnSet` which
+     * will prevent calling a `dispose` function in the case of
+     * overwrites.
+     *
+     * If the `size` (or return value of `sizeCalculation`) for a given
+     * entry is greater than `maxEntrySize`, then the item will not be
+     * added to the cache.
+     *
+     * Will update the recency of the entry.
+     */
+    set(k, v, setOptions = {}) {
+        if (v === undefined) {
+            this.delete(k);
+            return this;
+        }
+        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
+        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
+        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
+        // if the item doesn't fit, don't do anything
+        // NB: maxEntrySize set to maxSize by default
+        if (this.maxEntrySize && size > this.maxEntrySize) {
+            if (status) {
+                status.set = 'miss';
+                status.maxEntrySizeExceeded = true;
+            }
+            // have to delete, in case something is there already.
+            this.#delete(k, 'set');
+            return this;
+        }
+        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
+        if (index === undefined) {
+            // addition
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
+                        : this.#size);
+            this.#keyList[index] = k;
+            this.#valList[index] = v;
+            this.#keyMap.set(k, index);
+            this.#next[this.#tail] = index;
+            this.#prev[index] = this.#tail;
+            this.#tail = index;
+            this.#size++;
+            this.#addItemSize(index, size, status);
+            if (status)
+                status.set = 'add';
+            noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
+        }
+        else {
+            // update
+            this.#moveToTail(index);
+            const oldVal = this.#valList[index];
+            if (v !== oldVal) {
+                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
+                    oldVal.__abortController.abort(new Error('replaced'));
+                    const { __staleWhileFetching: s } = oldVal;
+                    if (s !== undefined && !noDisposeOnSet) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(s, k, 'set');
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([s, k, 'set']);
+                        }
+                    }
+                }
+                else if (!noDisposeOnSet) {
+                    if (this.#hasDispose) {
+                        this.#dispose?.(oldVal, k, 'set');
+                    }
+                    if (this.#hasDisposeAfter) {
+                        this.#disposed?.push([oldVal, k, 'set']);
+                    }
+                }
+                this.#removeItemSize(index);
+                this.#addItemSize(index, size, status);
+                this.#valList[index] = v;
+                if (status) {
+                    status.set = 'replace';
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
+                        : oldVal;
+                    if (oldValue !== undefined)
+                        status.oldValue = oldValue;
+                }
+            }
+            else if (status) {
+                status.set = 'update';
+            }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
+        }
+        if (ttl !== 0 && !this.#ttls) {
+            this.#initializeTTLTracking();
+        }
+        if (this.#ttls) {
+            if (!noUpdateTTL) {
+                this.#setItemTTL(index, ttl, start);
+            }
+            if (status)
+                this.#statusTTL(status, index);
+        }
+        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return this;
+    }
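+    // set() sketch, per the options described above; keys, values, and numbers
+    // are illustrative, and the `size` option assumes maxSize or maxEntrySize
+    // was configured on the cache.
+    //
+    //   cache.set('a', 1)                                    // plain add
+    //   cache.set('b', buf, { ttl: 5000, size: buf.length }) // per-call ttl/size
+    //   cache.set('a', undefined)                            // same as cache.delete('a')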
+    /**
+     * Evict the least recently used item, returning its value or
+     * `undefined` if cache is empty.
+     */
+    pop() {
+        try {
+            while (this.#size) {
+                const val = this.#valList[this.#head];
+                this.#evict(true);
+                if (this.#isBackgroundFetch(val)) {
+                    if (val.__staleWhileFetching) {
+                        return val.__staleWhileFetching;
+                    }
+                }
+                else if (val !== undefined) {
+                    return val;
+                }
+            }
+        }
+        finally {
+            if (this.#hasDisposeAfter && this.#disposed) {
+                const dt = this.#disposed;
+                let task;
+                while ((task = dt?.shift())) {
+                    this.#disposeAfter?.(...task);
+                }
+            }
+        }
+    }
+    #evict(free) {
+        const head = this.#head;
+        const k = this.#keyList[head];
+        const v = this.#valList[head];
+        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
+            v.__abortController.abort(new Error('evicted'));
+        }
+        else if (this.#hasDispose || this.#hasDisposeAfter) {
+            if (this.#hasDispose) {
+                this.#dispose?.(v, k, 'evict');
+            }
+            if (this.#hasDisposeAfter) {
+                this.#disposed?.push([v, k, 'evict']);
+            }
+        }
+        this.#removeItemSize(head);
+        // if we aren't about to use the index, then null these out
+        if (free) {
+            this.#keyList[head] = undefined;
+            this.#valList[head] = undefined;
+            this.#free.push(head);
+        }
+        if (this.#size === 1) {
+            this.#head = this.#tail = 0;
+            this.#free.length = 0;
+        }
+        else {
+            this.#head = this.#next[head];
+        }
+        this.#keyMap.delete(k);
+        this.#size--;
+        return head;
+    }
+    /**
+     * Check if a key is in the cache, without updating the recency of use.
+     * Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
+     * to `true` in either the options or the constructor.
+     *
+     * Will return `false` if the item is stale, even though it is technically in
+     * the cache. The difference can be determined (if it matters) by using a
+     * `status` argument, and inspecting the `has` field.
+     */
+    has(k, hasOptions = {}) {
+        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v) &&
+                v.__staleWhileFetching === undefined) {
+                return false;
+            }
+            if (!this.#isStale(index)) {
+                if (updateAgeOnHas) {
+                    this.#updateItemAge(index);
+                }
+                if (status) {
+                    status.has = 'hit';
+                    this.#statusTTL(status, index);
+                }
+                return true;
+            }
+            else if (status) {
+                status.has = 'stale';
+                this.#statusTTL(status, index);
+            }
+        }
+        else if (status) {
+            status.has = 'miss';
+        }
+        return false;
+    }
+    /**
+     * Like {@link LRUCache#get} but doesn't update recency or delete stale
+     * items.
+     *
+     * Returns `undefined` if the item is stale, unless
+     * {@link LRUCache.OptionsBase.allowStale} is set.
+     */
+    peek(k, peekOptions = {}) {
+        const { allowStale = this.allowStale } = peekOptions;
+        const index = this.#keyMap.get(k);
+        if (index === undefined ||
+            (!allowStale && this.#isStale(index))) {
+            return;
+        }
+        const v = this.#valList[index];
+        // either stale and allowed, or forcing a refresh of non-stale value
+        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+    }
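+    // has()/peek() sketch, per the docs above: neither promotes the entry, and
+    // has() reports stale entries as absent unless a status object is
+    // inspected. The key name is illustrative.
+    //
+    //   const status = {}
+    //   cache.has('a', { status })   // false for stale entries; status.has === 'stale'
+    //   cache.peek('a')              // value without touching recency or TTL state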
+    #backgroundFetch(k, index, options, context) {
+        const v = index === undefined ? undefined : this.#valList[index];
+        if (this.#isBackgroundFetch(v)) {
+            return v;
+        }
+        const ac = new AC();
+        const { signal } = options;
+        // when/if our AC signals, then stop listening to theirs.
+        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
+            signal: ac.signal,
+        });
+        const fetchOpts = {
+            signal: ac.signal,
+            options,
+            context,
+        };
+        const cb = (v, updateCache = false) => {
+            const { aborted } = ac.signal;
+            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
+            if (options.status) {
+                if (aborted && !updateCache) {
+                    options.status.fetchAborted = true;
+                    options.status.fetchError = ac.signal.reason;
+                    if (ignoreAbort)
+                        options.status.fetchAbortIgnored = true;
+                }
+                else {
+                    options.status.fetchResolved = true;
+                }
+            }
+            if (aborted && !ignoreAbort && !updateCache) {
+                return fetchFail(ac.signal.reason);
+            }
+            // either we didn't abort, and are still here, or we did, and ignored
+            const bf = p;
+            if (this.#valList[index] === p) {
+                if (v === undefined) {
+                    if (bf.__staleWhileFetching !== undefined) {
+                        this.#valList[index] = bf.__staleWhileFetching;
+                    }
+                    else {
+                        this.#delete(k, 'fetch');
+                    }
+                }
+                else {
+                    if (options.status)
+                        options.status.fetchUpdated = true;
+                    this.set(k, v, fetchOpts.options);
+                }
+            }
+            return v;
+        };
+        const eb = (er) => {
+            if (options.status) {
+                options.status.fetchRejected = true;
+                options.status.fetchError = er;
+            }
+            return fetchFail(er);
+        };
+        const fetchFail = (er) => {
+            const { aborted } = ac.signal;
+            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
+            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
+            const noDelete = allowStale || options.noDeleteOnFetchRejection;
+            const bf = p;
+            if (this.#valList[index] === p) {
+                // if we allow stale on fetch rejections, then we need to ensure that
+                // the stale value is not removed from the cache when the fetch fails.
+                const del = !noDelete || bf.__staleWhileFetching === undefined;
+                if (del) {
+                    this.#delete(k, 'fetch');
+                }
+                else if (!allowStaleAborted) {
+                    // still replace the *promise* with the stale value,
+                    // since we are done with the promise at this point.
+                    // leave it untouched if we're still waiting for an
+                    // aborted background fetch that hasn't yet returned.
+                    this.#valList[index] = bf.__staleWhileFetching;
+                }
+            }
+            if (allowStale) {
+                if (options.status && bf.__staleWhileFetching !== undefined) {
+                    options.status.returnedStale = true;
+                }
+                return bf.__staleWhileFetching;
+            }
+            else if (bf.__returned === bf) {
+                throw er;
+            }
+        };
+        const pcall = (res, rej) => {
+            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
+            if (fmp && fmp instanceof Promise) {
+                fmp.then(v => res(v === undefined ? undefined : v), rej);
+            }
+            // ignored, we go until we finish, regardless.
+            // defer check until we are actually aborting,
+            // so fetchMethod can override.
+            ac.signal.addEventListener('abort', () => {
+                if (!options.ignoreFetchAbort ||
+                    options.allowStaleOnFetchAbort) {
+                    res(undefined);
+                    // when it eventually resolves, update the cache.
+                    if (options.allowStaleOnFetchAbort) {
+                        res = v => cb(v, true);
+                    }
+                }
+            });
+        };
+        if (options.status)
+            options.status.fetchDispatched = true;
+        const p = new Promise(pcall).then(cb, eb);
+        const bf = Object.assign(p, {
+            __abortController: ac,
+            __staleWhileFetching: v,
+            __returned: undefined,
+        });
+        if (index === undefined) {
+            // internal, don't expose status.
+            this.set(k, bf, { ...fetchOpts.options, status: undefined });
+            index = this.#keyMap.get(k);
+        }
+        else {
+            this.#valList[index] = bf;
+        }
+        return bf;
+    }
+    #isBackgroundFetch(p) {
+        if (!this.#hasFetchMethod)
+            return false;
+        const b = p;
+        return (!!b &&
+            b instanceof Promise &&
+            b.hasOwnProperty('__staleWhileFetching') &&
+            b.__abortController instanceof AC);
+    }
+    async fetch(k, fetchOptions = {}) {
+        const { 
+        // get options
+        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
+        // set options
+        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
+        // fetch exclusive options
+        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
+        if (!this.#hasFetchMethod) {
+            if (status)
+                status.fetch = 'get';
+            return this.get(k, {
+                allowStale,
+                updateAgeOnGet,
+                noDeleteOnStaleGet,
+                status,
+            });
+        }
+        const options = {
+            allowStale,
+            updateAgeOnGet,
+            noDeleteOnStaleGet,
+            ttl,
+            noDisposeOnSet,
+            size,
+            sizeCalculation,
+            noUpdateTTL,
+            noDeleteOnFetchRejection,
+            allowStaleOnFetchRejection,
+            allowStaleOnFetchAbort,
+            ignoreFetchAbort,
+            status,
+            signal,
+        };
+        let index = this.#keyMap.get(k);
+        if (index === undefined) {
+            if (status)
+                status.fetch = 'miss';
+            const p = this.#backgroundFetch(k, index, options, context);
+            return (p.__returned = p);
+        }
+        else {
+            // in cache, maybe already fetching
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                const stale = allowStale && v.__staleWhileFetching !== undefined;
+                if (status) {
+                    status.fetch = 'inflight';
+                    if (stale)
+                        status.returnedStale = true;
+                }
+                return stale ? v.__staleWhileFetching : (v.__returned = v);
+            }
+            // if we force a refresh, that means do NOT serve the cached value,
+            // unless we are already in the process of refreshing the cache.
+            const isStale = this.#isStale(index);
+            if (!forceRefresh && !isStale) {
+                if (status)
+                    status.fetch = 'hit';
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                if (status)
+                    this.#statusTTL(status, index);
+                return v;
+            }
+            // ok, it is stale or a forced refresh, and not already fetching.
+            // refresh the cache.
+            const p = this.#backgroundFetch(k, index, options, context);
+            const hasStale = p.__staleWhileFetching !== undefined;
+            const staleVal = hasStale && allowStale;
+            if (status) {
+                status.fetch = isStale ? 'stale' : 'refresh';
+                if (staleVal && isStale)
+                    status.returnedStale = true;
+            }
+            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
+        }
+    }
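+    // fetch() sketch: assumes the cache was constructed with an async
+    // fetchMethod (shown here calling an illustrative helper, fetchUser).
+    // Concurrent fetches for the same key share one in-flight promise, and a
+    // stale value can be served while refreshing when allowStale is set.
+    //
+    //   const users = new LRUCache({
+    //     max: 100,
+    //     ttl: 60_000,
+    //     allowStale: true,
+    //     fetchMethod: async (key, staleValue, { signal }) =>
+    //       fetchUser(key, { signal }),
+    //   })
+    //   const user = await users.fetch('user:1')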
+    async forceFetch(k, fetchOptions = {}) {
+        const v = await this.fetch(k, fetchOptions);
+        if (v === undefined)
+            throw new Error('fetch() returned undefined');
+        return v;
+    }
+    memo(k, memoOptions = {}) {
+        const memoMethod = this.#memoMethod;
+        if (!memoMethod) {
+            throw new Error('no memoMethod provided to constructor');
+        }
+        const { context, forceRefresh, ...options } = memoOptions;
+        const v = this.get(k, options);
+        if (!forceRefresh && v !== undefined)
+            return v;
+        const vv = memoMethod(k, v, {
+            options,
+            context,
+        });
+        this.set(k, vv, options);
+        return vv;
+    }
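+    // memo() sketch: assumes a synchronous memoMethod was provided to the
+    // constructor; the computation is illustrative. Unlike fetch(), memo()
+    // computes and stores the value synchronously.
+    //
+    //   const squares = new LRUCache({
+    //     max: 1000,
+    //     memoMethod: (key, oldValue, { context }) => Number(key) ** 2,
+    //   })
+    //   squares.memo('12')   // 144, computed once and then served from cache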
+    /**
+     * Return a value from the cache. Will update the recency of the cache
+     * entry found.
+     *
+     * If the key is not found, get() will return `undefined`.
+     */
+    get(k, getOptions = {}) {
+        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
+        const index = this.#keyMap.get(k);
+        if (index !== undefined) {
+            const value = this.#valList[index];
+            const fetching = this.#isBackgroundFetch(value);
+            if (status)
+                this.#statusTTL(status, index);
+            if (this.#isStale(index)) {
+                if (status)
+                    status.get = 'stale';
+                // delete only if not an in-flight background fetch
+                if (!fetching) {
+                    if (!noDeleteOnStaleGet) {
+                        this.#delete(k, 'expire');
+                    }
+                    if (status && allowStale)
+                        status.returnedStale = true;
+                    return allowStale ? value : undefined;
+                }
+                else {
+                    if (status &&
+                        allowStale &&
+                        value.__staleWhileFetching !== undefined) {
+                        status.returnedStale = true;
+                    }
+                    return allowStale ? value.__staleWhileFetching : undefined;
+                }
+            }
+            else {
+                if (status)
+                    status.get = 'hit';
+                // if we're currently fetching it, we don't actually have it yet
+                // it's not stale, which means this isn't a staleWhileRefetching.
+                // If it's not stale, and fetching, AND has a __staleWhileFetching
+                // value, then that means the user fetched with {forceRefresh:true},
+                // so it's safe to return that value.
+                if (fetching) {
+                    return value.__staleWhileFetching;
+                }
+                this.#moveToTail(index);
+                if (updateAgeOnGet) {
+                    this.#updateItemAge(index);
+                }
+                return value;
+            }
+        }
+        else if (status) {
+            status.get = 'miss';
+        }
+    }
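+    // get() sketch, per the doc above: a fresh hit promotes the entry; a stale
+    // hit deletes it (unless noDeleteOnStaleGet) and returns undefined unless
+    // allowStale is set. Key name and status usage are illustrative.
+    //
+    //   const status = {}
+    //   const v = cache.get('a', { allowStale: true, status })
+    //   // status.get is 'hit', 'stale', or 'miss'; status.returnedStale marks stale serves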
+    #connect(p, n) {
+        this.#prev[n] = p;
+        this.#next[p] = n;
+    }
+    #moveToTail(index) {
+        // if tail already, nothing to do
+        // if head, move head to next[index]
+        // else
+        //   move next[prev[index]] to next[index] (head has no prev)
+        //   move prev[next[index]] to prev[index]
+        // prev[index] = tail
+        // next[tail] = index
+        // tail = index
+        if (index !== this.#tail) {
+            if (index === this.#head) {
+                this.#head = this.#next[index];
+            }
+            else {
+                this.#connect(this.#prev[index], this.#next[index]);
+            }
+            this.#connect(this.#tail, index);
+            this.#tail = index;
+        }
+    }
+    /**
+     * Deletes a key out of the cache.
+     *
+     * Returns true if the key was deleted, false otherwise.
+     */
+    delete(k) {
+        return this.#delete(k, 'delete');
+    }
+    #delete(k, reason) {
+        let deleted = false;
+        if (this.#size !== 0) {
+            const index = this.#keyMap.get(k);
+            if (index !== undefined) {
+                deleted = true;
+                if (this.#size === 1) {
+                    this.#clear(reason);
+                }
+                else {
+                    this.#removeItemSize(index);
+                    const v = this.#valList[index];
+                    if (this.#isBackgroundFetch(v)) {
+                        v.__abortController.abort(new Error('deleted'));
+                    }
+                    else if (this.#hasDispose || this.#hasDisposeAfter) {
+                        if (this.#hasDispose) {
+                            this.#dispose?.(v, k, reason);
+                        }
+                        if (this.#hasDisposeAfter) {
+                            this.#disposed?.push([v, k, reason]);
+                        }
+                    }
+                    this.#keyMap.delete(k);
+                    this.#keyList[index] = undefined;
+                    this.#valList[index] = undefined;
+                    if (index === this.#tail) {
+                        this.#tail = this.#prev[index];
+                    }
+                    else if (index === this.#head) {
+                        this.#head = this.#next[index];
+                    }
+                    else {
+                        const pi = this.#prev[index];
+                        this.#next[pi] = this.#next[index];
+                        const ni = this.#next[index];
+                        this.#prev[ni] = this.#prev[index];
+                    }
+                    this.#size--;
+                    this.#free.push(index);
+                }
+            }
+        }
+        if (this.#hasDisposeAfter && this.#disposed?.length) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+        return deleted;
+    }
+    /**
+     * Clear the cache entirely, throwing away all values.
+     */
+    clear() {
+        return this.#clear('delete');
+    }
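+    // delete()/clear() sketch: delete() reports whether anything was removed,
+    // and clear() disposes every entry (with reason 'delete') before resetting
+    // internal state. The key name is illustrative.
+    //
+    //   cache.delete('a')   // true if 'a' was present
+    //   cache.clear()       // cache.size === 0 afterwards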
+    #clear(reason) {
+        for (const index of this.#rindexes({ allowStale: true })) {
+            const v = this.#valList[index];
+            if (this.#isBackgroundFetch(v)) {
+                v.__abortController.abort(new Error('deleted'));
+            }
+            else {
+                const k = this.#keyList[index];
+                if (this.#hasDispose) {
+                    this.#dispose?.(v, k, reason);
+                }
+                if (this.#hasDisposeAfter) {
+                    this.#disposed?.push([v, k, reason]);
+                }
+            }
+        }
+        this.#keyMap.clear();
+        this.#valList.fill(undefined);
+        this.#keyList.fill(undefined);
+        if (this.#ttls && this.#starts) {
+            this.#ttls.fill(0);
+            this.#starts.fill(0);
+        }
+        if (this.#sizes) {
+            this.#sizes.fill(0);
+        }
+        this.#head = 0;
+        this.#tail = 0;
+        this.#free.length = 0;
+        this.#calculatedSize = 0;
+        this.#size = 0;
+        if (this.#hasDisposeAfter && this.#disposed) {
+            const dt = this.#disposed;
+            let task;
+            while ((task = dt?.shift())) {
+                this.#disposeAfter?.(...task);
+            }
+        }
+    }
+}
+//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..07dd8fc3c59d8
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/package.json
new file mode 100644
index 0000000000000..3dbc1ca591c05
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/package.json
@@ -0,0 +1,3 @@
+{
+  "type": "module"
+}
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/package.json b/node_modules/@npmcli/git/node_modules/lru-cache/package.json
new file mode 100644
index 0000000000000..4953bdf4a7a35
--- /dev/null
+++ b/node_modules/@npmcli/git/node_modules/lru-cache/package.json
@@ -0,0 +1,113 @@
+{
+  "name": "lru-cache",
+  "description": "A cache object that deletes the least-recently-used items.",
+  "version": "11.2.1",
+  "author": "Isaac Z. Schlueter ",
+  "keywords": [
+    "mru",
+    "lru",
+    "cache"
+  ],
+  "sideEffects": false,
+  "scripts": {
+    "build": "npm run prepare",
+    "prepare": "tshy && bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write .",
+    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
+    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
+    "prebenchmark": "npm run prepare",
+    "benchmark": "make -C benchmark",
+    "preprofile": "npm run prepare",
+    "profile": "make -C benchmark profile"
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "tshy": {
+    "exports": {
+      ".": "./src/index.ts",
+      "./min": {
+        "import": {
+          "types": "./dist/esm/index.d.ts",
+          "default": "./dist/esm/index.min.js"
+        },
+        "require": {
+          "types": "./dist/commonjs/index.d.ts",
+          "default": "./dist/commonjs/index.min.js"
+        }
+      }
+    }
+  },
+  "repository": {
+    "type": "git",
+    "url": "git://github.com/isaacs/node-lru-cache.git"
+  },
+  "devDependencies": {
+    "@types/node": "^24.3.0",
+    "benchmark": "^2.1.4",
+    "esbuild": "^0.25.9",
+    "marked": "^4.2.12",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
+  },
+  "license": "ISC",
+  "files": [
+    "dist"
+  ],
+  "engines": {
+    "node": "20 || >=22"
+  },
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tap": {
+    "node-arg": [
+      "--expose-gc"
+    ],
+    "plugin": [
+      "@tapjs/clock"
+    ]
+  },
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./min": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.min.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.min.js"
+      }
+    }
+  },
+  "type": "module",
+  "module": "./dist/esm/index.js"
+}
diff --git a/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js b/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
deleted file mode 100644
index d409b7f1becfc..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-package-arg/lib/npa.js
+++ /dev/null
@@ -1,481 +0,0 @@
-'use strict'
-
-const isWindows = process.platform === 'win32'
-
-const { URL } = require('node:url')
-// We need to use path/win32 so that we get consistent results in tests, but this also means we need to manually convert backslashes to forward slashes when generating file: urls with paths.
-const path = isWindows ? require('node:path/win32') : require('node:path')
-const { homedir } = require('node:os')
-const HostedGit = require('hosted-git-info')
-const semver = require('semver')
-const validatePackageName = require('validate-npm-package-name')
-const { log } = require('proc-log')
-
-const hasSlashes = isWindows ? /\\|[/]/ : /[/]/
-const isURL = /^(?:git[+])?[a-z]+:/i
-const isGit = /^[^@]+@[^:.]+\.[^:]+:.+$/i
-const isFileType = /[.](?:tgz|tar.gz|tar)$/i
-const isPortNumber = /:[0-9]+(\/|$)/i
-const isWindowsFile = /^(?:[.]|~[/]|[/\\]|[a-zA-Z]:)/
-const isPosixFile = /^(?:[.]|~[/]|[/]|[a-zA-Z]:)/
-const defaultRegistry = 'https://registry.npmjs.org'
-
-function npa (arg, where) {
-  let name
-  let spec
-  if (typeof arg === 'object') {
-    if (arg instanceof Result && (!where || where === arg.where)) {
-      return arg
-    } else if (arg.name && arg.rawSpec) {
-      return npa.resolve(arg.name, arg.rawSpec, where || arg.where)
-    } else {
-      return npa(arg.raw, where || arg.where)
-    }
-  }
-  const nameEndsAt = arg.indexOf('@', 1) // Skip possible leading @
-  const namePart = nameEndsAt > 0 ? arg.slice(0, nameEndsAt) : arg
-  if (isURL.test(arg)) {
-    spec = arg
-  } else if (isGit.test(arg)) {
-    spec = `git+ssh://${arg}`
-  // eslint-disable-next-line max-len
-  } else if (!namePart.startsWith('@') && (hasSlashes.test(namePart) || isFileType.test(namePart))) {
-    spec = arg
-  } else if (nameEndsAt > 0) {
-    name = namePart
-    spec = arg.slice(nameEndsAt + 1) || '*'
-  } else {
-    const valid = validatePackageName(arg)
-    if (valid.validForOldPackages) {
-      name = arg
-      spec = '*'
-    } else {
-      spec = arg
-    }
-  }
-  return resolve(name, spec, where, arg)
-}
-
-function isFileSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  if (spec.toLowerCase().startsWith('file:')) {
-    return true
-  }
-  if (isWindows) {
-    return isWindowsFile.test(spec)
-  }
-  // We never hit this in windows tests, obviously
-  /* istanbul ignore next */
-  return isPosixFile.test(spec)
-}
-
-function isAliasSpec (spec) {
-  if (!spec) {
-    return false
-  }
-  return spec.toLowerCase().startsWith('npm:')
-}
-
-function resolve (name, spec, where, arg) {
-  const res = new Result({
-    raw: arg,
-    name: name,
-    rawSpec: spec,
-    fromArgument: arg != null,
-  })
-
-  if (name) {
-    res.name = name
-  }
-
-  if (!where) {
-    where = process.cwd()
-  }
-
-  if (isFileSpec(spec)) {
-    return fromFile(res, where)
-  } else if (isAliasSpec(spec)) {
-    return fromAlias(res, where)
-  }
-
-  const hosted = HostedGit.fromUrl(spec, {
-    noGitPlus: true,
-    noCommittish: true,
-  })
-  if (hosted) {
-    return fromHostedGit(res, hosted)
-  } else if (spec && isURL.test(spec)) {
-    return fromURL(res)
-  } else if (spec && (hasSlashes.test(spec) || isFileType.test(spec))) {
-    return fromFile(res, where)
-  } else {
-    return fromRegistry(res)
-  }
-}
-
-function toPurl (arg, reg = defaultRegistry) {
-  const res = npa(arg)
-
-  if (res.type !== 'version') {
-    throw invalidPurlType(res.type, res.raw)
-  }
-
-  // URI-encode leading @ of scoped packages
-  let purl = 'pkg:npm/' + res.name.replace(/^@/, '%40') + '@' + res.rawSpec
-  if (reg !== defaultRegistry) {
-    purl += '?repository_url=' + reg
-  }
-
-  return purl
-}
-
-function invalidPackageName (name, valid, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid package name "${name}" of package "${raw}": ${valid.errors.join('; ')}.`)
-  err.code = 'EINVALIDPACKAGENAME'
-  return err
-}
-
-function invalidTagName (name, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid tag name "${name}" of package "${raw}": Tags may not have any characters that encodeURIComponent encodes.`)
-  err.code = 'EINVALIDTAGNAME'
-  return err
-}
-
-function invalidPurlType (type, raw) {
-  // eslint-disable-next-line max-len
-  const err = new Error(`Invalid type "${type}" of package "${raw}": Purl can only be generated for "version" types.`)
-  err.code = 'EINVALIDPURLTYPE'
-  return err
-}
-
-class Result {
-  constructor (opts) {
-    this.type = opts.type
-    this.registry = opts.registry
-    this.where = opts.where
-    if (opts.raw == null) {
-      this.raw = opts.name ? `${opts.name}@${opts.rawSpec}` : opts.rawSpec
-    } else {
-      this.raw = opts.raw
-    }
-    this.name = undefined
-    this.escapedName = undefined
-    this.scope = undefined
-    this.rawSpec = opts.rawSpec || ''
-    this.saveSpec = opts.saveSpec
-    this.fetchSpec = opts.fetchSpec
-    if (opts.name) {
-      this.setName(opts.name)
-    }
-    this.gitRange = opts.gitRange
-    this.gitCommittish = opts.gitCommittish
-    this.gitSubdir = opts.gitSubdir
-    this.hosted = opts.hosted
-  }
-
-  // TODO move this to a getter/setter in a semver major
-  setName (name) {
-    const valid = validatePackageName(name)
-    if (!valid.validForOldPackages) {
-      throw invalidPackageName(name, valid, this.raw)
-    }
-
-    this.name = name
-    this.scope = name[0] === '@' ? name.slice(0, name.indexOf('/')) : undefined
-    // scoped packages in couch must have slash url-encoded, e.g. @foo%2Fbar
-    this.escapedName = name.replace('/', '%2f')
-    return this
-  }
-
-  toString () {
-    const full = []
-    if (this.name != null && this.name !== '') {
-      full.push(this.name)
-    }
-    const spec = this.saveSpec || this.fetchSpec || this.rawSpec
-    if (spec != null && spec !== '') {
-      full.push(spec)
-    }
-    return full.length ? full.join('@') : this.raw
-  }
-
-  toJSON () {
-    const result = Object.assign({}, this)
-    delete result.hosted
-    return result
-  }
-}
-
-// sets res.gitCommittish, res.gitRange, and res.gitSubdir
-function setGitAttrs (res, committish) {
-  if (!committish) {
-    res.gitCommittish = null
-    return
-  }
-
-  // for each :: separated item:
-  for (const part of committish.split('::')) {
-    // if the item has no : the n it is a commit-ish
-    if (!part.includes(':')) {
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a committish')
-      }
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a second committish')
-      }
-      res.gitCommittish = part
-      continue
-    }
-    // split on name:value
-    const [name, value] = part.split(':')
-    // if name is semver do semver lookup of ref or tag
-    if (name === 'semver') {
-      if (res.gitCommittish) {
-        throw new Error('cannot override existing committish with a semver range')
-      }
-      if (res.gitRange) {
-        throw new Error('cannot override existing semver range with a second semver range')
-      }
-      res.gitRange = decodeURIComponent(value)
-      continue
-    }
-    if (name === 'path') {
-      if (res.gitSubdir) {
-        throw new Error('cannot override existing path with a second path')
-      }
-      res.gitSubdir = `/${value}`
-      continue
-    }
-    log.warn('npm-package-arg', `ignoring unknown key "${name}"`)
-  }
-}
-
-// Taken from: EncodePathChars and lookup_table in src/node_url.cc
-// url.pathToFileURL only returns absolute references.  We can't use it to encode paths.
-// encodeURI mangles windows paths. We can't use it to encode paths.
-// Under the hood, url.pathToFileURL does a limited set of encoding, with an extra windows step, and then calls path.resolve.
-// The encoding node does without path.resolve is not available outside of the source, so we are recreating it here.
-const encodedPathChars = new Map([
-  ['\0', '%00'],
-  ['\t', '%09'],
-  ['\n', '%0A'],
-  ['\r', '%0D'],
-  [' ', '%20'],
-  ['"', '%22'],
-  ['#', '%23'],
-  ['%', '%25'],
-  ['?', '%3F'],
-  ['[', '%5B'],
-  ['\\', isWindows ? '/' : '%5C'],
-  [']', '%5D'],
-  ['^', '%5E'],
-  ['|', '%7C'],
-  ['~', '%7E'],
-])
-
-function pathToFileURL (str) {
-  let result = ''
-  for (let i = 0; i < str.length; i++) {
-    result = `${result}${encodedPathChars.get(str[i]) ?? str[i]}`
-  }
-  if (result.startsWith('file:')) {
-    return result
-  }
-  return `file:${result}`
-}
-
-function fromFile (res, where) {
-  res.type = isFileType.test(res.rawSpec) ? 'file' : 'directory'
-  res.where = where
-
-  let rawSpec = pathToFileURL(res.rawSpec)
-
-  if (rawSpec.startsWith('file:/')) {
-    // XXX backwards compatibility lack of compliance with RFC 8089
-
-    // turn file://path into file:/path
-    if (/^file:\/\/[^/]/.test(rawSpec)) {
-      rawSpec = `file:/${rawSpec.slice(5)}`
-    }
-
-    // turn file:/../path into file:../path
-    // for 1 or 3 leading slashes (2 is already ruled out from handling file:// explicitly above)
-    if (/^\/{1,3}\.\.?(\/|$)/.test(rawSpec.slice(5))) {
-      rawSpec = rawSpec.replace(/^file:\/{1,3}/, 'file:')
-    }
-  }
-
-  let resolvedUrl
-  let specUrl
-  try {
-    // always put the '/' on "where", or else file:foo from /path/to/bar goes to /path/to/foo, when we want it to be /path/to/bar/foo
-    resolvedUrl = new URL(rawSpec, `${pathToFileURL(path.resolve(where))}/`)
-    specUrl = new URL(rawSpec)
-  } catch (originalError) {
-    const er = new Error('Invalid file: URL, must comply with RFC 8089')
-    throw Object.assign(er, {
-      raw: res.rawSpec,
-      spec: res,
-      where,
-      originalError,
-    })
-  }
-
-  // turn /C:/blah into just C:/blah on windows
-  let specPath = decodeURIComponent(specUrl.pathname)
-  let resolvedPath = decodeURIComponent(resolvedUrl.pathname)
-  if (isWindows) {
-    specPath = specPath.replace(/^\/+([a-z]:\/)/i, '$1')
-    resolvedPath = resolvedPath.replace(/^\/+([a-z]:\/)/i, '$1')
-  }
-
-  // replace ~ with homedir, but keep the ~ in the saveSpec
-  // otherwise, make it relative to where param
-  if (/^\/~(\/|$)/.test(specPath)) {
-    res.saveSpec = `file:${specPath.substr(1)}`
-    resolvedPath = path.resolve(homedir(), specPath.substr(3))
-  } else if (!path.isAbsolute(rawSpec.slice(5))) {
-    res.saveSpec = `file:${path.relative(where, resolvedPath)}`
-  } else {
-    res.saveSpec = `file:${path.resolve(resolvedPath)}`
-  }
-
-  res.fetchSpec = path.resolve(where, resolvedPath)
-  // re-normalize the slashes in saveSpec due to node:path/win32 behavior in windows
-  res.saveSpec = res.saveSpec.split('\\').join('/')
-  // Ignoring because this only happens in windows
-  /* istanbul ignore next */
-  if (res.saveSpec.startsWith('file://')) {
-    // normalization of \\win32\root paths can cause a double / which we don't want
-    res.saveSpec = `file:/${res.saveSpec.slice(7)}`
-  }
-  return res
-}
-
-function fromHostedGit (res, hosted) {
-  res.type = 'git'
-  res.hosted = hosted
-  res.saveSpec = hosted.toString({ noGitPlus: false, noCommittish: false })
-  res.fetchSpec = hosted.getDefaultRepresentation() === 'shortcut' ? null : hosted.toString()
-  setGitAttrs(res, hosted.committish)
-  return res
-}
-
-function unsupportedURLType (protocol, spec) {
-  const err = new Error(`Unsupported URL Type "${protocol}": ${spec}`)
-  err.code = 'EUNSUPPORTEDPROTOCOL'
-  return err
-}
-
-function fromURL (res) {
-  let rawSpec = res.rawSpec
-  res.saveSpec = rawSpec
-  if (rawSpec.startsWith('git+ssh:')) {
-    // git ssh specifiers are overloaded to also use scp-style git
-    // specifiers, so we have to parse those out and treat them special.
-    // They are NOT true URIs, so we can't hand them to URL.
-
-    // This regex looks for things that look like:
-    // git+ssh://git@my.custom.git.com:username/project.git#deadbeef
-    // ...and various combinations. The username in the beginning is *required*.
-    const matched = rawSpec.match(/^git\+ssh:\/\/([^:#]+:[^#]+(?:\.git)?)(?:#(.*))?$/i)
-    // Filter out all-number "usernames" which are really port numbers
-    // They can either be :1234 :1234/ or :1234/path but not :12abc
-    if (matched && !matched[1].match(isPortNumber)) {
-      res.type = 'git'
-      setGitAttrs(res, matched[2])
-      res.fetchSpec = matched[1]
-      return res
-    }
-  } else if (rawSpec.startsWith('git+file://')) {
-    // URL can't handle windows paths
-    rawSpec = rawSpec.replace(/\\/g, '/')
-  }
-  const parsedUrl = new URL(rawSpec)
-  // check the protocol, and then see if it's git or not
-  switch (parsedUrl.protocol) {
-    case 'git:':
-    case 'git+http:':
-    case 'git+https:':
-    case 'git+rsync:':
-    case 'git+ftp:':
-    case 'git+file:':
-    case 'git+ssh:':
-      res.type = 'git'
-      setGitAttrs(res, parsedUrl.hash.slice(1))
-      if (parsedUrl.protocol === 'git+file:' && /^git\+file:\/\/[a-z]:/i.test(rawSpec)) {
-        // URL can't handle drive letters on windows file paths, the host can't contain a :
-        res.fetchSpec = `git+file://${parsedUrl.host.toLowerCase()}:${parsedUrl.pathname}`
-      } else {
-        parsedUrl.hash = ''
-        res.fetchSpec = parsedUrl.toString()
-      }
-      if (res.fetchSpec.startsWith('git+')) {
-        res.fetchSpec = res.fetchSpec.slice(4)
-      }
-      break
-    case 'http:':
-    case 'https:':
-      res.type = 'remote'
-      res.fetchSpec = res.saveSpec
-      break
-
-    default:
-      throw unsupportedURLType(parsedUrl.protocol, rawSpec)
-  }
-
-  return res
-}
-
-function fromAlias (res, where) {
-  const subSpec = npa(res.rawSpec.substr(4), where)
-  if (subSpec.type === 'alias') {
-    throw new Error('nested aliases not supported')
-  }
-
-  if (!subSpec.registry) {
-    throw new Error('aliases only work for registry deps')
-  }
-
-  if (!subSpec.name) {
-    throw new Error('aliases must have a name')
-  }
-
-  res.subSpec = subSpec
-  res.registry = true
-  res.type = 'alias'
-  res.saveSpec = null
-  res.fetchSpec = null
-  return res
-}
-
-function fromRegistry (res) {
-  res.registry = true
-  const spec = res.rawSpec.trim()
-  // no save spec for registry components as we save based on the fetched
-  // version, not on the argument so this can't compute that.
-  res.saveSpec = null
-  res.fetchSpec = spec
-  const version = semver.valid(spec, true)
-  const range = semver.validRange(spec, true)
-  if (version) {
-    res.type = 'version'
-  } else if (range) {
-    res.type = 'range'
-  } else {
-    if (encodeURIComponent(spec) !== spec) {
-      throw invalidTagName(spec, res.raw)
-    }
-    res.type = 'tag'
-  }
-  return res
-}
-
-module.exports = npa
-module.exports.resolve = resolve
-module.exports.toPurl = toPurl
-module.exports.Result = Result
diff --git a/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json b/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
deleted file mode 100644
index 58920fe240e5f..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-package-arg/package.json
+++ /dev/null
@@ -1,61 +0,0 @@
-{
-  "name": "npm-package-arg",
-  "version": "12.0.2",
-  "description": "Parse the things that can be arguments to `npm install`",
-  "main": "./lib/npa.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "dependencies": {
-    "hosted-git-info": "^8.0.0",
-    "proc-log": "^5.0.0",
-    "semver": "^7.3.5",
-    "validate-npm-package-name": "^6.0.0"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.5",
-    "tap": "^16.0.1"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "npmclilint": "npmcli-lint",
-    "lint": "npm run eslint",
-    "lintfix": "npm run eslint -- --fix",
-    "posttest": "npm run lint",
-    "postsnap": "npm run lintfix --",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-package-arg.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/npm-package-arg/issues"
-  },
-  "homepage": "https://github.com/npm/npm-package-arg",
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "tap": {
-    "branches": 97,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.5",
-    "publish": true
-  }
-}
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
deleted file mode 100644
index 8d28acf866d93..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/LICENSE.md
+++ /dev/null
@@ -1,16 +0,0 @@
-ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for
-any purpose with or without fee is hereby granted, provided that the
-above copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS
-ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
-COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR
-CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
-OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE
-USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
deleted file mode 100644
index 82807971844bf..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/lib/index.js
+++ /dev/null
@@ -1,224 +0,0 @@
-'use strict'
-
-const npa = require('npm-package-arg')
-const semver = require('semver')
-const { checkEngine } = require('npm-install-checks')
-const normalizeBin = require('npm-normalize-package-bin')
-
-const engineOk = (manifest, npmVersion, nodeVersion) => {
-  try {
-    checkEngine(manifest, npmVersion, nodeVersion)
-    return true
-  } catch (_) {
-    return false
-  }
-}
-
-const isBefore = (verTimes, ver, time) =>
-  !verTimes || !verTimes[ver] || Date.parse(verTimes[ver]) <= time
-
-const avoidSemverOpt = { includePrerelease: true, loose: true }
-const shouldAvoid = (ver, avoid) =>
-  avoid && semver.satisfies(ver, avoid, avoidSemverOpt)
-
-const decorateAvoid = (result, avoid) =>
-  result && shouldAvoid(result.version, avoid)
-    ? { ...result, _shouldAvoid: true }
-    : result
-
-const pickManifest = (packument, wanted, opts) => {
-  const {
-    defaultTag = 'latest',
-    before = null,
-    nodeVersion = process.version,
-    npmVersion = null,
-    includeStaged = false,
-    avoid = null,
-    avoidStrict = false,
-  } = opts
-
-  const { name, time: verTimes } = packument
-  const versions = packument.versions || {}
-
-  if (avoidStrict) {
-    const looseOpts = {
-      ...opts,
-      avoidStrict: false,
-    }
-
-    const result = pickManifest(packument, wanted, looseOpts)
-    if (!result || !result._shouldAvoid) {
-      return result
-    }
-
-    const caret = pickManifest(packument, `^${result.version}`, looseOpts)
-    if (!caret || !caret._shouldAvoid) {
-      return {
-        ...caret,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: false,
-      }
-    }
-
-    const star = pickManifest(packument, '*', looseOpts)
-    if (!star || !star._shouldAvoid) {
-      return {
-        ...star,
-        _outsideDependencyRange: true,
-        _isSemVerMajor: true,
-      }
-    }
-
-    throw Object.assign(new Error(`No avoidable versions for ${name}`), {
-      code: 'ETARGET',
-      name,
-      wanted,
-      avoid,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const staged = (includeStaged && packument.stagedVersions &&
-    packument.stagedVersions.versions) || {}
-  const restricted = (packument.policyRestrictions &&
-    packument.policyRestrictions.versions) || {}
-
-  const time = before && verTimes ? +(new Date(before)) : Infinity
-  const spec = npa.resolve(name, wanted || defaultTag)
-  const type = spec.type
-  const distTags = packument['dist-tags'] || {}
-
-  if (type !== 'tag' && type !== 'version' && type !== 'range') {
-    throw new Error('Only tag, version, and range are supported')
-  }
-
-  // if the type is 'tag', and not just the implicit default, then it must
-  // be that exactly, or nothing else will do.
-  if (wanted && type === 'tag') {
-    const ver = distTags[wanted]
-    // if the version in the dist-tags is before the before date, then
-    // we use that.  Otherwise, we get the highest precedence version
-    // prior to the dist-tag.
-    if (isBefore(verTimes, ver, time)) {
-      return decorateAvoid(versions[ver] || staged[ver] || restricted[ver], avoid)
-    } else {
-      return pickManifest(packument, `<=${ver}`, opts)
-    }
-  }
-
-  // similarly, if a specific version, then only that version will do
-  if (wanted && type === 'version') {
-    const ver = semver.clean(wanted, { loose: true })
-    const mani = versions[ver] || staged[ver] || restricted[ver]
-    return isBefore(verTimes, ver, time) ? decorateAvoid(mani, avoid) : null
-  }
-
-  // ok, sort based on our heuristics, and pick the best fit
-  const range = type === 'range' ? wanted : '*'
-
-  // if the range is *, then we prefer the 'latest' if available
-  // but skip this if it should be avoided, in that case we have
-  // to try a little harder.
-  const defaultVer = distTags[defaultTag]
-  if (defaultVer &&
-      (range === '*' || semver.satisfies(defaultVer, range, { loose: true })) &&
-      !restricted[defaultVer] &&
-      !shouldAvoid(defaultVer, avoid)) {
-    const mani = versions[defaultVer]
-    const ok = mani &&
-      isBefore(verTimes, defaultVer, time) &&
-      engineOk(mani, npmVersion, nodeVersion) &&
-      !mani.deprecated &&
-      !staged[defaultVer]
-    if (ok) {
-      return mani
-    }
-  }
-
-  // ok, actually have to sort the list and take the winner
-  const allEntries = Object.entries(versions)
-    .concat(Object.entries(staged))
-    .concat(Object.entries(restricted))
-    .filter(([ver]) => isBefore(verTimes, ver, time))
-
-  if (!allEntries.length) {
-    throw Object.assign(new Error(`No versions available for ${name}`), {
-      code: 'ENOVERSIONS',
-      name,
-      type,
-      wanted,
-      before,
-      versions: Object.keys(versions),
-    })
-  }
-
-  const sortSemverOpt = { loose: true }
-  const entries = allEntries.filter(([ver]) =>
-    semver.satisfies(ver, range, { loose: true }))
-    .sort((a, b) => {
-      const [vera, mania] = a
-      const [verb, manib] = b
-      const notavoida = !shouldAvoid(vera, avoid)
-      const notavoidb = !shouldAvoid(verb, avoid)
-      const notrestra = !restricted[vera]
-      const notrestrb = !restricted[verb]
-      const notstagea = !staged[vera]
-      const notstageb = !staged[verb]
-      const notdepra = !mania.deprecated
-      const notdeprb = !manib.deprecated
-      const enginea = engineOk(mania, npmVersion, nodeVersion)
-      const engineb = engineOk(manib, npmVersion, nodeVersion)
-      // sort by:
-      // - not an avoided version
-      // - not restricted
-      // - not staged
-      // - not deprecated and engine ok
-      // - engine ok
-      // - not deprecated
-      // - semver
-      return (notavoidb - notavoida) ||
-        (notrestrb - notrestra) ||
-        (notstageb - notstagea) ||
-        ((notdeprb && engineb) - (notdepra && enginea)) ||
-        (engineb - enginea) ||
-        (notdeprb - notdepra) ||
-        semver.rcompare(vera, verb, sortSemverOpt)
-    })
-
-  return decorateAvoid(entries[0] && entries[0][1], avoid)
-}
-
-module.exports = (packument, wanted, opts = {}) => {
-  const mani = pickManifest(packument, wanted, opts)
-  const picked = mani && normalizeBin(mani)
-  const policyRestrictions = packument.policyRestrictions
-  const restricted = (policyRestrictions && policyRestrictions.versions) || {}
-
-  if (picked && !restricted[picked.version]) {
-    return picked
-  }
-
-  const { before = null, defaultTag = 'latest' } = opts
-  const bstr = before ? new Date(before).toLocaleString() : ''
-  const { name } = packument
-  const pckg = `${name}@${wanted}` +
-    (before ? ` with a date before ${bstr}` : '')
-
-  const isForbidden = picked && !!restricted[picked.version]
-  const polMsg = isForbidden ? policyRestrictions.message : ''
-
-  const msg = !isForbidden ? `No matching version found for ${pckg}.`
-    : `Could not download ${pckg} due to policy violations:\n${polMsg}`
-
-  const code = isForbidden ? 'E403' : 'ETARGET'
-  throw Object.assign(new Error(msg), {
-    code,
-    type: npa.resolve(packument.name, wanted).type,
-    wanted,
-    versions: Object.keys(packument.versions ?? {}),
-    name,
-    distTags: packument['dist-tags'],
-    defaultTag,
-  })
-}
diff --git a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json b/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
deleted file mode 100644
index 5763088c250b6..0000000000000
--- a/node_modules/@npmcli/git/node_modules/npm-pick-manifest/package.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "name": "npm-pick-manifest",
-  "version": "10.0.0",
-  "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.",
-  "main": "./lib",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "scripts": {
-    "coverage": "tap",
-    "lint": "npm run eslint",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "snap": "tap",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/npm-pick-manifest.git"
-  },
-  "keywords": [
-    "npm",
-    "semver",
-    "package manager"
-  ],
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "dependencies": {
-    "npm-install-checks": "^7.1.0",
-    "npm-normalize-package-bin": "^4.0.0",
-    "npm-package-arg": "^12.0.0",
-    "semver": "^7.3.5"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.3",
-    "tap": "^16.0.1"
-  },
-  "tap": {
-    "check-coverage": true,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "engines": {
-    "node": "^18.17.0 || >=20.5.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.3",
-    "publish": true
-  }
-}
diff --git a/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/git/package.json
index 0880b2443d9fd..f4e844bccab0d 100644
--- a/node_modules/@npmcli/git/package.json
+++ b/node_modules/@npmcli/git/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/git",
-  "version": "6.0.3",
+  "version": "7.0.0",
   "main": "lib/index.js",
   "files": [
     "bin/",
@@ -33,22 +33,22 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
     "@npmcli/template-oss": "4.24.1",
-    "npm-package-arg": "^12.0.1",
+    "npm-package-arg": "^13.0.0",
     "slash": "^3.0.0",
     "tap": "^16.0.1"
   },
   "dependencies": {
     "@npmcli/promise-spawn": "^8.0.0",
     "ini": "^5.0.0",
-    "lru-cache": "^10.0.1",
-    "npm-pick-manifest": "^10.0.0",
+    "lru-cache": "^11.2.1",
+    "npm-pick-manifest": "^11.0.1",
     "proc-log": "^5.0.0",
     "promise-retry": "^2.0.1",
     "semver": "^7.3.5",
     "which": "^5.0.0"
   },
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE
deleted file mode 100644
index 8f90f96f4c6c5..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js
deleted file mode 100644
index e25a4d1426821..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/clone.js
+++ /dev/null
@@ -1,172 +0,0 @@
-// The goal here is to minimize both git workload and
-// the number of refs we download over the network.
-//
-// Every method ends up with the checked out working dir
-// at the specified ref, and resolves with the git sha.
-
-// Only certain whitelisted hosts get shallow cloning.
-// Many hosts (including GHE) don't always support it.
-// A failed shallow fetch takes a LOT longer than a full
-// fetch in most cases, so we skip it entirely.
-// Set opts.gitShallow = true/false to force this behavior
-// one way or the other.
-const shallowHosts = new Set([
-  'github.com',
-  'gist.github.com',
-  'gitlab.com',
-  'bitbucket.com',
-  'bitbucket.org',
-])
-// we have to use url.parse until we add the same shim that hosted-git-info has
-// to handle scp:// urls
-const { parse } = require('url') // eslint-disable-line node/no-deprecated-api
-const path = require('path')
-
-const getRevs = require('./revs.js')
-const spawn = require('./spawn.js')
-const { isWindows } = require('./utils.js')
-
-const pickManifest = require('npm-pick-manifest')
-const fs = require('fs/promises')
-
-module.exports = (repo, ref = 'HEAD', target = null, opts = {}) =>
-  getRevs(repo, opts).then(revs => clone(
-    repo,
-    revs,
-    ref,
-    resolveRef(revs, ref, opts),
-    target || defaultTarget(repo, opts.cwd),
-    opts
-  ))
-
-const maybeShallow = (repo, opts) => {
-  if (opts.gitShallow === false || opts.gitShallow) {
-    return opts.gitShallow
-  }
-  return shallowHosts.has(parse(repo).host)
-}
-
-const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) =>
-  path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, '')))
-
-const clone = (repo, revs, ref, revDoc, target, opts) => {
-  if (!revDoc) {
-    return unresolved(repo, ref, target, opts)
-  }
-  if (revDoc.sha === revs.refs.HEAD.sha) {
-    return plain(repo, revDoc, target, opts)
-  }
-  if (revDoc.type === 'tag' || revDoc.type === 'branch') {
-    return branch(repo, revDoc, target, opts)
-  }
-  return other(repo, revDoc, target, opts)
-}
-
-const resolveRef = (revs, ref, opts) => {
-  const { spec = {} } = opts
-  ref = spec.gitCommittish || ref
-  /* istanbul ignore next - will fail anyway, can't pull */
-  if (!revs) {
-    return null
-  }
-  if (spec.gitRange) {
-    return pickManifest(revs, spec.gitRange, opts)
-  }
-  if (!ref) {
-    return revs.refs.HEAD
-  }
-  if (revs.refs[ref]) {
-    return revs.refs[ref]
-  }
-  if (revs.shas[ref]) {
-    return revs.refs[revs.shas[ref][0]]
-  }
-  return null
-}
-
-// pull request or some other kind of advertised ref
-const other = (repo, revDoc, target, opts) => {
-  const shallow = maybeShallow(repo, opts)
-
-  const fetchOrigin = ['fetch', 'origin', revDoc.rawRef]
-    .concat(shallow ? ['--depth=1'] : [])
-
-  const git = (args) => spawn(args, { ...opts, cwd: target })
-  return fs.mkdir(target, { recursive: true })
-    .then(() => git(['init']))
-    .then(() => isWindows(opts)
-      ? git(['config', '--local', '--add', 'core.longpaths', 'true'])
-      : null)
-    .then(() => git(['remote', 'add', 'origin', repo]))
-    .then(() => git(fetchOrigin))
-    .then(() => git(['checkout', revDoc.sha]))
-    .then(() => updateSubmodules(target, opts))
-    .then(() => revDoc.sha)
-}
-
-// tag or branches.  use -b
-const branch = (repo, revDoc, target, opts) => {
-  const args = [
-    'clone',
-    '-b',
-    revDoc.ref,
-    repo,
-    target,
-    '--recurse-submodules',
-  ]
-  if (maybeShallow(repo, opts)) {
-    args.push('--depth=1')
-  }
-  if (isWindows(opts)) {
-    args.push('--config', 'core.longpaths=true')
-  }
-  return spawn(args, opts).then(() => revDoc.sha)
-}
-
-// just the head.  clone it
-const plain = (repo, revDoc, target, opts) => {
-  const args = [
-    'clone',
-    repo,
-    target,
-    '--recurse-submodules',
-  ]
-  if (maybeShallow(repo, opts)) {
-    args.push('--depth=1')
-  }
-  if (isWindows(opts)) {
-    args.push('--config', 'core.longpaths=true')
-  }
-  return spawn(args, opts).then(() => revDoc.sha)
-}
-
-const updateSubmodules = async (target, opts) => {
-  const hasSubmodules = await fs.stat(`${target}/.gitmodules`)
-    .then(() => true)
-    .catch(() => false)
-  if (!hasSubmodules) {
-    return null
-  }
-  return spawn([
-    'submodule',
-    'update',
-    '-q',
-    '--init',
-    '--recursive',
-  ], { ...opts, cwd: target })
-}
-
-const unresolved = (repo, ref, target, opts) => {
-  // can't do this one shallowly, because the ref isn't advertised
-  // but we can avoid checking out the working dir twice, at least
-  const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
-  const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
-  const git = (args) => spawn(args, { ...opts, cwd: target })
-  return fs.mkdir(target, { recursive: true })
-    .then(() => git(cloneArgs.concat(lp)))
-    .then(() => git(['init']))
-    .then(() => git(['checkout', ref]))
-    .then(() => updateSubmodules(target, opts))
-    .then(() => git(['rev-parse', '--revs-only', 'HEAD']))
-    .then(({ stdout }) => stdout.trim())
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js
deleted file mode 100644
index 3ceaa45811669..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/errors.js
+++ /dev/null
@@ -1,36 +0,0 @@
-
-const maxRetry = 3
-
-class GitError extends Error {
-  shouldRetry () {
-    return false
-  }
-}
-
-class GitConnectionError extends GitError {
-  constructor () {
-    super('A git connection error occurred')
-  }
-
-  shouldRetry (number) {
-    return number < maxRetry
-  }
-}
-
-class GitPathspecError extends GitError {
-  constructor () {
-    super('The git reference could not be found')
-  }
-}
-
-class GitUnknownError extends GitError {
-  constructor () {
-    super('An unknown git error occurred')
-  }
-}
-
-module.exports = {
-  GitConnectionError,
-  GitPathspecError,
-  GitUnknownError,
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js
deleted file mode 100644
index 34bd310b88e5d..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/find.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const is = require('./is.js')
-const { dirname } = require('path')
-
-module.exports = async ({ cwd = process.cwd(), root } = {}) => {
-  while (true) {
-    if (await is({ cwd })) {
-      return cwd
-    }
-    const next = dirname(cwd)
-    if (cwd === root || cwd === next) {
-      return null
-    }
-    cwd = next
-  }
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js
deleted file mode 100644
index 10a65f782e6da..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-module.exports = {
-  clone: require('./clone.js'),
-  revs: require('./revs.js'),
-  spawn: require('./spawn.js'),
-  is: require('./is.js'),
-  find: require('./find.js'),
-  isClean: require('./is-clean.js'),
-  errors: require('./errors.js'),
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js
deleted file mode 100644
index 182373be94193..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is-clean.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const spawn = require('./spawn.js')
-
-module.exports = (opts = {}) =>
-  spawn(['status', '--porcelain=v1', '-uno'], opts)
-    .then(res => !res.stdout.trim().split(/\r?\n+/)
-      .map(l => l.trim()).filter(l => l).length)
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js
deleted file mode 100644
index f5a0e8754f10d..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/is.js
+++ /dev/null
@@ -1,4 +0,0 @@
-// not an airtight indicator, but a good gut-check to even bother trying
-const { stat } = require('fs/promises')
-module.exports = ({ cwd = process.cwd() } = {}) =>
-  stat(cwd + '/.git').then(() => true, () => false)
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js
deleted file mode 100644
index 6bd7e7a4c1531..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/lines-to-revs.js
+++ /dev/null
@@ -1,147 +0,0 @@
-// turn an array of lines from `git ls-remote` into a thing
-// vaguely resembling a packument, where docs are a resolved ref
-
-const semver = require('semver')
-
-module.exports = lines => finish(lines.reduce(linesToRevsReducer, {
-  versions: {},
-  'dist-tags': {},
-  refs: {},
-  shas: {},
-}))
-
-const finish = revs => distTags(shaList(peelTags(revs)))
-
-// We can check out shallow clones on specific SHAs if we have a ref
-const shaList = revs => {
-  Object.keys(revs.refs).forEach(ref => {
-    const doc = revs.refs[ref]
-    if (!revs.shas[doc.sha]) {
-      revs.shas[doc.sha] = [ref]
-    } else {
-      revs.shas[doc.sha].push(ref)
-    }
-  })
-  return revs
-}
-
-// Replace any tags with their ^{} counterparts, if those exist
-const peelTags = revs => {
-  Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => {
-    const peeled = revs.refs[ref]
-    const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
-    if (unpeeled) {
-      unpeeled.sha = peeled.sha
-      delete revs.refs[ref]
-    }
-  })
-  return revs
-}
-
-const distTags = revs => {
-  // not entirely sure what situations would result in an
-  // ichabod repo, but best to be careful in Sleepy Hollow anyway
-  const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
-  const versions = Object.keys(revs.versions)
-  versions.forEach(v => {
-    // simulate a dist-tags with latest pointing at the
-    // 'latest' branch if one exists and is a version,
-    // or HEAD if not.
-    const ver = revs.versions[v]
-    if (revs.refs.latest && ver.sha === revs.refs.latest.sha) {
-      revs['dist-tags'].latest = v
-    } else if (ver.sha === HEAD.sha) {
-      revs['dist-tags'].HEAD = v
-      if (!revs.refs.latest) {
-        revs['dist-tags'].latest = v
-      }
-    }
-  })
-  return revs
-}
-
-const refType = ref => {
-  if (ref.startsWith('refs/tags/')) {
-    return 'tag'
-  }
-  if (ref.startsWith('refs/heads/')) {
-    return 'branch'
-  }
-  if (ref.startsWith('refs/pull/')) {
-    return 'pull'
-  }
-  if (ref === 'HEAD') {
-    return 'head'
-  }
-  // Could be anything, ignore for now
-  /* istanbul ignore next */
-  return 'other'
-}
-
-// return the doc, or null if we should ignore it.
-const lineToRevDoc = line => {
-  const split = line.trim().split(/\s+/, 2)
-  if (split.length < 2) {
-    return null
-  }
-
-  const sha = split[0].trim()
-  const rawRef = split[1].trim()
-  const type = refType(rawRef)
-
-  if (type === 'tag') {
-    // refs/tags/foo^{} is the 'peeled tag', ie the commit
-    // that is tagged by refs/tags/foo they resolve to the same
-    // content, just different objects in git's data structure.
-    // But, we care about the thing the tag POINTS to, not the tag
-    // object itself, so we only look at the peeled tag refs, and
-    // ignore the pointer.
-    // For now, though, we have to save both, because some tags
-    // don't have peels, if they were not annotated.
-    const ref = rawRef.slice('refs/tags/'.length)
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'branch') {
-    const ref = rawRef.slice('refs/heads/'.length)
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'pull') {
-    // NB: merged pull requests installable with #pull/123/merge
-    // for the merged pr, or #pull/123 for the PR head
-    const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '')
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'head') {
-    const ref = 'HEAD'
-    return { sha, ref, rawRef, type }
-  }
-
-  // at this point, all we can do is leave the ref un-munged
-  return { sha, ref: rawRef, rawRef, type }
-}
-
-const linesToRevsReducer = (revs, line) => {
-  const doc = lineToRevDoc(line)
-
-  if (!doc) {
-    return revs
-  }
-
-  revs.refs[doc.ref] = doc
-  revs.refs[doc.rawRef] = doc
-
-  if (doc.type === 'tag') {
-    // try to pull a semver value out of tags like `release-v1.2.3`
-    // which is a pretty common pattern.
-    const match = !doc.ref.endsWith('^{}') &&
-      doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
-    if (match && semver.valid(match[1], true)) {
-      revs.versions[semver.clean(match[1], true)] = doc
-    }
-  }
-
-  return revs
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js
deleted file mode 100644
index 7540ec7c8b9f7..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/make-error.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const {
-  GitConnectionError,
-  GitPathspecError,
-  GitUnknownError,
-} = require('./errors.js')
-
-const connectionErrorRe = new RegExp([
-  'remote error: Internal Server Error',
-  'The remote end hung up unexpectedly',
-  'Connection timed out',
-  'Operation timed out',
-  'Failed to connect to .* Timed out',
-  'Connection reset by peer',
-  'SSL_ERROR_SYSCALL',
-  'The requested URL returned error: 503',
-].join('|'))
-
-const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/
-
-function makeError (er) {
-  const message = er.stderr
-  let gitEr
-  if (connectionErrorRe.test(message)) {
-    gitEr = new GitConnectionError(message)
-  } else if (missingPathspecRe.test(message)) {
-    gitEr = new GitPathspecError(message)
-  } else {
-    gitEr = new GitUnknownError(message)
-  }
-  return Object.assign(gitEr, er)
-}
-
-module.exports = makeError
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js
deleted file mode 100644
index 1e80e9efe4989..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/opts.js
+++ /dev/null
@@ -1,57 +0,0 @@
-const fs = require('node:fs')
-const os = require('node:os')
-const path = require('node:path')
-const ini = require('ini')
-
-const gitConfigPath = path.join(os.homedir(), '.gitconfig')
-
-let cachedConfig = null
-
-// Function to load and cache the git config
-const loadGitConfig = () => {
-  if (cachedConfig === null) {
-    try {
-      cachedConfig = {}
-      if (fs.existsSync(gitConfigPath)) {
-        const configContent = fs.readFileSync(gitConfigPath, 'utf-8')
-        cachedConfig = ini.parse(configContent)
-      }
-    } catch (error) {
-      cachedConfig = {}
-    }
-  }
-  return cachedConfig
-}
-
-const checkGitConfigs = () => {
-  const config = loadGitConfig()
-  return {
-    sshCommandSetInConfig: config?.core?.sshCommand !== undefined,
-    askPassSetInConfig: config?.core?.askpass !== undefined,
-  }
-}
-
-const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined
-const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined
-const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs()
-
-// Values we want to set if they're not already defined by the end user
-// This defaults to accepting new ssh host key fingerprints
-const finalGitEnv = {
-  ...(askPassSetInEnv || askPassSetInConfig ? {} : {
-    GIT_ASKPASS: 'echo',
-  }),
-  ...(sshCommandSetInEnv || sshCommandSetInConfig ? {} : {
-    GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
-  }),
-}
-
-module.exports = (opts = {}) => ({
-  stdioString: true,
-  ...opts,
-  shell: false,
-  env: opts.env || { ...finalGitEnv, ...process.env },
-})
-
-// Export the loadGitConfig function for testing
-module.exports.loadGitConfig = loadGitConfig
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js
deleted file mode 100644
index ebcc848fa3458..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/revs.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const spawn = require('./spawn.js')
-const { LRUCache } = require('lru-cache')
-const linesToRevs = require('./lines-to-revs.js')
-
-const revsCache = new LRUCache({
-  max: 100,
-  ttl: 5 * 60 * 1000,
-})
-
-module.exports = async (repo, opts = {}) => {
-  if (!opts.noGitRevCache) {
-    const cached = revsCache.get(repo)
-    if (cached) {
-      return cached
-    }
-  }
-
-  const { stdout } = await spawn(['ls-remote', repo], opts)
-  const revs = linesToRevs(stdout.trim().split('\n'))
-  revsCache.set(repo, revs)
-  return revs
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js
deleted file mode 100644
index 03c1cbde21547..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/spawn.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const spawn = require('@npmcli/promise-spawn')
-const promiseRetry = require('promise-retry')
-const { log } = require('proc-log')
-const makeError = require('./make-error.js')
-const makeOpts = require('./opts.js')
-
-module.exports = (gitArgs, opts = {}) => {
-  const whichGit = require('./which.js')
-  const gitPath = whichGit(opts)
-
-  if (gitPath instanceof Error) {
-    return Promise.reject(gitPath)
-  }
-
-  // undocumented option, mostly only here for tests
-  const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects'
-    ? gitArgs
-    : ['--no-replace-objects', ...gitArgs]
-
-  let retryOpts = opts.retry
-  if (retryOpts === null || retryOpts === undefined) {
-    retryOpts = {
-      retries: opts.fetchRetries || 2,
-      factor: opts.fetchRetryFactor || 10,
-      maxTimeout: opts.fetchRetryMaxtimeout || 60000,
-      minTimeout: opts.fetchRetryMintimeout || 1000,
-    }
-  }
-  return promiseRetry((retryFn, number) => {
-    if (number !== 1) {
-      log.silly('git', `Retrying git command: ${
-        args.join(' ')} attempt # ${number}`)
-    }
-
-    return spawn(gitPath, args, makeOpts(opts))
-      .catch(er => {
-        const gitError = makeError(er)
-        if (!gitError.shouldRetry(number)) {
-          throw gitError
-        }
-        retryFn(gitError)
-      })
-  }, retryOpts)
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js
deleted file mode 100644
index fcd9578a19597..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/utils.js
+++ /dev/null
@@ -1,3 +0,0 @@
-const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32'
-
-exports.isWindows = isWindows
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js
deleted file mode 100644
index dc2a1ad212166..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/lib/which.js
+++ /dev/null
@@ -1,18 +0,0 @@
-const which = require('which')
-
-let gitPath
-try {
-  gitPath = which.sync('git')
-} catch {
-  // ignore errors
-}
-
-module.exports = (opts = {}) => {
-  if (opts.git) {
-    return opts.git
-  }
-  if (!gitPath || opts.git === false) {
-    return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' })
-  }
-  return gitPath
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json b/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json
deleted file mode 100644
index f4e844bccab0d..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/@npmcli/git/package.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "name": "@npmcli/git",
-  "version": "7.0.0",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "description": "a util for spawning git from npm CLI contexts",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/git.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "lint": "npm run eslint",
-    "snap": "tap",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "tap": {
-    "timeout": 600,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.24.1",
-    "npm-package-arg": "^13.0.0",
-    "slash": "^3.0.0",
-    "tap": "^16.0.1"
-  },
-  "dependencies": {
-    "@npmcli/promise-spawn": "^8.0.0",
-    "ini": "^5.0.0",
-    "lru-cache": "^11.2.1",
-    "npm-pick-manifest": "^11.0.1",
-    "proc-log": "^5.0.0",
-    "promise-retry": "^2.0.1",
-    "semver": "^7.3.5",
-    "which": "^5.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.1",
-    "publish": true
-  }
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/LICENSE b/node_modules/pacote/node_modules/@npmcli/git/LICENSE
deleted file mode 100644
index 8f90f96f4c6c5..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) npm, Inc.
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE NPM DISCLAIMS ALL WARRANTIES WITH
-REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
-FITNESS. IN NO EVENT SHALL THE NPM BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
-OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
-DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
-ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
-SOFTWARE.
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js b/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
deleted file mode 100644
index e25a4d1426821..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/clone.js
+++ /dev/null
@@ -1,172 +0,0 @@
-// The goal here is to minimize both git workload and
-// the number of refs we download over the network.
-//
-// Every method ends up with the checked out working dir
-// at the specified ref, and resolves with the git sha.
-
-// Only certain whitelisted hosts get shallow cloning.
-// Many hosts (including GHE) don't always support it.
-// A failed shallow fetch takes a LOT longer than a full
-// fetch in most cases, so we skip it entirely.
-// Set opts.gitShallow = true/false to force this behavior
-// one way or the other.
-const shallowHosts = new Set([
-  'github.com',
-  'gist.github.com',
-  'gitlab.com',
-  'bitbucket.com',
-  'bitbucket.org',
-])
-// we have to use url.parse until we add the same shim that hosted-git-info has
-// to handle scp:// urls
-const { parse } = require('url') // eslint-disable-line node/no-deprecated-api
-const path = require('path')
-
-const getRevs = require('./revs.js')
-const spawn = require('./spawn.js')
-const { isWindows } = require('./utils.js')
-
-const pickManifest = require('npm-pick-manifest')
-const fs = require('fs/promises')
-
-module.exports = (repo, ref = 'HEAD', target = null, opts = {}) =>
-  getRevs(repo, opts).then(revs => clone(
-    repo,
-    revs,
-    ref,
-    resolveRef(revs, ref, opts),
-    target || defaultTarget(repo, opts.cwd),
-    opts
-  ))
-
-const maybeShallow = (repo, opts) => {
-  if (opts.gitShallow === false || opts.gitShallow) {
-    return opts.gitShallow
-  }
-  return shallowHosts.has(parse(repo).host)
-}
-
-const defaultTarget = (repo, /* istanbul ignore next */ cwd = process.cwd()) =>
-  path.resolve(cwd, path.basename(repo.replace(/[/\\]?\.git$/, '')))
-
-const clone = (repo, revs, ref, revDoc, target, opts) => {
-  if (!revDoc) {
-    return unresolved(repo, ref, target, opts)
-  }
-  if (revDoc.sha === revs.refs.HEAD.sha) {
-    return plain(repo, revDoc, target, opts)
-  }
-  if (revDoc.type === 'tag' || revDoc.type === 'branch') {
-    return branch(repo, revDoc, target, opts)
-  }
-  return other(repo, revDoc, target, opts)
-}
-
-const resolveRef = (revs, ref, opts) => {
-  const { spec = {} } = opts
-  ref = spec.gitCommittish || ref
-  /* istanbul ignore next - will fail anyway, can't pull */
-  if (!revs) {
-    return null
-  }
-  if (spec.gitRange) {
-    return pickManifest(revs, spec.gitRange, opts)
-  }
-  if (!ref) {
-    return revs.refs.HEAD
-  }
-  if (revs.refs[ref]) {
-    return revs.refs[ref]
-  }
-  if (revs.shas[ref]) {
-    return revs.refs[revs.shas[ref][0]]
-  }
-  return null
-}
-
-// pull request or some other kind of advertised ref
-const other = (repo, revDoc, target, opts) => {
-  const shallow = maybeShallow(repo, opts)
-
-  const fetchOrigin = ['fetch', 'origin', revDoc.rawRef]
-    .concat(shallow ? ['--depth=1'] : [])
-
-  const git = (args) => spawn(args, { ...opts, cwd: target })
-  return fs.mkdir(target, { recursive: true })
-    .then(() => git(['init']))
-    .then(() => isWindows(opts)
-      ? git(['config', '--local', '--add', 'core.longpaths', 'true'])
-      : null)
-    .then(() => git(['remote', 'add', 'origin', repo]))
-    .then(() => git(fetchOrigin))
-    .then(() => git(['checkout', revDoc.sha]))
-    .then(() => updateSubmodules(target, opts))
-    .then(() => revDoc.sha)
-}
-
-// tag or branches.  use -b
-const branch = (repo, revDoc, target, opts) => {
-  const args = [
-    'clone',
-    '-b',
-    revDoc.ref,
-    repo,
-    target,
-    '--recurse-submodules',
-  ]
-  if (maybeShallow(repo, opts)) {
-    args.push('--depth=1')
-  }
-  if (isWindows(opts)) {
-    args.push('--config', 'core.longpaths=true')
-  }
-  return spawn(args, opts).then(() => revDoc.sha)
-}
-
-// just the head.  clone it
-const plain = (repo, revDoc, target, opts) => {
-  const args = [
-    'clone',
-    repo,
-    target,
-    '--recurse-submodules',
-  ]
-  if (maybeShallow(repo, opts)) {
-    args.push('--depth=1')
-  }
-  if (isWindows(opts)) {
-    args.push('--config', 'core.longpaths=true')
-  }
-  return spawn(args, opts).then(() => revDoc.sha)
-}
-
-const updateSubmodules = async (target, opts) => {
-  const hasSubmodules = await fs.stat(`${target}/.gitmodules`)
-    .then(() => true)
-    .catch(() => false)
-  if (!hasSubmodules) {
-    return null
-  }
-  return spawn([
-    'submodule',
-    'update',
-    '-q',
-    '--init',
-    '--recursive',
-  ], { ...opts, cwd: target })
-}
-
-const unresolved = (repo, ref, target, opts) => {
-  // can't do this one shallowly, because the ref isn't advertised
-  // but we can avoid checking out the working dir twice, at least
-  const lp = isWindows(opts) ? ['--config', 'core.longpaths=true'] : []
-  const cloneArgs = ['clone', '--mirror', '-q', repo, target + '/.git']
-  const git = (args) => spawn(args, { ...opts, cwd: target })
-  return fs.mkdir(target, { recursive: true })
-    .then(() => git(cloneArgs.concat(lp)))
-    .then(() => git(['init']))
-    .then(() => git(['checkout', ref]))
-    .then(() => updateSubmodules(target, opts))
-    .then(() => git(['rev-parse', '--revs-only', 'HEAD']))
-    .then(({ stdout }) => stdout.trim())
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js b/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
deleted file mode 100644
index 3ceaa45811669..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/errors.js
+++ /dev/null
@@ -1,36 +0,0 @@
-
-const maxRetry = 3
-
-class GitError extends Error {
-  shouldRetry () {
-    return false
-  }
-}
-
-class GitConnectionError extends GitError {
-  constructor () {
-    super('A git connection error occurred')
-  }
-
-  shouldRetry (number) {
-    return number < maxRetry
-  }
-}
-
-class GitPathspecError extends GitError {
-  constructor () {
-    super('The git reference could not be found')
-  }
-}
-
-class GitUnknownError extends GitError {
-  constructor () {
-    super('An unknown git error occurred')
-  }
-}
-
-module.exports = {
-  GitConnectionError,
-  GitPathspecError,
-  GitUnknownError,
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js b/node_modules/pacote/node_modules/@npmcli/git/lib/find.js
deleted file mode 100644
index 34bd310b88e5d..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/find.js
+++ /dev/null
@@ -1,15 +0,0 @@
-const is = require('./is.js')
-const { dirname } = require('path')
-
-module.exports = async ({ cwd = process.cwd(), root } = {}) => {
-  while (true) {
-    if (await is({ cwd })) {
-      return cwd
-    }
-    const next = dirname(cwd)
-    if (cwd === root || cwd === next) {
-      return null
-    }
-    cwd = next
-  }
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js b/node_modules/pacote/node_modules/@npmcli/git/lib/index.js
deleted file mode 100644
index 10a65f782e6da..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/index.js
+++ /dev/null
@@ -1,9 +0,0 @@
-module.exports = {
-  clone: require('./clone.js'),
-  revs: require('./revs.js'),
-  spawn: require('./spawn.js'),
-  is: require('./is.js'),
-  find: require('./find.js'),
-  isClean: require('./is-clean.js'),
-  errors: require('./errors.js'),
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
deleted file mode 100644
index 182373be94193..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/is-clean.js
+++ /dev/null
@@ -1,6 +0,0 @@
-const spawn = require('./spawn.js')
-
-module.exports = (opts = {}) =>
-  spawn(['status', '--porcelain=v1', '-uno'], opts)
-    .then(res => !res.stdout.trim().split(/\r?\n+/)
-      .map(l => l.trim()).filter(l => l).length)
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js b/node_modules/pacote/node_modules/@npmcli/git/lib/is.js
deleted file mode 100644
index f5a0e8754f10d..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/is.js
+++ /dev/null
@@ -1,4 +0,0 @@
-// not an airtight indicator, but a good gut-check to even bother trying
-const { stat } = require('fs/promises')
-module.exports = ({ cwd = process.cwd() } = {}) =>
-  stat(cwd + '/.git').then(() => true, () => false)
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
deleted file mode 100644
index 6bd7e7a4c1531..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/lines-to-revs.js
+++ /dev/null
@@ -1,147 +0,0 @@
-// turn an array of lines from `git ls-remote` into a thing
-// vaguely resembling a packument, where docs are a resolved ref
-
-const semver = require('semver')
-
-module.exports = lines => finish(lines.reduce(linesToRevsReducer, {
-  versions: {},
-  'dist-tags': {},
-  refs: {},
-  shas: {},
-}))
-
-const finish = revs => distTags(shaList(peelTags(revs)))
-
-// We can check out shallow clones on specific SHAs if we have a ref
-const shaList = revs => {
-  Object.keys(revs.refs).forEach(ref => {
-    const doc = revs.refs[ref]
-    if (!revs.shas[doc.sha]) {
-      revs.shas[doc.sha] = [ref]
-    } else {
-      revs.shas[doc.sha].push(ref)
-    }
-  })
-  return revs
-}
-
-// Replace any tags with their ^{} counterparts, if those exist
-const peelTags = revs => {
-  Object.keys(revs.refs).filter(ref => ref.endsWith('^{}')).forEach(ref => {
-    const peeled = revs.refs[ref]
-    const unpeeled = revs.refs[ref.replace(/\^\{\}$/, '')]
-    if (unpeeled) {
-      unpeeled.sha = peeled.sha
-      delete revs.refs[ref]
-    }
-  })
-  return revs
-}
-
-const distTags = revs => {
-  // not entirely sure what situations would result in an
-  // ichabod repo, but best to be careful in Sleepy Hollow anyway
-  const HEAD = revs.refs.HEAD || /* istanbul ignore next */ {}
-  const versions = Object.keys(revs.versions)
-  versions.forEach(v => {
-    // simulate a dist-tags with latest pointing at the
-    // 'latest' branch if one exists and is a version,
-    // or HEAD if not.
-    const ver = revs.versions[v]
-    if (revs.refs.latest && ver.sha === revs.refs.latest.sha) {
-      revs['dist-tags'].latest = v
-    } else if (ver.sha === HEAD.sha) {
-      revs['dist-tags'].HEAD = v
-      if (!revs.refs.latest) {
-        revs['dist-tags'].latest = v
-      }
-    }
-  })
-  return revs
-}
-
-const refType = ref => {
-  if (ref.startsWith('refs/tags/')) {
-    return 'tag'
-  }
-  if (ref.startsWith('refs/heads/')) {
-    return 'branch'
-  }
-  if (ref.startsWith('refs/pull/')) {
-    return 'pull'
-  }
-  if (ref === 'HEAD') {
-    return 'head'
-  }
-  // Could be anything, ignore for now
-  /* istanbul ignore next */
-  return 'other'
-}
-
-// return the doc, or null if we should ignore it.
-const lineToRevDoc = line => {
-  const split = line.trim().split(/\s+/, 2)
-  if (split.length < 2) {
-    return null
-  }
-
-  const sha = split[0].trim()
-  const rawRef = split[1].trim()
-  const type = refType(rawRef)
-
-  if (type === 'tag') {
-    // refs/tags/foo^{} is the 'peeled tag', ie the commit
-    // that is tagged by refs/tags/foo they resolve to the same
-    // content, just different objects in git's data structure.
-    // But, we care about the thing the tag POINTS to, not the tag
-    // object itself, so we only look at the peeled tag refs, and
-    // ignore the pointer.
-    // For now, though, we have to save both, because some tags
-    // don't have peels, if they were not annotated.
-    const ref = rawRef.slice('refs/tags/'.length)
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'branch') {
-    const ref = rawRef.slice('refs/heads/'.length)
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'pull') {
-    // NB: merged pull requests installable with #pull/123/merge
-    // for the merged pr, or #pull/123 for the PR head
-    const ref = rawRef.slice('refs/'.length).replace(/\/head$/, '')
-    return { sha, ref, rawRef, type }
-  }
-
-  if (type === 'head') {
-    const ref = 'HEAD'
-    return { sha, ref, rawRef, type }
-  }
-
-  // at this point, all we can do is leave the ref un-munged
-  return { sha, ref: rawRef, rawRef, type }
-}
-
-const linesToRevsReducer = (revs, line) => {
-  const doc = lineToRevDoc(line)
-
-  if (!doc) {
-    return revs
-  }
-
-  revs.refs[doc.ref] = doc
-  revs.refs[doc.rawRef] = doc
-
-  if (doc.type === 'tag') {
-    // try to pull a semver value out of tags like `release-v1.2.3`
-    // which is a pretty common pattern.
-    const match = !doc.ref.endsWith('^{}') &&
-      doc.ref.match(/v?(\d+\.\d+\.\d+(?:[-+].+)?)$/)
-    if (match && semver.valid(match[1], true)) {
-      revs.versions[semver.clean(match[1], true)] = doc
-    }
-  }
-
-  return revs
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js b/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
deleted file mode 100644
index 7540ec7c8b9f7..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/make-error.js
+++ /dev/null
@@ -1,33 +0,0 @@
-const {
-  GitConnectionError,
-  GitPathspecError,
-  GitUnknownError,
-} = require('./errors.js')
-
-const connectionErrorRe = new RegExp([
-  'remote error: Internal Server Error',
-  'The remote end hung up unexpectedly',
-  'Connection timed out',
-  'Operation timed out',
-  'Failed to connect to .* Timed out',
-  'Connection reset by peer',
-  'SSL_ERROR_SYSCALL',
-  'The requested URL returned error: 503',
-].join('|'))
-
-const missingPathspecRe = /pathspec .* did not match any file\(s\) known to git/
-
-function makeError (er) {
-  const message = er.stderr
-  let gitEr
-  if (connectionErrorRe.test(message)) {
-    gitEr = new GitConnectionError(message)
-  } else if (missingPathspecRe.test(message)) {
-    gitEr = new GitPathspecError(message)
-  } else {
-    gitEr = new GitUnknownError(message)
-  }
-  return Object.assign(gitEr, er)
-}
-
-module.exports = makeError
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js b/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
deleted file mode 100644
index 1e80e9efe4989..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/opts.js
+++ /dev/null
@@ -1,57 +0,0 @@
-const fs = require('node:fs')
-const os = require('node:os')
-const path = require('node:path')
-const ini = require('ini')
-
-const gitConfigPath = path.join(os.homedir(), '.gitconfig')
-
-let cachedConfig = null
-
-// Function to load and cache the git config
-const loadGitConfig = () => {
-  if (cachedConfig === null) {
-    try {
-      cachedConfig = {}
-      if (fs.existsSync(gitConfigPath)) {
-        const configContent = fs.readFileSync(gitConfigPath, 'utf-8')
-        cachedConfig = ini.parse(configContent)
-      }
-    } catch (error) {
-      cachedConfig = {}
-    }
-  }
-  return cachedConfig
-}
-
-const checkGitConfigs = () => {
-  const config = loadGitConfig()
-  return {
-    sshCommandSetInConfig: config?.core?.sshCommand !== undefined,
-    askPassSetInConfig: config?.core?.askpass !== undefined,
-  }
-}
-
-const sshCommandSetInEnv = process.env.GIT_SSH_COMMAND !== undefined
-const askPassSetInEnv = process.env.GIT_ASKPASS !== undefined
-const { sshCommandSetInConfig, askPassSetInConfig } = checkGitConfigs()
-
-// Values we want to set if they're not already defined by the end user
-// This defaults to accepting new ssh host key fingerprints
-const finalGitEnv = {
-  ...(askPassSetInEnv || askPassSetInConfig ? {} : {
-    GIT_ASKPASS: 'echo',
-  }),
-  ...(sshCommandSetInEnv || sshCommandSetInConfig ? {} : {
-    GIT_SSH_COMMAND: 'ssh -oStrictHostKeyChecking=accept-new',
-  }),
-}
-
-module.exports = (opts = {}) => ({
-  stdioString: true,
-  ...opts,
-  shell: false,
-  env: opts.env || { ...finalGitEnv, ...process.env },
-})
-
-// Export the loadGitConfig function for testing
-module.exports.loadGitConfig = loadGitConfig
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js b/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
deleted file mode 100644
index ebcc848fa3458..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/revs.js
+++ /dev/null
@@ -1,22 +0,0 @@
-const spawn = require('./spawn.js')
-const { LRUCache } = require('lru-cache')
-const linesToRevs = require('./lines-to-revs.js')
-
-const revsCache = new LRUCache({
-  max: 100,
-  ttl: 5 * 60 * 1000,
-})
-
-module.exports = async (repo, opts = {}) => {
-  if (!opts.noGitRevCache) {
-    const cached = revsCache.get(repo)
-    if (cached) {
-      return cached
-    }
-  }
-
-  const { stdout } = await spawn(['ls-remote', repo], opts)
-  const revs = linesToRevs(stdout.trim().split('\n'))
-  revsCache.set(repo, revs)
-  return revs
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js b/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
deleted file mode 100644
index 03c1cbde21547..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/spawn.js
+++ /dev/null
@@ -1,44 +0,0 @@
-const spawn = require('@npmcli/promise-spawn')
-const promiseRetry = require('promise-retry')
-const { log } = require('proc-log')
-const makeError = require('./make-error.js')
-const makeOpts = require('./opts.js')
-
-module.exports = (gitArgs, opts = {}) => {
-  const whichGit = require('./which.js')
-  const gitPath = whichGit(opts)
-
-  if (gitPath instanceof Error) {
-    return Promise.reject(gitPath)
-  }
-
-  // undocumented option, mostly only here for tests
-  const args = opts.allowReplace || gitArgs[0] === '--no-replace-objects'
-    ? gitArgs
-    : ['--no-replace-objects', ...gitArgs]
-
-  let retryOpts = opts.retry
-  if (retryOpts === null || retryOpts === undefined) {
-    retryOpts = {
-      retries: opts.fetchRetries || 2,
-      factor: opts.fetchRetryFactor || 10,
-      maxTimeout: opts.fetchRetryMaxtimeout || 60000,
-      minTimeout: opts.fetchRetryMintimeout || 1000,
-    }
-  }
-  return promiseRetry((retryFn, number) => {
-    if (number !== 1) {
-      log.silly('git', `Retrying git command: ${
-        args.join(' ')} attempt # ${number}`)
-    }
-
-    return spawn(gitPath, args, makeOpts(opts))
-      .catch(er => {
-        const gitError = makeError(er)
-        if (!gitError.shouldRetry(number)) {
-          throw gitError
-        }
-        retryFn(gitError)
-      })
-  }, retryOpts)
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js b/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
deleted file mode 100644
index fcd9578a19597..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/utils.js
+++ /dev/null
@@ -1,3 +0,0 @@
-const isWindows = opts => (opts.fakePlatform || process.platform) === 'win32'
-
-exports.isWindows = isWindows
diff --git a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js b/node_modules/pacote/node_modules/@npmcli/git/lib/which.js
deleted file mode 100644
index dc2a1ad212166..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/lib/which.js
+++ /dev/null
@@ -1,18 +0,0 @@
-const which = require('which')
-
-let gitPath
-try {
-  gitPath = which.sync('git')
-} catch {
-  // ignore errors
-}
-
-module.exports = (opts = {}) => {
-  if (opts.git) {
-    return opts.git
-  }
-  if (!gitPath || opts.git === false) {
-    return Object.assign(new Error('No git binary found in $PATH'), { code: 'ENOGIT' })
-  }
-  return gitPath
-}
diff --git a/node_modules/pacote/node_modules/@npmcli/git/package.json b/node_modules/pacote/node_modules/@npmcli/git/package.json
deleted file mode 100644
index f4e844bccab0d..0000000000000
--- a/node_modules/pacote/node_modules/@npmcli/git/package.json
+++ /dev/null
@@ -1,58 +0,0 @@
-{
-  "name": "@npmcli/git",
-  "version": "7.0.0",
-  "main": "lib/index.js",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "description": "a util for spawning git from npm CLI contexts",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/git.git"
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "scripts": {
-    "lint": "npm run eslint",
-    "snap": "tap",
-    "test": "tap",
-    "posttest": "npm run lint",
-    "postlint": "template-oss-check",
-    "lintfix": "npm run eslint -- --fix",
-    "template-oss-apply": "template-oss-apply --force",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "tap": {
-    "timeout": 600,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.24.1",
-    "npm-package-arg": "^13.0.0",
-    "slash": "^3.0.0",
-    "tap": "^16.0.1"
-  },
-  "dependencies": {
-    "@npmcli/promise-spawn": "^8.0.0",
-    "ini": "^5.0.0",
-    "lru-cache": "^11.2.1",
-    "npm-pick-manifest": "^11.0.1",
-    "proc-log": "^5.0.0",
-    "promise-retry": "^2.0.1",
-    "semver": "^7.3.5",
-    "which": "^5.0.0"
-  },
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.1",
-    "publish": true
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 11a0c28d8d0ac..ce454aa2e587d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -158,7 +158,7 @@
       "devDependencies": {
         "@npmcli/docs": "^1.0.0",
         "@npmcli/eslint-config": "^5.1.0",
-        "@npmcli/git": "^6.0.3",
+        "@npmcli/git": "^7.0.0",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.24.4",
@@ -208,8 +208,6 @@
     },
     "docs/node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -218,15 +216,11 @@
     },
     "docs/node_modules/@types/hast/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -235,15 +229,11 @@
     },
     "docs/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/escape-string-regexp": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
-      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -255,15 +245,11 @@
     },
     "docs/node_modules/github-slugger": {
       "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz",
-      "integrity": "sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==",
       "dev": true,
       "license": "ISC"
     },
     "docs/node_modules/hast-util-to-html": {
       "version": "8.0.4",
-      "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-8.0.4.tgz",
-      "integrity": "sha512-4tpQTUOr9BMjtYyNlt0P50mH7xj0Ks2xpo8M943Vykljf99HW6EzulIoJP1N3eKOSScEHzyzi9dm7/cn0RfGwA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -286,15 +272,11 @@
     },
     "docs/node_modules/hast-util-to-html/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/hast-util-whitespace": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz",
-      "integrity": "sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -304,8 +286,6 @@
     },
     "docs/node_modules/html-void-elements": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz",
-      "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -315,8 +295,6 @@
     },
     "docs/node_modules/jsdom": {
       "version": "24.1.3",
-      "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.3.tgz",
-      "integrity": "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -356,8 +334,6 @@
     },
     "docs/node_modules/mdast-util-definitions": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz",
-      "integrity": "sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -372,8 +348,6 @@
     },
     "docs/node_modules/mdast-util-definitions/node_modules/@types/mdast": {
       "version": "3.0.15",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
-      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -382,15 +356,11 @@
     },
     "docs/node_modules/mdast-util-definitions/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/mdast-util-definitions/node_modules/unist-util-is": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz",
-      "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -403,8 +373,6 @@
     },
     "docs/node_modules/mdast-util-definitions/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -419,8 +387,6 @@
     },
     "docs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -434,8 +400,6 @@
     },
     "docs/node_modules/mdast-util-find-and-replace": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
-      "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -451,8 +415,6 @@
     },
     "docs/node_modules/mdast-util-from-markdown": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
-      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -476,8 +438,6 @@
     },
     "docs/node_modules/mdast-util-gfm": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz",
-      "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -496,8 +456,6 @@
     },
     "docs/node_modules/mdast-util-gfm-autolink-literal": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz",
-      "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -514,8 +472,6 @@
     },
     "docs/node_modules/mdast-util-gfm-footnote": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz",
-      "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -532,8 +488,6 @@
     },
     "docs/node_modules/mdast-util-gfm-strikethrough": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz",
-      "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -548,8 +502,6 @@
     },
     "docs/node_modules/mdast-util-gfm-table": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz",
-      "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -566,8 +518,6 @@
     },
     "docs/node_modules/mdast-util-gfm-task-list-item": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz",
-      "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -583,8 +533,6 @@
     },
     "docs/node_modules/mdast-util-phrasing": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
-      "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -598,8 +546,6 @@
     },
     "docs/node_modules/mdast-util-to-hast": {
       "version": "12.3.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz",
-      "integrity": "sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -619,8 +565,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/@types/mdast": {
       "version": "3.0.15",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
-      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -629,15 +573,11 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-character": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz",
-      "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==",
       "dev": true,
       "funding": [
         {
@@ -657,8 +597,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-encode": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz",
-      "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==",
       "dev": true,
       "funding": [
         {
@@ -674,8 +612,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz",
-      "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==",
       "dev": true,
       "funding": [
         {
@@ -696,8 +632,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-symbol": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz",
-      "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==",
       "dev": true,
       "funding": [
         {
@@ -713,8 +647,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-types": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz",
-      "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==",
       "dev": true,
       "funding": [
         {
@@ -730,8 +662,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/unist-util-is": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz",
-      "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -744,8 +674,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -760,8 +688,6 @@
     },
     "docs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -775,8 +701,6 @@
     },
     "docs/node_modules/mdast-util-to-markdown": {
       "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz",
-      "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -797,8 +721,6 @@
     },
     "docs/node_modules/mdast-util-to-string": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -811,8 +733,6 @@
     },
     "docs/node_modules/micromark": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
-      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
       "dev": true,
       "funding": [
         {
@@ -847,8 +767,6 @@
     },
     "docs/node_modules/micromark-core-commonmark": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
-      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
       "dev": true,
       "funding": [
         {
@@ -882,8 +800,6 @@
     },
     "docs/node_modules/micromark-extension-gfm": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz",
-      "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -903,8 +819,6 @@
     },
     "docs/node_modules/micromark-extension-gfm-autolink-literal": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz",
-      "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -920,8 +834,6 @@
     },
     "docs/node_modules/micromark-extension-gfm-footnote": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz",
-      "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -941,8 +853,6 @@
     },
     "docs/node_modules/micromark-extension-gfm-strikethrough": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz",
-      "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -960,8 +870,6 @@
     },
     "docs/node_modules/micromark-extension-gfm-table": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz",
-      "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -978,8 +886,6 @@
     },
     "docs/node_modules/micromark-extension-gfm-tagfilter": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz",
-      "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -992,8 +898,6 @@
     },
     "docs/node_modules/micromark-extension-gfm-task-list-item": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz",
-      "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1010,8 +914,6 @@
     },
     "docs/node_modules/micromark-factory-destination": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
-      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
       "dev": true,
       "funding": [
         {
@@ -1032,8 +934,6 @@
     },
     "docs/node_modules/micromark-factory-label": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
-      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
       "dev": true,
       "funding": [
         {
@@ -1055,8 +955,6 @@
     },
     "docs/node_modules/micromark-factory-space": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
-      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
       "dev": true,
       "funding": [
         {
@@ -1076,8 +974,6 @@
     },
     "docs/node_modules/micromark-factory-title": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
-      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
       "dev": true,
       "funding": [
         {
@@ -1099,8 +995,6 @@
     },
     "docs/node_modules/micromark-factory-whitespace": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
-      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
       "dev": true,
       "funding": [
         {
@@ -1122,8 +1016,6 @@
     },
     "docs/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -1143,8 +1035,6 @@
     },
     "docs/node_modules/micromark-util-chunked": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
-      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
       "dev": true,
       "funding": [
         {
@@ -1163,8 +1053,6 @@
     },
     "docs/node_modules/micromark-util-classify-character": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
-      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
       "dev": true,
       "funding": [
         {
@@ -1185,8 +1073,6 @@
     },
     "docs/node_modules/micromark-util-combine-extensions": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
-      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
       "dev": true,
       "funding": [
         {
@@ -1206,8 +1092,6 @@
     },
     "docs/node_modules/micromark-util-decode-numeric-character-reference": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
-      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
       "dev": true,
       "funding": [
         {
@@ -1226,8 +1110,6 @@
     },
     "docs/node_modules/micromark-util-decode-string": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
-      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
       "dev": true,
       "funding": [
         {
@@ -1249,8 +1131,6 @@
     },
     "docs/node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -1266,8 +1146,6 @@
     },
     "docs/node_modules/micromark-util-html-tag-name": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
-      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
       "dev": true,
       "funding": [
         {
@@ -1283,8 +1161,6 @@
     },
     "docs/node_modules/micromark-util-normalize-identifier": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
-      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
       "dev": true,
       "funding": [
         {
@@ -1303,8 +1179,6 @@
     },
     "docs/node_modules/micromark-util-resolve-all": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
-      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
       "dev": true,
       "funding": [
         {
@@ -1323,8 +1197,6 @@
     },
     "docs/node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -1345,8 +1217,6 @@
     },
     "docs/node_modules/micromark-util-subtokenize": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
-      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
       "dev": true,
       "funding": [
         {
@@ -1368,8 +1238,6 @@
     },
     "docs/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -1385,8 +1253,6 @@
     },
     "docs/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -1402,8 +1268,6 @@
     },
     "docs/node_modules/rehype-stringify": {
       "version": "9.0.4",
-      "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-9.0.4.tgz",
-      "integrity": "sha512-Uk5xu1YKdqobe5XpSskwPvo1XeHUUucWEQSl8hTrXt5selvca1e8K1EZ37E6YoZ4BT8BCqCdVfQW7OfHfthtVQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1418,15 +1282,11 @@
     },
     "docs/node_modules/rehype-stringify/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/rehype-stringify/node_modules/unified": {
       "version": "10.1.2",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz",
-      "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1445,8 +1305,6 @@
     },
     "docs/node_modules/rehype-stringify/node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz",
-      "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1459,8 +1317,6 @@
     },
     "docs/node_modules/rehype-stringify/node_modules/vfile": {
       "version": "5.3.7",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz",
-      "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1476,8 +1332,6 @@
     },
     "docs/node_modules/rehype-stringify/node_modules/vfile-message": {
       "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
-      "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1491,8 +1345,6 @@
     },
     "docs/node_modules/remark-gfm": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz",
-      "integrity": "sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1510,8 +1362,6 @@
     },
     "docs/node_modules/remark-man": {
       "version": "8.0.1",
-      "resolved": "https://registry.npmjs.org/remark-man/-/remark-man-8.0.1.tgz",
-      "integrity": "sha512-F/BbNaEF/QiZXoMiC43/qb8kAgGBKIS3yA+Br4CObgyoD+9Bioq1v+LmrLVbkwy9BErircQQ4J8yR2vFD34fBA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1533,8 +1383,6 @@
     },
     "docs/node_modules/remark-man/node_modules/@types/mdast": {
       "version": "3.0.15",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
-      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1543,15 +1391,11 @@
     },
     "docs/node_modules/remark-man/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/remark-man/node_modules/mdast-util-to-string": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz",
-      "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1564,8 +1408,6 @@
     },
     "docs/node_modules/remark-man/node_modules/unified": {
       "version": "10.1.2",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz",
-      "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1584,8 +1426,6 @@
     },
     "docs/node_modules/remark-man/node_modules/unist-util-is": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz",
-      "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1598,8 +1438,6 @@
     },
     "docs/node_modules/remark-man/node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz",
-      "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1612,8 +1450,6 @@
     },
     "docs/node_modules/remark-man/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1628,8 +1464,6 @@
     },
     "docs/node_modules/remark-man/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1643,8 +1477,6 @@
     },
     "docs/node_modules/remark-man/node_modules/vfile": {
       "version": "5.3.7",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz",
-      "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1660,8 +1492,6 @@
     },
     "docs/node_modules/remark-man/node_modules/vfile-message": {
       "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
-      "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1675,8 +1505,6 @@
     },
     "docs/node_modules/remark-parse": {
       "version": "11.0.0",
-      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
-      "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1692,8 +1520,6 @@
     },
     "docs/node_modules/remark-rehype": {
       "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz",
-      "integrity": "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1709,8 +1535,6 @@
     },
     "docs/node_modules/remark-rehype/node_modules/@types/mdast": {
       "version": "3.0.15",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
-      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1719,15 +1543,11 @@
     },
     "docs/node_modules/remark-rehype/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/remark-rehype/node_modules/unified": {
       "version": "10.1.2",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz",
-      "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1746,8 +1566,6 @@
     },
     "docs/node_modules/remark-rehype/node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz",
-      "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1760,8 +1578,6 @@
     },
     "docs/node_modules/remark-rehype/node_modules/vfile": {
       "version": "5.3.7",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz",
-      "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1777,8 +1593,6 @@
     },
     "docs/node_modules/remark-rehype/node_modules/vfile-message": {
       "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
-      "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1792,8 +1606,6 @@
     },
     "docs/node_modules/remark-stringify": {
       "version": "11.0.0",
-      "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz",
-      "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1808,8 +1620,6 @@
     },
     "docs/node_modules/tough-cookie": {
       "version": "4.1.4",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz",
-      "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -1824,8 +1634,6 @@
     },
     "docs/node_modules/tr46": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
-      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1837,8 +1645,6 @@
     },
     "docs/node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1857,8 +1663,6 @@
     },
     "docs/node_modules/unist-util-is": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1871,8 +1675,6 @@
     },
     "docs/node_modules/unist-util-position": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz",
-      "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1885,15 +1687,11 @@
     },
     "docs/node_modules/unist-util-position/node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "docs/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1906,8 +1704,6 @@
     },
     "docs/node_modules/unist-util-visit": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1922,8 +1718,6 @@
     },
     "docs/node_modules/unist-util-visit-parents": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1937,8 +1731,6 @@
     },
     "docs/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1952,8 +1744,6 @@
     },
     "docs/node_modules/vfile-message": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.2.tgz",
-      "integrity": "sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1967,8 +1757,6 @@
     },
     "docs/node_modules/webidl-conversions": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
-      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -1977,8 +1765,6 @@
     },
     "docs/node_modules/whatwg-url": {
       "version": "14.2.0",
-      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
-      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2022,8 +1808,6 @@
     },
     "node_modules/@actions/core": {
       "version": "1.11.1",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
-      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2033,8 +1817,6 @@
     },
     "node_modules/@actions/exec": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
-      "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2043,8 +1825,6 @@
     },
     "node_modules/@actions/http-client": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz",
-      "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2054,8 +1834,6 @@
     },
     "node_modules/@actions/http-client/node_modules/undici": {
       "version": "5.29.0",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
-      "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2067,15 +1845,11 @@
     },
     "node_modules/@actions/io": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
-      "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@ampproject/remapping": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz",
-      "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -2088,8 +1862,6 @@
     },
     "node_modules/@asamuzakjp/css-color": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.2.0.tgz",
-      "integrity": "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2102,8 +1874,6 @@
     },
     "node_modules/@babel/code-frame": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
-      "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2117,8 +1887,6 @@
     },
     "node_modules/@babel/compat-data": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.0.tgz",
-      "integrity": "sha512-60X7qkglvrap8mn1lh2ebxXdZYtUcpd7gsmy9kLaBJ4i/WdY8PqTSdxyA8qraikqKQK5C1KRBKXqznrVapyNaw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2127,8 +1895,6 @@
     },
     "node_modules/@babel/core": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.0.tgz",
-      "integrity": "sha512-UlLAnTPrFdNGoFtbSXwcGFQBtQZJCNjaN6hQNP3UPvuNXT1i82N26KL3dZeIpNalWywr9IuQuncaAfUaS1g6sQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -2159,15 +1925,11 @@
     },
     "node_modules/@babel/core/node_modules/convert-source-map": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
-      "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@babel/core/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -2176,8 +1938,6 @@
     },
     "node_modules/@babel/generator": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.0.tgz",
-      "integrity": "sha512-lJjzvrbEeWrhB4P3QBsH7tey117PjLZnDbLiQEKjQ/fNJTjuq4HSqgFA+UNSwZT8D7dxxbnuSBMsa1lrWzKlQg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2193,8 +1953,6 @@
     },
     "node_modules/@babel/helper-compilation-targets": {
       "version": "7.27.2",
-      "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
-      "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2210,8 +1968,6 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
-      "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2220,8 +1976,6 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -2230,15 +1984,11 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/yallist": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
-      "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@babel/helper-globals": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
-      "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2247,8 +1997,6 @@
     },
     "node_modules/@babel/helper-module-imports": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
-      "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2261,8 +2009,6 @@
     },
     "node_modules/@babel/helper-module-transforms": {
       "version": "7.27.3",
-      "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.27.3.tgz",
-      "integrity": "sha512-dSOvYwvyLsWBeIRyOeHXp5vPj5l1I011r52FM1+r1jCERv+aFXYk4whgQccYEGYxK2H3ZAIA8nuPkQ0HaUo3qg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2279,8 +2025,6 @@
     },
     "node_modules/@babel/helper-string-parser": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
-      "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2289,8 +2033,6 @@
     },
     "node_modules/@babel/helper-validator-identifier": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
-      "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2299,8 +2041,6 @@
     },
     "node_modules/@babel/helper-validator-option": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
-      "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2309,8 +2049,6 @@
     },
     "node_modules/@babel/helpers": {
       "version": "7.27.6",
-      "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.27.6.tgz",
-      "integrity": "sha512-muE8Tt8M22638HU31A3CgfSUciwz1fhATfoVai05aPXGor//CdWDCbnlY1yvBPo07njuVOCNGCSp/GTt12lIug==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2323,8 +2061,6 @@
     },
     "node_modules/@babel/parser": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.0.tgz",
-      "integrity": "sha512-jVZGvOxOuNSsuQuLRTh13nU0AogFlw32w/MT+LV6D3sP5WdbW61E77RnkbaO2dUvmPAYrBDJXGn5gGS6tH4j8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2339,8 +2075,6 @@
     },
     "node_modules/@babel/template": {
       "version": "7.27.2",
-      "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
-      "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2354,8 +2088,6 @@
     },
     "node_modules/@babel/traverse": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.0.tgz",
-      "integrity": "sha512-mGe7UK5wWyh0bKRfupsUchrQGqvDbZDbKJw+kcRGSmdHVYrv+ltd0pnpDTVpiTqnaBru9iEvA8pz8W46v0Amwg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2373,8 +2105,6 @@
     },
     "node_modules/@babel/types": {
       "version": "7.28.1",
-      "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.1.tgz",
-      "integrity": "sha512-x0LvFTekgSX+83TI28Y9wYPUfzrnl2aT5+5QLnO6v7mSJYtEEevuDRN0F0uSHRk1G1IWZC43o00Y0xDDrpBGPQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2387,8 +2117,6 @@
     },
     "node_modules/@colors/colors": {
       "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
-      "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==",
       "dev": true,
       "license": "MIT",
       "optional": true,
@@ -2398,8 +2126,6 @@
     },
     "node_modules/@commitlint/cli": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-19.8.1.tgz",
-      "integrity": "sha512-LXUdNIkspyxrlV6VDHWBmCZRtkEVRpBKxi2Gtw3J54cGWhLCTouVD/Q6ZSaSvd2YaDObWK8mDjrz3TIKtaQMAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2420,8 +2146,6 @@
     },
     "node_modules/@commitlint/config-conventional": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-19.8.1.tgz",
-      "integrity": "sha512-/AZHJL6F6B/G959CsMAzrPKKZjeEiAVifRyEwXxcT6qtqbPwGw+iQxmNS+Bu+i09OCtdNRW6pNpBvgPrtMr9EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2434,8 +2158,6 @@
     },
     "node_modules/@commitlint/config-validator": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-19.8.1.tgz",
-      "integrity": "sha512-0jvJ4u+eqGPBIzzSdqKNX1rvdbSU1lPNYlfQQRIFnBgLy26BtC0cFnr7c/AyuzExMxWsMOte6MkTi9I3SQ3iGQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2448,8 +2170,6 @@
     },
     "node_modules/@commitlint/ensure": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-19.8.1.tgz",
-      "integrity": "sha512-mXDnlJdvDzSObafjYrOSvZBwkD01cqB4gbnnFuVyNpGUM5ijwU/r/6uqUmBXAAOKRfyEjpkGVZxaDsCVnHAgyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2466,8 +2186,6 @@
     },
     "node_modules/@commitlint/execute-rule": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-19.8.1.tgz",
-      "integrity": "sha512-YfJyIqIKWI64Mgvn/sE7FXvVMQER/Cd+s3hZke6cI1xgNT/f6ZAz5heND0QtffH+KbcqAwXDEE1/5niYayYaQA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2476,8 +2194,6 @@
     },
     "node_modules/@commitlint/format": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-19.8.1.tgz",
-      "integrity": "sha512-kSJj34Rp10ItP+Eh9oCItiuN/HwGQMXBnIRk69jdOwEW9llW9FlyqcWYbHPSGofmjsqeoxa38UaEA5tsbm2JWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2490,8 +2206,6 @@
     },
     "node_modules/@commitlint/is-ignored": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-19.8.1.tgz",
-      "integrity": "sha512-AceOhEhekBUQ5dzrVhDDsbMaY5LqtN8s1mqSnT2Kz1ERvVZkNihrs3Sfk1Je/rxRNbXYFzKZSHaPsEJJDJV8dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2504,8 +2218,6 @@
     },
     "node_modules/@commitlint/lint": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-19.8.1.tgz",
-      "integrity": "sha512-52PFbsl+1EvMuokZXLRlOsdcLHf10isTPlWwoY1FQIidTsTvjKXVXYb7AvtpWkDzRO2ZsqIgPK7bI98x8LRUEw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2520,8 +2232,6 @@
     },
     "node_modules/@commitlint/load": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-19.8.1.tgz",
-      "integrity": "sha512-9V99EKG3u7z+FEoe4ikgq7YGRCSukAcvmKQuTtUyiYPnOd9a2/H9Ak1J9nJA1HChRQp9OA/sIKPugGS+FK/k1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2542,8 +2252,6 @@
     },
     "node_modules/@commitlint/message": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-19.8.1.tgz",
-      "integrity": "sha512-+PMLQvjRXiU+Ae0Wc+p99EoGEutzSXFVwQfa3jRNUZLNW5odZAyseb92OSBTKCu+9gGZiJASt76Cj3dLTtcTdg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2552,8 +2260,6 @@
     },
     "node_modules/@commitlint/parse": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-19.8.1.tgz",
-      "integrity": "sha512-mmAHYcMBmAgJDKWdkjIGq50X4yB0pSGpxyOODwYmoexxxiUCy5JJT99t1+PEMK7KtsCtzuWYIAXYAiKR+k+/Jw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2567,8 +2273,6 @@
     },
     "node_modules/@commitlint/read": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/read/-/read-19.8.1.tgz",
-      "integrity": "sha512-03Jbjb1MqluaVXKHKRuGhcKWtSgh3Jizqy2lJCRbRrnWpcM06MYm8th59Xcns8EqBYvo0Xqb+2DoZFlga97uXQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2584,8 +2288,6 @@
     },
     "node_modules/@commitlint/resolve-extends": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-19.8.1.tgz",
-      "integrity": "sha512-GM0mAhFk49I+T/5UCYns5ayGStkTt4XFFrjjf0L4S26xoMTSkdCf9ZRO8en1kuopC4isDFuEm7ZOm/WRVeElVg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2602,8 +2304,6 @@
     },
     "node_modules/@commitlint/rules": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-19.8.1.tgz",
-      "integrity": "sha512-Hnlhd9DyvGiGwjfjfToMi1dsnw1EXKGJNLTcsuGORHz6SS9swRgkBsou33MQ2n51/boIDrbsg4tIBbRpEWK2kw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2618,8 +2318,6 @@
     },
     "node_modules/@commitlint/to-lines": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-19.8.1.tgz",
-      "integrity": "sha512-98Mm5inzbWTKuZQr2aW4SReY6WUukdWXuZhrqf1QdKPZBCCsXuG87c+iP0bwtD6DBnmVVQjgp4whoHRVixyPBg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2628,8 +2326,6 @@
     },
     "node_modules/@commitlint/top-level": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-19.8.1.tgz",
-      "integrity": "sha512-Ph8IN1IOHPSDhURCSXBz44+CIu+60duFwRsg6HqaISFHQHbmBtxVw4ZrFNIYUzEP7WwrNPxa2/5qJ//NK1FGcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2641,8 +2337,6 @@
     },
     "node_modules/@commitlint/types": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-19.8.1.tgz",
-      "integrity": "sha512-/yCrWGCoA1SVKOks25EGadP9Pnj0oAIHGpl2wH2M2Y46dPM2ueb8wyCVOD7O3WCTkaJ0IkKvzhl1JY7+uCT2Dw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2655,8 +2349,6 @@
     },
     "node_modules/@conventional-commits/parser": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/@conventional-commits/parser/-/parser-0.4.1.tgz",
-      "integrity": "sha512-H2ZmUVt6q+KBccXfMBhbBF14NlANeqHTXL4qCL6QGbMzrc4HDXyzWuxPxPNbz71f/5UkR5DrycP5VO9u7crahg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2666,8 +2358,6 @@
     },
     "node_modules/@csstools/color-helpers": {
       "version": "5.0.2",
-      "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.0.2.tgz",
-      "integrity": "sha512-JqWH1vsgdGcw2RR6VliXXdA0/59LttzlU8UlRT/iUUsEeWfYq8I+K0yhihEUTTHLRm1EXvpsCx3083EU15ecsA==",
       "dev": true,
       "funding": [
         {
@@ -2686,8 +2376,6 @@
     },
     "node_modules/@csstools/css-calc": {
       "version": "2.1.4",
-      "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
-      "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
       "dev": true,
       "funding": [
         {
@@ -2710,8 +2398,6 @@
     },
     "node_modules/@csstools/css-color-parser": {
       "version": "3.0.10",
-      "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.0.10.tgz",
-      "integrity": "sha512-TiJ5Ajr6WRd1r8HSiwJvZBiJOqtH86aHpUjq5aEKWHiII2Qfjqd/HCWKPOW8EP4vcspXbHnXrwIDlu5savQipg==",
       "dev": true,
       "funding": [
         {
@@ -2738,8 +2424,6 @@
     },
     "node_modules/@csstools/css-parser-algorithms": {
       "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
-      "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
       "dev": true,
       "funding": [
         {
@@ -2762,8 +2446,6 @@
     },
     "node_modules/@csstools/css-tokenizer": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
-      "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
       "dev": true,
       "funding": [
         {
@@ -2783,8 +2465,6 @@
     },
     "node_modules/@eslint-community/eslint-utils": {
       "version": "4.7.0",
-      "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.7.0.tgz",
-      "integrity": "sha512-dyybb3AcajC7uha6CvhdVRJqaKyn7w2YKqKyAN37NKYgZT36w+iRb0Dymmc5qEJ549c/S31cMMSFd75bteCpCw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2802,8 +2482,6 @@
     },
     "node_modules/@eslint-community/regexpp": {
       "version": "4.12.1",
-      "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
-      "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2812,8 +2490,6 @@
     },
     "node_modules/@eslint/eslintrc": {
       "version": "2.1.4",
-      "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
-      "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2836,8 +2512,6 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/ajv": {
       "version": "6.12.6",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
-      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2853,8 +2527,6 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2864,15 +2536,11 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
-      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@eslint/eslintrc/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2884,8 +2552,6 @@
     },
     "node_modules/@eslint/js": {
       "version": "8.57.1",
-      "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz",
-      "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2894,8 +2560,6 @@
     },
     "node_modules/@fastify/busboy": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
-      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2904,8 +2568,6 @@
     },
     "node_modules/@google-automations/git-file-utils": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-2.0.0.tgz",
-      "integrity": "sha512-F6h8npq7rt60fr3W+cil/zXbIiF9Hj8JzaN3LNh7uBIJpsWnjL9ObV84qW/345boMheDdo/n+cItmvCfsn0lLA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -2919,8 +2581,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/minimatch": {
       "version": "5.1.6",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
-      "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2932,9 +2592,6 @@
     },
     "node_modules/@humanwhocodes/config-array": {
       "version": "0.13.0",
-      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz",
-      "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==",
-      "deprecated": "Use @eslint/config-array instead",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -2948,8 +2605,6 @@
     },
     "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2959,8 +2614,6 @@
     },
     "node_modules/@humanwhocodes/config-array/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2972,8 +2625,6 @@
     },
     "node_modules/@humanwhocodes/module-importer": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
-      "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -2986,23 +2637,16 @@
     },
     "node_modules/@humanwhocodes/object-schema": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
-      "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
-      "deprecated": "Use @eslint/object-schema instead",
       "dev": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/@iarna/toml": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-3.0.0.tgz",
-      "integrity": "sha512-td6ZUkz2oS3VeleBcN+m//Q6HlCFCPrnI0FZhrt/h4XqLEdOyYp2u21nd8MdsR+WJy5r9PTDaHTDDfhf4H4l6Q==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@isaacs/balanced-match": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
-      "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -3011,8 +2655,6 @@
     },
     "node_modules/@isaacs/brace-expansion": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz",
-      "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -3024,8 +2666,6 @@
     },
     "node_modules/@isaacs/cliui": {
       "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
-      "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3042,8 +2682,6 @@
     },
     "node_modules/@isaacs/cliui/node_modules/ansi-regex": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
-      "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -3055,15 +2693,11 @@
     },
     "node_modules/@isaacs/cliui/node_modules/emoji-regex": {
       "version": "9.2.2",
-      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
-      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/@isaacs/cliui/node_modules/string-width": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
-      "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -3080,8 +2714,6 @@
     },
     "node_modules/@isaacs/cliui/node_modules/strip-ansi": {
       "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
-      "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -3096,8 +2728,6 @@
     },
     "node_modules/@isaacs/fs-minipass": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@isaacs/fs-minipass/-/fs-minipass-4.0.1.tgz",
-      "integrity": "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3109,15 +2739,11 @@
     },
     "node_modules/@isaacs/string-locale-compare": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@isaacs/string-locale-compare/-/string-locale-compare-1.1.0.tgz",
-      "integrity": "sha512-SQ7Kzhh9+D+ZW9MA0zkYv3VXhIDNx+LzM6EJ+/65I3QY+enU6Itte7E5XX7EWrqLW2FN4n06GWzBnPoC3th2aQ==",
       "inBundle": true,
       "license": "ISC"
     },
     "node_modules/@istanbuljs/load-nyc-config": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
-      "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3133,8 +2759,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": {
       "version": "1.0.10",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
-      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3143,8 +2767,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/esprima": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
-      "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
       "dev": true,
       "license": "BSD-2-Clause",
       "bin": {
@@ -3157,8 +2779,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3171,8 +2791,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": {
       "version": "3.14.1",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
-      "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3185,8 +2803,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3198,8 +2814,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3214,8 +2828,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3227,8 +2839,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3237,15 +2847,11 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/sprintf-js": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
-      "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
       "dev": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/@istanbuljs/schema": {
       "version": "0.1.3",
-      "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
-      "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3254,8 +2860,6 @@
     },
     "node_modules/@jridgewell/gen-mapping": {
       "version": "0.3.12",
-      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.12.tgz",
-      "integrity": "sha512-OuLGC46TjB5BbN1dH8JULVVZY4WTdkF7tV9Ys6wLL1rubZnCMstOhNHueU5bLCrnRuDhKPDM4g6sw4Bel5Gzqg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3265,8 +2869,6 @@
     },
     "node_modules/@jridgewell/resolve-uri": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
-      "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3275,15 +2877,11 @@
     },
     "node_modules/@jridgewell/sourcemap-codec": {
       "version": "1.5.4",
-      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.4.tgz",
-      "integrity": "sha512-VT2+G1VQs/9oz078bLrYbecdZKs912zQlkelYpuf+SXF+QvZDYJlbx/LSx+meSAwdDFnF8FVXW92AVjjkVmgFw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@jridgewell/trace-mapping": {
       "version": "0.3.29",
-      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.29.tgz",
-      "integrity": "sha512-uw6guiW/gcAGPDhLmd77/6lW8QLeiV5RUTsAX46Db6oLhGaVj4lhnPwb184s1bkc8kdVg/+h988dro8GRDpmYQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3293,8 +2891,6 @@
     },
     "node_modules/@jsep-plugin/assignment": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/@jsep-plugin/assignment/-/assignment-1.3.0.tgz",
-      "integrity": "sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3306,8 +2902,6 @@
     },
     "node_modules/@jsep-plugin/regex": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/@jsep-plugin/regex/-/regex-1.0.4.tgz",
-      "integrity": "sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3319,8 +2913,6 @@
     },
     "node_modules/@nodelib/fs.scandir": {
       "version": "2.1.5",
-      "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
-      "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3333,8 +2925,6 @@
     },
     "node_modules/@nodelib/fs.stat": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
-      "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3343,8 +2933,6 @@
     },
     "node_modules/@nodelib/fs.walk": {
       "version": "1.2.8",
-      "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
-      "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3357,8 +2945,6 @@
     },
     "node_modules/@npmcli/agent": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-3.0.0.tgz",
-      "integrity": "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3386,8 +2972,6 @@
     },
     "node_modules/@npmcli/eslint-config": {
       "version": "5.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/eslint-config/-/eslint-config-5.1.0.tgz",
-      "integrity": "sha512-L4FAYndvARxkbTBNbsbDDkArIf8A8WmTFGVKdevJ3jd9nPzDKWiuC9TW0QtEnRsFHr5IX7G6qkRLK+drLIGoEA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3408,8 +2992,6 @@
     },
     "node_modules/@npmcli/fs": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-4.0.0.tgz",
-      "integrity": "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3420,70 +3002,33 @@
       }
     },
     "node_modules/@npmcli/git": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
-      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
+      "version": "7.0.0",
+      "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "@npmcli/promise-spawn": "^8.0.0",
         "ini": "^5.0.0",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^10.0.0",
+        "lru-cache": "^11.2.1",
+        "npm-pick-manifest": "^11.0.1",
         "proc-log": "^5.0.0",
         "promise-retry": "^2.0.1",
         "semver": "^7.3.5",
         "which": "^5.0.0"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/git/node_modules/hosted-git-info": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/git/node_modules/npm-package-arg": {
-      "version": "12.0.2",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
-      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^8.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
-      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
+    "node_modules/@npmcli/git/node_modules/lru-cache": {
+      "version": "11.2.1",
+      "inBundle": true,
       "license": "ISC",
-      "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
-        "semver": "^7.3.5"
-      },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "20 || >=22"
       }
     },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz",
-      "integrity": "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3499,8 +3044,6 @@
     },
     "node_modules/@npmcli/map-workspaces": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-5.0.0.tgz",
-      "integrity": "sha512-+YJN6+BIQEC5QL4EqffJ2I1S9ySspwn7GP7uQINtZhf3uy7P0KnnIg+Ab5WeSUTZYpg+jn3GSfMme2FutB7qEQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3515,8 +3058,6 @@
     },
     "node_modules/@npmcli/map-workspaces/node_modules/glob": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
-      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3539,8 +3080,6 @@
     },
     "node_modules/@npmcli/map-workspaces/node_modules/jackspeak": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
-      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -3555,8 +3094,6 @@
     },
     "node_modules/@npmcli/map-workspaces/node_modules/lru-cache": {
       "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -3565,8 +3102,6 @@
     },
     "node_modules/@npmcli/map-workspaces/node_modules/minimatch": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3581,8 +3116,6 @@
     },
     "node_modules/@npmcli/map-workspaces/node_modules/path-scurry": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
-      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -3598,8 +3131,6 @@
     },
     "node_modules/@npmcli/metavuln-calculator": {
       "version": "9.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-9.0.2.tgz",
-      "integrity": "sha512-eESzlCRLuD30qYefT2jYZTUepgu9DNJQdXABGGxjkir055x2UtnpNfDZCA6OJxButQNgxNKc9AeTchYxSgbMCw==",
       "license": "ISC",
       "dependencies": {
         "cacache": "^20.0.0",
@@ -3622,8 +3153,6 @@
     },
     "node_modules/@npmcli/name-from-folder": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-3.0.0.tgz",
-      "integrity": "sha512-61cDL8LUc9y80fXn+lir+iVt8IS0xHqEKwPu/5jCjxQTVoSCmkXvw4vbMrzAMtmghz3/AkiBjhHkDKUH+kf7kA==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -3632,8 +3161,6 @@
     },
     "node_modules/@npmcli/node-gyp": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-4.0.0.tgz",
-      "integrity": "sha512-+t5DZ6mO/QFh78PByMq1fGSAub/agLJZDRfJRMeOSNCt8s9YVlTjmGpIPwPhvXTGUIJk+WszlT0rQa1W33yzNA==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -3642,8 +3169,6 @@
     },
     "node_modules/@npmcli/package-json": {
       "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-7.0.1.tgz",
-      "integrity": "sha512-956YUeI0YITbk2+KnirCkD19HLzES0habV+Els+dyZaVsaM6VGSiNwnRu6t3CZaqDLz4KXy2zx+0N/Zy6YjlAA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3659,30 +3184,8 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/@npmcli/git": {
-      "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.0.tgz",
-      "integrity": "sha512-vnz7BVGtOctJAIHouCJdvWBhsTVSICMeUgZo2c7XAi5d5Rrl80S1H7oPym7K03cRuinK5Q6s2dw36+PgXQTcMA==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^8.0.0",
-        "ini": "^5.0.0",
-        "lru-cache": "^11.2.1",
-        "npm-pick-manifest": "^11.0.1",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^5.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/@npmcli/package-json/node_modules/glob": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
-      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3705,8 +3208,6 @@
     },
     "node_modules/@npmcli/package-json/node_modules/jackspeak": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
-      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -3721,8 +3222,6 @@
     },
     "node_modules/@npmcli/package-json/node_modules/lru-cache": {
       "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -3731,8 +3230,6 @@
     },
     "node_modules/@npmcli/package-json/node_modules/minimatch": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3747,8 +3244,6 @@
     },
     "node_modules/@npmcli/package-json/node_modules/path-scurry": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
-      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -3764,8 +3259,6 @@
     },
     "node_modules/@npmcli/promise-spawn": {
       "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.2.tgz",
-      "integrity": "sha512-/bNJhjc+o6qL+Dwz/bqfTQClkEO5nTQ1ZEcdCkAQjhkZMHIh22LPG7fNh1enJP1NKWDqYiiABnjFCY7E0zHYtQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3777,8 +3270,6 @@
     },
     "node_modules/@npmcli/query": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/query/-/query-4.0.1.tgz",
-      "integrity": "sha512-4OIPFb4weUUwkDXJf4Hh1inAn8neBGq3xsH4ZsAaN6FK3ldrFkH7jSpCc7N9xesi0Sp+EBXJ9eGMDrEww2Ztqw==",
       "license": "ISC",
       "dependencies": {
         "postcss-selector-parser": "^7.0.0"
@@ -3789,8 +3280,6 @@
     },
     "node_modules/@npmcli/redact": {
       "version": "3.2.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-3.2.2.tgz",
-      "integrity": "sha512-7VmYAmk4csGv08QzrDKScdzn11jHPFGyqJW39FyPgPuAp3zIaUmuCo1yxw9aGs+NEJuTGQ9Gwqpt93vtJubucg==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -3799,8 +3288,6 @@
     },
     "node_modules/@npmcli/run-script": {
       "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.0.tgz",
-      "integrity": "sha512-vaQj4nccJbAslopIvd49pQH2NhUp7G9pY4byUtmwhe37ZZuubGrx0eB9hW2F37uVNRuDDK6byFGXF+7JCuMSZg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3821,8 +3308,6 @@
     },
     "node_modules/@npmcli/template-oss": {
       "version": "4.24.4",
-      "resolved": "https://registry.npmjs.org/@npmcli/template-oss/-/template-oss-4.24.4.tgz",
-      "integrity": "sha512-NF6SQC2wjBTft7RM9YaILf8dSum5cjQCDnsOlQYdarNQJSxKqaePKpOEYSsy6crjz3TfZ/jrAd0M4pLT/VGc/w==",
       "dev": true,
       "hasInstallScript": true,
       "license": "ISC",
@@ -3870,8 +3355,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/agent": {
       "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz",
-      "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3887,8 +3370,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist": {
       "version": "7.5.4",
-      "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-7.5.4.tgz",
-      "integrity": "sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3935,10 +3416,29 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/git": {
+      "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
+      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/promise-spawn": "^7.0.0",
+        "ini": "^4.1.3",
+        "lru-cache": "^10.0.1",
+        "npm-pick-manifest": "^9.0.0",
+        "proc-log": "^4.0.0",
+        "promise-inflight": "^1.0.1",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^4.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/map-workspaces": {
       "version": "3.0.6",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz",
-      "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3953,8 +3453,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3972,8 +3470,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/hosted-git-info": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3983,10 +3479,18 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/ini": {
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
+      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/npm-package-arg": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4001,8 +3505,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4011,8 +3513,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
-      "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4023,30 +3523,105 @@
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git": {
-      "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
+      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/promise-spawn": "^7.0.0",
-        "ini": "^4.1.3",
+        "@npmcli/promise-spawn": "^8.0.0",
+        "ini": "^5.0.0",
         "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^9.0.0",
-        "proc-log": "^4.0.0",
-        "promise-inflight": "^1.0.1",
+        "npm-pick-manifest": "^10.0.0",
+        "proc-log": "^5.0.0",
         "promise-retry": "^2.0.1",
         "semver": "^7.3.5",
-        "which": "^4.0.0"
+        "which": "^5.0.0"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn": {
+      "version": "8.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz",
+      "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "which": "^5.0.0"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-install-checks": {
+      "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
+      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
+      "dev": true,
+      "license": "BSD-2-Clause",
+      "dependencies": {
+        "semver": "^7.1.1"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-normalize-package-bin": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
+      "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
+      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "npm-install-checks": "^7.1.0",
+        "npm-normalize-package-bin": "^4.0.0",
+        "npm-package-arg": "^12.0.0",
+        "semver": "^7.3.5"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/proc-log": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
+      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/which": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
+      "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^3.1.1"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
       }
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/installed-package-contents": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz",
-      "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4062,8 +3637,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-4.0.2.tgz",
-      "integrity": "sha512-mnuMuibEbkaBTYj9HQ3dMe6L0ylYW+s/gfz7tBDMFY/la0w9Kf44P9aLn4/+/t3aTR3YUHKoT6XQL9rlicIe3Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4078,8 +3651,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/name-from-folder": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-3.0.0.tgz",
-      "integrity": "sha512-61cDL8LUc9y80fXn+lir+iVt8IS0xHqEKwPu/5jCjxQTVoSCmkXvw4vbMrzAMtmghz3/AkiBjhHkDKUH+kf7kA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4088,8 +3659,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-7.1.1.tgz",
-      "integrity": "sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4105,8 +3674,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/name-from-folder": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz",
-      "integrity": "sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4115,8 +3682,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/node-gyp": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz",
-      "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4125,8 +3690,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json": {
       "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4142,128 +3705,24 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/@npmcli/git": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
-      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^8.0.0",
-        "ini": "^5.0.0",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^10.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^5.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/@npmcli/promise-spawn": {
-      "version": "8.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz",
-      "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "which": "^5.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/ini": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz",
-      "integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/json-parse-even-better-errors": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz",
-      "integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-install-checks": {
-      "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
-      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "dependencies": {
-        "semver": "^7.1.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-normalize-package-bin": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
-      "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/npm-pick-manifest": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
-      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/proc-log": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
-      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/which": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
-      "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/promise-spawn": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
-      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4275,8 +3734,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/query": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/query/-/query-3.1.0.tgz",
-      "integrity": "sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4288,8 +3745,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/redact": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-2.0.1.tgz",
-      "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4298,8 +3753,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-8.1.0.tgz",
-      "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4314,10 +3767,29 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/git": {
+      "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
+      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/promise-spawn": "^7.0.0",
+        "ini": "^4.1.3",
+        "lru-cache": "^10.0.1",
+        "npm-pick-manifest": "^9.0.0",
+        "proc-log": "^4.0.0",
+        "promise-inflight": "^1.0.1",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^4.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4335,8 +3807,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/hosted-git-info": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4346,10 +3816,18 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/ini": {
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
+      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/bundle": {
       "version": "2.3.2",
-      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz",
-      "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -4361,8 +3839,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/core": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz",
-      "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -4371,8 +3847,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/protobuf-specs": {
       "version": "0.3.3",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz",
-      "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -4381,8 +3855,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/sign": {
       "version": "2.3.2",
-      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz",
-      "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -4399,8 +3871,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/tuf": {
       "version": "2.3.4",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.3.4.tgz",
-      "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -4413,8 +3883,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/verify": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz",
-      "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -4428,8 +3896,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@tufjs/models": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.1.tgz",
-      "integrity": "sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4442,8 +3908,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/abbrev": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
-      "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4452,8 +3916,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/bin-links": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-4.0.4.tgz",
-      "integrity": "sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4468,8 +3930,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/cacache": {
       "version": "18.0.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz",
-      "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4492,8 +3952,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/cmd-shim": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-6.0.3.tgz",
-      "integrity": "sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4502,8 +3960,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4515,8 +3971,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/ignore-walk": {
       "version": "6.0.5",
-      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.5.tgz",
-      "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4526,20 +3980,8 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/ini": {
-      "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/isexe": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
-      "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4548,8 +3990,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/json-parse-even-better-errors": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4558,8 +3998,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/make-fetch-happen": {
       "version": "13.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz",
-      "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4582,8 +4020,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/minipass-fetch": {
       "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz",
-      "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4600,8 +4036,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/node-gyp": {
       "version": "10.3.1",
-      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.3.1.tgz",
-      "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4625,8 +4059,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/nopt": {
       "version": "7.2.1",
-      "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
-      "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4641,8 +4073,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/normalize-package-data": {
       "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz",
-      "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4656,8 +4086,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/normalize-package-data/node_modules/hosted-git-info": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4669,8 +4097,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-bundled": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz",
-      "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4682,8 +4108,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-install-checks": {
       "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz",
-      "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4695,8 +4119,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-normalize-package-bin": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
-      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4705,8 +4127,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
       "version": "12.0.2",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
-      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4721,8 +4141,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-package-arg/node_modules/proc-log": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
-      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4731,8 +4149,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-packlist": {
       "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz",
-      "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4744,8 +4160,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest": {
       "version": "9.1.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz",
-      "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4760,8 +4174,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4773,8 +4185,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4789,8 +4199,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4799,8 +4207,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch": {
       "version": "17.1.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz",
-      "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4819,8 +4225,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4832,8 +4236,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4848,8 +4250,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4858,8 +4258,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/p-map": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
-      "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4874,8 +4272,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote": {
       "version": "18.0.6",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz",
-      "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4904,10 +4300,29 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/git": {
+      "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
+      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "@npmcli/promise-spawn": "^7.0.0",
+        "ini": "^4.1.3",
+        "lru-cache": "^10.0.1",
+        "npm-pick-manifest": "^9.0.0",
+        "proc-log": "^4.0.0",
+        "promise-inflight": "^1.0.1",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^4.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4925,8 +4340,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/hosted-git-info": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4936,10 +4349,18 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/ini": {
+      "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
+      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/npm-package-arg": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4954,8 +4375,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4964,8 +4383,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz",
-      "integrity": "sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4979,8 +4396,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/postcss-selector-parser": {
       "version": "6.1.2",
-      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz",
-      "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4993,8 +4408,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/proc-log": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5003,8 +4416,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/proggy": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/proggy/-/proggy-2.0.0.tgz",
-      "integrity": "sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5013,8 +4424,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/read-cmd-shim": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-4.0.0.tgz",
-      "integrity": "sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5023,8 +4432,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/read-package-json-fast": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz",
-      "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5037,8 +4444,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/sigstore": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz",
-      "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -5055,8 +4460,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/ssri": {
       "version": "10.0.6",
-      "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz",
-      "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5068,8 +4471,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/tuf-js": {
       "version": "2.2.1",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.2.1.tgz",
-      "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5083,8 +4484,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/unique-filename": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz",
-      "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5096,8 +4495,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/unique-slug": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz",
-      "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5109,15 +4506,11 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/walk-up-path": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-3.0.1.tgz",
-      "integrity": "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@npmcli/template-oss/node_modules/which": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
-      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5132,8 +4525,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/write-file-atomic": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz",
-      "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5146,8 +4537,6 @@
     },
     "node_modules/@octokit/auth-token": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz",
-      "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5156,8 +4545,6 @@
     },
     "node_modules/@octokit/core": {
       "version": "4.2.4",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz",
-      "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5176,8 +4563,6 @@
     },
     "node_modules/@octokit/endpoint": {
       "version": "7.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz",
-      "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5191,8 +4576,6 @@
     },
     "node_modules/@octokit/graphql": {
       "version": "5.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz",
-      "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5206,15 +4589,11 @@
     },
     "node_modules/@octokit/openapi-types": {
       "version": "18.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz",
-      "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/plugin-paginate-rest": {
       "version": "6.1.2",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz",
-      "integrity": "sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5230,8 +4609,6 @@
     },
     "node_modules/@octokit/plugin-request-log": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz",
-      "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==",
       "dev": true,
       "license": "MIT",
       "peerDependencies": {
@@ -5240,8 +4617,6 @@
     },
     "node_modules/@octokit/plugin-rest-endpoint-methods": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.2.3.tgz",
-      "integrity": "sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5256,8 +4631,6 @@
     },
     "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": {
       "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz",
-      "integrity": "sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5266,8 +4639,6 @@
     },
     "node_modules/@octokit/request": {
       "version": "6.2.8",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz",
-      "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5284,8 +4655,6 @@
     },
     "node_modules/@octokit/request-error": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz",
-      "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5299,8 +4668,6 @@
     },
     "node_modules/@octokit/rest": {
       "version": "19.0.13",
-      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.13.tgz",
-      "integrity": "sha512-/EzVox5V9gYGdbAI+ovYj3nXQT1TtTHRT+0eZPcuC05UFSWO3mdO9UY1C0i2eLF9Un1ONJkAk+IEtYGAC+TahA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5315,15 +4682,11 @@
     },
     "node_modules/@octokit/tsconfig": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/@octokit/tsconfig/-/tsconfig-1.0.2.tgz",
-      "integrity": "sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/types": {
       "version": "9.3.2",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
-      "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5332,8 +4695,6 @@
     },
     "node_modules/@pkgjs/parseargs": {
       "version": "0.11.0",
-      "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
-      "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
       "inBundle": true,
       "license": "MIT",
       "optional": true,
@@ -5343,15 +4704,11 @@
     },
     "node_modules/@rtsao/scc": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz",
-      "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@sigstore/bundle": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz",
-      "integrity": "sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -5363,8 +4720,6 @@
     },
     "node_modules/@sigstore/core": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.0.0.tgz",
-      "integrity": "sha512-NgbJ+aW9gQl/25+GIEGYcCyi8M+ng2/5X04BMuIgoDfgvp18vDcoNHOQjQsG9418HGNYRxG3vfEXaR1ayD37gg==",
       "inBundle": true,
       "license": "Apache-2.0",
       "engines": {
@@ -5373,8 +4728,6 @@
     },
     "node_modules/@sigstore/protobuf-specs": {
       "version": "0.5.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz",
-      "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==",
       "inBundle": true,
       "license": "Apache-2.0",
       "engines": {
@@ -5383,8 +4736,6 @@
     },
     "node_modules/@sigstore/sign": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-4.0.0.tgz",
-      "integrity": "sha512-5+IadiqPzRRMfvftHONzpeH2EzlDNuBiTMC3Lx7+9tLqn/4xbWVfSZA+YaOzKCn86k5BWfJ+aGO9v+pQmIyxqQ==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -5401,8 +4752,6 @@
     },
     "node_modules/@sigstore/tuf": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.0.tgz",
-      "integrity": "sha512-0QFuWDHOQmz7t66gfpfNO6aEjoFrdhkJaej/AOqb4kqWZVbPWFZifXZzkxyQBB1OwTbkhdT3LNpMFxwkTvf+2w==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -5415,8 +4764,6 @@
     },
     "node_modules/@sigstore/verify": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-3.0.0.tgz",
-      "integrity": "sha512-moXtHH33AobOhTZF8xcX1MpOFqdvfCk7v6+teJL8zymBiDXwEsQH6XG9HGx2VIxnJZNm4cNSzflTLDnQLmIdmw==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -5430,8 +4777,6 @@
     },
     "node_modules/@tufjs/canonical-json": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/canonical-json/-/canonical-json-2.0.0.tgz",
-      "integrity": "sha512-yVtV8zsdo8qFHe+/3kw81dSLyF7D576A5cCFCi4X7B39tWT7SekaEFUnvnWJHz+9qO7qJTah1JbrDjWKqFtdWA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5440,8 +4785,6 @@
     },
     "node_modules/@tufjs/models": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz",
-      "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5454,9 +4797,8 @@
     },
     "node_modules/@tufjs/repo-mock": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-3.0.1.tgz",
-      "integrity": "sha512-9as4Bg7trZ06+qQ4aqPcYWY0TUYuewG0e7kPsrAVokdBJh35TTqPR68o9L8ojyJcBM5xgSIDvLy0XPM1RCZdJA==",
       "dev": true,
+      "license": "MIT",
       "dependencies": {
         "@tufjs/models": "3.0.1",
         "nock": "^13.5.5"
@@ -5467,8 +4809,6 @@
     },
     "node_modules/@types/conventional-commits-parser": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/@types/conventional-commits-parser/-/conventional-commits-parser-5.0.1.tgz",
-      "integrity": "sha512-7uz5EHdzz2TqoMfV7ee61Egf5y6NkcO4FB/1iCCQnbeiI1F3xzv3vK5dBCXUCLQgGYS+mUeigK1iKQzvED+QnQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5477,8 +4817,6 @@
     },
     "node_modules/@types/debug": {
       "version": "4.1.12",
-      "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
-      "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5487,15 +4825,11 @@
     },
     "node_modules/@types/json5": {
       "version": "0.0.29",
-      "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz",
-      "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/mdast": {
       "version": "3.0.15",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
-      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5504,22 +4838,16 @@
     },
     "node_modules/@types/minimist": {
       "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz",
-      "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/ms": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
-      "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/node": {
       "version": "24.1.0",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.1.0.tgz",
-      "integrity": "sha512-ut5FthK5moxFKH2T1CUOC6ctR67rQRvvHdFLCD2Ql6KXmMuCrjsSsRI9UsLCm9M18BMwClv4pn327UvB7eeO1w==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5529,36 +4857,26 @@
     },
     "node_modules/@types/normalize-package-data": {
       "version": "2.4.4",
-      "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz",
-      "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/npm-package-arg": {
       "version": "6.1.4",
-      "resolved": "https://registry.npmjs.org/@types/npm-package-arg/-/npm-package-arg-6.1.4.tgz",
-      "integrity": "sha512-vDgdbMy2QXHnAruzlv68pUtXCjmqUk3WrBAsRboRovsOmxbfn/WiYCjmecyKjGztnMps5dWp4Uq2prp+Ilo17Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/parse5": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz",
-      "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/yargs": {
       "version": "16.0.9",
-      "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.9.tgz",
-      "integrity": "sha512-tHhzvkFXZQeTECenFoRljLBYPZJ7jAVxqqtEI0qTLOmuultnFp4I9yKE17vTuhf7BkhCu7I4XuemPgikDVuYqA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5567,22 +4885,16 @@
     },
     "node_modules/@types/yargs-parser": {
       "version": "21.0.3",
-      "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
-      "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@ungap/structured-clone": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz",
-      "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@xmldom/xmldom": {
       "version": "0.8.10",
-      "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.10.tgz",
-      "integrity": "sha512-2WALfTl4xo2SkGCYRt6rDTFfk9R1czmBvUQy12gK2KuRKIpWEhcbbzy8EZXtz/jkRqHX8bFEc6FC1HjX4TUWYw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5591,8 +4903,6 @@
     },
     "node_modules/abbrev": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz",
-      "integrity": "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -5601,8 +4911,6 @@
     },
     "node_modules/acorn": {
       "version": "8.15.0",
-      "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
-      "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5615,8 +4923,6 @@
     },
     "node_modules/acorn-jsx": {
       "version": "5.3.2",
-      "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
-      "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
       "dev": true,
       "license": "MIT",
       "peerDependencies": {
@@ -5625,8 +4931,6 @@
     },
     "node_modules/agent-base": {
       "version": "7.1.4",
-      "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz",
-      "integrity": "sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5635,8 +4939,6 @@
     },
     "node_modules/aggregate-error": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
-      "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5649,8 +4951,6 @@
     },
     "node_modules/ajv": {
       "version": "8.17.1",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
-      "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5667,8 +4967,6 @@
     },
     "node_modules/ajv-formats": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz",
-      "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5685,8 +4983,6 @@
     },
     "node_modules/ajv-formats-draft2019": {
       "version": "1.6.1",
-      "resolved": "https://registry.npmjs.org/ajv-formats-draft2019/-/ajv-formats-draft2019-1.6.1.tgz",
-      "integrity": "sha512-JQPvavpkWDvIsBp2Z33UkYCtXCSpW4HD3tAZ+oL4iEFOk9obQZffx0yANwECt6vzr6ET+7HN5czRyqXbnq/u0Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5701,8 +4997,6 @@
     },
     "node_modules/ansi-regex": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
-      "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5711,8 +5005,6 @@
     },
     "node_modules/ansi-styles": {
       "version": "6.2.1",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz",
-      "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5724,8 +5016,6 @@
     },
     "node_modules/anymatch": {
       "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
-      "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5738,8 +5028,6 @@
     },
     "node_modules/append-transform": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz",
-      "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5751,28 +5039,20 @@
     },
     "node_modules/aproba": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.1.0.tgz",
-      "integrity": "sha512-tLIEcj5GuR2RSTnxNKdkK0dJ/GrC7P38sUkiDmDuHfsHmbagTFAxDVIBltoklXEVIQ/f14IL8IMJ5pn9Hez1Ew==",
       "license": "ISC"
     },
     "node_modules/archy": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/archy/-/archy-1.0.0.tgz",
-      "integrity": "sha512-Xg+9RwCg/0p32teKdGMPTPnVXKD0w3DfHnFTficozsAgsvq2XenPJq/MYpzzQ/v8zrOyJn6Ds39VA4JIDwFfqw==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/argparse": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
-      "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
       "dev": true,
       "license": "Python-2.0"
     },
     "node_modules/args": {
       "version": "5.0.3",
-      "resolved": "https://registry.npmjs.org/args/-/args-5.0.3.tgz",
-      "integrity": "sha512-h6k/zfFgusnv3i5TU08KQkVKuCPBtL/PWQbWkHUxvJrZ2nAyeaUupneemcrgn1xmqxPQsPIzwkUhOpoqPDRZuA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5787,8 +5067,6 @@
     },
     "node_modules/args/node_modules/ansi-styles": {
       "version": "3.2.1",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
-      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5800,8 +5078,6 @@
     },
     "node_modules/args/node_modules/camelcase": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.0.0.tgz",
-      "integrity": "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5810,8 +5086,6 @@
     },
     "node_modules/args/node_modules/chalk": {
       "version": "2.4.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
-      "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5825,8 +5099,6 @@
     },
     "node_modules/args/node_modules/color-convert": {
       "version": "1.9.3",
-      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
-      "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5835,15 +5107,11 @@
     },
     "node_modules/args/node_modules/color-name": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
-      "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/args/node_modules/escape-string-regexp": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
-      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5852,8 +5120,6 @@
     },
     "node_modules/args/node_modules/mri": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/mri/-/mri-1.1.4.tgz",
-      "integrity": "sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5862,8 +5128,6 @@
     },
     "node_modules/args/node_modules/supports-color": {
       "version": "5.5.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
-      "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5875,8 +5139,6 @@
     },
     "node_modules/array-buffer-byte-length": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz",
-      "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5892,15 +5154,11 @@
     },
     "node_modules/array-ify": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz",
-      "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/array-includes": {
       "version": "3.1.9",
-      "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz",
-      "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5922,8 +5180,6 @@
     },
     "node_modules/array.prototype.findlastindex": {
       "version": "1.2.6",
-      "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz",
-      "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5944,8 +5200,6 @@
     },
     "node_modules/array.prototype.flat": {
       "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz",
-      "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5963,8 +5217,6 @@
     },
     "node_modules/array.prototype.flatmap": {
       "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz",
-      "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5982,8 +5234,6 @@
     },
     "node_modules/arraybuffer.prototype.slice": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz",
-      "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6004,8 +5254,6 @@
     },
     "node_modules/arrify": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
-      "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6014,8 +5262,6 @@
     },
     "node_modules/async-function": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz",
-      "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6024,8 +5270,6 @@
     },
     "node_modules/async-hook-domain": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.4.tgz",
-      "integrity": "sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -6034,8 +5278,6 @@
     },
     "node_modules/async-retry": {
       "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
-      "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6044,8 +5286,6 @@
     },
     "node_modules/async-retry/node_modules/retry": {
       "version": "0.13.1",
-      "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
-      "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6054,15 +5294,11 @@
     },
     "node_modules/asynckit": {
       "version": "0.4.0",
-      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
-      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/available-typed-arrays": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
-      "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6077,15 +5313,11 @@
     },
     "node_modules/b4a": {
       "version": "1.6.7",
-      "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.7.tgz",
-      "integrity": "sha512-OnAYlL5b7LEkALw87fUVafQw5rVR9RjwGd4KUwNQ6DrrNmaVaUCgLipfVlzrPQ4tWOR9P0IXGNOx50jYCCdSJg==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/bail": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz",
-      "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6095,36 +5327,26 @@
     },
     "node_modules/balanced-match": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
-      "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/bare-events": {
       "version": "2.6.0",
-      "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.6.0.tgz",
-      "integrity": "sha512-EKZ5BTXYExaNqi3I3f9RtEsaI/xBSGjE0XZCZilPzFAV/goswFHuPd9jEZlPIZ/iNZJwDSao9qRiScySz7MbQg==",
       "dev": true,
       "license": "Apache-2.0",
       "optional": true
     },
     "node_modules/basic-auth-parser": {
       "version": "0.0.2-1",
-      "resolved": "https://registry.npmjs.org/basic-auth-parser/-/basic-auth-parser-0.0.2-1.tgz",
-      "integrity": "sha512-GFj8iVxo9onSU6BnnQvVwqvxh60UcSHJEDnIk3z4B6iOjsKSmqe+ibW0Rsz7YO7IE1HG3D3tqCNIidP46SZVdQ==",
       "dev": true
     },
     "node_modules/before-after-hook": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/benchmark": {
       "version": "2.1.4",
-      "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz",
-      "integrity": "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6134,8 +5356,6 @@
     },
     "node_modules/bin-links": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-5.0.0.tgz",
-      "integrity": "sha512-sdleLVfCjBtgO5cNjA2HVRvWBJAHs4zwenaCPMNJAJU0yNxpzj80IpjOIimkpkr+mhlA+how5poQtt53PygbHA==",
       "license": "ISC",
       "dependencies": {
         "cmd-shim": "^7.0.0",
@@ -6150,8 +5370,6 @@
     },
     "node_modules/binary-extensions": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-3.1.0.tgz",
-      "integrity": "sha512-Jvvd9hy1w+xUad8+ckQsWA/V1AoyubOvqn0aygjMOVM4BfIaRav1NFS3LsTSDaV4n4FtcCtQXvzep1E6MboqwQ==",
       "license": "MIT",
       "engines": {
         "node": ">=18.20"
@@ -6162,8 +5380,6 @@
     },
     "node_modules/bind-obj-methods": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz",
-      "integrity": "sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -6172,15 +5388,11 @@
     },
     "node_modules/boolbase": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
-      "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/brace-expansion": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz",
-      "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -6189,8 +5401,6 @@
     },
     "node_modules/braces": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
-      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6202,8 +5412,6 @@
     },
     "node_modules/browserslist": {
       "version": "4.25.1",
-      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz",
-      "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==",
       "dev": true,
       "funding": [
         {
@@ -6236,15 +5444,11 @@
     },
     "node_modules/buffer-from": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
-      "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/cacache": {
       "version": "20.0.1",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-20.0.1.tgz",
-      "integrity": "sha512-+7LYcYGBYoNqTp1Rv7Ny1YjUo5E0/ftkQtraH3vkfAGgVHc+ouWdC8okAwQgQR7EVIdW6JTzTmhKFwzb+4okAQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -6266,8 +5470,6 @@
     },
     "node_modules/cacache/node_modules/glob": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
-      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -6290,8 +5492,6 @@
     },
     "node_modules/cacache/node_modules/jackspeak": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
-      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -6306,8 +5506,6 @@
     },
     "node_modules/cacache/node_modules/lru-cache": {
       "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -6316,8 +5514,6 @@
     },
     "node_modules/cacache/node_modules/minimatch": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -6332,8 +5528,6 @@
     },
     "node_modules/cacache/node_modules/path-scurry": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
-      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -6349,8 +5543,6 @@
     },
     "node_modules/caching-transform": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz",
-      "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6365,15 +5557,11 @@
     },
     "node_modules/caching-transform/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/caching-transform/node_modules/write-file-atomic": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz",
-      "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6385,8 +5573,6 @@
     },
     "node_modules/call-bind": {
       "version": "1.0.8",
-      "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz",
-      "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6404,8 +5590,6 @@
     },
     "node_modules/call-bind-apply-helpers": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
-      "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6418,8 +5602,6 @@
     },
     "node_modules/call-bound": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
-      "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6435,15 +5617,11 @@
     },
     "node_modules/caller": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/caller/-/caller-1.1.0.tgz",
-      "integrity": "sha512-n+21IZC3j06YpCWaxmUy5AnVqhmCIM2bQtqQyy00HJlmStRt6kwDX5F9Z97pqwAB+G/tgSz6q/kUBbNyQzIubw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/callsites": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
-      "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6452,8 +5630,6 @@
     },
     "node_modules/camelcase": {
       "version": "5.3.1",
-      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
-      "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6462,8 +5638,6 @@
     },
     "node_modules/camelcase-keys": {
       "version": "6.2.2",
-      "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz",
-      "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6480,8 +5654,6 @@
     },
     "node_modules/caniuse-lite": {
       "version": "1.0.30001727",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001727.tgz",
-      "integrity": "sha512-pB68nIHmbN6L/4C6MH1DokyR3bYqFwjaSs/sWDHGj4CTcFtQUQMuJftVwWkXq7mNWOybD3KhUv3oWHoGxgP14Q==",
       "dev": true,
       "funding": [
         {
@@ -6501,8 +5673,6 @@
     },
     "node_modules/ccount": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
-      "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6512,8 +5682,6 @@
     },
     "node_modules/chalk": {
       "version": "5.4.1",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.4.1.tgz",
-      "integrity": "sha512-zgVZuo2WcZgfUEmsn6eO3kINexW8RAE4maiQ8QNs8CtpPCSyMiYsULR3HQYkm3w8FIA3SberyMJMSldGsW+U3w==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -6525,8 +5693,6 @@
     },
     "node_modules/character-entities": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
-      "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6536,8 +5702,6 @@
     },
     "node_modules/character-entities-html4": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz",
-      "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6547,8 +5711,6 @@
     },
     "node_modules/character-entities-legacy": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
-      "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6558,8 +5720,6 @@
     },
     "node_modules/chokidar": {
       "version": "3.6.0",
-      "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
-      "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6583,8 +5743,6 @@
     },
     "node_modules/chokidar/node_modules/glob-parent": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
-      "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6596,8 +5754,6 @@
     },
     "node_modules/chownr": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz",
-      "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -6606,8 +5762,6 @@
     },
     "node_modules/ci-info": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-4.3.0.tgz",
-      "integrity": "sha512-l+2bNRMiQgcfILUi33labAZYIWlH1kWDp+ecNo5iisRKrbm0xcRyCww71/YU0Fkw0mAFpz9bJayXPjey6vkmaQ==",
       "funding": [
         {
           "type": "github",
@@ -6622,8 +5776,6 @@
     },
     "node_modules/cidr-regex": {
       "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-4.1.3.tgz",
-      "integrity": "sha512-86M1y3ZeQvpZkZejQCcS+IaSWjlDUC+ORP0peScQ4uEUFCZ8bEQVz7NlJHqysoUb6w3zCjx4Mq/8/2RHhMwHYw==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -6635,8 +5787,6 @@
     },
     "node_modules/clean-stack": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
-      "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6645,8 +5795,6 @@
     },
     "node_modules/cli-columns": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/cli-columns/-/cli-columns-4.0.0.tgz",
-      "integrity": "sha512-XW2Vg+w+L9on9wtwKpyzluIPCWXjaBahI7mTcYjx+BVIYD9c3yqcv/yKC7CmdCZat4rq2yiE1UMSJC5ivKfMtQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -6659,8 +5807,6 @@
     },
     "node_modules/cli-table3": {
       "version": "0.6.5",
-      "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.5.tgz",
-      "integrity": "sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6675,8 +5821,6 @@
     },
     "node_modules/cliui": {
       "version": "8.0.1",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
-      "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6690,8 +5834,6 @@
     },
     "node_modules/cliui/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6706,8 +5848,6 @@
     },
     "node_modules/cliui/node_modules/wrap-ansi": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6724,8 +5864,6 @@
     },
     "node_modules/cmd-shim": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-7.0.0.tgz",
-      "integrity": "sha512-rtpaCbr164TPPh+zFdkWpCyZuKkjpAzODfaZCf/SVJZzJN+4bHQb/LP3Jzq5/+84um3XXY8r548XiWKSborwVw==",
       "license": "ISC",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
@@ -6733,8 +5871,6 @@
     },
     "node_modules/code-suggester": {
       "version": "4.3.4",
-      "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-4.3.4.tgz",
-      "integrity": "sha512-qOj12mccFX2NALK01WnrwJKCmIwp1TMuskueh2EVaR4bc3xw072yfX9Ojq7yFQL4AmXfTXHKNjSO8lvh0y5MuA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -6755,8 +5891,6 @@
     },
     "node_modules/code-suggester/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6771,8 +5905,6 @@
     },
     "node_modules/code-suggester/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6782,8 +5914,6 @@
     },
     "node_modules/code-suggester/node_modules/cliui": {
       "version": "7.0.4",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
-      "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6794,8 +5924,6 @@
     },
     "node_modules/code-suggester/node_modules/diff": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
-      "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -6804,9 +5932,6 @@
     },
     "node_modules/code-suggester/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6826,8 +5951,6 @@
     },
     "node_modules/code-suggester/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6839,8 +5962,6 @@
     },
     "node_modules/code-suggester/node_modules/wrap-ansi": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6857,8 +5978,6 @@
     },
     "node_modules/code-suggester/node_modules/yargs": {
       "version": "16.2.0",
-      "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
-      "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6876,8 +5995,6 @@
     },
     "node_modules/code-suggester/node_modules/yargs-parser": {
       "version": "20.2.9",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
-      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -6886,8 +6003,6 @@
     },
     "node_modules/color-convert": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
-      "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -6899,15 +6014,11 @@
     },
     "node_modules/color-name": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
-      "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/color-support": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
-      "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -6916,8 +6027,6 @@
     },
     "node_modules/combined-stream": {
       "version": "1.0.8",
-      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
-      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6929,8 +6038,6 @@
     },
     "node_modules/comma-separated-tokens": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz",
-      "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6940,28 +6047,20 @@
     },
     "node_modules/commander": {
       "version": "2.20.3",
-      "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
-      "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/common-ancestor-path": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/common-ancestor-path/-/common-ancestor-path-1.0.1.tgz",
-      "integrity": "sha512-L3sHRo1pXXEqX8VU28kfgUY+YGsk09hPqZiZmLacNib6XNTCM8ubYeT7ryXQw8asB1sKgcU5lkB7ONug08aB8w==",
       "license": "ISC"
     },
     "node_modules/commondir": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
-      "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/compare-func": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz",
-      "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6971,15 +6070,11 @@
     },
     "node_modules/concat-map": {
       "version": "0.0.1",
-      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
-      "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/conventional-changelog-angular": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-7.0.0.tgz",
-      "integrity": "sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6991,8 +6086,6 @@
     },
     "node_modules/conventional-changelog-conventionalcommits": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-7.0.2.tgz",
-      "integrity": "sha512-NKXYmMR/Hr1DevQegFB4MwfM5Vv0m4UIxKZTTYuD98lpTknaZlSRrDOG4X7wIXpGkfsYxZTghUN+Qq+T0YQI7w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -7004,8 +6097,6 @@
     },
     "node_modules/conventional-changelog-writer": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-6.0.1.tgz",
-      "integrity": "sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7026,8 +6117,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/hosted-git-info": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz",
-      "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -7039,8 +6128,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/lru-cache": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
-      "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -7052,8 +6139,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/meow": {
       "version": "8.1.2",
-      "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz",
-      "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7078,8 +6163,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/normalize-package-data": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz",
-      "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -7094,8 +6177,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/type-fest": {
       "version": "0.18.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz",
-      "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -7107,8 +6188,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/yargs-parser": {
       "version": "20.2.9",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
-      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -7117,8 +6196,6 @@
     },
     "node_modules/conventional-commits-filter": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-3.0.0.tgz",
-      "integrity": "sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7131,8 +6208,6 @@
     },
     "node_modules/conventional-commits-parser": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-5.0.0.tgz",
-      "integrity": "sha512-ZPMl0ZJbw74iS9LuX9YIAiW8pfM5p3yh2o/NbXHbkFuZzY5jvdi5jFycEOkmBW5H5I7nA+D6f3UcsCLP2vvSEA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7150,15 +6225,11 @@
     },
     "node_modules/convert-source-map": {
       "version": "1.9.0",
-      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
-      "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/cosmiconfig": {
       "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz",
-      "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7185,8 +6256,6 @@
     },
     "node_modules/cosmiconfig-typescript-loader": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-6.1.0.tgz",
-      "integrity": "sha512-tJ1w35ZRUiM5FeTzT7DtYWAFFv37ZLqSRkGi2oeCK1gPhvaWjkAtfXvLmvE1pRfxxp9aQo6ba/Pvg1dKj05D4g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7203,8 +6272,6 @@
     },
     "node_modules/cross-spawn": {
       "version": "7.0.6",
-      "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
-      "integrity": "sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -7218,8 +6285,6 @@
     },
     "node_modules/cross-spawn/node_modules/which": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
-      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -7234,8 +6299,6 @@
     },
     "node_modules/css-select": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz",
-      "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -7251,8 +6314,6 @@
     },
     "node_modules/css-what": {
       "version": "6.2.2",
-      "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz",
-      "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -7264,8 +6325,6 @@
     },
     "node_modules/cssesc": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
-      "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
       "license": "MIT",
       "bin": {
         "cssesc": "bin/cssesc"
@@ -7276,8 +6335,6 @@
     },
     "node_modules/cssstyle": {
       "version": "4.6.0",
-      "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.6.0.tgz",
-      "integrity": "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7290,15 +6347,11 @@
     },
     "node_modules/cssstyle/node_modules/rrweb-cssom": {
       "version": "0.8.0",
-      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
-      "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/dargs": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/dargs/-/dargs-8.1.0.tgz",
-      "integrity": "sha512-wAV9QHOsNbwnWdNW2FYvE1P56wtgSbM+3SZcdGiWQILwVjACCXDCI3Ai8QlCjMDB8YK5zySiXZYBiwGmNY3lnw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7310,8 +6363,6 @@
     },
     "node_modules/data-urls": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz",
-      "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7324,8 +6375,6 @@
     },
     "node_modules/data-urls/node_modules/tr46": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
-      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7337,8 +6386,6 @@
     },
     "node_modules/data-urls/node_modules/webidl-conversions": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
-      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -7347,8 +6394,6 @@
     },
     "node_modules/data-urls/node_modules/whatwg-url": {
       "version": "14.2.0",
-      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
-      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7361,8 +6406,6 @@
     },
     "node_modules/data-view-buffer": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz",
-      "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7379,8 +6422,6 @@
     },
     "node_modules/data-view-byte-length": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz",
-      "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7397,8 +6438,6 @@
     },
     "node_modules/data-view-byte-offset": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz",
-      "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7415,8 +6454,6 @@
     },
     "node_modules/dateformat": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz",
-      "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7425,8 +6462,6 @@
     },
     "node_modules/debug": {
       "version": "4.4.1",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.1.tgz",
-      "integrity": "sha512-KcKCqiftBJcZr++7ykoDIEwSa3XWowTfNPo92BYxjXiyYEVrUQh2aLyhxBCwww+heortUFxEJYcRzosstTEBYQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -7443,8 +6478,6 @@
     },
     "node_modules/decamelize": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
-      "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7453,8 +6486,6 @@
     },
     "node_modules/decamelize-keys": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz",
-      "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7470,8 +6501,6 @@
     },
     "node_modules/decamelize-keys/node_modules/map-obj": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz",
-      "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7480,15 +6509,11 @@
     },
     "node_modules/decimal.js": {
       "version": "10.6.0",
-      "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz",
-      "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/decode-named-character-reference": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz",
-      "integrity": "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7501,8 +6526,6 @@
     },
     "node_modules/dedent": {
       "version": "1.6.0",
-      "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.6.0.tgz",
-      "integrity": "sha512-F1Z+5UCFpmQUzJa11agbyPVMbpgT/qA3/SKyJ1jyBgm7dUcUEa8v9JwDkerSQXfakBwFljIxhOJqGkjUwZ9FSA==",
       "dev": true,
       "license": "MIT",
       "peerDependencies": {
@@ -7516,15 +6539,11 @@
     },
     "node_modules/deep-is": {
       "version": "0.1.4",
-      "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
-      "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/default-require-extensions": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz",
-      "integrity": "sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7539,8 +6558,6 @@
     },
     "node_modules/define-data-property": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
-      "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7557,8 +6574,6 @@
     },
     "node_modules/define-properties": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
-      "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7575,8 +6590,6 @@
     },
     "node_modules/delayed-stream": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
-      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7585,15 +6598,11 @@
     },
     "node_modules/deprecation": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
-      "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/dequal": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
-      "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7602,8 +6611,6 @@
     },
     "node_modules/detect-indent": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz",
-      "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7612,8 +6619,6 @@
     },
     "node_modules/devlop": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz",
-      "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7626,8 +6631,6 @@
     },
     "node_modules/diff": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-7.0.0.tgz",
-      "integrity": "sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==",
       "license": "BSD-3-Clause",
       "engines": {
         "node": ">=0.3.1"
@@ -7635,15 +6638,11 @@
     },
     "node_modules/discontinuous-range": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz",
-      "integrity": "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/doctrine": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
-      "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -7655,8 +6654,6 @@
     },
     "node_modules/dom-serializer": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
-      "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7670,8 +6667,6 @@
     },
     "node_modules/domelementtype": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
-      "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
       "dev": true,
       "funding": [
         {
@@ -7683,8 +6678,6 @@
     },
     "node_modules/domhandler": {
       "version": "5.0.3",
-      "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
-      "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -7699,8 +6692,6 @@
     },
     "node_modules/domutils": {
       "version": "3.2.2",
-      "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
-      "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -7714,8 +6705,6 @@
     },
     "node_modules/dot-prop": {
       "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz",
-      "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7727,8 +6716,6 @@
     },
     "node_modules/dunder-proto": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
-      "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7742,29 +6729,21 @@
     },
     "node_modules/eastasianwidth": {
       "version": "0.2.0",
-      "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
-      "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/electron-to-chromium": {
       "version": "1.5.189",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.189.tgz",
-      "integrity": "sha512-y9D1ntS1ruO/pZ/V2FtLE+JXLQe28XoRpZ7QCCo0T8LdQladzdcOVQZH/IWLVJvCw12OGMb6hYOeOAjntCmJRQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/emoji-regex": {
       "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
-      "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/encoding": {
       "version": "0.1.13",
-      "resolved": "https://registry.npmjs.org/encoding/-/encoding-0.1.13.tgz",
-      "integrity": "sha512-ETBauow1T35Y/WZMkio9jiM0Z5xjHHmJ4XmjZOq1l/dXz3lr2sRn87nJy20RupqSh1F2m3HHPSp8ShIPQJrJ3A==",
       "inBundle": true,
       "license": "MIT",
       "optional": true,
@@ -7774,8 +6753,6 @@
     },
     "node_modules/entities": {
       "version": "4.5.0",
-      "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
-      "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -7787,8 +6764,6 @@
     },
     "node_modules/env-paths": {
       "version": "2.2.1",
-      "resolved": "https://registry.npmjs.org/env-paths/-/env-paths-2.2.1.tgz",
-      "integrity": "sha512-+h1lkLKhZMTYjog1VEpJNG7NZJWcuc2DDk/qsqSTRRCOXiLjeQ1d1/udrUGhqMxUgAlwKNZ0cf2uqan5GLuS2A==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -7797,15 +6772,11 @@
     },
     "node_modules/err-code": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz",
-      "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/error-ex": {
       "version": "1.3.2",
-      "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz",
-      "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7814,8 +6785,6 @@
     },
     "node_modules/es-abstract": {
       "version": "1.24.0",
-      "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz",
-      "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7883,8 +6852,6 @@
     },
     "node_modules/es-define-property": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
-      "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7893,8 +6860,6 @@
     },
     "node_modules/es-errors": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
-      "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7903,8 +6868,6 @@
     },
     "node_modules/es-object-atoms": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
-      "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7916,8 +6879,6 @@
     },
     "node_modules/es-set-tostringtag": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
-      "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7932,8 +6893,6 @@
     },
     "node_modules/es-shim-unscopables": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz",
-      "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7945,8 +6904,6 @@
     },
     "node_modules/es-to-primitive": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz",
-      "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7963,15 +6920,11 @@
     },
     "node_modules/es6-error": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz",
-      "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/escalade": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
-      "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7980,8 +6933,6 @@
     },
     "node_modules/escape-string-regexp": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
-      "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7993,9 +6944,6 @@
     },
     "node_modules/eslint": {
       "version": "8.57.1",
-      "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz",
-      "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==",
-      "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8051,8 +6999,6 @@
     },
     "node_modules/eslint-import-resolver-node": {
       "version": "0.3.9",
-      "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz",
-      "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8063,8 +7009,6 @@
     },
     "node_modules/eslint-import-resolver-node/node_modules/debug": {
       "version": "3.2.7",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
-      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8073,8 +7017,6 @@
     },
     "node_modules/eslint-module-utils": {
       "version": "2.12.1",
-      "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz",
-      "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8091,8 +7033,6 @@
     },
     "node_modules/eslint-module-utils/node_modules/debug": {
       "version": "3.2.7",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
-      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8101,8 +7041,6 @@
     },
     "node_modules/eslint-plugin-es": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz",
-      "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8121,8 +7059,6 @@
     },
     "node_modules/eslint-plugin-import": {
       "version": "2.32.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz",
-      "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8155,8 +7091,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8166,8 +7100,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/debug": {
       "version": "3.2.7",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
-      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8176,8 +7108,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/doctrine": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
-      "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -8189,8 +7119,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8202,8 +7130,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -8212,8 +7138,6 @@
     },
     "node_modules/eslint-plugin-node": {
       "version": "11.1.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz",
-      "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8233,8 +7157,6 @@
     },
     "node_modules/eslint-plugin-node/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8244,8 +7166,6 @@
     },
     "node_modules/eslint-plugin-node/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8257,8 +7177,6 @@
     },
     "node_modules/eslint-plugin-node/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -8267,8 +7185,6 @@
     },
     "node_modules/eslint-plugin-promise": {
       "version": "6.6.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz",
-      "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -8284,8 +7200,6 @@
     },
     "node_modules/eslint-scope": {
       "version": "7.2.2",
-      "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
-      "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -8301,8 +7215,6 @@
     },
     "node_modules/eslint-utils": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz",
-      "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8317,8 +7229,6 @@
     },
     "node_modules/eslint-utils/node_modules/eslint-visitor-keys": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
-      "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -8327,8 +7237,6 @@
     },
     "node_modules/eslint-visitor-keys": {
       "version": "3.4.3",
-      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
-      "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -8340,8 +7248,6 @@
     },
     "node_modules/eslint/node_modules/ajv": {
       "version": "6.12.6",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
-      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8357,8 +7263,6 @@
     },
     "node_modules/eslint/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8373,8 +7277,6 @@
     },
     "node_modules/eslint/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8384,8 +7286,6 @@
     },
     "node_modules/eslint/node_modules/chalk": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
-      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8401,8 +7301,6 @@
     },
     "node_modules/eslint/node_modules/find-up": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
-      "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8418,8 +7316,6 @@
     },
     "node_modules/eslint/node_modules/has-flag": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
-      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8428,15 +7324,11 @@
     },
     "node_modules/eslint/node_modules/json-schema-traverse": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
-      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/eslint/node_modules/locate-path": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
-      "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8451,8 +7343,6 @@
     },
     "node_modules/eslint/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8464,8 +7354,6 @@
     },
     "node_modules/eslint/node_modules/p-limit": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
-      "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8480,8 +7368,6 @@
     },
     "node_modules/eslint/node_modules/p-locate": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
-      "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8496,8 +7382,6 @@
     },
     "node_modules/eslint/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8506,8 +7390,6 @@
     },
     "node_modules/eslint/node_modules/supports-color": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
-      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8519,8 +7401,6 @@
     },
     "node_modules/eslint/node_modules/yocto-queue": {
       "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
-      "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8532,8 +7412,6 @@
     },
     "node_modules/espree": {
       "version": "9.6.1",
-      "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
-      "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -8550,8 +7428,6 @@
     },
     "node_modules/esquery": {
       "version": "1.6.0",
-      "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz",
-      "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -8563,8 +7439,6 @@
     },
     "node_modules/esrecurse": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
-      "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -8576,8 +7450,6 @@
     },
     "node_modules/estraverse": {
       "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
-      "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -8586,8 +7458,6 @@
     },
     "node_modules/esutils": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
-      "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -8596,57 +7466,41 @@
     },
     "node_modules/events-to-array": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz",
-      "integrity": "sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/exponential-backoff": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.2.tgz",
-      "integrity": "sha512-8QxYTVXUkuy7fIIoitQkPwGonB8F3Zj8eEO8Sqg9Zv/bkI7RJAzowee4gr81Hak/dUTpA2Z7VfQgoijjPNlUZA==",
       "inBundle": true,
       "license": "Apache-2.0"
     },
     "node_modules/extend": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
-      "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-deep-equal": {
       "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
-      "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-fifo": {
       "version": "1.3.2",
-      "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz",
-      "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-json-stable-stringify": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
-      "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-levenshtein": {
       "version": "2.0.6",
-      "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
-      "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-uri": {
       "version": "3.0.6",
-      "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.0.6.tgz",
-      "integrity": "sha512-Atfo14OibSv5wAp4VWNsFYE1AchQRTv9cBGWET4pZWHzYshFSS9NQI6I57rdKn9croWVMbYFbLhJ+yJvmZIIHw==",
       "dev": true,
       "funding": [
         {
@@ -8662,8 +7516,6 @@
     },
     "node_modules/fastest-levenshtein": {
       "version": "1.0.16",
-      "resolved": "https://registry.npmjs.org/fastest-levenshtein/-/fastest-levenshtein-1.0.16.tgz",
-      "integrity": "sha512-eRnCtTTtGZFpQCwhJiUOuxPQWRXVKYDn0b2PeHfXL6/Zi53SLAzAHfVhVWK2AryC/WH05kGfxhFIPvTF0SXQzg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -8672,8 +7524,6 @@
     },
     "node_modules/fastq": {
       "version": "1.19.1",
-      "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz",
-      "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8682,8 +7532,6 @@
     },
     "node_modules/figures": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
-      "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8698,8 +7546,6 @@
     },
     "node_modules/figures/node_modules/escape-string-regexp": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
-      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8708,8 +7554,6 @@
     },
     "node_modules/file-entry-cache": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
-      "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8721,8 +7565,6 @@
     },
     "node_modules/fill-range": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
-      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8734,8 +7576,6 @@
     },
     "node_modules/find-cache-dir": {
       "version": "3.3.2",
-      "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz",
-      "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8752,8 +7592,6 @@
     },
     "node_modules/find-up": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-7.0.0.tgz",
-      "integrity": "sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8770,15 +7608,11 @@
     },
     "node_modules/findit": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz",
-      "integrity": "sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/flat-cache": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
-      "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8792,8 +7626,6 @@
     },
     "node_modules/flat-cache/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8803,9 +7635,6 @@
     },
     "node_modules/flat-cache/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8825,8 +7654,6 @@
     },
     "node_modules/flat-cache/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8838,9 +7665,6 @@
     },
     "node_modules/flat-cache/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8855,15 +7679,11 @@
     },
     "node_modules/flatted": {
       "version": "3.3.3",
-      "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz",
-      "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/for-each": {
       "version": "0.3.5",
-      "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz",
-      "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8878,8 +7698,6 @@
     },
     "node_modules/foreground-child": {
       "version": "3.3.1",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz",
-      "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -8895,8 +7713,6 @@
     },
     "node_modules/form-data": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
-      "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8912,8 +7728,6 @@
     },
     "node_modules/fromentries": {
       "version": "1.3.2",
-      "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz",
-      "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==",
       "dev": true,
       "funding": [
         {
@@ -8933,8 +7747,6 @@
     },
     "node_modules/front-matter": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/front-matter/-/front-matter-4.0.2.tgz",
-      "integrity": "sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8943,8 +7755,6 @@
     },
     "node_modules/front-matter/node_modules/argparse": {
       "version": "1.0.10",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
-      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8953,8 +7763,6 @@
     },
     "node_modules/front-matter/node_modules/esprima": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
-      "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
       "dev": true,
       "license": "BSD-2-Clause",
       "bin": {
@@ -8967,8 +7775,6 @@
     },
     "node_modules/front-matter/node_modules/js-yaml": {
       "version": "3.14.1",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
-      "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8981,22 +7787,16 @@
     },
     "node_modules/front-matter/node_modules/sprintf-js": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
-      "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
       "dev": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/fs-exists-cached": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz",
-      "integrity": "sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/fs-minipass": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-3.0.3.tgz",
-      "integrity": "sha512-XUBA9XClHbnJWSfBzjkm6RvPsyg3sryZt06BEQoXcF7EK/xpGaQYJgQKDJSUH5SGZ76Y7pFx1QBnXz09rU5Fbw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -9008,17 +7808,12 @@
     },
     "node_modules/fs.realpath": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
-      "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/fsevents": {
       "version": "2.3.3",
-      "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
-      "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
       "dev": true,
-      "hasInstallScript": true,
       "license": "MIT",
       "optional": true,
       "os": [
@@ -9030,8 +7825,6 @@
     },
     "node_modules/function-bind": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
-      "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9040,15 +7833,11 @@
     },
     "node_modules/function-loop": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz",
-      "integrity": "sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/function.prototype.name": {
       "version": "1.1.8",
-      "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz",
-      "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9068,8 +7857,6 @@
     },
     "node_modules/functions-have-names": {
       "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz",
-      "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9078,8 +7865,6 @@
     },
     "node_modules/gensync": {
       "version": "1.0.0-beta.2",
-      "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
-      "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9088,8 +7873,6 @@
     },
     "node_modules/get-caller-file": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
-      "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -9098,8 +7881,6 @@
     },
     "node_modules/get-intrinsic": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
-      "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9123,8 +7904,6 @@
     },
     "node_modules/get-package-type": {
       "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
-      "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9133,8 +7912,6 @@
     },
     "node_modules/get-proto": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
-      "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9147,8 +7924,6 @@
     },
     "node_modules/get-symbol-description": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz",
-      "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9165,8 +7940,6 @@
     },
     "node_modules/git-raw-commits": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-4.0.0.tgz",
-      "integrity": "sha512-ICsMM1Wk8xSGMowkOmPrzo2Fgmfo4bMHLNX6ytHjajRJUqvHOw/TFapQ+QG75c3X/tTDDhOSRPGC52dDbNM8FQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9183,8 +7956,6 @@
     },
     "node_modules/glob": {
       "version": "10.4.5",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
-      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -9204,8 +7975,6 @@
     },
     "node_modules/glob-parent": {
       "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
-      "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9217,8 +7986,6 @@
     },
     "node_modules/global-directory": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz",
-      "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9233,8 +8000,6 @@
     },
     "node_modules/global-directory/node_modules/ini": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz",
-      "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -9243,8 +8008,6 @@
     },
     "node_modules/globals": {
       "version": "13.24.0",
-      "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
-      "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9259,8 +8022,6 @@
     },
     "node_modules/globalthis": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz",
-      "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9276,8 +8037,6 @@
     },
     "node_modules/gopd": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
-      "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9289,22 +8048,16 @@
     },
     "node_modules/graceful-fs": {
       "version": "4.2.11",
-      "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
-      "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==",
       "inBundle": true,
       "license": "ISC"
     },
     "node_modules/graphemer": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
-      "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/groff-escape": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/groff-escape/-/groff-escape-2.0.1.tgz",
-      "integrity": "sha512-S0nG+mLFTu1buDKQsRlBtIxZU/dMvrdCURJg/zSLKpL333yi1Fs5bLUYk+v3pRYlc+qmHtukMAM2slB0AKFKAw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9314,8 +8067,6 @@
     },
     "node_modules/handlebars": {
       "version": "4.7.8",
-      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz",
-      "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9336,8 +8087,6 @@
     },
     "node_modules/hard-rejection": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz",
-      "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9346,8 +8095,6 @@
     },
     "node_modules/has-bigints": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz",
-      "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9359,8 +8106,6 @@
     },
     "node_modules/has-flag": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
-      "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9369,8 +8114,6 @@
     },
     "node_modules/has-property-descriptors": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
-      "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9382,8 +8125,6 @@
     },
     "node_modules/has-proto": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz",
-      "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9398,8 +8139,6 @@
     },
     "node_modules/has-symbols": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
-      "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9411,8 +8150,6 @@
     },
     "node_modules/has-tostringtag": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
-      "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9427,8 +8164,6 @@
     },
     "node_modules/hasha": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz",
-      "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9444,8 +8179,6 @@
     },
     "node_modules/hasha/node_modules/is-stream": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
-      "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9457,8 +8190,6 @@
     },
     "node_modules/hasha/node_modules/type-fest": {
       "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
-      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -9467,8 +8198,6 @@
     },
     "node_modules/hasown": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
-      "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9480,8 +8209,6 @@
     },
     "node_modules/hast-util-from-parse5": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz",
-      "integrity": "sha512-Nz7FfPBuljzsN3tCQ4kCBKqdNhQE2l0Tn+X1ubgKBPRoiDIu1mL08Cfw4k7q71+Duyaw7DXDN+VTAp4Vh3oCOw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9500,8 +8227,6 @@
     },
     "node_modules/hast-util-from-parse5/node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9510,8 +8235,6 @@
     },
     "node_modules/hast-util-parse-selector": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
-      "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9524,8 +8247,6 @@
     },
     "node_modules/hast-util-parse-selector/node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9534,8 +8255,6 @@
     },
     "node_modules/hast-util-raw": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.3.tgz",
-      "integrity": "sha512-RujVQfVsOrxzPOPSzZFiwofMArbQke6DJjnFfceiEbFh7S05CbPt0cYN+A5YeD3pso0JQk6O1aHBnx9+Pm2uqg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9558,8 +8277,6 @@
     },
     "node_modules/hast-util-raw/node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9568,8 +8285,6 @@
     },
     "node_modules/hast-util-raw/node_modules/html-void-elements": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz",
-      "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9579,15 +8294,11 @@
     },
     "node_modules/hast-util-raw/node_modules/parse5": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz",
-      "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/hast-util-raw/node_modules/unist-util-position": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz",
-      "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9600,8 +8311,6 @@
     },
     "node_modules/hast-util-raw/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9616,8 +8325,6 @@
     },
     "node_modules/hast-util-raw/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9631,8 +8338,6 @@
     },
     "node_modules/hast-util-to-parse5": {
       "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.1.0.tgz",
-      "integrity": "sha512-YNRgAJkH2Jky5ySkIqFXTQiaqcAtJyVE+D5lkN6CdtOqrnkLfGYYrEcKuHOJZlp+MwjSwuD3fZuawI+sic/RBw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9650,8 +8355,6 @@
     },
     "node_modules/hast-util-to-parse5/node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9660,8 +8363,6 @@
     },
     "node_modules/hastscript": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
-      "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9678,8 +8379,6 @@
     },
     "node_modules/hastscript/node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9688,8 +8387,6 @@
     },
     "node_modules/he": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
-      "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -9698,8 +8395,6 @@
     },
     "node_modules/hosted-git-info": {
       "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.0.tgz",
-      "integrity": "sha512-gEf705MZLrDPkbbhi8PnoO4ZwYgKoNL+ISZ3AjZMht2r3N5tuTwncyDi6Fv2/qDnMmZxgs0yI8WDOyR8q3G+SQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -9711,8 +8406,6 @@
     },
     "node_modules/hosted-git-info/node_modules/lru-cache": {
       "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -9721,8 +8414,6 @@
     },
     "node_modules/html-encoding-sniffer": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
-      "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9734,22 +8425,16 @@
     },
     "node_modules/html-escaper": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
-      "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/http-cache-semantics": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.2.0.tgz",
-      "integrity": "sha512-dTxcvPXqPvXBQpq5dUr6mEMJX4oIEFv6bwom3FDwKRDsuIjjJGANqhBuoAn9c1RQJIdAKav33ED65E2ys+87QQ==",
       "inBundle": true,
       "license": "BSD-2-Clause"
     },
     "node_modules/http-proxy-agent": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz",
-      "integrity": "sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -9762,8 +8447,6 @@
     },
     "node_modules/https-proxy-agent": {
       "version": "7.0.6",
-      "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz",
-      "integrity": "sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -9776,8 +8459,6 @@
     },
     "node_modules/iconv-lite": {
       "version": "0.6.3",
-      "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
-      "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
       "devOptional": true,
       "inBundle": true,
       "license": "MIT",
@@ -9790,8 +8471,6 @@
     },
     "node_modules/ignore": {
       "version": "5.3.2",
-      "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
-      "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9800,8 +8479,6 @@
     },
     "node_modules/ignore-walk": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-7.0.0.tgz",
-      "integrity": "sha512-T4gbf83A4NH95zvhVYZc+qWocBBGlpzUXLPGurJggw/WIOwicfXJChLDP/iBZnN5WqROSu5Bm3hhle4z8a8YGQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9813,8 +8490,6 @@
     },
     "node_modules/import-fresh": {
       "version": "3.3.1",
-      "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
-      "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9830,8 +8505,6 @@
     },
     "node_modules/import-fresh/node_modules/resolve-from": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
-      "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9840,8 +8513,6 @@
     },
     "node_modules/import-meta-resolve": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.1.0.tgz",
-      "integrity": "sha512-I6fiaX09Xivtk+THaMfAwnA3MVA5Big1WHF1Dfx9hFuvNIWpXnorlkzhcQf6ehrqQiiZECRt1poOAkPmer3ruw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9851,8 +8522,6 @@
     },
     "node_modules/imurmurhash": {
       "version": "0.1.4",
-      "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz",
-      "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -9861,8 +8530,6 @@
     },
     "node_modules/indent-string": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz",
-      "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9871,9 +8538,6 @@
     },
     "node_modules/inflight": {
       "version": "1.0.6",
-      "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
-      "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
-      "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9883,15 +8547,11 @@
     },
     "node_modules/inherits": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
-      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/ini": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-5.0.0.tgz",
-      "integrity": "sha512-+N0ngpO3e7cRUWOJAS7qw0IZIVc6XPrW4MlFBdD066F2L4k1L6ker3hLqSq7iXxU5tgS4WGkIUElWn5vogAEnw==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -9900,8 +8560,6 @@
     },
     "node_modules/init-package-json": {
       "version": "8.2.2",
-      "resolved": "https://registry.npmjs.org/init-package-json/-/init-package-json-8.2.2.tgz",
-      "integrity": "sha512-pXVMn67Jdw2hPKLCuJZj62NC9B2OIDd1R3JwZXTHXuEnfN3Uq5kJbKOSld6YEU+KOGfMD82EzxFTYz5o0SSJoA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -9919,8 +8577,6 @@
     },
     "node_modules/internal-slot": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz",
-      "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9934,8 +8590,6 @@
     },
     "node_modules/ip-address": {
       "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-9.0.5.tgz",
-      "integrity": "sha512-zHtQzGojZXTwZTHQqra+ETKd4Sn3vgi7uBmlPoXVWZqYvuKmtI0l/VZTjqGmJY9x88GGOaZ9+G9ES8hC4T4X8g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -9948,8 +8602,6 @@
     },
     "node_modules/ip-regex": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-5.0.0.tgz",
-      "integrity": "sha512-fOCG6lhoKKakwv+C6KdsOnGvgXnmgfmp0myi3bcNwj3qfwPAxRKWEuFhvEFF7ceYIz6+1jRZ+yguLFAmUNPEfw==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -9961,8 +8613,6 @@
     },
     "node_modules/is-array-buffer": {
       "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz",
-      "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9979,15 +8629,11 @@
     },
     "node_modules/is-arrayish": {
       "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
-      "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-async-function": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz",
-      "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10006,8 +8652,6 @@
     },
     "node_modules/is-bigint": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz",
-      "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10022,8 +8666,6 @@
     },
     "node_modules/is-binary-path": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
-      "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10035,8 +8677,6 @@
     },
     "node_modules/is-binary-path/node_modules/binary-extensions": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
-      "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10048,8 +8688,6 @@
     },
     "node_modules/is-boolean-object": {
       "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz",
-      "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10065,8 +8703,6 @@
     },
     "node_modules/is-buffer": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz",
-      "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==",
       "dev": true,
       "funding": [
         {
@@ -10089,8 +8725,6 @@
     },
     "node_modules/is-callable": {
       "version": "1.2.7",
-      "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
-      "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10102,8 +8736,6 @@
     },
     "node_modules/is-cidr": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-5.1.1.tgz",
-      "integrity": "sha512-AwzRMjtJNTPOgm7xuYZ71715z99t+4yRnSnSzgK5err5+heYi4zMuvmpUadaJ28+KCXCQo8CjUrKQZRWSPmqTQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -10115,8 +8747,6 @@
     },
     "node_modules/is-core-module": {
       "version": "2.16.1",
-      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
-      "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10131,8 +8761,6 @@
     },
     "node_modules/is-data-view": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz",
-      "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10149,8 +8777,6 @@
     },
     "node_modules/is-date-object": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz",
-      "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10166,8 +8792,6 @@
     },
     "node_modules/is-extglob": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
-      "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10176,8 +8800,6 @@
     },
     "node_modules/is-finalizationregistry": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz",
-      "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10192,8 +8814,6 @@
     },
     "node_modules/is-fullwidth-code-point": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
-      "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -10202,8 +8822,6 @@
     },
     "node_modules/is-generator-function": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
-      "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10221,8 +8839,6 @@
     },
     "node_modules/is-glob": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
-      "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10234,15 +8850,11 @@
     },
     "node_modules/is-lambda": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz",
-      "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-map": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz",
-      "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10254,8 +8866,6 @@
     },
     "node_modules/is-negative-zero": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz",
-      "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10267,8 +8877,6 @@
     },
     "node_modules/is-number": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
-      "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10277,8 +8885,6 @@
     },
     "node_modules/is-number-object": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz",
-      "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10294,8 +8900,6 @@
     },
     "node_modules/is-obj": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
-      "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10304,8 +8908,6 @@
     },
     "node_modules/is-path-inside": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
-      "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10314,8 +8916,6 @@
     },
     "node_modules/is-plain-obj": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz",
-      "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10327,8 +8927,6 @@
     },
     "node_modules/is-plain-object": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
-      "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10337,15 +8935,11 @@
     },
     "node_modules/is-potential-custom-element-name": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
-      "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-regex": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
-      "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10363,8 +8957,6 @@
     },
     "node_modules/is-set": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz",
-      "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10376,8 +8968,6 @@
     },
     "node_modules/is-shared-array-buffer": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz",
-      "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10392,8 +8982,6 @@
     },
     "node_modules/is-string": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz",
-      "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10409,8 +8997,6 @@
     },
     "node_modules/is-symbol": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz",
-      "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10427,8 +9013,6 @@
     },
     "node_modules/is-text-path": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-2.0.0.tgz",
-      "integrity": "sha512-+oDTluR6WEjdXEJMnC2z6A4FRwFoYuvShVVEGsS7ewc0UTi2QtAKMDJuL4BDEVt+5T7MjFo12RP8ghOM75oKJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10440,8 +9024,6 @@
     },
     "node_modules/is-typed-array": {
       "version": "1.1.15",
-      "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz",
-      "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10456,15 +9038,11 @@
     },
     "node_modules/is-typedarray": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
-      "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-weakmap": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz",
-      "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10476,8 +9054,6 @@
     },
     "node_modules/is-weakref": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz",
-      "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10492,8 +9068,6 @@
     },
     "node_modules/is-weakset": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz",
-      "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10509,8 +9083,6 @@
     },
     "node_modules/is-windows": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz",
-      "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10519,22 +9091,16 @@
     },
     "node_modules/isarray": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
-      "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/isexe": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
-      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
       "inBundle": true,
       "license": "ISC"
     },
     "node_modules/istanbul-lib-coverage": {
       "version": "3.2.2",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
-      "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -10543,8 +9109,6 @@
     },
     "node_modules/istanbul-lib-hook": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz",
-      "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -10556,8 +9120,6 @@
     },
     "node_modules/istanbul-lib-instrument": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz",
-      "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -10572,8 +9134,6 @@
     },
     "node_modules/istanbul-lib-instrument/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -10582,8 +9142,6 @@
     },
     "node_modules/istanbul-lib-processinfo": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz",
-      "integrity": "sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -10600,8 +9158,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10611,9 +9167,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -10633,8 +9186,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -10646,8 +9197,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/p-map": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
-      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10659,9 +9208,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -10676,8 +9222,6 @@
     },
     "node_modules/istanbul-lib-report": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz",
-      "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -10691,8 +9235,6 @@
     },
     "node_modules/istanbul-lib-report/node_modules/has-flag": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
-      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10701,8 +9243,6 @@
     },
     "node_modules/istanbul-lib-report/node_modules/make-dir": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
-      "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10717,8 +9257,6 @@
     },
     "node_modules/istanbul-lib-report/node_modules/supports-color": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
-      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10730,8 +9268,6 @@
     },
     "node_modules/istanbul-lib-source-maps": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz",
-      "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -10745,8 +9281,6 @@
     },
     "node_modules/istanbul-reports": {
       "version": "3.1.7",
-      "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz",
-      "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -10759,8 +9293,6 @@
     },
     "node_modules/jackspeak": {
       "version": "3.4.3",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
-      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -10775,8 +9307,6 @@
     },
     "node_modules/jiti": {
       "version": "2.4.2",
-      "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.4.2.tgz",
-      "integrity": "sha512-rg9zJN+G4n2nfJl5MW3BMygZX56zKPNVEYYqq7adpmMh4Jn2QNEwhvQlFy6jPVdcod7txZtKHWnyZiA3a0zP7A==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -10785,15 +9315,11 @@
     },
     "node_modules/js-tokens": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
-      "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/js-yaml": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10805,15 +9331,11 @@
     },
     "node_modules/jsbn": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-1.1.0.tgz",
-      "integrity": "sha512-4bYVV3aAMtDTTu4+xsDYa6sy9GyJ69/amsu9sYF2zqjiEoZA5xJi3BrfX3uY+/IekIu7MwdObdbDWpoZdBv3/A==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/jsep": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz",
-      "integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -10823,8 +9345,6 @@
     },
     "node_modules/jsesc": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
-      "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -10836,15 +9356,11 @@
     },
     "node_modules/json-buffer": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
-      "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/json-parse-even-better-errors": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz",
-      "integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -10853,22 +9369,16 @@
     },
     "node_modules/json-schema-traverse": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
-      "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/json-stable-stringify-without-jsonify": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
-      "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/json-stringify-nice": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/json-stringify-nice/-/json-stringify-nice-1.1.4.tgz",
-      "integrity": "sha512-5Z5RFW63yxReJ7vANgW6eZFGWaQvnPE3WNmZoOJrSkGju2etKA2L5rrOa1sm877TVTFt57A80BH1bArcmlLfPw==",
       "license": "ISC",
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
@@ -10876,15 +9386,11 @@
     },
     "node_modules/json-stringify-safe": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
-      "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/json5": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
-      "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -10896,8 +9402,6 @@
     },
     "node_modules/jsonparse": {
       "version": "1.3.1",
-      "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.3.1.tgz",
-      "integrity": "sha512-POQXvpdL69+CluYsillJ7SUhKvytYjW9vG/GKpnf+xP8UWgYEM/RaMzHHofbALDiKbbP1W8UEYmgGl39WkPZsg==",
       "engines": [
         "node >= 0.2.0"
       ],
@@ -10906,8 +9410,6 @@
     },
     "node_modules/jsonpath-plus": {
       "version": "10.3.0",
-      "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-10.3.0.tgz",
-      "integrity": "sha512-8TNmfeTCk2Le33A3vRRwtuworG/L5RrgMvdjhKZxvyShO+mBu2fP50OWUjRLNtvw344DdDarFh9buFAZs5ujeA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10925,8 +9427,6 @@
     },
     "node_modules/JSONStream": {
       "version": "1.3.5",
-      "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
-      "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
       "dev": true,
       "license": "(MIT OR Apache-2.0)",
       "dependencies": {
@@ -10942,50 +9442,36 @@
     },
     "node_modules/just-deep-map-values": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/just-deep-map-values/-/just-deep-map-values-1.2.0.tgz",
-      "integrity": "sha512-4vpPBzHHis4UW/EbH5kHZn0gJvKP+EiMpbjD669ZSxdwx+EoAlQLMbLR08SEtydcq/MjDPPtwGiPo9R893iHVA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-diff": {
       "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/just-diff/-/just-diff-6.0.2.tgz",
-      "integrity": "sha512-S59eriX5u3/QhMNq3v/gm8Kd0w8OS6Tz2FS1NG4blv+z0MuQcBRJyFWjdovM0Rad4/P4aUPFtnkNjMjyMlMSYA==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/just-diff-apply": {
       "version": "5.5.0",
-      "resolved": "https://registry.npmjs.org/just-diff-apply/-/just-diff-apply-5.5.0.tgz",
-      "integrity": "sha512-OYTthRfSh55WOItVqwpefPtNt2VdKsq5AnAK6apdtR6yCH8pr0CmSr710J0Mf+WdQy7K/OzMy7K2MgAfdQURDw==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/just-extend": {
       "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz",
-      "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-omit": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/just-omit/-/just-omit-2.2.0.tgz",
-      "integrity": "sha512-Js7+HxDOGcB3RhI38Mird/RgyMf3t0DAJFda1QWqqlAKTa36NeSYIufJXxrZUbysFTRcTOFcoMCiFK5FwCoI7Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-safe-set": {
       "version": "4.2.1",
-      "resolved": "https://registry.npmjs.org/just-safe-set/-/just-safe-set-4.2.1.tgz",
-      "integrity": "sha512-La5CP41Ycv52+E4g7w1sRV8XXk7Sp8a/TwWQAYQKn6RsQz1FD4Z/rDRRmqV3wJznS1MDF3YxK7BCudX1J8FxLg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/keyv": {
       "version": "4.5.4",
-      "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
-      "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10994,8 +9480,6 @@
     },
     "node_modules/kind-of": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
-      "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11004,8 +9488,6 @@
     },
     "node_modules/kleur": {
       "version": "4.1.5",
-      "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
-      "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11014,8 +9496,6 @@
     },
     "node_modules/leven": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
-      "integrity": "sha512-nvVPLpIHUxCUoRLrFqTgSxXJ614d8AgQoWl7zPe/2VadE8+1dpU3LBhowRuBAcuwruWtOdD8oYC9jDNJjXDPyA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11024,8 +9504,6 @@
     },
     "node_modules/levn": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
-      "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11078,8 +9556,6 @@
     },
     "node_modules/libtap": {
       "version": "1.4.1",
-      "resolved": "https://registry.npmjs.org/libtap/-/libtap-1.4.1.tgz",
-      "integrity": "sha512-S9v19shLTigoMn3c02V7LZ4t09zxmVP3r3RbEAwuHFYeKgF+ESFJxoQ0PMFKW4XdgQhcjVBEwDoopG6WROq/gw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11106,8 +9582,6 @@
     },
     "node_modules/libtap/node_modules/diff": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
-      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -11116,8 +9590,6 @@
     },
     "node_modules/libtap/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11129,22 +9601,16 @@
     },
     "node_modules/libtap/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/lines-and-columns": {
       "version": "1.2.4",
-      "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
-      "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/locate-path": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz",
-      "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11159,92 +9625,66 @@
     },
     "node_modules/lodash": {
       "version": "4.17.21",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
-      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.camelcase": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
-      "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.flattendeep": {
       "version": "4.4.0",
-      "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz",
-      "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.ismatch": {
       "version": "4.4.0",
-      "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz",
-      "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.isplainobject": {
       "version": "4.0.6",
-      "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
-      "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.kebabcase": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz",
-      "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.merge": {
       "version": "4.6.2",
-      "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
-      "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.mergewith": {
       "version": "4.6.2",
-      "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz",
-      "integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.snakecase": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz",
-      "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.startcase": {
       "version": "4.4.0",
-      "resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
-      "integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.uniq": {
       "version": "4.5.0",
-      "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
-      "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.upperfirst": {
       "version": "4.3.1",
-      "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz",
-      "integrity": "sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/longest-streak": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
-      "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -11254,15 +9694,11 @@
     },
     "node_modules/lru-cache": {
       "version": "10.4.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
-      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
       "inBundle": true,
       "license": "ISC"
     },
     "node_modules/make-dir": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
-      "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11277,8 +9713,6 @@
     },
     "node_modules/make-dir/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -11287,8 +9721,6 @@
     },
     "node_modules/make-fetch-happen": {
       "version": "15.0.2",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.2.tgz",
-      "integrity": "sha512-sI1NY4lWlXBAfjmCtVWIIpBypbBdhHtcjnwnv+gtCnsaOffyFil3aidszGC8hgzJe+fT1qix05sWxmD/Bmf/oQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -11310,8 +9742,6 @@
     },
     "node_modules/make-fetch-happen/node_modules/@npmcli/agent": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-4.0.0.tgz",
-      "integrity": "sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -11327,8 +9757,6 @@
     },
     "node_modules/make-fetch-happen/node_modules/lru-cache": {
       "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -11337,8 +9765,6 @@
     },
     "node_modules/make-fetch-happen/node_modules/negotiator": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -11347,8 +9773,6 @@
     },
     "node_modules/map-obj": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
-      "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11360,8 +9784,6 @@
     },
     "node_modules/markdown-table": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
-      "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -11371,8 +9793,6 @@
     },
     "node_modules/math-intrinsics": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
-      "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11381,8 +9801,6 @@
     },
     "node_modules/mdast-util-find-and-replace": {
       "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.2.2.tgz",
-      "integrity": "sha512-MTtdFRz/eMDHXzeK6W3dO7mXUlF82Gom4y0oOgvHhh/HXZAGvIQDUvQ0SuUx+j2tv44b8xTHOm8K/9OoRFnXKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11398,8 +9816,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
-      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11411,8 +9827,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11426,8 +9840,6 @@
     },
     "node_modules/mdast-util-from-markdown": {
       "version": "1.3.1",
-      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz",
-      "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11451,8 +9863,6 @@
     },
     "node_modules/mdast-util-gfm": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.2.tgz",
-      "integrity": "sha512-qvZ608nBppZ4icQlhQQIAdc6S3Ffj9RGmzwUKUWuEICFnd1LVkN3EktF7ZHAgfcEdvZB5owU9tQgt99e2TlLjg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11471,8 +9881,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.3.tgz",
-      "integrity": "sha512-My8KJ57FYEy2W2LyNom4n3E7hKTuQk/0SES0u16tjA9Z3oFkF4RrC/hPAPgjlSpezsOvI8ObcXcElo92wn5IGA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11488,8 +9896,6 @@
     },
     "node_modules/mdast-util-gfm-footnote": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-1.0.2.tgz",
-      "integrity": "sha512-56D19KOGbE00uKVj3sgIykpwKL179QsVFwx/DCW0u/0+URsryacI4MAdNJl0dh+u2PSsD9FtxPFbHCzJ78qJFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11504,8 +9910,6 @@
     },
     "node_modules/mdast-util-gfm-strikethrough": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.3.tgz",
-      "integrity": "sha512-DAPhYzTYrRcXdMjUtUjKvW9z/FNAMTdU0ORyMcbmkwYNbKocDpdk+PX1L1dQgOID/+vVs1uBQ7ElrBQfZ0cuiQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11519,8 +9923,6 @@
     },
     "node_modules/mdast-util-gfm-table": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.7.tgz",
-      "integrity": "sha512-jjcpmNnQvrmN5Vx7y7lEc2iIOEytYv7rTvu+MeyAsSHTASGCCRA79Igg2uKssgOs1i1po8s3plW0sTu1wkkLGg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11536,8 +9938,6 @@
     },
     "node_modules/mdast-util-gfm-task-list-item": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.2.tgz",
-      "integrity": "sha512-PFTA1gzfp1B1UaiJVyhJZA1rm0+Tzn690frc/L8vNX1Jop4STZgOE6bxUhnzdVSB+vm2GU1tIsuQcA9bxTQpMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11551,8 +9951,6 @@
     },
     "node_modules/mdast-util-phrasing": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz",
-      "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11566,8 +9964,6 @@
     },
     "node_modules/mdast-util-to-markdown": {
       "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz",
-      "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11587,8 +9983,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11603,8 +9997,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11618,8 +10010,6 @@
     },
     "node_modules/mdast-util-to-string": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz",
-      "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11632,8 +10022,6 @@
     },
     "node_modules/meow": {
       "version": "12.1.1",
-      "resolved": "https://registry.npmjs.org/meow/-/meow-12.1.1.tgz",
-      "integrity": "sha512-BhXM0Au22RwUneMPwSCnyhTOizdWoIEPU9sp0Aqa1PnDMR5Wv2FGXYDjuzJEIX+Eo2Rb8xuYe5jrnm5QowQFkw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11645,8 +10033,6 @@
     },
     "node_modules/micromark": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz",
-      "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==",
       "dev": true,
       "funding": [
         {
@@ -11681,8 +10067,6 @@
     },
     "node_modules/micromark-core-commonmark": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz",
-      "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==",
       "dev": true,
       "funding": [
         {
@@ -11716,8 +10100,6 @@
     },
     "node_modules/micromark-extension-gfm": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-2.0.3.tgz",
-      "integrity": "sha512-vb9OoHqrhCmbRidQv/2+Bc6pkP0FrtlhurxZofvOEy5o8RtuuvTq+RQ1Vw5ZDNrVraQZu3HixESqbG+0iKk/MQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11737,8 +10119,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.5.tgz",
-      "integrity": "sha512-z3wJSLrDf8kRDOh2qBtoTRD53vJ+CWIyo7uyZuxf/JAbNJjiHsOpG1y5wxk8drtv3ETAHutCu6N3thkOOgueWg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11754,8 +10134,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.2.tgz",
-      "integrity": "sha512-Yxn7z7SxgyGWRNa4wzf8AhYYWNrwl5q1Z8ii+CSTTIqVkmGZF1CElX2JI8g5yGoM3GAman9/PVCUFUSJ0kB/8Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11775,8 +10153,6 @@
     },
     "node_modules/micromark-extension-gfm-strikethrough": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.7.tgz",
-      "integrity": "sha512-sX0FawVE1o3abGk3vRjOH50L5TTLr3b5XMqnP9YDRb34M0v5OoZhG+OHFz1OffZ9dlwgpTBKaT4XW/AsUVnSDw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11794,8 +10170,6 @@
     },
     "node_modules/micromark-extension-gfm-table": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.7.tgz",
-      "integrity": "sha512-3ZORTHtcSnMQEKtAOsBQ9/oHp9096pI/UvdPtN7ehKvrmZZ2+bbWhi0ln+I9drmwXMt5boocn6OlwQzNXeVeqw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11812,8 +10186,6 @@
     },
     "node_modules/micromark-extension-gfm-tagfilter": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.2.tgz",
-      "integrity": "sha512-5XWB9GbAUSHTn8VPU8/1DBXMuKYT5uOgEjJb8gN3mW0PNW5OPHpSdojoqf+iq1xo7vWzw/P8bAHY0n6ijpXF7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11826,8 +10198,6 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.5.tgz",
-      "integrity": "sha512-RMFXl2uQ0pNQy6Lun2YBYT9g9INXtWJULgbt01D/x8/6yJ2qpKyzdZD3pi6UIkzF++Da49xAelVKUeUMqd5eIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11844,8 +10214,6 @@
     },
     "node_modules/micromark-factory-destination": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz",
-      "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==",
       "dev": true,
       "funding": [
         {
@@ -11866,8 +10234,6 @@
     },
     "node_modules/micromark-factory-label": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz",
-      "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==",
       "dev": true,
       "funding": [
         {
@@ -11889,8 +10255,6 @@
     },
     "node_modules/micromark-factory-space": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz",
-      "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==",
       "dev": true,
       "funding": [
         {
@@ -11910,8 +10274,6 @@
     },
     "node_modules/micromark-factory-title": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz",
-      "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==",
       "dev": true,
       "funding": [
         {
@@ -11933,8 +10295,6 @@
     },
     "node_modules/micromark-factory-whitespace": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz",
-      "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==",
       "dev": true,
       "funding": [
         {
@@ -11956,8 +10316,6 @@
     },
     "node_modules/micromark-util-character": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz",
-      "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==",
       "dev": true,
       "funding": [
         {
@@ -11977,8 +10335,6 @@
     },
     "node_modules/micromark-util-chunked": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz",
-      "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==",
       "dev": true,
       "funding": [
         {
@@ -11997,8 +10353,6 @@
     },
     "node_modules/micromark-util-classify-character": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz",
-      "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==",
       "dev": true,
       "funding": [
         {
@@ -12019,8 +10373,6 @@
     },
     "node_modules/micromark-util-combine-extensions": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz",
-      "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==",
       "dev": true,
       "funding": [
         {
@@ -12040,8 +10392,6 @@
     },
     "node_modules/micromark-util-decode-numeric-character-reference": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz",
-      "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==",
       "dev": true,
       "funding": [
         {
@@ -12060,8 +10410,6 @@
     },
     "node_modules/micromark-util-decode-string": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz",
-      "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==",
       "dev": true,
       "funding": [
         {
@@ -12083,8 +10431,6 @@
     },
     "node_modules/micromark-util-encode": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz",
-      "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==",
       "dev": true,
       "funding": [
         {
@@ -12100,8 +10446,6 @@
     },
     "node_modules/micromark-util-html-tag-name": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz",
-      "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==",
       "dev": true,
       "funding": [
         {
@@ -12117,8 +10461,6 @@
     },
     "node_modules/micromark-util-normalize-identifier": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz",
-      "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==",
       "dev": true,
       "funding": [
         {
@@ -12137,8 +10479,6 @@
     },
     "node_modules/micromark-util-resolve-all": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz",
-      "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==",
       "dev": true,
       "funding": [
         {
@@ -12157,8 +10497,6 @@
     },
     "node_modules/micromark-util-sanitize-uri": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz",
-      "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==",
       "dev": true,
       "funding": [
         {
@@ -12179,8 +10517,6 @@
     },
     "node_modules/micromark-util-subtokenize": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz",
-      "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==",
       "dev": true,
       "funding": [
         {
@@ -12202,8 +10538,6 @@
     },
     "node_modules/micromark-util-symbol": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz",
-      "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==",
       "dev": true,
       "funding": [
         {
@@ -12219,8 +10553,6 @@
     },
     "node_modules/micromark-util-types": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz",
-      "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==",
       "dev": true,
       "funding": [
         {
@@ -12236,8 +10568,6 @@
     },
     "node_modules/mime-db": {
       "version": "1.52.0",
-      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
-      "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12246,8 +10576,6 @@
     },
     "node_modules/mime-types": {
       "version": "2.1.35",
-      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
-      "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12259,8 +10587,6 @@
     },
     "node_modules/min-indent": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz",
-      "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12269,8 +10595,6 @@
     },
     "node_modules/minify-registry-metadata": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/minify-registry-metadata/-/minify-registry-metadata-4.0.0.tgz",
-      "integrity": "sha512-dWVW3TmMejEOKNwQ09iPCyVf6+kgtG9E3806YZYY4URy5o1dSb1cAn8aUe5zOgvOyrVKLfIHt9fSsXGyhwVsgA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -12279,8 +10603,6 @@
     },
     "node_modules/minimatch": {
       "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12295,8 +10617,6 @@
     },
     "node_modules/minimist": {
       "version": "1.2.8",
-      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
-      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -12305,8 +10625,6 @@
     },
     "node_modules/minimist-options": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz",
-      "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12320,8 +10638,6 @@
     },
     "node_modules/minimist-options/node_modules/is-plain-obj": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz",
-      "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12330,8 +10646,6 @@
     },
     "node_modules/minipass": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz",
-      "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -12340,8 +10654,6 @@
     },
     "node_modules/minipass-collect": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/minipass-collect/-/minipass-collect-2.0.1.tgz",
-      "integrity": "sha512-D7V8PO9oaz7PWGLbCACuI1qEOsq7UKfLotx/C0Aet43fCUB/wfQ7DYeq2oR/svFJGYDHPr38SHATeaj/ZoKHKw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12353,8 +10665,6 @@
     },
     "node_modules/minipass-fetch": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-4.0.1.tgz",
-      "integrity": "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -12371,8 +10681,6 @@
     },
     "node_modules/minipass-fetch/node_modules/minizlib": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -12384,8 +10692,6 @@
     },
     "node_modules/minipass-flush": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/minipass-flush/-/minipass-flush-1.0.5.tgz",
-      "integrity": "sha512-JmQSYYpPUqX5Jyn1mXaRwOda1uQ8HP5KAT/oDSLCzt1BYRhQU0/hDtsB1ufZfEEzMZ9aAVmsBw8+FWsIXlClWw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12397,8 +10703,6 @@
     },
     "node_modules/minipass-flush/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12410,8 +10714,6 @@
     },
     "node_modules/minipass-pipeline": {
       "version": "1.2.4",
-      "resolved": "https://registry.npmjs.org/minipass-pipeline/-/minipass-pipeline-1.2.4.tgz",
-      "integrity": "sha512-xuIq7cIOt09RPRJ19gdi4b+RiNvDFYe5JH+ggNvBqGqpQXcru3PcRmOZuHBKWK1Txf9+cQ+HMVN4d6z46LZP7A==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12423,8 +10725,6 @@
     },
     "node_modules/minipass-pipeline/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12436,8 +10736,6 @@
     },
     "node_modules/minipass-sized": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz",
-      "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12449,8 +10747,6 @@
     },
     "node_modules/minipass-sized/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12462,8 +10758,6 @@
     },
     "node_modules/minizlib": {
       "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
-      "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -12476,8 +10770,6 @@
     },
     "node_modules/minizlib/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12489,8 +10781,6 @@
     },
     "node_modules/mkdirp": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
-      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
       "inBundle": true,
       "license": "MIT",
       "bin": {
@@ -12502,8 +10792,6 @@
     },
     "node_modules/modify-values": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz",
-      "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12512,8 +10800,6 @@
     },
     "node_modules/months": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/months/-/months-2.1.0.tgz",
-      "integrity": "sha512-2M9gdDB/uVt304/hJ3k2UIquJhOV5dRjp9BovHmZSINaRp7pdJuHXxOcuSjmJaKNomFyYyu0y3LBigdWiAUEmQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12522,15 +10808,11 @@
     },
     "node_modules/moo": {
       "version": "0.5.2",
-      "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.2.tgz",
-      "integrity": "sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q==",
       "dev": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/mri": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
-      "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12539,15 +10821,11 @@
     },
     "node_modules/ms": {
       "version": "2.1.3",
-      "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
-      "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/mute-stream": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-2.0.0.tgz",
-      "integrity": "sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -12556,15 +10834,11 @@
     },
     "node_modules/natural-compare": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
-      "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/nearley": {
       "version": "2.20.1",
-      "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.20.1.tgz",
-      "integrity": "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12586,8 +10860,6 @@
     },
     "node_modules/negotiator": {
       "version": "0.6.4",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
-      "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12596,15 +10868,11 @@
     },
     "node_modules/neo-async": {
       "version": "2.6.2",
-      "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
-      "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/nock": {
       "version": "13.5.6",
-      "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz",
-      "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12618,8 +10886,6 @@
     },
     "node_modules/node-fetch": {
       "version": "2.7.0",
-      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
-      "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12639,8 +10905,6 @@
     },
     "node_modules/node-gyp": {
       "version": "11.2.0",
-      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-11.2.0.tgz",
-      "integrity": "sha512-T0S1zqskVUSxcsSTkAsLc7xCycrRYmtDHadDinzocrThjyQCn5kMlEBSj6H4qDbgsIOSLmmlRIeb0lZXj+UArA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -12664,8 +10928,6 @@
     },
     "node_modules/node-gyp/node_modules/cacache": {
       "version": "19.0.1",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz",
-      "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12688,8 +10950,6 @@
     },
     "node_modules/node-gyp/node_modules/chownr": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
-      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "engines": {
@@ -12698,8 +10958,6 @@
     },
     "node_modules/node-gyp/node_modules/make-fetch-happen": {
       "version": "14.0.3",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz",
-      "integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12721,8 +10979,6 @@
     },
     "node_modules/node-gyp/node_modules/minizlib": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -12734,8 +10990,6 @@
     },
     "node_modules/node-gyp/node_modules/mkdirp": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
-      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
       "inBundle": true,
       "license": "MIT",
       "bin": {
@@ -12750,8 +11004,6 @@
     },
     "node_modules/node-gyp/node_modules/negotiator": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -12760,8 +11012,6 @@
     },
     "node_modules/node-gyp/node_modules/tar": {
       "version": "7.4.3",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
-      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12778,8 +11028,6 @@
     },
     "node_modules/node-gyp/node_modules/yallist": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
-      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "engines": {
@@ -12788,8 +11036,6 @@
     },
     "node_modules/node-html-parser": {
       "version": "6.1.13",
-      "resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.13.tgz",
-      "integrity": "sha512-qIsTMOY4C/dAa5Q5vsobRpOOvPfC4pB61UVW2uSwZNUp0QU/jCekTal1vMmbO0DgdHeLUJpv/ARmDqErVxA3Sg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12799,8 +11045,6 @@
     },
     "node_modules/node-preload": {
       "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz",
-      "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12812,15 +11056,11 @@
     },
     "node_modules/node-releases": {
       "version": "2.0.19",
-      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.19.tgz",
-      "integrity": "sha512-xxOWJsBKtzAq7DY0J+DTzuz58K8e7sJbdgwkbMWQe8UYB6ekmsQ45q0M/tJDsGaZmbC+l7n57UV8Hl5tHxO9uw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/nopt": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz",
-      "integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12835,8 +11075,6 @@
     },
     "node_modules/normalize-package-data": {
       "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-8.0.0.tgz",
-      "integrity": "sha512-RWk+PI433eESQ7ounYxIp67CYuVsS1uYSonX3kA6ps/3LWfjVQa/ptEg6Y3T6uAMq1mWpX9PQ+qx+QaHpsc7gQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -12850,8 +11088,6 @@
     },
     "node_modules/normalize-path": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
-      "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12860,8 +11096,6 @@
     },
     "node_modules/npm-audit-report": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/npm-audit-report/-/npm-audit-report-6.0.0.tgz",
-      "integrity": "sha512-Ag6Y1irw/+CdSLqEEAn69T8JBgBThj5mw0vuFIKeP7hATYuQuS5jkMjK6xmVB8pr7U4g5Audbun0lHhBDMIBRA==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -12870,8 +11104,6 @@
     },
     "node_modules/npm-bundled": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-4.0.0.tgz",
-      "integrity": "sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12883,8 +11115,6 @@
     },
     "node_modules/npm-install-checks": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.1.tgz",
-      "integrity": "sha512-u6DCwbow5ynAX5BdiHQ9qvexme4U3qHW3MWe5NqH+NeBm0LbiH6zvGjNNew1fY+AZZUtVHbOPF3j7mJxbUzpXg==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -12896,8 +11126,6 @@
     },
     "node_modules/npm-normalize-package-bin": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
-      "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -12906,8 +11134,6 @@
     },
     "node_modules/npm-package-arg": {
       "version": "13.0.0",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-13.0.0.tgz",
-      "integrity": "sha512-+t2etZAGcB7TbbLHfDwooV9ppB2LhhcT6A+L9cahsf9mEUAoQ6CktLEVvEnpD0N5CkX7zJqnPGaFtoQDy9EkHQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12922,8 +11148,6 @@
     },
     "node_modules/npm-packlist": {
       "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-10.0.1.tgz",
-      "integrity": "sha512-vaC03b2PqJA6QqmwHi1jNU8fAPXEnnyv4j/W4PVfgm24C4/zZGSVut3z0YUeN0WIFCo1oGOL02+6LbvFK7JL4Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12935,8 +11159,6 @@
     },
     "node_modules/npm-packlist/node_modules/ignore-walk": {
       "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-8.0.0.tgz",
-      "integrity": "sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12948,8 +11170,6 @@
     },
     "node_modules/npm-packlist/node_modules/minimatch": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12964,8 +11184,6 @@
     },
     "node_modules/npm-pick-manifest": {
       "version": "11.0.1",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.1.tgz",
-      "integrity": "sha512-HnU7FYSWbo7dTVHtK0G+BXbZ0aIfxz/aUCVLN0979Ec6rGUX5cJ6RbgVx5fqb5G31ufz+BVFA7y1SkRTPVNoVQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12980,8 +11198,6 @@
     },
     "node_modules/npm-profile": {
       "version": "12.0.0",
-      "resolved": "https://registry.npmjs.org/npm-profile/-/npm-profile-12.0.0.tgz",
-      "integrity": "sha512-ZrtDFhNpLCcH7b7kQIpegK4Bt66DpkHojcWdm41/qie+i9dYg2Mc+BenwHVnfjNnw8/bpYuBj8wf+6iI4GoF+g==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -12994,8 +11210,6 @@
     },
     "node_modules/npm-registry-fetch": {
       "version": "19.0.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.0.0.tgz",
-      "integrity": "sha512-DFxSAemHUwT/POaXAOY4NJmEWBPB0oKbwD6FFDE9hnt1nORkt/FXvgjD4hQjoKoHw9u0Ezws9SPXwV7xE/Gyww==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -13014,8 +11228,6 @@
     },
     "node_modules/npm-registry-fetch/node_modules/minizlib": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -13027,8 +11239,6 @@
     },
     "node_modules/npm-user-validate": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/npm-user-validate/-/npm-user-validate-3.0.0.tgz",
-      "integrity": "sha512-9xi0RdSmJ4mPYTC393VJPz1Sp8LyCx9cUnm/L9Qcb3cFO8gjT4mN20P9FAsea8qDHdQ7LtcN8VLh2UT47SdKCw==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -13037,8 +11247,6 @@
     },
     "node_modules/nth-check": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
-      "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -13050,15 +11258,11 @@
     },
     "node_modules/nwsapi": {
       "version": "2.2.20",
-      "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.20.tgz",
-      "integrity": "sha512-/ieB+mDe4MrrKMT8z+mQL8klXydZWGR5Dowt4RAGKbJ3kIGEx3X4ljUo+6V73IXtUPWgfOlU5B9MlGxFO5T+cA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/nyc": {
       "version": "15.1.0",
-      "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz",
-      "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13099,8 +11303,6 @@
     },
     "node_modules/nyc/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13115,8 +11317,6 @@
     },
     "node_modules/nyc/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13126,8 +11326,6 @@
     },
     "node_modules/nyc/node_modules/cliui": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
-      "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13138,8 +11336,6 @@
     },
     "node_modules/nyc/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13152,8 +11348,6 @@
     },
     "node_modules/nyc/node_modules/foreground-child": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
-      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13166,9 +11360,6 @@
     },
     "node_modules/nyc/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13188,8 +11379,6 @@
     },
     "node_modules/nyc/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13201,8 +11390,6 @@
     },
     "node_modules/nyc/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13214,8 +11401,6 @@
     },
     "node_modules/nyc/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13230,8 +11415,6 @@
     },
     "node_modules/nyc/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13243,8 +11426,6 @@
     },
     "node_modules/nyc/node_modules/p-map": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
-      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13256,8 +11437,6 @@
     },
     "node_modules/nyc/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13266,9 +11445,6 @@
     },
     "node_modules/nyc/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13283,15 +11459,11 @@
     },
     "node_modules/nyc/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/nyc/node_modules/wrap-ansi": {
       "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
-      "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13305,15 +11477,11 @@
     },
     "node_modules/nyc/node_modules/y18n": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
-      "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/nyc/node_modules/yargs": {
       "version": "15.4.1",
-      "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
-      "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13335,8 +11503,6 @@
     },
     "node_modules/nyc/node_modules/yargs-parser": {
       "version": "18.1.3",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
-      "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13349,8 +11515,6 @@
     },
     "node_modules/object-inspect": {
       "version": "1.13.4",
-      "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
-      "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13362,8 +11526,6 @@
     },
     "node_modules/object-keys": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
-      "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13372,8 +11534,6 @@
     },
     "node_modules/object.assign": {
       "version": "4.1.7",
-      "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz",
-      "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13393,8 +11553,6 @@
     },
     "node_modules/object.fromentries": {
       "version": "2.0.8",
-      "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz",
-      "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13412,8 +11570,6 @@
     },
     "node_modules/object.groupby": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz",
-      "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13427,8 +11583,6 @@
     },
     "node_modules/object.values": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz",
-      "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13446,8 +11600,6 @@
     },
     "node_modules/once": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
-      "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13456,8 +11608,6 @@
     },
     "node_modules/opener": {
       "version": "1.5.2",
-      "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz",
-      "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==",
       "dev": true,
       "license": "(WTFPL OR MIT)",
       "bin": {
@@ -13466,8 +11616,6 @@
     },
     "node_modules/optionator": {
       "version": "0.9.4",
-      "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
-      "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13484,8 +11632,6 @@
     },
     "node_modules/own-keys": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz",
-      "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13502,15 +11648,11 @@
     },
     "node_modules/own-or": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz",
-      "integrity": "sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/own-or-env": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.2.tgz",
-      "integrity": "sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13519,8 +11661,6 @@
     },
     "node_modules/p-limit": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz",
-      "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13535,8 +11675,6 @@
     },
     "node_modules/p-locate": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz",
-      "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13551,8 +11689,6 @@
     },
     "node_modules/p-map": {
       "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-7.0.3.tgz",
-      "integrity": "sha512-VkndIv2fIB99swvQoA65bm+fsmt6UNdGeIB0oxBs+WhAhdh08QA04JXpI7rbB9r08/nkbysKoya9rtDERYOYMA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -13564,8 +11700,6 @@
     },
     "node_modules/p-try": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
-      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13574,8 +11708,6 @@
     },
     "node_modules/package-hash": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz",
-      "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13590,15 +11722,11 @@
     },
     "node_modules/package-json-from-dist": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz",
-      "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==",
       "inBundle": true,
       "license": "BlueOak-1.0.0"
     },
     "node_modules/pacote": {
       "version": "21.0.3",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.3.tgz",
-      "integrity": "sha512-itdFlanxO0nmQv4ORsvA9K1wv40IPfB9OmWqfaJWvoJ30VKyHsqNgDVeG+TVhI7Gk7XW8slUy7cA9r6dF5qohw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -13627,50 +11755,16 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/@npmcli/git": {
-      "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.0.tgz",
-      "integrity": "sha512-vnz7BVGtOctJAIHouCJdvWBhsTVSICMeUgZo2c7XAi5d5Rrl80S1H7oPym7K03cRuinK5Q6s2dw36+PgXQTcMA==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^8.0.0",
-        "ini": "^5.0.0",
-        "lru-cache": "^11.2.1",
-        "npm-pick-manifest": "^11.0.1",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^5.0.0"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/pacote/node_modules/chownr": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
-      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "engines": {
         "node": ">=18"
       }
     },
-    "node_modules/pacote/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.1.tgz",
-      "integrity": "sha512-r8LA6i4LP4EeWOhqBaZZjDWwehd1xUJPCJd9Sv300H0ZmcUER4+JPh7bqqZeqs1o5pgtgvXm+d9UGrB5zZGDiQ==",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/pacote/node_modules/minizlib": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -13682,8 +11776,6 @@
     },
     "node_modules/pacote/node_modules/mkdirp": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
-      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
       "inBundle": true,
       "license": "MIT",
       "bin": {
@@ -13698,8 +11790,6 @@
     },
     "node_modules/pacote/node_modules/tar": {
       "version": "7.4.3",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-7.4.3.tgz",
-      "integrity": "sha512-5S7Va8hKfV7W5U6g3aYxXmlPoZVAwUMy9AOKyF2fVuZa2UD3qZjg578OrLRt8PcNN1PleVaL/5/yYATNL0ICUw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -13716,8 +11806,6 @@
     },
     "node_modules/pacote/node_modules/yallist": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
-      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "engines": {
@@ -13726,8 +11814,6 @@
     },
     "node_modules/parent-module": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
-      "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13739,8 +11825,6 @@
     },
     "node_modules/parse-conflict-json": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-4.0.0.tgz",
-      "integrity": "sha512-37CN2VtcuvKgHUs8+0b1uJeEsbGn61GRHz469C94P5xiOoqpDYJYwjg4RY9Vmz39WyZAVkR5++nbJwLMIgOCnQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -13754,22 +11838,16 @@
     },
     "node_modules/parse-diff": {
       "version": "0.11.1",
-      "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.1.tgz",
-      "integrity": "sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse-github-repo-url": {
       "version": "1.4.1",
-      "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz",
-      "integrity": "sha512-bSWyzBKqcSL4RrncTpGsEKoJ7H8a4L3++ifTAbTFeMHyq2wRV+42DGmQcHIrJIvdcacjIOxEuKH/w4tthF17gg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse-json": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
-      "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13787,15 +11865,11 @@
     },
     "node_modules/parse-json/node_modules/json-parse-even-better-errors": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
-      "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse5": {
       "version": "7.3.0",
-      "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
-      "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13807,8 +11881,6 @@
     },
     "node_modules/parse5/node_modules/entities": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
-      "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -13820,8 +11892,6 @@
     },
     "node_modules/path-exists": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz",
-      "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13830,8 +11900,6 @@
     },
     "node_modules/path-is-absolute": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
-      "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13840,8 +11908,6 @@
     },
     "node_modules/path-key": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz",
-      "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -13850,15 +11916,11 @@
     },
     "node_modules/path-parse": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
-      "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/path-scurry": {
       "version": "1.11.1",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
-      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -13874,15 +11936,11 @@
     },
     "node_modules/picocolors": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
-      "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/picomatch": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
-      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13894,8 +11952,6 @@
     },
     "node_modules/pkg-dir": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
-      "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13907,8 +11963,6 @@
     },
     "node_modules/pkg-dir/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13921,8 +11975,6 @@
     },
     "node_modules/pkg-dir/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13934,8 +11986,6 @@
     },
     "node_modules/pkg-dir/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13950,8 +12000,6 @@
     },
     "node_modules/pkg-dir/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13963,8 +12011,6 @@
     },
     "node_modules/pkg-dir/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13973,15 +12019,11 @@
     },
     "node_modules/platform": {
       "version": "1.3.6",
-      "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz",
-      "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/possible-typed-array-names": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz",
-      "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13990,8 +12032,6 @@
     },
     "node_modules/postcss-selector-parser": {
       "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz",
-      "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==",
       "license": "MIT",
       "dependencies": {
         "cssesc": "^3.0.0",
@@ -14003,8 +12043,6 @@
     },
     "node_modules/prelude-ls": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
-      "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14013,8 +12051,6 @@
     },
     "node_modules/proc-log": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
-      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -14023,8 +12059,6 @@
     },
     "node_modules/process-on-spawn": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.1.0.tgz",
-      "integrity": "sha512-JOnOPQ/8TZgjs1JIH/m9ni7FfimjNa/PRx7y/Wb5qdItsnhO0jE4AT7fC0HjC28DUQWDr50dwSYZLdRMlqDq3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14036,8 +12070,6 @@
     },
     "node_modules/proggy": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/proggy/-/proggy-3.0.0.tgz",
-      "integrity": "sha512-QE8RApCM3IaRRxVzxrjbgNMpQEX6Wu0p0KBeoSiSEw5/bsGwZHsshF4LCxH2jp/r6BU+bqA3LrMDEYNfJnpD8Q==",
       "license": "ISC",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
@@ -14045,8 +12077,6 @@
     },
     "node_modules/promise-all-reject-late": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/promise-all-reject-late/-/promise-all-reject-late-1.0.1.tgz",
-      "integrity": "sha512-vuf0Lf0lOxyQREH7GDIOUMLS7kz+gs8i6B+Yi8dC68a2sychGrHTJYghMBD6k7eUcH0H5P73EckCA48xijWqXw==",
       "license": "ISC",
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
@@ -14054,8 +12084,6 @@
     },
     "node_modules/promise-call-limit": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/promise-call-limit/-/promise-call-limit-3.0.2.tgz",
-      "integrity": "sha512-mRPQO2T1QQVw11E7+UdCJu7S61eJVWknzml9sC1heAdj1jxl0fWMBypIt9ZOcLFf8FkG995ZD7RnVk7HH72fZw==",
       "license": "ISC",
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
@@ -14070,8 +12098,6 @@
     },
     "node_modules/promise-retry": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz",
-      "integrity": "sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -14084,8 +12110,6 @@
     },
     "node_modules/promzard": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/promzard/-/promzard-2.0.0.tgz",
-      "integrity": "sha512-Ncd0vyS2eXGOjchIRg6PVCYKetJYrW1BSbbIo+bKdig61TB6nH2RQNF2uP+qMpsI73L/jURLWojcw8JNIKZ3gg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -14097,8 +12121,6 @@
     },
     "node_modules/propagate": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz",
-      "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14107,8 +12129,6 @@
     },
     "node_modules/property-information": {
       "version": "6.5.0",
-      "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz",
-      "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -14118,8 +12138,6 @@
     },
     "node_modules/proxy": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/proxy/-/proxy-2.2.0.tgz",
-      "integrity": "sha512-nYclNIWj9UpXbVJ3W5EXIYiGR88AKZoGt90kyh3zoOBY5QW+7bbtPvMFgKGD4VJmpS3UXQXtlGXSg3lRNLOFLg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14136,8 +12154,6 @@
     },
     "node_modules/psl": {
       "version": "1.15.0",
-      "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz",
-      "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14149,8 +12165,6 @@
     },
     "node_modules/punycode": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
-      "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14159,8 +12173,6 @@
     },
     "node_modules/qrcode-terminal": {
       "version": "0.12.0",
-      "resolved": "https://registry.npmjs.org/qrcode-terminal/-/qrcode-terminal-0.12.0.tgz",
-      "integrity": "sha512-EXtzRZmC+YGmGlDFbXKxQiMZNwCLEO6BANKXG4iCtSIM0yqc/pappSx3RIKr4r0uh5JsBckOXeKrB3Iz7mdQpQ==",
       "inBundle": true,
       "bin": {
         "qrcode-terminal": "bin/qrcode-terminal.js"
@@ -14168,15 +12180,11 @@
     },
     "node_modules/querystringify": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
-      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
-      "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
       "dev": true,
       "funding": [
         {
@@ -14196,8 +12204,6 @@
     },
     "node_modules/quick-lru": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz",
-      "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14206,15 +12212,11 @@
     },
     "node_modules/railroad-diagrams": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz",
-      "integrity": "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A==",
       "dev": true,
       "license": "CC0-1.0"
     },
     "node_modules/randexp": {
       "version": "0.4.6",
-      "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.4.6.tgz",
-      "integrity": "sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14227,8 +12229,6 @@
     },
     "node_modules/read": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/read/-/read-4.1.0.tgz",
-      "integrity": "sha512-uRfX6K+f+R8OOrYScaM3ixPY4erg69f8DN6pgTvMcA9iRc8iDhwrA4m3Yu8YYKsXJgVvum+m8PkRboZwwuLzYA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -14240,8 +12240,6 @@
     },
     "node_modules/read-cmd-shim": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-5.0.0.tgz",
-      "integrity": "sha512-SEbJV7tohp3DAAILbEMPXavBjAnMN0tVnh4+9G8ihV4Pq3HYF9h8QNez9zkJ1ILkv9G2BjdzwctznGZXgu/HGw==",
       "license": "ISC",
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
@@ -14249,8 +12247,6 @@
     },
     "node_modules/read-pkg": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz",
-      "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14265,8 +12261,6 @@
     },
     "node_modules/read-pkg-up": {
       "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz",
-      "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14283,8 +12277,6 @@
     },
     "node_modules/read-pkg-up/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14297,8 +12289,6 @@
     },
     "node_modules/read-pkg-up/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14310,8 +12300,6 @@
     },
     "node_modules/read-pkg-up/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14326,8 +12314,6 @@
     },
     "node_modules/read-pkg-up/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14339,8 +12325,6 @@
     },
     "node_modules/read-pkg-up/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14349,8 +12333,6 @@
     },
     "node_modules/read-pkg-up/node_modules/type-fest": {
       "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
-      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -14359,15 +12341,11 @@
     },
     "node_modules/read-pkg/node_modules/hosted-git-info": {
       "version": "2.8.9",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
-      "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/read-pkg/node_modules/normalize-package-data": {
       "version": "2.5.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
-      "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -14379,8 +12357,6 @@
     },
     "node_modules/read-pkg/node_modules/semver": {
       "version": "5.7.2",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
-      "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -14389,8 +12365,6 @@
     },
     "node_modules/read-pkg/node_modules/type-fest": {
       "version": "0.6.0",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz",
-      "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -14399,8 +12373,6 @@
     },
     "node_modules/readdirp": {
       "version": "3.6.0",
-      "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
-      "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14412,8 +12384,6 @@
     },
     "node_modules/redent": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz",
-      "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14426,8 +12396,6 @@
     },
     "node_modules/reflect.getprototypeof": {
       "version": "1.0.10",
-      "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz",
-      "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14449,8 +12417,6 @@
     },
     "node_modules/regexp.prototype.flags": {
       "version": "1.5.4",
-      "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz",
-      "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14470,8 +12436,6 @@
     },
     "node_modules/regexpp": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
-      "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14483,8 +12447,6 @@
     },
     "node_modules/release-please": {
       "version": "16.15.0",
-      "resolved": "https://registry.npmjs.org/release-please/-/release-please-16.15.0.tgz",
-      "integrity": "sha512-C55PsUOMzAbPSrdqF/KKAqhaYVRGlarNNWgW/DyAsg15U4g/TkxXVpEZqAV1o38CoEoKhssnKTGnb5/eT4/DUw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -14529,8 +12491,6 @@
     },
     "node_modules/release-please/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14545,8 +12505,6 @@
     },
     "node_modules/release-please/node_modules/chalk": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
-      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14562,8 +12520,6 @@
     },
     "node_modules/release-please/node_modules/conventional-changelog-conventionalcommits": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-6.1.0.tgz",
-      "integrity": "sha512-3cS3GEtR78zTfMzk0AizXKKIdN4OvSh7ibNz6/DPbhWWQu7LqE/8+/GqSodV+sywUR2gpJAdP/1JFf4XtN7Zpw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14575,8 +12531,6 @@
     },
     "node_modules/release-please/node_modules/has-flag": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
-      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14585,8 +12539,6 @@
     },
     "node_modules/release-please/node_modules/supports-color": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
-      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14598,8 +12550,6 @@
     },
     "node_modules/release-please/node_modules/type-fest": {
       "version": "3.13.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz",
-      "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -14611,8 +12561,6 @@
     },
     "node_modules/release-please/node_modules/typescript": {
       "version": "4.9.5",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
-      "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -14625,8 +12573,6 @@
     },
     "node_modules/release-zalgo": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz",
-      "integrity": "sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14638,8 +12584,6 @@
     },
     "node_modules/remark": {
       "version": "14.0.3",
-      "resolved": "https://registry.npmjs.org/remark/-/remark-14.0.3.tgz",
-      "integrity": "sha512-bfmJW1dmR2LvaMJuAnE88pZP9DktIFYXazkTfOIKZzi3Knk9lT0roItIA24ydOucI3bV/g/tXBA6hzqq3FV9Ew==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14655,8 +12599,6 @@
     },
     "node_modules/remark-gfm": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-3.0.1.tgz",
-      "integrity": "sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14672,8 +12614,6 @@
     },
     "node_modules/remark-github": {
       "version": "11.2.4",
-      "resolved": "https://registry.npmjs.org/remark-github/-/remark-github-11.2.4.tgz",
-      "integrity": "sha512-GJjWFpwqdrHHhPWqMbb8+lqFLiHQ9pCzUmXmRrhMFXGpYov5n2ljsZzuWgXlfzArfQYkiKIZczA2I8IHYMHqCA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14690,8 +12630,6 @@
     },
     "node_modules/remark-github/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14706,8 +12644,6 @@
     },
     "node_modules/remark-github/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14721,8 +12657,6 @@
     },
     "node_modules/remark-parse": {
       "version": "10.0.2",
-      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.2.tgz",
-      "integrity": "sha512-3ydxgHa/ZQzG8LvC7jTXccARYDcRld3VfcgIIFs7bI6vbRSxJJmzgLEIIoYKyrfhaY+ujuWaf/PJiMZXoiCXgw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14737,8 +12671,6 @@
     },
     "node_modules/remark-stringify": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-10.0.3.tgz",
-      "integrity": "sha512-koyOzCMYoUHudypbj4XpnAKFbkddRMYZHwghnxd7ue5210WzGw6kOBwauJTRUMq16jsovXx8dYNvSSWP89kZ3A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14753,8 +12685,6 @@
     },
     "node_modules/require-directory": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
-      "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14763,8 +12693,6 @@
     },
     "node_modules/require-from-string": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
-      "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14773,8 +12701,6 @@
     },
     "node_modules/require-inject": {
       "version": "1.4.4",
-      "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.4.tgz",
-      "integrity": "sha512-5Y5ctRN84+I4iOZO61gm+48tgP/6Hcd3VZydkaEM3MCuOvnHRsTJYQBOc01faI/Z9at5nsCAJVHhlfPA6Pc0Og==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14783,22 +12709,16 @@
     },
     "node_modules/require-main-filename": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
-      "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/requires-port": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
-      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/resolve": {
       "version": "1.22.10",
-      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
-      "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14818,8 +12738,6 @@
     },
     "node_modules/resolve-from": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
-      "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14828,8 +12746,6 @@
     },
     "node_modules/ret": {
       "version": "0.1.15",
-      "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz",
-      "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14838,8 +12754,6 @@
     },
     "node_modules/retry": {
       "version": "0.12.0",
-      "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz",
-      "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -14848,8 +12762,6 @@
     },
     "node_modules/reusify": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
-      "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14859,8 +12771,6 @@
     },
     "node_modules/rimraf": {
       "version": "5.0.10",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-5.0.10.tgz",
-      "integrity": "sha512-l0OE8wL34P4nJH/H2ffoaniAokM2qSmrtXHmlpvYr5AVVX8msAyW0l8NVJFDxlSK4u3Uh/f41cQheDVdnYijwQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14875,15 +12785,11 @@
     },
     "node_modules/rrweb-cssom": {
       "version": "0.7.1",
-      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.7.1.tgz",
-      "integrity": "sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/run-parallel": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
-      "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
       "dev": true,
       "funding": [
         {
@@ -14906,8 +12812,6 @@
     },
     "node_modules/sade": {
       "version": "1.8.1",
-      "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz",
-      "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14919,8 +12823,6 @@
     },
     "node_modules/safe-array-concat": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz",
-      "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14939,8 +12841,6 @@
     },
     "node_modules/safe-push-apply": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz",
-      "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14956,8 +12856,6 @@
     },
     "node_modules/safe-regex-test": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz",
-      "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14974,16 +12872,12 @@
     },
     "node_modules/safer-buffer": {
       "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
-      "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
       "devOptional": true,
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/saxes": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz",
-      "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14995,8 +12889,6 @@
     },
     "node_modules/schemes": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/schemes/-/schemes-1.4.0.tgz",
-      "integrity": "sha512-ImFy9FbCsQlVgnE3TCWmLPCFnVzx0lHL/l+umHplDqAKd0dzFpnS6lFZIpagBlYhKwzVmlV36ec0Y1XTu8JBAQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15005,8 +12897,6 @@
     },
     "node_modules/semver": {
       "version": "7.7.2",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
-      "integrity": "sha512-RF0Fw+rO5AMf9MAyaRXI4AV0Ulj5lMHqVxxdSgiVbixSCXoEmmX/jk0CuJw4+3SqroYO9VoUh+HcuJivvtJemA==",
       "inBundle": true,
       "license": "ISC",
       "bin": {
@@ -15018,15 +12908,11 @@
     },
     "node_modules/set-blocking": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
-      "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/set-function-length": {
       "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
-      "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15043,8 +12929,6 @@
     },
     "node_modules/set-function-name": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz",
-      "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15059,8 +12943,6 @@
     },
     "node_modules/set-proto": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz",
-      "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15074,8 +12956,6 @@
     },
     "node_modules/shebang-command": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz",
-      "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15087,8 +12967,6 @@
     },
     "node_modules/shebang-regex": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz",
-      "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -15097,8 +12975,6 @@
     },
     "node_modules/side-channel": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
-      "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15117,8 +12993,6 @@
     },
     "node_modules/side-channel-list": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
-      "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15134,8 +13008,6 @@
     },
     "node_modules/side-channel-map": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
-      "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15153,8 +13025,6 @@
     },
     "node_modules/side-channel-weakmap": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
-      "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15173,8 +13043,6 @@
     },
     "node_modules/signal-exit": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz",
-      "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -15186,8 +13054,6 @@
     },
     "node_modules/sigstore": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.0.0.tgz",
-      "integrity": "sha512-Gw/FgHtrLM9WP8P5lLcSGh9OQcrTruWCELAiS48ik1QbL0cH+dfjomiRTUE9zzz+D1N6rOLkwXUvVmXZAsNE0Q==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -15204,8 +13070,6 @@
     },
     "node_modules/smart-buffer": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz",
-      "integrity": "sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -15215,8 +13079,6 @@
     },
     "node_modules/smtp-address-parser": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/smtp-address-parser/-/smtp-address-parser-1.1.0.tgz",
-      "integrity": "sha512-Gz11jbNU0plrReU9Sj7fmshSBxxJ9ShdD2q4ktHIHo/rpTH6lFyQoYHYKINPJtPe8aHFnsbtW46Ls0tCCBsIZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15228,8 +13090,6 @@
     },
     "node_modules/socks": {
       "version": "2.8.6",
-      "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.6.tgz",
-      "integrity": "sha512-pe4Y2yzru68lXCb38aAqRf5gvN8YdjP1lok5o0J7BOHljkyCGKVz7H3vpVIXKD27rj2giOJ7DwVyk/GWrPHDWA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15243,8 +13103,6 @@
     },
     "node_modules/socks-proxy-agent": {
       "version": "8.0.5",
-      "resolved": "https://registry.npmjs.org/socks-proxy-agent/-/socks-proxy-agent-8.0.5.tgz",
-      "integrity": "sha512-HehCEsotFqbPW9sJ8WVYB6UbmIMv7kUUORIF2Nncq4VQvBfNBLibW9YZR5dlYCSUhwcD628pRllm7n+E+YTzJw==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15258,8 +13116,6 @@
     },
     "node_modules/source-map": {
       "version": "0.6.1",
-      "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
-      "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -15268,8 +13124,6 @@
     },
     "node_modules/source-map-support": {
       "version": "0.5.21",
-      "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
-      "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15279,8 +13133,6 @@
     },
     "node_modules/space-separated-tokens": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz",
-      "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -15290,8 +13142,6 @@
     },
     "node_modules/spawk": {
       "version": "1.8.2",
-      "resolved": "https://registry.npmjs.org/spawk/-/spawk-1.8.2.tgz",
-      "integrity": "sha512-3Dl+ekoMHRvXo+Xc3EUSnjySawnc9SpkaBuA3kU2wYiuSEAIYB4b5cGjvmq5olexBsO/fCLZUKHjSMQlzSU4Ww==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15300,8 +13150,6 @@
     },
     "node_modules/spawn-wrap": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz",
-      "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15318,8 +13166,6 @@
     },
     "node_modules/spawn-wrap/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15329,8 +13175,6 @@
     },
     "node_modules/spawn-wrap/node_modules/foreground-child": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
-      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15343,9 +13187,6 @@
     },
     "node_modules/spawn-wrap/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15365,8 +13206,6 @@
     },
     "node_modules/spawn-wrap/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15378,9 +13217,6 @@
     },
     "node_modules/spawn-wrap/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15395,15 +13231,11 @@
     },
     "node_modules/spawn-wrap/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/spawn-wrap/node_modules/which": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
-      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15418,8 +13250,6 @@
     },
     "node_modules/spdx-correct": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/spdx-correct/-/spdx-correct-3.2.0.tgz",
-      "integrity": "sha512-kN9dJbvnySHULIluDHy32WHRUu3Og7B9sbY7tsFLctQkIqnMh3hErYgdMjTYuqmcXX+lK5T1lnUt3G7zNswmZA==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -15429,8 +13259,6 @@
     },
     "node_modules/spdx-correct/node_modules/spdx-expression-parse": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
-      "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15440,15 +13268,11 @@
     },
     "node_modules/spdx-exceptions": {
       "version": "2.5.0",
-      "resolved": "https://registry.npmjs.org/spdx-exceptions/-/spdx-exceptions-2.5.0.tgz",
-      "integrity": "sha512-PiU42r+xO4UbUS1buo3LPJkjlO7430Xn5SVAhdpzzsPHsjbYVflnnFdATgabnLude+Cqu25p6N+g2lw/PFsa4w==",
       "inBundle": true,
       "license": "CC-BY-3.0"
     },
     "node_modules/spdx-expression-parse": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-4.0.0.tgz",
-      "integrity": "sha512-Clya5JIij/7C6bRR22+tnGXbc4VKlibKSVj2iHvVeX5iMW7s1SIQlqu699JkODJJIhh/pUu8L0/VLh8xflD+LQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15458,15 +13282,11 @@
     },
     "node_modules/spdx-license-ids": {
       "version": "3.0.21",
-      "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.21.tgz",
-      "integrity": "sha512-Bvg/8F5XephndSK3JffaRqdT+gyhfqIPwDHpX80tJrF8QQRYMo8sNMeaZ2Dp5+jhwKnUmIOyFFQfHRkjJm5nXg==",
       "inBundle": true,
       "license": "CC0-1.0"
     },
     "node_modules/split": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
-      "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15478,8 +13298,6 @@
     },
     "node_modules/split2": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
-      "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -15488,15 +13306,11 @@
     },
     "node_modules/sprintf-js": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz",
-      "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==",
       "inBundle": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/ssri": {
       "version": "12.0.0",
-      "resolved": "https://registry.npmjs.org/ssri/-/ssri-12.0.0.tgz",
-      "integrity": "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -15508,8 +13322,6 @@
     },
     "node_modules/stack-utils": {
       "version": "2.0.6",
-      "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
-      "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15521,8 +13333,6 @@
     },
     "node_modules/stack-utils/node_modules/escape-string-regexp": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
-      "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15531,8 +13341,6 @@
     },
     "node_modules/stop-iteration-iterator": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz",
-      "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15545,8 +13353,6 @@
     },
     "node_modules/streamx": {
       "version": "2.22.1",
-      "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.1.tgz",
-      "integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15559,8 +13365,6 @@
     },
     "node_modules/string-width": {
       "version": "4.2.3",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
-      "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15575,8 +13379,6 @@
     "node_modules/string-width-cjs": {
       "name": "string-width",
       "version": "4.2.3",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
-      "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15590,8 +13392,6 @@
     },
     "node_modules/string.prototype.trim": {
       "version": "1.2.10",
-      "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz",
-      "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15612,8 +13412,6 @@
     },
     "node_modules/string.prototype.trimend": {
       "version": "1.0.9",
-      "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz",
-      "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15631,8 +13429,6 @@
     },
     "node_modules/string.prototype.trimstart": {
       "version": "1.0.8",
-      "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz",
-      "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15649,8 +13445,6 @@
     },
     "node_modules/stringify-entities": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz",
-      "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15664,8 +13458,6 @@
     },
     "node_modules/strip-ansi": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
-      "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15678,8 +13470,6 @@
     "node_modules/strip-ansi-cjs": {
       "name": "strip-ansi",
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
-      "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15691,8 +13481,6 @@
     },
     "node_modules/strip-bom": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz",
-      "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15701,8 +13489,6 @@
     },
     "node_modules/strip-indent": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz",
-      "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15714,8 +13500,6 @@
     },
     "node_modules/strip-json-comments": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
-      "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15727,8 +13511,6 @@
     },
     "node_modules/supports-color": {
       "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.0.0.tgz",
-      "integrity": "sha512-HRVVSbCCMbj7/kdWF9Q+bbckjBHLtHMEoJWlkmYzzdwhYMkjkOwubLM6t7NbWKjgKamGDrWL1++KrjUO1t9oAQ==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -15740,8 +13522,6 @@
     },
     "node_modules/supports-preserve-symlinks-flag": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
-      "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15753,15 +13533,11 @@
     },
     "node_modules/symbol-tree": {
       "version": "3.2.4",
-      "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
-      "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tap": {
       "version": "16.3.10",
-      "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.10.tgz",
-      "integrity": "sha512-q5Am+PpGHS6JSjk/Zn4bCRBihmZVM15v/MYXUy60wenw5HDe7pVrevLCEoMEz7tuw6jaPOJJqni1y8apN23IGw==",
       "bundleDependencies": [
         "ink",
         "treport",
@@ -15831,8 +13607,6 @@
     },
     "node_modules/tap-mocha-reporter": {
       "version": "5.0.4",
-      "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.4.tgz",
-      "integrity": "sha512-J+YMO8B7lq1O6Zxd/jeuG27vJ+Y4tLiRMKPSb7KR6FVh86k3Rq1TwYc2GKPyIjCbzzdMdReh3Vfz9L5cg1Z2Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15854,8 +13628,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15865,8 +13637,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/diff": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
-      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -15875,8 +13645,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/escape-string-regexp": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
-      "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15885,9 +13653,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15907,8 +13672,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15920,8 +13683,6 @@
     },
     "node_modules/tap-parser": {
       "version": "11.0.2",
-      "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-11.0.2.tgz",
-      "integrity": "sha512-6qGlC956rcORw+fg7Fv1iCRAY8/bU9UabUAhs3mXRH6eRmVZcNPLheSXCYaVaYeSwx5xa/1HXZb1537YSvwDZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15938,8 +13699,6 @@
     },
     "node_modules/tap-parser/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15951,8 +13710,6 @@
     },
     "node_modules/tap-yaml": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.2.tgz",
-      "integrity": "sha512-GegASpuqBnRNdT1U+yuUPZ8rEU64pL35WPBpCISWwff4dErS2/438barz7WFJl4Nzh3Y05tfPidZnH+GaV1wMg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15961,8 +13718,6 @@
     },
     "node_modules/tap-yaml/node_modules/yaml": {
       "version": "1.10.2",
-      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
-      "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -16737,8 +14492,6 @@
     },
     "node_modules/tap/node_modules/cliui": {
       "version": "7.0.4",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
-      "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -16749,8 +14502,6 @@
     },
     "node_modules/tap/node_modules/cliui/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16765,8 +14516,6 @@
     },
     "node_modules/tap/node_modules/cliui/node_modules/color-convert": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
-      "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16778,15 +14527,11 @@
     },
     "node_modules/tap/node_modules/cliui/node_modules/color-name": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
-      "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tap/node_modules/cliui/node_modules/wrap-ansi": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16959,8 +14704,6 @@
     },
     "node_modules/tap/node_modules/foreground-child": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
-      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -17185,8 +14928,6 @@
     },
     "node_modules/tap/node_modules/jackspeak": {
       "version": "1.4.2",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz",
-      "integrity": "sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -17855,8 +15596,6 @@
     },
     "node_modules/tap/node_modules/which": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
-      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -17984,8 +15723,6 @@
     },
     "node_modules/tar": {
       "version": "6.2.1",
-      "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz",
-      "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -18002,8 +15739,6 @@
     },
     "node_modules/tar-stream": {
       "version": "3.1.7",
-      "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz",
-      "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18014,8 +15749,6 @@
     },
     "node_modules/tar/node_modules/fs-minipass": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz",
-      "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -18027,8 +15760,6 @@
     },
     "node_modules/tar/node_modules/fs-minipass/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -18040,8 +15771,6 @@
     },
     "node_modules/tar/node_modules/minipass": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz",
-      "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -18050,8 +15779,6 @@
     },
     "node_modules/tcompare": {
       "version": "5.0.7",
-      "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.7.tgz",
-      "integrity": "sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -18063,8 +15790,6 @@
     },
     "node_modules/tcompare/node_modules/diff": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
-      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -18073,8 +15798,6 @@
     },
     "node_modules/test-exclude": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
-      "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -18088,8 +15811,6 @@
     },
     "node_modules/test-exclude/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18099,9 +15820,6 @@
     },
     "node_modules/test-exclude/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -18121,8 +15839,6 @@
     },
     "node_modules/test-exclude/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -18134,8 +15850,6 @@
     },
     "node_modules/text-decoder": {
       "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz",
-      "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -18144,8 +15858,6 @@
     },
     "node_modules/text-extensions": {
       "version": "2.4.0",
-      "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-2.4.0.tgz",
-      "integrity": "sha512-te/NtwBwfiNRLf9Ijqx3T0nlqZiQ2XrrtBvu+cLL8ZRrGkO0NHTug8MYFKyoSrv/sHTaSKfilUkizV6XhxMJ3g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18157,36 +15869,26 @@
     },
     "node_modules/text-table": {
       "version": "0.2.0",
-      "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
-      "integrity": "sha512-N+8UisAXDGk8PFXP4HAzVR9nbfmVJ3zYLAWiTIoqC5v5isinhr+r5uaO8+7r3BMfuNIufIsA7RdpVgacC2cSpw==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/through": {
       "version": "2.3.8",
-      "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
-      "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tiny-relative-date": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/tiny-relative-date/-/tiny-relative-date-1.3.0.tgz",
-      "integrity": "sha512-MOQHpzllWxDCHHaDno30hhLfbouoYlOI8YlMNtvKe1zXbjEVhbcEovQxvZrPvtiYW630GQDoMMarCnjfyfHA+A==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/tinyexec": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz",
-      "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tinyglobby": {
       "version": "0.2.14",
-      "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.14.tgz",
-      "integrity": "sha512-tX5e7OM1HnYr2+a2C/4V0htOcSQcoSTH9KgJnVvNm5zm/cyEWKJ7j7YutsH9CxMdtOkkLFy2AHrMci9IM8IPZQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -18202,8 +15904,6 @@
     },
     "node_modules/tinyglobby/node_modules/fdir": {
       "version": "6.4.6",
-      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.6.tgz",
-      "integrity": "sha512-hiFoqpyZcfNm1yc4u8oWCf9A2c4D3QjCrks3zmoVKVxpQRzmPNar1hUJcBG2RQHvEVGDN+Jm81ZheVLAQMK6+w==",
       "inBundle": true,
       "license": "MIT",
       "peerDependencies": {
@@ -18217,8 +15917,6 @@
     },
     "node_modules/tinyglobby/node_modules/picomatch": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
-      "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
       "inBundle": true,
       "license": "MIT",
       "peer": true,
@@ -18231,8 +15929,6 @@
     },
     "node_modules/to-regex-range": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
-      "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18244,15 +15940,11 @@
     },
     "node_modules/tr46": {
       "version": "0.0.3",
-      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
-      "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/treeverse": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/treeverse/-/treeverse-3.0.0.tgz",
-      "integrity": "sha512-gcANaAnd2QDZFmHFEOF4k7uc1J/6a6z3DJMd/QwEyxLoKGiptJRwid582r7QIsFlFMIZ3SnxfS52S4hm2DHkuQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -18261,8 +15953,6 @@
     },
     "node_modules/trim-lines": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
-      "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -18272,8 +15962,6 @@
     },
     "node_modules/trim-newlines": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz",
-      "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18282,8 +15970,6 @@
     },
     "node_modules/trivial-deferred": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.1.2.tgz",
-      "integrity": "sha512-vDPiDBC3hyP6O4JrJYMImW3nl3c03Tsj9fEXc7Qc/XKa1O7gf5ZtFfIR/E0dun9SnDHdwjna1Z2rSzYgqpxh/g==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -18292,8 +15978,6 @@
     },
     "node_modules/trough": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz",
-      "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -18303,8 +15987,6 @@
     },
     "node_modules/tsconfig-paths": {
       "version": "3.15.0",
-      "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",
-      "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18316,8 +15998,6 @@
     },
     "node_modules/tsconfig-paths/node_modules/json5": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
-      "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18329,8 +16009,6 @@
     },
     "node_modules/tsconfig-paths/node_modules/strip-bom": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
-      "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18339,8 +16017,6 @@
     },
     "node_modules/tuf-js": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.0.0.tgz",
-      "integrity": "sha512-Lq7ieeGvXDXwpoSmOSgLWVdsGGV9J4a77oDTAPe/Ltrqnnm/ETaRlBAQTH5JatEh8KXuE6sddf9qAv1Q2282Hg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -18354,8 +16030,6 @@
     },
     "node_modules/tuf-js/node_modules/@tufjs/models": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
-      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -18368,8 +16042,6 @@
     },
     "node_modules/tunnel": {
       "version": "0.0.6",
-      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
-      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18378,8 +16050,6 @@
     },
     "node_modules/type-check": {
       "version": "0.4.0",
-      "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
-      "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18391,8 +16061,6 @@
     },
     "node_modules/type-fest": {
       "version": "0.20.2",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
-      "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -18404,8 +16072,6 @@
     },
     "node_modules/typed-array-buffer": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz",
-      "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18419,8 +16085,6 @@
     },
     "node_modules/typed-array-byte-length": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz",
-      "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18439,8 +16103,6 @@
     },
     "node_modules/typed-array-byte-offset": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz",
-      "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18461,8 +16123,6 @@
     },
     "node_modules/typed-array-length": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz",
-      "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18482,8 +16142,6 @@
     },
     "node_modules/typedarray-to-buffer": {
       "version": "3.1.5",
-      "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
-      "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18492,8 +16150,6 @@
     },
     "node_modules/typescript": {
       "version": "5.8.3",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz",
-      "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -18507,8 +16163,6 @@
     },
     "node_modules/uglify-js": {
       "version": "3.19.3",
-      "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz",
-      "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "optional": true,
@@ -18521,8 +16175,6 @@
     },
     "node_modules/unbox-primitive": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz",
-      "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18540,8 +16192,6 @@
     },
     "node_modules/undici": {
       "version": "6.21.3",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz",
-      "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18550,15 +16200,11 @@
     },
     "node_modules/undici-types": {
       "version": "7.8.0",
-      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.8.0.tgz",
-      "integrity": "sha512-9UJ2xGDvQ43tYyVMpuHlsgApydB8ZKfVYTsLDhXkFL/6gfkp+U8xTGdh8pMJv1SpZna0zxG1DwsKZsreLbXBxw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/unicode-length": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/unicode-length/-/unicode-length-2.1.0.tgz",
-      "integrity": "sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18567,8 +16213,6 @@
     },
     "node_modules/unicorn-magic": {
       "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz",
-      "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18580,8 +16224,6 @@
     },
     "node_modules/unified": {
       "version": "10.1.2",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz",
-      "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18600,8 +16242,6 @@
     },
     "node_modules/unique-filename": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-4.0.0.tgz",
-      "integrity": "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -18613,8 +16253,6 @@
     },
     "node_modules/unique-slug": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-5.0.0.tgz",
-      "integrity": "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -18626,8 +16264,6 @@
     },
     "node_modules/unist-util-generated": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz",
-      "integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -18637,8 +16273,6 @@
     },
     "node_modules/unist-util-is": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz",
-      "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18651,8 +16285,6 @@
     },
     "node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz",
-      "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18665,8 +16297,6 @@
     },
     "node_modules/unist-util-visit": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz",
-      "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18681,8 +16311,6 @@
     },
     "node_modules/unist-util-visit-parents": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz",
-      "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18696,8 +16324,6 @@
     },
     "node_modules/unist-util-visit-parents/node_modules/unist-util-is": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz",
-      "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -18707,8 +16333,6 @@
     },
     "node_modules/unist-util-visit/node_modules/unist-util-is": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz",
-      "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -18718,15 +16342,11 @@
     },
     "node_modules/universal-user-agent": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
-      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/universalify": {
       "version": "0.2.0",
-      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
-      "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18735,8 +16355,6 @@
     },
     "node_modules/update-browserslist-db": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
-      "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
       "dev": true,
       "funding": [
         {
@@ -18766,8 +16384,6 @@
     },
     "node_modules/uri-js": {
       "version": "4.4.1",
-      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
-      "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -18776,8 +16392,6 @@
     },
     "node_modules/url-parse": {
       "version": "1.5.10",
-      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
-      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18787,14 +16401,10 @@
     },
     "node_modules/util-deprecate": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
-      "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
       "license": "MIT"
     },
     "node_modules/uuid": {
       "version": "8.3.2",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -18803,8 +16413,6 @@
     },
     "node_modules/uvu": {
       "version": "0.5.6",
-      "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz",
-      "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18822,8 +16430,6 @@
     },
     "node_modules/uvu/node_modules/diff": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
-      "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -18832,8 +16438,6 @@
     },
     "node_modules/validate-npm-package-license": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz",
-      "integrity": "sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew==",
       "inBundle": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -18843,8 +16447,6 @@
     },
     "node_modules/validate-npm-package-license/node_modules/spdx-expression-parse": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz",
-      "integrity": "sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -18854,8 +16456,6 @@
     },
     "node_modules/validate-npm-package-name": {
       "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-6.0.2.tgz",
-      "integrity": "sha512-IUoow1YUtvoBBC06dXs8bR8B9vuA3aJfmQNKMoaPG/OFsPmoQvw8xh+6Ye25Gx9DQhoEom3Pcu9MKHerm/NpUQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -18864,8 +16464,6 @@
     },
     "node_modules/vfile": {
       "version": "5.3.7",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz",
-      "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18881,8 +16479,6 @@
     },
     "node_modules/vfile-location": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.1.0.tgz",
-      "integrity": "sha512-YF23YMyASIIJXpktBa4vIGLJ5Gs88UB/XePgqPmTa7cDA+JeO3yclbpheQYCHjVHBn/yePzrXuygIL+xbvRYHw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18896,8 +16492,6 @@
     },
     "node_modules/vfile-message": {
       "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
-      "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18911,8 +16505,6 @@
     },
     "node_modules/w3c-xmlserializer": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz",
-      "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18924,8 +16516,6 @@
     },
     "node_modules/walk-up-path": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-4.0.0.tgz",
-      "integrity": "sha512-3hu+tD8YzSLGuFYtPRb48vdhKMi0KQV5sn+uWr8+7dMEq/2G/dtLrdDinkLjqq5TIbIBjYJ4Ax/n3YiaW7QM8A==",
       "license": "ISC",
       "engines": {
         "node": "20 || >=22"
@@ -18933,8 +16523,6 @@
     },
     "node_modules/web-namespaces": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz",
-      "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -18944,15 +16532,11 @@
     },
     "node_modules/webidl-conversions": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
-      "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
       "dev": true,
       "license": "BSD-2-Clause"
     },
     "node_modules/whatwg-encoding": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
-      "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18964,8 +16548,6 @@
     },
     "node_modules/whatwg-mimetype": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
-      "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -18974,8 +16556,6 @@
     },
     "node_modules/whatwg-url": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
-      "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -18985,8 +16565,6 @@
     },
     "node_modules/which": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
-      "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -19001,8 +16579,6 @@
     },
     "node_modules/which-boxed-primitive": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz",
-      "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -19021,8 +16597,6 @@
     },
     "node_modules/which-builtin-type": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz",
-      "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -19049,8 +16623,6 @@
     },
     "node_modules/which-collection": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz",
-      "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -19068,15 +16640,11 @@
     },
     "node_modules/which-module": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz",
-      "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/which-typed-array": {
       "version": "1.1.19",
-      "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz",
-      "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -19097,8 +16665,6 @@
     },
     "node_modules/which/node_modules/isexe": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
-      "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -19107,8 +16673,6 @@
     },
     "node_modules/word-wrap": {
       "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
-      "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -19117,15 +16681,11 @@
     },
     "node_modules/wordwrap": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
-      "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/wrap-ansi": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
-      "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -19143,8 +16703,6 @@
     "node_modules/wrap-ansi-cjs": {
       "name": "wrap-ansi",
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -19161,8 +16719,6 @@
     },
     "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -19177,8 +16733,6 @@
     },
     "node_modules/wrap-ansi/node_modules/ansi-regex": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz",
-      "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -19190,15 +16744,11 @@
     },
     "node_modules/wrap-ansi/node_modules/emoji-regex": {
       "version": "9.2.2",
-      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
-      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/wrap-ansi/node_modules/string-width": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
-      "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -19215,8 +16765,6 @@
     },
     "node_modules/wrap-ansi/node_modules/strip-ansi": {
       "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz",
-      "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -19231,15 +16779,11 @@
     },
     "node_modules/wrappy": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
-      "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/write-file-atomic": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-6.0.0.tgz",
-      "integrity": "sha512-GmqrO8WJ1NuzJ2DrziEI2o57jKAVIQNf8a18W3nCYU3H7PNWqCCVTeH6/NQE93CIllIgQS98rrmVkYgTX9fFJQ==",
       "license": "ISC",
       "dependencies": {
         "imurmurhash": "^0.1.4",
@@ -19251,8 +16795,6 @@
     },
     "node_modules/ws": {
       "version": "8.18.3",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
-      "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -19273,8 +16815,6 @@
     },
     "node_modules/xml-name-validator": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
-      "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -19283,15 +16823,11 @@
     },
     "node_modules/xmlchars": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz",
-      "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/xpath": {
       "version": "0.0.34",
-      "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.34.tgz",
-      "integrity": "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -19300,8 +16836,6 @@
     },
     "node_modules/y18n": {
       "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
-      "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -19310,15 +16844,11 @@
     },
     "node_modules/yallist": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
-      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
       "inBundle": true,
       "license": "ISC"
     },
     "node_modules/yaml": {
       "version": "2.8.0",
-      "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.0.tgz",
-      "integrity": "sha512-4lLa/EcQCB0cJkyts+FpIRx5G/llPxfP6VQU5KByHEhLxY3IJCH0f0Hy1MHI8sClTvsIb8qwRJ6R/ZdlDJ/leQ==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -19330,8 +16860,6 @@
     },
     "node_modules/yargs": {
       "version": "17.7.2",
-      "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
-      "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -19349,8 +16877,6 @@
     },
     "node_modules/yargs-parser": {
       "version": "21.1.1",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
-      "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -19359,8 +16885,6 @@
     },
     "node_modules/yocto-queue": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz",
-      "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -19372,8 +16896,6 @@
     },
     "node_modules/zwitch": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
-      "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -19401,8 +16923,6 @@
     },
     "smoke-tests/node_modules/glob": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
-      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -19425,8 +16945,6 @@
     },
     "smoke-tests/node_modules/jackspeak": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
-      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "dev": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -19441,8 +16959,6 @@
     },
     "smoke-tests/node_modules/lru-cache": {
       "version": "11.1.0",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.1.0.tgz",
-      "integrity": "sha512-QIXZUBJUx+2zHUdQujWejBkcD9+cs94tLn0+YL8UrCh+D5sCXZ4c7LaEH48pNwRY3MLDgqUFyhlCyjJPf1WP0A==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -19451,8 +16967,6 @@
     },
     "smoke-tests/node_modules/minimatch": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -19467,8 +16981,6 @@
     },
     "smoke-tests/node_modules/path-scurry": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
-      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "dev": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -19484,8 +16996,6 @@
     },
     "smoke-tests/node_modules/rimraf": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.0.1.tgz",
-      "integrity": "sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -19767,7 +17277,7 @@
       "version": "8.0.1",
       "license": "ISC",
       "dependencies": {
-        "@npmcli/git": "^6.0.1",
+        "@npmcli/git": "^7.0.0",
         "@npmcli/run-script": "^10.0.0",
         "json-parse-even-better-errors": "^4.0.0",
         "proc-log": "^5.0.0",
diff --git a/package.json b/package.json
index 4f00629e1949d..49bd059ce391a 100644
--- a/package.json
+++ b/package.json
@@ -189,7 +189,7 @@
   "devDependencies": {
     "@npmcli/docs": "^1.0.0",
     "@npmcli/eslint-config": "^5.1.0",
-    "@npmcli/git": "^6.0.3",
+    "@npmcli/git": "^7.0.0",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.24.4",
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index 6d6c774570644..ff41399b65140 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -38,7 +38,7 @@
     "tap": "^16.3.8"
   },
   "dependencies": {
-    "@npmcli/git": "^6.0.1",
+    "@npmcli/git": "^7.0.0",
     "@npmcli/run-script": "^10.0.0",
     "json-parse-even-better-errors": "^4.0.0",
     "proc-log": "^5.0.0",

From ea7ca5f49d6cab81e9ce3d412963c48acd87b7c0 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 09:09:57 -0700
Subject: [PATCH 22/63] deps: lru-cache@11.2.1

---
 node_modules/.gitignore                       |   16 +-
 .../node_modules/lru-cache/LICENSE            |    0
 .../lru-cache/dist/commonjs/index.js          |  118 +-
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    0
 .../node_modules/lru-cache/dist/esm/index.js  |  118 +-
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    0
 .../node_modules/lru-cache/package.json       |   27 +-
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ----------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../cacache/node_modules/lru-cache/LICENSE    |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1564 -----------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  113 --
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1564 -----------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ----------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  113 --
 node_modules/lru-cache/dist/commonjs/index.js |  118 +-
 .../lru-cache/dist/commonjs/index.min.js      |    2 +-
 node_modules/lru-cache/dist/esm/index.js      |  118 +-
 node_modules/lru-cache/dist/esm/index.min.js  |    2 +-
 node_modules/lru-cache/package.json           |   27 +-
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1564 -----------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1560 ----------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  113 --
 .../node_modules/lru-cache/LICENSE            |    0
 .../lru-cache/dist/commonjs/index.js          |  118 +-
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    0
 .../node_modules/lru-cache/dist/esm/index.js  |  118 +-
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    0
 .../node_modules/lru-cache/package.json       |   27 +-
 .../node_modules/lru-cache/LICENSE            |    0
 .../lru-cache/dist/commonjs/index.js          |  118 +-
 .../lru-cache/dist/commonjs/index.min.js      |    2 +
 .../lru-cache/dist/commonjs/package.json      |    0
 .../node_modules/lru-cache/dist/esm/index.js  |  118 +-
 .../lru-cache/dist/esm/index.min.js           |    2 +
 .../lru-cache/dist/esm/package.json           |    0
 .../node_modules/lru-cache/package.json       |   27 +-
 package-lock.json                             |  115 +-
 workspaces/arborist/package.json              |    2 +-
 62 files changed, 543 insertions(+), 10456 deletions(-)
 rename node_modules/@npmcli/{git => agent}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/@npmcli/{git => agent}/node_modules/lru-cache/dist/commonjs/index.js (94%)
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
 rename node_modules/@npmcli/{git => agent}/node_modules/lru-cache/dist/commonjs/package.json (100%)
 rename node_modules/@npmcli/{package-json => agent}/node_modules/lru-cache/dist/esm/index.js (94%)
 create mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
 rename node_modules/@npmcli/{git => agent}/node_modules/lru-cache/dist/esm/package.json (100%)
 rename node_modules/@npmcli/{git => agent}/node_modules/lru-cache/package.json (87%)
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/cacache/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/hosted-git-info/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/lru-cache/package.json
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/lru-cache/dist/commonjs/index.js (94%)
 create mode 100644 node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/lru-cache/dist/commonjs/package.json (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/lru-cache/dist/esm/index.js (94%)
 create mode 100644 node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/lru-cache/dist/esm/package.json (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/lru-cache/package.json (87%)
 rename node_modules/{@npmcli/package-json => path-scurry}/node_modules/lru-cache/LICENSE (100%)
 rename node_modules/{@npmcli/package-json => path-scurry}/node_modules/lru-cache/dist/commonjs/index.js (94%)
 create mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js
 rename node_modules/{@npmcli/package-json => path-scurry}/node_modules/lru-cache/dist/commonjs/package.json (100%)
 rename node_modules/{@npmcli/git => path-scurry}/node_modules/lru-cache/dist/esm/index.js (94%)
 create mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js
 rename node_modules/{@npmcli/package-json => path-scurry}/node_modules/lru-cache/dist/esm/package.json (100%)
 rename node_modules/{@npmcli/package-json => path-scurry}/node_modules/lru-cache/package.json (87%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 26bf0a2939aef..21cc085017b8d 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -19,18 +19,17 @@
 !/@npmcli/
 /@npmcli/*
 !/@npmcli/agent
+!/@npmcli/agent/node_modules/
+/@npmcli/agent/node_modules/*
+!/@npmcli/agent/node_modules/lru-cache
 !/@npmcli/fs
 !/@npmcli/git
-!/@npmcli/git/node_modules/
-/@npmcli/git/node_modules/*
-!/@npmcli/git/node_modules/lru-cache
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
 !/@npmcli/map-workspaces/node_modules/
 /@npmcli/map-workspaces/node_modules/*
 !/@npmcli/map-workspaces/node_modules/glob
 !/@npmcli/map-workspaces/node_modules/jackspeak
-!/@npmcli/map-workspaces/node_modules/lru-cache
 !/@npmcli/map-workspaces/node_modules/minimatch
 !/@npmcli/map-workspaces/node_modules/path-scurry
 !/@npmcli/metavuln-calculator
@@ -41,7 +40,6 @@
 /@npmcli/package-json/node_modules/*
 !/@npmcli/package-json/node_modules/glob
 !/@npmcli/package-json/node_modules/jackspeak
-!/@npmcli/package-json/node_modules/lru-cache
 !/@npmcli/package-json/node_modules/minimatch
 !/@npmcli/package-json/node_modules/path-scurry
 !/@npmcli/promise-spawn
@@ -77,7 +75,6 @@
 /cacache/node_modules/*
 !/cacache/node_modules/glob
 !/cacache/node_modules/jackspeak
-!/cacache/node_modules/lru-cache
 !/cacache/node_modules/minimatch
 !/cacache/node_modules/path-scurry
 !/chalk
@@ -108,9 +105,6 @@
 !/glob
 !/graceful-fs
 !/hosted-git-info
-!/hosted-git-info/node_modules/
-/hosted-git-info/node_modules/*
-!/hosted-git-info/node_modules/lru-cache
 !/http-cache-semantics
 !/http-proxy-agent
 !/https-proxy-agent
@@ -170,6 +164,7 @@
 /node-gyp/node_modules/*
 !/node-gyp/node_modules/cacache
 !/node-gyp/node_modules/chownr
+!/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
 !/node-gyp/node_modules/minizlib
 !/node-gyp/node_modules/mkdirp
@@ -210,6 +205,9 @@
 !/parse-conflict-json
 !/path-key
 !/path-scurry
+!/path-scurry/node_modules/
+/path-scurry/node_modules/*
+!/path-scurry/node_modules/lru-cache
 !/postcss-selector-parser
 !/proc-log
 !/proggy
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/@npmcli/git/node_modules/lru-cache/LICENSE
rename to node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
similarity index 94%
rename from node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
rename to node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
index 921b8f10f71b1..0589231885c68 100644
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.js
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
@@ -4,20 +4,18 @@
  */
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
+const perf = typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function') ?
-    performance
+    typeof performance.now === 'function'
+    ? performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -81,11 +79,16 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -144,17 +147,9 @@ class LRUCache {
     #max;
     #maxSize;
     #dispose;
-    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -233,7 +228,6 @@ class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
-    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -310,12 +304,6 @@ class LRUCache {
     get dispose() {
         return this.#dispose;
     }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -323,13 +311,7 @@ class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -373,9 +355,6 @@ class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -385,7 +364,6 @@ class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -410,8 +388,8 @@ class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -447,7 +425,7 @@ class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -465,7 +443,7 @@ class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -485,7 +463,7 @@ class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = this.#perf.now();
+            const n = perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -722,7 +700,9 @@ class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -744,7 +724,9 @@ class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -757,7 +739,9 @@ class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -794,18 +778,17 @@ class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        const value = this.#isBackgroundFetch(v)
+            ? v.__staleWhileFetching
+            : v;
         if (value === undefined)
             return undefined;
-        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
+                const remain = ttl - (perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -817,7 +800,7 @@ class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
+     * passed to {@link LRLUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -833,7 +816,9 @@ class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -841,7 +826,7 @@ class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
+                const age = perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -871,7 +856,7 @@ class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
+                entry.start = perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -928,9 +913,12 @@ class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -943,9 +931,6 @@ class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
         }
         else {
             // update
@@ -977,8 +962,8 @@ class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -987,9 +972,6 @@ class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1172,7 +1154,7 @@ class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
+                    if (bf.__staleWhileFetching) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ad643b0badc90
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let 
y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/package.json
rename to node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
similarity index 94%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js
rename to node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
index 8fd8fc5f31507..555654a57c4d7 100644
--- a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.js
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
@@ -1,20 +1,18 @@
 /**
  * @module LRUCache
  */
-const defaultPerf = (typeof performance === 'object' &&
+const perf = typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function') ?
-    performance
+    typeof performance.now === 'function'
+    ? performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -78,11 +76,16 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -141,17 +144,9 @@ export class LRUCache {
     #max;
     #maxSize;
     #dispose;
-    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -230,7 +225,6 @@ export class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
-    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -307,12 +301,6 @@ export class LRUCache {
     get dispose() {
         return this.#dispose;
     }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -320,13 +308,7 @@ export class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -370,9 +352,6 @@ export class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -382,7 +361,6 @@ export class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -407,8 +385,8 @@ export class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -444,7 +422,7 @@ export class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -462,7 +440,7 @@ export class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -482,7 +460,7 @@ export class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = this.#perf.now();
+            const n = perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -719,7 +697,9 @@ export class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -741,7 +721,9 @@ export class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -754,7 +736,9 @@ export class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -791,18 +775,17 @@ export class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        const value = this.#isBackgroundFetch(v)
+            ? v.__staleWhileFetching
+            : v;
         if (value === undefined)
             return undefined;
-        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
+                const remain = ttl - (perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -814,7 +797,7 @@ export class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
+     * passed to {@link LRLUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -830,7 +813,9 @@ export class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -838,7 +823,7 @@ export class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
+                const age = perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -868,7 +853,7 @@ export class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
+                entry.start = perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -925,9 +910,12 @@ export class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -940,9 +928,6 @@ export class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
         }
         else {
             // update
@@ -974,8 +959,8 @@ export class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -984,9 +969,6 @@ export class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1169,7 +1151,7 @@ export class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
+                    if (bf.__staleWhileFetching) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..4571d0254e27d
--- /dev/null
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: 
"+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/package.json
rename to node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
similarity index 87%
rename from node_modules/@npmcli/git/node_modules/lru-cache/package.json
rename to node_modules/@npmcli/agent/node_modules/lru-cache/package.json
index 4953bdf4a7a35..f3cd4c0cc53f7 100644
--- a/node_modules/@npmcli/git/node_modules/lru-cache/package.json
+++ b/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
@@ -1,7 +1,10 @@
 {
   "name": "lru-cache",
+  "publishConfig": {
+    "tag": "legacy-v10"
+  },
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
+  "version": "10.4.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -49,25 +52,25 @@
     "url": "git://github.com/isaacs/node-lru-cache.git"
   },
   "devDependencies": {
-    "@types/node": "^24.3.0",
+    "@types/node": "^20.2.5",
+    "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
+    "esbuild": "^0.17.11",
+    "eslint-config-prettier": "^8.5.0",
     "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
+    "mkdirp": "^2.1.5",
+    "prettier": "^2.6.2",
+    "tap": "^20.0.3",
+    "tshy": "^2.0.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.25.3",
+    "typescript": "^5.2.2"
   },
   "license": "ISC",
   "files": [
     "dist"
   ],
-  "engines": {
-    "node": "20 || >=22"
-  },
   "prettier": {
-    "experimentalTernaries": true,
     "semi": false,
     "printWidth": 70,
     "tabWidth": 2,
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 8fd8fc5f31507..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1560 +0,0 @@
-/**
- * @module LRUCache
- */
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/cacache/node_modules/lru-cache/LICENSE b/node_modules/cacache/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 921b8f10f71b1..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1564 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/cacache/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/esm/package.json b/node_modules/cacache/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/cacache/node_modules/lru-cache/package.json b/node_modules/cacache/node_modules/lru-cache/package.json
deleted file mode 100644
index 4953bdf4a7a35..0000000000000
--- a/node_modules/cacache/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^24.3.0",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
-    "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE b/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 921b8f10f71b1..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1564 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
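For reference, a minimal usage sketch of the LRUCache class removed above, based only on the options and methods visible in the deleted dist file (a bounded cache via max/ttl, set/get/has, getRemainingTTL, and fetch backed by a fetchMethod). The require specifier, keys, and loader body are illustrative assumptions, not taken from this patch.

    // Sketch only: assumes the public lru-cache API shown in the removed file.
    const { LRUCache } = require('lru-cache')

    // The constructor requires at least one of max, maxSize, or ttl.
    const cache = new LRUCache({
      max: 100,    // keep at most 100 entries, evicting the least recently used
      ttl: 60_000, // entries become stale after 60 seconds
      // fetchMethod is invoked as (key, staleValue, { signal, options, context })
      // when fetch() misses or hits a stale entry; the loader below is hypothetical.
      fetchMethod: async (key, staleValue, { signal }) => {
        return { key, loadedAt: Date.now() }
      },
    })

    cache.set('npm/cli', { key: 'npm/cli', loadedAt: Date.now() })
    cache.get('npm/cli')             // returns the value and refreshes its recency
    cache.has('npm/cli')             // true while the entry is present and not stale
    cache.getRemainingTTL('npm/cli') // ms left before the entry is considered stale

    // fetch() reuses an in-flight load for the same key and resolves via fetchMethod.
    cache.fetch('npm/hosted-git-info').then(value => console.log(value))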
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 8fd8fc5f31507..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1560 +0,0 @@
-/**
- * @module LRUCache
- */
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/hosted-git-info/node_modules/lru-cache/package.json b/node_modules/hosted-git-info/node_modules/lru-cache/package.json
deleted file mode 100644
index 4953bdf4a7a35..0000000000000
--- a/node_modules/hosted-git-info/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^24.3.0",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
-    "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/lru-cache/dist/commonjs/index.js
index 0589231885c68..921b8f10f71b1 100644
--- a/node_modules/lru-cache/dist/commonjs/index.js
+++ b/node_modules/lru-cache/dist/commonjs/index.js
@@ -4,18 +4,20 @@
  */
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
+const defaultPerf = (typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function'
-    ? performance
+    typeof performance.now === 'function') ?
+    performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -79,16 +81,11 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -147,9 +144,17 @@ class LRUCache {
     #max;
     #maxSize;
     #dispose;
+    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -228,6 +233,7 @@ class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
+    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -304,6 +310,12 @@ class LRUCache {
     get dispose() {
         return this.#dispose;
     }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -311,7 +323,13 @@ class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -355,6 +373,9 @@ class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -364,6 +385,7 @@ class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -388,8 +410,8 @@ class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -425,7 +447,7 @@ class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -443,7 +465,7 @@ class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -463,7 +485,7 @@ class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = perf.now();
+            const n = this.#perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -700,9 +722,7 @@ class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -724,9 +744,7 @@ class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -739,9 +757,7 @@ class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -778,17 +794,18 @@ class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        const value = this.#isBackgroundFetch(v)
-            ? v.__staleWhileFetching
-            : v;
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
         if (value === undefined)
             return undefined;
+        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (perf.now() - start);
+                const remain = ttl - (this.#perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -800,7 +817,7 @@ class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRLUCache#load}.
+     * passed to {@link LRUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -816,9 +833,7 @@ class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -826,7 +841,7 @@ class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
+                const age = this.#perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -856,7 +871,7 @@ class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
+                entry.start = this.#perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -913,12 +928,9 @@ class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -931,6 +943,9 @@ class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
         }
         else {
             // update
@@ -962,8 +977,8 @@ class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -972,6 +987,9 @@ class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1154,7 +1172,7 @@ class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
+                    if (bf.__staleWhileFetching !== undefined) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
diff --git a/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/lru-cache/dist/commonjs/index.min.js
index ad643b0badc90..ef5027b91650d 100644
--- a/node_modules/lru-cache/dist/commonjs/index.min.js
+++ b/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -1,2 +1,2 @@
-"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let 
y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C;
+"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
 //# sourceMappingURL=index.min.js.map
diff --git a/node_modules/lru-cache/dist/esm/index.js b/node_modules/lru-cache/dist/esm/index.js
index 555654a57c4d7..8fd8fc5f31507 100644
--- a/node_modules/lru-cache/dist/esm/index.js
+++ b/node_modules/lru-cache/dist/esm/index.js
@@ -1,18 +1,20 @@
 /**
  * @module LRUCache
  */
-const perf = typeof performance === 'object' &&
+const defaultPerf = (typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function'
-    ? performance
+    typeof performance.now === 'function') ?
+    performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
+const PROCESS = (typeof process === 'object' && !!process ?
+    process
+    : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function' ?
+        PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -76,16 +78,11 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
+const getUintArray = (max) => !isPosInt(max) ? null
+    : max <= Math.pow(2, 8) ? Uint8Array
+        : max <= Math.pow(2, 16) ? Uint16Array
+            : max <= Math.pow(2, 32) ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -144,9 +141,17 @@ export class LRUCache {
     #max;
     #maxSize;
     #dispose;
+    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
+    #perf;
+    /**
+     * {@link LRUCache.OptionsBase.perf}
+     */
+    get perf() {
+        return this.#perf;
+    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -225,6 +230,7 @@ export class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
+    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -301,6 +307,12 @@ export class LRUCache {
     get dispose() {
         return this.#dispose;
     }
+    /**
+     * {@link LRUCache.OptionsBase.onInsert} (read-only)
+     */
+    get onInsert() {
+        return this.#onInsert;
+    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -308,7 +320,13 @@ export class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
+        if (perf !== undefined) {
+            if (typeof perf?.now !== 'function') {
+                throw new TypeError('perf option must have a now() method if specified');
+            }
+        }
+        this.#perf = perf ?? defaultPerf;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -352,6 +370,9 @@ export class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
+        if (typeof onInsert === 'function') {
+            this.#onInsert = onInsert;
+        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -361,6 +382,7 @@ export class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
+        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -385,8 +407,8 @@ export class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0 ?
+                ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -422,7 +444,7 @@ export class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -440,7 +462,7 @@ export class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -460,7 +482,7 @@ export class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = perf.now();
+            const n = this.#perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -697,9 +719,7 @@ export class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -721,9 +741,7 @@ export class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -736,9 +754,7 @@ export class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -775,17 +791,18 @@ export class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        const value = this.#isBackgroundFetch(v)
-            ? v.__staleWhileFetching
-            : v;
+        /* c8 ignore start - this isn't tested for the info function,
+         * but it's the same logic as found in other places. */
+        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
         if (value === undefined)
             return undefined;
+        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (perf.now() - start);
+                const remain = ttl - (this.#perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -797,7 +814,7 @@ export class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRLUCache#load}.
+     * passed to {@link LRUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -813,9 +830,7 @@ export class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
+            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -823,7 +838,7 @@ export class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
+                const age = this.#perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -853,7 +868,7 @@ export class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
+                entry.start = this.#perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -910,12 +925,9 @@ export class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
+            index = (this.#size === 0 ? this.#tail
+                : this.#free.length !== 0 ? this.#free.pop()
+                    : this.#size === this.#max ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -928,6 +940,9 @@ export class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
+            if (this.#hasOnInsert) {
+                this.#onInsert?.(v, k, 'add');
+            }
         }
         else {
             // update
@@ -959,8 +974,8 @@ export class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
+                        oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -969,6 +984,9 @@ export class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
+            if (this.#hasOnInsert) {
+                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
+            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1151,7 +1169,7 @@ export class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
+                    if (bf.__staleWhileFetching !== undefined) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
diff --git a/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/lru-cache/dist/esm/index.min.js
index 4571d0254e27d..07dd8fc3c59d8 100644
--- a/node_modules/lru-cache/dist/esm/index.min.js
+++ b/node_modules/lru-cache/dist/esm/index.min.js
@@ -1,2 +1,2 @@
-var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: 
"+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
+var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
 //# sourceMappingURL=index.min.js.map
diff --git a/node_modules/lru-cache/package.json b/node_modules/lru-cache/package.json
index f3cd4c0cc53f7..4953bdf4a7a35 100644
--- a/node_modules/lru-cache/package.json
+++ b/node_modules/lru-cache/package.json
@@ -1,10 +1,7 @@
 {
   "name": "lru-cache",
-  "publishConfig": {
-    "tag": "legacy-v10"
-  },
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.4.3",
+  "version": "11.2.1",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -52,25 +49,25 @@
     "url": "git://github.com/isaacs/node-lru-cache.git"
   },
   "devDependencies": {
-    "@types/node": "^20.2.5",
-    "@types/tap": "^15.0.6",
+    "@types/node": "^24.3.0",
     "benchmark": "^2.1.4",
-    "esbuild": "^0.17.11",
-    "eslint-config-prettier": "^8.5.0",
+    "esbuild": "^0.25.9",
     "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
-    "prettier": "^2.6.2",
-    "tap": "^20.0.3",
-    "tshy": "^2.0.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.25.3",
-    "typescript": "^5.2.2"
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.6.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.12"
   },
   "license": "ISC",
   "files": [
     "dist"
   ],
+  "engines": {
+    "node": "20 || >=22"
+  },
   "prettier": {
+    "experimentalTernaries": true,
     "semi": false,
     "printWidth": 70,
     "tabWidth": 2,
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE b/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 921b8f10f71b1..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1564 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ef5027b91650d..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},U=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,L=globalThis.AbortSignal;if(typeof C>"u"){L=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new L;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,U("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),I=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=I(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},D=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?I(e):Array;if(!T)throw new Error("invalid max value: 
"+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let O="LRU_CACHE_UNBOUNDED";G(O)&&(x.add(O),U("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",O,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new E(this.#l),e=new E(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#O(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#E=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new E(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#U=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#I(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#U=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#O(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#O(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#I(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#U(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#U(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#E(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#I(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#I(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#O(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#O(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let O=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",O&&(l.returnedStale=!0)),O?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#E(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#O(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#O(t,"delete")}#O(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=D;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 8fd8fc5f31507..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1560 +0,0 @@
-/**
- * @module LRUCache
- */
-const defaultPerf = (typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function') ?
-    performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #onInsert;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    #hasOnInsert;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = this.#perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-        if (value === undefined)
-            return undefined;
-        /* c8 ignore end */
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 07dd8fc3c59d8..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var M=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,x=new Set,R=typeof process=="object"&&process?process:{},I=(a,t,e,i)=>{typeof R.emitWarning=="function"?R.emitWarning(a,t,e,i):console.error(`[${e}] ${t}: ${a}`)},C=globalThis.AbortController,D=globalThis.AbortSignal;if(typeof C>"u"){D=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},C=class{constructor(){t()}signal=new D;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let a=R.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{a&&(a=!1,I("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var G=a=>!x.has(a),H=Symbol("type"),y=a=>a&&a===Math.floor(a)&&a>0&&isFinite(a),U=a=>y(a)?a<=Math.pow(2,8)?Uint8Array:a<=Math.pow(2,16)?Uint16Array:a<=Math.pow(2,32)?Uint32Array:a<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},W=class a{heap;length;static#l=!1;static create(t){let e=U(t);if(!e)return[];a.#l=!0;let i=new a(t,e);return a.#l=!1,i}constructor(t,e){if(!a.#l)throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},L=class a{#l;#c;#p;#v;#w;#D;#L;#S;get perf(){return this.#S}ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#_;#s;#i;#t;#a;#u;#o;#h;#m;#r;#b;#y;#d;#A;#z;#f;#x;static unsafeExposeInternals(t){return{starts:t.#y,ttls:t.#d,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#a,prev:t.#u,get head(){return t.#o},get tail(){return t.#h},free:t.#m,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#M(e,i,s,n),moveToTail:e=>t.#W(e),indexes:e=>t.#F(e),rindexes:e=>t.#T(e),isStale:e=>t.#g(e)}}get max(){return this.#l}get maxSize(){return this.#c}get calculatedSize(){return this.#_}get size(){return this.#n}get fetchMethod(){return this.#D}get memoMethod(){return this.#L}get dispose(){return this.#p}get onInsert(){return this.#v}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,onInsert:_,disposeAfter:f,noDisposeOnSet:c,noUpdateTTL:u,maxSize:A=0,maxEntrySize:d=0,sizeCalculation:m,fetchMethod:l,memoMethod:w,noDeleteOnFetchRejection:b,noDeleteOnStaleGet:p,allowStaleOnFetchRejection:S,allowStaleOnFetchAbort:z,ignoreFetchAbort:F,perf:v}=t;if(v!==void 0&&typeof v?.now!="function")throw new TypeError("perf option must have a now() method if specified");if(this.#S=v??M,e!==0&&!y(e))throw new TypeError("max option must be a nonnegative integer");let T=e?U(e):Array;if(!T)throw new Error("invalid max value: "+e);if(this.#l=e,this.#c=A,this.maxEntrySize=d||this.#c,this.sizeCalculation=m,this.sizeCalculation){if(!this.#c&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or 
maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(w!==void 0&&typeof w!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#L=w,l!==void 0&&typeof l!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#D=l,this.#z=!!l,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#a=new T(e),this.#u=new T(e),this.#o=0,this.#h=0,this.#m=W.create(e),this.#n=0,this.#_=0,typeof g=="function"&&(this.#p=g),typeof _=="function"&&(this.#v=_),typeof f=="function"?(this.#w=f,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#A=!!this.#p,this.#x=!!this.#v,this.#f=!!this.#w,this.noDisposeOnSet=!!c,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!b,this.allowStaleOnFetchRejection=!!S,this.allowStaleOnFetchAbort=!!z,this.ignoreFetchAbort=!!F,this.maxEntrySize!==0){if(this.#c!==0&&!y(this.#c))throw new TypeError("maxSize must be a positive integer if specified");if(!y(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#V()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!p,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=y(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!y(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#G()}if(this.#l===0&&this.ttl===0&&this.#c===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#l&&!this.#c){let E="LRU_CACHE_UNBOUNDED";G(E)&&(x.add(E),I("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",E,a))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#G(){let t=new O(this.#l),e=new O(this.#l);this.#d=t,this.#y=e,this.#j=(n,h,o=this.#S.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#g(n)&&this.#E(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#C=n=>{e[n]=t[n]!==0?this.#S.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=this.#S.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#g=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#C=()=>{};#O=()=>{};#j=()=>{};#g=()=>!1;#V(){let t=new O(this.#l);this.#_=0,this.#b=t,this.#R=e=>{this.#_-=t[e],t[e]=0},this.#N=(e,i,s,n)=>{if(this.#e(i))return 0;if(!y(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!y(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#I=(e,i,s)=>{if(t[e]=i,this.#c){let n=this.#c-t[e];for(;this.#_>n;)this.#U(!0)}this.#_+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#_)}}#R=t=>{};#I=(t,e,i)=>{};#N=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#o));)e=this.#u[e]}*#T({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#P(e)||((t||!this.#g(e))&&(yield e),e===this.#h));)e=this.#a[e]}#P(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#T())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#T()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#T())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#T()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#T({allowStale:!0}))this.#g(e)&&(this.#E(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#d&&this.#y){let h=this.#d[e],o=this.#y[e];if(h&&o){let r=h-(this.#S.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#F({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#d&&this.#y){h.ttl=this.#d[e];let o=this.#S.now()-this.#y[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=this.#S.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,_=this.#N(t,e,i.size||0,o);if(this.maxEntrySize&&_>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#E(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#m.length!==0?this.#m.pop():this.#n===this.#l?this.#U(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#a[this.#h]=f,this.#u[f]=this.#h,this.#h=f,this.#n++,this.#I(f,_,r),r&&(r.set="add"),g=!1,this.#x&&this.#v?.(e,t,"add");else{this.#W(f);let c=this.#t[f];if(e!==c){if(this.#z&&this.#e(c)){c.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:u}=c;u!==void 
0&&!h&&(this.#A&&this.#p?.(u,t,"set"),this.#f&&this.#r?.push([u,t,"set"]))}else h||(this.#A&&this.#p?.(c,t,"set"),this.#f&&this.#r?.push([c,t,"set"]));if(this.#R(f),this.#I(f,_,r),this.#t[f]=e,r){r.set="replace";let u=c&&this.#e(c)?c.__staleWhileFetching:c;u!==void 0&&(r.oldValue=u)}}else r&&(r.set="update");this.#x&&this.onInsert?.(e,t,e===c?"update":"replace")}if(s!==0&&!this.#d&&this.#G(),this.#d&&(g||this.#j(f,s,n),r&&this.#O(r,f)),!h&&this.#f&&this.#r){let c=this.#r,u;for(;u=c?.shift();)this.#w?.(...u)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#U(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#f&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#U(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#z&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(s,i,"evict"),this.#f&&this.#r?.push([s,i,"evict"])),this.#R(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#m.push(e)),this.#n===1?(this.#o=this.#h=0,this.#m.length=0):this.#o=this.#a[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#g(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#C(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#g(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#M(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new C,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,m=!1)=>{let{aborted:l}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(l&&!m?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),l&&!w&&!m)return f(h.signal.reason);let b=u;return this.#t[e]===u&&(d===void 0?b.__staleWhileFetching!==void 0?this.#t[e]=b.__staleWhileFetching:this.#E(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},_=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:m}=h.signal,l=m&&i.allowStaleOnFetchAbort,w=l||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=u;if(this.#t[e]===u&&(!b||p.__staleWhileFetching===void 0?this.#E(t,"fetch"):l||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},c=(d,m)=>{let l=this.#D?.(t,n,r);l&&l instanceof Promise&&l.then(w=>d(w===void 0?void 0:w),m),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let u=new Promise(c).then(g,_),A=Object.assign(u,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,A,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=A,A}#e(t){if(!this.#z)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof C}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:_=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:c=this.allowStaleOnFetchRejection,ignoreFetchAbort:u=this.ignoreFetchAbort,allowStaleOnFetchAbort:A=this.allowStaleOnFetchAbort,context:d,forceRefresh:m=!1,status:l,signal:w}=e;if(!this.#z)return l&&(l.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:l});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:_,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:c,allowStaleOnFetchAbort:A,ignoreFetchAbort:u,status:l,signal:w},p=this.#s.get(t);if(p===void 0){l&&(l.fetch="miss");let S=this.#M(t,p,b,d);return S.__returned=S}else{let S=this.#t[p];if(this.#e(S)){let E=i&&S.__staleWhileFetching!==void 0;return l&&(l.fetch="inflight",E&&(l.returnedStale=!0)),E?S.__staleWhileFetching:S.__returned=S}let z=this.#g(p);if(!m&&!z)return l&&(l.fetch="hit"),this.#W(p),s&&this.#C(p),l&&this.#O(l,p),S;let F=this.#M(t,p,b,d),T=F.__staleWhileFetching!==void 0&&i;return l&&(l.fetch=z?"stale":"refresh",T&&z&&(l.returnedStale=!0)),T?F.__staleWhileFetching:F.__returned=F}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#L;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#g(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#E(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#W(o),s&&this.#C(o),r))}else h&&(h.get="miss")}#H(t,e){this.#u[e]=t,this.#a[t]=e}#W(t){t!==this.#h&&(t===this.#o?this.#o=this.#a[t]:this.#H(this.#u[t],this.#a[t]),this.#H(this.#h,t),this.#h=t)}delete(t){return this.#E(t,"delete")}#E(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#k(e);else{this.#R(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#A||this.#f)&&(this.#A&&this.#p?.(n,t,e),this.#f&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#u[s];else if(s===this.#o)this.#o=this.#a[s];else{let h=this.#u[s];this.#a[h]=this.#a[s];let o=this.#a[s];this.#u[o]=this.#u[s]}this.#n--,this.#m.push(s)}}if(this.#f&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#k("delete")}#k(t){for(let e of this.#T({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#A&&this.#p?.(i,s,t),this.#f&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#d&&this.#y&&(this.#d.fill(0),this.#y.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#m.length=0,this.#_=0,this.#n=0,this.#f&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{L as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json b/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
deleted file mode 100644
index 4953bdf4a7a35..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,113 +0,0 @@
-{
-  "name": "lru-cache",
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^24.3.0",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
-    "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/LICENSE b/node_modules/node-gyp/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/lru-cache/LICENSE
rename to node_modules/node-gyp/node_modules/lru-cache/LICENSE
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.js
similarity index 94%
rename from node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js
rename to node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.js
index 921b8f10f71b1..0589231885c68 100644
--- a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/index.js
+++ b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.js
@@ -4,20 +4,18 @@
  */
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
+const perf = typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function') ?
-    performance
+    typeof performance.now === 'function'
+    ? performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -81,11 +79,16 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -144,17 +147,9 @@ class LRUCache {
     #max;
     #maxSize;
     #dispose;
-    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -233,7 +228,6 @@ class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
-    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -310,12 +304,6 @@ class LRUCache {
     get dispose() {
         return this.#dispose;
     }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -323,13 +311,7 @@ class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -373,9 +355,6 @@ class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -385,7 +364,6 @@ class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -410,8 +388,8 @@ class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -447,7 +425,7 @@ class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -465,7 +443,7 @@ class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -485,7 +463,7 @@ class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = this.#perf.now();
+            const n = perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -722,7 +700,9 @@ class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -744,7 +724,9 @@ class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -757,7 +739,9 @@ class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -794,18 +778,17 @@ class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        const value = this.#isBackgroundFetch(v)
+            ? v.__staleWhileFetching
+            : v;
         if (value === undefined)
             return undefined;
-        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
+                const remain = ttl - (perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -817,7 +800,7 @@ class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
+     * passed to {@link LRUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -833,7 +816,9 @@ class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -841,7 +826,7 @@ class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
+                const age = perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -871,7 +856,7 @@ class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
+                entry.start = perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -928,9 +913,12 @@ class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -943,9 +931,6 @@ class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
         }
         else {
             // update
@@ -977,8 +962,8 @@ class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -987,9 +972,6 @@ class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1172,7 +1154,7 @@ class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
+                    if (bf.__staleWhileFetching) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
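
The hunks above roll the vendored copy back to a build that reads timestamps from the module-level `perf` constant rather than a per-instance `#perf` field, and that has no `onInsert` hook. As the dump() doc comment notes, entry `start` fields are serialized relative to `Date.now()`, so a dump taken by one cache can be loaded by another instance or process. A minimal sketch of that round trip, assuming the vendored package resolves as `lru-cache` (the names below are illustrative, not part of the patch):

    const { LRUCache } = require('lru-cache')

    const a = new LRUCache({ max: 10, ttl: 60_000 })
    a.set('token', 'abc123')

    // dump() emits [key, entry] tuples; entry.start is Date.now()-relative
    const snapshot = a.dump()

    const b = new LRUCache({ max: 10, ttl: 60_000 })
    b.load(snapshot)            // remaining TTL carries over
    console.log(b.get('token')) // 'abc123' until the original TTL expires
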
diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ad643b0badc90
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let 
y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/commonjs/package.json
rename to node_modules/node-gyp/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/lru-cache/dist/esm/index.js b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.js
similarity index 94%
rename from node_modules/cacache/node_modules/lru-cache/dist/esm/index.js
rename to node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.js
index 8fd8fc5f31507..555654a57c4d7 100644
--- a/node_modules/cacache/node_modules/lru-cache/dist/esm/index.js
+++ b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.js
@@ -1,20 +1,18 @@
 /**
  * @module LRUCache
  */
-const defaultPerf = (typeof performance === 'object' &&
+const perf = typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function') ?
-    performance
+    typeof performance.now === 'function'
+    ? performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -78,11 +76,16 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -141,17 +144,9 @@ export class LRUCache {
     #max;
     #maxSize;
     #dispose;
-    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -230,7 +225,6 @@ export class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
-    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -307,12 +301,6 @@ export class LRUCache {
     get dispose() {
         return this.#dispose;
     }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -320,13 +308,7 @@ export class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -370,9 +352,6 @@ export class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -382,7 +361,6 @@ export class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -407,8 +385,8 @@ export class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -444,7 +422,7 @@ export class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -462,7 +440,7 @@ export class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -482,7 +460,7 @@ export class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = this.#perf.now();
+            const n = perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -719,7 +697,9 @@ export class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -741,7 +721,9 @@ export class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -754,7 +736,9 @@ export class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -791,18 +775,17 @@ export class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        const value = this.#isBackgroundFetch(v)
+            ? v.__staleWhileFetching
+            : v;
         if (value === undefined)
             return undefined;
-        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
+                const remain = ttl - (perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -814,7 +797,7 @@ export class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
+     * passed to {@link LRUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -830,7 +813,9 @@ export class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -838,7 +823,7 @@ export class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
+                const age = perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -868,7 +853,7 @@ export class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
+                entry.start = perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -925,9 +910,12 @@ export class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -940,9 +928,6 @@ export class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
         }
         else {
             // update
@@ -974,8 +959,8 @@ export class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -984,9 +969,6 @@ export class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1169,7 +1151,7 @@ export class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
+                    if (bf.__staleWhileFetching) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
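
The ESM build gets the same rollback: the `onInsert` option and its `#hasOnInsert` bookkeeping are removed, so insert/update notifications are no longer available as a constructor hook. A hedged sketch of one way a caller could approximate the removed `onInsert(value, key, reason)` callback on the rolled-back version, using only `has()` and `set()` (the wrapper name is illustrative, not part of the patch):

    import { LRUCache } from 'lru-cache'

    const cache = new LRUCache({ max: 100 })

    // stands in for the removed onInsert(value, key, reason) hook
    const setWithNotify = (key, value) => {
      const reason = cache.has(key) ? 'replace-or-update' : 'add'
      cache.set(key, value)
      console.log(`inserted ${key}: ${reason}`)
    }

    setWithNotify('a', 1) // add
    setWithNotify('a', 2) // replace-or-update
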
diff --git a/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..4571d0254e27d
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: 
"+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/package.json b/node_modules/node-gyp/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/lru-cache/dist/esm/package.json
rename to node_modules/node-gyp/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json b/node_modules/node-gyp/node_modules/lru-cache/package.json
similarity index 87%
rename from node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json
rename to node_modules/node-gyp/node_modules/lru-cache/package.json
index 4953bdf4a7a35..f3cd4c0cc53f7 100644
--- a/node_modules/@npmcli/map-workspaces/node_modules/lru-cache/package.json
+++ b/node_modules/node-gyp/node_modules/lru-cache/package.json
@@ -1,7 +1,10 @@
 {
   "name": "lru-cache",
+  "publishConfig": {
+    "tag": "legacy-v10"
+  },
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
+  "version": "10.4.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -49,25 +52,25 @@
     "url": "git://github.com/isaacs/node-lru-cache.git"
   },
   "devDependencies": {
-    "@types/node": "^24.3.0",
+    "@types/node": "^20.2.5",
+    "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
+    "esbuild": "^0.17.11",
+    "eslint-config-prettier": "^8.5.0",
     "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
+    "mkdirp": "^2.1.5",
+    "prettier": "^2.6.2",
+    "tap": "^20.0.3",
+    "tshy": "^2.0.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.25.3",
+    "typescript": "^5.2.2"
   },
   "license": "ISC",
   "files": [
     "dist"
   ],
-  "engines": {
-    "node": "20 || >=22"
-  },
   "prettier": {
-    "experimentalTernaries": true,
     "semi": false,
     "printWidth": 70,
     "tabWidth": 2,
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE b/node_modules/path-scurry/node_modules/lru-cache/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/LICENSE
rename to node_modules/path-scurry/node_modules/lru-cache/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js
similarity index 94%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js
rename to node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js
index 921b8f10f71b1..0589231885c68 100644
--- a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/index.js
+++ b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js
@@ -4,20 +4,18 @@
  */
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LRUCache = void 0;
-const defaultPerf = (typeof performance === 'object' &&
+const perf = typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function') ?
-    performance
+    typeof performance.now === 'function'
+    ? performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -81,11 +79,16 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -144,17 +147,9 @@ class LRUCache {
     #max;
     #maxSize;
     #dispose;
-    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -233,7 +228,6 @@ class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
-    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -310,12 +304,6 @@ class LRUCache {
     get dispose() {
         return this.#dispose;
     }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -323,13 +311,7 @@ class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -373,9 +355,6 @@ class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -385,7 +364,6 @@ class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -410,8 +388,8 @@ class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -447,7 +425,7 @@ class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -465,7 +443,7 @@ class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -485,7 +463,7 @@ class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = this.#perf.now();
+            const n = perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -722,7 +700,9 @@ class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -744,7 +724,9 @@ class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -757,7 +739,9 @@ class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -794,18 +778,17 @@ class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        const value = this.#isBackgroundFetch(v)
+            ? v.__staleWhileFetching
+            : v;
         if (value === undefined)
             return undefined;
-        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
+                const remain = ttl - (perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -817,7 +800,7 @@ class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
+     * passed to {@link LRLUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -833,7 +816,9 @@ class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -841,7 +826,7 @@ class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
+                const age = perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -871,7 +856,7 @@ class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
+                entry.start = perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -928,9 +913,12 @@ class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -943,9 +931,6 @@ class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
         }
         else {
             // update
@@ -977,8 +962,8 @@ class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -987,9 +972,6 @@ class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1172,7 +1154,7 @@ class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
+                    if (bf.__staleWhileFetching) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js
new file mode 100644
index 0000000000000..ad643b0badc90
--- /dev/null
+++ b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js
@@ -0,0 +1,2 @@
+"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let 
y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C;
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/commonjs/package.json
rename to node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/package.json
diff --git a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js
similarity index 94%
rename from node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js
rename to node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js
index 8fd8fc5f31507..555654a57c4d7 100644
--- a/node_modules/@npmcli/git/node_modules/lru-cache/dist/esm/index.js
+++ b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js
@@ -1,20 +1,18 @@
 /**
  * @module LRUCache
  */
-const defaultPerf = (typeof performance === 'object' &&
+const perf = typeof performance === 'object' &&
     performance &&
-    typeof performance.now === 'function') ?
-    performance
+    typeof performance.now === 'function'
+    ? performance
     : Date;
 const warned = new Set();
 /* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ?
-    process
-    : {});
+const PROCESS = (typeof process === 'object' && !!process ? process : {});
 /* c8 ignore start */
 const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function' ?
-        PROCESS.emitWarning(msg, type, code, fn)
+    typeof PROCESS.emitWarning === 'function'
+        ? PROCESS.emitWarning(msg, type, code, fn)
         : console.error(`[${code}] ${type}: ${msg}`);
 };
 let AC = globalThis.AbortController;
@@ -78,11 +76,16 @@ const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
 // zeroes at init time is brutal when you get that big.
 // But why not be complete?
 // Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max) ? null
-    : max <= Math.pow(2, 8) ? Uint8Array
-        : max <= Math.pow(2, 16) ? Uint16Array
-            : max <= Math.pow(2, 32) ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER ? ZeroArray
+const getUintArray = (max) => !isPosInt(max)
+    ? null
+    : max <= Math.pow(2, 8)
+        ? Uint8Array
+        : max <= Math.pow(2, 16)
+            ? Uint16Array
+            : max <= Math.pow(2, 32)
+                ? Uint32Array
+                : max <= Number.MAX_SAFE_INTEGER
+                    ? ZeroArray
                     : null;
 /* c8 ignore stop */
 class ZeroArray extends Array {
@@ -141,17 +144,9 @@ export class LRUCache {
     #max;
     #maxSize;
     #dispose;
-    #onInsert;
     #disposeAfter;
     #fetchMethod;
     #memoMethod;
-    #perf;
-    /**
-     * {@link LRUCache.OptionsBase.perf}
-     */
-    get perf() {
-        return this.#perf;
-    }
     /**
      * {@link LRUCache.OptionsBase.ttl}
      */
@@ -230,7 +225,6 @@ export class LRUCache {
     #hasDispose;
     #hasFetchMethod;
     #hasDisposeAfter;
-    #hasOnInsert;
     /**
      * Do not call this method unless you need to inspect the
      * inner workings of the cache.  If anything returned by this
@@ -307,12 +301,6 @@ export class LRUCache {
     get dispose() {
         return this.#dispose;
     }
-    /**
-     * {@link LRUCache.OptionsBase.onInsert} (read-only)
-     */
-    get onInsert() {
-        return this.#onInsert;
-    }
     /**
      * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
      */
@@ -320,13 +308,7 @@ export class LRUCache {
         return this.#disposeAfter;
     }
     constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, onInsert, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, perf, } = options;
-        if (perf !== undefined) {
-            if (typeof perf?.now !== 'function') {
-                throw new TypeError('perf option must have a now() method if specified');
-            }
-        }
-        this.#perf = perf ?? defaultPerf;
+        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
         if (max !== 0 && !isPosInt(max)) {
             throw new TypeError('max option must be a nonnegative integer');
         }
@@ -370,9 +352,6 @@ export class LRUCache {
         if (typeof dispose === 'function') {
             this.#dispose = dispose;
         }
-        if (typeof onInsert === 'function') {
-            this.#onInsert = onInsert;
-        }
         if (typeof disposeAfter === 'function') {
             this.#disposeAfter = disposeAfter;
             this.#disposed = [];
@@ -382,7 +361,6 @@ export class LRUCache {
             this.#disposed = undefined;
         }
         this.#hasDispose = !!this.#dispose;
-        this.#hasOnInsert = !!this.#onInsert;
         this.#hasDisposeAfter = !!this.#disposeAfter;
         this.noDisposeOnSet = !!noDisposeOnSet;
         this.noUpdateTTL = !!noUpdateTTL;
@@ -407,8 +385,8 @@ export class LRUCache {
         this.updateAgeOnGet = !!updateAgeOnGet;
         this.updateAgeOnHas = !!updateAgeOnHas;
         this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0 ?
-                ttlResolution
+            isPosInt(ttlResolution) || ttlResolution === 0
+                ? ttlResolution
                 : 1;
         this.ttlAutopurge = !!ttlAutopurge;
         this.ttl = ttl || 0;
@@ -444,7 +422,7 @@ export class LRUCache {
         const starts = new ZeroArray(this.#max);
         this.#ttls = ttls;
         this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = this.#perf.now()) => {
+        this.#setItemTTL = (index, ttl, start = perf.now()) => {
             starts[index] = ttl !== 0 ? start : 0;
             ttls[index] = ttl;
             if (ttl !== 0 && this.ttlAutopurge) {
@@ -462,7 +440,7 @@ export class LRUCache {
             }
         };
         this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? this.#perf.now() : 0;
+            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
         };
         this.#statusTTL = (status, index) => {
             if (ttls[index]) {
@@ -482,7 +460,7 @@ export class LRUCache {
         // that costly call repeatedly.
         let cachedNow = 0;
         const getNow = () => {
-            const n = this.#perf.now();
+            const n = perf.now();
             if (this.ttlResolution > 0) {
                 cachedNow = n;
                 const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
@@ -719,7 +697,9 @@ export class LRUCache {
     find(fn, getOptions = {}) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             if (fn(value, this.#keyList[i], this)) {
@@ -741,7 +721,9 @@ export class LRUCache {
     forEach(fn, thisp = this) {
         for (const i of this.#indexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -754,7 +736,9 @@ export class LRUCache {
     rforEach(fn, thisp = this) {
         for (const i of this.#rindexes()) {
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined)
                 continue;
             fn.call(thisp, value, this.#keyList[i], this);
@@ -791,18 +775,17 @@ export class LRUCache {
         if (i === undefined)
             return undefined;
         const v = this.#valList[i];
-        /* c8 ignore start - this isn't tested for the info function,
-         * but it's the same logic as found in other places. */
-        const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+        const value = this.#isBackgroundFetch(v)
+            ? v.__staleWhileFetching
+            : v;
         if (value === undefined)
             return undefined;
-        /* c8 ignore end */
         const entry = { value };
         if (this.#ttls && this.#starts) {
             const ttl = this.#ttls[i];
             const start = this.#starts[i];
             if (ttl && start) {
-                const remain = ttl - (this.#perf.now() - start);
+                const remain = ttl - (perf.now() - start);
                 entry.ttl = remain;
                 entry.start = Date.now();
             }
@@ -814,7 +797,7 @@ export class LRUCache {
     }
     /**
      * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
+     * passed to {@link LRLUCache#load}.
      *
      * The `start` fields are calculated relative to a portable `Date.now()`
      * timestamp, even if `performance.now()` is available.
@@ -830,7 +813,9 @@ export class LRUCache {
         for (const i of this.#indexes({ allowStale: true })) {
             const key = this.#keyList[i];
             const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
+            const value = this.#isBackgroundFetch(v)
+                ? v.__staleWhileFetching
+                : v;
             if (value === undefined || key === undefined)
                 continue;
             const entry = { value };
@@ -838,7 +823,7 @@ export class LRUCache {
                 entry.ttl = this.#ttls[i];
                 // always dump the start relative to a portable timestamp
                 // it's ok for this to be a bit slow, it's a rare operation.
-                const age = this.#perf.now() - this.#starts[i];
+                const age = perf.now() - this.#starts[i];
                 entry.start = Math.floor(Date.now() - age);
             }
             if (this.#sizes) {
@@ -868,7 +853,7 @@ export class LRUCache {
                 //
                 // it's ok for this to be a bit slow, it's a rare operation.
                 const age = Date.now() - entry.start;
-                entry.start = this.#perf.now() - age;
+                entry.start = perf.now() - age;
             }
             this.set(key, entry.value, entry);
         }
@@ -925,9 +910,12 @@ export class LRUCache {
         let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
         if (index === undefined) {
             // addition
-            index = (this.#size === 0 ? this.#tail
-                : this.#free.length !== 0 ? this.#free.pop()
-                    : this.#size === this.#max ? this.#evict(false)
+            index = (this.#size === 0
+                ? this.#tail
+                : this.#free.length !== 0
+                    ? this.#free.pop()
+                    : this.#size === this.#max
+                        ? this.#evict(false)
                         : this.#size);
             this.#keyList[index] = k;
             this.#valList[index] = v;
@@ -940,9 +928,6 @@ export class LRUCache {
             if (status)
                 status.set = 'add';
             noUpdateTTL = false;
-            if (this.#hasOnInsert) {
-                this.#onInsert?.(v, k, 'add');
-            }
         }
         else {
             // update
@@ -974,8 +959,8 @@ export class LRUCache {
                 this.#valList[index] = v;
                 if (status) {
                     status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal) ?
-                        oldVal.__staleWhileFetching
+                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
+                        ? oldVal.__staleWhileFetching
                         : oldVal;
                     if (oldValue !== undefined)
                         status.oldValue = oldValue;
@@ -984,9 +969,6 @@ export class LRUCache {
             else if (status) {
                 status.set = 'update';
             }
-            if (this.#hasOnInsert) {
-                this.onInsert?.(v, k, v === oldVal ? 'update' : 'replace');
-            }
         }
         if (ttl !== 0 && !this.#ttls) {
             this.#initializeTTLTracking();
@@ -1169,7 +1151,7 @@ export class LRUCache {
             const bf = p;
             if (this.#valList[index] === p) {
                 if (v === undefined) {
-                    if (bf.__staleWhileFetching !== undefined) {
+                    if (bf.__staleWhileFetching) {
                         this.#valList[index] = bf.__staleWhileFetching;
                     }
                     else {
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js
new file mode 100644
index 0000000000000..4571d0254e27d
--- /dev/null
+++ b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js
@@ -0,0 +1,2 @@
+var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
+//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/dist/esm/package.json
rename to node_modules/path-scurry/node_modules/lru-cache/dist/esm/package.json
diff --git a/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json b/node_modules/path-scurry/node_modules/lru-cache/package.json
similarity index 87%
rename from node_modules/@npmcli/package-json/node_modules/lru-cache/package.json
rename to node_modules/path-scurry/node_modules/lru-cache/package.json
index 4953bdf4a7a35..f3cd4c0cc53f7 100644
--- a/node_modules/@npmcli/package-json/node_modules/lru-cache/package.json
+++ b/node_modules/path-scurry/node_modules/lru-cache/package.json
@@ -1,7 +1,10 @@
 {
   "name": "lru-cache",
+  "publishConfig": {
+    "tag": "legacy-v10"
+  },
   "description": "A cache object that deletes the least-recently-used items.",
-  "version": "11.2.1",
+  "version": "10.4.3",
   "author": "Isaac Z. Schlueter ",
   "keywords": [
     "mru",
@@ -49,25 +52,25 @@
     "url": "git://github.com/isaacs/node-lru-cache.git"
   },
   "devDependencies": {
-    "@types/node": "^24.3.0",
+    "@types/node": "^20.2.5",
+    "@types/tap": "^15.0.6",
     "benchmark": "^2.1.4",
-    "esbuild": "^0.25.9",
+    "esbuild": "^0.17.11",
+    "eslint-config-prettier": "^8.5.0",
     "marked": "^4.2.12",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.6.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.12"
+    "mkdirp": "^2.1.5",
+    "prettier": "^2.6.2",
+    "tap": "^20.0.3",
+    "tshy": "^2.0.0",
+    "tslib": "^2.4.0",
+    "typedoc": "^0.25.3",
+    "typescript": "^5.2.2"
   },
   "license": "ISC",
   "files": [
     "dist"
   ],
-  "engines": {
-    "node": "20 || >=22"
-  },
   "prettier": {
-    "experimentalTernaries": true,
     "semi": false,
     "printWidth": 70,
     "tabWidth": 2,
diff --git a/package-lock.json b/package-lock.json
index ce454aa2e587d..688563ecb7729 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2958,6 +2958,11 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@npmcli/agent/node_modules/lru-cache": {
+      "version": "10.4.3",
+      "inBundle": true,
+      "license": "ISC"
+    },
     "node_modules/@npmcli/arborist": {
       "resolved": "workspaces/arborist",
       "link": true
@@ -3019,14 +3024,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/git/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/@npmcli/installed-package-contents": {
       "version": "3.0.0",
       "inBundle": true,
@@ -3092,14 +3089,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/@npmcli/map-workspaces/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/@npmcli/map-workspaces/node_modules/minimatch": {
       "version": "10.0.3",
       "inBundle": true,
@@ -3220,14 +3209,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/@npmcli/package-json/node_modules/minimatch": {
       "version": "10.0.3",
       "inBundle": true,
@@ -3418,8 +3399,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/git": {
       "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3481,8 +3460,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/ini": {
       "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3524,8 +3501,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
-      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3544,8 +3519,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn": {
       "version": "8.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz",
-      "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3557,8 +3530,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-install-checks": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
-      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -3570,8 +3541,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-normalize-package-bin": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz",
-      "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3580,8 +3549,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
       "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
-      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3596,8 +3563,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/proc-log": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-5.0.0.tgz",
-      "integrity": "sha512-Azwzvl90HaF0aCz1JrDdXQykFakSSNPaPoiZ9fm5qJIMHioDZEi7OAdRwSm6rSoPtY3Qutnm3L7ogmg3dc+wbQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3606,8 +3571,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/which": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz",
-      "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3769,8 +3732,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/git": {
       "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3818,8 +3779,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/ini": {
       "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3996,6 +3955,13 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/lru-cache": {
+      "version": "10.4.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
+      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/@npmcli/template-oss/node_modules/make-fetch-happen": {
       "version": "13.0.1",
       "dev": true,
@@ -4302,8 +4268,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/git": {
       "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4351,8 +4315,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/ini": {
       "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5504,14 +5466,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/cacache/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/cacache/node_modules/minimatch": {
       "version": "10.0.3",
       "inBundle": true,
@@ -8404,14 +8358,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/hosted-git-info/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/html-encoding-sniffer": {
       "version": "4.0.0",
       "dev": true,
@@ -9693,9 +9639,12 @@
       }
     },
     "node_modules/lru-cache": {
-      "version": "10.4.3",
+      "version": "11.2.1",
       "inBundle": true,
-      "license": "ISC"
+      "license": "ISC",
+      "engines": {
+        "node": "20 || >=22"
+      }
     },
     "node_modules/make-dir": {
       "version": "3.1.0",
@@ -9755,14 +9704,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/make-fetch-happen/node_modules/lru-cache": {
-      "version": "11.2.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "node_modules/make-fetch-happen/node_modules/negotiator": {
       "version": "1.0.0",
       "inBundle": true,
@@ -10956,6 +10897,11 @@
         "node": ">=18"
       }
     },
+    "node_modules/node-gyp/node_modules/lru-cache": {
+      "version": "10.4.3",
+      "inBundle": true,
+      "license": "ISC"
+    },
     "node_modules/node-gyp/node_modules/make-fetch-happen": {
       "version": "14.0.3",
       "inBundle": true,
@@ -11934,6 +11880,11 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/path-scurry/node_modules/lru-cache": {
+      "version": "10.4.3",
+      "inBundle": true,
+      "license": "ISC"
+    },
     "node_modules/picocolors": {
       "version": "1.1.1",
       "dev": true,
@@ -12091,8 +12042,6 @@
     },
     "node_modules/promise-inflight": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz",
-      "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==",
       "dev": true,
       "license": "ISC"
     },
@@ -16957,14 +16906,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "smoke-tests/node_modules/lru-cache": {
-      "version": "11.1.0",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "20 || >=22"
-      }
-    },
     "smoke-tests/node_modules/minimatch": {
       "version": "10.0.3",
       "dev": true,
@@ -17033,7 +16974,7 @@
         "common-ancestor-path": "^1.0.1",
         "hosted-git-info": "^9.0.0",
         "json-stringify-nice": "^1.1.4",
-        "lru-cache": "^10.2.2",
+        "lru-cache": "^11.2.1",
         "minimatch": "^9.0.4",
         "nopt": "^8.0.0",
         "npm-install-checks": "^7.1.0",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 8a23dedfa2dd8..ba306144941c8 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -19,7 +19,7 @@
     "common-ancestor-path": "^1.0.1",
     "hosted-git-info": "^9.0.0",
     "json-stringify-nice": "^1.1.4",
-    "lru-cache": "^10.2.2",
+    "lru-cache": "^11.2.1",
     "minimatch": "^9.0.4",
     "nopt": "^8.0.0",
     "npm-install-checks": "^7.1.0",

From 24252a16fc45bfa6a4c1112269016568484006e1 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:01:44 -0700
Subject: [PATCH 23/63] deps: @npmcli/agent@4.0.0

---
 .../agent/node_modules/lru-cache/LICENSE      |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1546 -----------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1542 ----------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../agent/node_modules/lru-cache/package.json |  116 --
 node_modules/@npmcli/agent/package.json       |   14 +-
 .../node_modules/@npmcli/agent/lib/agents.js  |  206 ---
 .../node_modules/@npmcli/agent/lib/dns.js     |   53 -
 .../node_modules/@npmcli/agent/lib/errors.js  |   61 -
 .../node_modules/@npmcli/agent/lib/index.js   |   56 -
 .../node_modules/@npmcli/agent/lib/options.js |   86 -
 .../node_modules/@npmcli/agent/lib/proxy.js   |   88 -
 .../node_modules/@npmcli/agent/package.json   |   60 -
 package-lock.json                             |   45 +-
 17 files changed, 29 insertions(+), 3869 deletions(-)
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/agent/node_modules/lru-cache/package.json
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json

diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE b/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 0589231885c68..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1546 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        const value = this.#isBackgroundFetch(v)
-            ? v.__staleWhileFetching
-            : v;
-        if (value === undefined)
-            return undefined;
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet;
-                // it's not stale, so this isn't a stale-while-refetching situation.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ad643b0badc90..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let 
y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 555654a57c4d7..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1542 +0,0 @@
-/**
- * @module LRUCache
- */
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        const value = this.#isBackgroundFetch(v)
-            ? v.__staleWhileFetching
-            : v;
-        if (value === undefined)
-            return undefined;
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRLUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 4571d0254e27d..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: 
"+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json b/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
deleted file mode 100644
index f3cd4c0cc53f7..0000000000000
--- a/node_modules/@npmcli/agent/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,116 +0,0 @@
-{
-  "name": "lru-cache",
-  "publishConfig": {
-    "tag": "legacy-v10"
-  },
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.4.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^20.2.5",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.17.11",
-    "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
-    "prettier": "^2.6.2",
-    "tap": "^20.0.3",
-    "tshy": "^2.0.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.25.3",
-    "typescript": "^5.2.2"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/agent/package.json b/node_modules/@npmcli/agent/package.json
index 4d648fb5dfe05..67670a0c1c484 100644
--- a/node_modules/@npmcli/agent/package.json
+++ b/node_modules/@npmcli/agent/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/agent",
-  "version": "3.0.0",
+  "version": "4.0.0",
   "description": "the http/https agent used by the npm cli",
   "main": "lib/index.js",
   "scripts": {
@@ -25,25 +25,25 @@
     "lib/"
   ],
   "engines": {
-    "node": "^18.17.0 || >=20.5.0"
+    "node": "^20.17.0 || >=22.9.0"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.1",
+    "version": "4.25.0",
     "publish": "true"
   },
   "dependencies": {
     "agent-base": "^7.1.0",
     "http-proxy-agent": "^7.0.0",
     "https-proxy-agent": "^7.0.1",
-    "lru-cache": "^10.0.1",
+    "lru-cache": "^11.2.1",
     "socks-proxy-agent": "^8.0.3"
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.1",
-    "minipass-fetch": "^3.0.3",
-    "nock": "^13.2.7",
+    "@npmcli/template-oss": "4.25.0",
+    "minipass-fetch": "^4.0.1",
+    "nock": "^14.0.3",
     "socksv5": "^0.0.6",
     "tap": "^16.3.0"
   },
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js
deleted file mode 100644
index c541b93001517..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/agents.js
+++ /dev/null
@@ -1,206 +0,0 @@
-'use strict'
-
-const net = require('net')
-const tls = require('tls')
-const { once } = require('events')
-const timers = require('timers/promises')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
-const Errors = require('./errors.js')
-const { Agent: AgentBase } = require('agent-base')
-
-module.exports = class Agent extends AgentBase {
-  #options
-  #timeouts
-  #proxy
-  #noProxy
-  #ProxyAgent
-
-  constructor (options = {}) {
-    const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
-
-    super(normalizedOptions)
-
-    this.#options = normalizedOptions
-    this.#timeouts = timeouts
-
-    if (proxy) {
-      this.#proxy = new URL(proxy)
-      this.#noProxy = noProxy
-      this.#ProxyAgent = getProxyAgent(proxy)
-    }
-  }
-
-  get proxy () {
-    return this.#proxy ? { url: this.#proxy } : {}
-  }
-
-  #getProxy (options) {
-    if (!this.#proxy) {
-      return
-    }
-
-    const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
-      proxy: this.#proxy,
-      noProxy: this.#noProxy,
-    })
-
-    if (!proxy) {
-      return
-    }
-
-    const cacheKey = cacheOptions({
-      ...options,
-      ...this.#options,
-      timeouts: this.#timeouts,
-      proxy,
-    })
-
-    if (proxyCache.has(cacheKey)) {
-      return proxyCache.get(cacheKey)
-    }
-
-    let ProxyAgent = this.#ProxyAgent
-    if (Array.isArray(ProxyAgent)) {
-      ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
-    }
-
-    const proxyAgent = new ProxyAgent(proxy, {
-      ...this.#options,
-      socketOptions: { family: this.#options.family },
-    })
-    proxyCache.set(cacheKey, proxyAgent)
-
-    return proxyAgent
-  }
-
-  // takes an array of promises and races them against the connection timeout
-  // which will throw the necessary error if it is hit. This will return the
-  // result of the promise race.
-  async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
-    if (timeout) {
-      const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
-        .then(() => {
-          throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
-        }).catch((err) => {
-          if (err.name === 'AbortError') {
-            return
-          }
-          throw err
-        })
-      promises.push(connectionTimeout)
-    }
-
-    let result
-    try {
-      result = await Promise.race(promises)
-      ac.abort()
-    } catch (err) {
-      ac.abort()
-      throw err
-    }
-    return result
-  }
-
-  async connect (request, options) {
-    // if the connection does not have its own lookup function
-    // set, then use the one from our options
-    options.lookup ??= this.#options.lookup
-
-    let socket
-    let timeout = this.#timeouts.connection
-    const isSecureEndpoint = this.isSecureEndpoint(options)
-
-    const proxy = this.#getProxy(options)
-    if (proxy) {
-      // some of the proxies will wait for the socket to fully connect before
-      // returning so we have to await this while also racing it against the
-      // connection timeout.
-      const start = Date.now()
-      socket = await this.#timeoutConnection({
-        options,
-        timeout,
-        promises: [proxy.connect(request, options)],
-      })
-      // see how much time proxy.connect took and subtract it from
-      // the timeout
-      if (timeout) {
-        timeout = timeout - (Date.now() - start)
-      }
-    } else {
-      socket = (isSecureEndpoint ? tls : net).connect(options)
-    }
-
-    socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
-    socket.setNoDelay(this.keepAlive)
-
-    const abortController = new AbortController()
-    const { signal } = abortController
-
-    const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
-      ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
-      : Promise.resolve()
-
-    await this.#timeoutConnection({
-      options,
-      timeout,
-      promises: [
-        connectPromise,
-        once(socket, 'error', { signal }).then((err) => {
-          throw err[0]
-        }),
-      ],
-    }, abortController)
-
-    if (this.#timeouts.idle) {
-      socket.setTimeout(this.#timeouts.idle, () => {
-        socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
-      })
-    }
-
-    return socket
-  }
-
-  addRequest (request, options) {
-    const proxy = this.#getProxy(options)
-    // it would be better to call proxy.addRequest here but this causes the
-    // http-proxy-agent to call its super.addRequest which causes the request
-    // to be added to the agent twice. since we only support 3 agents
-    // currently (see the required agents in proxy.js) we have manually
-    // checked that the only public methods we need to call are called in the
-    // next block. this could change in the future and presumably we would get
-    // failing tests until we have properly called the necessary methods on
-    // each of our proxy agents
-    if (proxy?.setRequestProps) {
-      proxy.setRequestProps(request, options)
-    }
-
-    request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
-
-    if (this.#timeouts.response) {
-      let responseTimeout
-      request.once('finish', () => {
-        setTimeout(() => {
-          request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
-        }, this.#timeouts.response)
-      })
-      request.once('response', () => {
-        clearTimeout(responseTimeout)
-      })
-    }
-
-    if (this.#timeouts.transfer) {
-      let transferTimeout
-      request.once('response', (res) => {
-        setTimeout(() => {
-          res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
-        }, this.#timeouts.transfer)
-        res.once('close', () => {
-          clearTimeout(transferTimeout)
-        })
-      })
-    }
-
-    return super.addRequest(request, options)
-  }
-}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js
deleted file mode 100644
index 3c6946c566d73..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/dns.js
+++ /dev/null
@@ -1,53 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const dns = require('dns')
-
-// this is a factory so that each request can have its own opts (i.e. ttl)
-// while still sharing the cache across all requests
-const cache = new LRUCache({ max: 50 })
-
-const getOptions = ({
-  family = 0,
-  hints = dns.ADDRCONFIG,
-  all = false,
-  verbatim = undefined,
-  ttl = 5 * 60 * 1000,
-  lookup = dns.lookup,
-}) => ({
-  // hints and lookup are returned since both are top level properties to (net|tls).connect
-  hints,
-  lookup: (hostname, ...args) => {
-    const callback = args.pop() // callback is always last arg
-    const lookupOptions = args[0] ?? {}
-
-    const options = {
-      family,
-      hints,
-      all,
-      verbatim,
-      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
-    }
-
-    const key = JSON.stringify({ hostname, ...options })
-
-    if (cache.has(key)) {
-      const cached = cache.get(key)
-      return process.nextTick(callback, null, ...cached)
-    }
-
-    lookup(hostname, options, (err, ...result) => {
-      if (err) {
-        return callback(err)
-      }
-
-      cache.set(key, result, { ttl })
-      return callback(null, ...result)
-    })
-  },
-})
-
-module.exports = {
-  cache,
-  getOptions,
-}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js
deleted file mode 100644
index 70475aec8eb35..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/errors.js
+++ /dev/null
@@ -1,61 +0,0 @@
-'use strict'
-
-class InvalidProxyProtocolError extends Error {
-  constructor (url) {
-    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
-    this.code = 'EINVALIDPROXY'
-    this.proxy = url
-  }
-}
-
-class ConnectionTimeoutError extends Error {
-  constructor (host) {
-    super(`Timeout connecting to host \`${host}\``)
-    this.code = 'ECONNECTIONTIMEOUT'
-    this.host = host
-  }
-}
-
-class IdleTimeoutError extends Error {
-  constructor (host) {
-    super(`Idle timeout reached for host \`${host}\``)
-    this.code = 'EIDLETIMEOUT'
-    this.host = host
-  }
-}
-
-class ResponseTimeoutError extends Error {
-  constructor (request, proxy) {
-    let msg = 'Response timeout '
-    if (proxy) {
-      msg += `from proxy \`${proxy.host}\` `
-    }
-    msg += `connecting to host \`${request.host}\``
-    super(msg)
-    this.code = 'ERESPONSETIMEOUT'
-    this.proxy = proxy
-    this.request = request
-  }
-}
-
-class TransferTimeoutError extends Error {
-  constructor (request, proxy) {
-    let msg = 'Transfer timeout '
-    if (proxy) {
-      msg += `from proxy \`${proxy.host}\` `
-    }
-    msg += `for \`${request.host}\``
-    super(msg)
-    this.code = 'ETRANSFERTIMEOUT'
-    this.proxy = proxy
-    this.request = request
-  }
-}
-
-module.exports = {
-  InvalidProxyProtocolError,
-  ConnectionTimeoutError,
-  IdleTimeoutError,
-  ResponseTimeoutError,
-  TransferTimeoutError,
-}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js
deleted file mode 100644
index b33d6eaef07a2..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/index.js
+++ /dev/null
@@ -1,56 +0,0 @@
-'use strict'
-
-const { LRUCache } = require('lru-cache')
-const { normalizeOptions, cacheOptions } = require('./options')
-const { getProxy, proxyCache } = require('./proxy.js')
-const dns = require('./dns.js')
-const Agent = require('./agents.js')
-
-const agentCache = new LRUCache({ max: 20 })
-
-const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
-  // false has meaning so this can't be a simple truthiness check
-  if (agent != null) {
-    return agent
-  }
-
-  url = new URL(url)
-
-  const proxyForUrl = getProxy(url, { proxy, noProxy })
-  const normalizedOptions = {
-    ...normalizeOptions(options),
-    proxy: proxyForUrl,
-  }
-
-  const cacheKey = cacheOptions({
-    ...normalizedOptions,
-    secureEndpoint: url.protocol === 'https:',
-  })
-
-  if (agentCache.has(cacheKey)) {
-    return agentCache.get(cacheKey)
-  }
-
-  const newAgent = new Agent(normalizedOptions)
-  agentCache.set(cacheKey, newAgent)
-
-  return newAgent
-}
-
-module.exports = {
-  getAgent,
-  Agent,
-  // these are exported for backwards compatability
-  HttpAgent: Agent,
-  HttpsAgent: Agent,
-  cache: {
-    proxy: proxyCache,
-    agent: agentCache,
-    dns: dns.cache,
-    clear: () => {
-      proxyCache.clear()
-      agentCache.clear()
-      dns.cache.clear()
-    },
-  },
-}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js
deleted file mode 100644
index 0bf53f725f084..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/options.js
+++ /dev/null
@@ -1,86 +0,0 @@
-'use strict'
-
-const dns = require('./dns')
-
-const normalizeOptions = (opts) => {
-  const family = parseInt(opts.family ?? '0', 10)
-  const keepAlive = opts.keepAlive ?? true
-
-  const normalized = {
-    // nodejs http agent options. these are all the defaults
-    // but kept here to increase the likelihood of cache hits
-    // https://nodejs.org/api/http.html#new-agentoptions
-    keepAliveMsecs: keepAlive ? 1000 : undefined,
-    maxSockets: opts.maxSockets ?? 15,
-    maxTotalSockets: Infinity,
-    maxFreeSockets: keepAlive ? 256 : undefined,
-    scheduling: 'fifo',
-    // then spread the rest of the options
-    ...opts,
-    // we already set these to their defaults that we want
-    family,
-    keepAlive,
-    // our custom timeout options
-    timeouts: {
-      // the standard timeout option is mapped to our idle timeout
-      // and then deleted below
-      idle: opts.timeout ?? 0,
-      connection: 0,
-      response: 0,
-      transfer: 0,
-      ...opts.timeouts,
-    },
-    // get the dns options that go at the top level of socket connection
-    ...dns.getOptions({ family, ...opts.dns }),
-  }
-
-  // remove timeout since we already used it to set our own idle timeout
-  delete normalized.timeout
-
-  return normalized
-}
-
-const createKey = (obj) => {
-  let key = ''
-  const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
-  for (let [k, v] of sorted) {
-    if (v == null) {
-      v = 'null'
-    } else if (v instanceof URL) {
-      v = v.toString()
-    } else if (typeof v === 'object') {
-      v = createKey(v)
-    }
-    key += `${k}:${v}:`
-  }
-  return key
-}
-
-const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
-  secureEndpoint: !!secureEndpoint,
-  // socket connect options
-  family: options.family,
-  hints: options.hints,
-  localAddress: options.localAddress,
-  // tls specific connect options
-  strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
-  ca: secureEndpoint ? options.ca : null,
-  cert: secureEndpoint ? options.cert : null,
-  key: secureEndpoint ? options.key : null,
-  // http agent options
-  keepAlive: options.keepAlive,
-  keepAliveMsecs: options.keepAliveMsecs,
-  maxSockets: options.maxSockets,
-  maxTotalSockets: options.maxTotalSockets,
-  maxFreeSockets: options.maxFreeSockets,
-  scheduling: options.scheduling,
-  // timeout options
-  timeouts: options.timeouts,
-  // proxy
-  proxy: options.proxy,
-})
-
-module.exports = {
-  normalizeOptions,
-  cacheOptions,
-}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js
deleted file mode 100644
index 6272e929e57bc..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/lib/proxy.js
+++ /dev/null
@@ -1,88 +0,0 @@
-'use strict'
-
-const { HttpProxyAgent } = require('http-proxy-agent')
-const { HttpsProxyAgent } = require('https-proxy-agent')
-const { SocksProxyAgent } = require('socks-proxy-agent')
-const { LRUCache } = require('lru-cache')
-const { InvalidProxyProtocolError } = require('./errors.js')
-
-const PROXY_CACHE = new LRUCache({ max: 20 })
-
-const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
-
-const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
-
-const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
-  key = key.toLowerCase()
-  if (PROXY_ENV_KEYS.has(key)) {
-    acc[key] = value
-  }
-  return acc
-}, {})
-
-const getProxyAgent = (url) => {
-  url = new URL(url)
-
-  const protocol = url.protocol.slice(0, -1)
-  if (SOCKS_PROTOCOLS.has(protocol)) {
-    return SocksProxyAgent
-  }
-  if (protocol === 'https' || protocol === 'http') {
-    return [HttpProxyAgent, HttpsProxyAgent]
-  }
-
-  throw new InvalidProxyProtocolError(url)
-}
-
-const isNoProxy = (url, noProxy) => {
-  if (typeof noProxy === 'string') {
-    noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
-  }
-
-  if (!noProxy || !noProxy.length) {
-    return false
-  }
-
-  const hostSegments = url.hostname.split('.').reverse()
-
-  return noProxy.some((no) => {
-    const noSegments = no.split('.').filter(Boolean).reverse()
-    if (!noSegments.length) {
-      return false
-    }
-
-    for (let i = 0; i < noSegments.length; i++) {
-      if (hostSegments[i] !== noSegments[i]) {
-        return false
-      }
-    }
-
-    return true
-  })
-}
-
-const getProxy = (url, { proxy, noProxy }) => {
-  url = new URL(url)
-
-  if (!proxy) {
-    proxy = url.protocol === 'https:'
-      ? PROXY_ENV.https_proxy
-      : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
-  }
-
-  if (!noProxy) {
-    noProxy = PROXY_ENV.no_proxy
-  }
-
-  if (!proxy || isNoProxy(url, noProxy)) {
-    return null
-  }
-
-  return new URL(proxy)
-}
-
-module.exports = {
-  getProxyAgent,
-  getProxy,
-  proxyCache: PROXY_CACHE,
-}
diff --git a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json b/node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json
deleted file mode 100644
index 67670a0c1c484..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/@npmcli/agent/package.json
+++ /dev/null
@@ -1,60 +0,0 @@
-{
-  "name": "@npmcli/agent",
-  "version": "4.0.0",
-  "description": "the http/https agent used by the npm cli",
-  "main": "lib/index.js",
-  "scripts": {
-    "gencerts": "bash scripts/create-cert.sh",
-    "test": "tap",
-    "lint": "npm run eslint",
-    "postlint": "template-oss-check",
-    "template-oss-apply": "template-oss-apply --force",
-    "lintfix": "npm run eslint -- --fix",
-    "snap": "tap",
-    "posttest": "npm run lint",
-    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
-  },
-  "author": "GitHub Inc.",
-  "license": "ISC",
-  "bugs": {
-    "url": "https://github.com/npm/agent/issues"
-  },
-  "homepage": "https://github.com/npm/agent#readme",
-  "files": [
-    "bin/",
-    "lib/"
-  ],
-  "engines": {
-    "node": "^20.17.0 || >=22.9.0"
-  },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.25.0",
-    "publish": "true"
-  },
-  "dependencies": {
-    "agent-base": "^7.1.0",
-    "http-proxy-agent": "^7.0.0",
-    "https-proxy-agent": "^7.0.1",
-    "lru-cache": "^11.2.1",
-    "socks-proxy-agent": "^8.0.3"
-  },
-  "devDependencies": {
-    "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.25.0",
-    "minipass-fetch": "^4.0.1",
-    "nock": "^14.0.3",
-    "socksv5": "^0.0.6",
-    "tap": "^16.3.0"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/agent.git"
-  },
-  "tap": {
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 688563ecb7729..1ce97778e7f04 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2944,25 +2944,22 @@
       }
     },
     "node_modules/@npmcli/agent": {
-      "version": "3.0.0",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-4.0.0.tgz",
+      "integrity": "sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "agent-base": "^7.1.0",
         "http-proxy-agent": "^7.0.0",
         "https-proxy-agent": "^7.0.1",
-        "lru-cache": "^10.0.1",
+        "lru-cache": "^11.2.1",
         "socks-proxy-agent": "^8.0.3"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/agent/node_modules/lru-cache": {
-      "version": "10.4.3",
-      "inBundle": true,
-      "license": "ISC"
-    },
     "node_modules/@npmcli/arborist": {
       "resolved": "workspaces/arborist",
       "link": true
@@ -9689,21 +9686,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/make-fetch-happen/node_modules/@npmcli/agent": {
-      "version": "4.0.0",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "agent-base": "^7.1.0",
-        "http-proxy-agent": "^7.0.0",
-        "https-proxy-agent": "^7.0.1",
-        "lru-cache": "^11.2.1",
-        "socks-proxy-agent": "^8.0.3"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
     "node_modules/make-fetch-happen/node_modules/negotiator": {
       "version": "1.0.0",
       "inBundle": true,
@@ -10867,6 +10849,23 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/node-gyp/node_modules/@npmcli/agent": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-3.0.0.tgz",
+      "integrity": "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "agent-base": "^7.1.0",
+        "http-proxy-agent": "^7.0.0",
+        "https-proxy-agent": "^7.0.1",
+        "lru-cache": "^10.0.1",
+        "socks-proxy-agent": "^8.0.3"
+      },
+      "engines": {
+        "node": "^18.17.0 || >=20.5.0"
+      }
+    },
     "node_modules/node-gyp/node_modules/cacache": {
       "version": "19.0.1",
       "inBundle": true,

From 38fa2c2e67bed4c6e69d894cdbed0175d30ad085 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:07:24 -0700
Subject: [PATCH 24/63] deps: negotiator@1.0.0

---
 .../node_modules/negotiator/HISTORY.md        | 114 -------
 .../node_modules/negotiator/LICENSE           |  24 --
 .../node_modules/negotiator/index.js          |  83 -----
 .../node_modules/negotiator/lib/charset.js    | 169 ----------
 .../node_modules/negotiator/lib/encoding.js   | 205 ------------
 .../node_modules/negotiator/lib/language.js   | 179 -----------
 .../node_modules/negotiator/lib/mediaType.js  | 294 ------------------
 .../node_modules/negotiator/package.json      |  43 ---
 .../node_modules/negotiator/HISTORY.md        | 114 -------
 .../node-gyp/node_modules/negotiator/LICENSE  |  24 --
 .../node-gyp/node_modules/negotiator/index.js |  83 -----
 .../node_modules/negotiator/lib/charset.js    | 169 ----------
 .../node_modules/negotiator/lib/encoding.js   | 205 ------------
 .../node_modules/negotiator/lib/language.js   | 179 -----------
 .../node_modules/negotiator/lib/mediaType.js  | 294 ------------------
 .../node_modules/negotiator/package.json      |  43 ---
 package-lock.json                             |  32 +-
 17 files changed, 14 insertions(+), 2240 deletions(-)
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/index.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
 delete mode 100644 node_modules/make-fetch-happen/node_modules/negotiator/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/HISTORY.md
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/LICENSE
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/index.js
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/lib/charset.js
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/lib/encoding.js
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/lib/language.js
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/lib/mediaType.js
 delete mode 100644 node_modules/node-gyp/node_modules/negotiator/package.json

diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md b/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
deleted file mode 100644
index 63d537d3f6811..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/HISTORY.md
+++ /dev/null
@@ -1,114 +0,0 @@
-1.0.0 / 2024-08-31
-==================
-
-  * Drop support for node <18
-  * Added an option preferred encodings array #59
-
-0.6.3 / 2022-01-22
-==================
-
-  * Revert "Lazy-load modules from main entry point"
-
-0.6.2 / 2019-04-29
-==================
-
-  * Fix sorting charset, encoding, and language with extra parameters
-
-0.6.1 / 2016-05-02
-==================
-
-  * perf: improve `Accept` parsing speed
-  * perf: improve `Accept-Charset` parsing speed
-  * perf: improve `Accept-Encoding` parsing speed
-  * perf: improve `Accept-Language` parsing speed
-
-0.6.0 / 2015-09-29
-==================
-
-  * Fix including type extensions in parameters in `Accept` parsing
-  * Fix parsing `Accept` parameters with quoted equals
-  * Fix parsing `Accept` parameters with quoted semicolons
-  * Lazy-load modules from main entry point
-  * perf: delay type concatenation until needed
-  * perf: enable strict mode
-  * perf: hoist regular expressions
-  * perf: remove closures getting spec properties
-  * perf: remove a closure from media type parsing
-  * perf: remove property delete from media type parsing
-
-0.5.3 / 2015-05-10
-==================
-
-  * Fix media type parameter matching to be case-insensitive
-
-0.5.2 / 2015-05-06
-==================
-
-  * Fix comparing media types with quoted values
-  * Fix splitting media types with quoted commas
-
-0.5.1 / 2015-02-14
-==================
-
-  * Fix preference sorting to be stable for long acceptable lists
-
-0.5.0 / 2014-12-18
-==================
-
-  * Fix list return order when large accepted list
-  * Fix missing identity encoding when q=0 exists
-  * Remove dynamic building of Negotiator class
-
-0.4.9 / 2014-10-14
-==================
-
-  * Fix error when media type has invalid parameter
-
-0.4.8 / 2014-09-28
-==================
-
-  * Fix all negotiations to be case-insensitive
-  * Stable sort preferences of same quality according to client order
-  * Support Node.js 0.6
-
-0.4.7 / 2014-06-24
-==================
-
-  * Handle invalid provided languages
-  * Handle invalid provided media types
-
-0.4.6 / 2014-06-11
-==================
-
-  *  Order by specificity when quality is the same
-
-0.4.5 / 2014-05-29
-==================
-
-  * Fix regression in empty header handling
-
-0.4.4 / 2014-05-29
-==================
-
-  * Fix behaviors when headers are not present
-
-0.4.3 / 2014-04-16
-==================
-
-  * Handle slashes on media params correctly
-
-0.4.2 / 2014-02-28
-==================
-
-  * Fix media type sorting
-  * Handle media types params strictly
-
-0.4.1 / 2014-01-16
-==================
-
-  * Use most specific matches
-
-0.4.0 / 2014-01-09
-==================
-
-  * Remove preferred prefix from methods
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE b/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
deleted file mode 100644
index ea6b9e2e9ac25..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2012-2014 Federico Romero
-Copyright (c) 2012-2014 Isaac Z. Schlueter
-Copyright (c) 2014-2015 Douglas Christopher Wilson
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/index.js b/node_modules/make-fetch-happen/node_modules/negotiator/index.js
deleted file mode 100644
index 4f51315d6af4b..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/*!
- * negotiator
- * Copyright(c) 2012 Federico Romero
- * Copyright(c) 2012-2014 Isaac Z. Schlueter
- * Copyright(c) 2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-var preferredCharsets = require('./lib/charset')
-var preferredEncodings = require('./lib/encoding')
-var preferredLanguages = require('./lib/language')
-var preferredMediaTypes = require('./lib/mediaType')
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = Negotiator;
-module.exports.Negotiator = Negotiator;
-
-/**
- * Create a Negotiator instance from a request.
- * @param {object} request
- * @public
- */
-
-function Negotiator(request) {
-  if (!(this instanceof Negotiator)) {
-    return new Negotiator(request);
-  }
-
-  this.request = request;
-}
-
-Negotiator.prototype.charset = function charset(available) {
-  var set = this.charsets(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.charsets = function charsets(available) {
-  return preferredCharsets(this.request.headers['accept-charset'], available);
-};
-
-Negotiator.prototype.encoding = function encoding(available, opts) {
-  var set = this.encodings(available, opts);
-  return set && set[0];
-};
-
-Negotiator.prototype.encodings = function encodings(available, options) {
-  var opts = options || {};
-  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
-};
-
-Negotiator.prototype.language = function language(available) {
-  var set = this.languages(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.languages = function languages(available) {
-  return preferredLanguages(this.request.headers['accept-language'], available);
-};
-
-Negotiator.prototype.mediaType = function mediaType(available) {
-  var set = this.mediaTypes(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.mediaTypes = function mediaTypes(available) {
-  return preferredMediaTypes(this.request.headers.accept, available);
-};
-
-// Backwards compatibility
-Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
-Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
-Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
-Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
-Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
-Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
-Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
-Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
deleted file mode 100644
index cdd014803474a..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/lib/charset.js
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredCharsets;
-module.exports.preferredCharsets = preferredCharsets;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Charset header.
- * @private
- */
-
-function parseAcceptCharset(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var charset = parseCharset(accepts[i].trim(), i);
-
-    if (charset) {
-      accepts[j++] = charset;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a charset from the Accept-Charset header.
- * @private
- */
-
-function parseCharset(str, i) {
-  var match = simpleCharsetRegExp.exec(str);
-  if (!match) return null;
-
-  var charset = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    charset: charset,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a charset.
- * @private
- */
-
-function getCharsetPriority(charset, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(charset, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the charset.
- * @private
- */
-
-function specify(charset, spec, index) {
-  var s = 0;
-  if(spec.charset.toLowerCase() === charset.toLowerCase()){
-    s |= 1;
-  } else if (spec.charset !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-}
-
-/**
- * Get the preferred charsets from an Accept-Charset header.
- * @public
- */
-
-function preferredCharsets(accept, provided) {
-  // RFC 2616 sec 14.2: no header = *
-  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all charsets
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullCharset);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getCharsetPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted charsets
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full charset string.
- * @private
- */
-
-function getFullCharset(spec) {
-  return spec.charset;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
deleted file mode 100644
index 9ebb633d67743..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/lib/encoding.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredEncodings;
-module.exports.preferredEncodings = preferredEncodings;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Encoding header.
- * @private
- */
-
-function parseAcceptEncoding(accept) {
-  var accepts = accept.split(',');
-  var hasIdentity = false;
-  var minQuality = 1;
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var encoding = parseEncoding(accepts[i].trim(), i);
-
-    if (encoding) {
-      accepts[j++] = encoding;
-      hasIdentity = hasIdentity || specify('identity', encoding);
-      minQuality = Math.min(minQuality, encoding.q || 1);
-    }
-  }
-
-  if (!hasIdentity) {
-    /*
-     * If identity doesn't explicitly appear in the accept-encoding header,
-     * it's added to the list of acceptable encoding with the lowest q
-     */
-    accepts[j++] = {
-      encoding: 'identity',
-      q: minQuality,
-      i: i
-    };
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse an encoding from the Accept-Encoding header.
- * @private
- */
-
-function parseEncoding(str, i) {
-  var match = simpleEncodingRegExp.exec(str);
-  if (!match) return null;
-
-  var encoding = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';');
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    encoding: encoding,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of an encoding.
- * @private
- */
-
-function getEncodingPriority(encoding, accepted, index) {
-  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(encoding, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the encoding.
- * @private
- */
-
-function specify(encoding, spec, index) {
-  var s = 0;
-  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
-    s |= 1;
-  } else if (spec.encoding !== '*' ) {
-    return null
-  }
-
-  return {
-    encoding: encoding,
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred encodings from an Accept-Encoding header.
- * @public
- */
-
-function preferredEncodings(accept, provided, preferred) {
-  var accepts = parseAcceptEncoding(accept || '');
-
-  var comparator = preferred ? function comparator (a, b) {
-    if (a.q !== b.q) {
-      return b.q - a.q // higher quality first
-    }
-
-    var aPreferred = preferred.indexOf(a.encoding)
-    var bPreferred = preferred.indexOf(b.encoding)
-
-    if (aPreferred === -1 && bPreferred === -1) {
-      // consider the original specifity/order
-      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
-    }
-
-    if (aPreferred !== -1 && bPreferred !== -1) {
-      return aPreferred - bPreferred // consider the preferred order
-    }
-
-    return aPreferred === -1 ? 1 : -1 // preferred first
-  } : compareSpecs;
-
-  if (!provided) {
-    // sorted list of all encodings
-    return accepts
-      .filter(isQuality)
-      .sort(comparator)
-      .map(getFullEncoding);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getEncodingPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted encodings
-  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
-}
-
-/**
- * Get full encoding string.
- * @private
- */
-
-function getFullEncoding(spec) {
-  return spec.encoding;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
deleted file mode 100644
index a23167252719b..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/lib/language.js
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredLanguages;
-module.exports.preferredLanguages = preferredLanguages;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Language header.
- * @private
- */
-
-function parseAcceptLanguage(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var language = parseLanguage(accepts[i].trim(), i);
-
-    if (language) {
-      accepts[j++] = language;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a language from the Accept-Language header.
- * @private
- */
-
-function parseLanguage(str, i) {
-  var match = simpleLanguageRegExp.exec(str);
-  if (!match) return null;
-
-  var prefix = match[1]
-  var suffix = match[2]
-  var full = prefix
-
-  if (suffix) full += "-" + suffix;
-
-  var q = 1;
-  if (match[3]) {
-    var params = match[3].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].split('=');
-      if (p[0] === 'q') q = parseFloat(p[1]);
-    }
-  }
-
-  return {
-    prefix: prefix,
-    suffix: suffix,
-    q: q,
-    i: i,
-    full: full
-  };
-}
-
-/**
- * Get the priority of a language.
- * @private
- */
-
-function getLanguagePriority(language, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(language, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the language.
- * @private
- */
-
-function specify(language, spec, index) {
-  var p = parseLanguage(language)
-  if (!p) return null;
-  var s = 0;
-  if(spec.full.toLowerCase() === p.full.toLowerCase()){
-    s |= 4;
-  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
-    s |= 2;
-  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
-    s |= 1;
-  } else if (spec.full !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred languages from an Accept-Language header.
- * @public
- */
-
-function preferredLanguages(accept, provided) {
-  // RFC 2616 sec 14.4: no header = *
-  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all languages
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullLanguage);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getLanguagePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted languages
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full language string.
- * @private
- */
-
-function getFullLanguage(spec) {
-  return spec.full;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js b/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
deleted file mode 100644
index 8e402ea88394c..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/lib/mediaType.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredMediaTypes;
-module.exports.preferredMediaTypes = preferredMediaTypes;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept header.
- * @private
- */
-
-function parseAccept(accept) {
-  var accepts = splitMediaTypes(accept);
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var mediaType = parseMediaType(accepts[i].trim(), i);
-
-    if (mediaType) {
-      accepts[j++] = mediaType;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a media type from the Accept header.
- * @private
- */
-
-function parseMediaType(str, i) {
-  var match = simpleMediaTypeRegExp.exec(str);
-  if (!match) return null;
-
-  var params = Object.create(null);
-  var q = 1;
-  var subtype = match[2];
-  var type = match[1];
-
-  if (match[3]) {
-    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
-
-    for (var j = 0; j < kvps.length; j++) {
-      var pair = kvps[j];
-      var key = pair[0].toLowerCase();
-      var val = pair[1];
-
-      // get the value, unwrapping quotes
-      var value = val && val[0] === '"' && val[val.length - 1] === '"'
-        ? val.slice(1, -1)
-        : val;
-
-      if (key === 'q') {
-        q = parseFloat(value);
-        break;
-      }
-
-      // store parameter
-      params[key] = value;
-    }
-  }
-
-  return {
-    type: type,
-    subtype: subtype,
-    params: params,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a media type.
- * @private
- */
-
-function getMediaTypePriority(type, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(type, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the media type.
- * @private
- */
-
-function specify(type, spec, index) {
-  var p = parseMediaType(type);
-  var s = 0;
-
-  if (!p) {
-    return null;
-  }
-
-  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
-    s |= 4
-  } else if(spec.type != '*') {
-    return null;
-  }
-
-  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
-    s |= 2
-  } else if(spec.subtype != '*') {
-    return null;
-  }
-
-  var keys = Object.keys(spec.params);
-  if (keys.length > 0) {
-    if (keys.every(function (k) {
-      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
-    })) {
-      s |= 1
-    } else {
-      return null
-    }
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s,
-  }
-}
-
-/**
- * Get the preferred media types from an Accept header.
- * @public
- */
-
-function preferredMediaTypes(accept, provided) {
-  // RFC 2616 sec 14.2: no header = */*
-  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all types
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullType);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getMediaTypePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted types
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full type string.
- * @private
- */
-
-function getFullType(spec) {
-  return spec.type + '/' + spec.subtype;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
-
-/**
- * Count the number of quotes in a string.
- * @private
- */
-
-function quoteCount(string) {
-  var count = 0;
-  var index = 0;
-
-  while ((index = string.indexOf('"', index)) !== -1) {
-    count++;
-    index++;
-  }
-
-  return count;
-}
-
-/**
- * Split a key value pair.
- * @private
- */
-
-function splitKeyValuePair(str) {
-  var index = str.indexOf('=');
-  var key;
-  var val;
-
-  if (index === -1) {
-    key = str;
-  } else {
-    key = str.slice(0, index);
-    val = str.slice(index + 1);
-  }
-
-  return [key, val];
-}
-
-/**
- * Split an Accept header into media types.
- * @private
- */
-
-function splitMediaTypes(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 1, j = 0; i < accepts.length; i++) {
-    if (quoteCount(accepts[j]) % 2 == 0) {
-      accepts[++j] = accepts[i];
-    } else {
-      accepts[j] += ',' + accepts[i];
-    }
-  }
-
-  // trim accepts
-  accepts.length = j + 1;
-
-  return accepts;
-}
-
-/**
- * Split a string of parameters.
- * @private
- */
-
-function splitParameters(str) {
-  var parameters = str.split(';');
-
-  for (var i = 1, j = 0; i < parameters.length; i++) {
-    if (quoteCount(parameters[j]) % 2 == 0) {
-      parameters[++j] = parameters[i];
-    } else {
-      parameters[j] += ';' + parameters[i];
-    }
-  }
-
-  // trim parameters
-  parameters.length = j + 1;
-
-  for (var i = 0; i < parameters.length; i++) {
-    parameters[i] = parameters[i].trim();
-  }
-
-  return parameters;
-}
diff --git a/node_modules/make-fetch-happen/node_modules/negotiator/package.json b/node_modules/make-fetch-happen/node_modules/negotiator/package.json
deleted file mode 100644
index e4bdc1ef4f748..0000000000000
--- a/node_modules/make-fetch-happen/node_modules/negotiator/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-  "name": "negotiator",
-  "description": "HTTP content negotiation",
-  "version": "1.0.0",
-  "contributors": [
-    "Douglas Christopher Wilson ",
-    "Federico Romero ",
-    "Isaac Z. Schlueter  (http://blog.izs.me/)"
-  ],
-  "license": "MIT",
-  "keywords": [
-    "http",
-    "content negotiation",
-    "accept",
-    "accept-language",
-    "accept-encoding",
-    "accept-charset"
-  ],
-  "repository": "jshttp/negotiator",
-  "devDependencies": {
-    "eslint": "7.32.0",
-    "eslint-plugin-markdown": "2.2.1",
-    "mocha": "9.1.3",
-    "nyc": "15.1.0"
-  },
-  "files": [
-    "lib/",
-    "HISTORY.md",
-    "LICENSE",
-    "index.js",
-    "README.md"
-  ],
-  "engines": {
-    "node": ">= 0.6"
-  },
-  "scripts": {
-    "lint": "eslint .",
-    "test": "mocha --reporter spec --check-leaks --bail test/",
-    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
-    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
-    "test-cov": "nyc --reporter=html --reporter=text npm test"
-  }
-}
diff --git a/node_modules/node-gyp/node_modules/negotiator/HISTORY.md b/node_modules/node-gyp/node_modules/negotiator/HISTORY.md
deleted file mode 100644
index 63d537d3f6811..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/HISTORY.md
+++ /dev/null
@@ -1,114 +0,0 @@
-1.0.0 / 2024-08-31
-==================
-
-  * Drop support for node <18
-  * Added an option preferred encodings array #59
-
-0.6.3 / 2022-01-22
-==================
-
-  * Revert "Lazy-load modules from main entry point"
-
-0.6.2 / 2019-04-29
-==================
-
-  * Fix sorting charset, encoding, and language with extra parameters
-
-0.6.1 / 2016-05-02
-==================
-
-  * perf: improve `Accept` parsing speed
-  * perf: improve `Accept-Charset` parsing speed
-  * perf: improve `Accept-Encoding` parsing speed
-  * perf: improve `Accept-Language` parsing speed
-
-0.6.0 / 2015-09-29
-==================
-
-  * Fix including type extensions in parameters in `Accept` parsing
-  * Fix parsing `Accept` parameters with quoted equals
-  * Fix parsing `Accept` parameters with quoted semicolons
-  * Lazy-load modules from main entry point
-  * perf: delay type concatenation until needed
-  * perf: enable strict mode
-  * perf: hoist regular expressions
-  * perf: remove closures getting spec properties
-  * perf: remove a closure from media type parsing
-  * perf: remove property delete from media type parsing
-
-0.5.3 / 2015-05-10
-==================
-
-  * Fix media type parameter matching to be case-insensitive
-
-0.5.2 / 2015-05-06
-==================
-
-  * Fix comparing media types with quoted values
-  * Fix splitting media types with quoted commas
-
-0.5.1 / 2015-02-14
-==================
-
-  * Fix preference sorting to be stable for long acceptable lists
-
-0.5.0 / 2014-12-18
-==================
-
-  * Fix list return order when large accepted list
-  * Fix missing identity encoding when q=0 exists
-  * Remove dynamic building of Negotiator class
-
-0.4.9 / 2014-10-14
-==================
-
-  * Fix error when media type has invalid parameter
-
-0.4.8 / 2014-09-28
-==================
-
-  * Fix all negotiations to be case-insensitive
-  * Stable sort preferences of same quality according to client order
-  * Support Node.js 0.6
-
-0.4.7 / 2014-06-24
-==================
-
-  * Handle invalid provided languages
-  * Handle invalid provided media types
-
-0.4.6 / 2014-06-11
-==================
-
-  *  Order by specificity when quality is the same
-
-0.4.5 / 2014-05-29
-==================
-
-  * Fix regression in empty header handling
-
-0.4.4 / 2014-05-29
-==================
-
-  * Fix behaviors when headers are not present
-
-0.4.3 / 2014-04-16
-==================
-
-  * Handle slashes on media params correctly
-
-0.4.2 / 2014-02-28
-==================
-
-  * Fix media type sorting
-  * Handle media types params strictly
-
-0.4.1 / 2014-01-16
-==================
-
-  * Use most specific matches
-
-0.4.0 / 2014-01-09
-==================
-
-  * Remove preferred prefix from methods
diff --git a/node_modules/node-gyp/node_modules/negotiator/LICENSE b/node_modules/node-gyp/node_modules/negotiator/LICENSE
deleted file mode 100644
index ea6b9e2e9ac25..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-(The MIT License)
-
-Copyright (c) 2012-2014 Federico Romero
-Copyright (c) 2012-2014 Isaac Z. Schlueter
-Copyright (c) 2014-2015 Douglas Christopher Wilson
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-'Software'), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/negotiator/index.js b/node_modules/node-gyp/node_modules/negotiator/index.js
deleted file mode 100644
index 4f51315d6af4b..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/index.js
+++ /dev/null
@@ -1,83 +0,0 @@
-/*!
- * negotiator
- * Copyright(c) 2012 Federico Romero
- * Copyright(c) 2012-2014 Isaac Z. Schlueter
- * Copyright(c) 2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-var preferredCharsets = require('./lib/charset')
-var preferredEncodings = require('./lib/encoding')
-var preferredLanguages = require('./lib/language')
-var preferredMediaTypes = require('./lib/mediaType')
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = Negotiator;
-module.exports.Negotiator = Negotiator;
-
-/**
- * Create a Negotiator instance from a request.
- * @param {object} request
- * @public
- */
-
-function Negotiator(request) {
-  if (!(this instanceof Negotiator)) {
-    return new Negotiator(request);
-  }
-
-  this.request = request;
-}
-
-Negotiator.prototype.charset = function charset(available) {
-  var set = this.charsets(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.charsets = function charsets(available) {
-  return preferredCharsets(this.request.headers['accept-charset'], available);
-};
-
-Negotiator.prototype.encoding = function encoding(available, opts) {
-  var set = this.encodings(available, opts);
-  return set && set[0];
-};
-
-Negotiator.prototype.encodings = function encodings(available, options) {
-  var opts = options || {};
-  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
-};
-
-Negotiator.prototype.language = function language(available) {
-  var set = this.languages(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.languages = function languages(available) {
-  return preferredLanguages(this.request.headers['accept-language'], available);
-};
-
-Negotiator.prototype.mediaType = function mediaType(available) {
-  var set = this.mediaTypes(available);
-  return set && set[0];
-};
-
-Negotiator.prototype.mediaTypes = function mediaTypes(available) {
-  return preferredMediaTypes(this.request.headers.accept, available);
-};
-
-// Backwards compatibility
-Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
-Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
-Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
-Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
-Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
-Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
-Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
-Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
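
The nested negotiator copy deleted here is a dedupe: the package-lock hunks further down hoist negotiator@1.0.0 to the top level, so the same API stays reachable. As orientation for the code above, a minimal usage sketch follows. The request object is a hypothetical stand-in (negotiator only reads request.headers), and the annotated results are worked out from the matching logic in the lib/ modules below, not captured from a run.

    const Negotiator = require('negotiator')

    // Hypothetical incoming request; only the headers object is consulted.
    const request = {
      headers: {
        accept: 'text/html, application/json;q=0.8, */*;q=0.1',
        'accept-language': 'en;q=0.8, es',
        'accept-encoding': 'gzip, deflate',
      },
    }

    const negotiator = new Negotiator(request)

    // Singular methods return the single best match among what the server offers.
    negotiator.mediaType(['application/json', 'text/html']) // => 'text/html'
    negotiator.language(['en', 'es'])                       // => 'es'
    negotiator.encoding(['identity', 'gzip'])               // => 'gzip'

    // Plural methods return every acceptable value, most preferred first.
    negotiator.mediaTypes(['application/json', 'text/html'])
    // => ['text/html', 'application/json']
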
diff --git a/node_modules/node-gyp/node_modules/negotiator/lib/charset.js b/node_modules/node-gyp/node_modules/negotiator/lib/charset.js
deleted file mode 100644
index cdd014803474a..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/lib/charset.js
+++ /dev/null
@@ -1,169 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredCharsets;
-module.exports.preferredCharsets = preferredCharsets;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Charset header.
- * @private
- */
-
-function parseAcceptCharset(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var charset = parseCharset(accepts[i].trim(), i);
-
-    if (charset) {
-      accepts[j++] = charset;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a charset from the Accept-Charset header.
- * @private
- */
-
-function parseCharset(str, i) {
-  var match = simpleCharsetRegExp.exec(str);
-  if (!match) return null;
-
-  var charset = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    charset: charset,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a charset.
- * @private
- */
-
-function getCharsetPriority(charset, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(charset, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the charset.
- * @private
- */
-
-function specify(charset, spec, index) {
-  var s = 0;
-  if(spec.charset.toLowerCase() === charset.toLowerCase()){
-    s |= 1;
-  } else if (spec.charset !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-}
-
-/**
- * Get the preferred charsets from an Accept-Charset header.
- * @public
- */
-
-function preferredCharsets(accept, provided) {
-  // RFC 2616 sec 14.2: no header = *
-  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all charsets
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullCharset);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getCharsetPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted charsets
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full charset string.
- * @private
- */
-
-function getFullCharset(spec) {
-  return spec.charset;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
diff --git a/node_modules/node-gyp/node_modules/negotiator/lib/encoding.js b/node_modules/node-gyp/node_modules/negotiator/lib/encoding.js
deleted file mode 100644
index 9ebb633d67743..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/lib/encoding.js
+++ /dev/null
@@ -1,205 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredEncodings;
-module.exports.preferredEncodings = preferredEncodings;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Encoding header.
- * @private
- */
-
-function parseAcceptEncoding(accept) {
-  var accepts = accept.split(',');
-  var hasIdentity = false;
-  var minQuality = 1;
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var encoding = parseEncoding(accepts[i].trim(), i);
-
-    if (encoding) {
-      accepts[j++] = encoding;
-      hasIdentity = hasIdentity || specify('identity', encoding);
-      minQuality = Math.min(minQuality, encoding.q || 1);
-    }
-  }
-
-  if (!hasIdentity) {
-    /*
-     * If identity doesn't explicitly appear in the accept-encoding header,
-     * it's added to the list of acceptable encoding with the lowest q
-     */
-    accepts[j++] = {
-      encoding: 'identity',
-      q: minQuality,
-      i: i
-    };
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse an encoding from the Accept-Encoding header.
- * @private
- */
-
-function parseEncoding(str, i) {
-  var match = simpleEncodingRegExp.exec(str);
-  if (!match) return null;
-
-  var encoding = match[1];
-  var q = 1;
-  if (match[2]) {
-    var params = match[2].split(';');
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].trim().split('=');
-      if (p[0] === 'q') {
-        q = parseFloat(p[1]);
-        break;
-      }
-    }
-  }
-
-  return {
-    encoding: encoding,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of an encoding.
- * @private
- */
-
-function getEncodingPriority(encoding, accepted, index) {
-  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(encoding, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the encoding.
- * @private
- */
-
-function specify(encoding, spec, index) {
-  var s = 0;
-  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
-    s |= 1;
-  } else if (spec.encoding !== '*' ) {
-    return null
-  }
-
-  return {
-    encoding: encoding,
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred encodings from an Accept-Encoding header.
- * @public
- */
-
-function preferredEncodings(accept, provided, preferred) {
-  var accepts = parseAcceptEncoding(accept || '');
-
-  var comparator = preferred ? function comparator (a, b) {
-    if (a.q !== b.q) {
-      return b.q - a.q // higher quality first
-    }
-
-    var aPreferred = preferred.indexOf(a.encoding)
-    var bPreferred = preferred.indexOf(b.encoding)
-
-    if (aPreferred === -1 && bPreferred === -1) {
-      // consider the original specifity/order
-      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
-    }
-
-    if (aPreferred !== -1 && bPreferred !== -1) {
-      return aPreferred - bPreferred // consider the preferred order
-    }
-
-    return aPreferred === -1 ? 1 : -1 // preferred first
-  } : compareSpecs;
-
-  if (!provided) {
-    // sorted list of all encodings
-    return accepts
-      .filter(isQuality)
-      .sort(comparator)
-      .map(getFullEncoding);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getEncodingPriority(type, accepts, index);
-  });
-
-  // sorted list of accepted encodings
-  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
-}
-
-/**
- * Get full encoding string.
- * @private
- */
-
-function getFullEncoding(spec) {
-  return spec.encoding;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
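
To make the tie-breaking above concrete, a small sketch of calling this module directly (normally it is reached through Negotiator.prototype.encodings(available, { preferred })). The annotated results are traced from the comparator above, not taken from a test run.

    const preferredEncodings = require('negotiator/lib/encoding')

    // gzip and br share q=1, so header order wins by default...
    preferredEncodings('gzip, br', ['gzip', 'br'])
    // => ['gzip', 'br']

    // ...unless the caller passes a preferred order to break the tie.
    preferredEncodings('gzip, br', ['gzip', 'br'], ['br'])
    // => ['br', 'gzip']

    // identity is implicitly acceptable at the lowest quality seen in the header.
    preferredEncodings('gzip;q=0.5', ['identity', 'gzip'])
    // => ['gzip', 'identity']
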
diff --git a/node_modules/node-gyp/node_modules/negotiator/lib/language.js b/node_modules/node-gyp/node_modules/negotiator/lib/language.js
deleted file mode 100644
index a23167252719b..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/lib/language.js
+++ /dev/null
@@ -1,179 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredLanguages;
-module.exports.preferredLanguages = preferredLanguages;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept-Language header.
- * @private
- */
-
-function parseAcceptLanguage(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var language = parseLanguage(accepts[i].trim(), i);
-
-    if (language) {
-      accepts[j++] = language;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a language from the Accept-Language header.
- * @private
- */
-
-function parseLanguage(str, i) {
-  var match = simpleLanguageRegExp.exec(str);
-  if (!match) return null;
-
-  var prefix = match[1]
-  var suffix = match[2]
-  var full = prefix
-
-  if (suffix) full += "-" + suffix;
-
-  var q = 1;
-  if (match[3]) {
-    var params = match[3].split(';')
-    for (var j = 0; j < params.length; j++) {
-      var p = params[j].split('=');
-      if (p[0] === 'q') q = parseFloat(p[1]);
-    }
-  }
-
-  return {
-    prefix: prefix,
-    suffix: suffix,
-    q: q,
-    i: i,
-    full: full
-  };
-}
-
-/**
- * Get the priority of a language.
- * @private
- */
-
-function getLanguagePriority(language, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(language, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the language.
- * @private
- */
-
-function specify(language, spec, index) {
-  var p = parseLanguage(language)
-  if (!p) return null;
-  var s = 0;
-  if(spec.full.toLowerCase() === p.full.toLowerCase()){
-    s |= 4;
-  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
-    s |= 2;
-  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
-    s |= 1;
-  } else if (spec.full !== '*' ) {
-    return null
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s
-  }
-};
-
-/**
- * Get the preferred languages from an Accept-Language header.
- * @public
- */
-
-function preferredLanguages(accept, provided) {
-  // RFC 2616 sec 14.4: no header = *
-  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all languages
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullLanguage);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getLanguagePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted languages
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full language string.
- * @private
- */
-
-function getFullLanguage(spec) {
-  return spec.full;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
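
The prefix handling above is easiest to see with a worked call; the expected results follow from the specify() scoring and are illustrative only.

    const preferredLanguages = require('negotiator/lib/language')

    // 'en-US' is not listed in the header, but the bare 'en;q=0.8' range
    // matches its prefix, so it still beats the lower-quality 'fr'.
    preferredLanguages('en-GB, en;q=0.8, fr;q=0.5', ['en-US', 'fr'])
    // => ['en-US', 'fr']

    // Per the RFC 2616 comment above, a missing header is treated as '*'.
    preferredLanguages(undefined, ['en', 'fr'])
    // => ['en', 'fr']
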
diff --git a/node_modules/node-gyp/node_modules/negotiator/lib/mediaType.js b/node_modules/node-gyp/node_modules/negotiator/lib/mediaType.js
deleted file mode 100644
index 8e402ea88394c..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/lib/mediaType.js
+++ /dev/null
@@ -1,294 +0,0 @@
-/**
- * negotiator
- * Copyright(c) 2012 Isaac Z. Schlueter
- * Copyright(c) 2014 Federico Romero
- * Copyright(c) 2014-2015 Douglas Christopher Wilson
- * MIT Licensed
- */
-
-'use strict';
-
-/**
- * Module exports.
- * @public
- */
-
-module.exports = preferredMediaTypes;
-module.exports.preferredMediaTypes = preferredMediaTypes;
-
-/**
- * Module variables.
- * @private
- */
-
-var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
-
-/**
- * Parse the Accept header.
- * @private
- */
-
-function parseAccept(accept) {
-  var accepts = splitMediaTypes(accept);
-
-  for (var i = 0, j = 0; i < accepts.length; i++) {
-    var mediaType = parseMediaType(accepts[i].trim(), i);
-
-    if (mediaType) {
-      accepts[j++] = mediaType;
-    }
-  }
-
-  // trim accepts
-  accepts.length = j;
-
-  return accepts;
-}
-
-/**
- * Parse a media type from the Accept header.
- * @private
- */
-
-function parseMediaType(str, i) {
-  var match = simpleMediaTypeRegExp.exec(str);
-  if (!match) return null;
-
-  var params = Object.create(null);
-  var q = 1;
-  var subtype = match[2];
-  var type = match[1];
-
-  if (match[3]) {
-    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
-
-    for (var j = 0; j < kvps.length; j++) {
-      var pair = kvps[j];
-      var key = pair[0].toLowerCase();
-      var val = pair[1];
-
-      // get the value, unwrapping quotes
-      var value = val && val[0] === '"' && val[val.length - 1] === '"'
-        ? val.slice(1, -1)
-        : val;
-
-      if (key === 'q') {
-        q = parseFloat(value);
-        break;
-      }
-
-      // store parameter
-      params[key] = value;
-    }
-  }
-
-  return {
-    type: type,
-    subtype: subtype,
-    params: params,
-    q: q,
-    i: i
-  };
-}
-
-/**
- * Get the priority of a media type.
- * @private
- */
-
-function getMediaTypePriority(type, accepted, index) {
-  var priority = {o: -1, q: 0, s: 0};
-
-  for (var i = 0; i < accepted.length; i++) {
-    var spec = specify(type, accepted[i], index);
-
-    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
-      priority = spec;
-    }
-  }
-
-  return priority;
-}
-
-/**
- * Get the specificity of the media type.
- * @private
- */
-
-function specify(type, spec, index) {
-  var p = parseMediaType(type);
-  var s = 0;
-
-  if (!p) {
-    return null;
-  }
-
-  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
-    s |= 4
-  } else if(spec.type != '*') {
-    return null;
-  }
-
-  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
-    s |= 2
-  } else if(spec.subtype != '*') {
-    return null;
-  }
-
-  var keys = Object.keys(spec.params);
-  if (keys.length > 0) {
-    if (keys.every(function (k) {
-      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
-    })) {
-      s |= 1
-    } else {
-      return null
-    }
-  }
-
-  return {
-    i: index,
-    o: spec.i,
-    q: spec.q,
-    s: s,
-  }
-}
-
-/**
- * Get the preferred media types from an Accept header.
- * @public
- */
-
-function preferredMediaTypes(accept, provided) {
-  // RFC 2616 sec 14.2: no header = */*
-  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
-
-  if (!provided) {
-    // sorted list of all types
-    return accepts
-      .filter(isQuality)
-      .sort(compareSpecs)
-      .map(getFullType);
-  }
-
-  var priorities = provided.map(function getPriority(type, index) {
-    return getMediaTypePriority(type, accepts, index);
-  });
-
-  // sorted list of accepted types
-  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
-    return provided[priorities.indexOf(priority)];
-  });
-}
-
-/**
- * Compare two specs.
- * @private
- */
-
-function compareSpecs(a, b) {
-  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
-}
-
-/**
- * Get full type string.
- * @private
- */
-
-function getFullType(spec) {
-  return spec.type + '/' + spec.subtype;
-}
-
-/**
- * Check if a spec has any quality.
- * @private
- */
-
-function isQuality(spec) {
-  return spec.q > 0;
-}
-
-/**
- * Count the number of quotes in a string.
- * @private
- */
-
-function quoteCount(string) {
-  var count = 0;
-  var index = 0;
-
-  while ((index = string.indexOf('"', index)) !== -1) {
-    count++;
-    index++;
-  }
-
-  return count;
-}
-
-/**
- * Split a key value pair.
- * @private
- */
-
-function splitKeyValuePair(str) {
-  var index = str.indexOf('=');
-  var key;
-  var val;
-
-  if (index === -1) {
-    key = str;
-  } else {
-    key = str.slice(0, index);
-    val = str.slice(index + 1);
-  }
-
-  return [key, val];
-}
-
-/**
- * Split an Accept header into media types.
- * @private
- */
-
-function splitMediaTypes(accept) {
-  var accepts = accept.split(',');
-
-  for (var i = 1, j = 0; i < accepts.length; i++) {
-    if (quoteCount(accepts[j]) % 2 == 0) {
-      accepts[++j] = accepts[i];
-    } else {
-      accepts[j] += ',' + accepts[i];
-    }
-  }
-
-  // trim accepts
-  accepts.length = j + 1;
-
-  return accepts;
-}
-
-/**
- * Split a string of parameters.
- * @private
- */
-
-function splitParameters(str) {
-  var parameters = str.split(';');
-
-  for (var i = 1, j = 0; i < parameters.length; i++) {
-    if (quoteCount(parameters[j]) % 2 == 0) {
-      parameters[++j] = parameters[i];
-    } else {
-      parameters[j] += ';' + parameters[i];
-    }
-  }
-
-  // trim parameters
-  parameters.length = j + 1;
-
-  for (var i = 0; i < parameters.length; i++) {
-    parameters[i] = parameters[i].trim();
-  }
-
-  return parameters;
-}
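
Two worked calls to illustrate the specificity and strict-parameter rules above; the annotated results follow from specify() and compareSpecs() rather than a captured run.

    const preferredMediaTypes = require('negotiator/lib/mediaType')

    // text/html matches the exact range (q=1); text/plain only matches the
    // 'text/*' wildcard (q=0.9); application/json only matches '*/*' (q=0.1).
    preferredMediaTypes('text/*;q=0.9, text/html, */*;q=0.1',
      ['text/plain', 'text/html', 'application/json'])
    // => ['text/html', 'text/plain', 'application/json']

    // Parameters are matched strictly: the level=1 range only accepts a
    // provided type carrying the same parameter; the bare one falls back
    // to the q=0.7 range.
    preferredMediaTypes('text/html;level=1, text/html;q=0.7',
      ['text/html;level=1', 'text/html'])
    // => ['text/html;level=1', 'text/html']
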
diff --git a/node_modules/node-gyp/node_modules/negotiator/package.json b/node_modules/node-gyp/node_modules/negotiator/package.json
deleted file mode 100644
index e4bdc1ef4f748..0000000000000
--- a/node_modules/node-gyp/node_modules/negotiator/package.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
-  "name": "negotiator",
-  "description": "HTTP content negotiation",
-  "version": "1.0.0",
-  "contributors": [
-    "Douglas Christopher Wilson ",
-    "Federico Romero ",
-    "Isaac Z. Schlueter  (http://blog.izs.me/)"
-  ],
-  "license": "MIT",
-  "keywords": [
-    "http",
-    "content negotiation",
-    "accept",
-    "accept-language",
-    "accept-encoding",
-    "accept-charset"
-  ],
-  "repository": "jshttp/negotiator",
-  "devDependencies": {
-    "eslint": "7.32.0",
-    "eslint-plugin-markdown": "2.2.1",
-    "mocha": "9.1.3",
-    "nyc": "15.1.0"
-  },
-  "files": [
-    "lib/",
-    "HISTORY.md",
-    "LICENSE",
-    "index.js",
-    "README.md"
-  ],
-  "engines": {
-    "node": ">= 0.6"
-  },
-  "scripts": {
-    "lint": "eslint .",
-    "test": "mocha --reporter spec --check-leaks --bail test/",
-    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
-    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
-    "test-cov": "nyc --reporter=html --reporter=text npm test"
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 1ce97778e7f04..17b77a34c919d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3997,6 +3997,16 @@
         "encoding": "^0.1.13"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/negotiator": {
+      "version": "0.6.4",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
+      "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/node-gyp": {
       "version": "10.3.1",
       "dev": true,
@@ -9686,14 +9696,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/make-fetch-happen/node_modules/negotiator": {
-      "version": "1.0.0",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/map-obj": {
       "version": "4.3.0",
       "dev": true,
@@ -10782,8 +10784,10 @@
       }
     },
     "node_modules/negotiator": {
-      "version": "0.6.4",
-      "dev": true,
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
+      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
+      "inBundle": true,
       "license": "MIT",
       "engines": {
         "node": ">= 0.6"
@@ -10947,14 +10951,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/node-gyp/node_modules/negotiator": {
-      "version": "1.0.0",
-      "inBundle": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/node-gyp/node_modules/tar": {
       "version": "7.4.3",
       "inBundle": true,

From 79a4e67c358b491f0456162fa9307e0f5a99167b Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:13:40 -0700
Subject: [PATCH 25/63] deps: minizlib@3.0.2

---
 .../node_modules/minizlib/LICENSE             |  26 -
 .../node_modules/minizlib/package.json        |  80 ---
 node_modules/minizlib/LICENSE                 |   6 +-
 node_modules/minizlib/constants.js            | 115 ----
 .../minizlib/dist/commonjs/constants.js       |   0
 .../minizlib/dist/commonjs/index.js           |   0
 .../minizlib/dist/commonjs/package.json       |   0
 .../minizlib/dist/esm/constants.js            |   0
 .../minizlib/dist/esm/index.js                |   0
 .../minizlib/dist/esm/package.json            |   0
 node_modules/minizlib/index.js                | 348 ----------
 .../minizlib/node_modules/minipass/LICENSE    |  15 -
 .../minizlib/node_modules/minipass/index.js   | 649 ------------------
 .../node_modules/minipass/package.json        |  56 --
 node_modules/minizlib/package.json            |  60 +-
 .../node-gyp/node_modules/minizlib/LICENSE    |  26 -
 .../minizlib/dist/commonjs/constants.js       | 123 ----
 .../minizlib/dist/commonjs/index.js           | 392 -----------
 .../minizlib/dist/commonjs/package.json       |   3 -
 .../minizlib/dist/esm/constants.js            | 117 ----
 .../node_modules/minizlib/dist/esm/index.js   | 340 ---------
 .../minizlib/dist/esm/package.json            |   3 -
 .../node_modules/minizlib/package.json        |  80 ---
 .../node_modules/minizlib/LICENSE             |  26 -
 .../minizlib/dist/commonjs/constants.js       | 123 ----
 .../minizlib/dist/commonjs/index.js           | 392 -----------
 .../minizlib/dist/commonjs/package.json       |   3 -
 .../minizlib/dist/esm/constants.js            | 117 ----
 .../node_modules/minizlib/dist/esm/index.js   | 340 ---------
 .../minizlib/dist/esm/package.json            |   3 -
 .../node_modules/minizlib/package.json        |  80 ---
 .../pacote/node_modules/minizlib/LICENSE      |  26 -
 .../minizlib/dist/commonjs/constants.js       | 123 ----
 .../minizlib/dist/commonjs/index.js           | 392 -----------
 .../minizlib/dist/commonjs/package.json       |   3 -
 .../minizlib/dist/esm/constants.js            | 117 ----
 .../node_modules/minizlib/dist/esm/index.js   | 340 ---------
 .../minizlib/dist/esm/package.json            |   3 -
 .../pacote/node_modules/minizlib/package.json |  80 ---
 package-lock.json                             | 118 ++--
 40 files changed, 111 insertions(+), 4614 deletions(-)
 delete mode 100644 node_modules/minipass-fetch/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/minipass-fetch/node_modules/minizlib/package.json
 delete mode 100644 node_modules/minizlib/constants.js
 rename node_modules/{minipass-fetch/node_modules => }/minizlib/dist/commonjs/constants.js (100%)
 rename node_modules/{minipass-fetch/node_modules => }/minizlib/dist/commonjs/index.js (100%)
 rename node_modules/{minipass-fetch/node_modules => }/minizlib/dist/commonjs/package.json (100%)
 rename node_modules/{minipass-fetch/node_modules => }/minizlib/dist/esm/constants.js (100%)
 rename node_modules/{minipass-fetch/node_modules => }/minizlib/dist/esm/index.js (100%)
 rename node_modules/{minipass-fetch/node_modules => }/minizlib/dist/esm/package.json (100%)
 delete mode 100644 node_modules/minizlib/index.js
 delete mode 100644 node_modules/minizlib/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/minizlib/node_modules/minipass/index.js
 delete mode 100644 node_modules/minizlib/node_modules/minipass/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/minizlib/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json
 delete mode 100644 node_modules/npm-registry-fetch/node_modules/minizlib/package.json
 delete mode 100644 node_modules/pacote/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js
 delete mode 100644 node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json
 delete mode 100644 node_modules/pacote/node_modules/minizlib/dist/esm/constants.js
 delete mode 100644 node_modules/pacote/node_modules/minizlib/dist/esm/index.js
 delete mode 100644 node_modules/pacote/node_modules/minizlib/dist/esm/package.json
 delete mode 100644 node_modules/pacote/node_modules/minizlib/package.json

diff --git a/node_modules/minipass-fetch/node_modules/minizlib/LICENSE b/node_modules/minipass-fetch/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/minipass-fetch/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/package.json b/node_modules/minipass-fetch/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/minipass-fetch/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/minizlib/LICENSE b/node_modules/minizlib/LICENSE
index ffce7383f53e7..49f7efe431c9e 100644
--- a/node_modules/minizlib/LICENSE
+++ b/node_modules/minizlib/LICENSE
@@ -2,9 +2,9 @@ Minizlib was created by Isaac Z. Schlueter.
 It is a derivative work of the Node.js project.
 
 """
-Copyright Isaac Z. Schlueter and Contributors
-Copyright Node.js contributors. All rights reserved.
-Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
+Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
+Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
 
 Permission is hereby granted, free of charge, to any person obtaining a
 copy of this software and associated documentation files (the "Software"),
diff --git a/node_modules/minizlib/constants.js b/node_modules/minizlib/constants.js
deleted file mode 100644
index 641ebc73129bf..0000000000000
--- a/node_modules/minizlib/constants.js
+++ /dev/null
@@ -1,115 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const realZlibConstants = require('zlib').constants ||
-  /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
-
-module.exports = Object.freeze(Object.assign(Object.create(null), {
-  Z_NO_FLUSH: 0,
-  Z_PARTIAL_FLUSH: 1,
-  Z_SYNC_FLUSH: 2,
-  Z_FULL_FLUSH: 3,
-  Z_FINISH: 4,
-  Z_BLOCK: 5,
-  Z_OK: 0,
-  Z_STREAM_END: 1,
-  Z_NEED_DICT: 2,
-  Z_ERRNO: -1,
-  Z_STREAM_ERROR: -2,
-  Z_DATA_ERROR: -3,
-  Z_MEM_ERROR: -4,
-  Z_BUF_ERROR: -5,
-  Z_VERSION_ERROR: -6,
-  Z_NO_COMPRESSION: 0,
-  Z_BEST_SPEED: 1,
-  Z_BEST_COMPRESSION: 9,
-  Z_DEFAULT_COMPRESSION: -1,
-  Z_FILTERED: 1,
-  Z_HUFFMAN_ONLY: 2,
-  Z_RLE: 3,
-  Z_FIXED: 4,
-  Z_DEFAULT_STRATEGY: 0,
-  DEFLATE: 1,
-  INFLATE: 2,
-  GZIP: 3,
-  GUNZIP: 4,
-  DEFLATERAW: 5,
-  INFLATERAW: 6,
-  UNZIP: 7,
-  BROTLI_DECODE: 8,
-  BROTLI_ENCODE: 9,
-  Z_MIN_WINDOWBITS: 8,
-  Z_MAX_WINDOWBITS: 15,
-  Z_DEFAULT_WINDOWBITS: 15,
-  Z_MIN_CHUNK: 64,
-  Z_MAX_CHUNK: Infinity,
-  Z_DEFAULT_CHUNK: 16384,
-  Z_MIN_MEMLEVEL: 1,
-  Z_MAX_MEMLEVEL: 9,
-  Z_DEFAULT_MEMLEVEL: 8,
-  Z_MIN_LEVEL: -1,
-  Z_MAX_LEVEL: 9,
-  Z_DEFAULT_LEVEL: -1,
-  BROTLI_OPERATION_PROCESS: 0,
-  BROTLI_OPERATION_FLUSH: 1,
-  BROTLI_OPERATION_FINISH: 2,
-  BROTLI_OPERATION_EMIT_METADATA: 3,
-  BROTLI_MODE_GENERIC: 0,
-  BROTLI_MODE_TEXT: 1,
-  BROTLI_MODE_FONT: 2,
-  BROTLI_DEFAULT_MODE: 0,
-  BROTLI_MIN_QUALITY: 0,
-  BROTLI_MAX_QUALITY: 11,
-  BROTLI_DEFAULT_QUALITY: 11,
-  BROTLI_MIN_WINDOW_BITS: 10,
-  BROTLI_MAX_WINDOW_BITS: 24,
-  BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-  BROTLI_DEFAULT_WINDOW: 22,
-  BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-  BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-  BROTLI_PARAM_MODE: 0,
-  BROTLI_PARAM_QUALITY: 1,
-  BROTLI_PARAM_LGWIN: 2,
-  BROTLI_PARAM_LGBLOCK: 3,
-  BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-  BROTLI_PARAM_SIZE_HINT: 5,
-  BROTLI_PARAM_LARGE_WINDOW: 6,
-  BROTLI_PARAM_NPOSTFIX: 7,
-  BROTLI_PARAM_NDIRECT: 8,
-  BROTLI_DECODER_RESULT_ERROR: 0,
-  BROTLI_DECODER_RESULT_SUCCESS: 1,
-  BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-  BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-  BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-  BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-  BROTLI_DECODER_NO_ERROR: 0,
-  BROTLI_DECODER_SUCCESS: 1,
-  BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-  BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-  BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-  BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-  BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-  BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-  BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-  BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-  BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-  BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-  BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-  BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-  BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-  BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-  BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-  BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-  BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants))
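
For orientation, a sketch of how this flat-layout constants table was consumed under the 2.x layout removed here; in 3.x the same table ships under dist/ (see the renames just below), so only the deep require path changes.

    // Both forms reached the same frozen table of zlib/brotli flags:
    const { constants } = require('minizlib')    // re-exported by the old index.js (removed below)
    const direct = require('minizlib/constants.js')

    constants.Z_SYNC_FLUSH       // => 2
    constants.BROTLI_MAX_QUALITY // => 11
    direct.Z_FINISH              // => 4
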
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/minizlib/dist/commonjs/constants.js
similarity index 100%
rename from node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js
rename to node_modules/minizlib/dist/commonjs/constants.js
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js b/node_modules/minizlib/dist/commonjs/index.js
similarity index 100%
rename from node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js
rename to node_modules/minizlib/dist/commonjs/index.js
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json b/node_modules/minizlib/dist/commonjs/package.json
similarity index 100%
rename from node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json
rename to node_modules/minizlib/dist/commonjs/package.json
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js b/node_modules/minizlib/dist/esm/constants.js
similarity index 100%
rename from node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js
rename to node_modules/minizlib/dist/esm/constants.js
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js b/node_modules/minizlib/dist/esm/index.js
similarity index 100%
rename from node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js
rename to node_modules/minizlib/dist/esm/index.js
diff --git a/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json b/node_modules/minizlib/dist/esm/package.json
similarity index 100%
rename from node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json
rename to node_modules/minizlib/dist/esm/package.json
diff --git a/node_modules/minizlib/index.js b/node_modules/minizlib/index.js
deleted file mode 100644
index fbaf69e19f209..0000000000000
--- a/node_modules/minizlib/index.js
+++ /dev/null
@@ -1,348 +0,0 @@
-'use strict'
-
-const assert = require('assert')
-const Buffer = require('buffer').Buffer
-const realZlib = require('zlib')
-
-const constants = exports.constants = require('./constants.js')
-const Minipass = require('minipass')
-
-const OriginalBufferConcat = Buffer.concat
-
-const _superWrite = Symbol('_superWrite')
-class ZlibError extends Error {
-  constructor (err) {
-    super('zlib: ' + err.message)
-    this.code = err.code
-    this.errno = err.errno
-    /* istanbul ignore if */
-    if (!this.code)
-      this.code = 'ZLIB_ERROR'
-
-    this.message = 'zlib: ' + err.message
-    Error.captureStackTrace(this, this.constructor)
-  }
-
-  get name () {
-    return 'ZlibError'
-  }
-}
-
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _opts = Symbol('opts')
-const _flushFlag = Symbol('flushFlag')
-const _finishFlushFlag = Symbol('finishFlushFlag')
-const _fullFlushFlag = Symbol('fullFlushFlag')
-const _handle = Symbol('handle')
-const _onError = Symbol('onError')
-const _sawError = Symbol('sawError')
-const _level = Symbol('level')
-const _strategy = Symbol('strategy')
-const _ended = Symbol('ended')
-const _defaultFullFlush = Symbol('_defaultFullFlush')
-
-class ZlibBase extends Minipass {
-  constructor (opts, mode) {
-    if (!opts || typeof opts !== 'object')
-      throw new TypeError('invalid options for ZlibBase constructor')
-
-    super(opts)
-    this[_sawError] = false
-    this[_ended] = false
-    this[_opts] = opts
-
-    this[_flushFlag] = opts.flush
-    this[_finishFlushFlag] = opts.finishFlush
-    // this will throw if any options are invalid for the class selected
-    try {
-      this[_handle] = new realZlib[mode](opts)
-    } catch (er) {
-      // make sure that all errors get decorated properly
-      throw new ZlibError(er)
-    }
-
-    this[_onError] = (err) => {
-      // no sense raising multiple errors, since we abort on the first one.
-      if (this[_sawError])
-        return
-
-      this[_sawError] = true
-
-      // there is no way to cleanly recover.
-      // continuing only obscures problems.
-      this.close()
-      this.emit('error', err)
-    }
-
-    this[_handle].on('error', er => this[_onError](new ZlibError(er)))
-    this.once('end', () => this.close)
-  }
-
-  close () {
-    if (this[_handle]) {
-      this[_handle].close()
-      this[_handle] = null
-      this.emit('close')
-    }
-  }
-
-  reset () {
-    if (!this[_sawError]) {
-      assert(this[_handle], 'zlib binding closed')
-      return this[_handle].reset()
-    }
-  }
-
-  flush (flushFlag) {
-    if (this.ended)
-      return
-
-    if (typeof flushFlag !== 'number')
-      flushFlag = this[_fullFlushFlag]
-    this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
-  }
-
-  end (chunk, encoding, cb) {
-    if (chunk)
-      this.write(chunk, encoding)
-    this.flush(this[_finishFlushFlag])
-    this[_ended] = true
-    return super.end(null, null, cb)
-  }
-
-  get ended () {
-    return this[_ended]
-  }
-
-  write (chunk, encoding, cb) {
-    // process the chunk using the sync process
-    // then super.write() all the outputted chunks
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-
-    if (typeof chunk === 'string')
-      chunk = Buffer.from(chunk, encoding)
-
-    if (this[_sawError])
-      return
-    assert(this[_handle], 'zlib binding closed')
-
-    // _processChunk tries to .close() the native handle after it's done, so we
-    // intercept that by temporarily making it a no-op.
-    const nativeHandle = this[_handle]._handle
-    const originalNativeClose = nativeHandle.close
-    nativeHandle.close = () => {}
-    const originalClose = this[_handle].close
-    this[_handle].close = () => {}
-    // It also calls `Buffer.concat()` at the end, which may be convenient
-    // for some, but which we are not interested in as it slows us down.
-    Buffer.concat = (args) => args
-    let result
-    try {
-      const flushFlag = typeof chunk[_flushFlag] === 'number'
-        ? chunk[_flushFlag] : this[_flushFlag]
-      result = this[_handle]._processChunk(chunk, flushFlag)
-      // if we don't throw, reset it back how it was
-      Buffer.concat = OriginalBufferConcat
-    } catch (err) {
-      // or if we do, put Buffer.concat() back before we emit error
-      // Error events call into user code, which may call Buffer.concat()
-      Buffer.concat = OriginalBufferConcat
-      this[_onError](new ZlibError(err))
-    } finally {
-      if (this[_handle]) {
-        // Core zlib resets `_handle` to null after attempting to close the
-        // native handle. Our no-op handler prevented actual closure, but we
-        // need to restore the `._handle` property.
-        this[_handle]._handle = nativeHandle
-        nativeHandle.close = originalNativeClose
-        this[_handle].close = originalClose
-        // `_processChunk()` adds an 'error' listener. If we don't remove it
-        // after each call, these handlers start piling up.
-        this[_handle].removeAllListeners('error')
-        // make sure OUR error listener is still attached tho
-      }
-    }
-
-    if (this[_handle])
-      this[_handle].on('error', er => this[_onError](new ZlibError(er)))
-
-    let writeReturn
-    if (result) {
-      if (Array.isArray(result) && result.length > 0) {
-        // The first buffer is always `handle._outBuffer`, which would be
-        // re-used for later invocations; so, we always have to copy that one.
-        writeReturn = this[_superWrite](Buffer.from(result[0]))
-        for (let i = 1; i < result.length; i++) {
-          writeReturn = this[_superWrite](result[i])
-        }
-      } else {
-        writeReturn = this[_superWrite](Buffer.from(result))
-      }
-    }
-
-    if (cb)
-      cb()
-    return writeReturn
-  }
-
-  [_superWrite] (data) {
-    return super.write(data)
-  }
-}
-
-class Zlib extends ZlibBase {
-  constructor (opts, mode) {
-    opts = opts || {}
-
-    opts.flush = opts.flush || constants.Z_NO_FLUSH
-    opts.finishFlush = opts.finishFlush || constants.Z_FINISH
-    super(opts, mode)
-
-    this[_fullFlushFlag] = constants.Z_FULL_FLUSH
-    this[_level] = opts.level
-    this[_strategy] = opts.strategy
-  }
-
-  params (level, strategy) {
-    if (this[_sawError])
-      return
-
-    if (!this[_handle])
-      throw new Error('cannot switch params when binding is closed')
-
-    // no way to test this without also not supporting params at all
-    /* istanbul ignore if */
-    if (!this[_handle].params)
-      throw new Error('not supported in this implementation')
-
-    if (this[_level] !== level || this[_strategy] !== strategy) {
-      this.flush(constants.Z_SYNC_FLUSH)
-      assert(this[_handle], 'zlib binding closed')
-      // .params() calls .flush(), but the latter is always async in the
-      // core zlib. We override .flush() temporarily to intercept that and
-      // flush synchronously.
-      const origFlush = this[_handle].flush
-      this[_handle].flush = (flushFlag, cb) => {
-        this.flush(flushFlag)
-        cb()
-      }
-      try {
-        this[_handle].params(level, strategy)
-      } finally {
-        this[_handle].flush = origFlush
-      }
-      /* istanbul ignore else */
-      if (this[_handle]) {
-        this[_level] = level
-        this[_strategy] = strategy
-      }
-    }
-  }
-}
-
-// minimal 2-byte header
-class Deflate extends Zlib {
-  constructor (opts) {
-    super(opts, 'Deflate')
-  }
-}
-
-class Inflate extends Zlib {
-  constructor (opts) {
-    super(opts, 'Inflate')
-  }
-}
-
-// gzip - bigger header, same deflate compression
-const _portable = Symbol('_portable')
-class Gzip extends Zlib {
-  constructor (opts) {
-    super(opts, 'Gzip')
-    this[_portable] = opts && !!opts.portable
-  }
-
-  [_superWrite] (data) {
-    if (!this[_portable])
-      return super[_superWrite](data)
-
-    // we'll always get the header emitted in one first chunk
-    // overwrite the OS indicator byte with 0xFF
-    this[_portable] = false
-    data[9] = 255
-    return super[_superWrite](data)
-  }
-}
-
-class Gunzip extends Zlib {
-  constructor (opts) {
-    super(opts, 'Gunzip')
-  }
-}
-
-// raw - no header
-class DeflateRaw extends Zlib {
-  constructor (opts) {
-    super(opts, 'DeflateRaw')
-  }
-}
-
-class InflateRaw extends Zlib {
-  constructor (opts) {
-    super(opts, 'InflateRaw')
-  }
-}
-
-// auto-detect header.
-class Unzip extends Zlib {
-  constructor (opts) {
-    super(opts, 'Unzip')
-  }
-}
-
-class Brotli extends ZlibBase {
-  constructor (opts, mode) {
-    opts = opts || {}
-
-    opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
-    opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
-
-    super(opts, mode)
-
-    this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
-  }
-}
-
-class BrotliCompress extends Brotli {
-  constructor (opts) {
-    super(opts, 'BrotliCompress')
-  }
-}
-
-class BrotliDecompress extends Brotli {
-  constructor (opts) {
-    super(opts, 'BrotliDecompress')
-  }
-}
-
-exports.Deflate = Deflate
-exports.Inflate = Inflate
-exports.Gzip = Gzip
-exports.Gunzip = Gunzip
-exports.DeflateRaw = DeflateRaw
-exports.InflateRaw = InflateRaw
-exports.Unzip = Unzip
-/* istanbul ignore else */
-if (typeof realZlib.BrotliCompress === 'function') {
-  exports.BrotliCompress = BrotliCompress
-  exports.BrotliDecompress = BrotliDecompress
-} else {
-  exports.BrotliCompress = exports.BrotliDecompress = class {
-    constructor () {
-      throw new Error('Brotli is not supported in this version of Node.js')
-    }
-  }
-}
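
A minimal round trip through the classes implemented above, to show the Minipass-flavoured API that dependents rely on; the same named exports exist in the 3.x build that replaces this file, so the sketch is not tied to the removed copy. Expected output is annotated, not captured.

    const { Gzip, Gunzip } = require('minizlib')

    const gzip = new Gzip({ level: 9 })
    const gunzip = new Gunzip()

    const chunks = []
    gunzip.on('data', chunk => chunks.push(chunk))
    gunzip.on('end', () => {
      // => 'hello, world'
      console.log(Buffer.concat(chunks).toString())
    })

    // Gzip and Gunzip are Minipass streams: writes are processed synchronously
    // and piping one into the other round-trips the data in-process.
    gzip.pipe(gunzip)
    gzip.end(Buffer.from('hello, world'))
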
diff --git a/node_modules/minizlib/node_modules/minipass/LICENSE b/node_modules/minizlib/node_modules/minipass/LICENSE
deleted file mode 100644
index bf1dece2e1f12..0000000000000
--- a/node_modules/minizlib/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
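
The file removed next vendored minipass 3.x solely for the old minizlib; the 3.0.2 build depends on minipass ^7 instead (see its package.json above). A small sketch of the buffering behaviour that implementation provides, using the class export shown below; results are illustrative.

    const Minipass = require('minipass')

    const mp = new Minipass({ encoding: 'utf8' })
    mp.write('foo')
    mp.end('bar')

    // Nothing is lost while no one is listening; attaching a 'data' handler
    // (or piping) starts the flow and replays the buffered chunks in order.
    mp.on('data', chunk => console.log(chunk)) // 'foo', then 'bar'
    mp.on('end', () => console.log('done'))
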
diff --git a/node_modules/minizlib/node_modules/minipass/index.js b/node_modules/minizlib/node_modules/minipass/index.js
deleted file mode 100644
index e8797aab6cc27..0000000000000
--- a/node_modules/minizlib/node_modules/minipass/index.js
+++ /dev/null
@@ -1,649 +0,0 @@
-'use strict'
-const proc = typeof process === 'object' && process ? process : {
-  stdout: null,
-  stderr: null,
-}
-const EE = require('events')
-const Stream = require('stream')
-const SD = require('string_decoder').StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-const DESTROYED = Symbol('destroyed')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_  !== '1'
-const ASYNCITERATOR = doIter && Symbol.asyncIterator
-  || Symbol('asyncIterator not implemented')
-const ITERATOR = doIter && Symbol.iterator
-  || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev =>
-  ev === 'end' ||
-  ev === 'finish' ||
-  ev === 'prefinish'
-
-const isArrayBuffer = b => b instanceof ArrayBuffer ||
-  typeof b === 'object' &&
-  b.constructor &&
-  b.constructor.name === 'ArrayBuffer' &&
-  b.byteLength >= 0
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor (src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe () {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors () {}
-  end () {
-    this.unpipe()
-    if (this.opts.end)
-      this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe () {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor (src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-module.exports = class Minipass extends Stream {
-  constructor (options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this.pipes = []
-    this.buffer = []
-    this[OBJECTMODE] = options && options.objectMode || false
-    if (this[OBJECTMODE])
-      this[ENCODING] = null
-    else
-      this[ENCODING] = options && options.encoding || null
-    if (this[ENCODING] === 'buffer')
-      this[ENCODING] = null
-    this[ASYNC] = options && !!options.async || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-  }
-
-  get bufferLength () { return this[BUFFERLENGTH] }
-
-  get encoding () { return this[ENCODING] }
-  set encoding (enc) {
-    if (this[OBJECTMODE])
-      throw new Error('cannot set encoding in objectMode')
-
-    if (this[ENCODING] && enc !== this[ENCODING] &&
-        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this.buffer.length)
-        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding (enc) {
-    this.encoding = enc
-  }
-
-  get objectMode () { return this[OBJECTMODE] }
-  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
-
-  get ['async'] () { return this[ASYNC] }
-  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
-
-  write (chunk, encoding, cb) {
-    if (this[EOF])
-      throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit('error', Object.assign(
-        new Error('Cannot call write after a stream was destroyed'),
-        { code: 'ERR_STREAM_DESTROYED' }
-      ))
-      return true
-    }
-
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-
-    if (!encoding)
-      encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk))
-        chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0)
-        this[FLUSH](true)
-
-      if (this.flowing)
-        this.emit('data', chunk)
-      else
-        this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0)
-        this.emit('readable')
-
-      if (cb)
-        fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0)
-        this.emit('readable')
-      if (cb)
-        fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (typeof chunk === 'string' &&
-        // unless it is a string already ready for us to use
-        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0)
-      this[FLUSH](true)
-
-    if (this.flowing)
-      this.emit('data', chunk)
-    else
-      this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0)
-      this.emit('readable')
-
-    if (cb)
-      fn(cb)
-
-    return this.flowing
-  }
-
-  read (n) {
-    if (this[DESTROYED])
-      return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE])
-      n = null
-
-    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding)
-        this.buffer = [this.buffer.join('')]
-      else
-        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this.buffer[0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ] (n, chunk) {
-    if (n === chunk.length || n === null)
-      this[BUFFERSHIFT]()
-    else {
-      this.buffer[0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this.buffer.length && !this[EOF])
-      this.emit('drain')
-
-    return chunk
-  }
-
-  end (chunk, encoding, cb) {
-    if (typeof chunk === 'function')
-      cb = chunk, chunk = null
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-    if (chunk)
-      this.write(chunk, encoding)
-    if (cb)
-      this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED])
-      this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME] () {
-    if (this[DESTROYED])
-      return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this.buffer.length)
-      this[FLUSH]()
-    else if (this[EOF])
-      this[MAYBE_EMIT_END]()
-    else
-      this.emit('drain')
-  }
-
-  resume () {
-    return this[RESUME]()
-  }
-
-  pause () {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed () {
-    return this[DESTROYED]
-  }
-
-  get flowing () {
-    return this[FLOWING]
-  }
-
-  get paused () {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH] (chunk) {
-    if (this[OBJECTMODE])
-      this[BUFFERLENGTH] += 1
-    else
-      this[BUFFERLENGTH] += chunk.length
-    this.buffer.push(chunk)
-  }
-
-  [BUFFERSHIFT] () {
-    if (this.buffer.length) {
-      if (this[OBJECTMODE])
-        this[BUFFERLENGTH] -= 1
-      else
-        this[BUFFERLENGTH] -= this.buffer[0].length
-    }
-    return this.buffer.shift()
-  }
-
-  [FLUSH] (noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
-
-    if (!noDrain && !this.buffer.length && !this[EOF])
-      this.emit('drain')
-  }
-
-  [FLUSHCHUNK] (chunk) {
-    return chunk ? (this.emit('data', chunk), this.flowing) : false
-  }
-
-  pipe (dest, opts) {
-    if (this[DESTROYED])
-      return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr)
-      opts.end = false
-    else
-      opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end)
-        dest.end()
-    } else {
-      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
-        : new PipeProxyErrors(this, dest, opts))
-      if (this[ASYNC])
-        defer(() => this[RESUME]())
-      else
-        this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe (dest) {
-    const p = this.pipes.find(p => p.dest === dest)
-    if (p) {
-      this.pipes.splice(this.pipes.indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener (ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on (ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this.pipes.length && !this.flowing)
-      this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC])
-        defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else
-        fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd () {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END] () {
-    if (!this[EMITTING_END] &&
-        !this[EMITTED_END] &&
-        !this[DESTROYED] &&
-        this.buffer.length === 0 &&
-        this[EOF]) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED])
-        this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit (ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !data ? false
-        : this[ASYNC] ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED])
-        return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      const ret = super.emit('error', data)
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA] (data) {
-    for (const p of this.pipes) {
-      if (p.dest.write(data) === false)
-        this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND] () {
-    if (this[EMITTED_END])
-      return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC])
-      defer(() => this[EMITEND2]())
-    else
-      this[EMITEND2]()
-  }
-
-  [EMITEND2] () {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this.pipes) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this.pipes) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect () {
-    const buf = []
-    if (!this[OBJECTMODE])
-      buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE])
-        buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat () {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise () {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR] () {
-    const next = () => {
-      const res = this.read()
-      if (res !== null)
-        return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF])
-        return Promise.resolve({ done: true })
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return { next }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR] () {
-    const next = () => {
-      const value = this.read()
-      const done = value === null
-      return { value, done }
-    }
-    return { next }
-  }
-
-  destroy (er) {
-    if (this[DESTROYED]) {
-      if (er)
-        this.emit('error', er)
-      else
-        this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this.buffer.length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED])
-      this.close()
-
-    if (er)
-      this.emit('error', er)
-    else // if no error to emit, still reject pending promises
-      this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream (s) {
-    return !!s && (s instanceof Minipass || s instanceof Stream ||
-      s instanceof EE && (
-        typeof s.pipe === 'function' || // readable
-        (typeof s.write === 'function' && typeof s.end === 'function') // writable
-      ))
-  }
-}
diff --git a/node_modules/minizlib/node_modules/minipass/package.json b/node_modules/minizlib/node_modules/minipass/package.json
deleted file mode 100644
index 548d03fa6d5d4..0000000000000
--- a/node_modules/minizlib/node_modules/minipass/package.json
+++ /dev/null
@@ -1,56 +0,0 @@
-{
-  "name": "minipass",
-  "version": "3.3.6",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "index.js",
-  "types": "index.d.ts",
-  "dependencies": {
-    "yallist": "^4.0.0"
-  },
-  "devDependencies": {
-    "@types/node": "^17.0.41",
-    "end-of-stream": "^1.4.0",
-    "prettier": "^2.6.2",
-    "tap": "^16.2.0",
-    "through2": "^2.0.3",
-    "ts-node": "^10.8.1",
-    "typescript": "^4.7.3"
-  },
-  "scripts": {
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minipass.git"
-  },
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "files": [
-    "index.d.ts",
-    "index.js"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">=8"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/minizlib/package.json b/node_modules/minizlib/package.json
index 98825a549f3fd..43cb855e15a5d 100644
--- a/node_modules/minizlib/package.json
+++ b/node_modules/minizlib/package.json
@@ -1,17 +1,20 @@
 {
   "name": "minizlib",
-  "version": "2.1.2",
+  "version": "3.0.2",
   "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "index.js",
+  "main": "./dist/commonjs/index.js",
   "dependencies": {
-    "minipass": "^3.0.0",
-    "yallist": "^4.0.0"
+    "minipass": "^7.1.2"
   },
   "scripts": {
-    "test": "tap test/*.js --100 -J",
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
+    "test": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
-    "postpublish": "git push origin --all; git push origin --tags"
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
   },
   "repository": {
     "type": "git",
@@ -30,13 +33,48 @@
   "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
   "license": "MIT",
   "devDependencies": {
-    "tap": "^14.6.9"
+    "@types/node": "^22.13.14",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.1"
   },
   "files": [
-    "index.js",
-    "constants.js"
+    "dist"
   ],
   "engines": {
-    "node": ">= 8"
-  }
+    "node": ">= 18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "module": "./dist/esm/index.js"
 }
diff --git a/node_modules/node-gyp/node_modules/minizlib/LICENSE b/node_modules/node-gyp/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc9..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index b4906d2783372..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,392 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const realZlib = __importStar(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            (0, assert_1.default)(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = buffer_1.Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        (0, assert_1.default)(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
-            (0, assert_1.default)(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js b/node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js b/node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index f33586a8ab0ec..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,340 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import * as realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            assert(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        assert(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-export class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
-        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants.Z_SYNC_FLUSH);
-            assert(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-export class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-export class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-export class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-export class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-export class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-export class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-export class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json b/node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/node-gyp/node_modules/minizlib/package.json b/node_modules/node-gyp/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/node-gyp/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE b/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc9..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js b/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index b4906d2783372..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,392 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const realZlib = __importStar(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            (0, assert_1.default)(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = buffer_1.Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        (0, assert_1.default)(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
-            (0, assert_1.default)(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json b/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js b/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js b/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index f33586a8ab0ec..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,340 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import * as realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            assert(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        assert(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-export class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
-        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants.Z_SYNC_FLUSH);
-            assert(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-export class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-export class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-export class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-export class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-export class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-export class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-export class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json b/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/npm-registry-fetch/node_modules/minizlib/package.json b/node_modules/npm-registry-fetch/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/npm-registry-fetch/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/pacote/node_modules/minizlib/LICENSE b/node_modules/pacote/node_modules/minizlib/LICENSE
deleted file mode 100644
index 49f7efe431c9e..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors
-Copyright (c) 2017-2023 Node.js contributors. All rights reserved.
-Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js b/node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js
deleted file mode 100644
index dfc2c1957bfc9..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/dist/commonjs/constants.js
+++ /dev/null
@@ -1,123 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.constants = void 0;
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const zlib_1 = __importDefault(require("zlib"));
-/* c8 ignore start */
-const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-exports.constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js b/node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js
deleted file mode 100644
index b4906d2783372..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/dist/commonjs/index.js
+++ /dev/null
@@ -1,392 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || (function () {
-    var ownKeys = function(o) {
-        ownKeys = Object.getOwnPropertyNames || function (o) {
-            var ar = [];
-            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
-            return ar;
-        };
-        return ownKeys(o);
-    };
-    return function (mod) {
-        if (mod && mod.__esModule) return mod;
-        var result = {};
-        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
-        __setModuleDefault(result, mod);
-        return result;
-    };
-})();
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
-const assert_1 = __importDefault(require("assert"));
-const buffer_1 = require("buffer");
-const minipass_1 = require("minipass");
-const realZlib = __importStar(require("zlib"));
-const constants_js_1 = require("./constants.js");
-var constants_js_2 = require("./constants.js");
-Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } });
-const OriginalBufferConcat = buffer_1.Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(buffer_1.Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        buffer_1.Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-exports.ZlibError = ZlibError;
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends minipass_1.Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            (0, assert_1.default)(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = buffer_1.Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        (0, assert_1.default)(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](buffer_1.Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants_js_1.constants.Z_SYNC_FLUSH);
-            (0, assert_1.default)(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-exports.Zlib = Zlib;
-// minimal 2-byte header
-class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-exports.Deflate = Deflate;
-class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-exports.Inflate = Inflate;
-class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-exports.Gzip = Gzip;
-class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-exports.Gunzip = Gunzip;
-// raw - no header
-class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-exports.DeflateRaw = DeflateRaw;
-class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-exports.InflateRaw = InflateRaw;
-// auto-detect header.
-class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-exports.Unzip = Unzip;
-class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-exports.Brotli = Brotli;
-class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-exports.BrotliCompress = BrotliCompress;
-class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-exports.BrotliDecompress = BrotliDecompress;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json b/node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/pacote/node_modules/minizlib/dist/esm/constants.js b/node_modules/pacote/node_modules/minizlib/dist/esm/constants.js
deleted file mode 100644
index 7faf40be5068d..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/dist/esm/constants.js
+++ /dev/null
@@ -1,117 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-import realZlib from 'zlib';
-/* c8 ignore start */
-const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 };
-/* c8 ignore stop */
-export const constants = Object.freeze(Object.assign(Object.create(null), {
-    Z_NO_FLUSH: 0,
-    Z_PARTIAL_FLUSH: 1,
-    Z_SYNC_FLUSH: 2,
-    Z_FULL_FLUSH: 3,
-    Z_FINISH: 4,
-    Z_BLOCK: 5,
-    Z_OK: 0,
-    Z_STREAM_END: 1,
-    Z_NEED_DICT: 2,
-    Z_ERRNO: -1,
-    Z_STREAM_ERROR: -2,
-    Z_DATA_ERROR: -3,
-    Z_MEM_ERROR: -4,
-    Z_BUF_ERROR: -5,
-    Z_VERSION_ERROR: -6,
-    Z_NO_COMPRESSION: 0,
-    Z_BEST_SPEED: 1,
-    Z_BEST_COMPRESSION: 9,
-    Z_DEFAULT_COMPRESSION: -1,
-    Z_FILTERED: 1,
-    Z_HUFFMAN_ONLY: 2,
-    Z_RLE: 3,
-    Z_FIXED: 4,
-    Z_DEFAULT_STRATEGY: 0,
-    DEFLATE: 1,
-    INFLATE: 2,
-    GZIP: 3,
-    GUNZIP: 4,
-    DEFLATERAW: 5,
-    INFLATERAW: 6,
-    UNZIP: 7,
-    BROTLI_DECODE: 8,
-    BROTLI_ENCODE: 9,
-    Z_MIN_WINDOWBITS: 8,
-    Z_MAX_WINDOWBITS: 15,
-    Z_DEFAULT_WINDOWBITS: 15,
-    Z_MIN_CHUNK: 64,
-    Z_MAX_CHUNK: Infinity,
-    Z_DEFAULT_CHUNK: 16384,
-    Z_MIN_MEMLEVEL: 1,
-    Z_MAX_MEMLEVEL: 9,
-    Z_DEFAULT_MEMLEVEL: 8,
-    Z_MIN_LEVEL: -1,
-    Z_MAX_LEVEL: 9,
-    Z_DEFAULT_LEVEL: -1,
-    BROTLI_OPERATION_PROCESS: 0,
-    BROTLI_OPERATION_FLUSH: 1,
-    BROTLI_OPERATION_FINISH: 2,
-    BROTLI_OPERATION_EMIT_METADATA: 3,
-    BROTLI_MODE_GENERIC: 0,
-    BROTLI_MODE_TEXT: 1,
-    BROTLI_MODE_FONT: 2,
-    BROTLI_DEFAULT_MODE: 0,
-    BROTLI_MIN_QUALITY: 0,
-    BROTLI_MAX_QUALITY: 11,
-    BROTLI_DEFAULT_QUALITY: 11,
-    BROTLI_MIN_WINDOW_BITS: 10,
-    BROTLI_MAX_WINDOW_BITS: 24,
-    BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-    BROTLI_DEFAULT_WINDOW: 22,
-    BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-    BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-    BROTLI_PARAM_MODE: 0,
-    BROTLI_PARAM_QUALITY: 1,
-    BROTLI_PARAM_LGWIN: 2,
-    BROTLI_PARAM_LGBLOCK: 3,
-    BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-    BROTLI_PARAM_SIZE_HINT: 5,
-    BROTLI_PARAM_LARGE_WINDOW: 6,
-    BROTLI_PARAM_NPOSTFIX: 7,
-    BROTLI_PARAM_NDIRECT: 8,
-    BROTLI_DECODER_RESULT_ERROR: 0,
-    BROTLI_DECODER_RESULT_SUCCESS: 1,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-    BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-    BROTLI_DECODER_NO_ERROR: 0,
-    BROTLI_DECODER_SUCCESS: 1,
-    BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-    BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-    BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-    BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-    BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-    BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-    BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-    BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-    BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-    BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-    BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-    BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-    BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-    BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-    BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-    BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-    BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-    BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-    BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-    BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-    BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants));
-//# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/minizlib/dist/esm/index.js b/node_modules/pacote/node_modules/minizlib/dist/esm/index.js
deleted file mode 100644
index f33586a8ab0ec..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/dist/esm/index.js
+++ /dev/null
@@ -1,340 +0,0 @@
-import assert from 'assert';
-import { Buffer } from 'buffer';
-import { Minipass } from 'minipass';
-import * as realZlib from 'zlib';
-import { constants } from './constants.js';
-export { constants } from './constants.js';
-const OriginalBufferConcat = Buffer.concat;
-const desc = Object.getOwnPropertyDescriptor(Buffer, 'concat');
-const noop = (args) => args;
-const passthroughBufferConcat = desc?.writable === true || desc?.set !== undefined
-    ? (makeNoOp) => {
-        Buffer.concat = makeNoOp ? noop : OriginalBufferConcat;
-    }
-    : (_) => { };
-const _superWrite = Symbol('_superWrite');
-export class ZlibError extends Error {
-    code;
-    errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
-        this.code = err.code;
-        this.errno = err.errno;
-        /* c8 ignore next */
-        if (!this.code)
-            this.code = 'ZLIB_ERROR';
-        this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
-    }
-    get name() {
-        return 'ZlibError';
-    }
-}
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _flushFlag = Symbol('flushFlag');
-class ZlibBase extends Minipass {
-    #sawError = false;
-    #ended = false;
-    #flushFlag;
-    #finishFlushFlag;
-    #fullFlushFlag;
-    #handle;
-    #onError;
-    get sawError() {
-        return this.#sawError;
-    }
-    get handle() {
-        return this.#handle;
-    }
-    /* c8 ignore start */
-    get flushFlag() {
-        return this.#flushFlag;
-    }
-    /* c8 ignore stop */
-    constructor(opts, mode) {
-        if (!opts || typeof opts !== 'object')
-            throw new TypeError('invalid options for ZlibBase constructor');
-        //@ts-ignore
-        super(opts);
-        /* c8 ignore start */
-        this.#flushFlag = opts.flush ?? 0;
-        this.#finishFlushFlag = opts.finishFlush ?? 0;
-        this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
-        /* c8 ignore stop */
-        // this will throw if any options are invalid for the class selected
-        try {
-            // @types/node doesn't know that it exports the classes, but they're there
-            //@ts-ignore
-            this.#handle = new realZlib[mode](opts);
-        }
-        catch (er) {
-            // make sure that all errors get decorated properly
-            throw new ZlibError(er);
-        }
-        this.#onError = err => {
-            // no sense raising multiple errors, since we abort on the first one.
-            if (this.#sawError)
-                return;
-            this.#sawError = true;
-            // there is no way to cleanly recover.
-            // continuing only obscures problems.
-            this.close();
-            this.emit('error', err);
-        };
-        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
-        this.once('end', () => this.close);
-    }
-    close() {
-        if (this.#handle) {
-            this.#handle.close();
-            this.#handle = undefined;
-            this.emit('close');
-        }
-    }
-    reset() {
-        if (!this.#sawError) {
-            assert(this.#handle, 'zlib binding closed');
-            //@ts-ignore
-            return this.#handle.reset?.();
-        }
-    }
-    flush(flushFlag) {
-        if (this.ended)
-            return;
-        if (typeof flushFlag !== 'number')
-            flushFlag = this.#fullFlushFlag;
-        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
-    }
-    end(chunk, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (chunk) {
-            if (encoding)
-                this.write(chunk, encoding);
-            else
-                this.write(chunk);
-        }
-        this.flush(this.#finishFlushFlag);
-        this.#ended = true;
-        return super.end(cb);
-    }
-    get ended() {
-        return this.#ended;
-    }
-    // overridden in the gzip classes to do portable writes
-    [_superWrite](data) {
-        return super.write(data);
-    }
-    write(chunk, encoding, cb) {
-        // process the chunk using the sync process
-        // then super.write() all the outputted chunks
-        if (typeof encoding === 'function')
-            (cb = encoding), (encoding = 'utf8');
-        if (typeof chunk === 'string')
-            chunk = Buffer.from(chunk, encoding);
-        if (this.#sawError)
-            return;
-        assert(this.#handle, 'zlib binding closed');
-        // _processChunk tries to .close() the native handle after it's done, so we
-        // intercept that by temporarily making it a no-op.
-        // diving into the node:zlib internals a bit here
-        const nativeHandle = this.#handle
-            ._handle;
-        const originalNativeClose = nativeHandle.close;
-        nativeHandle.close = () => { };
-        const originalClose = this.#handle.close;
-        this.#handle.close = () => { };
-        // It also calls `Buffer.concat()` at the end, which may be convenient
-        // for some, but which we are not interested in as it slows us down.
-        passthroughBufferConcat(true);
-        let result = undefined;
-        try {
-            const flushFlag = typeof chunk[_flushFlag] === 'number'
-                ? chunk[_flushFlag]
-                : this.#flushFlag;
-            result = this.#handle._processChunk(chunk, flushFlag);
-            // if we don't throw, reset it back how it was
-            passthroughBufferConcat(false);
-        }
-        catch (err) {
-            // or if we do, put Buffer.concat() back before we emit error
-            // Error events call into user code, which may call Buffer.concat()
-            passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
-        }
-        finally {
-            if (this.#handle) {
-                // Core zlib resets `_handle` to null after attempting to close the
-                // native handle. Our no-op handler prevented actual closure, but we
-                // need to restore the `._handle` property.
-                ;
-                this.#handle._handle =
-                    nativeHandle;
-                nativeHandle.close = originalNativeClose;
-                this.#handle.close = originalClose;
-                // `_processChunk()` adds an 'error' listener. If we don't remove it
-                // after each call, these handlers start piling up.
-                this.#handle.removeAllListeners('error');
-                // make sure OUR error listener is still attached tho
-            }
-        }
-        if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
-        let writeReturn;
-        if (result) {
-            if (Array.isArray(result) && result.length > 0) {
-                const r = result[0];
-                // The first buffer is always `handle._outBuffer`, which would be
-                // re-used for later invocations; so, we always have to copy that one.
-                writeReturn = this[_superWrite](Buffer.from(r));
-                for (let i = 1; i < result.length; i++) {
-                    writeReturn = this[_superWrite](result[i]);
-                }
-            }
-            else {
-                // either a single Buffer or an empty array
-                writeReturn = this[_superWrite](Buffer.from(result));
-            }
-        }
-        if (cb)
-            cb();
-        return writeReturn;
-    }
-}
-export class Zlib extends ZlibBase {
-    #level;
-    #strategy;
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.Z_NO_FLUSH;
-        opts.finishFlush = opts.finishFlush || constants.Z_FINISH;
-        opts.fullFlushFlag = constants.Z_FULL_FLUSH;
-        super(opts, mode);
-        this.#level = opts.level;
-        this.#strategy = opts.strategy;
-    }
-    params(level, strategy) {
-        if (this.sawError)
-            return;
-        if (!this.handle)
-            throw new Error('cannot switch params when binding is closed');
-        // no way to test this without also not supporting params at all
-        /* c8 ignore start */
-        if (!this.handle.params)
-            throw new Error('not supported in this implementation');
-        /* c8 ignore stop */
-        if (this.#level !== level || this.#strategy !== strategy) {
-            this.flush(constants.Z_SYNC_FLUSH);
-            assert(this.handle, 'zlib binding closed');
-            // .params() calls .flush(), but the latter is always async in the
-            // core zlib. We override .flush() temporarily to intercept that and
-            // flush synchronously.
-            const origFlush = this.handle.flush;
-            this.handle.flush = (flushFlag, cb) => {
-                /* c8 ignore start */
-                if (typeof flushFlag === 'function') {
-                    cb = flushFlag;
-                    flushFlag = this.flushFlag;
-                }
-                /* c8 ignore stop */
-                this.flush(flushFlag);
-                cb?.();
-            };
-            try {
-                ;
-                this.handle.params(level, strategy);
-            }
-            finally {
-                this.handle.flush = origFlush;
-            }
-            /* c8 ignore start */
-            if (this.handle) {
-                this.#level = level;
-                this.#strategy = strategy;
-            }
-            /* c8 ignore stop */
-        }
-    }
-}
-// minimal 2-byte header
-export class Deflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Deflate');
-    }
-}
-export class Inflate extends Zlib {
-    constructor(opts) {
-        super(opts, 'Inflate');
-    }
-}
-export class Gzip extends Zlib {
-    #portable;
-    constructor(opts) {
-        super(opts, 'Gzip');
-        this.#portable = opts && !!opts.portable;
-    }
-    [_superWrite](data) {
-        if (!this.#portable)
-            return super[_superWrite](data);
-        // we'll always get the header emitted in one first chunk
-        // overwrite the OS indicator byte with 0xFF
-        this.#portable = false;
-        data[9] = 255;
-        return super[_superWrite](data);
-    }
-}
-export class Gunzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Gunzip');
-    }
-}
-// raw - no header
-export class DeflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'DeflateRaw');
-    }
-}
-export class InflateRaw extends Zlib {
-    constructor(opts) {
-        super(opts, 'InflateRaw');
-    }
-}
-// auto-detect header.
-export class Unzip extends Zlib {
-    constructor(opts) {
-        super(opts, 'Unzip');
-    }
-}
-export class Brotli extends ZlibBase {
-    constructor(opts, mode) {
-        opts = opts || {};
-        opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
-        opts.finishFlush =
-            opts.finishFlush || constants.BROTLI_OPERATION_FINISH;
-        opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH;
-        super(opts, mode);
-    }
-}
-export class BrotliCompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliCompress');
-    }
-}
-export class BrotliDecompress extends Brotli {
-    constructor(opts) {
-        super(opts, 'BrotliDecompress');
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
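
For reference, a minimal usage sketch of the minizlib stream classes removed above (illustrative only; every class is a Minipass stream, so a round trip can be driven with plain stream events, and the payload below is an assumed example):

    // assumes `npm install minizlib` (the v3 ESM build shown above)
    import { Gzip, Gunzip } from 'minizlib'

    const gzip = new Gzip({ portable: true }) // portable: OS byte forced to 0xFF, as in the Gzip class above
    const gunzip = new Gunzip({})

    const chunks = []
    gunzip.on('data', c => chunks.push(c))
    gunzip.on('end', () => {
      // the round trip should reproduce the original input
      console.log(Buffer.concat(chunks).toString()) // 'hello, world'
    })

    gzip.pipe(gunzip)
    gzip.end(Buffer.from('hello, world'))
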
diff --git a/node_modules/pacote/node_modules/minizlib/dist/esm/package.json b/node_modules/pacote/node_modules/minizlib/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/minizlib/package.json b/node_modules/pacote/node_modules/minizlib/package.json
deleted file mode 100644
index 43cb855e15a5d..0000000000000
--- a/node_modules/pacote/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,80 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "3.0.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "./dist/commonjs/index.js",
-  "dependencies": {
-    "minipass": "^7.1.2"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "@types/node": "^22.13.14",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.1"
-  },
-  "files": [
-    "dist"
-  ],
-  "engines": {
-    "node": ">= 18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/package-lock.json b/package-lock.json
index 17b77a34c919d..a3afc342ec939 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3997,6 +3997,33 @@
         "encoding": "^0.1.13"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/minizlib": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
+      "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "minipass": "^3.0.0",
+        "yallist": "^4.0.0"
+      },
+      "engines": {
+        "node": ">= 8"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/minizlib/node_modules/minipass": {
+      "version": "3.3.6",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
+      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "yallist": "^4.0.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/negotiator": {
       "version": "0.6.4",
       "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
@@ -10604,17 +10631,6 @@
         "encoding": "^0.1.13"
       }
     },
-    "node_modules/minipass-fetch/node_modules/minizlib": {
-      "version": "3.0.2",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
     "node_modules/minipass-flush": {
       "version": "1.0.5",
       "inBundle": true,
@@ -10682,26 +10698,16 @@
       }
     },
     "node_modules/minizlib": {
-      "version": "2.1.2",
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
+      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
-        "minipass": "^3.0.0",
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">= 8"
-      }
-    },
-    "node_modules/minizlib/node_modules/minipass": {
-      "version": "3.3.6",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "yallist": "^4.0.0"
+        "minipass": "^7.1.2"
       },
       "engines": {
-        "node": ">=8"
+        "node": ">= 18"
       }
     },
     "node_modules/mkdirp": {
@@ -10926,17 +10932,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/node-gyp/node_modules/minizlib": {
-      "version": "3.0.2",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
     "node_modules/node-gyp/node_modules/mkdirp": {
       "version": "3.0.1",
       "inBundle": true,
@@ -11167,17 +11162,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-registry-fetch/node_modules/minizlib": {
-      "version": "3.0.2",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
     "node_modules/npm-user-validate": {
       "version": "3.0.0",
       "inBundle": true,
@@ -11704,17 +11688,6 @@
         "node": ">=18"
       }
     },
-    "node_modules/pacote/node_modules/minizlib": {
-      "version": "3.0.2",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": ">= 18"
-      }
-    },
     "node_modules/pacote/node_modules/mkdirp": {
       "version": "3.0.1",
       "inBundle": true,
@@ -15721,6 +15694,33 @@
         "node": ">=8"
       }
     },
+    "node_modules/tar/node_modules/minizlib": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
+      "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
+      "inBundle": true,
+      "license": "MIT",
+      "dependencies": {
+        "minipass": "^3.0.0",
+        "yallist": "^4.0.0"
+      },
+      "engines": {
+        "node": ">= 8"
+      }
+    },
+    "node_modules/tar/node_modules/minizlib/node_modules/minipass": {
+      "version": "3.3.6",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
+      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "yallist": "^4.0.0"
+      },
+      "engines": {
+        "node": ">=8"
+      }
+    },
     "node_modules/tcompare": {
       "version": "5.0.7",
       "dev": true,

From 817f0b1eb57b9b0e5893beac11f053e3a7d3f765 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:16:22 -0700
Subject: [PATCH 26/63] deps: ignore-walk@8.0.0

---
 docs/package.json                             |   2 +-
 node_modules/.gitignore                       |  40 +-
 .../node_modules => }/ignore-walk/LICENSE     |   0
 .../ignore-walk/lib/index.js                  |   0
 .../node_modules/minimatch/LICENSE            |   0
 .../dist/commonjs/assert-valid-pattern.js     |   0
 .../minimatch/dist/commonjs/ast.js            |   0
 .../dist/commonjs/brace-expressions.js        |   0
 .../minimatch/dist/commonjs/escape.js         |   0
 .../minimatch/dist/commonjs/index.js          |   0
 .../minimatch/dist/commonjs/package.json      |   0
 .../minimatch/dist/commonjs/unescape.js       |   0
 .../dist/esm/assert-valid-pattern.js          |   0
 .../node_modules/minimatch/dist/esm/ast.js    |   0
 .../minimatch/dist/esm/brace-expressions.js   |   0
 .../node_modules/minimatch/dist/esm/escape.js |   0
 .../node_modules/minimatch/dist/esm/index.js  |   0
 .../minimatch/dist/esm/package.json           |   0
 .../minimatch/dist/esm/unescape.js            |   0
 .../node_modules/minimatch/package.json       |   0
 .../ignore-walk/package.json                  |   0
 node_modules/negotiator/HISTORY.md            | 114 +++
 node_modules/negotiator/LICENSE               |  24 +
 node_modules/negotiator/index.js              |  83 +++
 node_modules/negotiator/lib/charset.js        | 169 +++++
 node_modules/negotiator/lib/encoding.js       | 205 ++++++
 node_modules/negotiator/lib/language.js       | 179 +++++
 node_modules/negotiator/lib/mediaType.js      | 294 ++++++++
 node_modules/negotiator/package.json          |  43 ++
 .../node_modules/@npmcli/agent/lib/agents.js  | 206 ++++++
 .../node_modules/@npmcli/agent/lib/dns.js     |  53 ++
 .../node_modules/@npmcli/agent/lib/errors.js  |  61 ++
 .../node_modules/@npmcli/agent/lib/index.js   |  56 ++
 .../node_modules/@npmcli/agent/lib/options.js |  86 +++
 .../node_modules/@npmcli/agent/lib/proxy.js   |  88 +++
 .../node_modules/@npmcli/agent/package.json   |  60 ++
 .../tar/node_modules/minizlib/LICENSE         |  26 +
 .../tar/node_modules/minizlib/constants.js    | 115 ++++
 .../tar/node_modules/minizlib/index.js        | 348 ++++++++++
 .../minizlib/node_modules/minipass/LICENSE    |  15 +
 .../minizlib/node_modules/minipass/index.js   | 649 ++++++++++++++++++
 .../node_modules/minipass/package.json        |  56 ++
 .../tar/node_modules/minizlib/package.json    |  42 ++
 package-lock.json                             |  53 +-
 44 files changed, 3008 insertions(+), 59 deletions(-)
 rename node_modules/{npm-packlist/node_modules => }/ignore-walk/LICENSE (100%)
 rename node_modules/{npm-packlist/node_modules => }/ignore-walk/lib/index.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/LICENSE (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/ast.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/brace-expressions.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/escape.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/index.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/package.json (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/commonjs/unescape.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/assert-valid-pattern.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/ast.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/brace-expressions.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/escape.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/index.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/package.json (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/dist/esm/unescape.js (100%)
 rename node_modules/{npm-packlist => ignore-walk}/node_modules/minimatch/package.json (100%)
 rename node_modules/{npm-packlist/node_modules => }/ignore-walk/package.json (100%)
 create mode 100644 node_modules/negotiator/HISTORY.md
 create mode 100644 node_modules/negotiator/LICENSE
 create mode 100644 node_modules/negotiator/index.js
 create mode 100644 node_modules/negotiator/lib/charset.js
 create mode 100644 node_modules/negotiator/lib/encoding.js
 create mode 100644 node_modules/negotiator/lib/language.js
 create mode 100644 node_modules/negotiator/lib/mediaType.js
 create mode 100644 node_modules/negotiator/package.json
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js
 create mode 100644 node_modules/node-gyp/node_modules/@npmcli/agent/package.json
 create mode 100644 node_modules/tar/node_modules/minizlib/LICENSE
 create mode 100644 node_modules/tar/node_modules/minizlib/constants.js
 create mode 100644 node_modules/tar/node_modules/minizlib/index.js
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
 create mode 100644 node_modules/tar/node_modules/minizlib/package.json

diff --git a/docs/package.json b/docs/package.json
index 74c9e7da32114..d1d1884e4ba65 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -25,7 +25,7 @@
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/template-oss": "4.24.4",
     "front-matter": "^4.0.2",
-    "ignore-walk": "^7.0.0",
+    "ignore-walk": "^8.0.0",
     "jsdom": "^24.0.0",
     "rehype-stringify": "^9.0.3",
     "remark-gfm": "^3.0.1",
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 21cc085017b8d..0bb774f820179 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -19,9 +19,6 @@
 !/@npmcli/
 /@npmcli/*
 !/@npmcli/agent
-!/@npmcli/agent/node_modules/
-/@npmcli/agent/node_modules/*
-!/@npmcli/agent/node_modules/lru-cache
 !/@npmcli/fs
 !/@npmcli/git
 !/@npmcli/installed-package-contents
@@ -109,6 +106,10 @@
 !/http-proxy-agent
 !/https-proxy-agent
 !/iconv-lite
+!/ignore-walk
+!/ignore-walk/node_modules/
+/ignore-walk/node_modules/*
+!/ignore-walk/node_modules/minimatch
 !/imurmurhash
 !/ini
 !/init-package-json
@@ -126,19 +127,9 @@
 !/just-diff
 !/lru-cache
 !/make-fetch-happen
-!/make-fetch-happen/node_modules/
-/make-fetch-happen/node_modules/*
-!/make-fetch-happen/node_modules/@npmcli/
-/make-fetch-happen/node_modules/@npmcli/*
-!/make-fetch-happen/node_modules/@npmcli/agent
-!/make-fetch-happen/node_modules/lru-cache
-!/make-fetch-happen/node_modules/negotiator
 !/minimatch
 !/minipass-collect
 !/minipass-fetch
-!/minipass-fetch/node_modules/
-/minipass-fetch/node_modules/*
-!/minipass-fetch/node_modules/minizlib
 !/minipass-flush
 !/minipass-flush/node_modules/
 /minipass-flush/node_modules/*
@@ -153,22 +144,21 @@
 !/minipass-sized/node_modules/minipass
 !/minipass
 !/minizlib
-!/minizlib/node_modules/
-/minizlib/node_modules/*
-!/minizlib/node_modules/minipass
 !/mkdirp
 !/ms
 !/mute-stream
+!/negotiator
 !/node-gyp
 !/node-gyp/node_modules/
 /node-gyp/node_modules/*
+!/node-gyp/node_modules/@npmcli/
+/node-gyp/node_modules/@npmcli/*
+!/node-gyp/node_modules/@npmcli/agent
 !/node-gyp/node_modules/cacache
 !/node-gyp/node_modules/chownr
 !/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
-!/node-gyp/node_modules/minizlib
 !/node-gyp/node_modules/mkdirp
-!/node-gyp/node_modules/negotiator
 !/node-gyp/node_modules/tar
 !/node-gyp/node_modules/yallist
 !/nopt
@@ -179,16 +169,9 @@
 !/npm-normalize-package-bin
 !/npm-package-arg
 !/npm-packlist
-!/npm-packlist/node_modules/
-/npm-packlist/node_modules/*
-!/npm-packlist/node_modules/ignore-walk
-!/npm-packlist/node_modules/minimatch
 !/npm-pick-manifest
 !/npm-profile
 !/npm-registry-fetch
-!/npm-registry-fetch/node_modules/
-/npm-registry-fetch/node_modules/*
-!/npm-registry-fetch/node_modules/minizlib
 !/npm-user-validate
 !/p-map
 !/package-json-from-dist
@@ -196,10 +179,7 @@
 !/pacote/node_modules/
 /pacote/node_modules/*
 !/pacote/node_modules/chownr
-!/pacote/node_modules/minizlib
 !/pacote/node_modules/mkdirp
-!/pacote/node_modules/npm-package-arg
-!/pacote/node_modules/npm-pick-manifest
 !/pacote/node_modules/tar
 !/pacote/node_modules/yallist
 !/parse-conflict-json
@@ -250,6 +230,10 @@
 /tar/node_modules/fs-minipass/node_modules/*
 !/tar/node_modules/fs-minipass/node_modules/minipass
 !/tar/node_modules/minipass
+!/tar/node_modules/minizlib
+!/tar/node_modules/minizlib/node_modules/
+/tar/node_modules/minizlib/node_modules/*
+!/tar/node_modules/minizlib/node_modules/minipass
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
diff --git a/node_modules/npm-packlist/node_modules/ignore-walk/LICENSE b/node_modules/ignore-walk/LICENSE
similarity index 100%
rename from node_modules/npm-packlist/node_modules/ignore-walk/LICENSE
rename to node_modules/ignore-walk/LICENSE
diff --git a/node_modules/npm-packlist/node_modules/ignore-walk/lib/index.js b/node_modules/ignore-walk/lib/index.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/ignore-walk/lib/index.js
rename to node_modules/ignore-walk/lib/index.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/LICENSE b/node_modules/ignore-walk/node_modules/minimatch/LICENSE
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/LICENSE
rename to node_modules/ignore-walk/node_modules/minimatch/LICENSE
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/ast.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/ast.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/ast.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/brace-expressions.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/brace-expressions.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/brace-expressions.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/escape.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/escape.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/escape.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/index.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/index.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/index.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/index.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/package.json b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/package.json
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/package.json
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/package.json
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/unescape.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/commonjs/unescape.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/unescape.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/assert-valid-pattern.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/assert-valid-pattern.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/assert-valid-pattern.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/ast.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/ast.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/ast.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/ast.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/brace-expressions.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/brace-expressions.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/brace-expressions.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/escape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/escape.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/escape.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/escape.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/index.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/index.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/index.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/index.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/package.json b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/package.json
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/package.json
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/package.json
diff --git a/node_modules/npm-packlist/node_modules/minimatch/dist/esm/unescape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/unescape.js
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/dist/esm/unescape.js
rename to node_modules/ignore-walk/node_modules/minimatch/dist/esm/unescape.js
diff --git a/node_modules/npm-packlist/node_modules/minimatch/package.json b/node_modules/ignore-walk/node_modules/minimatch/package.json
similarity index 100%
rename from node_modules/npm-packlist/node_modules/minimatch/package.json
rename to node_modules/ignore-walk/node_modules/minimatch/package.json
diff --git a/node_modules/npm-packlist/node_modules/ignore-walk/package.json b/node_modules/ignore-walk/package.json
similarity index 100%
rename from node_modules/npm-packlist/node_modules/ignore-walk/package.json
rename to node_modules/ignore-walk/package.json
diff --git a/node_modules/negotiator/HISTORY.md b/node_modules/negotiator/HISTORY.md
new file mode 100644
index 0000000000000..63d537d3f6811
--- /dev/null
+++ b/node_modules/negotiator/HISTORY.md
@@ -0,0 +1,114 @@
+1.0.0 / 2024-08-31
+==================
+
+  * Drop support for node <18
+  * Added an option preferred encodings array #59
+
+0.6.3 / 2022-01-22
+==================
+
+  * Revert "Lazy-load modules from main entry point"
+
+0.6.2 / 2019-04-29
+==================
+
+  * Fix sorting charset, encoding, and language with extra parameters
+
+0.6.1 / 2016-05-02
+==================
+
+  * perf: improve `Accept` parsing speed
+  * perf: improve `Accept-Charset` parsing speed
+  * perf: improve `Accept-Encoding` parsing speed
+  * perf: improve `Accept-Language` parsing speed
+
+0.6.0 / 2015-09-29
+==================
+
+  * Fix including type extensions in parameters in `Accept` parsing
+  * Fix parsing `Accept` parameters with quoted equals
+  * Fix parsing `Accept` parameters with quoted semicolons
+  * Lazy-load modules from main entry point
+  * perf: delay type concatenation until needed
+  * perf: enable strict mode
+  * perf: hoist regular expressions
+  * perf: remove closures getting spec properties
+  * perf: remove a closure from media type parsing
+  * perf: remove property delete from media type parsing
+
+0.5.3 / 2015-05-10
+==================
+
+  * Fix media type parameter matching to be case-insensitive
+
+0.5.2 / 2015-05-06
+==================
+
+  * Fix comparing media types with quoted values
+  * Fix splitting media types with quoted commas
+
+0.5.1 / 2015-02-14
+==================
+
+  * Fix preference sorting to be stable for long acceptable lists
+
+0.5.0 / 2014-12-18
+==================
+
+  * Fix list return order when large accepted list
+  * Fix missing identity encoding when q=0 exists
+  * Remove dynamic building of Negotiator class
+
+0.4.9 / 2014-10-14
+==================
+
+  * Fix error when media type has invalid parameter
+
+0.4.8 / 2014-09-28
+==================
+
+  * Fix all negotiations to be case-insensitive
+  * Stable sort preferences of same quality according to client order
+  * Support Node.js 0.6
+
+0.4.7 / 2014-06-24
+==================
+
+  * Handle invalid provided languages
+  * Handle invalid provided media types
+
+0.4.6 / 2014-06-11
+==================
+
+  *  Order by specificity when quality is the same
+
+0.4.5 / 2014-05-29
+==================
+
+  * Fix regression in empty header handling
+
+0.4.4 / 2014-05-29
+==================
+
+  * Fix behaviors when headers are not present
+
+0.4.3 / 2014-04-16
+==================
+
+  * Handle slashes on media params correctly
+
+0.4.2 / 2014-02-28
+==================
+
+  * Fix media type sorting
+  * Handle media types params strictly
+
+0.4.1 / 2014-01-16
+==================
+
+  * Use most specific matches
+
+0.4.0 / 2014-01-09
+==================
+
+  * Remove preferred prefix from methods
diff --git a/node_modules/negotiator/LICENSE b/node_modules/negotiator/LICENSE
new file mode 100644
index 0000000000000..ea6b9e2e9ac25
--- /dev/null
+++ b/node_modules/negotiator/LICENSE
@@ -0,0 +1,24 @@
+(The MIT License)
+
+Copyright (c) 2012-2014 Federico Romero
+Copyright (c) 2012-2014 Isaac Z. Schlueter
+Copyright (c) 2014-2015 Douglas Christopher Wilson
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+'Software'), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/negotiator/index.js b/node_modules/negotiator/index.js
new file mode 100644
index 0000000000000..4f51315d6af4b
--- /dev/null
+++ b/node_modules/negotiator/index.js
@@ -0,0 +1,83 @@
+/*!
+ * negotiator
+ * Copyright(c) 2012 Federico Romero
+ * Copyright(c) 2012-2014 Isaac Z. Schlueter
+ * Copyright(c) 2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+var preferredCharsets = require('./lib/charset')
+var preferredEncodings = require('./lib/encoding')
+var preferredLanguages = require('./lib/language')
+var preferredMediaTypes = require('./lib/mediaType')
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = Negotiator;
+module.exports.Negotiator = Negotiator;
+
+/**
+ * Create a Negotiator instance from a request.
+ * @param {object} request
+ * @public
+ */
+
+function Negotiator(request) {
+  if (!(this instanceof Negotiator)) {
+    return new Negotiator(request);
+  }
+
+  this.request = request;
+}
+
+Negotiator.prototype.charset = function charset(available) {
+  var set = this.charsets(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.charsets = function charsets(available) {
+  return preferredCharsets(this.request.headers['accept-charset'], available);
+};
+
+Negotiator.prototype.encoding = function encoding(available, opts) {
+  var set = this.encodings(available, opts);
+  return set && set[0];
+};
+
+Negotiator.prototype.encodings = function encodings(available, options) {
+  var opts = options || {};
+  return preferredEncodings(this.request.headers['accept-encoding'], available, opts.preferred);
+};
+
+Negotiator.prototype.language = function language(available) {
+  var set = this.languages(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.languages = function languages(available) {
+  return preferredLanguages(this.request.headers['accept-language'], available);
+};
+
+Negotiator.prototype.mediaType = function mediaType(available) {
+  var set = this.mediaTypes(available);
+  return set && set[0];
+};
+
+Negotiator.prototype.mediaTypes = function mediaTypes(available) {
+  return preferredMediaTypes(this.request.headers.accept, available);
+};
+
+// Backwards compatibility
+Negotiator.prototype.preferredCharset = Negotiator.prototype.charset;
+Negotiator.prototype.preferredCharsets = Negotiator.prototype.charsets;
+Negotiator.prototype.preferredEncoding = Negotiator.prototype.encoding;
+Negotiator.prototype.preferredEncodings = Negotiator.prototype.encodings;
+Negotiator.prototype.preferredLanguage = Negotiator.prototype.language;
+Negotiator.prototype.preferredLanguages = Negotiator.prototype.languages;
+Negotiator.prototype.preferredMediaType = Negotiator.prototype.mediaType;
+Negotiator.prototype.preferredMediaTypes = Negotiator.prototype.mediaTypes;
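
A small sketch of how the Negotiator API vendored above is typically called (illustrative only; the plain request-shaped object stands in for a real http.IncomingMessage):

    const Negotiator = require('negotiator')

    // any object with a `headers` map works
    const request = {
      headers: {
        accept: 'text/html, application/json;q=0.8',
        'accept-language': 'en;q=0.8, es',
      },
    }

    const negotiator = new Negotiator(request)

    console.log(negotiator.mediaType(['application/json', 'text/html'])) // 'text/html'
    console.log(negotiator.languages())                                  // [ 'es', 'en' ]
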
diff --git a/node_modules/negotiator/lib/charset.js b/node_modules/negotiator/lib/charset.js
new file mode 100644
index 0000000000000..cdd014803474a
--- /dev/null
+++ b/node_modules/negotiator/lib/charset.js
@@ -0,0 +1,169 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredCharsets;
+module.exports.preferredCharsets = preferredCharsets;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleCharsetRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Charset header.
+ * @private
+ */
+
+function parseAcceptCharset(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var charset = parseCharset(accepts[i].trim(), i);
+
+    if (charset) {
+      accepts[j++] = charset;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a charset from the Accept-Charset header.
+ * @private
+ */
+
+function parseCharset(str, i) {
+  var match = simpleCharsetRegExp.exec(str);
+  if (!match) return null;
+
+  var charset = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    charset: charset,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a charset.
+ * @private
+ */
+
+function getCharsetPriority(charset, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(charset, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the charset.
+ * @private
+ */
+
+function specify(charset, spec, index) {
+  var s = 0;
+  if(spec.charset.toLowerCase() === charset.toLowerCase()){
+    s |= 1;
+  } else if (spec.charset !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+}
+
+/**
+ * Get the preferred charsets from an Accept-Charset header.
+ * @public
+ */
+
+function preferredCharsets(accept, provided) {
+  // RFC 2616 sec 14.2: no header = *
+  var accepts = parseAcceptCharset(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all charsets
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullCharset);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getCharsetPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted charsets
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getCharset(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full charset string.
+ * @private
+ */
+
+function getFullCharset(spec) {
+  return spec.charset;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
diff --git a/node_modules/negotiator/lib/encoding.js b/node_modules/negotiator/lib/encoding.js
new file mode 100644
index 0000000000000..9ebb633d67743
--- /dev/null
+++ b/node_modules/negotiator/lib/encoding.js
@@ -0,0 +1,205 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredEncodings;
+module.exports.preferredEncodings = preferredEncodings;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleEncodingRegExp = /^\s*([^\s;]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Encoding header.
+ * @private
+ */
+
+function parseAcceptEncoding(accept) {
+  var accepts = accept.split(',');
+  var hasIdentity = false;
+  var minQuality = 1;
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var encoding = parseEncoding(accepts[i].trim(), i);
+
+    if (encoding) {
+      accepts[j++] = encoding;
+      hasIdentity = hasIdentity || specify('identity', encoding);
+      minQuality = Math.min(minQuality, encoding.q || 1);
+    }
+  }
+
+  if (!hasIdentity) {
+    /*
+     * If identity doesn't explicitly appear in the accept-encoding header,
+     * it's added to the list of acceptable encoding with the lowest q
+     */
+    accepts[j++] = {
+      encoding: 'identity',
+      q: minQuality,
+      i: i
+    };
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse an encoding from the Accept-Encoding header.
+ * @private
+ */
+
+function parseEncoding(str, i) {
+  var match = simpleEncodingRegExp.exec(str);
+  if (!match) return null;
+
+  var encoding = match[1];
+  var q = 1;
+  if (match[2]) {
+    var params = match[2].split(';');
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].trim().split('=');
+      if (p[0] === 'q') {
+        q = parseFloat(p[1]);
+        break;
+      }
+    }
+  }
+
+  return {
+    encoding: encoding,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of an encoding.
+ * @private
+ */
+
+function getEncodingPriority(encoding, accepted, index) {
+  var priority = {encoding: encoding, o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(encoding, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the encoding.
+ * @private
+ */
+
+function specify(encoding, spec, index) {
+  var s = 0;
+  if(spec.encoding.toLowerCase() === encoding.toLowerCase()){
+    s |= 1;
+  } else if (spec.encoding !== '*' ) {
+    return null
+  }
+
+  return {
+    encoding: encoding,
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred encodings from an Accept-Encoding header.
+ * @public
+ */
+
+function preferredEncodings(accept, provided, preferred) {
+  var accepts = parseAcceptEncoding(accept || '');
+
+  var comparator = preferred ? function comparator (a, b) {
+    if (a.q !== b.q) {
+      return b.q - a.q // higher quality first
+    }
+
+    var aPreferred = preferred.indexOf(a.encoding)
+    var bPreferred = preferred.indexOf(b.encoding)
+
+    if (aPreferred === -1 && bPreferred === -1) {
+      // consider the original specificity/order
+      return (b.s - a.s) || (a.o - b.o) || (a.i - b.i)
+    }
+
+    if (aPreferred !== -1 && bPreferred !== -1) {
+      return aPreferred - bPreferred // consider the preferred order
+    }
+
+    return aPreferred === -1 ? 1 : -1 // preferred first
+  } : compareSpecs;
+
+  if (!provided) {
+    // sorted list of all encodings
+    return accepts
+      .filter(isQuality)
+      .sort(comparator)
+      .map(getFullEncoding);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getEncodingPriority(type, accepts, index);
+  });
+
+  // sorted list of accepted encodings
+  return priorities.filter(isQuality).sort(comparator).map(function getEncoding(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i);
+}
+
+/**
+ * Get full encoding string.
+ * @private
+ */
+
+function getFullEncoding(spec) {
+  return spec.encoding;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
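
A quick sketch of how the optional `preferred` tiebreaker in this module works; header strings and lists are illustrative:

```js
// illustrative only; not part of the vendored file above
const preferredEncodings = require('negotiator/lib/encoding')

// identity is implied with the lowest q seen (0.9 here), so it sorts last
preferredEncodings('gzip, br;q=0.9', ['br', 'gzip', 'identity'])
// => ['gzip', 'br', 'identity']

// the optional third argument breaks q-value ties in the server's favor
preferredEncodings('gzip, br', ['gzip', 'br'], ['br'])
// => ['br', 'gzip']
```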
diff --git a/node_modules/negotiator/lib/language.js b/node_modules/negotiator/lib/language.js
new file mode 100644
index 0000000000000..a23167252719b
--- /dev/null
+++ b/node_modules/negotiator/lib/language.js
@@ -0,0 +1,179 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredLanguages;
+module.exports.preferredLanguages = preferredLanguages;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleLanguageRegExp = /^\s*([^\s\-;]+)(?:-([^\s;]+))?\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept-Language header.
+ * @private
+ */
+
+function parseAcceptLanguage(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var language = parseLanguage(accepts[i].trim(), i);
+
+    if (language) {
+      accepts[j++] = language;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a language from the Accept-Language header.
+ * @private
+ */
+
+function parseLanguage(str, i) {
+  var match = simpleLanguageRegExp.exec(str);
+  if (!match) return null;
+
+  var prefix = match[1]
+  var suffix = match[2]
+  var full = prefix
+
+  if (suffix) full += "-" + suffix;
+
+  var q = 1;
+  if (match[3]) {
+    var params = match[3].split(';')
+    for (var j = 0; j < params.length; j++) {
+      var p = params[j].split('=');
+      if (p[0] === 'q') q = parseFloat(p[1]);
+    }
+  }
+
+  return {
+    prefix: prefix,
+    suffix: suffix,
+    q: q,
+    i: i,
+    full: full
+  };
+}
+
+/**
+ * Get the priority of a language.
+ * @private
+ */
+
+function getLanguagePriority(language, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(language, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the language.
+ * @private
+ */
+
+function specify(language, spec, index) {
+  var p = parseLanguage(language)
+  if (!p) return null;
+  var s = 0;
+  if(spec.full.toLowerCase() === p.full.toLowerCase()){
+    s |= 4;
+  } else if (spec.prefix.toLowerCase() === p.full.toLowerCase()) {
+    s |= 2;
+  } else if (spec.full.toLowerCase() === p.prefix.toLowerCase()) {
+    s |= 1;
+  } else if (spec.full !== '*' ) {
+    return null
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s
+  }
+};
+
+/**
+ * Get the preferred languages from an Accept-Language header.
+ * @public
+ */
+
+function preferredLanguages(accept, provided) {
+  // RFC 2616 sec 14.4: no header = *
+  var accepts = parseAcceptLanguage(accept === undefined ? '*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all languages
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullLanguage);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getLanguagePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted languages
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getLanguage(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full language string.
+ * @private
+ */
+
+function getFullLanguage(spec) {
+  return spec.full;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
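
For reference, a small sketch of `preferredLanguages` behavior based on the code above; the tags are illustrative:

```js
// illustrative only; not part of the vendored file above
const preferredLanguages = require('negotiator/lib/language')

// higher q wins: es has an implicit q=1, en is downgraded to 0.8
preferredLanguages('en;q=0.8, es, pt', ['en', 'es'])
// => ['es', 'en']

// a provided 'en-US' still matches an accepted 'en' prefix
preferredLanguages('en', ['en-US', 'fr'])
// => ['en-US']
```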
diff --git a/node_modules/negotiator/lib/mediaType.js b/node_modules/negotiator/lib/mediaType.js
new file mode 100644
index 0000000000000..8e402ea88394c
--- /dev/null
+++ b/node_modules/negotiator/lib/mediaType.js
@@ -0,0 +1,294 @@
+/**
+ * negotiator
+ * Copyright(c) 2012 Isaac Z. Schlueter
+ * Copyright(c) 2014 Federico Romero
+ * Copyright(c) 2014-2015 Douglas Christopher Wilson
+ * MIT Licensed
+ */
+
+'use strict';
+
+/**
+ * Module exports.
+ * @public
+ */
+
+module.exports = preferredMediaTypes;
+module.exports.preferredMediaTypes = preferredMediaTypes;
+
+/**
+ * Module variables.
+ * @private
+ */
+
+var simpleMediaTypeRegExp = /^\s*([^\s\/;]+)\/([^;\s]+)\s*(?:;(.*))?$/;
+
+/**
+ * Parse the Accept header.
+ * @private
+ */
+
+function parseAccept(accept) {
+  var accepts = splitMediaTypes(accept);
+
+  for (var i = 0, j = 0; i < accepts.length; i++) {
+    var mediaType = parseMediaType(accepts[i].trim(), i);
+
+    if (mediaType) {
+      accepts[j++] = mediaType;
+    }
+  }
+
+  // trim accepts
+  accepts.length = j;
+
+  return accepts;
+}
+
+/**
+ * Parse a media type from the Accept header.
+ * @private
+ */
+
+function parseMediaType(str, i) {
+  var match = simpleMediaTypeRegExp.exec(str);
+  if (!match) return null;
+
+  var params = Object.create(null);
+  var q = 1;
+  var subtype = match[2];
+  var type = match[1];
+
+  if (match[3]) {
+    var kvps = splitParameters(match[3]).map(splitKeyValuePair);
+
+    for (var j = 0; j < kvps.length; j++) {
+      var pair = kvps[j];
+      var key = pair[0].toLowerCase();
+      var val = pair[1];
+
+      // get the value, unwrapping quotes
+      var value = val && val[0] === '"' && val[val.length - 1] === '"'
+        ? val.slice(1, -1)
+        : val;
+
+      if (key === 'q') {
+        q = parseFloat(value);
+        break;
+      }
+
+      // store parameter
+      params[key] = value;
+    }
+  }
+
+  return {
+    type: type,
+    subtype: subtype,
+    params: params,
+    q: q,
+    i: i
+  };
+}
+
+/**
+ * Get the priority of a media type.
+ * @private
+ */
+
+function getMediaTypePriority(type, accepted, index) {
+  var priority = {o: -1, q: 0, s: 0};
+
+  for (var i = 0; i < accepted.length; i++) {
+    var spec = specify(type, accepted[i], index);
+
+    if (spec && (priority.s - spec.s || priority.q - spec.q || priority.o - spec.o) < 0) {
+      priority = spec;
+    }
+  }
+
+  return priority;
+}
+
+/**
+ * Get the specificity of the media type.
+ * @private
+ */
+
+function specify(type, spec, index) {
+  var p = parseMediaType(type);
+  var s = 0;
+
+  if (!p) {
+    return null;
+  }
+
+  if(spec.type.toLowerCase() == p.type.toLowerCase()) {
+    s |= 4
+  } else if(spec.type != '*') {
+    return null;
+  }
+
+  if(spec.subtype.toLowerCase() == p.subtype.toLowerCase()) {
+    s |= 2
+  } else if(spec.subtype != '*') {
+    return null;
+  }
+
+  var keys = Object.keys(spec.params);
+  if (keys.length > 0) {
+    if (keys.every(function (k) {
+      return spec.params[k] == '*' || (spec.params[k] || '').toLowerCase() == (p.params[k] || '').toLowerCase();
+    })) {
+      s |= 1
+    } else {
+      return null
+    }
+  }
+
+  return {
+    i: index,
+    o: spec.i,
+    q: spec.q,
+    s: s,
+  }
+}
+
+/**
+ * Get the preferred media types from an Accept header.
+ * @public
+ */
+
+function preferredMediaTypes(accept, provided) {
+  // RFC 2616 sec 14.2: no header = */*
+  var accepts = parseAccept(accept === undefined ? '*/*' : accept || '');
+
+  if (!provided) {
+    // sorted list of all types
+    return accepts
+      .filter(isQuality)
+      .sort(compareSpecs)
+      .map(getFullType);
+  }
+
+  var priorities = provided.map(function getPriority(type, index) {
+    return getMediaTypePriority(type, accepts, index);
+  });
+
+  // sorted list of accepted types
+  return priorities.filter(isQuality).sort(compareSpecs).map(function getType(priority) {
+    return provided[priorities.indexOf(priority)];
+  });
+}
+
+/**
+ * Compare two specs.
+ * @private
+ */
+
+function compareSpecs(a, b) {
+  return (b.q - a.q) || (b.s - a.s) || (a.o - b.o) || (a.i - b.i) || 0;
+}
+
+/**
+ * Get full type string.
+ * @private
+ */
+
+function getFullType(spec) {
+  return spec.type + '/' + spec.subtype;
+}
+
+/**
+ * Check if a spec has any quality.
+ * @private
+ */
+
+function isQuality(spec) {
+  return spec.q > 0;
+}
+
+/**
+ * Count the number of quotes in a string.
+ * @private
+ */
+
+function quoteCount(string) {
+  var count = 0;
+  var index = 0;
+
+  while ((index = string.indexOf('"', index)) !== -1) {
+    count++;
+    index++;
+  }
+
+  return count;
+}
+
+/**
+ * Split a key value pair.
+ * @private
+ */
+
+function splitKeyValuePair(str) {
+  var index = str.indexOf('=');
+  var key;
+  var val;
+
+  if (index === -1) {
+    key = str;
+  } else {
+    key = str.slice(0, index);
+    val = str.slice(index + 1);
+  }
+
+  return [key, val];
+}
+
+/**
+ * Split an Accept header into media types.
+ * @private
+ */
+
+function splitMediaTypes(accept) {
+  var accepts = accept.split(',');
+
+  for (var i = 1, j = 0; i < accepts.length; i++) {
+    if (quoteCount(accepts[j]) % 2 == 0) {
+      accepts[++j] = accepts[i];
+    } else {
+      accepts[j] += ',' + accepts[i];
+    }
+  }
+
+  // trim accepts
+  accepts.length = j + 1;
+
+  return accepts;
+}
+
+/**
+ * Split a string of parameters.
+ * @private
+ */
+
+function splitParameters(str) {
+  var parameters = str.split(';');
+
+  for (var i = 1, j = 0; i < parameters.length; i++) {
+    if (quoteCount(parameters[j]) % 2 == 0) {
+      parameters[++j] = parameters[i];
+    } else {
+      parameters[j] += ';' + parameters[i];
+    }
+  }
+
+  // trim parameters
+  parameters.length = j + 1;
+
+  for (var i = 0; i < parameters.length; i++) {
+    parameters[i] = parameters[i].trim();
+  }
+
+  return parameters;
+}
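
And a matching sketch for `preferredMediaTypes`, again with made-up header values:

```js
// illustrative only; not part of the vendored file above
const preferredMediaTypes = require('negotiator/lib/mediaType')

// q-values order the result; image/png matches nothing (q=0) and is dropped
preferredMediaTypes('text/html, application/*;q=0.2, image/jpeg;q=0.8',
  ['text/html', 'application/json', 'image/png'])
// => ['text/html', 'application/json']

// no Accept header is treated as '*/*'
preferredMediaTypes(undefined, ['application/json'])
// => ['application/json']
```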
diff --git a/node_modules/negotiator/package.json b/node_modules/negotiator/package.json
new file mode 100644
index 0000000000000..e4bdc1ef4f748
--- /dev/null
+++ b/node_modules/negotiator/package.json
@@ -0,0 +1,43 @@
+{
+  "name": "negotiator",
+  "description": "HTTP content negotiation",
+  "version": "1.0.0",
+  "contributors": [
+    "Douglas Christopher Wilson ",
+    "Federico Romero ",
+    "Isaac Z. Schlueter  (http://blog.izs.me/)"
+  ],
+  "license": "MIT",
+  "keywords": [
+    "http",
+    "content negotiation",
+    "accept",
+    "accept-language",
+    "accept-encoding",
+    "accept-charset"
+  ],
+  "repository": "jshttp/negotiator",
+  "devDependencies": {
+    "eslint": "7.32.0",
+    "eslint-plugin-markdown": "2.2.1",
+    "mocha": "9.1.3",
+    "nyc": "15.1.0"
+  },
+  "files": [
+    "lib/",
+    "HISTORY.md",
+    "LICENSE",
+    "index.js",
+    "README.md"
+  ],
+  "engines": {
+    "node": ">= 0.6"
+  },
+  "scripts": {
+    "lint": "eslint .",
+    "test": "mocha --reporter spec --check-leaks --bail test/",
+    "test:debug": "mocha --reporter spec --check-leaks --inspect --inspect-brk test/",
+    "test-ci": "nyc --reporter=lcov --reporter=text npm test",
+    "test-cov": "nyc --reporter=html --reporter=text npm test"
+  }
+}
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js
new file mode 100644
index 0000000000000..c541b93001517
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js
@@ -0,0 +1,206 @@
+'use strict'
+
+const net = require('net')
+const tls = require('tls')
+const { once } = require('events')
+const timers = require('timers/promises')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js')
+const Errors = require('./errors.js')
+const { Agent: AgentBase } = require('agent-base')
+
+module.exports = class Agent extends AgentBase {
+  #options
+  #timeouts
+  #proxy
+  #noProxy
+  #ProxyAgent
+
+  constructor (options = {}) {
+    const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options)
+
+    super(normalizedOptions)
+
+    this.#options = normalizedOptions
+    this.#timeouts = timeouts
+
+    if (proxy) {
+      this.#proxy = new URL(proxy)
+      this.#noProxy = noProxy
+      this.#ProxyAgent = getProxyAgent(proxy)
+    }
+  }
+
+  get proxy () {
+    return this.#proxy ? { url: this.#proxy } : {}
+  }
+
+  #getProxy (options) {
+    if (!this.#proxy) {
+      return
+    }
+
+    const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, {
+      proxy: this.#proxy,
+      noProxy: this.#noProxy,
+    })
+
+    if (!proxy) {
+      return
+    }
+
+    const cacheKey = cacheOptions({
+      ...options,
+      ...this.#options,
+      timeouts: this.#timeouts,
+      proxy,
+    })
+
+    if (proxyCache.has(cacheKey)) {
+      return proxyCache.get(cacheKey)
+    }
+
+    let ProxyAgent = this.#ProxyAgent
+    if (Array.isArray(ProxyAgent)) {
+      ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0]
+    }
+
+    const proxyAgent = new ProxyAgent(proxy, {
+      ...this.#options,
+      socketOptions: { family: this.#options.family },
+    })
+    proxyCache.set(cacheKey, proxyAgent)
+
+    return proxyAgent
+  }
+
+  // takes an array of promises and races them against the connection timeout
+  // which will throw the necessary error if it is hit. This will return the
+  // result of the promise race.
+  async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) {
+    if (timeout) {
+      const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal })
+        .then(() => {
+          throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`)
+        }).catch((err) => {
+          if (err.name === 'AbortError') {
+            return
+          }
+          throw err
+        })
+      promises.push(connectionTimeout)
+    }
+
+    let result
+    try {
+      result = await Promise.race(promises)
+      ac.abort()
+    } catch (err) {
+      ac.abort()
+      throw err
+    }
+    return result
+  }
+
+  async connect (request, options) {
+    // if the connection does not have its own lookup function
+    // set, then use the one from our options
+    options.lookup ??= this.#options.lookup
+
+    let socket
+    let timeout = this.#timeouts.connection
+    const isSecureEndpoint = this.isSecureEndpoint(options)
+
+    const proxy = this.#getProxy(options)
+    if (proxy) {
+      // some of the proxies will wait for the socket to fully connect before
+      // returning so we have to await this while also racing it against the
+      // connection timeout.
+      const start = Date.now()
+      socket = await this.#timeoutConnection({
+        options,
+        timeout,
+        promises: [proxy.connect(request, options)],
+      })
+      // see how much time proxy.connect took and subtract it from
+      // the timeout
+      if (timeout) {
+        timeout = timeout - (Date.now() - start)
+      }
+    } else {
+      socket = (isSecureEndpoint ? tls : net).connect(options)
+    }
+
+    socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs)
+    socket.setNoDelay(this.keepAlive)
+
+    const abortController = new AbortController()
+    const { signal } = abortController
+
+    const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting']
+      ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal })
+      : Promise.resolve()
+
+    await this.#timeoutConnection({
+      options,
+      timeout,
+      promises: [
+        connectPromise,
+        once(socket, 'error', { signal }).then((err) => {
+          throw err[0]
+        }),
+      ],
+    }, abortController)
+
+    if (this.#timeouts.idle) {
+      socket.setTimeout(this.#timeouts.idle, () => {
+        socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`))
+      })
+    }
+
+    return socket
+  }
+
+  addRequest (request, options) {
+    const proxy = this.#getProxy(options)
+    // it would be better to call proxy.addRequest here but this causes the
+    // http-proxy-agent to call its super.addRequest which causes the request
+    // to be added to the agent twice. since we only support 3 agents
+    // currently (see the required agents in proxy.js) we have manually
+    // checked that the only public methods we need to call are called in the
+    // next block. this could change in the future and presumably we would get
+    // failing tests until we have properly called the necessary methods on
+    // each of our proxy agents
+    if (proxy?.setRequestProps) {
+      proxy.setRequestProps(request, options)
+    }
+
+    request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close')
+
+    if (this.#timeouts.response) {
+      let responseTimeout
+      request.once('finish', () => {
+        responseTimeout = setTimeout(() => {
+          request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
+        }, this.#timeouts.response)
+      })
+      request.once('response', () => {
+        clearTimeout(responseTimeout)
+      })
+    }
+
+    if (this.#timeouts.transfer) {
+      let transferTimeout
+      request.once('response', (res) => {
+        transferTimeout = setTimeout(() => {
+          res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
+        }, this.#timeouts.transfer)
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      })
+    }
+
+    return super.addRequest(request, options)
+  }
+}
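
The class above is a drop-in Node http(s) agent with proxy and timeout support layered on `agent-base`. A hypothetical usage sketch (the timeout values are placeholders, and the generic `timeout` option maps to the idle timeout in lib/options.js later in this patch):

```js
// hypothetical sketch, not part of the vendored file above
const https = require('node:https')
const Agent = require('./agents.js') // relative to this lib directory

const agent = new Agent({
  timeouts: { connection: 5000, idle: 30000 },
  // proxy: 'http://proxy.internal:8080', // optional; placeholder URL
})

https.get('https://registry.npmjs.org/npm', { agent }, (res) => {
  console.log(res.statusCode)
})
```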
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 0000000000000..3c6946c566d73
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const dns = require('dns')
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+  family = 0,
+  hints = dns.ADDRCONFIG,
+  all = false,
+  verbatim = undefined,
+  ttl = 5 * 60 * 1000,
+  lookup = dns.lookup,
+}) => ({
+  // hints and lookup are returned since both are top level properties to (net|tls).connect
+  hints,
+  lookup: (hostname, ...args) => {
+    const callback = args.pop() // callback is always last arg
+    const lookupOptions = args[0] ?? {}
+
+    const options = {
+      family,
+      hints,
+      all,
+      verbatim,
+      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
+    }
+
+    const key = JSON.stringify({ hostname, ...options })
+
+    if (cache.has(key)) {
+      const cached = cache.get(key)
+      return process.nextTick(callback, null, ...cached)
+    }
+
+    lookup(hostname, options, (err, ...result) => {
+      if (err) {
+        return callback(err)
+      }
+
+      cache.set(key, result, { ttl })
+      return callback(null, ...result)
+    })
+  },
+})
+
+module.exports = {
+  cache,
+  getOptions,
+}
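
A short sketch of how the factory above is meant to be spread into socket options so repeated connects reuse cached DNS answers (host and ttl are illustrative):

```js
// illustrative only; not part of the vendored file above
const tls = require('node:tls')
const dns = require('./dns.js') // relative to this lib directory

const socket = tls.connect({
  host: 'registry.npmjs.org',
  port: 443,
  ...dns.getOptions({ ttl: 60 * 1000 }), // adds { hints, lookup } with a caching lookup
})
```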
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 0000000000000..70475aec8eb35
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,61 @@
+'use strict'
+
+class InvalidProxyProtocolError extends Error {
+  constructor (url) {
+    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+    this.code = 'EINVALIDPROXY'
+    this.proxy = url
+  }
+}
+
+class ConnectionTimeoutError extends Error {
+  constructor (host) {
+    super(`Timeout connecting to host \`${host}\``)
+    this.code = 'ECONNECTIONTIMEOUT'
+    this.host = host
+  }
+}
+
+class IdleTimeoutError extends Error {
+  constructor (host) {
+    super(`Idle timeout reached for host \`${host}\``)
+    this.code = 'EIDLETIMEOUT'
+    this.host = host
+  }
+}
+
+class ResponseTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Response timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `connecting to host \`${request.host}\``
+    super(msg)
+    this.code = 'ERESPONSETIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+class TransferTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Transfer timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `for \`${request.host}\``
+    super(msg)
+    this.code = 'ETRANSFERTIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+module.exports = {
+  InvalidProxyProtocolError,
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+}
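
Each class above carries a stable `code`, so callers can branch on it instead of parsing messages. A hedged sketch (the `doFetch` callback is hypothetical):

```js
// illustrative only; not part of the vendored file above
async function fetchWithRetry (doFetch) {
  try {
    return await doFetch() // hypothetical request made through this agent
  } catch (err) {
    if (err.code === 'ECONNECTIONTIMEOUT' || err.code === 'EIDLETIMEOUT') {
      return doFetch() // a single retry on timeout, for illustration
    }
    throw err
  }
}
```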
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js
new file mode 100644
index 0000000000000..b33d6eaef07a2
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, proxyCache } = require('./proxy.js')
+const dns = require('./dns.js')
+const Agent = require('./agents.js')
+
+const agentCache = new LRUCache({ max: 20 })
+
+const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
+  // false has meaning so this can't be a simple truthiness check
+  if (agent != null) {
+    return agent
+  }
+
+  url = new URL(url)
+
+  const proxyForUrl = getProxy(url, { proxy, noProxy })
+  const normalizedOptions = {
+    ...normalizeOptions(options),
+    proxy: proxyForUrl,
+  }
+
+  const cacheKey = cacheOptions({
+    ...normalizedOptions,
+    secureEndpoint: url.protocol === 'https:',
+  })
+
+  if (agentCache.has(cacheKey)) {
+    return agentCache.get(cacheKey)
+  }
+
+  const newAgent = new Agent(normalizedOptions)
+  agentCache.set(cacheKey, newAgent)
+
+  return newAgent
+}
+
+module.exports = {
+  getAgent,
+  Agent,
+  // these are exported for backwards compatibility
+  HttpAgent: Agent,
+  HttpsAgent: Agent,
+  cache: {
+    proxy: proxyCache,
+    agent: agentCache,
+    dns: dns.cache,
+    clear: () => {
+      proxyCache.clear()
+      agentCache.clear()
+      dns.cache.clear()
+    },
+  },
+}
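
A minimal sketch of the exported `getAgent` helper; the registry URL and timeout are placeholders:

```js
// illustrative only; not part of the vendored file above
const { getAgent, cache } = require('./index.js') // this vendored entry point

// agents are cached by their normalized options, so identical calls share one agent
const a1 = getAgent('https://registry.npmjs.org', { timeouts: { connection: 5000 } })
const a2 = getAgent('https://registry.npmjs.org', { timeouts: { connection: 5000 } })
console.log(a1 === a2) // true

cache.clear() // drops the proxy, agent, and dns caches in one call
```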
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js
new file mode 100644
index 0000000000000..0bf53f725f084
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js
@@ -0,0 +1,86 @@
+'use strict'
+
+const dns = require('./dns')
+
+const normalizeOptions = (opts) => {
+  const family = parseInt(opts.family ?? '0', 10)
+  const keepAlive = opts.keepAlive ?? true
+
+  const normalized = {
+    // nodejs http agent options. these are all the defaults
+    // but kept here to increase the likelihood of cache hits
+    // https://nodejs.org/api/http.html#new-agentoptions
+    keepAliveMsecs: keepAlive ? 1000 : undefined,
+    maxSockets: opts.maxSockets ?? 15,
+    maxTotalSockets: Infinity,
+    maxFreeSockets: keepAlive ? 256 : undefined,
+    scheduling: 'fifo',
+    // then spread the rest of the options
+    ...opts,
+    // we already set these to their defaults that we want
+    family,
+    keepAlive,
+    // our custom timeout options
+    timeouts: {
+      // the standard timeout option is mapped to our idle timeout
+      // and then deleted below
+      idle: opts.timeout ?? 0,
+      connection: 0,
+      response: 0,
+      transfer: 0,
+      ...opts.timeouts,
+    },
+    // get the dns options that go at the top level of socket connection
+    ...dns.getOptions({ family, ...opts.dns }),
+  }
+
+  // remove timeout since we already used it to set our own idle timeout
+  delete normalized.timeout
+
+  return normalized
+}
+
+const createKey = (obj) => {
+  let key = ''
+  const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0])
+  for (let [k, v] of sorted) {
+    if (v == null) {
+      v = 'null'
+    } else if (v instanceof URL) {
+      v = v.toString()
+    } else if (typeof v === 'object') {
+      v = createKey(v)
+    }
+    key += `${k}:${v}:`
+  }
+  return key
+}
+
+const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
+  secureEndpoint: !!secureEndpoint,
+  // socket connect options
+  family: options.family,
+  hints: options.hints,
+  localAddress: options.localAddress,
+  // tls specific connect options
+  strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
+  ca: secureEndpoint ? options.ca : null,
+  cert: secureEndpoint ? options.cert : null,
+  key: secureEndpoint ? options.key : null,
+  // http agent options
+  keepAlive: options.keepAlive,
+  keepAliveMsecs: options.keepAliveMsecs,
+  maxSockets: options.maxSockets,
+  maxTotalSockets: options.maxTotalSockets,
+  maxFreeSockets: options.maxFreeSockets,
+  scheduling: options.scheduling,
+  // timeout options
+  timeouts: options.timeouts,
+  // proxy
+  proxy: options.proxy,
+})
+
+module.exports = {
+  normalizeOptions,
+  cacheOptions,
+}
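
To make the mapping above concrete, a small sketch of what the normalizers return (values are illustrative):

```js
// illustrative only; not part of the vendored file above
const { normalizeOptions, cacheOptions } = require('./options.js')

const opts = normalizeOptions({ timeout: 30000, family: '4' })
// opts.timeouts.idle === 30000   (the generic `timeout` becomes the idle timeout)
// opts.family === 4              (coerced to a number)
// 'timeout' in opts === false    (deleted after being mapped)

// cacheOptions flattens the relevant fields into a deterministic string key
const key = cacheOptions({ ...opts, secureEndpoint: true })
```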
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js
new file mode 100644
index 0000000000000..6272e929e57bc
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js
@@ -0,0 +1,88 @@
+'use strict'
+
+const { HttpProxyAgent } = require('http-proxy-agent')
+const { HttpsProxyAgent } = require('https-proxy-agent')
+const { SocksProxyAgent } = require('socks-proxy-agent')
+const { LRUCache } = require('lru-cache')
+const { InvalidProxyProtocolError } = require('./errors.js')
+
+const PROXY_CACHE = new LRUCache({ max: 20 })
+
+const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
+
+const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
+
+const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
+  key = key.toLowerCase()
+  if (PROXY_ENV_KEYS.has(key)) {
+    acc[key] = value
+  }
+  return acc
+}, {})
+
+const getProxyAgent = (url) => {
+  url = new URL(url)
+
+  const protocol = url.protocol.slice(0, -1)
+  if (SOCKS_PROTOCOLS.has(protocol)) {
+    return SocksProxyAgent
+  }
+  if (protocol === 'https' || protocol === 'http') {
+    return [HttpProxyAgent, HttpsProxyAgent]
+  }
+
+  throw new InvalidProxyProtocolError(url)
+}
+
+const isNoProxy = (url, noProxy) => {
+  if (typeof noProxy === 'string') {
+    noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
+  }
+
+  if (!noProxy || !noProxy.length) {
+    return false
+  }
+
+  const hostSegments = url.hostname.split('.').reverse()
+
+  return noProxy.some((no) => {
+    const noSegments = no.split('.').filter(Boolean).reverse()
+    if (!noSegments.length) {
+      return false
+    }
+
+    for (let i = 0; i < noSegments.length; i++) {
+      if (hostSegments[i] !== noSegments[i]) {
+        return false
+      }
+    }
+
+    return true
+  })
+}
+
+const getProxy = (url, { proxy, noProxy }) => {
+  url = new URL(url)
+
+  if (!proxy) {
+    proxy = url.protocol === 'https:'
+      ? PROXY_ENV.https_proxy
+      : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy
+  }
+
+  if (!noProxy) {
+    noProxy = PROXY_ENV.no_proxy
+  }
+
+  if (!proxy || isNoProxy(url, noProxy)) {
+    return null
+  }
+
+  return new URL(proxy)
+}
+
+module.exports = {
+  getProxyAgent,
+  getProxy,
+  proxyCache: PROXY_CACHE,
+}
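
A sketch of how `getProxy` combines explicit options with no_proxy rules; hosts and proxy URLs are placeholders:

```js
// illustrative only; not part of the vendored file above
const { getProxy } = require('./proxy.js')

// '.npmjs.org' suffix-matches registry.npmjs.org, so no proxy is used
getProxy('https://registry.npmjs.org', {
  proxy: 'http://proxy.internal:8080',
  noProxy: 'localhost,.npmjs.org',
})
// => null

// a host outside the noProxy list gets the proxy back as a URL object
getProxy('https://example.com', { proxy: 'http://proxy.internal:8080', noProxy: '.npmjs.org' })
// => URL { href: 'http://proxy.internal:8080/', ... }
```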
diff --git a/node_modules/node-gyp/node_modules/@npmcli/agent/package.json b/node_modules/node-gyp/node_modules/@npmcli/agent/package.json
new file mode 100644
index 0000000000000..4d648fb5dfe05
--- /dev/null
+++ b/node_modules/node-gyp/node_modules/@npmcli/agent/package.json
@@ -0,0 +1,60 @@
+{
+  "name": "@npmcli/agent",
+  "version": "3.0.0",
+  "description": "the http/https agent used by the npm cli",
+  "main": "lib/index.js",
+  "scripts": {
+    "gencerts": "bash scripts/create-cert.sh",
+    "test": "tap",
+    "lint": "npm run eslint",
+    "postlint": "template-oss-check",
+    "template-oss-apply": "template-oss-apply --force",
+    "lintfix": "npm run eslint -- --fix",
+    "snap": "tap",
+    "posttest": "npm run lint",
+    "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\""
+  },
+  "author": "GitHub Inc.",
+  "license": "ISC",
+  "bugs": {
+    "url": "https://github.com/npm/agent/issues"
+  },
+  "homepage": "https://github.com/npm/agent#readme",
+  "files": [
+    "bin/",
+    "lib/"
+  ],
+  "engines": {
+    "node": "^18.17.0 || >=20.5.0"
+  },
+  "templateOSS": {
+    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
+    "version": "4.23.1",
+    "publish": "true"
+  },
+  "dependencies": {
+    "agent-base": "^7.1.0",
+    "http-proxy-agent": "^7.0.0",
+    "https-proxy-agent": "^7.0.1",
+    "lru-cache": "^10.0.1",
+    "socks-proxy-agent": "^8.0.3"
+  },
+  "devDependencies": {
+    "@npmcli/eslint-config": "^5.0.0",
+    "@npmcli/template-oss": "4.23.1",
+    "minipass-fetch": "^3.0.3",
+    "nock": "^13.2.7",
+    "socksv5": "^0.0.6",
+    "tap": "^16.3.0"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/npm/agent.git"
+  },
+  "tap": {
+    "nyc-arg": [
+      "--exclude",
+      "tap-snapshots/**"
+    ]
+  }
+}
diff --git a/node_modules/tar/node_modules/minizlib/LICENSE b/node_modules/tar/node_modules/minizlib/LICENSE
new file mode 100644
index 0000000000000..ffce7383f53e7
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/LICENSE
@@ -0,0 +1,26 @@
+Minizlib was created by Isaac Z. Schlueter.
+It is a derivative work of the Node.js project.
+
+"""
+Copyright Isaac Z. Schlueter and Contributors
+Copyright Node.js contributors. All rights reserved.
+Copyright Joyent, Inc. and other Node contributors. All rights reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining a
+copy of this software and associated documentation files (the "Software"),
+to deal in the Software without restriction, including without limitation
+the rights to use, copy, modify, merge, publish, distribute, sublicense,
+and/or sell copies of the Software, and to permit persons to whom the
+Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+"""
diff --git a/node_modules/tar/node_modules/minizlib/constants.js b/node_modules/tar/node_modules/minizlib/constants.js
new file mode 100644
index 0000000000000..641ebc73129bf
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/constants.js
@@ -0,0 +1,115 @@
+// Update with any zlib constants that are added or changed in the future.
+// Node v6 didn't export this, so we just hard code the version and rely
+// on all the other hard-coded values from zlib v4736.  When node v6
+// support drops, we can just export the realZlibConstants object.
+const realZlibConstants = require('zlib').constants ||
+  /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
+
+module.exports = Object.freeze(Object.assign(Object.create(null), {
+  Z_NO_FLUSH: 0,
+  Z_PARTIAL_FLUSH: 1,
+  Z_SYNC_FLUSH: 2,
+  Z_FULL_FLUSH: 3,
+  Z_FINISH: 4,
+  Z_BLOCK: 5,
+  Z_OK: 0,
+  Z_STREAM_END: 1,
+  Z_NEED_DICT: 2,
+  Z_ERRNO: -1,
+  Z_STREAM_ERROR: -2,
+  Z_DATA_ERROR: -3,
+  Z_MEM_ERROR: -4,
+  Z_BUF_ERROR: -5,
+  Z_VERSION_ERROR: -6,
+  Z_NO_COMPRESSION: 0,
+  Z_BEST_SPEED: 1,
+  Z_BEST_COMPRESSION: 9,
+  Z_DEFAULT_COMPRESSION: -1,
+  Z_FILTERED: 1,
+  Z_HUFFMAN_ONLY: 2,
+  Z_RLE: 3,
+  Z_FIXED: 4,
+  Z_DEFAULT_STRATEGY: 0,
+  DEFLATE: 1,
+  INFLATE: 2,
+  GZIP: 3,
+  GUNZIP: 4,
+  DEFLATERAW: 5,
+  INFLATERAW: 6,
+  UNZIP: 7,
+  BROTLI_DECODE: 8,
+  BROTLI_ENCODE: 9,
+  Z_MIN_WINDOWBITS: 8,
+  Z_MAX_WINDOWBITS: 15,
+  Z_DEFAULT_WINDOWBITS: 15,
+  Z_MIN_CHUNK: 64,
+  Z_MAX_CHUNK: Infinity,
+  Z_DEFAULT_CHUNK: 16384,
+  Z_MIN_MEMLEVEL: 1,
+  Z_MAX_MEMLEVEL: 9,
+  Z_DEFAULT_MEMLEVEL: 8,
+  Z_MIN_LEVEL: -1,
+  Z_MAX_LEVEL: 9,
+  Z_DEFAULT_LEVEL: -1,
+  BROTLI_OPERATION_PROCESS: 0,
+  BROTLI_OPERATION_FLUSH: 1,
+  BROTLI_OPERATION_FINISH: 2,
+  BROTLI_OPERATION_EMIT_METADATA: 3,
+  BROTLI_MODE_GENERIC: 0,
+  BROTLI_MODE_TEXT: 1,
+  BROTLI_MODE_FONT: 2,
+  BROTLI_DEFAULT_MODE: 0,
+  BROTLI_MIN_QUALITY: 0,
+  BROTLI_MAX_QUALITY: 11,
+  BROTLI_DEFAULT_QUALITY: 11,
+  BROTLI_MIN_WINDOW_BITS: 10,
+  BROTLI_MAX_WINDOW_BITS: 24,
+  BROTLI_LARGE_MAX_WINDOW_BITS: 30,
+  BROTLI_DEFAULT_WINDOW: 22,
+  BROTLI_MIN_INPUT_BLOCK_BITS: 16,
+  BROTLI_MAX_INPUT_BLOCK_BITS: 24,
+  BROTLI_PARAM_MODE: 0,
+  BROTLI_PARAM_QUALITY: 1,
+  BROTLI_PARAM_LGWIN: 2,
+  BROTLI_PARAM_LGBLOCK: 3,
+  BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
+  BROTLI_PARAM_SIZE_HINT: 5,
+  BROTLI_PARAM_LARGE_WINDOW: 6,
+  BROTLI_PARAM_NPOSTFIX: 7,
+  BROTLI_PARAM_NDIRECT: 8,
+  BROTLI_DECODER_RESULT_ERROR: 0,
+  BROTLI_DECODER_RESULT_SUCCESS: 1,
+  BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
+  BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
+  BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
+  BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
+  BROTLI_DECODER_NO_ERROR: 0,
+  BROTLI_DECODER_SUCCESS: 1,
+  BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
+  BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
+  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
+  BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
+  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
+  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
+  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
+  BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
+  BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
+  BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
+  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
+  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
+  BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
+  BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
+  BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
+  BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
+  BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
+  BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
+  BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
+  BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
+  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
+  BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
+  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
+  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
+  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
+  BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
+  BROTLI_DECODER_ERROR_UNREACHABLE: -31,
+}, realZlibConstants))
diff --git a/node_modules/tar/node_modules/minizlib/index.js b/node_modules/tar/node_modules/minizlib/index.js
new file mode 100644
index 0000000000000..fbaf69e19f209
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/index.js
@@ -0,0 +1,348 @@
+'use strict'
+
+const assert = require('assert')
+const Buffer = require('buffer').Buffer
+const realZlib = require('zlib')
+
+const constants = exports.constants = require('./constants.js')
+const Minipass = require('minipass')
+
+const OriginalBufferConcat = Buffer.concat
+
+const _superWrite = Symbol('_superWrite')
+class ZlibError extends Error {
+  constructor (err) {
+    super('zlib: ' + err.message)
+    this.code = err.code
+    this.errno = err.errno
+    /* istanbul ignore if */
+    if (!this.code)
+      this.code = 'ZLIB_ERROR'
+
+    this.message = 'zlib: ' + err.message
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'ZlibError'
+  }
+}
+
+// the Zlib class they all inherit from
+// This thing manages the queue of requests, and returns
+// true or false if there is anything in the queue when
+// you call the .write() method.
+const _opts = Symbol('opts')
+const _flushFlag = Symbol('flushFlag')
+const _finishFlushFlag = Symbol('finishFlushFlag')
+const _fullFlushFlag = Symbol('fullFlushFlag')
+const _handle = Symbol('handle')
+const _onError = Symbol('onError')
+const _sawError = Symbol('sawError')
+const _level = Symbol('level')
+const _strategy = Symbol('strategy')
+const _ended = Symbol('ended')
+const _defaultFullFlush = Symbol('_defaultFullFlush')
+
+class ZlibBase extends Minipass {
+  constructor (opts, mode) {
+    if (!opts || typeof opts !== 'object')
+      throw new TypeError('invalid options for ZlibBase constructor')
+
+    super(opts)
+    this[_sawError] = false
+    this[_ended] = false
+    this[_opts] = opts
+
+    this[_flushFlag] = opts.flush
+    this[_finishFlushFlag] = opts.finishFlush
+    // this will throw if any options are invalid for the class selected
+    try {
+      this[_handle] = new realZlib[mode](opts)
+    } catch (er) {
+      // make sure that all errors get decorated properly
+      throw new ZlibError(er)
+    }
+
+    this[_onError] = (err) => {
+      // no sense raising multiple errors, since we abort on the first one.
+      if (this[_sawError])
+        return
+
+      this[_sawError] = true
+
+      // there is no way to cleanly recover.
+      // continuing only obscures problems.
+      this.close()
+      this.emit('error', err)
+    }
+
+    this[_handle].on('error', er => this[_onError](new ZlibError(er)))
+    this.once('end', () => this.close())
+  }
+
+  close () {
+    if (this[_handle]) {
+      this[_handle].close()
+      this[_handle] = null
+      this.emit('close')
+    }
+  }
+
+  reset () {
+    if (!this[_sawError]) {
+      assert(this[_handle], 'zlib binding closed')
+      return this[_handle].reset()
+    }
+  }
+
+  flush (flushFlag) {
+    if (this.ended)
+      return
+
+    if (typeof flushFlag !== 'number')
+      flushFlag = this[_fullFlushFlag]
+    this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
+  }
+
+  end (chunk, encoding, cb) {
+    if (chunk)
+      this.write(chunk, encoding)
+    this.flush(this[_finishFlushFlag])
+    this[_ended] = true
+    return super.end(null, null, cb)
+  }
+
+  get ended () {
+    return this[_ended]
+  }
+
+  write (chunk, encoding, cb) {
+    // process the chunk using the sync process
+    // then super.write() all the outputted chunks
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+
+    if (typeof chunk === 'string')
+      chunk = Buffer.from(chunk, encoding)
+
+    if (this[_sawError])
+      return
+    assert(this[_handle], 'zlib binding closed')
+
+    // _processChunk tries to .close() the native handle after it's done, so we
+    // intercept that by temporarily making it a no-op.
+    const nativeHandle = this[_handle]._handle
+    const originalNativeClose = nativeHandle.close
+    nativeHandle.close = () => {}
+    const originalClose = this[_handle].close
+    this[_handle].close = () => {}
+    // It also calls `Buffer.concat()` at the end, which may be convenient
+    // for some, but which we are not interested in as it slows us down.
+    Buffer.concat = (args) => args
+    let result
+    try {
+      const flushFlag = typeof chunk[_flushFlag] === 'number'
+        ? chunk[_flushFlag] : this[_flushFlag]
+      result = this[_handle]._processChunk(chunk, flushFlag)
+      // if we don't throw, reset it back how it was
+      Buffer.concat = OriginalBufferConcat
+    } catch (err) {
+      // or if we do, put Buffer.concat() back before we emit error
+      // Error events call into user code, which may call Buffer.concat()
+      Buffer.concat = OriginalBufferConcat
+      this[_onError](new ZlibError(err))
+    } finally {
+      if (this[_handle]) {
+        // Core zlib resets `_handle` to null after attempting to close the
+        // native handle. Our no-op handler prevented actual closure, but we
+        // need to restore the `._handle` property.
+        this[_handle]._handle = nativeHandle
+        nativeHandle.close = originalNativeClose
+        this[_handle].close = originalClose
+        // `_processChunk()` adds an 'error' listener. If we don't remove it
+        // after each call, these handlers start piling up.
+        this[_handle].removeAllListeners('error')
+        // make sure OUR error listener is still attached tho
+      }
+    }
+
+    if (this[_handle])
+      this[_handle].on('error', er => this[_onError](new ZlibError(er)))
+
+    let writeReturn
+    if (result) {
+      if (Array.isArray(result) && result.length > 0) {
+        // The first buffer is always `handle._outBuffer`, which would be
+        // re-used for later invocations; so, we always have to copy that one.
+        writeReturn = this[_superWrite](Buffer.from(result[0]))
+        for (let i = 1; i < result.length; i++) {
+          writeReturn = this[_superWrite](result[i])
+        }
+      } else {
+        writeReturn = this[_superWrite](Buffer.from(result))
+      }
+    }
+
+    if (cb)
+      cb()
+    return writeReturn
+  }
+
+  [_superWrite] (data) {
+    return super.write(data)
+  }
+}
+
+class Zlib extends ZlibBase {
+  constructor (opts, mode) {
+    opts = opts || {}
+
+    opts.flush = opts.flush || constants.Z_NO_FLUSH
+    opts.finishFlush = opts.finishFlush || constants.Z_FINISH
+    super(opts, mode)
+
+    this[_fullFlushFlag] = constants.Z_FULL_FLUSH
+    this[_level] = opts.level
+    this[_strategy] = opts.strategy
+  }
+
+  params (level, strategy) {
+    if (this[_sawError])
+      return
+
+    if (!this[_handle])
+      throw new Error('cannot switch params when binding is closed')
+
+    // no way to test this without also not supporting params at all
+    /* istanbul ignore if */
+    if (!this[_handle].params)
+      throw new Error('not supported in this implementation')
+
+    if (this[_level] !== level || this[_strategy] !== strategy) {
+      this.flush(constants.Z_SYNC_FLUSH)
+      assert(this[_handle], 'zlib binding closed')
+      // .params() calls .flush(), but the latter is always async in the
+      // core zlib. We override .flush() temporarily to intercept that and
+      // flush synchronously.
+      const origFlush = this[_handle].flush
+      this[_handle].flush = (flushFlag, cb) => {
+        this.flush(flushFlag)
+        cb()
+      }
+      try {
+        this[_handle].params(level, strategy)
+      } finally {
+        this[_handle].flush = origFlush
+      }
+      /* istanbul ignore else */
+      if (this[_handle]) {
+        this[_level] = level
+        this[_strategy] = strategy
+      }
+    }
+  }
+}
+
+// minimal 2-byte header
+class Deflate extends Zlib {
+  constructor (opts) {
+    super(opts, 'Deflate')
+  }
+}
+
+class Inflate extends Zlib {
+  constructor (opts) {
+    super(opts, 'Inflate')
+  }
+}
+
+// gzip - bigger header, same deflate compression
+const _portable = Symbol('_portable')
+class Gzip extends Zlib {
+  constructor (opts) {
+    super(opts, 'Gzip')
+    this[_portable] = opts && !!opts.portable
+  }
+
+  [_superWrite] (data) {
+    if (!this[_portable])
+      return super[_superWrite](data)
+
+    // we'll always get the header emitted in one first chunk
+    // overwrite the OS indicator byte with 0xFF
+    this[_portable] = false
+    data[9] = 255
+    return super[_superWrite](data)
+  }
+}
+
+class Gunzip extends Zlib {
+  constructor (opts) {
+    super(opts, 'Gunzip')
+  }
+}
+
+// raw - no header
+class DeflateRaw extends Zlib {
+  constructor (opts) {
+    super(opts, 'DeflateRaw')
+  }
+}
+
+class InflateRaw extends Zlib {
+  constructor (opts) {
+    super(opts, 'InflateRaw')
+  }
+}
+
+// auto-detect header.
+class Unzip extends Zlib {
+  constructor (opts) {
+    super(opts, 'Unzip')
+  }
+}
+
+class Brotli extends ZlibBase {
+  constructor (opts, mode) {
+    opts = opts || {}
+
+    opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
+    opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
+
+    super(opts, mode)
+
+    this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
+  }
+}
+
+class BrotliCompress extends Brotli {
+  constructor (opts) {
+    super(opts, 'BrotliCompress')
+  }
+}
+
+class BrotliDecompress extends Brotli {
+  constructor (opts) {
+    super(opts, 'BrotliDecompress')
+  }
+}
+
+exports.Deflate = Deflate
+exports.Inflate = Inflate
+exports.Gzip = Gzip
+exports.Gunzip = Gunzip
+exports.DeflateRaw = DeflateRaw
+exports.InflateRaw = InflateRaw
+exports.Unzip = Unzip
+/* istanbul ignore else */
+if (typeof realZlib.BrotliCompress === 'function') {
+  exports.BrotliCompress = BrotliCompress
+  exports.BrotliDecompress = BrotliDecompress
+} else {
+  exports.BrotliCompress = exports.BrotliDecompress = class {
+    constructor () {
+      throw new Error('Brotli is not supported in this version of Node.js')
+    }
+  }
+}
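
For reference, a minimal round trip with the streams exported above; the string payload is illustrative and the relative require points at this nested copy:

```js
// illustrative only; not part of the vendored file above
const { Gzip, Gunzip } = require('./index.js') // this nested minizlib copy

const gzip = new Gzip({ portable: true }) // portable forces the gzip OS byte to 0xFF
const gunzip = new Gunzip()

gunzip.on('data', (chunk) => console.log(chunk.toString())) // 'hello, world'
gzip.pipe(gunzip)
gzip.end('hello, world')
```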
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE b/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
new file mode 100644
index 0000000000000..bf1dece2e1f12
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js b/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
new file mode 100644
index 0000000000000..e8797aab6cc27
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
@@ -0,0 +1,649 @@
+'use strict'
+const proc = typeof process === 'object' && process ? process : {
+  stdout: null,
+  stderr: null,
+}
+const EE = require('events')
+const Stream = require('stream')
+const SD = require('string_decoder').StringDecoder
+
+const EOF = Symbol('EOF')
+const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
+const EMITTED_END = Symbol('emittedEnd')
+const EMITTING_END = Symbol('emittingEnd')
+const EMITTED_ERROR = Symbol('emittedError')
+const CLOSED = Symbol('closed')
+const READ = Symbol('read')
+const FLUSH = Symbol('flush')
+const FLUSHCHUNK = Symbol('flushChunk')
+const ENCODING = Symbol('encoding')
+const DECODER = Symbol('decoder')
+const FLOWING = Symbol('flowing')
+const PAUSED = Symbol('paused')
+const RESUME = Symbol('resume')
+const BUFFERLENGTH = Symbol('bufferLength')
+const BUFFERPUSH = Symbol('bufferPush')
+const BUFFERSHIFT = Symbol('bufferShift')
+const OBJECTMODE = Symbol('objectMode')
+const DESTROYED = Symbol('destroyed')
+const EMITDATA = Symbol('emitData')
+const EMITEND = Symbol('emitEnd')
+const EMITEND2 = Symbol('emitEnd2')
+const ASYNC = Symbol('async')
+
+const defer = fn => Promise.resolve().then(fn)
+
+// TODO remove when Node v8 support drops
+const doIter = global._MP_NO_ITERATOR_SYMBOLS_  !== '1'
+const ASYNCITERATOR = doIter && Symbol.asyncIterator
+  || Symbol('asyncIterator not implemented')
+const ITERATOR = doIter && Symbol.iterator
+  || Symbol('iterator not implemented')
+
+// events that mean 'the stream is over'
+// these are treated specially, and re-emitted
+// if they are listened for after emitting.
+const isEndish = ev =>
+  ev === 'end' ||
+  ev === 'finish' ||
+  ev === 'prefinish'
+
+const isArrayBuffer = b => b instanceof ArrayBuffer ||
+  typeof b === 'object' &&
+  b.constructor &&
+  b.constructor.name === 'ArrayBuffer' &&
+  b.byteLength >= 0
+
+const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
+
+class Pipe {
+  constructor (src, dest, opts) {
+    this.src = src
+    this.dest = dest
+    this.opts = opts
+    this.ondrain = () => src[RESUME]()
+    dest.on('drain', this.ondrain)
+  }
+  unpipe () {
+    this.dest.removeListener('drain', this.ondrain)
+  }
+  // istanbul ignore next - only here for the prototype
+  proxyErrors () {}
+  end () {
+    this.unpipe()
+    if (this.opts.end)
+      this.dest.end()
+  }
+}
+
+class PipeProxyErrors extends Pipe {
+  unpipe () {
+    this.src.removeListener('error', this.proxyErrors)
+    super.unpipe()
+  }
+  constructor (src, dest, opts) {
+    super(src, dest, opts)
+    this.proxyErrors = er => dest.emit('error', er)
+    src.on('error', this.proxyErrors)
+  }
+}
+
+module.exports = class Minipass extends Stream {
+  constructor (options) {
+    super()
+    this[FLOWING] = false
+    // whether we're explicitly paused
+    this[PAUSED] = false
+    this.pipes = []
+    this.buffer = []
+    this[OBJECTMODE] = options && options.objectMode || false
+    if (this[OBJECTMODE])
+      this[ENCODING] = null
+    else
+      this[ENCODING] = options && options.encoding || null
+    if (this[ENCODING] === 'buffer')
+      this[ENCODING] = null
+    this[ASYNC] = options && !!options.async || false
+    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
+    this[EOF] = false
+    this[EMITTED_END] = false
+    this[EMITTING_END] = false
+    this[CLOSED] = false
+    this[EMITTED_ERROR] = null
+    this.writable = true
+    this.readable = true
+    this[BUFFERLENGTH] = 0
+    this[DESTROYED] = false
+  }
+
+  get bufferLength () { return this[BUFFERLENGTH] }
+
+  get encoding () { return this[ENCODING] }
+  set encoding (enc) {
+    if (this[OBJECTMODE])
+      throw new Error('cannot set encoding in objectMode')
+
+    if (this[ENCODING] && enc !== this[ENCODING] &&
+        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
+      throw new Error('cannot change encoding')
+
+    if (this[ENCODING] !== enc) {
+      this[DECODER] = enc ? new SD(enc) : null
+      if (this.buffer.length)
+        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
+    }
+
+    this[ENCODING] = enc
+  }
+
+  setEncoding (enc) {
+    this.encoding = enc
+  }
+
+  get objectMode () { return this[OBJECTMODE] }
+  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
+
+  get ['async'] () { return this[ASYNC] }
+  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
+
+  write (chunk, encoding, cb) {
+    if (this[EOF])
+      throw new Error('write after end')
+
+    if (this[DESTROYED]) {
+      this.emit('error', Object.assign(
+        new Error('Cannot call write after a stream was destroyed'),
+        { code: 'ERR_STREAM_DESTROYED' }
+      ))
+      return true
+    }
+
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+
+    if (!encoding)
+      encoding = 'utf8'
+
+    const fn = this[ASYNC] ? defer : f => f()
+
+    // convert array buffers and typed array views into buffers
+    // at some point in the future, we may want to do the opposite!
+    // leave strings and buffers as-is
+    // anything else switches us into object mode
+    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
+      if (isArrayBufferView(chunk))
+        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
+      else if (isArrayBuffer(chunk))
+        chunk = Buffer.from(chunk)
+      else if (typeof chunk !== 'string')
+        // use the setter so we throw if we have encoding set
+        this.objectMode = true
+    }
+
+    // handle object mode up front, since it's simpler
+    // this yields better performance, fewer checks later.
+    if (this[OBJECTMODE]) {
+      /* istanbul ignore if - maybe impossible? */
+      if (this.flowing && this[BUFFERLENGTH] !== 0)
+        this[FLUSH](true)
+
+      if (this.flowing)
+        this.emit('data', chunk)
+      else
+        this[BUFFERPUSH](chunk)
+
+      if (this[BUFFERLENGTH] !== 0)
+        this.emit('readable')
+
+      if (cb)
+        fn(cb)
+
+      return this.flowing
+    }
+
+    // at this point the chunk is a buffer or string
+    // don't buffer it up or send it to the decoder
+    if (!chunk.length) {
+      if (this[BUFFERLENGTH] !== 0)
+        this.emit('readable')
+      if (cb)
+        fn(cb)
+      return this.flowing
+    }
+
+    // fast-path writing strings of same encoding to a stream with
+    // an empty buffer, skipping the buffer/decoder dance
+    if (typeof chunk === 'string' &&
+        // unless it is a string already ready for us to use
+        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
+      chunk = Buffer.from(chunk, encoding)
+    }
+
+    if (Buffer.isBuffer(chunk) && this[ENCODING])
+      chunk = this[DECODER].write(chunk)
+
+    // Note: flushing CAN potentially switch us into not-flowing mode
+    if (this.flowing && this[BUFFERLENGTH] !== 0)
+      this[FLUSH](true)
+
+    if (this.flowing)
+      this.emit('data', chunk)
+    else
+      this[BUFFERPUSH](chunk)
+
+    if (this[BUFFERLENGTH] !== 0)
+      this.emit('readable')
+
+    if (cb)
+      fn(cb)
+
+    return this.flowing
+  }
+
+  read (n) {
+    if (this[DESTROYED])
+      return null
+
+    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
+      this[MAYBE_EMIT_END]()
+      return null
+    }
+
+    if (this[OBJECTMODE])
+      n = null
+
+    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
+      if (this.encoding)
+        this.buffer = [this.buffer.join('')]
+      else
+        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
+    }
+
+    const ret = this[READ](n || null, this.buffer[0])
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [READ] (n, chunk) {
+    if (n === chunk.length || n === null)
+      this[BUFFERSHIFT]()
+    else {
+      this.buffer[0] = chunk.slice(n)
+      chunk = chunk.slice(0, n)
+      this[BUFFERLENGTH] -= n
+    }
+
+    this.emit('data', chunk)
+
+    if (!this.buffer.length && !this[EOF])
+      this.emit('drain')
+
+    return chunk
+  }
+
+  end (chunk, encoding, cb) {
+    if (typeof chunk === 'function')
+      cb = chunk, chunk = null
+    if (typeof encoding === 'function')
+      cb = encoding, encoding = 'utf8'
+    if (chunk)
+      this.write(chunk, encoding)
+    if (cb)
+      this.once('end', cb)
+    this[EOF] = true
+    this.writable = false
+
+    // if we haven't written anything, then go ahead and emit,
+    // even if we're not reading.
+    // we'll re-emit if a new 'end' listener is added anyway.
+    // This makes MP more suitable to write-only use cases.
+    if (this.flowing || !this[PAUSED])
+      this[MAYBE_EMIT_END]()
+    return this
+  }
+
+  // don't let the internal resume be overwritten
+  [RESUME] () {
+    if (this[DESTROYED])
+      return
+
+    this[PAUSED] = false
+    this[FLOWING] = true
+    this.emit('resume')
+    if (this.buffer.length)
+      this[FLUSH]()
+    else if (this[EOF])
+      this[MAYBE_EMIT_END]()
+    else
+      this.emit('drain')
+  }
+
+  resume () {
+    return this[RESUME]()
+  }
+
+  pause () {
+    this[FLOWING] = false
+    this[PAUSED] = true
+  }
+
+  get destroyed () {
+    return this[DESTROYED]
+  }
+
+  get flowing () {
+    return this[FLOWING]
+  }
+
+  get paused () {
+    return this[PAUSED]
+  }
+
+  [BUFFERPUSH] (chunk) {
+    if (this[OBJECTMODE])
+      this[BUFFERLENGTH] += 1
+    else
+      this[BUFFERLENGTH] += chunk.length
+    this.buffer.push(chunk)
+  }
+
+  [BUFFERSHIFT] () {
+    if (this.buffer.length) {
+      if (this[OBJECTMODE])
+        this[BUFFERLENGTH] -= 1
+      else
+        this[BUFFERLENGTH] -= this.buffer[0].length
+    }
+    return this.buffer.shift()
+  }
+
+  [FLUSH] (noDrain) {
+    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
+
+    if (!noDrain && !this.buffer.length && !this[EOF])
+      this.emit('drain')
+  }
+
+  [FLUSHCHUNK] (chunk) {
+    return chunk ? (this.emit('data', chunk), this.flowing) : false
+  }
+
+  pipe (dest, opts) {
+    if (this[DESTROYED])
+      return
+
+    const ended = this[EMITTED_END]
+    opts = opts || {}
+    if (dest === proc.stdout || dest === proc.stderr)
+      opts.end = false
+    else
+      opts.end = opts.end !== false
+    opts.proxyErrors = !!opts.proxyErrors
+
+    // piping an ended stream ends immediately
+    if (ended) {
+      if (opts.end)
+        dest.end()
+    } else {
+      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
+        : new PipeProxyErrors(this, dest, opts))
+      if (this[ASYNC])
+        defer(() => this[RESUME]())
+      else
+        this[RESUME]()
+    }
+
+    return dest
+  }
+
+  unpipe (dest) {
+    const p = this.pipes.find(p => p.dest === dest)
+    if (p) {
+      this.pipes.splice(this.pipes.indexOf(p), 1)
+      p.unpipe()
+    }
+  }
+
+  addListener (ev, fn) {
+    return this.on(ev, fn)
+  }
+
+  on (ev, fn) {
+    const ret = super.on(ev, fn)
+    if (ev === 'data' && !this.pipes.length && !this.flowing)
+      this[RESUME]()
+    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
+      super.emit('readable')
+    else if (isEndish(ev) && this[EMITTED_END]) {
+      super.emit(ev)
+      this.removeAllListeners(ev)
+    } else if (ev === 'error' && this[EMITTED_ERROR]) {
+      if (this[ASYNC])
+        defer(() => fn.call(this, this[EMITTED_ERROR]))
+      else
+        fn.call(this, this[EMITTED_ERROR])
+    }
+    return ret
+  }
+
+  get emittedEnd () {
+    return this[EMITTED_END]
+  }
+
+  [MAYBE_EMIT_END] () {
+    if (!this[EMITTING_END] &&
+        !this[EMITTED_END] &&
+        !this[DESTROYED] &&
+        this.buffer.length === 0 &&
+        this[EOF]) {
+      this[EMITTING_END] = true
+      this.emit('end')
+      this.emit('prefinish')
+      this.emit('finish')
+      if (this[CLOSED])
+        this.emit('close')
+      this[EMITTING_END] = false
+    }
+  }
+
+  emit (ev, data, ...extra) {
+    // error and close are only events allowed after calling destroy()
+    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
+      return
+    else if (ev === 'data') {
+      return !data ? false
+        : this[ASYNC] ? defer(() => this[EMITDATA](data))
+        : this[EMITDATA](data)
+    } else if (ev === 'end') {
+      return this[EMITEND]()
+    } else if (ev === 'close') {
+      this[CLOSED] = true
+      // don't emit close before 'end' and 'finish'
+      if (!this[EMITTED_END] && !this[DESTROYED])
+        return
+      const ret = super.emit('close')
+      this.removeAllListeners('close')
+      return ret
+    } else if (ev === 'error') {
+      this[EMITTED_ERROR] = data
+      const ret = super.emit('error', data)
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'resume') {
+      const ret = super.emit('resume')
+      this[MAYBE_EMIT_END]()
+      return ret
+    } else if (ev === 'finish' || ev === 'prefinish') {
+      const ret = super.emit(ev)
+      this.removeAllListeners(ev)
+      return ret
+    }
+
+    // Some other unknown event
+    const ret = super.emit(ev, data, ...extra)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITDATA] (data) {
+    for (const p of this.pipes) {
+      if (p.dest.write(data) === false)
+        this.pause()
+    }
+    const ret = super.emit('data', data)
+    this[MAYBE_EMIT_END]()
+    return ret
+  }
+
+  [EMITEND] () {
+    if (this[EMITTED_END])
+      return
+
+    this[EMITTED_END] = true
+    this.readable = false
+    if (this[ASYNC])
+      defer(() => this[EMITEND2]())
+    else
+      this[EMITEND2]()
+  }
+
+  [EMITEND2] () {
+    if (this[DECODER]) {
+      const data = this[DECODER].end()
+      if (data) {
+        for (const p of this.pipes) {
+          p.dest.write(data)
+        }
+        super.emit('data', data)
+      }
+    }
+
+    for (const p of this.pipes) {
+      p.end()
+    }
+    const ret = super.emit('end')
+    this.removeAllListeners('end')
+    return ret
+  }
+
+  // const all = await stream.collect()
+  collect () {
+    const buf = []
+    if (!this[OBJECTMODE])
+      buf.dataLength = 0
+    // set the promise first, in case an error is raised
+    // by triggering the flow here.
+    const p = this.promise()
+    this.on('data', c => {
+      buf.push(c)
+      if (!this[OBJECTMODE])
+        buf.dataLength += c.length
+    })
+    return p.then(() => buf)
+  }
+
+  // const data = await stream.concat()
+  concat () {
+    return this[OBJECTMODE]
+      ? Promise.reject(new Error('cannot concat in objectMode'))
+      : this.collect().then(buf =>
+          this[OBJECTMODE]
+            ? Promise.reject(new Error('cannot concat in objectMode'))
+            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
+  }
+
+  // stream.promise().then(() => done, er => emitted error)
+  promise () {
+    return new Promise((resolve, reject) => {
+      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
+      this.on('error', er => reject(er))
+      this.on('end', () => resolve())
+    })
+  }
+
+  // for await (let chunk of stream)
+  [ASYNCITERATOR] () {
+    const next = () => {
+      const res = this.read()
+      if (res !== null)
+        return Promise.resolve({ done: false, value: res })
+
+      if (this[EOF])
+        return Promise.resolve({ done: true })
+
+      let resolve = null
+      let reject = null
+      const onerr = er => {
+        this.removeListener('data', ondata)
+        this.removeListener('end', onend)
+        reject(er)
+      }
+      const ondata = value => {
+        this.removeListener('error', onerr)
+        this.removeListener('end', onend)
+        this.pause()
+        resolve({ value: value, done: !!this[EOF] })
+      }
+      const onend = () => {
+        this.removeListener('error', onerr)
+        this.removeListener('data', ondata)
+        resolve({ done: true })
+      }
+      const ondestroy = () => onerr(new Error('stream destroyed'))
+      return new Promise((res, rej) => {
+        reject = rej
+        resolve = res
+        this.once(DESTROYED, ondestroy)
+        this.once('error', onerr)
+        this.once('end', onend)
+        this.once('data', ondata)
+      })
+    }
+
+    return { next }
+  }
+
+  // for (let chunk of stream)
+  [ITERATOR] () {
+    const next = () => {
+      const value = this.read()
+      const done = value === null
+      return { value, done }
+    }
+    return { next }
+  }
+
+  destroy (er) {
+    if (this[DESTROYED]) {
+      if (er)
+        this.emit('error', er)
+      else
+        this.emit(DESTROYED)
+      return this
+    }
+
+    this[DESTROYED] = true
+
+    // throw away all buffered data, it's never coming out
+    this.buffer.length = 0
+    this[BUFFERLENGTH] = 0
+
+    if (typeof this.close === 'function' && !this[CLOSED])
+      this.close()
+
+    if (er)
+      this.emit('error', er)
+    else // if no error to emit, still reject pending promises
+      this.emit(DESTROYED)
+
+    return this
+  }
+
+  static isStream (s) {
+    return !!s && (s instanceof Minipass || s instanceof Stream ||
+      s instanceof EE && (
+        typeof s.pipe === 'function' || // readable
+        (typeof s.write === 'function' && typeof s.end === 'function') // writable
+      ))
+  }
+}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json b/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
new file mode 100644
index 0000000000000..548d03fa6d5d4
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
@@ -0,0 +1,56 @@
+{
+  "name": "minipass",
+  "version": "3.3.6",
+  "description": "minimal implementation of a PassThrough stream",
+  "main": "index.js",
+  "types": "index.d.ts",
+  "dependencies": {
+    "yallist": "^4.0.0"
+  },
+  "devDependencies": {
+    "@types/node": "^17.0.41",
+    "end-of-stream": "^1.4.0",
+    "prettier": "^2.6.2",
+    "tap": "^16.2.0",
+    "through2": "^2.0.3",
+    "ts-node": "^10.8.1",
+    "typescript": "^4.7.3"
+  },
+  "scripts": {
+    "test": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minipass.git"
+  },
+  "keywords": [
+    "passthrough",
+    "stream"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "files": [
+    "index.d.ts",
+    "index.js"
+  ],
+  "tap": {
+    "check-coverage": true
+  },
+  "engines": {
+    "node": ">=8"
+  },
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  }
+}
diff --git a/node_modules/tar/node_modules/minizlib/package.json b/node_modules/tar/node_modules/minizlib/package.json
new file mode 100644
index 0000000000000..98825a549f3fd
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/package.json
@@ -0,0 +1,42 @@
+{
+  "name": "minizlib",
+  "version": "2.1.2",
+  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
+  "main": "index.js",
+  "dependencies": {
+    "minipass": "^3.0.0",
+    "yallist": "^4.0.0"
+  },
+  "scripts": {
+    "test": "tap test/*.js --100 -J",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --all; git push origin --tags"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/minizlib.git"
+  },
+  "keywords": [
+    "zlib",
+    "gzip",
+    "gunzip",
+    "deflate",
+    "inflate",
+    "compression",
+    "zip",
+    "unzip"
+  ],
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "MIT",
+  "devDependencies": {
+    "tap": "^14.6.9"
+  },
+  "files": [
+    "index.js",
+    "constants.js"
+  ],
+  "engines": {
+    "node": ">= 8"
+  }
+}
diff --git a/package-lock.json b/package-lock.json
index a3afc342ec939..76d6eff67aa06 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -190,7 +190,7 @@
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/template-oss": "4.24.4",
         "front-matter": "^4.0.2",
-        "ignore-walk": "^7.0.0",
+        "ignore-walk": "^8.0.0",
         "jsdom": "^24.0.0",
         "rehype-stringify": "^9.0.3",
         "remark-gfm": "^3.0.1",
@@ -8458,14 +8458,32 @@
       }
     },
     "node_modules/ignore-walk": {
-      "version": "7.0.0",
-      "dev": true,
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-8.0.0.tgz",
+      "integrity": "sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==",
+      "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "minimatch": "^9.0.0"
+        "minimatch": "^10.0.3"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
+      }
+    },
+    "node_modules/ignore-walk/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/brace-expansion": "^5.0.0"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
     "node_modules/import-fresh": {
@@ -11093,31 +11111,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/npm-packlist/node_modules/ignore-walk": {
-      "version": "8.0.0",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "minimatch": "^10.0.3"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/npm-packlist/node_modules/minimatch": {
-      "version": "10.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/npm-pick-manifest": {
       "version": "11.0.1",
       "inBundle": true,

From 167662683d7ebbb34b1d65cf1cb74d69db12c871 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:18:32 -0700
Subject: [PATCH 27/63] deps: glob@11.0.3

---
 node_modules/.gitignore                       |   24 +-
 .../cliui/node_modules/ansi-regex/index.js    |   12 +-
 .../node_modules/ansi-regex/package.json      |    2 +-
 .../node_modules/strip-ansi/package.json      |    4 +-
 .../node_modules/glob/package.json            |   97 -
 .../jackspeak/dist/commonjs/index.js          |  947 --------
 .../node_modules/jackspeak/dist/esm/index.js  |  936 --------
 .../path-scurry/dist/esm/index.js             | 1981 ----------------
 .../node_modules/path-scurry/package.json     |   88 -
 .../package-json/node_modules/glob/LICENSE    |   15 -
 .../node_modules/glob/dist/commonjs/glob.js   |  247 --
 .../glob/dist/commonjs/has-magic.js           |   27 -
 .../node_modules/glob/dist/commonjs/ignore.js |  119 -
 .../node_modules/glob/dist/commonjs/index.js  |   68 -
 .../glob/dist/commonjs/pattern.js             |  219 --
 .../glob/dist/commonjs/processor.js           |  301 ---
 .../node_modules/glob/dist/commonjs/walker.js |  387 ----
 .../node_modules/glob/dist/esm/bin.d.mts      |    3 -
 .../node_modules/glob/dist/esm/bin.mjs        |  276 ---
 .../node_modules/glob/dist/esm/glob.js        |  243 --
 .../node_modules/glob/dist/esm/has-magic.js   |   23 -
 .../node_modules/glob/dist/esm/ignore.js      |  115 -
 .../node_modules/glob/dist/esm/index.js       |   55 -
 .../node_modules/glob/dist/esm/pattern.js     |  215 --
 .../node_modules/glob/dist/esm/processor.js   |  294 ---
 .../node_modules/glob/dist/esm/walker.js      |  381 ----
 .../node_modules/glob/package.json            |   97 -
 .../node_modules/jackspeak/LICENSE.md         |   55 -
 .../jackspeak/dist/commonjs/index.js          |  947 --------
 .../jackspeak/dist/commonjs/package.json      |    3 -
 .../node_modules/jackspeak/dist/esm/index.js  |  936 --------
 .../jackspeak/dist/esm/package.json           |    3 -
 .../node_modules/jackspeak/package.json       |   94 -
 .../minimatch/dist/commonjs/package.json      |    3 -
 .../minimatch/dist/esm/package.json           |    3 -
 .../node_modules/path-scurry/LICENSE.md       |   55 -
 .../path-scurry/dist/commonjs/index.js        | 2016 -----------------
 .../path-scurry/dist/commonjs/package.json    |    3 -
 .../path-scurry/dist/esm/index.js             | 1981 ----------------
 .../path-scurry/dist/esm/package.json         |    3 -
 .../node_modules/path-scurry/package.json     |   88 -
 node_modules/ansi-styles/index.js             |    6 +-
 node_modules/ansi-styles/package.json         |    8 +-
 .../cacache/node_modules/glob/LICENSE         |   15 -
 .../node_modules/glob/dist/commonjs/glob.js   |  247 --
 .../glob/dist/commonjs/has-magic.js           |   27 -
 .../node_modules/glob/dist/commonjs/ignore.js |  119 -
 .../node_modules/glob/dist/commonjs/index.js  |   68 -
 .../glob/dist/commonjs/package.json           |    3 -
 .../glob/dist/commonjs/pattern.js             |  219 --
 .../glob/dist/commonjs/processor.js           |  301 ---
 .../node_modules/glob/dist/commonjs/walker.js |  387 ----
 .../node_modules/glob/dist/esm/bin.d.mts      |    3 -
 .../node_modules/glob/dist/esm/bin.mjs        |  276 ---
 .../node_modules/glob/dist/esm/glob.js        |  243 --
 .../node_modules/glob/dist/esm/has-magic.js   |   23 -
 .../node_modules/glob/dist/esm/ignore.js      |  115 -
 .../node_modules/glob/dist/esm/index.js       |   55 -
 .../node_modules/glob/dist/esm/package.json   |    3 -
 .../node_modules/glob/dist/esm/pattern.js     |  215 --
 .../node_modules/glob/dist/esm/processor.js   |  294 ---
 .../node_modules/glob/dist/esm/walker.js      |  381 ----
 .../cacache/node_modules/jackspeak/LICENSE.md |   55 -
 .../jackspeak/dist/commonjs/package.json      |    3 -
 .../jackspeak/dist/esm/package.json           |    3 -
 .../node_modules/jackspeak/package.json       |   94 -
 .../cacache/node_modules/minimatch/LICENSE    |   15 -
 .../dist/commonjs/assert-valid-pattern.js     |   14 -
 .../minimatch/dist/commonjs/ast.js            |  592 -----
 .../dist/commonjs/brace-expressions.js        |  152 --
 .../minimatch/dist/commonjs/escape.js         |   22 -
 .../minimatch/dist/commonjs/index.js          | 1014 ---------
 .../minimatch/dist/commonjs/package.json      |    3 -
 .../minimatch/dist/commonjs/unescape.js       |   24 -
 .../dist/esm/assert-valid-pattern.js          |   10 -
 .../node_modules/minimatch/dist/esm/ast.js    |  588 -----
 .../minimatch/dist/esm/brace-expressions.js   |  148 --
 .../node_modules/minimatch/dist/esm/escape.js |   18 -
 .../node_modules/minimatch/dist/esm/index.js  | 1001 --------
 .../minimatch/dist/esm/package.json           |    3 -
 .../minimatch/dist/esm/unescape.js            |   20 -
 .../node_modules/minimatch/package.json       |   79 -
 .../node_modules/path-scurry/LICENSE.md       |   55 -
 .../path-scurry/dist/commonjs/index.js        | 2016 -----------------
 .../path-scurry/dist/commonjs/package.json    |    3 -
 .../path-scurry/dist/esm/package.json         |    3 -
 node_modules/glob/dist/esm/bin.mjs            |   10 +-
 .../node_modules/minimatch/LICENSE            |    0
 .../dist/commonjs/assert-valid-pattern.js     |    0
 .../minimatch/dist/commonjs/ast.js            |    0
 .../dist/commonjs/brace-expressions.js        |    0
 .../minimatch/dist/commonjs/escape.js         |    0
 .../minimatch/dist/commonjs/index.js          |    0
 .../minimatch}/dist/commonjs/package.json     |    0
 .../minimatch/dist/commonjs/unescape.js       |    0
 .../dist/esm/assert-valid-pattern.js          |    0
 .../node_modules/minimatch/dist/esm/ast.js    |    0
 .../minimatch/dist/esm/brace-expressions.js   |    0
 .../node_modules/minimatch/dist/esm/escape.js |    0
 .../node_modules/minimatch/dist/esm/index.js  |    0
 .../minimatch}/dist/esm/package.json          |    0
 .../minimatch/dist/esm/unescape.js            |    0
 .../node_modules/minimatch/package.json       |    0
 node_modules/glob/package.json                |   34 +-
 node_modules/jackspeak/dist/commonjs/index.js |  537 ++---
 node_modules/jackspeak/dist/esm/index.js      |  528 ++---
 node_modules/jackspeak/package.json           |   27 +-
 .../node_modules/glob/LICENSE                 |    0
 .../node_modules/glob/dist/commonjs/glob.js   |    0
 .../glob/dist/commonjs/has-magic.js           |    0
 .../node_modules/glob/dist/commonjs/ignore.js |    0
 .../node_modules/glob/dist/commonjs/index.js  |    0
 .../glob}/dist/commonjs/package.json          |    0
 .../glob/dist/commonjs/pattern.js             |    0
 .../glob/dist/commonjs/processor.js           |    0
 .../node_modules/glob/dist/commonjs/walker.js |    0
 .../node_modules/glob/dist/esm/bin.d.mts      |    0
 .../node_modules/glob/dist/esm/bin.mjs        |   10 +-
 .../node_modules/glob/dist/esm/glob.js        |    0
 .../node_modules/glob/dist/esm/has-magic.js   |    0
 .../node_modules/glob/dist/esm/ignore.js      |    0
 .../node_modules/glob/dist/esm/index.js       |    0
 .../node_modules/glob}/dist/esm/package.json  |    0
 .../node_modules/glob/dist/esm/pattern.js     |    0
 .../node_modules/glob/dist/esm/processor.js   |    0
 .../node_modules/glob/dist/esm/walker.js      |    0
 .../node_modules/glob/package.json            |   34 +-
 .../node_modules/jackspeak/LICENSE.md         |    0
 .../jackspeak/dist/commonjs/index.js          |  537 +++--
 .../jackspeak}/dist/commonjs/package.json     |    0
 .../jackspeak/dist/commonjs/parse-args.js     |    0
 .../node_modules/jackspeak/dist/esm/index.js  |  528 +++--
 .../jackspeak}/dist/esm/package.json          |    0
 .../jackspeak/dist/esm/parse-args.js          |    0
 .../node_modules/jackspeak/package.json       |   27 +-
 .../node_modules/path-scurry/LICENSE.md       |    0
 .../path-scurry/dist/commonjs/index.js        |    2 -
 .../path-scurry}/dist/commonjs/package.json   |    0
 .../path-scurry/dist/esm/index.js             |    2 -
 .../path-scurry}/dist/esm/package.json        |    0
 .../node_modules/path-scurry/package.json     |   31 +-
 .../path-scurry/dist/commonjs/index.js        |    2 +
 node_modules/path-scurry/dist/esm/index.js    |    2 +
 .../node_modules/lru-cache/LICENSE            |   15 -
 .../lru-cache/dist/commonjs/index.js          | 1546 -------------
 .../lru-cache/dist/commonjs/index.min.js      |    2 -
 .../lru-cache/dist/commonjs/package.json      |    3 -
 .../node_modules/lru-cache/dist/esm/index.js  | 1542 -------------
 .../lru-cache/dist/esm/index.min.js           |    2 -
 .../lru-cache/dist/esm/package.json           |    3 -
 .../node_modules/lru-cache/package.json       |  116 -
 node_modules/path-scurry/package.json         |   31 +-
 .../node_modules/ansi-regex/index.js          |   12 +-
 .../node_modules/ansi-regex/package.json      |    2 +-
 .../node_modules/strip-ansi/package.json      |    4 +-
 package-lock.json                             |  517 ++---
 package.json                                  |    2 +-
 157 files changed, 1456 insertions(+), 26961 deletions(-)
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/LICENSE
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts
 delete mode 100755 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/glob/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/jackspeak/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json
 delete mode 100644 node_modules/@npmcli/package-json/node_modules/path-scurry/package.json
 delete mode 100644 node_modules/cacache/node_modules/glob/LICENSE
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/glob.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/has-magic.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/ignore.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/index.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/package.json
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/pattern.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/processor.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/commonjs/walker.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/bin.d.mts
 delete mode 100755 node_modules/cacache/node_modules/glob/dist/esm/bin.mjs
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/glob.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/has-magic.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/ignore.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/index.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/package.json
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/pattern.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/processor.js
 delete mode 100644 node_modules/cacache/node_modules/glob/dist/esm/walker.js
 delete mode 100644 node_modules/cacache/node_modules/jackspeak/LICENSE.md
 delete mode 100644 node_modules/cacache/node_modules/jackspeak/dist/commonjs/package.json
 delete mode 100644 node_modules/cacache/node_modules/jackspeak/dist/esm/package.json
 delete mode 100644 node_modules/cacache/node_modules/jackspeak/package.json
 delete mode 100644 node_modules/cacache/node_modules/minimatch/LICENSE
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/ast.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/brace-expressions.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/escape.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/index.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/package.json
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/commonjs/unescape.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/ast.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/brace-expressions.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/escape.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/index.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/package.json
 delete mode 100644 node_modules/cacache/node_modules/minimatch/dist/esm/unescape.js
 delete mode 100644 node_modules/cacache/node_modules/minimatch/package.json
 delete mode 100644 node_modules/cacache/node_modules/path-scurry/LICENSE.md
 delete mode 100644 node_modules/cacache/node_modules/path-scurry/dist/commonjs/index.js
 delete mode 100644 node_modules/cacache/node_modules/path-scurry/dist/commonjs/package.json
 delete mode 100644 node_modules/cacache/node_modules/path-scurry/dist/esm/package.json
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/LICENSE (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/commonjs/ast.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/commonjs/brace-expressions.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/commonjs/escape.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/commonjs/index.js (100%)
 rename node_modules/{@npmcli/map-workspaces/node_modules/glob => glob/node_modules/minimatch}/dist/commonjs/package.json (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/commonjs/unescape.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/esm/assert-valid-pattern.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/esm/ast.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/esm/brace-expressions.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/esm/escape.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/esm/index.js (100%)
 rename node_modules/{@npmcli/map-workspaces/node_modules/glob => glob/node_modules/minimatch}/dist/esm/package.json (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/dist/esm/unescape.js (100%)
 rename node_modules/{@npmcli/package-json => glob}/node_modules/minimatch/package.json (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/LICENSE (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/glob.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/has-magic.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/ignore.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/index.js (100%)
 rename node_modules/{@npmcli/map-workspaces/node_modules/jackspeak => node-gyp/node_modules/glob}/dist/commonjs/package.json (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/pattern.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/processor.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/commonjs/walker.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/bin.d.mts (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/bin.mjs (98%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/glob.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/has-magic.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/ignore.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/index.js (100%)
 rename node_modules/{@npmcli/map-workspaces/node_modules/jackspeak => node-gyp/node_modules/glob}/dist/esm/package.json (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/pattern.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/processor.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/glob/dist/esm/walker.js (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/glob/package.json (81%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/jackspeak/LICENSE.md (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/jackspeak/dist/commonjs/index.js (77%)
 rename node_modules/{@npmcli/map-workspaces/node_modules/path-scurry => node-gyp/node_modules/jackspeak}/dist/commonjs/package.json (100%)
 rename node_modules/{ => node-gyp/node_modules}/jackspeak/dist/commonjs/parse-args.js (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/jackspeak/dist/esm/index.js (77%)
 rename node_modules/{@npmcli/map-workspaces/node_modules/path-scurry => node-gyp/node_modules/jackspeak}/dist/esm/package.json (100%)
 rename node_modules/{ => node-gyp/node_modules}/jackspeak/dist/esm/parse-args.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/jackspeak/package.json (85%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/path-scurry/LICENSE.md (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/path-scurry/dist/commonjs/index.js (99%)
 rename node_modules/{@npmcli/package-json/node_modules/glob => node-gyp/node_modules/path-scurry}/dist/commonjs/package.json (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/path-scurry/dist/esm/index.js (99%)
 rename node_modules/{@npmcli/package-json/node_modules/glob => node-gyp/node_modules/path-scurry}/dist/esm/package.json (100%)
 rename node_modules/{cacache => node-gyp}/node_modules/path-scurry/package.json (77%)
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/LICENSE
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/package.json
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/dist/esm/package.json
 delete mode 100644 node_modules/path-scurry/node_modules/lru-cache/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 0bb774f820179..f4705d305a386 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -25,20 +25,11 @@
 !/@npmcli/map-workspaces
 !/@npmcli/map-workspaces/node_modules/
 /@npmcli/map-workspaces/node_modules/*
-!/@npmcli/map-workspaces/node_modules/glob
-!/@npmcli/map-workspaces/node_modules/jackspeak
 !/@npmcli/map-workspaces/node_modules/minimatch
-!/@npmcli/map-workspaces/node_modules/path-scurry
 !/@npmcli/metavuln-calculator
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
 !/@npmcli/package-json
-!/@npmcli/package-json/node_modules/
-/@npmcli/package-json/node_modules/*
-!/@npmcli/package-json/node_modules/glob
-!/@npmcli/package-json/node_modules/jackspeak
-!/@npmcli/package-json/node_modules/minimatch
-!/@npmcli/package-json/node_modules/path-scurry
 !/@npmcli/promise-spawn
 !/@npmcli/query
 !/@npmcli/redact
@@ -68,12 +59,6 @@
 !/binary-extensions
 !/brace-expansion
 !/cacache
-!/cacache/node_modules/
-/cacache/node_modules/*
-!/cacache/node_modules/glob
-!/cacache/node_modules/jackspeak
-!/cacache/node_modules/minimatch
-!/cacache/node_modules/path-scurry
 !/chalk
 !/chownr
 !/ci-info
@@ -100,6 +85,9 @@
 !/foreground-child
 !/fs-minipass
 !/glob
+!/glob/node_modules/
+/glob/node_modules/*
+!/glob/node_modules/minimatch
 !/graceful-fs
 !/hosted-git-info
 !/http-cache-semantics
@@ -156,9 +144,12 @@
 !/node-gyp/node_modules/@npmcli/agent
 !/node-gyp/node_modules/cacache
 !/node-gyp/node_modules/chownr
+!/node-gyp/node_modules/glob
+!/node-gyp/node_modules/jackspeak
 !/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
 !/node-gyp/node_modules/mkdirp
+!/node-gyp/node_modules/path-scurry
 !/node-gyp/node_modules/tar
 !/node-gyp/node_modules/yallist
 !/nopt
@@ -185,9 +176,6 @@
 !/parse-conflict-json
 !/path-key
 !/path-scurry
-!/path-scurry/node_modules/
-/path-scurry/node_modules/*
-!/path-scurry/node_modules/lru-cache
 !/postcss-selector-parser
 !/proc-log
 !/proggy
diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
index ddfdba39a783a..2cc5ca2419f1b 100644
--- a/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
+++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/index.js
@@ -1,10 +1,14 @@
 export default function ansiRegex({onlyFirst = false} = {}) {
 	// Valid string terminator sequences are BEL, ESC\, and 0x9c
 	const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)';
-	const pattern = [
-		`[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
-		'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))',
-	].join('|');
+
+	// OSC sequences only: ESC ] ... ST (non-greedy until the first ST)
+	const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
+
+	// CSI and related: ESC/C1, optional intermediates, optional params (supports ; and :) then final byte
+	const csi = '[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]';
+
+	const pattern = `${osc}|${csi}`;
 
 	return new RegExp(pattern, onlyFirst ? undefined : 'g');
 }
diff --git a/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
index 49f3f61021512..2efe9ebbe66be 100644
--- a/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
+++ b/node_modules/@isaacs/cliui/node_modules/ansi-regex/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "ansi-regex",
-	"version": "6.1.0",
+	"version": "6.2.2",
 	"description": "Regular expression for matching ANSI escape codes",
 	"license": "MIT",
 	"repository": "chalk/ansi-regex",
diff --git a/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json
index e1f455c325b00..2a59216e424fc 100644
--- a/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json
+++ b/node_modules/@isaacs/cliui/node_modules/strip-ansi/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "strip-ansi",
-	"version": "7.1.0",
+	"version": "7.1.2",
 	"description": "Strip ANSI escape codes from a string",
 	"license": "MIT",
 	"repository": "chalk/strip-ansi",
@@ -12,6 +12,8 @@
 	},
 	"type": "module",
 	"exports": "./index.js",
+	"types": "./index.d.ts",
+	"sideEffects": false,
 	"engines": {
 		"node": ">=12"
 	},
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json b/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
deleted file mode 100644
index 7be2c53bd5c9f..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/glob/package.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (https://blog.izs.me/)",
-  "name": "glob",
-  "description": "the most correct and second fastest glob implementation in JavaScript",
-  "version": "11.0.3",
-  "type": "module",
-  "tshy": {
-    "main": true,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "bin": "./dist/esm/bin.mjs",
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-glob.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "npm run benchclean; git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts",
-    "profclean": "rm -f v8.log profile.txt",
-    "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts",
-    "prebench": "npm run prepare",
-    "bench": "bash benchmark.sh",
-    "preprof": "npm run prepare",
-    "prof": "bash prof.sh",
-    "benchclean": "node benchclean.cjs"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "dependencies": {
-    "foreground-child": "^3.3.1",
-    "jackspeak": "^4.1.1",
-    "minimatch": "^10.0.3",
-    "minipass": "^7.1.2",
-    "package-json-from-dist": "^1.0.0",
-    "path-scurry": "^2.0.0"
-  },
-  "devDependencies": {
-    "@types/node": "^24.0.1",
-    "memfs": "^4.17.2",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.5.3",
-    "rimraf": "^6.0.1",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
-  },
-  "tap": {
-    "before": "test/00-setup.ts"
-  },
-  "license": "ISC",
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js
deleted file mode 100644
index 543412746cc8f..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/index.js
+++ /dev/null
@@ -1,947 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0;
-const node_util_1 = require("node:util");
-// it's a tiny API, just cast it inline, it's fine
-//@ts-ignore
-const cliui_1 = __importDefault(require("@isaacs/cliui"));
-const node_path_1 = require("node:path");
-const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-exports.isConfigType = isConfigType;
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isValidOption = (v, vo) => !!vo &&
-    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based only
- * on its `type` and `multiple` property
- */
-const isConfigOptionOfType = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    (0, exports.isConfigType)(o.type) &&
-    o.type === type &&
-    !!o.multiple === multi;
-exports.isConfigOptionOfType = isConfigOptionOfType;
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based on
- * it having all valid properties
- */
-const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi));
-exports.isConfigOption = isConfigOption;
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
-const width = Math.min(process?.stdout?.columns ?? 80, 80);
-// indentation spaces from heading level
-const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-    .join(' ')
-    .trim()
-    .toUpperCase()
-    .replace(/ /g, '_');
-const toEnvVal = (value, delim = '\n') => {
-    const str = typeof value === 'string' ? value
-        : typeof value === 'boolean' ?
-            value ? '1'
-                : '0'
-            : typeof value === 'number' ? String(value)
-                : Array.isArray(value) ?
-                    value.map((v) => toEnvVal(v)).join(delim)
-                    : /* c8 ignore start */ undefined;
-    if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
-    }
-    /* c8 ignore stop */
-    return str;
-};
-const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
-    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
-        : []
-    : type === 'string' ? env
-        : type === 'boolean' ? env === '1'
-            : +env.trim());
-const undefOrType = (v, t) => v === undefined || typeof v === t;
-const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
-// print the value type, for error message reporting
-const valueType = (v) => typeof v === 'string' ? 'string'
-    : typeof v === 'boolean' ? 'boolean'
-        : typeof v === 'number' ? 'number'
-            : Array.isArray(v) ?
-                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
-                : `${v.type}${v.multiple ? '[]' : ''}`;
-const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
-    types[0]
-    : `(${types.join('|')})`;
-const validateFieldMeta = (field, fieldMeta) => {
-    if (fieldMeta) {
-        if (field.type !== undefined && field.type !== fieldMeta.type) {
-            throw new TypeError(`invalid type`, {
-                cause: {
-                    found: field.type,
-                    wanted: [fieldMeta.type, undefined],
-                },
-            });
-        }
-        if (field.multiple !== undefined &&
-            !!field.multiple !== fieldMeta.multiple) {
-            throw new TypeError(`invalid multiple`, {
-                cause: {
-                    found: field.multiple,
-                    wanted: [fieldMeta.multiple, undefined],
-                },
-            });
-        }
-        return fieldMeta;
-    }
-    if (!(0, exports.isConfigType)(field.type)) {
-        throw new TypeError(`invalid type`, {
-            cause: {
-                found: field.type,
-                wanted: ['string', 'number', 'boolean'],
-            },
-        });
-    }
-    return {
-        type: field.type,
-        multiple: !!field.multiple,
-    };
-};
-const validateField = (o, type, multiple) => {
-    const validateValidOptions = (def, validOptions) => {
-        if (!undefOrTypeArray(validOptions, type)) {
-            throw new TypeError('invalid validOptions', {
-                cause: {
-                    found: validOptions,
-                    wanted: valueType({ type, multiple: true }),
-                },
-            });
-        }
-        if (def !== undefined && validOptions !== undefined) {
-            const valid = Array.isArray(def) ?
-                def.every(v => validOptions.includes(v))
-                : validOptions.includes(def);
-            if (!valid) {
-                throw new TypeError('invalid default value not in validOptions', {
-                    cause: {
-                        found: def,
-                        wanted: validOptions,
-                    },
-                });
-            }
-        }
-    };
-    if (o.default !== undefined &&
-        !isValidValue(o.default, type, multiple)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: o.default,
-                wanted: valueType({ type, multiple }),
-            },
-        });
-    }
-    if ((0, exports.isConfigOptionOfType)(o, 'number', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'number', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if ((0, exports.isConfigOptionOfType)(o, 'string', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'string', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'boolean', true)) {
-        if (o.hint !== undefined) {
-            throw new TypeError('cannot provide hint for flag');
-        }
-        if (o.validOptions !== undefined) {
-            throw new TypeError('cannot provide validOptions for flag');
-        }
-    }
-    return o;
-};
-const toParseArgsOptionsConfig = (options) => {
-    return Object.entries(options).reduce((acc, [longOption, o]) => {
-        const p = {
-            type: 'string',
-            multiple: !!o.multiple,
-            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
-        };
-        const setNoBool = () => {
-            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
-                acc[`no-${longOption}`] = {
-                    type: 'boolean',
-                    multiple: !!o.multiple,
-                };
-            }
-        };
-        const setDefault = (def, fn) => {
-            if (def !== undefined) {
-                p.default = fn(def);
-            }
-        };
-        if ((0, exports.isConfigOption)(o, 'number', false)) {
-            setDefault(o.default, String);
-        }
-        else if ((0, exports.isConfigOption)(o, 'number', true)) {
-            setDefault(o.default, d => d.map(v => String(v)));
-        }
-        else if ((0, exports.isConfigOption)(o, 'string', false) ||
-            (0, exports.isConfigOption)(o, 'string', true)) {
-            setDefault(o.default, v => v);
-        }
-        else if ((0, exports.isConfigOption)(o, 'boolean', false) ||
-            (0, exports.isConfigOption)(o, 'boolean', true)) {
-            p.type = 'boolean';
-            setDefault(o.default, v => v);
-            setNoBool();
-        }
-        acc[longOption] = p;
-        return acc;
-    }, {});
-};
-/**
- * Class returned by the {@link jack} function and all configuration
- * definition methods.  This is what gets chained together.
- */
-class Jack {
-    #configSet;
-    #shorts;
-    #options;
-    #fields = [];
-    #env;
-    #envPrefix;
-    #allowPositionals;
-    #usage;
-    #usageMarkdown;
-    constructor(options = {}) {
-        this.#options = options;
-        this.#allowPositionals = options.allowPositionals !== false;
-        this.#env =
-            this.#options.env === undefined ? process.env : this.#options.env;
-        this.#envPrefix = options.envPrefix;
-        // We need to fib a little, because it's always the same object, but it
-        // starts out as having an empty config set.  Then each method that adds
-        // fields returns `this as Jack`
-        this.#configSet = Object.create(null);
-        this.#shorts = Object.create(null);
-    }
-    /**
-     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
-     * but also including `description` and `short` fields, if set.
-     */
-    get definitions() {
-        return this.#configSet;
-    }
-    /** map of `{ <short>: <long> }` strings for each short name defined */
-    get shorts() {
-        return this.#shorts;
-    }
-    /**
-     * options passed to the {@link Jack} constructor
-     */
-    get jackOptions() {
-        return this.#options;
-    }
-    /**
-     * the data used to generate {@link Jack#usage} and
-     * {@link Jack#usageMarkdown} content.
-     */
-    get usageFields() {
-        return this.#fields;
-    }
-    /**
-     * Set the default value (which will still be overridden by env or cli)
-     * as if from a parsed config file. The optional `source` param, if
-     * provided, will be included in error messages if a value is invalid or
-     * unknown.
-     */
-    setConfigValues(values, source = '') {
-        try {
-            this.validate(values);
-        }
-        catch (er) {
-            if (source && er instanceof Error) {
-                /* c8 ignore next */
-                const cause = typeof er.cause === 'object' ? er.cause : {};
-                er.cause = { ...cause, path: source };
-                Error.captureStackTrace(er, this.setConfigValues);
-            }
-            throw er;
-        }
-        for (const [field, value] of Object.entries(values)) {
-            const my = this.#configSet[field];
-            // already validated, just for TS's benefit
-            /* c8 ignore start */
-            if (!my) {
-                throw new Error('unexpected field in config set: ' + field, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        found: field,
-                    },
-                });
-            }
-            /* c8 ignore stop */
-            my.default = value;
-        }
-        return this;
-    }
-    /**
-     * Parse a string of arguments, and return the resulting
-     * `{ values, positionals }` object.
-     *
-     * If an {@link JackOptions#envPrefix} is set, then it will read default
-     * values from the environment, and write the resulting values back
-     * to the environment as well.
-     *
-     * Environment values always take precedence over any other value, except
-     * an explicit CLI setting.
-     */
-    parse(args = process.argv) {
-        this.loadEnvDefaults();
-        const p = this.parseRaw(args);
-        this.applyDefaults(p);
-        this.writeEnv(p);
-        return p;
-    }
-    loadEnvDefaults() {
-        if (this.#envPrefix) {
-            for (const [field, my] of Object.entries(this.#configSet)) {
-                const ek = toEnvKey(this.#envPrefix, field);
-                const env = this.#env[ek];
-                if (env !== undefined) {
-                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
-                }
-            }
-        }
-    }
-    applyDefaults(p) {
-        for (const [field, c] of Object.entries(this.#configSet)) {
-            if (c.default !== undefined && !(field in p.values)) {
-                //@ts-ignore
-                p.values[field] = c.default;
-            }
-        }
-    }
-    /**
-     * Only parse the command line arguments passed in.
-     * Does not strip off the `node script.js` bits, so it must be just the
-     * arguments you wish to have parsed.
-     * Does not read from or write to the environment, or set defaults.
-     */
-    parseRaw(args) {
-        if (args === process.argv) {
-            args = args.slice(process._eval !== undefined ? 1 : 2);
-        }
-        const result = (0, node_util_1.parseArgs)({
-            args,
-            options: toParseArgsOptionsConfig(this.#configSet),
-            // always strict, but using our own logic
-            strict: false,
-            allowPositionals: this.#allowPositionals,
-            tokens: true,
-        });
-        const p = {
-            values: {},
-            positionals: [],
-        };
-        for (const token of result.tokens) {
-            if (token.kind === 'positional') {
-                p.positionals.push(token.value);
-                if (this.#options.stopAtPositional ||
-                    this.#options.stopAtPositionalTest?.(token.value)) {
-                    p.positionals.push(...args.slice(token.index + 1));
-                    break;
-                }
-            }
-            else if (token.kind === 'option') {
-                let value = undefined;
-                if (token.name.startsWith('no-')) {
-                    const my = this.#configSet[token.name];
-                    const pname = token.name.substring('no-'.length);
-                    const pos = this.#configSet[pname];
-                    if (pos &&
-                        pos.type === 'boolean' &&
-                        (!my ||
-                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
-                        value = false;
-                        token.name = pname;
-                    }
-                }
-                const my = this.#configSet[token.name];
-                if (!my) {
-                    throw new Error(`Unknown option '${token.rawName}'. ` +
-                        `To specify a positional argument starting with a '-', ` +
-                        `place it at the end of the command after '--', as in ` +
-                        `'-- ${token.rawName}'`, {
-                        cause: {
-                            code: 'JACKSPEAK',
-                            found: token.rawName + (token.value ? `=${token.value}` : ''),
-                        },
-                    });
-                }
-                if (value === undefined) {
-                    if (token.value === undefined) {
-                        if (my.type !== 'boolean') {
-                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
-                                cause: {
-                                    code: 'JACKSPEAK',
-                                    name: token.rawName,
-                                    wanted: valueType(my),
-                                },
-                            });
-                        }
-                        value = true;
-                    }
-                    else {
-                        if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
-                        }
-                        if (my.type === 'string') {
-                            value = token.value;
-                        }
-                        else {
-                            value = +token.value;
-                            if (value !== value) {
-                                throw new Error(`Invalid value '${token.value}' provided for ` +
-                                    `'${token.rawName}' option, expected number`, {
-                                    cause: {
-                                        code: 'JACKSPEAK',
-                                        name: token.rawName,
-                                        found: token.value,
-                                        wanted: 'number',
-                                    },
-                                });
-                            }
-                        }
-                    }
-                }
-                if (my.multiple) {
-                    const pv = p.values;
-                    const tn = pv[token.name] ?? [];
-                    pv[token.name] = tn;
-                    tn.push(value);
-                }
-                else {
-                    const pv = p.values;
-                    pv[token.name] = value;
-                }
-            }
-        }
-        for (const [field, value] of Object.entries(p.values)) {
-            const valid = this.#configSet[field]?.validate;
-            const validOptions = this.#configSet[field]?.validOptions;
-            const cause = validOptions && !isValidOption(value, validOptions) ?
-                { name: field, found: value, validOptions }
-                : valid && !valid(value) ? { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
-            }
-        }
-        return p;
-    }
-    /**
-     * do not set fields as 'no-foo' if 'foo' exists and both are bools
-     * just set foo.
-     */
-    #noNoFields(f, val, s = f) {
-        if (!f.startsWith('no-') || typeof val !== 'boolean')
-            return;
-        const yes = f.substring('no-'.length);
-        // recurse so we get the core config key we care about.
-        this.#noNoFields(yes, val, s);
-        if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
-        }
-    }
-    /**
-     * Validate that any arbitrary object is a valid configuration `values`
-     * object.  Useful when loading config files or other sources.
-     */
-    validate(o) {
-        if (!o || typeof o !== 'object') {
-            throw new Error('Invalid config: not an object', {
-                cause: { code: 'JACKSPEAK', found: o },
-            });
-        }
-        const opts = o;
-        for (const field in o) {
-            const value = opts[field];
-            /* c8 ignore next - for TS */
-            if (value === undefined)
-                continue;
-            this.#noNoFields(field, value);
-            const config = this.#configSet[field];
-            if (!config) {
-                throw new Error(`Unknown config option: ${field}`, {
-                    cause: { code: 'JACKSPEAK', found: field },
-                });
-            }
-            if (!isValidValue(value, config.type, !!config.multiple)) {
-                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        name: field,
-                        found: value,
-                        wanted: valueType(config),
-                    },
-                });
-            }
-            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
-                { name: field, found: value, validOptions: config.validOptions }
-                : config.validate && !config.validate(value) ?
-                    { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause: { ...cause, code: 'JACKSPEAK' },
-                });
-            }
-        }
-    }
-    writeEnv(p) {
-        if (!this.#env || !this.#envPrefix)
-            return;
-        for (const [field, value] of Object.entries(p.values)) {
-            const my = this.#configSet[field];
-            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
-        }
-    }
-    /**
-     * Add a heading to the usage output banner
-     */
-    heading(text, level, { pre = false } = {}) {
-        if (level === undefined) {
-            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
-        }
-        this.#fields.push({ type: 'heading', text, level, pre });
-        return this;
-    }
-    /**
-     * Add a long-form description to the usage output at this position.
-     */
-    description(text, { pre } = {}) {
-        this.#fields.push({ type: 'description', text, pre });
-        return this;
-    }
-    /**
-     * Add one or more number fields.
-     */
-    num(fields) {
-        return this.#addFieldsWith(fields, 'number', false);
-    }
-    /**
-     * Add one or more multiple number fields.
-     */
-    numList(fields) {
-        return this.#addFieldsWith(fields, 'number', true);
-    }
-    /**
-     * Add one or more string option fields.
-     */
-    opt(fields) {
-        return this.#addFieldsWith(fields, 'string', false);
-    }
-    /**
-     * Add one or more multiple string option fields.
-     */
-    optList(fields) {
-        return this.#addFieldsWith(fields, 'string', true);
-    }
-    /**
-     * Add one or more flag fields.
-     */
-    flag(fields) {
-        return this.#addFieldsWith(fields, 'boolean', false);
-    }
-    /**
-     * Add one or more multiple flag fields.
-     */
-    flagList(fields) {
-        return this.#addFieldsWith(fields, 'boolean', true);
-    }
-    /**
-     * Generic field definition method. Similar to flag/flagList/number/etc,
-     * but you must specify the `type` (and optionally `multiple` and `delim`)
-     * fields on each one, or Jack won't know how to define them.
-     */
-    addFields(fields) {
-        return this.#addFields(this, fields);
-    }
-    #addFieldsWith(fields, type, multiple) {
-        return this.#addFields(this, fields, {
-            type,
-            multiple,
-        });
-    }
-    #addFields(next, fields, opt) {
-        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
-            this.#validateName(name, field);
-            const { type, multiple } = validateFieldMeta(field, opt);
-            const value = { ...field, type, multiple };
-            validateField(value, type, multiple);
-            next.#fields.push({ type: 'config', name, value });
-            return [name, value];
-        })));
-        return next;
-    }
-    #validateName(name, field) {
-        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
-            throw new TypeError(`Invalid option name: ${name}, ` +
-                `must be '-' delimited ASCII alphanumeric`);
-        }
-        if (this.#configSet[name]) {
-            throw new TypeError(`Cannot redefine option ${field}`);
-        }
-        if (this.#shorts[name]) {
-            throw new TypeError(`Cannot redefine option ${name}, already ` +
-                `in use for ${this.#shorts[name]}`);
-        }
-        if (field.short) {
-            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    'must be 1 ASCII alphanumeric character');
-            }
-            if (this.#shorts[field.short]) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    `already in use for ${this.#shorts[field.short]}`);
-            }
-            this.#shorts[field.short] = name;
-            this.#shorts[name] = name;
-        }
-    }
-    /**
-     * Return the usage banner for the given configuration
-     */
-    usage() {
-        if (this.#usage)
-            return this.#usage;
-        let headingLevel = 1;
-        //@ts-ignore
-        const ui = (0, cliui_1.default)({ width });
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            ui.div({
-                padding: [0, 0, 0, 0],
-                text: normalize(first.text),
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
-        if (this.#options.usage) {
-            ui.div({
-                text: this.#options.usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        else {
-            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            ui.div({
-                text: usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: '' });
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            const print = normalize(maybeDesc.text, maybeDesc.pre);
-            start++;
-            ui.div({ padding: [0, 0, 0, 0], text: print });
-            ui.div({ padding: [0, 0, 0, 0], text: '' });
-        }
-        const { rows, maxWidth } = this.#usageRows(start);
-        // every heading/description after the first gets indented by 2
-        // extra spaces.
-        for (const row of rows) {
-            if (row.left) {
-                // If the row is too long, don't wrap it
-                // Bump the right-hand side down a line to make room
-                const configIndent = indent(Math.max(headingLevel, 2));
-                if (row.left.length > maxWidth - 3) {
-                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
-                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
-                }
-                else {
-                    ui.div({
-                        text: row.left,
-                        padding: [0, 1, 0, configIndent],
-                        width: maxWidth,
-                    }, { padding: [0, 0, 0, 0], text: row.text });
-                }
-                if (row.skipLine) {
-                    ui.div({ padding: [0, 0, 0, 0], text: '' });
-                }
-            }
-            else {
-                if (isHeading(row)) {
-                    const { level } = row;
-                    headingLevel = level;
-                    // only h1 and h2 have bottom padding
-                    // h3-h6 do not
-                    const b = level <= 2 ? 1 : 0;
-                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
-                }
-                else {
-                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
-                }
-            }
-        }
-        return (this.#usage = ui.toString());
-    }
-    /**
-     * Return the usage banner markdown for the given configuration
-     */
-    usageMarkdown() {
-        if (this.#usageMarkdown)
-            return this.#usageMarkdown;
-        const out = [];
-        let headingLevel = 1;
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            out.push(`# ${normalizeOneLine(first.text)}`);
-        }
-        out.push('Usage:');
-        if (this.#options.usage) {
-            out.push(normalizeMarkdown(this.#options.usage, true));
-        }
-        else {
-            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            out.push(normalizeMarkdown(usage, true));
-        }
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
-            start++;
-        }
-        const { rows } = this.#usageRows(start);
-        // heading level in markdown is number of # ahead of text
-        for (const row of rows) {
-            if (row.left) {
-                out.push('#'.repeat(headingLevel + 1) +
-                    ' ' +
-                    normalizeOneLine(row.left, true));
-                if (row.text)
-                    out.push(normalizeMarkdown(row.text));
-            }
-            else if (isHeading(row)) {
-                const { level } = row;
-                headingLevel = level;
-                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
-            }
-            else {
-                out.push(normalizeMarkdown(row.text, !!row.pre));
-            }
-        }
-        return (this.#usageMarkdown = out.join('\n\n') + '\n');
-    }
-    #usageRows(start) {
-        // turn each config type into a row, and figure out the width of the
-        // left hand indentation for the option descriptions.
-        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
-        let maxWidth = 8;
-        let prev = undefined;
-        const rows = [];
-        for (const field of this.#fields.slice(start)) {
-            if (field.type !== 'config') {
-                if (prev?.type === 'config')
-                    prev.skipLine = true;
-                prev = undefined;
-                field.text = normalize(field.text, !!field.pre);
-                rows.push(field);
-                continue;
-            }
-            const { value } = field;
-            const desc = value.description || '';
-            const mult = value.multiple ? 'Can be set multiple times' : '';
-            const opts = value.validOptions?.length ?
-                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
-                : '';
-            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
-            const extra = [opts, mult].join(dmDelim).trim();
-            const text = (normalize(desc) + dmDelim + extra).trim();
-            const hint = value.hint ||
-                (value.type === 'number' ? 'n'
-                    : value.type === 'string' ? field.name
-                        : undefined);
-            const short = !value.short ? ''
-                : value.type === 'boolean' ? `-${value.short} `
-                    : `-${value.short}<${hint}> `;
-            const left = value.type === 'boolean' ?
-                `${short}--${field.name}`
-                : `${short}--${field.name}=<${hint}>`;
-            const row = { text, left, type: 'config' };
-            if (text.length > width - maxMax) {
-                row.skipLine = true;
-            }
-            if (prev && left.length > maxMax)
-                prev.skipLine = true;
-            prev = row;
-            const len = left.length + 4;
-            if (len > maxWidth && len < maxMax) {
-                maxWidth = len;
-            }
-            rows.push(row);
-        }
-        return { rows, maxWidth };
-    }
-    /**
-     * Return the configuration options as a plain object
-     */
-    toJSON() {
-        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
-            field,
-            {
-                type: def.type,
-                ...(def.multiple ? { multiple: true } : {}),
-                ...(def.delim ? { delim: def.delim } : {}),
-                ...(def.short ? { short: def.short } : {}),
-                ...(def.description ?
-                    { description: normalize(def.description) }
-                    : {}),
-                ...(def.validate ? { validate: def.validate } : {}),
-                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
-                ...(def.default !== undefined ? { default: def.default } : {}),
-                ...(def.hint ? { hint: def.hint } : {}),
-            },
-        ]));
-    }
-    /**
-     * Custom printer for `util.inspect`
-     */
-    [node_util_1.inspect.custom](_, options) {
-        return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`;
-    }
-}
-exports.Jack = Jack;
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-const jack = (options = {}) => new Jack(options);
-exports.jack = jack;
-// Unwrap and un-indent, so we can wrap description
-// strings however makes them look nice in the code.
-const normalize = (s, pre = false) => {
-    if (pre)
-        // prepend a ZWSP to each line so cliui doesn't strip it.
-        return s
-            .split('\n')
-            .map(l => `\u200b${l}`)
-            .join('\n');
-    return s
-        .split(/^\s*```\s*$/gm)
-        .map((s, i) => {
-        if (i % 2 === 1) {
-            if (!s.trim()) {
-                return `\`\`\`\n\`\`\`\n`;
-            }
-            // outdent the ``` blocks, but preserve whitespace otherwise.
-            const split = s.split('\n');
-            // throw out the \n at the start and end
-            split.pop();
-            split.shift();
-            const si = split.reduce((shortest, l) => {
-                /* c8 ignore next */
-                const ind = l.match(/^\s*/)?.[0] ?? '';
-                if (ind.length)
-                    return Math.min(ind.length, shortest);
-                else
-                    return shortest;
-            }, Infinity);
-            /* c8 ignore next */
-            const i = isFinite(si) ? si : 0;
-            return ('\n```\n' +
-                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
-                '\n```\n');
-        }
-        return (s
-            // remove single line breaks, except for lists
-            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
-            // normalize mid-line whitespace
-            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
-            // two line breaks are enough
-            .replace(/\n{3,}/g, '\n\n')
-            // remove any spaces at the start of a line
-            .replace(/\n[ \t]+/g, '\n')
-            .trim());
-    })
-        .join('\n');
-};
-// normalize for markdown printing, remove leading spaces on lines
-const normalizeMarkdown = (s, pre = false) => {
-    const n = normalize(s, pre).replace(/\\/g, '\\\\');
-    return pre ?
-        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
-        : n.replace(/\n +/g, '\n').trim();
-};
-const normalizeOneLine = (s, pre = false) => {
-    const n = normalize(s, pre)
-        .replace(/[\s\u200b]+/g, ' ')
-        .trim();
-    return pre ? `\`${n}\`` : n;
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js
deleted file mode 100644
index b959f5126423c..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/index.js
+++ /dev/null
@@ -1,936 +0,0 @@
-import { inspect, parseArgs, } from 'node:util';
-// it's a tiny API, just cast it inline, it's fine
-//@ts-ignore
-import cliui from '@isaacs/cliui';
-import { basename } from 'node:path';
-export const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isValidOption = (v, vo) => !!vo &&
-    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based only
- * on its `type` and `multiple` property
- */
-export const isConfigOptionOfType = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    isConfigType(o.type) &&
-    o.type === type &&
-    !!o.multiple === multi;
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based on
- * it having all valid properties
- */
-export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi));
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
-const width = Math.min(process?.stdout?.columns ?? 80, 80);
-// indentation spaces from heading level
-const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-    .join(' ')
-    .trim()
-    .toUpperCase()
-    .replace(/ /g, '_');
-const toEnvVal = (value, delim = '\n') => {
-    const str = typeof value === 'string' ? value
-        : typeof value === 'boolean' ?
-            value ? '1'
-                : '0'
-            : typeof value === 'number' ? String(value)
-                : Array.isArray(value) ?
-                    value.map((v) => toEnvVal(v)).join(delim)
-                    : /* c8 ignore start */ undefined;
-    if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
-    }
-    /* c8 ignore stop */
-    return str;
-};
-const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
-    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
-        : []
-    : type === 'string' ? env
-        : type === 'boolean' ? env === '1'
-            : +env.trim());
-const undefOrType = (v, t) => v === undefined || typeof v === t;
-const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
-// print the value type, for error message reporting
-const valueType = (v) => typeof v === 'string' ? 'string'
-    : typeof v === 'boolean' ? 'boolean'
-        : typeof v === 'number' ? 'number'
-            : Array.isArray(v) ?
-                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
-                : `${v.type}${v.multiple ? '[]' : ''}`;
-const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
-    types[0]
-    : `(${types.join('|')})`;
-const validateFieldMeta = (field, fieldMeta) => {
-    if (fieldMeta) {
-        if (field.type !== undefined && field.type !== fieldMeta.type) {
-            throw new TypeError(`invalid type`, {
-                cause: {
-                    found: field.type,
-                    wanted: [fieldMeta.type, undefined],
-                },
-            });
-        }
-        if (field.multiple !== undefined &&
-            !!field.multiple !== fieldMeta.multiple) {
-            throw new TypeError(`invalid multiple`, {
-                cause: {
-                    found: field.multiple,
-                    wanted: [fieldMeta.multiple, undefined],
-                },
-            });
-        }
-        return fieldMeta;
-    }
-    if (!isConfigType(field.type)) {
-        throw new TypeError(`invalid type`, {
-            cause: {
-                found: field.type,
-                wanted: ['string', 'number', 'boolean'],
-            },
-        });
-    }
-    return {
-        type: field.type,
-        multiple: !!field.multiple,
-    };
-};
-const validateField = (o, type, multiple) => {
-    const validateValidOptions = (def, validOptions) => {
-        if (!undefOrTypeArray(validOptions, type)) {
-            throw new TypeError('invalid validOptions', {
-                cause: {
-                    found: validOptions,
-                    wanted: valueType({ type, multiple: true }),
-                },
-            });
-        }
-        if (def !== undefined && validOptions !== undefined) {
-            const valid = Array.isArray(def) ?
-                def.every(v => validOptions.includes(v))
-                : validOptions.includes(def);
-            if (!valid) {
-                throw new TypeError('invalid default value not in validOptions', {
-                    cause: {
-                        found: def,
-                        wanted: validOptions,
-                    },
-                });
-            }
-        }
-    };
-    if (o.default !== undefined &&
-        !isValidValue(o.default, type, multiple)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: o.default,
-                wanted: valueType({ type, multiple }),
-            },
-        });
-    }
-    if (isConfigOptionOfType(o, 'number', false) ||
-        isConfigOptionOfType(o, 'number', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if (isConfigOptionOfType(o, 'string', false) ||
-        isConfigOptionOfType(o, 'string', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if (isConfigOptionOfType(o, 'boolean', false) ||
-        isConfigOptionOfType(o, 'boolean', true)) {
-        if (o.hint !== undefined) {
-            throw new TypeError('cannot provide hint for flag');
-        }
-        if (o.validOptions !== undefined) {
-            throw new TypeError('cannot provide validOptions for flag');
-        }
-    }
-    return o;
-};
-const toParseArgsOptionsConfig = (options) => {
-    return Object.entries(options).reduce((acc, [longOption, o]) => {
-        const p = {
-            type: 'string',
-            multiple: !!o.multiple,
-            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
-        };
-        const setNoBool = () => {
-            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
-                acc[`no-${longOption}`] = {
-                    type: 'boolean',
-                    multiple: !!o.multiple,
-                };
-            }
-        };
-        const setDefault = (def, fn) => {
-            if (def !== undefined) {
-                p.default = fn(def);
-            }
-        };
-        if (isConfigOption(o, 'number', false)) {
-            setDefault(o.default, String);
-        }
-        else if (isConfigOption(o, 'number', true)) {
-            setDefault(o.default, d => d.map(v => String(v)));
-        }
-        else if (isConfigOption(o, 'string', false) ||
-            isConfigOption(o, 'string', true)) {
-            setDefault(o.default, v => v);
-        }
-        else if (isConfigOption(o, 'boolean', false) ||
-            isConfigOption(o, 'boolean', true)) {
-            p.type = 'boolean';
-            setDefault(o.default, v => v);
-            setNoBool();
-        }
-        acc[longOption] = p;
-        return acc;
-    }, {});
-};
-/**
- * Class returned by the {@link jack} function and all configuration
- * definition methods.  This is what gets chained together.
- */
-export class Jack {
-    #configSet;
-    #shorts;
-    #options;
-    #fields = [];
-    #env;
-    #envPrefix;
-    #allowPositionals;
-    #usage;
-    #usageMarkdown;
-    constructor(options = {}) {
-        this.#options = options;
-        this.#allowPositionals = options.allowPositionals !== false;
-        this.#env =
-            this.#options.env === undefined ? process.env : this.#options.env;
-        this.#envPrefix = options.envPrefix;
-        // We need to fib a little, because it's always the same object, but it
-        // starts out as having an empty config set.  Then each method that adds
-        // fields returns `this as Jack`
-        this.#configSet = Object.create(null);
-        this.#shorts = Object.create(null);
-    }
-    /**
-     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
-     * but also including `description` and `short` fields, if set.
-     */
-    get definitions() {
-        return this.#configSet;
-    }
-    /** map of `{ <short>: <long> }` strings for each short name defined */
-    get shorts() {
-        return this.#shorts;
-    }
-    /**
-     * options passed to the {@link Jack} constructor
-     */
-    get jackOptions() {
-        return this.#options;
-    }
-    /**
-     * the data used to generate {@link Jack#usage} and
-     * {@link Jack#usageMarkdown} content.
-     */
-    get usageFields() {
-        return this.#fields;
-    }
-    /**
-     * Set the default value (which will still be overridden by env or cli)
-     * as if from a parsed config file. The optional `source` param, if
-     * provided, will be included in error messages if a value is invalid or
-     * unknown.
-     */
-    setConfigValues(values, source = '') {
-        try {
-            this.validate(values);
-        }
-        catch (er) {
-            if (source && er instanceof Error) {
-                /* c8 ignore next */
-                const cause = typeof er.cause === 'object' ? er.cause : {};
-                er.cause = { ...cause, path: source };
-                Error.captureStackTrace(er, this.setConfigValues);
-            }
-            throw er;
-        }
-        for (const [field, value] of Object.entries(values)) {
-            const my = this.#configSet[field];
-            // already validated, just for TS's benefit
-            /* c8 ignore start */
-            if (!my) {
-                throw new Error('unexpected field in config set: ' + field, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        found: field,
-                    },
-                });
-            }
-            /* c8 ignore stop */
-            my.default = value;
-        }
-        return this;
-    }
-    /**
-     * Parse a string of arguments, and return the resulting
-     * `{ values, positionals }` object.
-     *
-     * If an {@link JackOptions#envPrefix} is set, then it will read default
-     * values from the environment, and write the resulting values back
-     * to the environment as well.
-     *
-     * Environment values always take precedence over any other value, except
-     * an explicit CLI setting.
-     */
-    parse(args = process.argv) {
-        this.loadEnvDefaults();
-        const p = this.parseRaw(args);
-        this.applyDefaults(p);
-        this.writeEnv(p);
-        return p;
-    }
-    loadEnvDefaults() {
-        if (this.#envPrefix) {
-            for (const [field, my] of Object.entries(this.#configSet)) {
-                const ek = toEnvKey(this.#envPrefix, field);
-                const env = this.#env[ek];
-                if (env !== undefined) {
-                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
-                }
-            }
-        }
-    }
-    applyDefaults(p) {
-        for (const [field, c] of Object.entries(this.#configSet)) {
-            if (c.default !== undefined && !(field in p.values)) {
-                //@ts-ignore
-                p.values[field] = c.default;
-            }
-        }
-    }
-    /**
-     * Only parse the command line arguments passed in.
-     * Does not strip off the `node script.js` bits, so it must be just the
-     * arguments you wish to have parsed.
-     * Does not read from or write to the environment, or set defaults.
-     */
-    parseRaw(args) {
-        if (args === process.argv) {
-            args = args.slice(process._eval !== undefined ? 1 : 2);
-        }
-        const result = parseArgs({
-            args,
-            options: toParseArgsOptionsConfig(this.#configSet),
-            // always strict, but using our own logic
-            strict: false,
-            allowPositionals: this.#allowPositionals,
-            tokens: true,
-        });
-        const p = {
-            values: {},
-            positionals: [],
-        };
-        for (const token of result.tokens) {
-            if (token.kind === 'positional') {
-                p.positionals.push(token.value);
-                if (this.#options.stopAtPositional ||
-                    this.#options.stopAtPositionalTest?.(token.value)) {
-                    p.positionals.push(...args.slice(token.index + 1));
-                    break;
-                }
-            }
-            else if (token.kind === 'option') {
-                let value = undefined;
-                if (token.name.startsWith('no-')) {
-                    const my = this.#configSet[token.name];
-                    const pname = token.name.substring('no-'.length);
-                    const pos = this.#configSet[pname];
-                    if (pos &&
-                        pos.type === 'boolean' &&
-                        (!my ||
-                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
-                        value = false;
-                        token.name = pname;
-                    }
-                }
-                const my = this.#configSet[token.name];
-                if (!my) {
-                    throw new Error(`Unknown option '${token.rawName}'. ` +
-                        `To specify a positional argument starting with a '-', ` +
-                        `place it at the end of the command after '--', as in ` +
-                        `'-- ${token.rawName}'`, {
-                        cause: {
-                            code: 'JACKSPEAK',
-                            found: token.rawName + (token.value ? `=${token.value}` : ''),
-                        },
-                    });
-                }
-                if (value === undefined) {
-                    if (token.value === undefined) {
-                        if (my.type !== 'boolean') {
-                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
-                                cause: {
-                                    code: 'JACKSPEAK',
-                                    name: token.rawName,
-                                    wanted: valueType(my),
-                                },
-                            });
-                        }
-                        value = true;
-                    }
-                    else {
-                        if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
-                        }
-                        if (my.type === 'string') {
-                            value = token.value;
-                        }
-                        else {
-                            value = +token.value;
-                            if (value !== value) {
-                                throw new Error(`Invalid value '${token.value}' provided for ` +
-                                    `'${token.rawName}' option, expected number`, {
-                                    cause: {
-                                        code: 'JACKSPEAK',
-                                        name: token.rawName,
-                                        found: token.value,
-                                        wanted: 'number',
-                                    },
-                                });
-                            }
-                        }
-                    }
-                }
-                if (my.multiple) {
-                    const pv = p.values;
-                    const tn = pv[token.name] ?? [];
-                    pv[token.name] = tn;
-                    tn.push(value);
-                }
-                else {
-                    const pv = p.values;
-                    pv[token.name] = value;
-                }
-            }
-        }
-        for (const [field, value] of Object.entries(p.values)) {
-            const valid = this.#configSet[field]?.validate;
-            const validOptions = this.#configSet[field]?.validOptions;
-            const cause = validOptions && !isValidOption(value, validOptions) ?
-                { name: field, found: value, validOptions }
-                : valid && !valid(value) ? { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
-            }
-        }
-        return p;
-    }
-    /**
-     * do not set fields as 'no-foo' if 'foo' exists and both are bools
-     * just set foo.
-     */
-    #noNoFields(f, val, s = f) {
-        if (!f.startsWith('no-') || typeof val !== 'boolean')
-            return;
-        const yes = f.substring('no-'.length);
-        // recurse so we get the core config key we care about.
-        this.#noNoFields(yes, val, s);
-        if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
-        }
-    }
-    /**
-     * Validate that any arbitrary object is a valid configuration `values`
-     * object.  Useful when loading config files or other sources.
-     */
-    validate(o) {
-        if (!o || typeof o !== 'object') {
-            throw new Error('Invalid config: not an object', {
-                cause: { code: 'JACKSPEAK', found: o },
-            });
-        }
-        const opts = o;
-        for (const field in o) {
-            const value = opts[field];
-            /* c8 ignore next - for TS */
-            if (value === undefined)
-                continue;
-            this.#noNoFields(field, value);
-            const config = this.#configSet[field];
-            if (!config) {
-                throw new Error(`Unknown config option: ${field}`, {
-                    cause: { code: 'JACKSPEAK', found: field },
-                });
-            }
-            if (!isValidValue(value, config.type, !!config.multiple)) {
-                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        name: field,
-                        found: value,
-                        wanted: valueType(config),
-                    },
-                });
-            }
-            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
-                { name: field, found: value, validOptions: config.validOptions }
-                : config.validate && !config.validate(value) ?
-                    { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause: { ...cause, code: 'JACKSPEAK' },
-                });
-            }
-        }
-    }
-    writeEnv(p) {
-        if (!this.#env || !this.#envPrefix)
-            return;
-        for (const [field, value] of Object.entries(p.values)) {
-            const my = this.#configSet[field];
-            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
-        }
-    }
-    /**
-     * Add a heading to the usage output banner
-     */
-    heading(text, level, { pre = false } = {}) {
-        if (level === undefined) {
-            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
-        }
-        this.#fields.push({ type: 'heading', text, level, pre });
-        return this;
-    }
-    /**
-     * Add a long-form description to the usage output at this position.
-     */
-    description(text, { pre } = {}) {
-        this.#fields.push({ type: 'description', text, pre });
-        return this;
-    }
-    /**
-     * Add one or more number fields.
-     */
-    num(fields) {
-        return this.#addFieldsWith(fields, 'number', false);
-    }
-    /**
-     * Add one or more multiple number fields.
-     */
-    numList(fields) {
-        return this.#addFieldsWith(fields, 'number', true);
-    }
-    /**
-     * Add one or more string option fields.
-     */
-    opt(fields) {
-        return this.#addFieldsWith(fields, 'string', false);
-    }
-    /**
-     * Add one or more multiple string option fields.
-     */
-    optList(fields) {
-        return this.#addFieldsWith(fields, 'string', true);
-    }
-    /**
-     * Add one or more flag fields.
-     */
-    flag(fields) {
-        return this.#addFieldsWith(fields, 'boolean', false);
-    }
-    /**
-     * Add one or more multiple flag fields.
-     */
-    flagList(fields) {
-        return this.#addFieldsWith(fields, 'boolean', true);
-    }
-    /**
-     * Generic field definition method. Similar to flag/flagList/number/etc,
-     * but you must specify the `type` (and optionally `multiple` and `delim`)
-     * fields on each one, or Jack won't know how to define them.
-     */
-    addFields(fields) {
-        return this.#addFields(this, fields);
-    }
-    #addFieldsWith(fields, type, multiple) {
-        return this.#addFields(this, fields, {
-            type,
-            multiple,
-        });
-    }
-    #addFields(next, fields, opt) {
-        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
-            this.#validateName(name, field);
-            const { type, multiple } = validateFieldMeta(field, opt);
-            const value = { ...field, type, multiple };
-            validateField(value, type, multiple);
-            next.#fields.push({ type: 'config', name, value });
-            return [name, value];
-        })));
-        return next;
-    }
-    #validateName(name, field) {
-        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
-            throw new TypeError(`Invalid option name: ${name}, ` +
-                `must be '-' delimited ASCII alphanumeric`);
-        }
-        if (this.#configSet[name]) {
-            throw new TypeError(`Cannot redefine option ${field}`);
-        }
-        if (this.#shorts[name]) {
-            throw new TypeError(`Cannot redefine option ${name}, already ` +
-                `in use for ${this.#shorts[name]}`);
-        }
-        if (field.short) {
-            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    'must be 1 ASCII alphanumeric character');
-            }
-            if (this.#shorts[field.short]) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    `already in use for ${this.#shorts[field.short]}`);
-            }
-            this.#shorts[field.short] = name;
-            this.#shorts[name] = name;
-        }
-    }
-    /**
-     * Return the usage banner for the given configuration
-     */
-    usage() {
-        if (this.#usage)
-            return this.#usage;
-        let headingLevel = 1;
-        //@ts-ignore
-        const ui = cliui({ width });
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            ui.div({
-                padding: [0, 0, 0, 0],
-                text: normalize(first.text),
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
-        if (this.#options.usage) {
-            ui.div({
-                text: this.#options.usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        else {
-            const cmd = basename(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            ui.div({
-                text: usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: '' });
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            const print = normalize(maybeDesc.text, maybeDesc.pre);
-            start++;
-            ui.div({ padding: [0, 0, 0, 0], text: print });
-            ui.div({ padding: [0, 0, 0, 0], text: '' });
-        }
-        const { rows, maxWidth } = this.#usageRows(start);
-        // every heading/description after the first gets indented by 2
-        // extra spaces.
-        for (const row of rows) {
-            if (row.left) {
-                // If the row is too long, don't wrap it
-                // Bump the right-hand side down a line to make room
-                const configIndent = indent(Math.max(headingLevel, 2));
-                if (row.left.length > maxWidth - 3) {
-                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
-                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
-                }
-                else {
-                    ui.div({
-                        text: row.left,
-                        padding: [0, 1, 0, configIndent],
-                        width: maxWidth,
-                    }, { padding: [0, 0, 0, 0], text: row.text });
-                }
-                if (row.skipLine) {
-                    ui.div({ padding: [0, 0, 0, 0], text: '' });
-                }
-            }
-            else {
-                if (isHeading(row)) {
-                    const { level } = row;
-                    headingLevel = level;
-                    // only h1 and h2 have bottom padding
-                    // h3-h6 do not
-                    const b = level <= 2 ? 1 : 0;
-                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
-                }
-                else {
-                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
-                }
-            }
-        }
-        return (this.#usage = ui.toString());
-    }
-    /**
-     * Return the usage banner markdown for the given configuration
-     */
-    usageMarkdown() {
-        if (this.#usageMarkdown)
-            return this.#usageMarkdown;
-        const out = [];
-        let headingLevel = 1;
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            out.push(`# ${normalizeOneLine(first.text)}`);
-        }
-        out.push('Usage:');
-        if (this.#options.usage) {
-            out.push(normalizeMarkdown(this.#options.usage, true));
-        }
-        else {
-            const cmd = basename(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            out.push(normalizeMarkdown(usage, true));
-        }
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
-            start++;
-        }
-        const { rows } = this.#usageRows(start);
-        // heading level in markdown is number of # ahead of text
-        for (const row of rows) {
-            if (row.left) {
-                out.push('#'.repeat(headingLevel + 1) +
-                    ' ' +
-                    normalizeOneLine(row.left, true));
-                if (row.text)
-                    out.push(normalizeMarkdown(row.text));
-            }
-            else if (isHeading(row)) {
-                const { level } = row;
-                headingLevel = level;
-                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
-            }
-            else {
-                out.push(normalizeMarkdown(row.text, !!row.pre));
-            }
-        }
-        return (this.#usageMarkdown = out.join('\n\n') + '\n');
-    }
-    #usageRows(start) {
-        // turn each config type into a row, and figure out the width of the
-        // left hand indentation for the option descriptions.
-        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
-        let maxWidth = 8;
-        let prev = undefined;
-        const rows = [];
-        for (const field of this.#fields.slice(start)) {
-            if (field.type !== 'config') {
-                if (prev?.type === 'config')
-                    prev.skipLine = true;
-                prev = undefined;
-                field.text = normalize(field.text, !!field.pre);
-                rows.push(field);
-                continue;
-            }
-            const { value } = field;
-            const desc = value.description || '';
-            const mult = value.multiple ? 'Can be set multiple times' : '';
-            const opts = value.validOptions?.length ?
-                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
-                : '';
-            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
-            const extra = [opts, mult].join(dmDelim).trim();
-            const text = (normalize(desc) + dmDelim + extra).trim();
-            const hint = value.hint ||
-                (value.type === 'number' ? 'n'
-                    : value.type === 'string' ? field.name
-                        : undefined);
-            const short = !value.short ? ''
-                : value.type === 'boolean' ? `-${value.short} `
-                    : `-${value.short}<${hint}> `;
-            const left = value.type === 'boolean' ?
-                `${short}--${field.name}`
-                : `${short}--${field.name}=<${hint}>`;
-            const row = { text, left, type: 'config' };
-            if (text.length > width - maxMax) {
-                row.skipLine = true;
-            }
-            if (prev && left.length > maxMax)
-                prev.skipLine = true;
-            prev = row;
-            const len = left.length + 4;
-            if (len > maxWidth && len < maxMax) {
-                maxWidth = len;
-            }
-            rows.push(row);
-        }
-        return { rows, maxWidth };
-    }
-    /**
-     * Return the configuration options as a plain object
-     */
-    toJSON() {
-        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
-            field,
-            {
-                type: def.type,
-                ...(def.multiple ? { multiple: true } : {}),
-                ...(def.delim ? { delim: def.delim } : {}),
-                ...(def.short ? { short: def.short } : {}),
-                ...(def.description ?
-                    { description: normalize(def.description) }
-                    : {}),
-                ...(def.validate ? { validate: def.validate } : {}),
-                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
-                ...(def.default !== undefined ? { default: def.default } : {}),
-                ...(def.hint ? { hint: def.hint } : {}),
-            },
-        ]));
-    }
-    /**
-     * Custom printer for `util.inspect`
-     */
-    [inspect.custom](_, options) {
-        return `Jack ${inspect(this.toJSON(), options)}`;
-    }
-}
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-export const jack = (options = {}) => new Jack(options);
-// Unwrap and un-indent, so we can wrap description
-// strings however makes them look nice in the code.
-const normalize = (s, pre = false) => {
-    if (pre)
-        // prepend a ZWSP to each line so cliui doesn't strip it.
-        return s
-            .split('\n')
-            .map(l => `\u200b${l}`)
-            .join('\n');
-    return s
-        .split(/^\s*```\s*$/gm)
-        .map((s, i) => {
-        if (i % 2 === 1) {
-            if (!s.trim()) {
-                return `\`\`\`\n\`\`\`\n`;
-            }
-            // outdent the ``` blocks, but preserve whitespace otherwise.
-            const split = s.split('\n');
-            // throw out the \n at the start and end
-            split.pop();
-            split.shift();
-            const si = split.reduce((shortest, l) => {
-                /* c8 ignore next */
-                const ind = l.match(/^\s*/)?.[0] ?? '';
-                if (ind.length)
-                    return Math.min(ind.length, shortest);
-                else
-                    return shortest;
-            }, Infinity);
-            /* c8 ignore next */
-            const i = isFinite(si) ? si : 0;
-            return ('\n```\n' +
-                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
-                '\n```\n');
-        }
-        return (s
-            // remove single line breaks, except for lists
-            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
-            // normalize mid-line whitespace
-            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
-            // two line breaks are enough
-            .replace(/\n{3,}/g, '\n\n')
-            // remove any spaces at the start of a line
-            .replace(/\n[ \t]+/g, '\n')
-            .trim());
-    })
-        .join('\n');
-};
-// normalize for markdown printing, remove leading spaces on lines
-const normalizeMarkdown = (s, pre = false) => {
-    const n = normalize(s, pre).replace(/\\/g, '\\\\');
-    return pre ?
-        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
-        : n.replace(/\n +/g, '\n').trim();
-};
-const normalizeOneLine = (s, pre = false) => {
-    const n = normalize(s, pre)
-        .replace(/[\s\u200b]+/g, ' ')
-        .trim();
-    return pre ? `\`${n}\`` : n;
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js
deleted file mode 100644
index 42be74c37ad9d..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/index.js
+++ /dev/null
@@ -1,1981 +0,0 @@
-import { LRUCache } from 'lru-cache';
-import { posix, win32 } from 'node:path';
-import { fileURLToPath } from 'node:url';
-import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
-import * as actualFS from 'node:fs';
-const realpathSync = rps.native;
-// TODO: test perf of fs/promises realpath vs realpathCB,
-// since the promises one uses realpath.native
-import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
-import { Minipass } from 'minipass';
-const defaultFS = {
-    lstatSync,
-    readdir: readdirCB,
-    readdirSync,
-    readlinkSync,
-    realpathSync,
-    promises: {
-        lstat,
-        readdir,
-        readlink,
-        realpath,
-    },
-};
-// if they just gave us require('fs') then use our default
-const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
-    defaultFS
-    : {
-        ...defaultFS,
-        ...fsOption,
-        promises: {
-            ...defaultFS.promises,
-            ...(fsOption.promises || {}),
-        },
-    };
-// turn something like //?/c:/ into c:\
-const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
-const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
-// windows paths are separated by either / or \
-const eitherSep = /[\\\/]/;
-const UNKNOWN = 0; // may not even exist, for all we know
-const IFIFO = 0b0001;
-const IFCHR = 0b0010;
-const IFDIR = 0b0100;
-const IFBLK = 0b0110;
-const IFREG = 0b1000;
-const IFLNK = 0b1010;
-const IFSOCK = 0b1100;
-const IFMT = 0b1111;
-// mask to unset low 4 bits
-const IFMT_UNKNOWN = ~IFMT;
-// set after successfully calling readdir() and getting entries.
-const READDIR_CALLED = 0b0000_0001_0000;
-// set after a successful lstat()
-const LSTAT_CALLED = 0b0000_0010_0000;
-// set if an entry (or one of its parents) is definitely not a dir
-const ENOTDIR = 0b0000_0100_0000;
-// set if an entry (or one of its parents) does not exist
-// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
-const ENOENT = 0b0000_1000_0000;
-// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
-// set if we fail to readlink
-const ENOREADLINK = 0b0001_0000_0000;
-// set if we know realpath() will fail
-const ENOREALPATH = 0b0010_0000_0000;
-const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
-const TYPEMASK = 0b0011_1111_1111;
-const entToType = (s) => s.isFile() ? IFREG
-    : s.isDirectory() ? IFDIR
-        : s.isSymbolicLink() ? IFLNK
-            : s.isCharacterDevice() ? IFCHR
-                : s.isBlockDevice() ? IFBLK
-                    : s.isSocket() ? IFSOCK
-                        : s.isFIFO() ? IFIFO
-                            : UNKNOWN;
-// normalize unicode path names
-const normalizeCache = new Map();
-const normalize = (s) => {
-    const c = normalizeCache.get(s);
-    if (c)
-        return c;
-    const n = s.normalize('NFKD');
-    normalizeCache.set(s, n);
-    return n;
-};
-const normalizeNocaseCache = new Map();
-const normalizeNocase = (s) => {
-    const c = normalizeNocaseCache.get(s);
-    if (c)
-        return c;
-    const n = normalize(s.toLowerCase());
-    normalizeNocaseCache.set(s, n);
-    return n;
-};
-/**
- * An LRUCache for storing resolved path strings or Path objects.
- * @internal
- */
-export class ResolveCache extends LRUCache {
-    constructor() {
-        super({ max: 256 });
-    }
-}
-// In order to prevent blowing out the js heap by allocating hundreds of
-// thousands of Path entries when walking extremely large trees, the "children"
-// in this tree are represented by storing an array of Path entries in an
-// LRUCache, indexed by the parent.  At any time, Path.children() may return an
-// empty array, indicating that it doesn't know about any of its children, and
-// thus has to rebuild that cache.  This is fine, it just means that we don't
-// benefit as much from having the cached entries, but huge directory walks
-// don't blow out the stack, and smaller ones are still as fast as possible.
-//
-// It does impose some complexity when building up the readdir data, because we
-// need to pass a reference to the children array that we started with.
-/**
- * an LRUCache for storing child entries.
- * @internal
- */
-export class ChildrenCache extends LRUCache {
-    constructor(maxSize = 16 * 1024) {
-        super({
-            maxSize,
-            // parent + children
-            sizeCalculation: a => a.length + 1,
-        });
-    }
-}
-const setAsCwd = Symbol('PathScurry setAsCwd');
-/**
- * Path objects are sort of like a super-powered
- * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
- *
- * Each one represents a single filesystem entry on disk, which may or may not
- * exist. It includes methods for reading various types of information via
- * lstat, readlink, and readdir, and caches all information to the greatest
- * degree possible.
- *
- * Note that fs operations that would normally throw will instead return an
- * "empty" value. This is in order to prevent excessive overhead from error
- * stack traces.
- */
-export class PathBase {
-    /**
-     * the basename of this path
-     *
-     * **Important**: *always* test the path name against any test string
-     * using the {@link isNamed} method, and not by directly comparing this
-     * string. Otherwise, unicode path strings that the system sees as identical
-     * will not be properly treated as the same path, leading to incorrect
-     * behavior and possible security issues.
-     */
-    name;
-    /**
-     * the Path entry corresponding to the path root.
-     *
-     * @internal
-     */
-    root;
-    /**
-     * All roots found within the current PathScurry family
-     *
-     * @internal
-     */
-    roots;
-    /**
-     * a reference to the parent path, or undefined in the case of root entries
-     *
-     * @internal
-     */
-    parent;
-    /**
-     * boolean indicating whether paths are compared case-insensitively
-     * @internal
-     */
-    nocase;
-    /**
-     * boolean indicating that this path is the current working directory
-     * of the PathScurry collection that contains it.
-     */
-    isCWD = false;
-    // potential default fs override
-    #fs;
-    // Stats fields
-    #dev;
-    get dev() {
-        return this.#dev;
-    }
-    #mode;
-    get mode() {
-        return this.#mode;
-    }
-    #nlink;
-    get nlink() {
-        return this.#nlink;
-    }
-    #uid;
-    get uid() {
-        return this.#uid;
-    }
-    #gid;
-    get gid() {
-        return this.#gid;
-    }
-    #rdev;
-    get rdev() {
-        return this.#rdev;
-    }
-    #blksize;
-    get blksize() {
-        return this.#blksize;
-    }
-    #ino;
-    get ino() {
-        return this.#ino;
-    }
-    #size;
-    get size() {
-        return this.#size;
-    }
-    #blocks;
-    get blocks() {
-        return this.#blocks;
-    }
-    #atimeMs;
-    get atimeMs() {
-        return this.#atimeMs;
-    }
-    #mtimeMs;
-    get mtimeMs() {
-        return this.#mtimeMs;
-    }
-    #ctimeMs;
-    get ctimeMs() {
-        return this.#ctimeMs;
-    }
-    #birthtimeMs;
-    get birthtimeMs() {
-        return this.#birthtimeMs;
-    }
-    #atime;
-    get atime() {
-        return this.#atime;
-    }
-    #mtime;
-    get mtime() {
-        return this.#mtime;
-    }
-    #ctime;
-    get ctime() {
-        return this.#ctime;
-    }
-    #birthtime;
-    get birthtime() {
-        return this.#birthtime;
-    }
-    #matchName;
-    #depth;
-    #fullpath;
-    #fullpathPosix;
-    #relative;
-    #relativePosix;
-    #type;
-    #children;
-    #linkTarget;
-    #realpath;
-    /**
-     * This property is for compatibility with the Dirent class as of
-     * Node v20, where Dirent['parentPath'] refers to the path of the
-     * directory that was passed to readdir. For root entries, it's the path
-     * to the entry itself.
-     */
-    get parentPath() {
-        return (this.parent || this).fullpath();
-    }
-    /**
-     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
-     * this property refers to the *parent* path, not the path object itself.
-     *
-     * @deprecated
-     */
-    get path() {
-        return this.parentPath;
-    }
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        this.name = name;
-        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
-        this.#type = type & TYPEMASK;
-        this.nocase = nocase;
-        this.roots = roots;
-        this.root = root || this;
-        this.#children = children;
-        this.#fullpath = opts.fullpath;
-        this.#relative = opts.relative;
-        this.#relativePosix = opts.relativePosix;
-        this.parent = opts.parent;
-        if (this.parent) {
-            this.#fs = this.parent.#fs;
-        }
-        else {
-            this.#fs = fsFromOption(opts.fs);
-        }
-    }
-    /**
-     * Returns the depth of the Path object from its root.
-     *
-     * For example, a path at `/foo/bar` would have a depth of 2.
-     */
-    depth() {
-        if (this.#depth !== undefined)
-            return this.#depth;
-        if (!this.parent)
-            return (this.#depth = 0);
-        return (this.#depth = this.parent.depth() + 1);
-    }
-    /**
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Get the Path object referenced by the string path, resolved from this Path
-     */
-    resolve(path) {
-        if (!path) {
-            return this;
-        }
-        const rootPath = this.getRootString(path);
-        const dir = path.substring(rootPath.length);
-        const dirParts = dir.split(this.splitSep);
-        const result = rootPath ?
-            this.getRoot(rootPath).#resolveParts(dirParts)
-            : this.#resolveParts(dirParts);
-        return result;
-    }
-    #resolveParts(dirParts) {
-        let p = this;
-        for (const part of dirParts) {
-            p = p.child(part);
-        }
-        return p;
-    }
-    /**
-     * Returns the cached children Path objects, if still available.  If they
-     * have fallen out of the cache, then returns an empty array, and resets the
-     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
-     * lookup.
-     *
-     * @internal
-     */
-    children() {
-        const cached = this.#children.get(this);
-        if (cached) {
-            return cached;
-        }
-        const children = Object.assign([], { provisional: 0 });
-        this.#children.set(this, children);
-        this.#type &= ~READDIR_CALLED;
-        return children;
-    }
-    /**
-     * Resolves a path portion and returns or creates the child Path.
-     *
-     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
-     * `'..'`.
-     *
-     * This should not be called directly.  If `pathPart` contains any path
-     * separators, it will lead to unsafe undefined behavior.
-     *
-     * Use `Path.resolve()` instead.
-     *
-     * @internal
-     */
-    child(pathPart, opts) {
-        if (pathPart === '' || pathPart === '.') {
-            return this;
-        }
-        if (pathPart === '..') {
-            return this.parent || this;
-        }
-        // find the child
-        const children = this.children();
-        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
-        for (const p of children) {
-            if (p.#matchName === name) {
-                return p;
-            }
-        }
-        // didn't find it, create provisional child, since it might not
-        // actually exist.  If we know the parent isn't a dir, then
-        // in fact it CAN'T exist.
-        const s = this.parent ? this.sep : '';
-        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
-        const pchild = this.newChild(pathPart, UNKNOWN, {
-            ...opts,
-            parent: this,
-            fullpath,
-        });
-        if (!this.canReaddir()) {
-            pchild.#type |= ENOENT;
-        }
-        // don't have to update provisional, because if we have real children,
-        // then provisional is set to children.length, otherwise a lower number
-        children.push(pchild);
-        return pchild;
-    }
-    /**
-     * The relative path from the cwd. If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpath()
-     */
-    relative() {
-        if (this.isCWD)
-            return '';
-        if (this.#relative !== undefined) {
-            return this.#relative;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relative = this.name);
-        }
-        const pv = p.relative();
-        return pv + (!pv || !p.parent ? '' : this.sep) + name;
-    }
-    /**
-     * The relative path from the cwd, using / as the path separator.
-     * If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpathPosix()
-     * On posix systems, this is identical to relative().
-     */
-    relativePosix() {
-        if (this.sep === '/')
-            return this.relative();
-        if (this.isCWD)
-            return '';
-        if (this.#relativePosix !== undefined)
-            return this.#relativePosix;
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relativePosix = this.fullpathPosix());
-        }
-        const pv = p.relativePosix();
-        return pv + (!pv || !p.parent ? '' : '/') + name;
-    }
-    /**
-     * The fully resolved path string for this Path entry
-     */
-    fullpath() {
-        if (this.#fullpath !== undefined) {
-            return this.#fullpath;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#fullpath = this.name);
-        }
-        const pv = p.fullpath();
-        const fp = pv + (!p.parent ? '' : this.sep) + name;
-        return (this.#fullpath = fp);
-    }
-    /**
-     * On platforms other than windows, this is identical to fullpath.
-     *
-     * On windows, this is overridden to return the forward-slash form of the
-     * full UNC path.
-     */
-    fullpathPosix() {
-        if (this.#fullpathPosix !== undefined)
-            return this.#fullpathPosix;
-        if (this.sep === '/')
-            return (this.#fullpathPosix = this.fullpath());
-        if (!this.parent) {
-            const p = this.fullpath().replace(/\\/g, '/');
-            if (/^[a-z]:\//i.test(p)) {
-                return (this.#fullpathPosix = `//?/${p}`);
-            }
-            else {
-                return (this.#fullpathPosix = p);
-            }
-        }
-        const p = this.parent;
-        const pfpp = p.fullpathPosix();
-        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
-        return (this.#fullpathPosix = fpp);
-    }
-    /**
-     * Is the Path of an unknown type?
-     *
-     * Note that we might know *something* about it if there has been a previous
-     * filesystem operation, for example that it does not exist, or is not a
-     * link, or whether it has child entries.
-     */
-    isUnknown() {
-        return (this.#type & IFMT) === UNKNOWN;
-    }
-    isType(type) {
-        return this[`is${type}`]();
-    }
-    getType() {
-        return (this.isUnknown() ? 'Unknown'
-            : this.isDirectory() ? 'Directory'
-                : this.isFile() ? 'File'
-                    : this.isSymbolicLink() ? 'SymbolicLink'
-                        : this.isFIFO() ? 'FIFO'
-                            : this.isCharacterDevice() ? 'CharacterDevice'
-                                : this.isBlockDevice() ? 'BlockDevice'
-                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
-                                        : 'Unknown');
-        /* c8 ignore stop */
-    }
-    /**
-     * Is the Path a regular file?
-     */
-    isFile() {
-        return (this.#type & IFMT) === IFREG;
-    }
-    /**
-     * Is the Path a directory?
-     */
-    isDirectory() {
-        return (this.#type & IFMT) === IFDIR;
-    }
-    /**
-     * Is the path a character device?
-     */
-    isCharacterDevice() {
-        return (this.#type & IFMT) === IFCHR;
-    }
-    /**
-     * Is the path a block device?
-     */
-    isBlockDevice() {
-        return (this.#type & IFMT) === IFBLK;
-    }
-    /**
-     * Is the path a FIFO pipe?
-     */
-    isFIFO() {
-        return (this.#type & IFMT) === IFIFO;
-    }
-    /**
-     * Is the path a socket?
-     */
-    isSocket() {
-        return (this.#type & IFMT) === IFSOCK;
-    }
-    /**
-     * Is the path a symbolic link?
-     */
-    isSymbolicLink() {
-        return (this.#type & IFLNK) === IFLNK;
-    }
-    /**
-     * Return the entry if it has been subject of a successful lstat, or
-     * undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* simply
-     * mean that we haven't called lstat on it.
-     */
-    lstatCached() {
-        return this.#type & LSTAT_CALLED ? this : undefined;
-    }
-    /**
-     * Return the cached link target if the entry has been the subject of a
-     * successful readlink, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readlink() has been called at some point.
-     */
-    readlinkCached() {
-        return this.#linkTarget;
-    }
-    /**
-     * Returns the cached realpath target if the entry has been the subject
-     * of a successful realpath, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * realpath() has been called at some point.
-     */
-    realpathCached() {
-        return this.#realpath;
-    }
-    /**
-     * Returns the cached child Path entries array if the entry has been the
-     * subject of a successful readdir(), or [] otherwise.
-     *
-     * Does not read the filesystem, so an empty array *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readdir() has been called recently enough to still be valid.
-     */
-    readdirCached() {
-        const children = this.children();
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
-     * any indication that readlink will definitely fail.
-     *
-     * Returns false if the path is known to not be a symlink, if a previous
-     * readlink failed, or if the entry does not exist.
-     */
-    canReadlink() {
-        if (this.#linkTarget)
-            return true;
-        if (!this.parent)
-            return false;
-        // cases where it cannot possibly succeed
-        const ifmt = this.#type & IFMT;
-        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
-            this.#type & ENOREADLINK ||
-            this.#type & ENOENT);
-    }
-    /**
-     * Return true if readdir has previously been successfully called on this
-     * path, indicating that cachedReaddir() is likely valid.
-     */
-    calledReaddir() {
-        return !!(this.#type & READDIR_CALLED);
-    }
-    /**
-     * Returns true if the path is known to not exist. That is, a previous lstat
-     * or readdir failed to verify its existence when that would have been
-     * expected, or a parent entry was marked either enoent or enotdir.
-     */
-    isENOENT() {
-        return !!(this.#type & ENOENT);
-    }
-    /**
-     * Return true if the path is a match for the given path name.  This handles
-     * case sensitivity and unicode normalization.
-     *
-     * Note: even on case-sensitive systems, it is **not** safe to test the
-     * equality of the `.name` property to determine whether a given pathname
-     * matches, due to unicode normalization mismatches.
-     *
-     * Always use this method instead of testing the `path.name` property
-     * directly.
-     */
-    isNamed(n) {
-        return !this.nocase ?
-            this.#matchName === normalize(n)
-            : this.#matchName === normalizeNocase(n);
-    }
-    /**
-     * Return the Path object corresponding to the target of a symbolic link.
-     *
-     * If the Path is not a symbolic link, or if the readlink call fails for any
-     * reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     */
-    async readlink() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = await this.#fs.promises.readlink(this.fullpath());
-            const linkTarget = (await this.parent.realpath())?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    /**
-     * Synchronous {@link PathBase.readlink}
-     */
-    readlinkSync() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = this.#fs.readlinkSync(this.fullpath());
-            const linkTarget = this.parent.realpathSync()?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    #readdirSuccess(children) {
-        // succeeded, mark readdir called bit
-        this.#type |= READDIR_CALLED;
-        // mark all remaining provisional children as ENOENT
-        for (let p = children.provisional; p < children.length; p++) {
-            const c = children[p];
-            if (c)
-                c.#markENOENT();
-        }
-    }
-    #markENOENT() {
-        // mark as UNKNOWN and ENOENT
-        if (this.#type & ENOENT)
-            return;
-        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
-        this.#markChildrenENOENT();
-    }
-    #markChildrenENOENT() {
-        // all children are provisional and do not exist
-        const children = this.children();
-        children.provisional = 0;
-        for (const p of children) {
-            p.#markENOENT();
-        }
-    }
-    #markENOREALPATH() {
-        this.#type |= ENOREALPATH;
-        this.#markENOTDIR();
-    }
-    // save the information when we know the entry is not a dir
-    #markENOTDIR() {
-        // entry is not a directory, so any children can't exist.
-        // this *should* be impossible, since any children created
-        // after it's been marked ENOTDIR should be marked ENOENT,
-        // so it won't even get to this point.
-        /* c8 ignore start */
-        if (this.#type & ENOTDIR)
-            return;
-        /* c8 ignore stop */
-        let t = this.#type;
-        // this could happen if we stat a dir, then delete it,
-        // then try to read it or one of its children.
-        if ((t & IFMT) === IFDIR)
-            t &= IFMT_UNKNOWN;
-        this.#type = t | ENOTDIR;
-        this.#markChildrenENOENT();
-    }
-    #readdirFail(code = '') {
-        // markENOTDIR and markENOENT also set provisional=0
-        if (code === 'ENOTDIR' || code === 'EPERM') {
-            this.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            this.#markENOENT();
-        }
-        else {
-            this.children().provisional = 0;
-        }
-    }
-    #lstatFail(code = '') {
-        // Windows just raises ENOENT in this case, disable for win CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR') {
-            // already know it has a parent by this point
-            const p = this.parent;
-            p.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            /* c8 ignore stop */
-            this.#markENOENT();
-        }
-    }
-    #readlinkFail(code = '') {
-        let ter = this.#type;
-        ter |= ENOREADLINK;
-        if (code === 'ENOENT')
-            ter |= ENOENT;
-        // windows gets a weird error when you try to readlink a file
-        if (code === 'EINVAL' || code === 'UNKNOWN') {
-            // exists, but not a symlink, we don't know WHAT it is, so remove
-            // all IFMT bits.
-            ter &= IFMT_UNKNOWN;
-        }
-        this.#type = ter;
-        // windows just gets ENOENT in this case.  We do cover the case,
-        // just disabled because it's impossible on Windows CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR' && this.parent) {
-            this.parent.#markENOTDIR();
-        }
-        /* c8 ignore stop */
-    }
-    #readdirAddChild(e, c) {
-        return (this.#readdirMaybePromoteChild(e, c) ||
-            this.#readdirAddNewChild(e, c));
-    }
-    #readdirAddNewChild(e, c) {
-        // alloc new entry at head, so it's never provisional
-        const type = entToType(e);
-        const child = this.newChild(e.name, type, { parent: this });
-        const ifmt = child.#type & IFMT;
-        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
-            child.#type |= ENOTDIR;
-        }
-        c.unshift(child);
-        c.provisional++;
-        return child;
-    }
-    #readdirMaybePromoteChild(e, c) {
-        for (let p = c.provisional; p < c.length; p++) {
-            const pchild = c[p];
-            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
-            if (name !== pchild.#matchName) {
-                continue;
-            }
-            return this.#readdirPromoteChild(e, pchild, p, c);
-        }
-    }
-    #readdirPromoteChild(e, p, index, c) {
-        const v = p.name;
-        // retain any other flags, but set ifmt from dirent
-        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
-        // case sensitivity fixing when we learn the true name.
-        if (v !== e.name)
-            p.name = e.name;
-        // just advance provisional index (potentially off the list),
-        // otherwise we have to splice/pop it out and re-insert at head
-        if (index !== c.provisional) {
-            if (index === c.length - 1)
-                c.pop();
-            else
-                c.splice(index, 1);
-            c.unshift(p);
-        }
-        c.provisional++;
-        return p;
-    }
-    /**
-     * Call lstat() on this Path, and update all known information that can be
-     * determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    /**
-     * synchronous {@link PathBase.lstat}
-     */
-    lstatSync() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    #applyStat(st) {
-        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
-        this.#atime = atime;
-        this.#atimeMs = atimeMs;
-        this.#birthtime = birthtime;
-        this.#birthtimeMs = birthtimeMs;
-        this.#blksize = blksize;
-        this.#blocks = blocks;
-        this.#ctime = ctime;
-        this.#ctimeMs = ctimeMs;
-        this.#dev = dev;
-        this.#gid = gid;
-        this.#ino = ino;
-        this.#mode = mode;
-        this.#mtime = mtime;
-        this.#mtimeMs = mtimeMs;
-        this.#nlink = nlink;
-        this.#rdev = rdev;
-        this.#size = size;
-        this.#uid = uid;
-        const ifmt = entToType(st);
-        // retain any other flags, but set the ifmt
-        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
-        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
-            this.#type |= ENOTDIR;
-        }
-    }
-    #onReaddirCB = [];
-    #readdirCBInFlight = false;
-    #callOnReaddirCB(children) {
-        this.#readdirCBInFlight = false;
-        const cbs = this.#onReaddirCB.slice();
-        this.#onReaddirCB.length = 0;
-        cbs.forEach(cb => cb(null, children));
-    }
-    /**
-     * Standard node-style callback interface to get list of directory entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     *
-     * @param cb The callback called with (er, entries).  Note that the `er`
-     * param is somewhat extraneous, as all readdir() errors are handled and
-     * simply result in an empty set of entries being returned.
-     * @param allowZalgo Boolean indicating that immediately known results should
-     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
-     * zalgo at your peril, the dark pony lord is devious and unforgiving.
-     */
-    readdirCB(cb, allowZalgo = false) {
-        if (!this.canReaddir()) {
-            if (allowZalgo)
-                cb(null, []);
-            else
-                queueMicrotask(() => cb(null, []));
-            return;
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            const c = children.slice(0, children.provisional);
-            if (allowZalgo)
-                cb(null, c);
-            else
-                queueMicrotask(() => cb(null, c));
-            return;
-        }
-        // don't have to worry about zalgo at this point.
-        this.#onReaddirCB.push(cb);
-        if (this.#readdirCBInFlight) {
-            return;
-        }
-        this.#readdirCBInFlight = true;
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
-            if (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            else {
-                // if we didn't get an error, we always get entries.
-                //@ts-ignore
-                for (const e of entries) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            this.#callOnReaddirCB(children.slice(0, children.provisional));
-            return;
-        });
-    }
-    #asyncReaddirInFlight;
-    /**
-     * Return an array of known child entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async readdir() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        if (this.#asyncReaddirInFlight) {
-            await this.#asyncReaddirInFlight;
-        }
-        else {
-            /* c8 ignore start */
-            let resolve = () => { };
-            /* c8 ignore stop */
-            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
-            try {
-                for (const e of await this.#fs.promises.readdir(fullpath, {
-                    withFileTypes: true,
-                })) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            catch (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            this.#asyncReaddirInFlight = undefined;
-            resolve();
-        }
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * synchronous {@link PathBase.readdir}
-     */
-    readdirSync() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        try {
-            for (const e of this.#fs.readdirSync(fullpath, {
-                withFileTypes: true,
-            })) {
-                this.#readdirAddChild(e, children);
-            }
-            this.#readdirSuccess(children);
-        }
-        catch (er) {
-            this.#readdirFail(er.code);
-            children.provisional = 0;
-        }
-        return children.slice(0, children.provisional);
-    }
-    canReaddir() {
-        if (this.#type & ENOCHILD)
-            return false;
-        const ifmt = IFMT & this.#type;
-        // we always set ENOTDIR when setting IFMT, so should be impossible
-        /* c8 ignore start */
-        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
-            return false;
-        }
-        /* c8 ignore stop */
-        return true;
-    }
-    shouldWalk(dirs, walkFilter) {
-        return ((this.#type & IFDIR) === IFDIR &&
-            !(this.#type & ENOCHILD) &&
-            !dirs.has(this) &&
-            (!walkFilter || walkFilter(this)));
-    }
-    /**
-     * Return the Path object corresponding to path as resolved
-     * by realpath(3).
-     *
-     * If the realpath call fails for any reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     * On success, returns a Path object.
-     */
-    async realpath() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = await this.#fs.promises.realpath(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Synchronous {@link realpath}
-     */
-    realpathSync() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = this.#fs.realpathSync(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Internal method to mark this Path object as the scurry cwd,
-     * called by {@link PathScurry#chdir}
-     *
-     * @internal
-     */
-    [setAsCwd](oldCwd) {
-        if (oldCwd === this)
-            return;
-        oldCwd.isCWD = false;
-        this.isCWD = true;
-        const changed = new Set([]);
-        let rp = [];
-        let p = this;
-        while (p && p.parent) {
-            changed.add(p);
-            p.#relative = rp.join(this.sep);
-            p.#relativePosix = rp.join('/');
-            p = p.parent;
-            rp.push('..');
-        }
-        // now un-memoize parents of old cwd
-        p = oldCwd;
-        while (p && p.parent && !changed.has(p)) {
-            p.#relative = undefined;
-            p.#relativePosix = undefined;
-            p = p.parent;
-        }
-    }
-}
-/**
- * Path class used on win32 systems
- *
- * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
- * as the path separator for parsing paths.
- */
-export class PathWin32 extends PathBase {
-    /**
-     * Separator for generating path strings.
-     */
-    sep = '\\';
-    /**
-     * Separator for parsing path strings.
-     */
-    splitSep = eitherSep;
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return win32.parse(path).root;
-    }
-    /**
-     * @internal
-     */
-    getRoot(rootPath) {
-        rootPath = uncToDrive(rootPath.toUpperCase());
-        if (rootPath === this.root.name) {
-            return this.root;
-        }
-        // ok, not that one, check if it matches another we know about
-        for (const [compare, root] of Object.entries(this.roots)) {
-            if (this.sameRoot(rootPath, compare)) {
-                return (this.roots[rootPath] = root);
-            }
-        }
-        // otherwise, have to create a new one.
-        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
-    }
-    /**
-     * @internal
-     */
-    sameRoot(rootPath, compare = this.root.name) {
-        // windows can (rarely) have case-sensitive filesystem, but
-        // UNC and drive letters are always case-insensitive, and canonically
-        // represented uppercase.
-        rootPath = rootPath
-            .toUpperCase()
-            .replace(/\//g, '\\')
-            .replace(uncDriveRegexp, '$1\\');
-        return rootPath === compare;
-    }
-}
-/**
- * Path class used on all posix systems.
- *
- * Uses `'/'` as the path separator.
- */
-export class PathPosix extends PathBase {
-    /**
-     * separator for parsing path strings
-     */
-    splitSep = '/';
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return path.startsWith('/') ? '/' : '';
-    }
-    /**
-     * @internal
-     */
-    getRoot(_rootPath) {
-        return this.root;
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-}
-/**
- * The base class for all PathScurry classes, providing the interface for path
- * resolution and filesystem operations.
- *
- * Typically, you should *not* instantiate this class directly, but rather one
- * of the platform-specific classes, or the exported {@link PathScurry} which
- * defaults to the current platform.
- */
-export class PathScurryBase {
-    /**
-     * The root Path entry for the current working directory of this Scurry
-     */
-    root;
-    /**
-     * The string path for the root of this Scurry's current working directory
-     */
-    rootPath;
-    /**
-     * A collection of all roots encountered, referenced by rootPath
-     */
-    roots;
-    /**
-     * The Path entry corresponding to this PathScurry's current working directory.
-     */
-    cwd;
-    #resolveCache;
-    #resolvePosixCache;
-    #children;
-    /**
-     * Perform path comparisons case-insensitively.
-     *
-     * Defaults true on Darwin and Windows systems, false elsewhere.
-     */
-    nocase;
-    #fs;
-    /**
-     * This class should not be instantiated directly.
-     *
-     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
-     *
-     * @internal
-     */
-    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
-        this.#fs = fsFromOption(fs);
-        if (cwd instanceof URL || cwd.startsWith('file://')) {
-            cwd = fileURLToPath(cwd);
-        }
-        // resolve and split root, and then add to the store.
-        // this is the only time we call path.resolve()
-        const cwdPath = pathImpl.resolve(cwd);
-        this.roots = Object.create(null);
-        this.rootPath = this.parseRootPath(cwdPath);
-        this.#resolveCache = new ResolveCache();
-        this.#resolvePosixCache = new ResolveCache();
-        this.#children = new ChildrenCache(childrenCacheSize);
-        const split = cwdPath.substring(this.rootPath.length).split(sep);
-        // resolve('/') leaves '', splits to [''], we don't want that.
-        if (split.length === 1 && !split[0]) {
-            split.pop();
-        }
-        /* c8 ignore start */
-        if (nocase === undefined) {
-            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
-        }
-        /* c8 ignore stop */
-        this.nocase = nocase;
-        this.root = this.newRoot(this.#fs);
-        this.roots[this.rootPath] = this.root;
-        let prev = this.root;
-        let len = split.length - 1;
-        const joinSep = pathImpl.sep;
-        let abs = this.rootPath;
-        let sawFirst = false;
-        for (const part of split) {
-            const l = len--;
-            prev = prev.child(part, {
-                relative: new Array(l).fill('..').join(joinSep),
-                relativePosix: new Array(l).fill('..').join('/'),
-                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
-            });
-            sawFirst = true;
-        }
-        this.cwd = prev;
-    }
-    /**
-     * Get the depth of a provided path, string, or the cwd
-     */
-    depth(path = this.cwd) {
-        if (typeof path === 'string') {
-            path = this.cwd.resolve(path);
-        }
-        return path.depth();
-    }
-    /**
-     * Return the cache of child entries.  Exposed so subclasses can create
-     * child Path objects in a platform-specific way.
-     *
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolve(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolveCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpath();
-        this.#resolveCache.set(r, result);
-        return result;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string, returning
-     * the posix path.  Identical to .resolve() on posix systems, but on
-     * windows will return a forward-slash separated UNC path.
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolvePosix(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolvePosixCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpathPosix();
-        this.#resolvePosixCache.set(r, result);
-        return result;
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or entry
-     */
-    relative(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relative();
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or
-     * entry, using / as the path delimiter, even on Windows.
-     */
-    relativePosix(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relativePosix();
-    }
-    /**
-     * Return the basename for the provided string or Path object
-     */
-    basename(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.name;
-    }
-    /**
-     * Return the dirname for the provided string or Path object
-     */
-    dirname(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return (entry.parent || entry).fullpath();
-    }
-    async readdir(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else {
-            const p = await entry.readdir();
-            return withFileTypes ? p : p.map(e => e.name);
-        }
-    }
-    readdirSync(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else if (withFileTypes) {
-            return entry.readdirSync();
-        }
-        else {
-            return entry.readdirSync().map(e => e.name);
-        }
-    }
-    /**
-     * Call lstat() on the string or Path object, and update all known
-     * information that can be determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstat();
-    }
-    /**
-     * synchronous {@link PathScurryBase.lstat}
-     */
-    lstatSync(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstatSync();
-    }
-    async readlink(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.readlink();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    readlinkSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.readlinkSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async realpath(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.realpath();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    realpathSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.realpathSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async walk(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const walk = (dir, cb) => {
-            dirs.add(dir);
-            dir.readdirCB((er, entries) => {
-                /* c8 ignore start */
-                if (er) {
-                    return cb(er);
-                }
-                /* c8 ignore stop */
-                let len = entries.length;
-                if (!len)
-                    return cb();
-                const next = () => {
-                    if (--len === 0) {
-                        cb();
-                    }
-                };
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        results.push(withFileTypes ? e : e.fullpath());
-                    }
-                    if (follow && e.isSymbolicLink()) {
-                        e.realpath()
-                            .then(r => (r?.isUnknown() ? r.lstat() : r))
-                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
-                    }
-                    else {
-                        if (e.shouldWalk(dirs, walkFilter)) {
-                            walk(e, next);
-                        }
-                        else {
-                            next();
-                        }
-                    }
-                }
-            }, true); // zalgooooooo
-        };
-        const start = entry;
-        return new Promise((res, rej) => {
-            walk(start, er => {
-                /* c8 ignore start */
-                if (er)
-                    return rej(er);
-                /* c8 ignore stop */
-                res(results);
-            });
-        });
-    }
-    walkSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    results.push(withFileTypes ? e : e.fullpath());
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-        return results;
-    }
-    /**
-     * Support for `for await`
-     *
-     * Alias for {@link PathScurryBase.iterate}
-     *
-     * Note: As of Node 19, this is very slow, compared to other methods of
-     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
-     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
-     */
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-    iterate(entry = this.cwd, options = {}) {
-        // iterating async over the stream is significantly more performant,
-        // especially in the warm-cache scenario, because it buffers up directory
-        // entries in the background instead of waiting for a yield for each one.
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            options = entry;
-            entry = this.cwd;
-        }
-        return this.stream(entry, options)[Symbol.asyncIterator]();
-    }
-    /**
-     * Iterating over a PathScurry performs a synchronous walk.
-     *
-     * Alias for {@link PathScurryBase.iterateSync}
-     */
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    *iterateSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        if (!filter || filter(entry)) {
-            yield withFileTypes ? entry : entry.fullpath();
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    yield withFileTypes ? e : e.fullpath();
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-    }
-    stream(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new Minipass({ objectMode: true });
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const onReaddir = (er, entries, didRealpaths = false) => {
-                    /* c8 ignore start */
-                    if (er)
-                        return results.emit('error', er);
-                    /* c8 ignore stop */
-                    if (follow && !didRealpaths) {
-                        const promises = [];
-                        for (const e of entries) {
-                            if (e.isSymbolicLink()) {
-                                promises.push(e
-                                    .realpath()
-                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
-                            }
-                        }
-                        if (promises.length) {
-                            Promise.all(promises).then(() => onReaddir(null, entries, true));
-                            return;
-                        }
-                    }
-                    for (const e of entries) {
-                        if (e && (!filter || filter(e))) {
-                            if (!results.write(withFileTypes ? e : e.fullpath())) {
-                                paused = true;
-                            }
-                        }
-                    }
-                    processing--;
-                    for (const e of entries) {
-                        const r = e.realpathCached() || e;
-                        if (r.shouldWalk(dirs, walkFilter)) {
-                            queue.push(r);
-                        }
-                    }
-                    if (paused && !results.flowing) {
-                        results.once('drain', process);
-                    }
-                    else if (!sync) {
-                        process();
-                    }
-                };
-                // zalgo containment
-                let sync = true;
-                dir.readdirCB(onReaddir, true);
-                sync = false;
-            }
-        };
-        process();
-        return results;
-    }
-    streamSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new Minipass({ objectMode: true });
-        const dirs = new Set();
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const entries = dir.readdirSync();
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        if (!results.write(withFileTypes ? e : e.fullpath())) {
-                            paused = true;
-                        }
-                    }
-                }
-                processing--;
-                for (const e of entries) {
-                    let r = e;
-                    if (e.isSymbolicLink()) {
-                        if (!(follow && (r = e.realpathSync())))
-                            continue;
-                        if (r.isUnknown())
-                            r.lstatSync();
-                    }
-                    if (r.shouldWalk(dirs, walkFilter)) {
-                        queue.push(r);
-                    }
-                }
-            }
-            if (paused && !results.flowing)
-                results.once('drain', process);
-        };
-        process();
-        return results;
-    }
-    chdir(path = this.cwd) {
-        const oldCwd = this.cwd;
-        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
-        this.cwd[setAsCwd](oldCwd);
-    }
-}
-/**
- * Windows implementation of {@link PathScurryBase}
- *
- * Defaults to case insensitve, uses `'\\'` to generate path strings.  Uses
- * {@link PathWin32} for Path objects.
- */
-export class PathScurryWin32 extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '\\';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, win32, '\\', { ...opts, nocase });
-        this.nocase = nocase;
-        for (let p = this.cwd; p; p = p.parent) {
-            p.nocase = this.nocase;
-        }
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(dir) {
-        // if the path starts with a single separator, it's not a UNC, and we'll
-        // just get separator as the root, and driveFromUNC will return \
-        // In that case, mount \ on the root from the cwd.
-        return win32.parse(dir).root.toUpperCase();
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
-    }
-}
-/**
- * {@link PathScurryBase} implementation for all posix systems other than Darwin.
- *
- * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-export class PathScurryPosix extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = false } = opts;
-        super(cwd, posix, '/', { ...opts, nocase });
-        this.nocase = nocase;
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(_dir) {
-        return '/';
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return p.startsWith('/');
-    }
-}
-/**
- * {@link PathScurryBase} implementation for Darwin (macOS) systems.
- *
- * Defaults to case-insensitive matching, uses `'/'` for generating path
- * strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-export class PathScurryDarwin extends PathScurryPosix {
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, { ...opts, nocase });
-    }
-}
-/**
- * Default {@link PathBase} implementation for the current platform.
- *
- * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
- */
-export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
-/**
- * Default {@link PathScurryBase} implementation for the current platform.
- *
- * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
- * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
- */
-export const PathScurry = process.platform === 'win32' ? PathScurryWin32
-    : process.platform === 'darwin' ? PathScurryDarwin
-        : PathScurryPosix;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json b/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json
deleted file mode 100644
index c3cb39dced545..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/package.json
+++ /dev/null
@@ -1,88 +0,0 @@
-{
-  "name": "path-scurry",
-  "version": "2.0.0",
-  "description": "walk paths fast and efficiently",
-  "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
-  "main": "./dist/commonjs/index.js",
-  "type": "module",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "license": "BlueOak-1.0.0",
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "bench": "bash ./scripts/bench.sh"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@nodelib/fs.walk": "^2.0.0",
-    "@types/node": "^20.14.10",
-    "mkdirp": "^3.0.0",
-    "prettier": "^3.3.2",
-    "rimraf": "^5.0.8",
-    "tap": "^20.0.3",
-    "ts-node": "^10.9.2",
-    "tshy": "^2.0.1",
-    "typedoc": "^0.26.3",
-    "typescript": "^5.5.3"
-  },
-  "tap": {
-    "typecheck": true
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/path-scurry"
-  },
-  "dependencies": {
-    "lru-cache": "^11.0.0",
-    "minipass": "^7.1.2"
-  },
-  "tshy": {
-    "selfLink": false,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/LICENSE b/node_modules/@npmcli/package-json/node_modules/glob/LICENSE
deleted file mode 100644
index ec7df93329abf..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js
deleted file mode 100644
index e1339bbbcf57f..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/glob.js
+++ /dev/null
@@ -1,247 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Glob = void 0;
-const minimatch_1 = require("minimatch");
-const node_url_1 = require("node:url");
-const path_scurry_1 = require("path-scurry");
-const pattern_js_1 = require("./pattern.js");
-const walker_js_1 = require("./walker.js");
-// if no process global, just call it linux.
-// so we default to case-sensitive, / separators
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * An object that can perform glob pattern traversals.
- */
-class Glob {
-    absolute;
-    cwd;
-    root;
-    dot;
-    dotRelative;
-    follow;
-    ignore;
-    magicalBraces;
-    mark;
-    matchBase;
-    maxDepth;
-    nobrace;
-    nocase;
-    nodir;
-    noext;
-    noglobstar;
-    pattern;
-    platform;
-    realpath;
-    scurry;
-    stat;
-    signal;
-    windowsPathsNoEscape;
-    withFileTypes;
-    includeChildMatches;
-    /**
-     * The options provided to the constructor.
-     */
-    opts;
-    /**
-     * An array of parsed immutable {@link Pattern} objects.
-     */
-    patterns;
-    /**
-     * All options are stored as properties on the `Glob` object.
-     *
-     * See {@link GlobOptions} for full options descriptions.
-     *
-     * Note that a previous `Glob` object can be passed as the
-     * `GlobOptions` to another `Glob` instantiation to re-use settings
-     * and caches with a new pattern.
-     *
-     * Traversal functions can be called multiple times to run the walk
-     * again.
-     */
-    constructor(pattern, opts) {
-        /* c8 ignore start */
-        if (!opts)
-            throw new TypeError('glob options required');
-        /* c8 ignore stop */
-        this.withFileTypes = !!opts.withFileTypes;
-        this.signal = opts.signal;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.dotRelative = !!opts.dotRelative;
-        this.nodir = !!opts.nodir;
-        this.mark = !!opts.mark;
-        if (!opts.cwd) {
-            this.cwd = '';
-        }
-        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
-            opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd);
-        }
-        this.cwd = opts.cwd || '';
-        this.root = opts.root;
-        this.magicalBraces = !!opts.magicalBraces;
-        this.nobrace = !!opts.nobrace;
-        this.noext = !!opts.noext;
-        this.realpath = !!opts.realpath;
-        this.absolute = opts.absolute;
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        this.noglobstar = !!opts.noglobstar;
-        this.matchBase = !!opts.matchBase;
-        this.maxDepth =
-            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
-        this.stat = !!opts.stat;
-        this.ignore = opts.ignore;
-        if (this.withFileTypes && this.absolute !== undefined) {
-            throw new Error('cannot set absolute and withFileTypes:true');
-        }
-        if (typeof pattern === 'string') {
-            pattern = [pattern];
-        }
-        this.windowsPathsNoEscape =
-            !!opts.windowsPathsNoEscape ||
-                opts.allowWindowsEscape ===
-                    false;
-        if (this.windowsPathsNoEscape) {
-            pattern = pattern.map(p => p.replace(/\\/g, '/'));
-        }
-        if (this.matchBase) {
-            if (opts.noglobstar) {
-                throw new TypeError('base matching requires globstar');
-            }
-            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
-        }
-        this.pattern = pattern;
-        this.platform = opts.platform || defaultPlatform;
-        this.opts = { ...opts, platform: this.platform };
-        if (opts.scurry) {
-            this.scurry = opts.scurry;
-            if (opts.nocase !== undefined &&
-                opts.nocase !== opts.scurry.nocase) {
-                throw new Error('nocase option contradicts provided scurry option');
-            }
-        }
-        else {
-            const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32
-                : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin
-                    : opts.platform ? path_scurry_1.PathScurryPosix
-                        : path_scurry_1.PathScurry;
-            this.scurry = new Scurry(this.cwd, {
-                nocase: opts.nocase,
-                fs: opts.fs,
-            });
-        }
-        this.nocase = this.scurry.nocase;
-        // If you do nocase:true on a case-sensitive file system, then
-        // we need to use regexps instead of strings for non-magic
-        // path portions, because statting `aBc` won't return results
-        // for the file `AbC` for example.
-        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
-        const mmo = {
-            // default nocase based on platform
-            ...opts,
-            dot: this.dot,
-            matchBase: this.matchBase,
-            nobrace: this.nobrace,
-            nocase: this.nocase,
-            nocaseMagicOnly,
-            nocomment: true,
-            noext: this.noext,
-            nonegate: true,
-            optimizationLevel: 2,
-            platform: this.platform,
-            windowsPathsNoEscape: this.windowsPathsNoEscape,
-            debug: !!this.opts.debug,
-        };
-        const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
-        const [matchSet, globParts] = mms.reduce((set, m) => {
-            set[0].push(...m.set);
-            set[1].push(...m.globParts);
-            return set;
-        }, [[], []]);
-        this.patterns = matchSet.map((set, i) => {
-            const g = globParts[i];
-            /* c8 ignore start */
-            if (!g)
-                throw new Error('invalid pattern object');
-            /* c8 ignore stop */
-            return new pattern_js_1.Pattern(set, g, 0, this.platform);
-        });
-    }
-    async walk() {
-        // Walkers always return array of Path objects, so we just have to
-        // coerce them into the right shape.  It will have already called
-        // realpath() if the option was set to do so, so we know that's cached.
-        // start out knowing the cwd, at least
-        return [
-            ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walk()),
-        ];
-    }
-    walkSync() {
-        return [
-            ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walkSync(),
-        ];
-    }
-    stream() {
-        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).stream();
-    }
-    streamSync() {
-        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).streamSync();
-    }
-    /**
-     * Default sync iteration function. Returns a Generator that
-     * iterates over the results.
-     */
-    iterateSync() {
-        return this.streamSync()[Symbol.iterator]();
-    }
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    /**
-     * Default async iteration function. Returns an AsyncGenerator that
-     * iterates over the results.
-     */
-    iterate() {
-        return this.stream()[Symbol.asyncIterator]();
-    }
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-}
-exports.Glob = Glob;
-//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js
deleted file mode 100644
index 0918bd57e0f1c..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/has-magic.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.hasMagic = void 0;
-const minimatch_1 = require("minimatch");
-/**
- * Return true if the patterns provided contain any magic glob characters,
- * given the options provided.
- *
- * Brace expansion is not considered "magic" unless the `magicalBraces` option
- * is set, as brace expansion just turns one string into an array of strings.
- * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
- * `'xby'` both do not contain any magic glob characters, and it's treated the
- * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
- * is in the options, brace expansion _is_ treated as a pattern having magic.
- */
-const hasMagic = (pattern, options = {}) => {
-    if (!Array.isArray(pattern)) {
-        pattern = [pattern];
-    }
-    for (const p of pattern) {
-        if (new minimatch_1.Minimatch(p, options).hasMagic())
-            return true;
-    }
-    return false;
-};
-exports.hasMagic = hasMagic;
-//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js
deleted file mode 100644
index 5f1fde0680dea..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/ignore.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-// give it a pattern, and it'll be able to tell you if
-// a given path should be ignored.
-// Ignoring a path ignores its children if the pattern ends in /**
-// Ignores are always parsed in dot:true mode
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Ignore = void 0;
-const minimatch_1 = require("minimatch");
-const pattern_js_1 = require("./pattern.js");
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * Class used to process ignored patterns
- */
-class Ignore {
-    relative;
-    relativeChildren;
-    absolute;
-    absoluteChildren;
-    platform;
-    mmopts;
-    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
-        this.relative = [];
-        this.absolute = [];
-        this.relativeChildren = [];
-        this.absoluteChildren = [];
-        this.platform = platform;
-        this.mmopts = {
-            dot: true,
-            nobrace,
-            nocase,
-            noext,
-            noglobstar,
-            optimizationLevel: 2,
-            platform,
-            nocomment: true,
-            nonegate: true,
-        };
-        for (const ign of ignored)
-            this.add(ign);
-    }
-    add(ign) {
-        // this is a little weird, but it gives us a clean set of optimized
-        // minimatch matchers, without getting tripped up if one of them
-        // ends in /** inside a brace section, and it's only inefficient at
-        // the start of the walk, not along it.
-        // It'd be nice if the Pattern class just had a .test() method, but
-        // handling globstars is a bit of a pita, and that code already lives
-        // in minimatch anyway.
-        // Another way would be if maybe Minimatch could take its set/globParts
-        // as an option, and then we could at least just use Pattern to test
-        // for absolute-ness.
-        // Yet another way, Minimatch could take an array of glob strings, and
-        // a cwd option, and do the right thing.
-        const mm = new minimatch_1.Minimatch(ign, this.mmopts);
-        for (let i = 0; i < mm.set.length; i++) {
-            const parsed = mm.set[i];
-            const globParts = mm.globParts[i];
-            /* c8 ignore start */
-            if (!parsed || !globParts) {
-                throw new Error('invalid pattern object');
-            }
-            // strip off leading ./ portions
-            // https://github.com/isaacs/node-glob/issues/570
-            while (parsed[0] === '.' && globParts[0] === '.') {
-                parsed.shift();
-                globParts.shift();
-            }
-            /* c8 ignore stop */
-            const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform);
-            const m = new minimatch_1.Minimatch(p.globString(), this.mmopts);
-            const children = globParts[globParts.length - 1] === '**';
-            const absolute = p.isAbsolute();
-            if (absolute)
-                this.absolute.push(m);
-            else
-                this.relative.push(m);
-            if (children) {
-                if (absolute)
-                    this.absoluteChildren.push(m);
-                else
-                    this.relativeChildren.push(m);
-            }
-        }
-    }
-    ignored(p) {
-        const fullpath = p.fullpath();
-        const fullpaths = `${fullpath}/`;
-        const relative = p.relative() || '.';
-        const relatives = `${relative}/`;
-        for (const m of this.relative) {
-            if (m.match(relative) || m.match(relatives))
-                return true;
-        }
-        for (const m of this.absolute) {
-            if (m.match(fullpath) || m.match(fullpaths))
-                return true;
-        }
-        return false;
-    }
-    childrenIgnored(p) {
-        const fullpath = p.fullpath() + '/';
-        const relative = (p.relative() || '.') + '/';
-        for (const m of this.relativeChildren) {
-            if (m.match(relative))
-                return true;
-        }
-        for (const m of this.absoluteChildren) {
-            if (m.match(fullpath))
-                return true;
-        }
-        return false;
-    }
-}
-exports.Ignore = Ignore;
-//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js
deleted file mode 100644
index 151495d170efa..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/index.js
+++ /dev/null
@@ -1,68 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0;
-exports.globStreamSync = globStreamSync;
-exports.globStream = globStream;
-exports.globSync = globSync;
-exports.globIterateSync = globIterateSync;
-exports.globIterate = globIterate;
-const minimatch_1 = require("minimatch");
-const glob_js_1 = require("./glob.js");
-const has_magic_js_1 = require("./has-magic.js");
-var minimatch_2 = require("minimatch");
-Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } });
-Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } });
-var glob_js_2 = require("./glob.js");
-Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } });
-var has_magic_js_2 = require("./has-magic.js");
-Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } });
-var ignore_js_1 = require("./ignore.js");
-Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } });
-function globStreamSync(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).streamSync();
-}
-function globStream(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).stream();
-}
-function globSync(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).walkSync();
-}
-async function glob_(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).walk();
-}
-function globIterateSync(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).iterateSync();
-}
-function globIterate(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).iterate();
-}
-// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
-exports.streamSync = globStreamSync;
-exports.stream = Object.assign(globStream, { sync: globStreamSync });
-exports.iterateSync = globIterateSync;
-exports.iterate = Object.assign(globIterate, {
-    sync: globIterateSync,
-});
-exports.sync = Object.assign(globSync, {
-    stream: globStreamSync,
-    iterate: globIterateSync,
-});
-exports.glob = Object.assign(glob_, {
-    glob: glob_,
-    globSync,
-    sync: exports.sync,
-    globStream,
-    stream: exports.stream,
-    globStreamSync,
-    streamSync: exports.streamSync,
-    globIterate,
-    iterate: exports.iterate,
-    globIterateSync,
-    iterateSync: exports.iterateSync,
-    Glob: glob_js_1.Glob,
-    hasMagic: has_magic_js_1.hasMagic,
-    escape: minimatch_1.escape,
-    unescape: minimatch_1.unescape,
-});
-exports.glob.glob = exports.glob;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js
deleted file mode 100644
index f0de35fb5bed9..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/pattern.js
+++ /dev/null
@@ -1,219 +0,0 @@
-"use strict";
-// this is just a very light wrapper around 2 arrays with an offset index
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Pattern = void 0;
-const minimatch_1 = require("minimatch");
-const isPatternList = (pl) => pl.length >= 1;
-const isGlobList = (gl) => gl.length >= 1;
-/**
- * An immutable-ish view on an array of glob parts and their parsed
- * results
- */
-class Pattern {
-    #patternList;
-    #globList;
-    #index;
-    length;
-    #platform;
-    #rest;
-    #globString;
-    #isDrive;
-    #isUNC;
-    #isAbsolute;
-    #followGlobstar = true;
-    constructor(patternList, globList, index, platform) {
-        if (!isPatternList(patternList)) {
-            throw new TypeError('empty pattern list');
-        }
-        if (!isGlobList(globList)) {
-            throw new TypeError('empty glob list');
-        }
-        if (globList.length !== patternList.length) {
-            throw new TypeError('mismatched pattern list and glob list lengths');
-        }
-        this.length = patternList.length;
-        if (index < 0 || index >= this.length) {
-            throw new TypeError('index out of range');
-        }
-        this.#patternList = patternList;
-        this.#globList = globList;
-        this.#index = index;
-        this.#platform = platform;
-        // normalize root entries of absolute patterns on initial creation.
-        if (this.#index === 0) {
-            // c: => ['c:/']
-            // C:/ => ['C:/']
-            // C:/x => ['C:/', 'x']
-            // //host/share => ['//host/share/']
-            // //host/share/ => ['//host/share/']
-            // //host/share/x => ['//host/share/', 'x']
-            // /etc => ['/', 'etc']
-            // / => ['/']
-            if (this.isUNC()) {
-                // '' / '' / 'host' / 'share'
-                const [p0, p1, p2, p3, ...prest] = this.#patternList;
-                const [g0, g1, g2, g3, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = [p0, p1, p2, p3, ''].join('/');
-                const g = [g0, g1, g2, g3, ''].join('/');
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-            else if (this.isDrive() || this.isAbsolute()) {
-                const [p1, ...prest] = this.#patternList;
-                const [g1, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = p1 + '/';
-                const g = g1 + '/';
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-        }
-    }
-    /**
-     * The first entry in the parsed list of patterns
-     */
-    pattern() {
-        return this.#patternList[this.#index];
-    }
-    /**
-     * true of if pattern() returns a string
-     */
-    isString() {
-        return typeof this.#patternList[this.#index] === 'string';
-    }
-    /**
-     * true of if pattern() returns GLOBSTAR
-     */
-    isGlobstar() {
-        return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
-    }
-    /**
-     * true if pattern() returns a regexp
-     */
-    isRegExp() {
-        return this.#patternList[this.#index] instanceof RegExp;
-    }
-    /**
-     * The /-joined set of glob parts that make up this pattern
-     */
-    globString() {
-        return (this.#globString =
-            this.#globString ||
-                (this.#index === 0 ?
-                    this.isAbsolute() ?
-                        this.#globList[0] + this.#globList.slice(1).join('/')
-                        : this.#globList.join('/')
-                    : this.#globList.slice(this.#index).join('/')));
-    }
-    /**
-     * true if there are more pattern parts after this one
-     */
-    hasMore() {
-        return this.length > this.#index + 1;
-    }
-    /**
-     * The rest of the pattern after this part, or null if this is the end
-     */
-    rest() {
-        if (this.#rest !== undefined)
-            return this.#rest;
-        if (!this.hasMore())
-            return (this.#rest = null);
-        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
-        this.#rest.#isAbsolute = this.#isAbsolute;
-        this.#rest.#isUNC = this.#isUNC;
-        this.#rest.#isDrive = this.#isDrive;
-        return this.#rest;
-    }
-    /**
-     * true if the pattern represents a //unc/path/ on windows
-     */
-    isUNC() {
-        const pl = this.#patternList;
-        return this.#isUNC !== undefined ?
-            this.#isUNC
-            : (this.#isUNC =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    pl[0] === '' &&
-                    pl[1] === '' &&
-                    typeof pl[2] === 'string' &&
-                    !!pl[2] &&
-                    typeof pl[3] === 'string' &&
-                    !!pl[3]);
-    }
-    // pattern like C:/...
-    // split = ['C:', ...]
-    // XXX: would be nice to handle patterns like `c:*` to test the cwd
-    // in c: for *, but I don't know of a way to even figure out what that
-    // cwd is without actually chdir'ing into it?
-    /**
-     * True if the pattern starts with a drive letter on Windows
-     */
-    isDrive() {
-        const pl = this.#patternList;
-        return this.#isDrive !== undefined ?
-            this.#isDrive
-            : (this.#isDrive =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    this.length > 1 &&
-                    typeof pl[0] === 'string' &&
-                    /^[a-z]:$/i.test(pl[0]));
-    }
-    // pattern = '/' or '/...' or '/x/...'
-    // split = ['', ''] or ['', ...] or ['', 'x', ...]
-    // Drive and UNC both considered absolute on windows
-    /**
-     * True if the pattern is rooted on an absolute path
-     */
-    isAbsolute() {
-        const pl = this.#patternList;
-        return this.#isAbsolute !== undefined ?
-            this.#isAbsolute
-            : (this.#isAbsolute =
-                (pl[0] === '' && pl.length > 1) ||
-                    this.isDrive() ||
-                    this.isUNC());
-    }
-    /**
-     * consume the root of the pattern, and return it
-     */
-    root() {
-        const p = this.#patternList[0];
-        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
-            p
-            : '';
-    }
-    /**
-     * Check to see if the current globstar pattern is allowed to follow
-     * a symbolic link.
-     */
-    checkFollowGlobstar() {
-        return !(this.#index === 0 ||
-            !this.isGlobstar() ||
-            !this.#followGlobstar);
-    }
-    /**
-     * Mark that the current globstar pattern is following a symbolic link
-     */
-    markFollowGlobstar() {
-        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
-            return false;
-        this.#followGlobstar = false;
-        return true;
-    }
-}
-exports.Pattern = Pattern;
-//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js
deleted file mode 100644
index ee3bb4397e0b2..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/processor.js
+++ /dev/null
@@ -1,301 +0,0 @@
-"use strict";
-// synchronous utility for filtering entries and calculating subwalks
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
-const minimatch_1 = require("minimatch");
-/**
- * A cache of which patterns have been processed for a given Path
- */
-class HasWalkedCache {
-    store;
-    constructor(store = new Map()) {
-        this.store = store;
-    }
-    copy() {
-        return new HasWalkedCache(new Map(this.store));
-    }
-    hasWalked(target, pattern) {
-        return this.store.get(target.fullpath())?.has(pattern.globString());
-    }
-    storeWalked(target, pattern) {
-        const fullpath = target.fullpath();
-        const cached = this.store.get(fullpath);
-        if (cached)
-            cached.add(pattern.globString());
-        else
-            this.store.set(fullpath, new Set([pattern.globString()]));
-    }
-}
-exports.HasWalkedCache = HasWalkedCache;
-/**
- * A record of which paths have been matched in a given walk step,
- * and whether they only are considered a match if they are a directory,
- * and whether their absolute or relative path should be returned.
- */
-class MatchRecord {
-    store = new Map();
-    add(target, absolute, ifDir) {
-        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
-        const current = this.store.get(target);
-        this.store.set(target, current === undefined ? n : n & current);
-    }
-    // match, absolute, ifdir
-    entries() {
-        return [...this.store.entries()].map(([path, n]) => [
-            path,
-            !!(n & 2),
-            !!(n & 1),
-        ]);
-    }
-}
-exports.MatchRecord = MatchRecord;
-/**
- * A collection of patterns that must be processed in a subsequent step
- * for a given path.
- */
-class SubWalks {
-    store = new Map();
-    add(target, pattern) {
-        if (!target.canReaddir()) {
-            return;
-        }
-        const subs = this.store.get(target);
-        if (subs) {
-            if (!subs.find(p => p.globString() === pattern.globString())) {
-                subs.push(pattern);
-            }
-        }
-        else
-            this.store.set(target, [pattern]);
-    }
-    get(target) {
-        const subs = this.store.get(target);
-        /* c8 ignore start */
-        if (!subs) {
-            throw new Error('attempting to walk unknown path');
-        }
-        /* c8 ignore stop */
-        return subs;
-    }
-    entries() {
-        return this.keys().map(k => [k, this.store.get(k)]);
-    }
-    keys() {
-        return [...this.store.keys()].filter(t => t.canReaddir());
-    }
-}
-exports.SubWalks = SubWalks;
-/**
- * The class that processes patterns for a given path.
- *
- * Handles child entry filtering, and determining whether a path's
- * directory contents must be read.
- */
-class Processor {
-    hasWalkedCache;
-    matches = new MatchRecord();
-    subwalks = new SubWalks();
-    patterns;
-    follow;
-    dot;
-    opts;
-    constructor(opts, hasWalkedCache) {
-        this.opts = opts;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.hasWalkedCache =
-            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
-    }
-    processPatterns(target, patterns) {
-        this.patterns = patterns;
-        const processingSet = patterns.map(p => [target, p]);
-        // map of paths to the magic-starting subwalks they need to walk
-        // first item in patterns is the filter
-        for (let [t, pattern] of processingSet) {
-            this.hasWalkedCache.storeWalked(t, pattern);
-            const root = pattern.root();
-            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
-            // start absolute patterns at root
-            if (root) {
-                t = t.resolve(root === '/' && this.opts.root !== undefined ?
-                    this.opts.root
-                    : root);
-                const rest = pattern.rest();
-                if (!rest) {
-                    this.matches.add(t, true, false);
-                    continue;
-                }
-                else {
-                    pattern = rest;
-                }
-            }
-            if (t.isENOENT())
-                continue;
-            let p;
-            let rest;
-            let changed = false;
-            while (typeof (p = pattern.pattern()) === 'string' &&
-                (rest = pattern.rest())) {
-                const c = t.resolve(p);
-                t = c;
-                pattern = rest;
-                changed = true;
-            }
-            p = pattern.pattern();
-            rest = pattern.rest();
-            if (changed) {
-                if (this.hasWalkedCache.hasWalked(t, pattern))
-                    continue;
-                this.hasWalkedCache.storeWalked(t, pattern);
-            }
-            // now we have either a final string for a known entry,
-            // more strings for an unknown entry,
-            // or a pattern starting with magic, mounted on t.
-            if (typeof p === 'string') {
-                // must not be final entry, otherwise we would have
-                // concatenated it earlier.
-                const ifDir = p === '..' || p === '' || p === '.';
-                this.matches.add(t.resolve(p), absolute, ifDir);
-                continue;
-            }
-            else if (p === minimatch_1.GLOBSTAR) {
-                // if no rest, match and subwalk pattern
-                // if rest, process rest and subwalk pattern
-                // if it's a symlink, but we didn't get here by way of a
-                // globstar match (meaning it's the first time THIS globstar
-                // has traversed a symlink), then we follow it. Otherwise, stop.
-                if (!t.isSymbolicLink() ||
-                    this.follow ||
-                    pattern.checkFollowGlobstar()) {
-                    this.subwalks.add(t, pattern);
-                }
-                const rp = rest?.pattern();
-                const rrest = rest?.rest();
-                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
-                    // only HAS to be a dir if it ends in **/ or **/.
-                    // but ending in ** will match files as well.
-                    this.matches.add(t, absolute, rp === '' || rp === '.');
-                }
-                else {
-                    if (rp === '..') {
-                        // this would mean you're matching **/.. at the fs root,
-                        // and no thanks, I'm not gonna test that specific case.
-                        /* c8 ignore start */
-                        const tp = t.parent || t;
-                        /* c8 ignore stop */
-                        if (!rrest)
-                            this.matches.add(tp, absolute, true);
-                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
-                            this.subwalks.add(tp, rrest);
-                        }
-                    }
-                }
-            }
-            else if (p instanceof RegExp) {
-                this.subwalks.add(t, pattern);
-            }
-        }
-        return this;
-    }
-    subwalkTargets() {
-        return this.subwalks.keys();
-    }
-    child() {
-        return new Processor(this.opts, this.hasWalkedCache);
-    }
-    // return a new Processor containing the subwalks for each
-    // child entry, and a set of matches, and
-    // a hasWalkedCache that's a copy of this one
-    // then we're going to call
-    filterEntries(parent, entries) {
-        const patterns = this.subwalks.get(parent);
-        // put matches and entry walks into the results processor
-        const results = this.child();
-        for (const e of entries) {
-            for (const pattern of patterns) {
-                const absolute = pattern.isAbsolute();
-                const p = pattern.pattern();
-                const rest = pattern.rest();
-                if (p === minimatch_1.GLOBSTAR) {
-                    results.testGlobstar(e, pattern, rest, absolute);
-                }
-                else if (p instanceof RegExp) {
-                    results.testRegExp(e, p, rest, absolute);
-                }
-                else {
-                    results.testString(e, p, rest, absolute);
-                }
-            }
-        }
-        return results;
-    }
-    testGlobstar(e, pattern, rest, absolute) {
-        if (this.dot || !e.name.startsWith('.')) {
-            if (!pattern.hasMore()) {
-                this.matches.add(e, absolute, false);
-            }
-            if (e.canReaddir()) {
-                // if we're in follow mode or it's not a symlink, just keep
-                // testing the same pattern. If there's more after the globstar,
-                // then this symlink consumes the globstar. If not, then we can
-                // follow at most ONE symlink along the way, so we mark it, which
-                // also checks to ensure that it wasn't already marked.
-                if (this.follow || !e.isSymbolicLink()) {
-                    this.subwalks.add(e, pattern);
-                }
-                else if (e.isSymbolicLink()) {
-                    if (rest && pattern.checkFollowGlobstar()) {
-                        this.subwalks.add(e, rest);
-                    }
-                    else if (pattern.markFollowGlobstar()) {
-                        this.subwalks.add(e, pattern);
-                    }
-                }
-            }
-        }
-        // if the NEXT thing matches this entry, then also add
-        // the rest.
-        if (rest) {
-            const rp = rest.pattern();
-            if (typeof rp === 'string' &&
-                // dots and empty were handled already
-                rp !== '..' &&
-                rp !== '' &&
-                rp !== '.') {
-                this.testString(e, rp, rest.rest(), absolute);
-            }
-            else if (rp === '..') {
-                /* c8 ignore start */
-                const ep = e.parent || e;
-                /* c8 ignore stop */
-                this.subwalks.add(ep, rest);
-            }
-            else if (rp instanceof RegExp) {
-                this.testRegExp(e, rp, rest.rest(), absolute);
-            }
-        }
-    }
-    testRegExp(e, p, rest, absolute) {
-        if (!p.test(e.name))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-    testString(e, p, rest, absolute) {
-        // should never happen?
-        if (!e.isNamed(p))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-}
-exports.Processor = Processor;
-//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js
deleted file mode 100644
index cb15946d9a852..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/walker.js
+++ /dev/null
@@ -1,387 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0;
-/**
- * Single-use utility classes to provide functionality to the {@link Glob}
- * methods.
- *
- * @module
- */
-const minipass_1 = require("minipass");
-const ignore_js_1 = require("./ignore.js");
-const processor_js_1 = require("./processor.js");
-const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts)
-    : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts)
-        : ignore;
-/**
- * basic walking utilities that all the glob walker types use
- */
-class GlobUtil {
-    path;
-    patterns;
-    opts;
-    seen = new Set();
-    paused = false;
-    aborted = false;
-    #onResume = [];
-    #ignore;
-    #sep;
-    signal;
-    maxDepth;
-    includeChildMatches;
-    constructor(patterns, path, opts) {
-        this.patterns = patterns;
-        this.path = path;
-        this.opts = opts;
-        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        if (opts.ignore || !this.includeChildMatches) {
-            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
-            if (!this.includeChildMatches &&
-                typeof this.#ignore.add !== 'function') {
-                const m = 'cannot ignore child matches, ignore lacks add() method.';
-                throw new Error(m);
-            }
-        }
-        // ignore, always set with maxDepth, but it's optional on the
-        // GlobOptions type
-        /* c8 ignore start */
-        this.maxDepth = opts.maxDepth || Infinity;
-        /* c8 ignore stop */
-        if (opts.signal) {
-            this.signal = opts.signal;
-            this.signal.addEventListener('abort', () => {
-                this.#onResume.length = 0;
-            });
-        }
-    }
-    #ignored(path) {
-        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
-    }
-    #childrenIgnored(path) {
-        return !!this.#ignore?.childrenIgnored?.(path);
-    }
-    // backpressure mechanism
-    pause() {
-        this.paused = true;
-    }
-    resume() {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore stop */
-        this.paused = false;
-        let fn = undefined;
-        while (!this.paused && (fn = this.#onResume.shift())) {
-            fn();
-        }
-    }
-    onResume(fn) {
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore start */
-        if (!this.paused) {
-            fn();
-        }
-        else {
-            /* c8 ignore stop */
-            this.#onResume.push(fn);
-        }
-    }
-    // do the requisite realpath/stat checking, and return the path
-    // to add or undefined to filter it out.
-    async matchCheck(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || (await e.realpath());
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? await e.lstat() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = await s.realpath();
-            /* c8 ignore start */
-            if (target && (target.isUnknown() || this.opts.stat)) {
-                await target.lstat();
-            }
-            /* c8 ignore stop */
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchCheckTest(e, ifDir) {
-        return (e &&
-            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
-            (!ifDir || e.canReaddir()) &&
-            (!this.opts.nodir || !e.isDirectory()) &&
-            (!this.opts.nodir ||
-                !this.opts.follow ||
-                !e.isSymbolicLink() ||
-                !e.realpathCached()?.isDirectory()) &&
-            !this.#ignored(e)) ?
-            e
-            : undefined;
-    }
-    matchCheckSync(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || e.realpathSync();
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? e.lstatSync() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = s.realpathSync();
-            if (target && (target?.isUnknown() || this.opts.stat)) {
-                target.lstatSync();
-            }
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchFinish(e, absolute) {
-        if (this.#ignored(e))
-            return;
-        // we know we have an ignore if this is false, but TS doesn't
-        if (!this.includeChildMatches && this.#ignore?.add) {
-            const ign = `${e.relativePosix()}/**`;
-            this.#ignore.add(ign);
-        }
-        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
-        this.seen.add(e);
-        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
-        // ok, we have what we need!
-        if (this.opts.withFileTypes) {
-            this.matchEmit(e);
-        }
-        else if (abs) {
-            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
-            this.matchEmit(abs + mark);
-        }
-        else {
-            const rel = this.opts.posix ? e.relativePosix() : e.relative();
-            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
-                '.' + this.#sep
-                : '';
-            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
-        }
-    }
-    async match(e, absolute, ifDir) {
-        const p = await this.matchCheck(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    matchSync(e, absolute, ifDir) {
-        const p = this.matchCheckSync(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    walkCB(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb);
-    }
-    walkCB2(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const childrenCached = t.readdirCached();
-            if (t.calledReaddir())
-                this.walkCB3(t, childrenCached, processor, next);
-            else {
-                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
-            }
-        }
-        next();
-    }
-    walkCB3(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-    walkCBSync(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb);
-    }
-    walkCB2Sync(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const children = t.readdirSync();
-            this.walkCB3Sync(t, children, processor, next);
-        }
-        next();
-    }
-    walkCB3Sync(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2Sync(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-}
-exports.GlobUtil = GlobUtil;
-class GlobWalker extends GlobUtil {
-    matches = new Set();
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-    }
-    matchEmit(e) {
-        this.matches.add(e);
-    }
-    async walk() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            await this.path.lstat();
-        }
-        await new Promise((res, rej) => {
-            this.walkCB(this.path, this.patterns, () => {
-                if (this.signal?.aborted) {
-                    rej(this.signal.reason);
-                }
-                else {
-                    res(this.matches);
-                }
-            });
-        });
-        return this.matches;
-    }
-    walkSync() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        // nothing for the callback to do, because this never pauses
-        this.walkCBSync(this.path, this.patterns, () => {
-            if (this.signal?.aborted)
-                throw this.signal.reason;
-        });
-        return this.matches;
-    }
-}
-exports.GlobWalker = GlobWalker;
-class GlobStream extends GlobUtil {
-    results;
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-        this.results = new minipass_1.Minipass({
-            signal: this.signal,
-            objectMode: true,
-        });
-        this.results.on('drain', () => this.resume());
-        this.results.on('resume', () => this.resume());
-    }
-    matchEmit(e) {
-        this.results.write(e);
-        if (!this.results.flowing)
-            this.pause();
-    }
-    stream() {
-        const target = this.path;
-        if (target.isUnknown()) {
-            target.lstat().then(() => {
-                this.walkCB(target, this.patterns, () => this.results.end());
-            });
-        }
-        else {
-            this.walkCB(target, this.patterns, () => this.results.end());
-        }
-        return this.results;
-    }
-    streamSync() {
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        this.walkCBSync(this.path, this.patterns, () => this.results.end());
-        return this.results;
-    }
-}
-exports.GlobStream = GlobStream;
-//# sourceMappingURL=walker.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts
deleted file mode 100644
index 77298e4770817..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.d.mts
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env node
-export {};
-//# sourceMappingURL=bin.d.mts.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs
deleted file mode 100755
index 553bb79303d90..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/bin.mjs
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/usr/bin/env node
-import { foregroundChild } from 'foreground-child';
-import { existsSync } from 'fs';
-import { jack } from 'jackspeak';
-import { loadPackageJson } from 'package-json-from-dist';
-import { join } from 'path';
-import { globStream } from './index.js';
-const { version } = loadPackageJson(import.meta.url, '../package.json');
-const j = jack({
-    usage: 'glob [options] [<pattern> [<pattern> ...]]',
-})
-    .description(`
-    Glob v${version}
-
-    Expand the positional glob expression arguments into any matching file
-    system paths found.
-  `)
-    .opt({
-    cmd: {
-        short: 'c',
-        hint: 'command',
-        description: `Run the command provided, passing the glob expression
-                    matches as arguments.`,
-    },
-})
-    .opt({
-    default: {
-        short: 'p',
-        hint: 'pattern',
-        description: `If no positional arguments are provided, glob will use
-                    this pattern`,
-    },
-})
-    .flag({
-    all: {
-        short: 'A',
-        description: `By default, the glob cli command will not expand any
-                    arguments that are an exact match to a file on disk.
-
-                    This prevents double-expanding, in case the shell expands
-                    an argument whose filename is a glob expression.
-
-                    For example, if 'app/*.ts' would match 'app/[id].ts', then
-                    on Windows powershell or cmd.exe, 'glob app/*.ts' will
-                    expand to 'app/[id].ts', as expected. However, in posix
-                    shells such as bash or zsh, the shell will first expand
-                    'app/*.ts' to a list of filenames. Then glob will look
-                    for a file matching 'app/[id].ts' (ie, 'app/i.ts' or
-                    'app/d.ts'), which is unexpected.
-
-                    Setting '--all' prevents this behavior, causing glob
-                    to treat ALL patterns as glob expressions to be expanded,
-                    even if they are an exact match to a file on disk.
-
-                    When setting this option, be sure to enquote arguments
-                    so that the shell will not expand them prior to passing
-                    them to the glob command process.
-      `,
-    },
-    absolute: {
-        short: 'a',
-        description: 'Expand to absolute paths',
-    },
-    'dot-relative': {
-        short: 'd',
-        description: `Prepend './' on relative matches`,
-    },
-    mark: {
-        short: 'm',
-        description: `Append a / on any directories matched`,
-    },
-    posix: {
-        short: 'x',
-        description: `Always resolve to posix style paths, using '/' as the
-                    directory separator, even on Windows. Drive letter
-                    absolute matches on Windows will be expanded to their
-                    full resolved UNC paths, eg instead of 'C:\\foo\\bar',
-                    it will expand to '//?/C:/foo/bar'.
-      `,
-    },
-    follow: {
-        short: 'f',
-        description: `Follow symlinked directories when expanding '**'`,
-    },
-    realpath: {
-        short: 'R',
-        description: `Call 'fs.realpath' on all of the results. In the case
-                    of an entry that cannot be resolved, the entry is
-                    omitted. This incurs a slight performance penalty, of
-                    course, because of the added system calls.`,
-    },
-    stat: {
-        short: 's',
-        description: `Call 'fs.lstat' on all entries, whether required or not
-                    to determine if it's a valid match.`,
-    },
-    'match-base': {
-        short: 'b',
-        description: `Perform a basename-only match if the pattern does not
-                    contain any slash characters. That is, '*.js' would be
-                    treated as equivalent to '**/*.js', matching js files
-                    in all directories.
-      `,
-    },
-    dot: {
-        description: `Allow patterns to match files/directories that start
-                    with '.', even if the pattern does not start with '.'
-      `,
-    },
-    nobrace: {
-        description: 'Do not expand {...} patterns',
-    },
-    nocase: {
-        description: `Perform a case-insensitive match. This defaults to
-                    'true' on macOS and Windows platforms, and false on
-                    all others.
-
-                    Note: 'nocase' should only be explicitly set when it is
-                    known that the filesystem's case sensitivity differs
-                    from the platform default. If set 'true' on
-                    case-insensitive file systems, then the walk may return
-                    more or less results than expected.
-      `,
-    },
-    nodir: {
-        description: `Do not match directories, only files.
-
-                    Note: to *only* match directories, append a '/' at the
-                    end of the pattern.
-      `,
-    },
-    noext: {
-        description: `Do not expand extglob patterns, such as '+(a|b)'`,
-    },
-    noglobstar: {
-        description: `Do not expand '**' against multiple path portions.
-                    Ie, treat it as a normal '*' instead.`,
-    },
-    'windows-path-no-escape': {
-        description: `Use '\\' as a path separator *only*, and *never* as an
-                    escape character. If set, all '\\' characters are
-                    replaced with '/' in the pattern.`,
-    },
-})
-    .num({
-    'max-depth': {
-        short: 'D',
-        description: `Maximum depth to traverse from the current
-                    working directory`,
-    },
-})
-    .opt({
-    cwd: {
-        short: 'C',
-        description: 'Current working directory to execute/match in',
-        default: process.cwd(),
-    },
-    root: {
-        short: 'r',
-        description: `A string path resolved against the 'cwd', which is
-                    used as the starting point for absolute patterns that
-                    start with '/' (but not drive letters or UNC paths
-                    on Windows).
-
-                    Note that this *doesn't* necessarily limit the walk to
-                    the 'root' directory, and doesn't affect the cwd
-                    starting point for non-absolute patterns. A pattern
-                    containing '..' will still be able to traverse out of
-                    the root directory, if it is not an actual root directory
-                    on the filesystem, and any non-absolute patterns will
-                    still be matched in the 'cwd'.
-
-                    To start absolute and non-absolute patterns in the same
-                    path, you can use '--root=' to set it to the empty
-                    string. However, be aware that on Windows systems, a
-                    pattern like 'x:/*' or '//host/share/*' will *always*
-                    start in the 'x:/' or '//host/share/' directory,
-                    regardless of the --root setting.
-      `,
-    },
-    platform: {
-        description: `Defaults to the value of 'process.platform' if
-                    available, or 'linux' if not. Setting --platform=win32
-                    on non-Windows systems may cause strange behavior!`,
-        validOptions: [
-            'aix',
-            'android',
-            'darwin',
-            'freebsd',
-            'haiku',
-            'linux',
-            'openbsd',
-            'sunos',
-            'win32',
-            'cygwin',
-            'netbsd',
-        ],
-    },
-})
-    .optList({
-    ignore: {
-        short: 'i',
-        description: `Glob patterns to ignore`,
-    },
-})
-    .flag({
-    debug: {
-        short: 'v',
-        description: `Output a huge amount of noisy debug information about
-                    patterns as they are parsed and used to match files.`,
-    },
-    version: {
-        short: 'V',
-        description: `Output the version (${version})`,
-    },
-    help: {
-        short: 'h',
-        description: 'Show this usage information',
-    },
-});
-try {
-    const { positionals, values } = j.parse();
-    if (values.version) {
-        console.log(version);
-        process.exit(0);
-    }
-    if (values.help) {
-        console.log(j.usage());
-        process.exit(0);
-    }
-    if (positionals.length === 0 && !values.default)
-        throw 'No patterns provided';
-    if (positionals.length === 0 && values.default)
-        positionals.push(values.default);
-    const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p));
-    const matches = values.all ?
-        []
-        : positionals.filter(p => existsSync(p)).map(p => join(p));
-    const stream = globStream(patterns, {
-        absolute: values.absolute,
-        cwd: values.cwd,
-        dot: values.dot,
-        dotRelative: values['dot-relative'],
-        follow: values.follow,
-        ignore: values.ignore,
-        mark: values.mark,
-        matchBase: values['match-base'],
-        maxDepth: values['max-depth'],
-        nobrace: values.nobrace,
-        nocase: values.nocase,
-        nodir: values.nodir,
-        noext: values.noext,
-        noglobstar: values.noglobstar,
-        platform: values.platform,
-        realpath: values.realpath,
-        root: values.root,
-        stat: values.stat,
-        debug: values.debug,
-        posix: values.posix,
-    });
-    const cmd = values.cmd;
-    if (!cmd) {
-        matches.forEach(m => console.log(m));
-        stream.on('data', f => console.log(f));
-    }
-    else {
-        stream.on('data', f => matches.push(f));
-        stream.on('end', () => foregroundChild(cmd, matches, { shell: true }));
-    }
-}
-catch (e) {
-    console.error(j.usage());
-    console.error(e instanceof Error ? e.message : String(e));
-    process.exit(1);
-}
-//# sourceMappingURL=bin.mjs.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js
deleted file mode 100644
index c9ff3b0036d94..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/glob.js
+++ /dev/null
@@ -1,243 +0,0 @@
-import { Minimatch } from 'minimatch';
-import { fileURLToPath } from 'node:url';
-import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry';
-import { Pattern } from './pattern.js';
-import { GlobStream, GlobWalker } from './walker.js';
-// if no process global, just call it linux.
-// so we default to case-sensitive, / separators
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * An object that can perform glob pattern traversals.
- */
-export class Glob {
-    absolute;
-    cwd;
-    root;
-    dot;
-    dotRelative;
-    follow;
-    ignore;
-    magicalBraces;
-    mark;
-    matchBase;
-    maxDepth;
-    nobrace;
-    nocase;
-    nodir;
-    noext;
-    noglobstar;
-    pattern;
-    platform;
-    realpath;
-    scurry;
-    stat;
-    signal;
-    windowsPathsNoEscape;
-    withFileTypes;
-    includeChildMatches;
-    /**
-     * The options provided to the constructor.
-     */
-    opts;
-    /**
-     * An array of parsed immutable {@link Pattern} objects.
-     */
-    patterns;
-    /**
-     * All options are stored as properties on the `Glob` object.
-     *
-     * See {@link GlobOptions} for full options descriptions.
-     *
-     * Note that a previous `Glob` object can be passed as the
-     * `GlobOptions` to another `Glob` instantiation to re-use settings
-     * and caches with a new pattern.
-     *
-     * Traversal functions can be called multiple times to run the walk
-     * again.
-     */
-    constructor(pattern, opts) {
-        /* c8 ignore start */
-        if (!opts)
-            throw new TypeError('glob options required');
-        /* c8 ignore stop */
-        this.withFileTypes = !!opts.withFileTypes;
-        this.signal = opts.signal;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.dotRelative = !!opts.dotRelative;
-        this.nodir = !!opts.nodir;
-        this.mark = !!opts.mark;
-        if (!opts.cwd) {
-            this.cwd = '';
-        }
-        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
-            opts.cwd = fileURLToPath(opts.cwd);
-        }
-        this.cwd = opts.cwd || '';
-        this.root = opts.root;
-        this.magicalBraces = !!opts.magicalBraces;
-        this.nobrace = !!opts.nobrace;
-        this.noext = !!opts.noext;
-        this.realpath = !!opts.realpath;
-        this.absolute = opts.absolute;
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        this.noglobstar = !!opts.noglobstar;
-        this.matchBase = !!opts.matchBase;
-        this.maxDepth =
-            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
-        this.stat = !!opts.stat;
-        this.ignore = opts.ignore;
-        if (this.withFileTypes && this.absolute !== undefined) {
-            throw new Error('cannot set absolute and withFileTypes:true');
-        }
-        if (typeof pattern === 'string') {
-            pattern = [pattern];
-        }
-        this.windowsPathsNoEscape =
-            !!opts.windowsPathsNoEscape ||
-                opts.allowWindowsEscape ===
-                    false;
-        if (this.windowsPathsNoEscape) {
-            pattern = pattern.map(p => p.replace(/\\/g, '/'));
-        }
-        if (this.matchBase) {
-            if (opts.noglobstar) {
-                throw new TypeError('base matching requires globstar');
-            }
-            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
-        }
-        this.pattern = pattern;
-        this.platform = opts.platform || defaultPlatform;
-        this.opts = { ...opts, platform: this.platform };
-        if (opts.scurry) {
-            this.scurry = opts.scurry;
-            if (opts.nocase !== undefined &&
-                opts.nocase !== opts.scurry.nocase) {
-                throw new Error('nocase option contradicts provided scurry option');
-            }
-        }
-        else {
-            const Scurry = opts.platform === 'win32' ? PathScurryWin32
-                : opts.platform === 'darwin' ? PathScurryDarwin
-                    : opts.platform ? PathScurryPosix
-                        : PathScurry;
-            this.scurry = new Scurry(this.cwd, {
-                nocase: opts.nocase,
-                fs: opts.fs,
-            });
-        }
-        this.nocase = this.scurry.nocase;
-        // If you do nocase:true on a case-sensitive file system, then
-        // we need to use regexps instead of strings for non-magic
-        // path portions, because statting `aBc` won't return results
-        // for the file `AbC` for example.
-        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
-        const mmo = {
-            // default nocase based on platform
-            ...opts,
-            dot: this.dot,
-            matchBase: this.matchBase,
-            nobrace: this.nobrace,
-            nocase: this.nocase,
-            nocaseMagicOnly,
-            nocomment: true,
-            noext: this.noext,
-            nonegate: true,
-            optimizationLevel: 2,
-            platform: this.platform,
-            windowsPathsNoEscape: this.windowsPathsNoEscape,
-            debug: !!this.opts.debug,
-        };
-        const mms = this.pattern.map(p => new Minimatch(p, mmo));
-        const [matchSet, globParts] = mms.reduce((set, m) => {
-            set[0].push(...m.set);
-            set[1].push(...m.globParts);
-            return set;
-        }, [[], []]);
-        this.patterns = matchSet.map((set, i) => {
-            const g = globParts[i];
-            /* c8 ignore start */
-            if (!g)
-                throw new Error('invalid pattern object');
-            /* c8 ignore stop */
-            return new Pattern(set, g, 0, this.platform);
-        });
-    }
-    async walk() {
-        // Walkers always return array of Path objects, so we just have to
-        // coerce them into the right shape.  It will have already called
-        // realpath() if the option was set to do so, so we know that's cached.
-        // start out knowing the cwd, at least
-        return [
-            ...(await new GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walk()),
-        ];
-    }
-    walkSync() {
-        return [
-            ...new GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walkSync(),
-        ];
-    }
-    stream() {
-        return new GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).stream();
-    }
-    streamSync() {
-        return new GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).streamSync();
-    }
-    /**
-     * Default sync iteration function. Returns a Generator that
-     * iterates over the results.
-     */
-    iterateSync() {
-        return this.streamSync()[Symbol.iterator]();
-    }
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    /**
-     * Default async iteration function. Returns an AsyncGenerator that
-     * iterates over the results.
-     */
-    iterate() {
-        return this.stream()[Symbol.asyncIterator]();
-    }
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-}
-//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js
deleted file mode 100644
index ba2321ab868d0..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/has-magic.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import { Minimatch } from 'minimatch';
-/**
- * Return true if the patterns provided contain any magic glob characters,
- * given the options provided.
- *
- * Brace expansion is not considered "magic" unless the `magicalBraces` option
- * is set, as brace expansion just turns one string into an array of strings.
- * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
- * `'xby'` both do not contain any magic glob characters, and it's treated the
- * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
- * is in the options, brace expansion _is_ treated as a pattern having magic.
- */
-export const hasMagic = (pattern, options = {}) => {
-    if (!Array.isArray(pattern)) {
-        pattern = [pattern];
-    }
-    for (const p of pattern) {
-        if (new Minimatch(p, options).hasMagic())
-            return true;
-    }
-    return false;
-};
-//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js
deleted file mode 100644
index 539c4a4fdebc4..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/ignore.js
+++ /dev/null
@@ -1,115 +0,0 @@
-// give it a pattern, and it'll be able to tell you if
-// a given path should be ignored.
-// Ignoring a path ignores its children if the pattern ends in /**
-// Ignores are always parsed in dot:true mode
-import { Minimatch } from 'minimatch';
-import { Pattern } from './pattern.js';
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * Class used to process ignored patterns
- */
-export class Ignore {
-    relative;
-    relativeChildren;
-    absolute;
-    absoluteChildren;
-    platform;
-    mmopts;
-    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
-        this.relative = [];
-        this.absolute = [];
-        this.relativeChildren = [];
-        this.absoluteChildren = [];
-        this.platform = platform;
-        this.mmopts = {
-            dot: true,
-            nobrace,
-            nocase,
-            noext,
-            noglobstar,
-            optimizationLevel: 2,
-            platform,
-            nocomment: true,
-            nonegate: true,
-        };
-        for (const ign of ignored)
-            this.add(ign);
-    }
-    add(ign) {
-        // this is a little weird, but it gives us a clean set of optimized
-        // minimatch matchers, without getting tripped up if one of them
-        // ends in /** inside a brace section, and it's only inefficient at
-        // the start of the walk, not along it.
-        // It'd be nice if the Pattern class just had a .test() method, but
-        // handling globstars is a bit of a pita, and that code already lives
-        // in minimatch anyway.
-        // Another way would be if maybe Minimatch could take its set/globParts
-        // as an option, and then we could at least just use Pattern to test
-        // for absolute-ness.
-        // Yet another way, Minimatch could take an array of glob strings, and
-        // a cwd option, and do the right thing.
-        const mm = new Minimatch(ign, this.mmopts);
-        for (let i = 0; i < mm.set.length; i++) {
-            const parsed = mm.set[i];
-            const globParts = mm.globParts[i];
-            /* c8 ignore start */
-            if (!parsed || !globParts) {
-                throw new Error('invalid pattern object');
-            }
-            // strip off leading ./ portions
-            // https://github.com/isaacs/node-glob/issues/570
-            while (parsed[0] === '.' && globParts[0] === '.') {
-                parsed.shift();
-                globParts.shift();
-            }
-            /* c8 ignore stop */
-            const p = new Pattern(parsed, globParts, 0, this.platform);
-            const m = new Minimatch(p.globString(), this.mmopts);
-            const children = globParts[globParts.length - 1] === '**';
-            const absolute = p.isAbsolute();
-            if (absolute)
-                this.absolute.push(m);
-            else
-                this.relative.push(m);
-            if (children) {
-                if (absolute)
-                    this.absoluteChildren.push(m);
-                else
-                    this.relativeChildren.push(m);
-            }
-        }
-    }
-    ignored(p) {
-        const fullpath = p.fullpath();
-        const fullpaths = `${fullpath}/`;
-        const relative = p.relative() || '.';
-        const relatives = `${relative}/`;
-        for (const m of this.relative) {
-            if (m.match(relative) || m.match(relatives))
-                return true;
-        }
-        for (const m of this.absolute) {
-            if (m.match(fullpath) || m.match(fullpaths))
-                return true;
-        }
-        return false;
-    }
-    childrenIgnored(p) {
-        const fullpath = p.fullpath() + '/';
-        const relative = (p.relative() || '.') + '/';
-        for (const m of this.relativeChildren) {
-            if (m.match(relative))
-                return true;
-        }
-        for (const m of this.absoluteChildren) {
-            if (m.match(fullpath))
-                return true;
-        }
-        return false;
-    }
-}
-//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js
deleted file mode 100644
index e15c1f9c4cb03..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/index.js
+++ /dev/null
@@ -1,55 +0,0 @@
-import { escape, unescape } from 'minimatch';
-import { Glob } from './glob.js';
-import { hasMagic } from './has-magic.js';
-export { escape, unescape } from 'minimatch';
-export { Glob } from './glob.js';
-export { hasMagic } from './has-magic.js';
-export { Ignore } from './ignore.js';
-export function globStreamSync(pattern, options = {}) {
-    return new Glob(pattern, options).streamSync();
-}
-export function globStream(pattern, options = {}) {
-    return new Glob(pattern, options).stream();
-}
-export function globSync(pattern, options = {}) {
-    return new Glob(pattern, options).walkSync();
-}
-async function glob_(pattern, options = {}) {
-    return new Glob(pattern, options).walk();
-}
-export function globIterateSync(pattern, options = {}) {
-    return new Glob(pattern, options).iterateSync();
-}
-export function globIterate(pattern, options = {}) {
-    return new Glob(pattern, options).iterate();
-}
-// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
-export const streamSync = globStreamSync;
-export const stream = Object.assign(globStream, { sync: globStreamSync });
-export const iterateSync = globIterateSync;
-export const iterate = Object.assign(globIterate, {
-    sync: globIterateSync,
-});
-export const sync = Object.assign(globSync, {
-    stream: globStreamSync,
-    iterate: globIterateSync,
-});
-export const glob = Object.assign(glob_, {
-    glob: glob_,
-    globSync,
-    sync,
-    globStream,
-    stream,
-    globStreamSync,
-    streamSync,
-    globIterate,
-    iterate,
-    globIterateSync,
-    iterateSync,
-    Glob,
-    hasMagic,
-    escape,
-    unescape,
-});
-glob.glob = glob;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js
deleted file mode 100644
index b41defa10c6a3..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/pattern.js
+++ /dev/null
@@ -1,215 +0,0 @@
-// this is just a very light wrapper around 2 arrays with an offset index
-import { GLOBSTAR } from 'minimatch';
-const isPatternList = (pl) => pl.length >= 1;
-const isGlobList = (gl) => gl.length >= 1;
-/**
- * An immutable-ish view on an array of glob parts and their parsed
- * results
- */
-export class Pattern {
-    #patternList;
-    #globList;
-    #index;
-    length;
-    #platform;
-    #rest;
-    #globString;
-    #isDrive;
-    #isUNC;
-    #isAbsolute;
-    #followGlobstar = true;
-    constructor(patternList, globList, index, platform) {
-        if (!isPatternList(patternList)) {
-            throw new TypeError('empty pattern list');
-        }
-        if (!isGlobList(globList)) {
-            throw new TypeError('empty glob list');
-        }
-        if (globList.length !== patternList.length) {
-            throw new TypeError('mismatched pattern list and glob list lengths');
-        }
-        this.length = patternList.length;
-        if (index < 0 || index >= this.length) {
-            throw new TypeError('index out of range');
-        }
-        this.#patternList = patternList;
-        this.#globList = globList;
-        this.#index = index;
-        this.#platform = platform;
-        // normalize root entries of absolute patterns on initial creation.
-        if (this.#index === 0) {
-            // c: => ['c:/']
-            // C:/ => ['C:/']
-            // C:/x => ['C:/', 'x']
-            // //host/share => ['//host/share/']
-            // //host/share/ => ['//host/share/']
-            // //host/share/x => ['//host/share/', 'x']
-            // /etc => ['/', 'etc']
-            // / => ['/']
-            if (this.isUNC()) {
-                // '' / '' / 'host' / 'share'
-                const [p0, p1, p2, p3, ...prest] = this.#patternList;
-                const [g0, g1, g2, g3, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = [p0, p1, p2, p3, ''].join('/');
-                const g = [g0, g1, g2, g3, ''].join('/');
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-            else if (this.isDrive() || this.isAbsolute()) {
-                const [p1, ...prest] = this.#patternList;
-                const [g1, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = p1 + '/';
-                const g = g1 + '/';
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-        }
-    }
-    /**
-     * The first entry in the parsed list of patterns
-     */
-    pattern() {
-        return this.#patternList[this.#index];
-    }
-    /**
-     * true if pattern() returns a string
-     */
-    isString() {
-        return typeof this.#patternList[this.#index] === 'string';
-    }
-    /**
-     * true if pattern() returns GLOBSTAR
-     */
-    isGlobstar() {
-        return this.#patternList[this.#index] === GLOBSTAR;
-    }
-    /**
-     * true if pattern() returns a regexp
-     */
-    isRegExp() {
-        return this.#patternList[this.#index] instanceof RegExp;
-    }
-    /**
-     * The /-joined set of glob parts that make up this pattern
-     */
-    globString() {
-        return (this.#globString =
-            this.#globString ||
-                (this.#index === 0 ?
-                    this.isAbsolute() ?
-                        this.#globList[0] + this.#globList.slice(1).join('/')
-                        : this.#globList.join('/')
-                    : this.#globList.slice(this.#index).join('/')));
-    }
-    /**
-     * true if there are more pattern parts after this one
-     */
-    hasMore() {
-        return this.length > this.#index + 1;
-    }
-    /**
-     * The rest of the pattern after this part, or null if this is the end
-     */
-    rest() {
-        if (this.#rest !== undefined)
-            return this.#rest;
-        if (!this.hasMore())
-            return (this.#rest = null);
-        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
-        this.#rest.#isAbsolute = this.#isAbsolute;
-        this.#rest.#isUNC = this.#isUNC;
-        this.#rest.#isDrive = this.#isDrive;
-        return this.#rest;
-    }
-    /**
-     * true if the pattern represents a //unc/path/ on windows
-     */
-    isUNC() {
-        const pl = this.#patternList;
-        return this.#isUNC !== undefined ?
-            this.#isUNC
-            : (this.#isUNC =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    pl[0] === '' &&
-                    pl[1] === '' &&
-                    typeof pl[2] === 'string' &&
-                    !!pl[2] &&
-                    typeof pl[3] === 'string' &&
-                    !!pl[3]);
-    }
-    // pattern like C:/...
-    // split = ['C:', ...]
-    // XXX: would be nice to handle patterns like `c:*` to test the cwd
-    // in c: for *, but I don't know of a way to even figure out what that
-    // cwd is without actually chdir'ing into it?
-    /**
-     * True if the pattern starts with a drive letter on Windows
-     */
-    isDrive() {
-        const pl = this.#patternList;
-        return this.#isDrive !== undefined ?
-            this.#isDrive
-            : (this.#isDrive =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    this.length > 1 &&
-                    typeof pl[0] === 'string' &&
-                    /^[a-z]:$/i.test(pl[0]));
-    }
-    // pattern = '/' or '/...' or '/x/...'
-    // split = ['', ''] or ['', ...] or ['', 'x', ...]
-    // Drive and UNC both considered absolute on windows
-    /**
-     * True if the pattern is rooted on an absolute path
-     */
-    isAbsolute() {
-        const pl = this.#patternList;
-        return this.#isAbsolute !== undefined ?
-            this.#isAbsolute
-            : (this.#isAbsolute =
-                (pl[0] === '' && pl.length > 1) ||
-                    this.isDrive() ||
-                    this.isUNC());
-    }
-    /**
-     * consume the root of the pattern, and return it
-     */
-    root() {
-        const p = this.#patternList[0];
-        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
-            p
-            : '';
-    }
-    /**
-     * Check to see if the current globstar pattern is allowed to follow
-     * a symbolic link.
-     */
-    checkFollowGlobstar() {
-        return !(this.#index === 0 ||
-            !this.isGlobstar() ||
-            !this.#followGlobstar);
-    }
-    /**
-     * Mark that the current globstar pattern is following a symbolic link
-     */
-    markFollowGlobstar() {
-        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
-            return false;
-        this.#followGlobstar = false;
-        return true;
-    }
-}
-//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js
deleted file mode 100644
index f874892ffed0c..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/processor.js
+++ /dev/null
@@ -1,294 +0,0 @@
-// synchronous utility for filtering entries and calculating subwalks
-import { GLOBSTAR } from 'minimatch';
-/**
- * A cache of which patterns have been processed for a given Path
- */
-export class HasWalkedCache {
-    store;
-    constructor(store = new Map()) {
-        this.store = store;
-    }
-    copy() {
-        return new HasWalkedCache(new Map(this.store));
-    }
-    hasWalked(target, pattern) {
-        return this.store.get(target.fullpath())?.has(pattern.globString());
-    }
-    storeWalked(target, pattern) {
-        const fullpath = target.fullpath();
-        const cached = this.store.get(fullpath);
-        if (cached)
-            cached.add(pattern.globString());
-        else
-            this.store.set(fullpath, new Set([pattern.globString()]));
-    }
-}
-/**
- * A record of which paths have been matched in a given walk step,
- * and whether they only are considered a match if they are a directory,
- * and whether their absolute or relative path should be returned.
- */
-export class MatchRecord {
-    store = new Map();
-    add(target, absolute, ifDir) {
-        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
-        const current = this.store.get(target);
-        this.store.set(target, current === undefined ? n : n & current);
-    }
-    // match, absolute, ifdir
-    entries() {
-        return [...this.store.entries()].map(([path, n]) => [
-            path,
-            !!(n & 2),
-            !!(n & 1),
-        ]);
-    }
-}
-/**
- * A collection of patterns that must be processed in a subsequent step
- * for a given path.
- */
-export class SubWalks {
-    store = new Map();
-    add(target, pattern) {
-        if (!target.canReaddir()) {
-            return;
-        }
-        const subs = this.store.get(target);
-        if (subs) {
-            if (!subs.find(p => p.globString() === pattern.globString())) {
-                subs.push(pattern);
-            }
-        }
-        else
-            this.store.set(target, [pattern]);
-    }
-    get(target) {
-        const subs = this.store.get(target);
-        /* c8 ignore start */
-        if (!subs) {
-            throw new Error('attempting to walk unknown path');
-        }
-        /* c8 ignore stop */
-        return subs;
-    }
-    entries() {
-        return this.keys().map(k => [k, this.store.get(k)]);
-    }
-    keys() {
-        return [...this.store.keys()].filter(t => t.canReaddir());
-    }
-}
-/**
- * The class that processes patterns for a given path.
- *
- * Handles child entry filtering, and determining whether a path's
- * directory contents must be read.
- */
-export class Processor {
-    hasWalkedCache;
-    matches = new MatchRecord();
-    subwalks = new SubWalks();
-    patterns;
-    follow;
-    dot;
-    opts;
-    constructor(opts, hasWalkedCache) {
-        this.opts = opts;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.hasWalkedCache =
-            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
-    }
-    processPatterns(target, patterns) {
-        this.patterns = patterns;
-        const processingSet = patterns.map(p => [target, p]);
-        // map of paths to the magic-starting subwalks they need to walk
-        // first item in patterns is the filter
-        for (let [t, pattern] of processingSet) {
-            this.hasWalkedCache.storeWalked(t, pattern);
-            const root = pattern.root();
-            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
-            // start absolute patterns at root
-            if (root) {
-                t = t.resolve(root === '/' && this.opts.root !== undefined ?
-                    this.opts.root
-                    : root);
-                const rest = pattern.rest();
-                if (!rest) {
-                    this.matches.add(t, true, false);
-                    continue;
-                }
-                else {
-                    pattern = rest;
-                }
-            }
-            if (t.isENOENT())
-                continue;
-            let p;
-            let rest;
-            let changed = false;
-            while (typeof (p = pattern.pattern()) === 'string' &&
-                (rest = pattern.rest())) {
-                const c = t.resolve(p);
-                t = c;
-                pattern = rest;
-                changed = true;
-            }
-            p = pattern.pattern();
-            rest = pattern.rest();
-            if (changed) {
-                if (this.hasWalkedCache.hasWalked(t, pattern))
-                    continue;
-                this.hasWalkedCache.storeWalked(t, pattern);
-            }
-            // now we have either a final string for a known entry,
-            // more strings for an unknown entry,
-            // or a pattern starting with magic, mounted on t.
-            if (typeof p === 'string') {
-                // must not be final entry, otherwise we would have
-                // concatenated it earlier.
-                const ifDir = p === '..' || p === '' || p === '.';
-                this.matches.add(t.resolve(p), absolute, ifDir);
-                continue;
-            }
-            else if (p === GLOBSTAR) {
-                // if no rest, match and subwalk pattern
-                // if rest, process rest and subwalk pattern
-                // if it's a symlink, but we didn't get here by way of a
-                // globstar match (meaning it's the first time THIS globstar
-                // has traversed a symlink), then we follow it. Otherwise, stop.
-                if (!t.isSymbolicLink() ||
-                    this.follow ||
-                    pattern.checkFollowGlobstar()) {
-                    this.subwalks.add(t, pattern);
-                }
-                const rp = rest?.pattern();
-                const rrest = rest?.rest();
-                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
-                    // only HAS to be a dir if it ends in **/ or **/.
-                    // but ending in ** will match files as well.
-                    this.matches.add(t, absolute, rp === '' || rp === '.');
-                }
-                else {
-                    if (rp === '..') {
-                        // this would mean you're matching **/.. at the fs root,
-                        // and no thanks, I'm not gonna test that specific case.
-                        /* c8 ignore start */
-                        const tp = t.parent || t;
-                        /* c8 ignore stop */
-                        if (!rrest)
-                            this.matches.add(tp, absolute, true);
-                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
-                            this.subwalks.add(tp, rrest);
-                        }
-                    }
-                }
-            }
-            else if (p instanceof RegExp) {
-                this.subwalks.add(t, pattern);
-            }
-        }
-        return this;
-    }
-    subwalkTargets() {
-        return this.subwalks.keys();
-    }
-    child() {
-        return new Processor(this.opts, this.hasWalkedCache);
-    }
-    // return a new Processor containing the subwalks for each
-    // child entry, and a set of matches, and
-    // a hasWalkedCache that's a copy of this one
-    // then we're going to call
-    filterEntries(parent, entries) {
-        const patterns = this.subwalks.get(parent);
-        // put matches and entry walks into the results processor
-        const results = this.child();
-        for (const e of entries) {
-            for (const pattern of patterns) {
-                const absolute = pattern.isAbsolute();
-                const p = pattern.pattern();
-                const rest = pattern.rest();
-                if (p === GLOBSTAR) {
-                    results.testGlobstar(e, pattern, rest, absolute);
-                }
-                else if (p instanceof RegExp) {
-                    results.testRegExp(e, p, rest, absolute);
-                }
-                else {
-                    results.testString(e, p, rest, absolute);
-                }
-            }
-        }
-        return results;
-    }
-    testGlobstar(e, pattern, rest, absolute) {
-        if (this.dot || !e.name.startsWith('.')) {
-            if (!pattern.hasMore()) {
-                this.matches.add(e, absolute, false);
-            }
-            if (e.canReaddir()) {
-                // if we're in follow mode or it's not a symlink, just keep
-                // testing the same pattern. If there's more after the globstar,
-                // then this symlink consumes the globstar. If not, then we can
-                // follow at most ONE symlink along the way, so we mark it, which
-                // also checks to ensure that it wasn't already marked.
-                if (this.follow || !e.isSymbolicLink()) {
-                    this.subwalks.add(e, pattern);
-                }
-                else if (e.isSymbolicLink()) {
-                    if (rest && pattern.checkFollowGlobstar()) {
-                        this.subwalks.add(e, rest);
-                    }
-                    else if (pattern.markFollowGlobstar()) {
-                        this.subwalks.add(e, pattern);
-                    }
-                }
-            }
-        }
-        // if the NEXT thing matches this entry, then also add
-        // the rest.
-        if (rest) {
-            const rp = rest.pattern();
-            if (typeof rp === 'string' &&
-                // dots and empty were handled already
-                rp !== '..' &&
-                rp !== '' &&
-                rp !== '.') {
-                this.testString(e, rp, rest.rest(), absolute);
-            }
-            else if (rp === '..') {
-                /* c8 ignore start */
-                const ep = e.parent || e;
-                /* c8 ignore stop */
-                this.subwalks.add(ep, rest);
-            }
-            else if (rp instanceof RegExp) {
-                this.testRegExp(e, rp, rest.rest(), absolute);
-            }
-        }
-    }
-    testRegExp(e, p, rest, absolute) {
-        if (!p.test(e.name))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-    testString(e, p, rest, absolute) {
-        // should never happen?
-        if (!e.isNamed(p))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-}
-//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js b/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js
deleted file mode 100644
index 3d68196c4f175..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/walker.js
+++ /dev/null
@@ -1,381 +0,0 @@
-/**
- * Single-use utility classes to provide functionality to the {@link Glob}
- * methods.
- *
- * @module
- */
-import { Minipass } from 'minipass';
-import { Ignore } from './ignore.js';
-import { Processor } from './processor.js';
-const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts)
-    : Array.isArray(ignore) ? new Ignore(ignore, opts)
-        : ignore;
-/**
- * basic walking utilities that all the glob walker types use
- */
-export class GlobUtil {
-    path;
-    patterns;
-    opts;
-    seen = new Set();
-    paused = false;
-    aborted = false;
-    #onResume = [];
-    #ignore;
-    #sep;
-    signal;
-    maxDepth;
-    includeChildMatches;
-    constructor(patterns, path, opts) {
-        this.patterns = patterns;
-        this.path = path;
-        this.opts = opts;
-        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        if (opts.ignore || !this.includeChildMatches) {
-            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
-            if (!this.includeChildMatches &&
-                typeof this.#ignore.add !== 'function') {
-                const m = 'cannot ignore child matches, ignore lacks add() method.';
-                throw new Error(m);
-            }
-        }
-        // ignore, always set with maxDepth, but it's optional on the
-        // GlobOptions type
-        /* c8 ignore start */
-        this.maxDepth = opts.maxDepth || Infinity;
-        /* c8 ignore stop */
-        if (opts.signal) {
-            this.signal = opts.signal;
-            this.signal.addEventListener('abort', () => {
-                this.#onResume.length = 0;
-            });
-        }
-    }
-    #ignored(path) {
-        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
-    }
-    #childrenIgnored(path) {
-        return !!this.#ignore?.childrenIgnored?.(path);
-    }
-    // backpressure mechanism
-    pause() {
-        this.paused = true;
-    }
-    resume() {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore stop */
-        this.paused = false;
-        let fn = undefined;
-        while (!this.paused && (fn = this.#onResume.shift())) {
-            fn();
-        }
-    }
-    onResume(fn) {
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore start */
-        if (!this.paused) {
-            fn();
-        }
-        else {
-            /* c8 ignore stop */
-            this.#onResume.push(fn);
-        }
-    }
-    // do the requisite realpath/stat checking, and return the path
-    // to add or undefined to filter it out.
-    async matchCheck(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || (await e.realpath());
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? await e.lstat() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = await s.realpath();
-            /* c8 ignore start */
-            if (target && (target.isUnknown() || this.opts.stat)) {
-                await target.lstat();
-            }
-            /* c8 ignore stop */
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchCheckTest(e, ifDir) {
-        return (e &&
-            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
-            (!ifDir || e.canReaddir()) &&
-            (!this.opts.nodir || !e.isDirectory()) &&
-            (!this.opts.nodir ||
-                !this.opts.follow ||
-                !e.isSymbolicLink() ||
-                !e.realpathCached()?.isDirectory()) &&
-            !this.#ignored(e)) ?
-            e
-            : undefined;
-    }
-    matchCheckSync(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || e.realpathSync();
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? e.lstatSync() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = s.realpathSync();
-            if (target && (target?.isUnknown() || this.opts.stat)) {
-                target.lstatSync();
-            }
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchFinish(e, absolute) {
-        if (this.#ignored(e))
-            return;
-        // we know we have an ignore if this is false, but TS doesn't
-        if (!this.includeChildMatches && this.#ignore?.add) {
-            const ign = `${e.relativePosix()}/**`;
-            this.#ignore.add(ign);
-        }
-        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
-        this.seen.add(e);
-        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
-        // ok, we have what we need!
-        if (this.opts.withFileTypes) {
-            this.matchEmit(e);
-        }
-        else if (abs) {
-            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
-            this.matchEmit(abs + mark);
-        }
-        else {
-            const rel = this.opts.posix ? e.relativePosix() : e.relative();
-            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
-                '.' + this.#sep
-                : '';
-            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
-        }
-    }
-    async match(e, absolute, ifDir) {
-        const p = await this.matchCheck(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    matchSync(e, absolute, ifDir) {
-        const p = this.matchCheckSync(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    walkCB(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2(target, patterns, new Processor(this.opts), cb);
-    }
-    walkCB2(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const childrenCached = t.readdirCached();
-            if (t.calledReaddir())
-                this.walkCB3(t, childrenCached, processor, next);
-            else {
-                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
-            }
-        }
-        next();
-    }
-    walkCB3(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-    walkCBSync(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2Sync(target, patterns, new Processor(this.opts), cb);
-    }
-    walkCB2Sync(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const children = t.readdirSync();
-            this.walkCB3Sync(t, children, processor, next);
-        }
-        next();
-    }
-    walkCB3Sync(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2Sync(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-}
-export class GlobWalker extends GlobUtil {
-    matches = new Set();
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-    }
-    matchEmit(e) {
-        this.matches.add(e);
-    }
-    async walk() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            await this.path.lstat();
-        }
-        await new Promise((res, rej) => {
-            this.walkCB(this.path, this.patterns, () => {
-                if (this.signal?.aborted) {
-                    rej(this.signal.reason);
-                }
-                else {
-                    res(this.matches);
-                }
-            });
-        });
-        return this.matches;
-    }
-    walkSync() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        // nothing for the callback to do, because this never pauses
-        this.walkCBSync(this.path, this.patterns, () => {
-            if (this.signal?.aborted)
-                throw this.signal.reason;
-        });
-        return this.matches;
-    }
-}
-export class GlobStream extends GlobUtil {
-    results;
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-        this.results = new Minipass({
-            signal: this.signal,
-            objectMode: true,
-        });
-        this.results.on('drain', () => this.resume());
-        this.results.on('resume', () => this.resume());
-    }
-    matchEmit(e) {
-        this.results.write(e);
-        if (!this.results.flowing)
-            this.pause();
-    }
-    stream() {
-        const target = this.path;
-        if (target.isUnknown()) {
-            target.lstat().then(() => {
-                this.walkCB(target, this.patterns, () => this.results.end());
-            });
-        }
-        else {
-            this.walkCB(target, this.patterns, () => this.results.end());
-        }
-        return this.results;
-    }
-    streamSync() {
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        this.walkCBSync(this.path, this.patterns, () => this.results.end());
-        return this.results;
-    }
-}
-//# sourceMappingURL=walker.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/package.json b/node_modules/@npmcli/package-json/node_modules/glob/package.json
deleted file mode 100644
index 7be2c53bd5c9f..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/glob/package.json
+++ /dev/null
@@ -1,97 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (https://blog.izs.me/)",
-  "name": "glob",
-  "description": "the most correct and second fastest glob implementation in JavaScript",
-  "version": "11.0.3",
-  "type": "module",
-  "tshy": {
-    "main": true,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "bin": "./dist/esm/bin.mjs",
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-glob.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "npm run benchclean; git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts",
-    "profclean": "rm -f v8.log profile.txt",
-    "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts",
-    "prebench": "npm run prepare",
-    "bench": "bash benchmark.sh",
-    "preprof": "npm run prepare",
-    "prof": "bash prof.sh",
-    "benchclean": "node benchclean.cjs"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "dependencies": {
-    "foreground-child": "^3.3.1",
-    "jackspeak": "^4.1.1",
-    "minimatch": "^10.0.3",
-    "minipass": "^7.1.2",
-    "package-json-from-dist": "^1.0.0",
-    "path-scurry": "^2.0.0"
-  },
-  "devDependencies": {
-    "@types/node": "^24.0.1",
-    "memfs": "^4.17.2",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.5.3",
-    "rimraf": "^6.0.1",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
-  },
-  "tap": {
-    "before": "test/00-setup.ts"
-  },
-  "license": "ISC",
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md b/node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md
deleted file mode 100644
index 8cb5cc6e616c0..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/jackspeak/LICENSE.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules. The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice. If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-**_As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim._**
diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js
deleted file mode 100644
index 543412746cc8f..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/index.js
+++ /dev/null
@@ -1,947 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0;
-const node_util_1 = require("node:util");
-// it's a tiny API, just cast it inline, it's fine
-//@ts-ignore
-const cliui_1 = __importDefault(require("@isaacs/cliui"));
-const node_path_1 = require("node:path");
-const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-exports.isConfigType = isConfigType;
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isValidOption = (v, vo) => !!vo &&
-    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based only
- * on its `type` and `multiple` property
- */
-const isConfigOptionOfType = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    (0, exports.isConfigType)(o.type) &&
-    o.type === type &&
-    !!o.multiple === multi;
-exports.isConfigOptionOfType = isConfigOptionOfType;
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based on
- * it having all valid properties
- */
-const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi));
-exports.isConfigOption = isConfigOption;
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
-const width = Math.min(process?.stdout?.columns ?? 80, 80);
-// indentation spaces from heading level
-const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-    .join(' ')
-    .trim()
-    .toUpperCase()
-    .replace(/ /g, '_');
-const toEnvVal = (value, delim = '\n') => {
-    const str = typeof value === 'string' ? value
-        : typeof value === 'boolean' ?
-            value ? '1'
-                : '0'
-            : typeof value === 'number' ? String(value)
-                : Array.isArray(value) ?
-                    value.map((v) => toEnvVal(v)).join(delim)
-                    : /* c8 ignore start */ undefined;
-    if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
-    }
-    /* c8 ignore stop */
-    return str;
-};
-const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
-    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
-        : []
-    : type === 'string' ? env
-        : type === 'boolean' ? env === '1'
-            : +env.trim());
-const undefOrType = (v, t) => v === undefined || typeof v === t;
-const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
-// print the value type, for error message reporting
-const valueType = (v) => typeof v === 'string' ? 'string'
-    : typeof v === 'boolean' ? 'boolean'
-        : typeof v === 'number' ? 'number'
-            : Array.isArray(v) ?
-                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
-                : `${v.type}${v.multiple ? '[]' : ''}`;
-const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
-    types[0]
-    : `(${types.join('|')})`;
-const validateFieldMeta = (field, fieldMeta) => {
-    if (fieldMeta) {
-        if (field.type !== undefined && field.type !== fieldMeta.type) {
-            throw new TypeError(`invalid type`, {
-                cause: {
-                    found: field.type,
-                    wanted: [fieldMeta.type, undefined],
-                },
-            });
-        }
-        if (field.multiple !== undefined &&
-            !!field.multiple !== fieldMeta.multiple) {
-            throw new TypeError(`invalid multiple`, {
-                cause: {
-                    found: field.multiple,
-                    wanted: [fieldMeta.multiple, undefined],
-                },
-            });
-        }
-        return fieldMeta;
-    }
-    if (!(0, exports.isConfigType)(field.type)) {
-        throw new TypeError(`invalid type`, {
-            cause: {
-                found: field.type,
-                wanted: ['string', 'number', 'boolean'],
-            },
-        });
-    }
-    return {
-        type: field.type,
-        multiple: !!field.multiple,
-    };
-};
-const validateField = (o, type, multiple) => {
-    const validateValidOptions = (def, validOptions) => {
-        if (!undefOrTypeArray(validOptions, type)) {
-            throw new TypeError('invalid validOptions', {
-                cause: {
-                    found: validOptions,
-                    wanted: valueType({ type, multiple: true }),
-                },
-            });
-        }
-        if (def !== undefined && validOptions !== undefined) {
-            const valid = Array.isArray(def) ?
-                def.every(v => validOptions.includes(v))
-                : validOptions.includes(def);
-            if (!valid) {
-                throw new TypeError('invalid default value not in validOptions', {
-                    cause: {
-                        found: def,
-                        wanted: validOptions,
-                    },
-                });
-            }
-        }
-    };
-    if (o.default !== undefined &&
-        !isValidValue(o.default, type, multiple)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: o.default,
-                wanted: valueType({ type, multiple }),
-            },
-        });
-    }
-    if ((0, exports.isConfigOptionOfType)(o, 'number', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'number', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if ((0, exports.isConfigOptionOfType)(o, 'string', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'string', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'boolean', true)) {
-        if (o.hint !== undefined) {
-            throw new TypeError('cannot provide hint for flag');
-        }
-        if (o.validOptions !== undefined) {
-            throw new TypeError('cannot provide validOptions for flag');
-        }
-    }
-    return o;
-};
-const toParseArgsOptionsConfig = (options) => {
-    return Object.entries(options).reduce((acc, [longOption, o]) => {
-        const p = {
-            type: 'string',
-            multiple: !!o.multiple,
-            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
-        };
-        const setNoBool = () => {
-            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
-                acc[`no-${longOption}`] = {
-                    type: 'boolean',
-                    multiple: !!o.multiple,
-                };
-            }
-        };
-        const setDefault = (def, fn) => {
-            if (def !== undefined) {
-                p.default = fn(def);
-            }
-        };
-        if ((0, exports.isConfigOption)(o, 'number', false)) {
-            setDefault(o.default, String);
-        }
-        else if ((0, exports.isConfigOption)(o, 'number', true)) {
-            setDefault(o.default, d => d.map(v => String(v)));
-        }
-        else if ((0, exports.isConfigOption)(o, 'string', false) ||
-            (0, exports.isConfigOption)(o, 'string', true)) {
-            setDefault(o.default, v => v);
-        }
-        else if ((0, exports.isConfigOption)(o, 'boolean', false) ||
-            (0, exports.isConfigOption)(o, 'boolean', true)) {
-            p.type = 'boolean';
-            setDefault(o.default, v => v);
-            setNoBool();
-        }
-        acc[longOption] = p;
-        return acc;
-    }, {});
-};
-/**
- * Class returned by the {@link jack} function and all configuration
- * definition methods.  This is what gets chained together.
- */
-class Jack {
-    #configSet;
-    #shorts;
-    #options;
-    #fields = [];
-    #env;
-    #envPrefix;
-    #allowPositionals;
-    #usage;
-    #usageMarkdown;
-    constructor(options = {}) {
-        this.#options = options;
-        this.#allowPositionals = options.allowPositionals !== false;
-        this.#env =
-            this.#options.env === undefined ? process.env : this.#options.env;
-        this.#envPrefix = options.envPrefix;
-        // We need to fib a little, because it's always the same object, but it
-        // starts out as having an empty config set.  Then each method that adds
-        // fields returns `this as Jack`
-        this.#configSet = Object.create(null);
-        this.#shorts = Object.create(null);
-    }
-    /**
-     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
-     * but also including `description` and `short` fields, if set.
-     */
-    get definitions() {
-        return this.#configSet;
-    }
-    /** map of `{ <short>: <name> }` strings for each short name defined */
-    get shorts() {
-        return this.#shorts;
-    }
-    /**
-     * options passed to the {@link Jack} constructor
-     */
-    get jackOptions() {
-        return this.#options;
-    }
-    /**
-     * the data used to generate {@link Jack#usage} and
-     * {@link Jack#usageMarkdown} content.
-     */
-    get usageFields() {
-        return this.#fields;
-    }
-    /**
-     * Set the default value (which will still be overridden by env or cli)
-     * as if from a parsed config file. The optional `source` param, if
-     * provided, will be included in error messages if a value is invalid or
-     * unknown.
-     */
-    setConfigValues(values, source = '') {
-        try {
-            this.validate(values);
-        }
-        catch (er) {
-            if (source && er instanceof Error) {
-                /* c8 ignore next */
-                const cause = typeof er.cause === 'object' ? er.cause : {};
-                er.cause = { ...cause, path: source };
-                Error.captureStackTrace(er, this.setConfigValues);
-            }
-            throw er;
-        }
-        for (const [field, value] of Object.entries(values)) {
-            const my = this.#configSet[field];
-            // already validated, just for TS's benefit
-            /* c8 ignore start */
-            if (!my) {
-                throw new Error('unexpected field in config set: ' + field, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        found: field,
-                    },
-                });
-            }
-            /* c8 ignore stop */
-            my.default = value;
-        }
-        return this;
-    }
-    /**
-     * Parse a string of arguments, and return the resulting
-     * `{ values, positionals }` object.
-     *
-     * If an {@link JackOptions#envPrefix} is set, then it will read default
-     * values from the environment, and write the resulting values back
-     * to the environment as well.
-     *
-     * Environment values always take precedence over any other value, except
-     * an explicit CLI setting.
-     */
-    parse(args = process.argv) {
-        this.loadEnvDefaults();
-        const p = this.parseRaw(args);
-        this.applyDefaults(p);
-        this.writeEnv(p);
-        return p;
-    }
-    loadEnvDefaults() {
-        if (this.#envPrefix) {
-            for (const [field, my] of Object.entries(this.#configSet)) {
-                const ek = toEnvKey(this.#envPrefix, field);
-                const env = this.#env[ek];
-                if (env !== undefined) {
-                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
-                }
-            }
-        }
-    }
-    applyDefaults(p) {
-        for (const [field, c] of Object.entries(this.#configSet)) {
-            if (c.default !== undefined && !(field in p.values)) {
-                //@ts-ignore
-                p.values[field] = c.default;
-            }
-        }
-    }
-    /**
-     * Only parse the command line arguments passed in.
-     * Does not strip off the `node script.js` bits, so it must be just the
-     * arguments you wish to have parsed.
-     * Does not read from or write to the environment, or set defaults.
-     */
-    parseRaw(args) {
-        if (args === process.argv) {
-            args = args.slice(process._eval !== undefined ? 1 : 2);
-        }
-        const result = (0, node_util_1.parseArgs)({
-            args,
-            options: toParseArgsOptionsConfig(this.#configSet),
-            // always strict, but using our own logic
-            strict: false,
-            allowPositionals: this.#allowPositionals,
-            tokens: true,
-        });
-        const p = {
-            values: {},
-            positionals: [],
-        };
-        for (const token of result.tokens) {
-            if (token.kind === 'positional') {
-                p.positionals.push(token.value);
-                if (this.#options.stopAtPositional ||
-                    this.#options.stopAtPositionalTest?.(token.value)) {
-                    p.positionals.push(...args.slice(token.index + 1));
-                    break;
-                }
-            }
-            else if (token.kind === 'option') {
-                let value = undefined;
-                if (token.name.startsWith('no-')) {
-                    const my = this.#configSet[token.name];
-                    const pname = token.name.substring('no-'.length);
-                    const pos = this.#configSet[pname];
-                    if (pos &&
-                        pos.type === 'boolean' &&
-                        (!my ||
-                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
-                        value = false;
-                        token.name = pname;
-                    }
-                }
-                const my = this.#configSet[token.name];
-                if (!my) {
-                    throw new Error(`Unknown option '${token.rawName}'. ` +
-                        `To specify a positional argument starting with a '-', ` +
-                        `place it at the end of the command after '--', as in ` +
-                        `'-- ${token.rawName}'`, {
-                        cause: {
-                            code: 'JACKSPEAK',
-                            found: token.rawName + (token.value ? `=${token.value}` : ''),
-                        },
-                    });
-                }
-                if (value === undefined) {
-                    if (token.value === undefined) {
-                        if (my.type !== 'boolean') {
-                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
-                                cause: {
-                                    code: 'JACKSPEAK',
-                                    name: token.rawName,
-                                    wanted: valueType(my),
-                                },
-                            });
-                        }
-                        value = true;
-                    }
-                    else {
-                        if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
-                        }
-                        if (my.type === 'string') {
-                            value = token.value;
-                        }
-                        else {
-                            value = +token.value;
-                            if (value !== value) {
-                                throw new Error(`Invalid value '${token.value}' provided for ` +
-                                    `'${token.rawName}' option, expected number`, {
-                                    cause: {
-                                        code: 'JACKSPEAK',
-                                        name: token.rawName,
-                                        found: token.value,
-                                        wanted: 'number',
-                                    },
-                                });
-                            }
-                        }
-                    }
-                }
-                if (my.multiple) {
-                    const pv = p.values;
-                    const tn = pv[token.name] ?? [];
-                    pv[token.name] = tn;
-                    tn.push(value);
-                }
-                else {
-                    const pv = p.values;
-                    pv[token.name] = value;
-                }
-            }
-        }
-        for (const [field, value] of Object.entries(p.values)) {
-            const valid = this.#configSet[field]?.validate;
-            const validOptions = this.#configSet[field]?.validOptions;
-            const cause = validOptions && !isValidOption(value, validOptions) ?
-                { name: field, found: value, validOptions }
-                : valid && !valid(value) ? { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
-            }
-        }
-        return p;
-    }
-    /**
-     * do not set fields as 'no-foo' if 'foo' exists and both are bools
-     * just set foo.
-     */
-    #noNoFields(f, val, s = f) {
-        if (!f.startsWith('no-') || typeof val !== 'boolean')
-            return;
-        const yes = f.substring('no-'.length);
-        // recurse so we get the core config key we care about.
-        this.#noNoFields(yes, val, s);
-        if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
-        }
-    }
-    /**
-     * Validate that any arbitrary object is a valid configuration `values`
-     * object.  Useful when loading config files or other sources.
-     */
-    validate(o) {
-        if (!o || typeof o !== 'object') {
-            throw new Error('Invalid config: not an object', {
-                cause: { code: 'JACKSPEAK', found: o },
-            });
-        }
-        const opts = o;
-        for (const field in o) {
-            const value = opts[field];
-            /* c8 ignore next - for TS */
-            if (value === undefined)
-                continue;
-            this.#noNoFields(field, value);
-            const config = this.#configSet[field];
-            if (!config) {
-                throw new Error(`Unknown config option: ${field}`, {
-                    cause: { code: 'JACKSPEAK', found: field },
-                });
-            }
-            if (!isValidValue(value, config.type, !!config.multiple)) {
-                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        name: field,
-                        found: value,
-                        wanted: valueType(config),
-                    },
-                });
-            }
-            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
-                { name: field, found: value, validOptions: config.validOptions }
-                : config.validate && !config.validate(value) ?
-                    { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause: { ...cause, code: 'JACKSPEAK' },
-                });
-            }
-        }
-    }
-    writeEnv(p) {
-        if (!this.#env || !this.#envPrefix)
-            return;
-        for (const [field, value] of Object.entries(p.values)) {
-            const my = this.#configSet[field];
-            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
-        }
-    }
-    /**
-     * Add a heading to the usage output banner
-     */
-    heading(text, level, { pre = false } = {}) {
-        if (level === undefined) {
-            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
-        }
-        this.#fields.push({ type: 'heading', text, level, pre });
-        return this;
-    }
-    /**
-     * Add a long-form description to the usage output at this position.
-     */
-    description(text, { pre } = {}) {
-        this.#fields.push({ type: 'description', text, pre });
-        return this;
-    }
-    /**
-     * Add one or more number fields.
-     */
-    num(fields) {
-        return this.#addFieldsWith(fields, 'number', false);
-    }
-    /**
-     * Add one or more multiple number fields.
-     */
-    numList(fields) {
-        return this.#addFieldsWith(fields, 'number', true);
-    }
-    /**
-     * Add one or more string option fields.
-     */
-    opt(fields) {
-        return this.#addFieldsWith(fields, 'string', false);
-    }
-    /**
-     * Add one or more multiple string option fields.
-     */
-    optList(fields) {
-        return this.#addFieldsWith(fields, 'string', true);
-    }
-    /**
-     * Add one or more flag fields.
-     */
-    flag(fields) {
-        return this.#addFieldsWith(fields, 'boolean', false);
-    }
-    /**
-     * Add one or more multiple flag fields.
-     */
-    flagList(fields) {
-        return this.#addFieldsWith(fields, 'boolean', true);
-    }
-    /**
-     * Generic field definition method. Similar to flag/flagList/number/etc,
-     * but you must specify the `type` (and optionally `multiple` and `delim`)
-     * fields on each one, or Jack won't know how to define them.
-     */
-    addFields(fields) {
-        return this.#addFields(this, fields);
-    }
-    #addFieldsWith(fields, type, multiple) {
-        return this.#addFields(this, fields, {
-            type,
-            multiple,
-        });
-    }
-    #addFields(next, fields, opt) {
-        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
-            this.#validateName(name, field);
-            const { type, multiple } = validateFieldMeta(field, opt);
-            const value = { ...field, type, multiple };
-            validateField(value, type, multiple);
-            next.#fields.push({ type: 'config', name, value });
-            return [name, value];
-        })));
-        return next;
-    }
-    #validateName(name, field) {
-        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
-            throw new TypeError(`Invalid option name: ${name}, ` +
-                `must be '-' delimited ASCII alphanumeric`);
-        }
-        if (this.#configSet[name]) {
-            throw new TypeError(`Cannot redefine option ${field}`);
-        }
-        if (this.#shorts[name]) {
-            throw new TypeError(`Cannot redefine option ${name}, already ` +
-                `in use for ${this.#shorts[name]}`);
-        }
-        if (field.short) {
-            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    'must be 1 ASCII alphanumeric character');
-            }
-            if (this.#shorts[field.short]) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    `already in use for ${this.#shorts[field.short]}`);
-            }
-            this.#shorts[field.short] = name;
-            this.#shorts[name] = name;
-        }
-    }
-    /**
-     * Return the usage banner for the given configuration
-     */
-    usage() {
-        if (this.#usage)
-            return this.#usage;
-        let headingLevel = 1;
-        //@ts-ignore
-        const ui = (0, cliui_1.default)({ width });
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            ui.div({
-                padding: [0, 0, 0, 0],
-                text: normalize(first.text),
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
-        if (this.#options.usage) {
-            ui.div({
-                text: this.#options.usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        else {
-            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            ui.div({
-                text: usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: '' });
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            const print = normalize(maybeDesc.text, maybeDesc.pre);
-            start++;
-            ui.div({ padding: [0, 0, 0, 0], text: print });
-            ui.div({ padding: [0, 0, 0, 0], text: '' });
-        }
-        const { rows, maxWidth } = this.#usageRows(start);
-        // every heading/description after the first gets indented by 2
-        // extra spaces.
-        for (const row of rows) {
-            if (row.left) {
-                // If the row is too long, don't wrap it
-                // Bump the right-hand side down a line to make room
-                const configIndent = indent(Math.max(headingLevel, 2));
-                if (row.left.length > maxWidth - 3) {
-                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
-                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
-                }
-                else {
-                    ui.div({
-                        text: row.left,
-                        padding: [0, 1, 0, configIndent],
-                        width: maxWidth,
-                    }, { padding: [0, 0, 0, 0], text: row.text });
-                }
-                if (row.skipLine) {
-                    ui.div({ padding: [0, 0, 0, 0], text: '' });
-                }
-            }
-            else {
-                if (isHeading(row)) {
-                    const { level } = row;
-                    headingLevel = level;
-                    // only h1 and h2 have bottom padding
-                    // h3-h6 do not
-                    const b = level <= 2 ? 1 : 0;
-                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
-                }
-                else {
-                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
-                }
-            }
-        }
-        return (this.#usage = ui.toString());
-    }
-    /**
-     * Return the usage banner markdown for the given configuration
-     */
-    usageMarkdown() {
-        if (this.#usageMarkdown)
-            return this.#usageMarkdown;
-        const out = [];
-        let headingLevel = 1;
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            out.push(`# ${normalizeOneLine(first.text)}`);
-        }
-        out.push('Usage:');
-        if (this.#options.usage) {
-            out.push(normalizeMarkdown(this.#options.usage, true));
-        }
-        else {
-            const cmd = (0, node_path_1.basename)(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            out.push(normalizeMarkdown(usage, true));
-        }
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
-            start++;
-        }
-        const { rows } = this.#usageRows(start);
-        // heading level in markdown is number of # ahead of text
-        for (const row of rows) {
-            if (row.left) {
-                out.push('#'.repeat(headingLevel + 1) +
-                    ' ' +
-                    normalizeOneLine(row.left, true));
-                if (row.text)
-                    out.push(normalizeMarkdown(row.text));
-            }
-            else if (isHeading(row)) {
-                const { level } = row;
-                headingLevel = level;
-                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
-            }
-            else {
-                out.push(normalizeMarkdown(row.text, !!row.pre));
-            }
-        }
-        return (this.#usageMarkdown = out.join('\n\n') + '\n');
-    }
-    #usageRows(start) {
-        // turn each config type into a row, and figure out the width of the
-        // left hand indentation for the option descriptions.
-        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
-        let maxWidth = 8;
-        let prev = undefined;
-        const rows = [];
-        for (const field of this.#fields.slice(start)) {
-            if (field.type !== 'config') {
-                if (prev?.type === 'config')
-                    prev.skipLine = true;
-                prev = undefined;
-                field.text = normalize(field.text, !!field.pre);
-                rows.push(field);
-                continue;
-            }
-            const { value } = field;
-            const desc = value.description || '';
-            const mult = value.multiple ? 'Can be set multiple times' : '';
-            const opts = value.validOptions?.length ?
-                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
-                : '';
-            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
-            const extra = [opts, mult].join(dmDelim).trim();
-            const text = (normalize(desc) + dmDelim + extra).trim();
-            const hint = value.hint ||
-                (value.type === 'number' ? 'n'
-                    : value.type === 'string' ? field.name
-                        : undefined);
-            const short = !value.short ? ''
-                : value.type === 'boolean' ? `-${value.short} `
-                    : `-${value.short}<${hint}> `;
-            const left = value.type === 'boolean' ?
-                `${short}--${field.name}`
-                : `${short}--${field.name}=<${hint}>`;
-            const row = { text, left, type: 'config' };
-            if (text.length > width - maxMax) {
-                row.skipLine = true;
-            }
-            if (prev && left.length > maxMax)
-                prev.skipLine = true;
-            prev = row;
-            const len = left.length + 4;
-            if (len > maxWidth && len < maxMax) {
-                maxWidth = len;
-            }
-            rows.push(row);
-        }
-        return { rows, maxWidth };
-    }
-    /**
-     * Return the configuration options as a plain object
-     */
-    toJSON() {
-        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
-            field,
-            {
-                type: def.type,
-                ...(def.multiple ? { multiple: true } : {}),
-                ...(def.delim ? { delim: def.delim } : {}),
-                ...(def.short ? { short: def.short } : {}),
-                ...(def.description ?
-                    { description: normalize(def.description) }
-                    : {}),
-                ...(def.validate ? { validate: def.validate } : {}),
-                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
-                ...(def.default !== undefined ? { default: def.default } : {}),
-                ...(def.hint ? { hint: def.hint } : {}),
-            },
-        ]));
-    }
-    /**
-     * Custom printer for `util.inspect`
-     */
-    [node_util_1.inspect.custom](_, options) {
-        return `Jack ${(0, node_util_1.inspect)(this.toJSON(), options)}`;
-    }
-}
-exports.Jack = Jack;
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-const jack = (options = {}) => new Jack(options);
-exports.jack = jack;
-// Unwrap and un-indent, so we can wrap description
-// strings however makes them look nice in the code.
-const normalize = (s, pre = false) => {
-    if (pre)
-        // prepend a ZWSP to each line so cliui doesn't strip it.
-        return s
-            .split('\n')
-            .map(l => `\u200b${l}`)
-            .join('\n');
-    return s
-        .split(/^\s*```\s*$/gm)
-        .map((s, i) => {
-        if (i % 2 === 1) {
-            if (!s.trim()) {
-                return `\`\`\`\n\`\`\`\n`;
-            }
-            // outdent the ``` blocks, but preserve whitespace otherwise.
-            const split = s.split('\n');
-            // throw out the \n at the start and end
-            split.pop();
-            split.shift();
-            const si = split.reduce((shortest, l) => {
-                /* c8 ignore next */
-                const ind = l.match(/^\s*/)?.[0] ?? '';
-                if (ind.length)
-                    return Math.min(ind.length, shortest);
-                else
-                    return shortest;
-            }, Infinity);
-            /* c8 ignore next */
-            const i = isFinite(si) ? si : 0;
-            return ('\n```\n' +
-                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
-                '\n```\n');
-        }
-        return (s
-            // remove single line breaks, except for lists
-            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
-            // normalize mid-line whitespace
-            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
-            // two line breaks are enough
-            .replace(/\n{3,}/g, '\n\n')
-            // remove any spaces at the start of a line
-            .replace(/\n[ \t]+/g, '\n')
-            .trim());
-    })
-        .join('\n');
-};
-// normalize for markdown printing, remove leading spaces on lines
-const normalizeMarkdown = (s, pre = false) => {
-    const n = normalize(s, pre).replace(/\\/g, '\\\\');
-    return pre ?
-        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
-        : n.replace(/\n +/g, '\n').trim();
-};
-const normalizeOneLine = (s, pre = false) => {
-    const n = normalize(s, pre)
-        .replace(/[\s\u200b]+/g, ' ')
-        .trim();
-    return pre ? `\`${n}\`` : n;
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js
deleted file mode 100644
index b959f5126423c..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/index.js
+++ /dev/null
@@ -1,936 +0,0 @@
-import { inspect, parseArgs, } from 'node:util';
-// it's a tiny API, just cast it inline, it's fine
-//@ts-ignore
-import cliui from '@isaacs/cliui';
-import { basename } from 'node:path';
-export const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isValidOption = (v, vo) => !!vo &&
-    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based only
- * on its `type` and `multiple` property
- */
-export const isConfigOptionOfType = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    isConfigType(o.type) &&
-    o.type === type &&
-    !!o.multiple === multi;
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based on
- * it having all valid properties
- */
-export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi));
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
-const width = Math.min(process?.stdout?.columns ?? 80, 80);
-// indentation spaces from heading level
-const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-    .join(' ')
-    .trim()
-    .toUpperCase()
-    .replace(/ /g, '_');
-const toEnvVal = (value, delim = '\n') => {
-    const str = typeof value === 'string' ? value
-        : typeof value === 'boolean' ?
-            value ? '1'
-                : '0'
-            : typeof value === 'number' ? String(value)
-                : Array.isArray(value) ?
-                    value.map((v) => toEnvVal(v)).join(delim)
-                    : /* c8 ignore start */ undefined;
-    if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
-    }
-    /* c8 ignore stop */
-    return str;
-};
-const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
-    env ? env.split(delim).map(v => fromEnvVal(v, type, false))
-        : []
-    : type === 'string' ? env
-        : type === 'boolean' ? env === '1'
-            : +env.trim());
-const undefOrType = (v, t) => v === undefined || typeof v === t;
-const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
-// print the value type, for error message reporting
-const valueType = (v) => typeof v === 'string' ? 'string'
-    : typeof v === 'boolean' ? 'boolean'
-        : typeof v === 'number' ? 'number'
-            : Array.isArray(v) ?
-                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
-                : `${v.type}${v.multiple ? '[]' : ''}`;
-const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
-    types[0]
-    : `(${types.join('|')})`;
-const validateFieldMeta = (field, fieldMeta) => {
-    if (fieldMeta) {
-        if (field.type !== undefined && field.type !== fieldMeta.type) {
-            throw new TypeError(`invalid type`, {
-                cause: {
-                    found: field.type,
-                    wanted: [fieldMeta.type, undefined],
-                },
-            });
-        }
-        if (field.multiple !== undefined &&
-            !!field.multiple !== fieldMeta.multiple) {
-            throw new TypeError(`invalid multiple`, {
-                cause: {
-                    found: field.multiple,
-                    wanted: [fieldMeta.multiple, undefined],
-                },
-            });
-        }
-        return fieldMeta;
-    }
-    if (!isConfigType(field.type)) {
-        throw new TypeError(`invalid type`, {
-            cause: {
-                found: field.type,
-                wanted: ['string', 'number', 'boolean'],
-            },
-        });
-    }
-    return {
-        type: field.type,
-        multiple: !!field.multiple,
-    };
-};
-const validateField = (o, type, multiple) => {
-    const validateValidOptions = (def, validOptions) => {
-        if (!undefOrTypeArray(validOptions, type)) {
-            throw new TypeError('invalid validOptions', {
-                cause: {
-                    found: validOptions,
-                    wanted: valueType({ type, multiple: true }),
-                },
-            });
-        }
-        if (def !== undefined && validOptions !== undefined) {
-            const valid = Array.isArray(def) ?
-                def.every(v => validOptions.includes(v))
-                : validOptions.includes(def);
-            if (!valid) {
-                throw new TypeError('invalid default value not in validOptions', {
-                    cause: {
-                        found: def,
-                        wanted: validOptions,
-                    },
-                });
-            }
-        }
-    };
-    if (o.default !== undefined &&
-        !isValidValue(o.default, type, multiple)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: o.default,
-                wanted: valueType({ type, multiple }),
-            },
-        });
-    }
-    if (isConfigOptionOfType(o, 'number', false) ||
-        isConfigOptionOfType(o, 'number', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if (isConfigOptionOfType(o, 'string', false) ||
-        isConfigOptionOfType(o, 'string', true)) {
-        validateValidOptions(o.default, o.validOptions);
-    }
-    else if (isConfigOptionOfType(o, 'boolean', false) ||
-        isConfigOptionOfType(o, 'boolean', true)) {
-        if (o.hint !== undefined) {
-            throw new TypeError('cannot provide hint for flag');
-        }
-        if (o.validOptions !== undefined) {
-            throw new TypeError('cannot provide validOptions for flag');
-        }
-    }
-    return o;
-};
-const toParseArgsOptionsConfig = (options) => {
-    return Object.entries(options).reduce((acc, [longOption, o]) => {
-        const p = {
-            type: 'string',
-            multiple: !!o.multiple,
-            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
-        };
-        const setNoBool = () => {
-            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
-                acc[`no-${longOption}`] = {
-                    type: 'boolean',
-                    multiple: !!o.multiple,
-                };
-            }
-        };
-        const setDefault = (def, fn) => {
-            if (def !== undefined) {
-                p.default = fn(def);
-            }
-        };
-        if (isConfigOption(o, 'number', false)) {
-            setDefault(o.default, String);
-        }
-        else if (isConfigOption(o, 'number', true)) {
-            setDefault(o.default, d => d.map(v => String(v)));
-        }
-        else if (isConfigOption(o, 'string', false) ||
-            isConfigOption(o, 'string', true)) {
-            setDefault(o.default, v => v);
-        }
-        else if (isConfigOption(o, 'boolean', false) ||
-            isConfigOption(o, 'boolean', true)) {
-            p.type = 'boolean';
-            setDefault(o.default, v => v);
-            setNoBool();
-        }
-        acc[longOption] = p;
-        return acc;
-    }, {});
-};
-/**
- * Class returned by the {@link jack} function and all configuration
- * definition methods.  This is what gets chained together.
- */
-export class Jack {
-    #configSet;
-    #shorts;
-    #options;
-    #fields = [];
-    #env;
-    #envPrefix;
-    #allowPositionals;
-    #usage;
-    #usageMarkdown;
-    constructor(options = {}) {
-        this.#options = options;
-        this.#allowPositionals = options.allowPositionals !== false;
-        this.#env =
-            this.#options.env === undefined ? process.env : this.#options.env;
-        this.#envPrefix = options.envPrefix;
-        // We need to fib a little, because it's always the same object, but it
-        // starts out as having an empty config set.  Then each method that adds
-        // fields returns `this as Jack`
-        this.#configSet = Object.create(null);
-        this.#shorts = Object.create(null);
-    }
-    /**
-     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
-     * but also including `description` and `short` fields, if set.
-     */
-    get definitions() {
-        return this.#configSet;
-    }
-    /** map of `{ <short>: <long> }` strings for each short name defined */
-    get shorts() {
-        return this.#shorts;
-    }
-    /**
-     * options passed to the {@link Jack} constructor
-     */
-    get jackOptions() {
-        return this.#options;
-    }
-    /**
-     * the data used to generate {@link Jack#usage} and
-     * {@link Jack#usageMarkdown} content.
-     */
-    get usageFields() {
-        return this.#fields;
-    }
-    /**
-     * Set the default value (which will still be overridden by env or cli)
-     * as if from a parsed config file. The optional `source` param, if
-     * provided, will be included in error messages if a value is invalid or
-     * unknown.
-     */
-    setConfigValues(values, source = '') {
-        try {
-            this.validate(values);
-        }
-        catch (er) {
-            if (source && er instanceof Error) {
-                /* c8 ignore next */
-                const cause = typeof er.cause === 'object' ? er.cause : {};
-                er.cause = { ...cause, path: source };
-                Error.captureStackTrace(er, this.setConfigValues);
-            }
-            throw er;
-        }
-        for (const [field, value] of Object.entries(values)) {
-            const my = this.#configSet[field];
-            // already validated, just for TS's benefit
-            /* c8 ignore start */
-            if (!my) {
-                throw new Error('unexpected field in config set: ' + field, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        found: field,
-                    },
-                });
-            }
-            /* c8 ignore stop */
-            my.default = value;
-        }
-        return this;
-    }
-    /**
-     * Parse a string of arguments, and return the resulting
-     * `{ values, positionals }` object.
-     *
-     * If an {@link JackOptions#envPrefix} is set, then it will read default
-     * values from the environment, and write the resulting values back
-     * to the environment as well.
-     *
-     * Environment values always take precedence over any other value, except
-     * an explicit CLI setting.
-     */
-    parse(args = process.argv) {
-        this.loadEnvDefaults();
-        const p = this.parseRaw(args);
-        this.applyDefaults(p);
-        this.writeEnv(p);
-        return p;
-    }
-    loadEnvDefaults() {
-        if (this.#envPrefix) {
-            for (const [field, my] of Object.entries(this.#configSet)) {
-                const ek = toEnvKey(this.#envPrefix, field);
-                const env = this.#env[ek];
-                if (env !== undefined) {
-                    my.default = fromEnvVal(env, my.type, !!my.multiple, my.delim);
-                }
-            }
-        }
-    }
-    applyDefaults(p) {
-        for (const [field, c] of Object.entries(this.#configSet)) {
-            if (c.default !== undefined && !(field in p.values)) {
-                //@ts-ignore
-                p.values[field] = c.default;
-            }
-        }
-    }
-    /**
-     * Only parse the command line arguments passed in.
-     * Does not strip off the `node script.js` bits, so it must be just the
-     * arguments you wish to have parsed.
-     * Does not read from or write to the environment, or set defaults.
-     */
-    parseRaw(args) {
-        if (args === process.argv) {
-            args = args.slice(process._eval !== undefined ? 1 : 2);
-        }
-        const result = parseArgs({
-            args,
-            options: toParseArgsOptionsConfig(this.#configSet),
-            // always strict, but using our own logic
-            strict: false,
-            allowPositionals: this.#allowPositionals,
-            tokens: true,
-        });
-        const p = {
-            values: {},
-            positionals: [],
-        };
-        for (const token of result.tokens) {
-            if (token.kind === 'positional') {
-                p.positionals.push(token.value);
-                if (this.#options.stopAtPositional ||
-                    this.#options.stopAtPositionalTest?.(token.value)) {
-                    p.positionals.push(...args.slice(token.index + 1));
-                    break;
-                }
-            }
-            else if (token.kind === 'option') {
-                let value = undefined;
-                if (token.name.startsWith('no-')) {
-                    const my = this.#configSet[token.name];
-                    const pname = token.name.substring('no-'.length);
-                    const pos = this.#configSet[pname];
-                    if (pos &&
-                        pos.type === 'boolean' &&
-                        (!my ||
-                            (my.type === 'boolean' && !!my.multiple === !!pos.multiple))) {
-                        value = false;
-                        token.name = pname;
-                    }
-                }
-                const my = this.#configSet[token.name];
-                if (!my) {
-                    throw new Error(`Unknown option '${token.rawName}'. ` +
-                        `To specify a positional argument starting with a '-', ` +
-                        `place it at the end of the command after '--', as in ` +
-                        `'-- ${token.rawName}'`, {
-                        cause: {
-                            code: 'JACKSPEAK',
-                            found: token.rawName + (token.value ? `=${token.value}` : ''),
-                        },
-                    });
-                }
-                if (value === undefined) {
-                    if (token.value === undefined) {
-                        if (my.type !== 'boolean') {
-                            throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
-                                cause: {
-                                    code: 'JACKSPEAK',
-                                    name: token.rawName,
-                                    wanted: valueType(my),
-                                },
-                            });
-                        }
-                        value = true;
-                    }
-                    else {
-                        if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
-                        }
-                        if (my.type === 'string') {
-                            value = token.value;
-                        }
-                        else {
-                            value = +token.value;
-                            if (value !== value) {
-                                throw new Error(`Invalid value '${token.value}' provided for ` +
-                                    `'${token.rawName}' option, expected number`, {
-                                    cause: {
-                                        code: 'JACKSPEAK',
-                                        name: token.rawName,
-                                        found: token.value,
-                                        wanted: 'number',
-                                    },
-                                });
-                            }
-                        }
-                    }
-                }
-                if (my.multiple) {
-                    const pv = p.values;
-                    const tn = pv[token.name] ?? [];
-                    pv[token.name] = tn;
-                    tn.push(value);
-                }
-                else {
-                    const pv = p.values;
-                    pv[token.name] = value;
-                }
-            }
-        }
-        for (const [field, value] of Object.entries(p.values)) {
-            const valid = this.#configSet[field]?.validate;
-            const validOptions = this.#configSet[field]?.validOptions;
-            const cause = validOptions && !isValidOption(value, validOptions) ?
-                { name: field, found: value, validOptions }
-                : valid && !valid(value) ? { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
-            }
-        }
-        return p;
-    }
-    /**
-     * do not set fields as 'no-foo' if 'foo' exists and both are bools
-     * just set foo.
-     */
-    #noNoFields(f, val, s = f) {
-        if (!f.startsWith('no-') || typeof val !== 'boolean')
-            return;
-        const yes = f.substring('no-'.length);
-        // recurse so we get the core config key we care about.
-        this.#noNoFields(yes, val, s);
-        if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
-        }
-    }
-    /**
-     * Validate that any arbitrary object is a valid configuration `values`
-     * object.  Useful when loading config files or other sources.
-     */
-    validate(o) {
-        if (!o || typeof o !== 'object') {
-            throw new Error('Invalid config: not an object', {
-                cause: { code: 'JACKSPEAK', found: o },
-            });
-        }
-        const opts = o;
-        for (const field in o) {
-            const value = opts[field];
-            /* c8 ignore next - for TS */
-            if (value === undefined)
-                continue;
-            this.#noNoFields(field, value);
-            const config = this.#configSet[field];
-            if (!config) {
-                throw new Error(`Unknown config option: ${field}`, {
-                    cause: { code: 'JACKSPEAK', found: field },
-                });
-            }
-            if (!isValidValue(value, config.type, !!config.multiple)) {
-                throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        name: field,
-                        found: value,
-                        wanted: valueType(config),
-                    },
-                });
-            }
-            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
-                { name: field, found: value, validOptions: config.validOptions }
-                : config.validate && !config.validate(value) ?
-                    { name: field, found: value }
-                    : undefined;
-            if (cause) {
-                throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause: { ...cause, code: 'JACKSPEAK' },
-                });
-            }
-        }
-    }
-    writeEnv(p) {
-        if (!this.#env || !this.#envPrefix)
-            return;
-        for (const [field, value] of Object.entries(p.values)) {
-            const my = this.#configSet[field];
-            this.#env[toEnvKey(this.#envPrefix, field)] = toEnvVal(value, my?.delim);
-        }
-    }
-    /**
-     * Add a heading to the usage output banner
-     */
-    heading(text, level, { pre = false } = {}) {
-        if (level === undefined) {
-            level = this.#fields.some(r => isHeading(r)) ? 2 : 1;
-        }
-        this.#fields.push({ type: 'heading', text, level, pre });
-        return this;
-    }
-    /**
-     * Add a long-form description to the usage output at this position.
-     */
-    description(text, { pre } = {}) {
-        this.#fields.push({ type: 'description', text, pre });
-        return this;
-    }
-    /**
-     * Add one or more number fields.
-     */
-    num(fields) {
-        return this.#addFieldsWith(fields, 'number', false);
-    }
-    /**
-     * Add one or more multiple number fields.
-     */
-    numList(fields) {
-        return this.#addFieldsWith(fields, 'number', true);
-    }
-    /**
-     * Add one or more string option fields.
-     */
-    opt(fields) {
-        return this.#addFieldsWith(fields, 'string', false);
-    }
-    /**
-     * Add one or more multiple string option fields.
-     */
-    optList(fields) {
-        return this.#addFieldsWith(fields, 'string', true);
-    }
-    /**
-     * Add one or more flag fields.
-     */
-    flag(fields) {
-        return this.#addFieldsWith(fields, 'boolean', false);
-    }
-    /**
-     * Add one or more multiple flag fields.
-     */
-    flagList(fields) {
-        return this.#addFieldsWith(fields, 'boolean', true);
-    }
-    /**
-     * Generic field definition method. Similar to flag/flagList/number/etc,
-     * but you must specify the `type` (and optionally `multiple` and `delim`)
-     * fields on each one, or Jack won't know how to define them.
-     */
-    addFields(fields) {
-        return this.#addFields(this, fields);
-    }
-    #addFieldsWith(fields, type, multiple) {
-        return this.#addFields(this, fields, {
-            type,
-            multiple,
-        });
-    }
-    #addFields(next, fields, opt) {
-        Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
-            this.#validateName(name, field);
-            const { type, multiple } = validateFieldMeta(field, opt);
-            const value = { ...field, type, multiple };
-            validateField(value, type, multiple);
-            next.#fields.push({ type: 'config', name, value });
-            return [name, value];
-        })));
-        return next;
-    }
-    #validateName(name, field) {
-        if (!/^[a-zA-Z0-9]([a-zA-Z0-9-]*[a-zA-Z0-9])?$/.test(name)) {
-            throw new TypeError(`Invalid option name: ${name}, ` +
-                `must be '-' delimited ASCII alphanumeric`);
-        }
-        if (this.#configSet[name]) {
-            throw new TypeError(`Cannot redefine option ${field}`);
-        }
-        if (this.#shorts[name]) {
-            throw new TypeError(`Cannot redefine option ${name}, already ` +
-                `in use for ${this.#shorts[name]}`);
-        }
-        if (field.short) {
-            if (!/^[a-zA-Z0-9]$/.test(field.short)) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    'must be 1 ASCII alphanumeric character');
-            }
-            if (this.#shorts[field.short]) {
-                throw new TypeError(`Invalid ${name} short option: ${field.short}, ` +
-                    `already in use for ${this.#shorts[field.short]}`);
-            }
-            this.#shorts[field.short] = name;
-            this.#shorts[name] = name;
-        }
-    }
-    /**
-     * Return the usage banner for the given configuration
-     */
-    usage() {
-        if (this.#usage)
-            return this.#usage;
-        let headingLevel = 1;
-        //@ts-ignore
-        const ui = cliui({ width });
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            ui.div({
-                padding: [0, 0, 0, 0],
-                text: normalize(first.text),
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: 'Usage:' });
-        if (this.#options.usage) {
-            ui.div({
-                text: this.#options.usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        else {
-            const cmd = basename(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            ui.div({
-                text: usage,
-                padding: [0, 0, 0, 2],
-            });
-        }
-        ui.div({ padding: [0, 0, 0, 0], text: '' });
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            const print = normalize(maybeDesc.text, maybeDesc.pre);
-            start++;
-            ui.div({ padding: [0, 0, 0, 0], text: print });
-            ui.div({ padding: [0, 0, 0, 0], text: '' });
-        }
-        const { rows, maxWidth } = this.#usageRows(start);
-        // every heading/description after the first gets indented by 2
-        // extra spaces.
-        for (const row of rows) {
-            if (row.left) {
-                // If the row is too long, don't wrap it
-                // Bump the right-hand side down a line to make room
-                const configIndent = indent(Math.max(headingLevel, 2));
-                if (row.left.length > maxWidth - 3) {
-                    ui.div({ text: row.left, padding: [0, 0, 0, configIndent] });
-                    ui.div({ text: row.text, padding: [0, 0, 0, maxWidth] });
-                }
-                else {
-                    ui.div({
-                        text: row.left,
-                        padding: [0, 1, 0, configIndent],
-                        width: maxWidth,
-                    }, { padding: [0, 0, 0, 0], text: row.text });
-                }
-                if (row.skipLine) {
-                    ui.div({ padding: [0, 0, 0, 0], text: '' });
-                }
-            }
-            else {
-                if (isHeading(row)) {
-                    const { level } = row;
-                    headingLevel = level;
-                    // only h1 and h2 have bottom padding
-                    // h3-h6 do not
-                    const b = level <= 2 ? 1 : 0;
-                    ui.div({ ...row, padding: [0, 0, b, indent(level)] });
-                }
-                else {
-                    ui.div({ ...row, padding: [0, 0, 1, indent(headingLevel + 1)] });
-                }
-            }
-        }
-        return (this.#usage = ui.toString());
-    }
-    /**
-     * Return the usage banner markdown for the given configuration
-     */
-    usageMarkdown() {
-        if (this.#usageMarkdown)
-            return this.#usageMarkdown;
-        const out = [];
-        let headingLevel = 1;
-        const first = this.#fields[0];
-        let start = first?.type === 'heading' ? 1 : 0;
-        if (first?.type === 'heading') {
-            out.push(`# ${normalizeOneLine(first.text)}`);
-        }
-        out.push('Usage:');
-        if (this.#options.usage) {
-            out.push(normalizeMarkdown(this.#options.usage, true));
-        }
-        else {
-            const cmd = basename(String(process.argv[1]));
-            const shortFlags = [];
-            const shorts = [];
-            const flags = [];
-            const opts = [];
-            for (const [field, config] of Object.entries(this.#configSet)) {
-                if (config.short) {
-                    if (config.type === 'boolean')
-                        shortFlags.push(config.short);
-                    else
-                        shorts.push([config.short, config.hint || field]);
-                }
-                else {
-                    if (config.type === 'boolean')
-                        flags.push(field);
-                    else
-                        opts.push([field, config.hint || field]);
-                }
-            }
-            const sf = shortFlags.length ? ' -' + shortFlags.join('') : '';
-            const so = shorts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const lf = flags.map(k => ` --${k}`).join('');
-            const lo = opts.map(([k, v]) => ` --${k}=<${v}>`).join('');
-            const usage = `${cmd}${sf}${so}${lf}${lo}`.trim();
-            out.push(normalizeMarkdown(usage, true));
-        }
-        const maybeDesc = this.#fields[start];
-        if (maybeDesc && isDescription(maybeDesc)) {
-            out.push(normalizeMarkdown(maybeDesc.text, maybeDesc.pre));
-            start++;
-        }
-        const { rows } = this.#usageRows(start);
-        // heading level in markdown is number of # ahead of text
-        for (const row of rows) {
-            if (row.left) {
-                out.push('#'.repeat(headingLevel + 1) +
-                    ' ' +
-                    normalizeOneLine(row.left, true));
-                if (row.text)
-                    out.push(normalizeMarkdown(row.text));
-            }
-            else if (isHeading(row)) {
-                const { level } = row;
-                headingLevel = level;
-                out.push(`${'#'.repeat(headingLevel)} ${normalizeOneLine(row.text, row.pre)}`);
-            }
-            else {
-                out.push(normalizeMarkdown(row.text, !!row.pre));
-            }
-        }
-        return (this.#usageMarkdown = out.join('\n\n') + '\n');
-    }
-    #usageRows(start) {
-        // turn each config type into a row, and figure out the width of the
-        // left hand indentation for the option descriptions.
-        let maxMax = Math.max(12, Math.min(26, Math.floor(width / 3)));
-        let maxWidth = 8;
-        let prev = undefined;
-        const rows = [];
-        for (const field of this.#fields.slice(start)) {
-            if (field.type !== 'config') {
-                if (prev?.type === 'config')
-                    prev.skipLine = true;
-                prev = undefined;
-                field.text = normalize(field.text, !!field.pre);
-                rows.push(field);
-                continue;
-            }
-            const { value } = field;
-            const desc = value.description || '';
-            const mult = value.multiple ? 'Can be set multiple times' : '';
-            const opts = value.validOptions?.length ?
-                `Valid options:${value.validOptions.map(v => ` ${JSON.stringify(v)}`)}`
-                : '';
-            const dmDelim = desc.includes('\n') ? '\n\n' : '\n';
-            const extra = [opts, mult].join(dmDelim).trim();
-            const text = (normalize(desc) + dmDelim + extra).trim();
-            const hint = value.hint ||
-                (value.type === 'number' ? 'n'
-                    : value.type === 'string' ? field.name
-                        : undefined);
-            const short = !value.short ? ''
-                : value.type === 'boolean' ? `-${value.short} `
-                    : `-${value.short}<${hint}> `;
-            const left = value.type === 'boolean' ?
-                `${short}--${field.name}`
-                : `${short}--${field.name}=<${hint}>`;
-            const row = { text, left, type: 'config' };
-            if (text.length > width - maxMax) {
-                row.skipLine = true;
-            }
-            if (prev && left.length > maxMax)
-                prev.skipLine = true;
-            prev = row;
-            const len = left.length + 4;
-            if (len > maxWidth && len < maxMax) {
-                maxWidth = len;
-            }
-            rows.push(row);
-        }
-        return { rows, maxWidth };
-    }
-    /**
-     * Return the configuration options as a plain object
-     */
-    toJSON() {
-        return Object.fromEntries(Object.entries(this.#configSet).map(([field, def]) => [
-            field,
-            {
-                type: def.type,
-                ...(def.multiple ? { multiple: true } : {}),
-                ...(def.delim ? { delim: def.delim } : {}),
-                ...(def.short ? { short: def.short } : {}),
-                ...(def.description ?
-                    { description: normalize(def.description) }
-                    : {}),
-                ...(def.validate ? { validate: def.validate } : {}),
-                ...(def.validOptions ? { validOptions: def.validOptions } : {}),
-                ...(def.default !== undefined ? { default: def.default } : {}),
-                ...(def.hint ? { hint: def.hint } : {}),
-            },
-        ]));
-    }
-    /**
-     * Custom printer for `util.inspect`
-     */
-    [inspect.custom](_, options) {
-        return `Jack ${inspect(this.toJSON(), options)}`;
-    }
-}
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-export const jack = (options = {}) => new Jack(options);
-// Unwrap and un-indent, so we can wrap description
-// strings however makes them look nice in the code.
-const normalize = (s, pre = false) => {
-    if (pre)
-        // prepend a ZWSP to each line so cliui doesn't strip it.
-        return s
-            .split('\n')
-            .map(l => `\u200b${l}`)
-            .join('\n');
-    return s
-        .split(/^\s*```\s*$/gm)
-        .map((s, i) => {
-        if (i % 2 === 1) {
-            if (!s.trim()) {
-                return `\`\`\`\n\`\`\`\n`;
-            }
-            // outdent the ``` blocks, but preserve whitespace otherwise.
-            const split = s.split('\n');
-            // throw out the \n at the start and end
-            split.pop();
-            split.shift();
-            const si = split.reduce((shortest, l) => {
-                /* c8 ignore next */
-                const ind = l.match(/^\s*/)?.[0] ?? '';
-                if (ind.length)
-                    return Math.min(ind.length, shortest);
-                else
-                    return shortest;
-            }, Infinity);
-            /* c8 ignore next */
-            const i = isFinite(si) ? si : 0;
-            return ('\n```\n' +
-                split.map(s => `\u200b${s.substring(i)}`).join('\n') +
-                '\n```\n');
-        }
-        return (s
-            // remove single line breaks, except for lists
-            .replace(/([^\n])\n[ \t]*([^\n])/g, (_, $1, $2) => !/^[-*]/.test($2) ? `${$1} ${$2}` : `${$1}\n${$2}`)
-            // normalize mid-line whitespace
-            .replace(/([^\n])[ \t]+([^\n])/g, '$1 $2')
-            // two line breaks are enough
-            .replace(/\n{3,}/g, '\n\n')
-            // remove any spaces at the start of a line
-            .replace(/\n[ \t]+/g, '\n')
-            .trim());
-    })
-        .join('\n');
-};
-// normalize for markdown printing, remove leading spaces on lines
-const normalizeMarkdown = (s, pre = false) => {
-    const n = normalize(s, pre).replace(/\\/g, '\\\\');
-    return pre ?
-        `\`\`\`\n${n.replace(/\u200b/g, '')}\n\`\`\``
-        : n.replace(/\n +/g, '\n').trim();
-};
-const normalizeOneLine = (s, pre = false) => {
-    const n = normalize(s, pre)
-        .replace(/[\s\u200b]+/g, ' ')
-        .trim();
-    return pre ? `\`${n}\`` : n;
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/jackspeak/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/jackspeak/package.json b/node_modules/@npmcli/package-json/node_modules/jackspeak/package.json
deleted file mode 100644
index aa85d230f6d24..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/jackspeak/package.json
+++ /dev/null
@@ -1,94 +0,0 @@
-{
-  "name": "jackspeak",
-  "version": "4.1.1",
-  "description": "A very strict and proper argument parser.",
-  "tshy": {
-    "main": true,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.js"
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "build-examples": "for i in examples/*.js ; do node $i -h > ${i/.js/.txt}; done",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "license": "BlueOak-1.0.0",
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/node": "^22.6.0",
-    "prettier": "^3.3.3",
-    "tap": "^21.0.1",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.26.7"
-  },
-  "dependencies": {
-    "@isaacs/cliui": "^8.0.2"
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/jackspeak.git"
-  },
-  "keywords": [
-    "argument",
-    "parser",
-    "args",
-    "option",
-    "flag",
-    "cli",
-    "command",
-    "line",
-    "parse",
-    "parsing"
-  ],
-  "author": "Isaac Z. Schlueter ",
-  "tap": {
-    "typecheck": true
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md b/node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md
deleted file mode 100644
index c5402b9577a8c..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/path-scurry/LICENSE.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js
deleted file mode 100644
index af3e7595f577f..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/index.js
+++ /dev/null
@@ -1,2016 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
-const lru_cache_1 = require("lru-cache");
-const node_path_1 = require("node:path");
-const node_url_1 = require("node:url");
-const fs_1 = require("fs");
-const actualFS = __importStar(require("node:fs"));
-const realpathSync = fs_1.realpathSync.native;
-// TODO: test perf of fs/promises realpath vs realpathCB,
-// since the promises one uses realpath.native
-const promises_1 = require("node:fs/promises");
-const minipass_1 = require("minipass");
-const defaultFS = {
-    lstatSync: fs_1.lstatSync,
-    readdir: fs_1.readdir,
-    readdirSync: fs_1.readdirSync,
-    readlinkSync: fs_1.readlinkSync,
-    realpathSync,
-    promises: {
-        lstat: promises_1.lstat,
-        readdir: promises_1.readdir,
-        readlink: promises_1.readlink,
-        realpath: promises_1.realpath,
-    },
-};
-// if they just gave us require('fs') then use our default
-const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
-    defaultFS
-    : {
-        ...defaultFS,
-        ...fsOption,
-        promises: {
-            ...defaultFS.promises,
-            ...(fsOption.promises || {}),
-        },
-    };
-// turn something like //?/c:/ into c:\
-const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
-const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
-// windows paths are separated by either / or \
-const eitherSep = /[\\\/]/;
-const UNKNOWN = 0; // may not even exist, for all we know
-const IFIFO = 0b0001;
-const IFCHR = 0b0010;
-const IFDIR = 0b0100;
-const IFBLK = 0b0110;
-const IFREG = 0b1000;
-const IFLNK = 0b1010;
-const IFSOCK = 0b1100;
-const IFMT = 0b1111;
-// mask to unset low 4 bits
-const IFMT_UNKNOWN = ~IFMT;
-// set after successfully calling readdir() and getting entries.
-const READDIR_CALLED = 0b0000_0001_0000;
-// set after a successful lstat()
-const LSTAT_CALLED = 0b0000_0010_0000;
-// set if an entry (or one of its parents) is definitely not a dir
-const ENOTDIR = 0b0000_0100_0000;
-// set if an entry (or one of its parents) does not exist
-// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
-const ENOENT = 0b0000_1000_0000;
-// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
-// set if we fail to readlink
-const ENOREADLINK = 0b0001_0000_0000;
-// set if we know realpath() will fail
-const ENOREALPATH = 0b0010_0000_0000;
-const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
-const TYPEMASK = 0b0011_1111_1111;
-const entToType = (s) => s.isFile() ? IFREG
-    : s.isDirectory() ? IFDIR
-        : s.isSymbolicLink() ? IFLNK
-            : s.isCharacterDevice() ? IFCHR
-                : s.isBlockDevice() ? IFBLK
-                    : s.isSocket() ? IFSOCK
-                        : s.isFIFO() ? IFIFO
-                            : UNKNOWN;
-// normalize unicode path names
-const normalizeCache = new Map();
-const normalize = (s) => {
-    const c = normalizeCache.get(s);
-    if (c)
-        return c;
-    const n = s.normalize('NFKD');
-    normalizeCache.set(s, n);
-    return n;
-};
-const normalizeNocaseCache = new Map();
-const normalizeNocase = (s) => {
-    const c = normalizeNocaseCache.get(s);
-    if (c)
-        return c;
-    const n = normalize(s.toLowerCase());
-    normalizeNocaseCache.set(s, n);
-    return n;
-};
-/**
- * An LRUCache for storing resolved path strings or Path objects.
- * @internal
- */
-class ResolveCache extends lru_cache_1.LRUCache {
-    constructor() {
-        super({ max: 256 });
-    }
-}
-exports.ResolveCache = ResolveCache;
-// In order to prevent blowing out the js heap by allocating hundreds of
-// thousands of Path entries when walking extremely large trees, the "children"
-// in this tree are represented by storing an array of Path entries in an
-// LRUCache, indexed by the parent.  At any time, Path.children() may return an
-// empty array, indicating that it doesn't know about any of its children, and
-// thus has to rebuild that cache.  This is fine, it just means that we don't
-// benefit as much from having the cached entries, but huge directory walks
-// don't blow out the stack, and smaller ones are still as fast as possible.
-//
-// It does impose some complexity when building up the readdir data, because we
-// need to pass a reference to the children array that we started with.
-/**
- * an LRUCache for storing child entries.
- * @internal
- */
-class ChildrenCache extends lru_cache_1.LRUCache {
-    constructor(maxSize = 16 * 1024) {
-        super({
-            maxSize,
-            // parent + children
-            sizeCalculation: a => a.length + 1,
-        });
-    }
-}
-exports.ChildrenCache = ChildrenCache;
-const setAsCwd = Symbol('PathScurry setAsCwd');
-/**
- * Path objects are sort of like a super-powered
- * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
- *
- * Each one represents a single filesystem entry on disk, which may or may not
- * exist. It includes methods for reading various types of information via
- * lstat, readlink, and readdir, and caches all information to the greatest
- * degree possible.
- *
- * Note that fs operations that would normally throw will instead return an
- * "empty" value. This is in order to prevent excessive overhead from error
- * stack traces.
- */
-class PathBase {
-    /**
-     * the basename of this path
-     *
-     * **Important**: *always* test the path name against any test string
-     * using the {@link isNamed} method, and not by directly comparing this
-     * string. Otherwise, unicode path strings that the system sees as identical
-     * will not be properly treated as the same path, leading to incorrect
-     * behavior and possible security issues.
-     */
-    name;
-    /**
-     * the Path entry corresponding to the path root.
-     *
-     * @internal
-     */
-    root;
-    /**
-     * All roots found within the current PathScurry family
-     *
-     * @internal
-     */
-    roots;
-    /**
-     * a reference to the parent path, or undefined in the case of root entries
-     *
-     * @internal
-     */
-    parent;
-    /**
-     * boolean indicating whether paths are compared case-insensitively
-     * @internal
-     */
-    nocase;
-    /**
-     * boolean indicating that this path is the current working directory
-     * of the PathScurry collection that contains it.
-     */
-    isCWD = false;
-    // potential default fs override
-    #fs;
-    // Stats fields
-    #dev;
-    get dev() {
-        return this.#dev;
-    }
-    #mode;
-    get mode() {
-        return this.#mode;
-    }
-    #nlink;
-    get nlink() {
-        return this.#nlink;
-    }
-    #uid;
-    get uid() {
-        return this.#uid;
-    }
-    #gid;
-    get gid() {
-        return this.#gid;
-    }
-    #rdev;
-    get rdev() {
-        return this.#rdev;
-    }
-    #blksize;
-    get blksize() {
-        return this.#blksize;
-    }
-    #ino;
-    get ino() {
-        return this.#ino;
-    }
-    #size;
-    get size() {
-        return this.#size;
-    }
-    #blocks;
-    get blocks() {
-        return this.#blocks;
-    }
-    #atimeMs;
-    get atimeMs() {
-        return this.#atimeMs;
-    }
-    #mtimeMs;
-    get mtimeMs() {
-        return this.#mtimeMs;
-    }
-    #ctimeMs;
-    get ctimeMs() {
-        return this.#ctimeMs;
-    }
-    #birthtimeMs;
-    get birthtimeMs() {
-        return this.#birthtimeMs;
-    }
-    #atime;
-    get atime() {
-        return this.#atime;
-    }
-    #mtime;
-    get mtime() {
-        return this.#mtime;
-    }
-    #ctime;
-    get ctime() {
-        return this.#ctime;
-    }
-    #birthtime;
-    get birthtime() {
-        return this.#birthtime;
-    }
-    #matchName;
-    #depth;
-    #fullpath;
-    #fullpathPosix;
-    #relative;
-    #relativePosix;
-    #type;
-    #children;
-    #linkTarget;
-    #realpath;
-    /**
-     * This property is for compatibility with the Dirent class as of
-     * Node v20, where Dirent['parentPath'] refers to the path of the
-     * directory that was passed to readdir. For root entries, it's the path
-     * to the entry itself.
-     */
-    get parentPath() {
-        return (this.parent || this).fullpath();
-    }
-    /**
-     * Deprecated alias for Dirent['parentPath']. Somewhat counterintuitively,
-     * this property refers to the *parent* path, not the path object itself.
-     *
-     * @deprecated
-     */
-    get path() {
-        return this.parentPath;
-    }
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        this.name = name;
-        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
-        this.#type = type & TYPEMASK;
-        this.nocase = nocase;
-        this.roots = roots;
-        this.root = root || this;
-        this.#children = children;
-        this.#fullpath = opts.fullpath;
-        this.#relative = opts.relative;
-        this.#relativePosix = opts.relativePosix;
-        this.parent = opts.parent;
-        if (this.parent) {
-            this.#fs = this.parent.#fs;
-        }
-        else {
-            this.#fs = fsFromOption(opts.fs);
-        }
-    }
-    /**
-     * Returns the depth of the Path object from its root.
-     *
-     * For example, a path at `/foo/bar` would have a depth of 2.
-     */
-    depth() {
-        if (this.#depth !== undefined)
-            return this.#depth;
-        if (!this.parent)
-            return (this.#depth = 0);
-        return (this.#depth = this.parent.depth() + 1);
-    }
-    /**
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Get the Path object referenced by the string path, resolved from this Path
-     */
-    resolve(path) {
-        if (!path) {
-            return this;
-        }
-        const rootPath = this.getRootString(path);
-        const dir = path.substring(rootPath.length);
-        const dirParts = dir.split(this.splitSep);
-        const result = rootPath ?
-            this.getRoot(rootPath).#resolveParts(dirParts)
-            : this.#resolveParts(dirParts);
-        return result;
-    }
-    #resolveParts(dirParts) {
-        let p = this;
-        for (const part of dirParts) {
-            p = p.child(part);
-        }
-        return p;
-    }
-    /**
-     * Returns the cached children Path objects, if still available.  If they
-     * have fallen out of the cache, then returns an empty array, and resets the
-     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
-     * lookup.
-     *
-     * @internal
-     */
-    children() {
-        const cached = this.#children.get(this);
-        if (cached) {
-            return cached;
-        }
-        const children = Object.assign([], { provisional: 0 });
-        this.#children.set(this, children);
-        this.#type &= ~READDIR_CALLED;
-        return children;
-    }
-    /**
-     * Resolves a path portion and returns or creates the child Path.
-     *
-     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
-     * `'..'`.
-     *
-     * This should not be called directly.  If `pathPart` contains any path
-     * separators, it will lead to unsafe undefined behavior.
-     *
-     * Use `Path.resolve()` instead.
-     *
-     * @internal
-     */
-    child(pathPart, opts) {
-        if (pathPart === '' || pathPart === '.') {
-            return this;
-        }
-        if (pathPart === '..') {
-            return this.parent || this;
-        }
-        // find the child
-        const children = this.children();
-        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
-        for (const p of children) {
-            if (p.#matchName === name) {
-                return p;
-            }
-        }
-        // didn't find it, create provisional child, since it might not
-        // actually exist.  If we know the parent isn't a dir, then
-        // in fact it CAN'T exist.
-        const s = this.parent ? this.sep : '';
-        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
-        const pchild = this.newChild(pathPart, UNKNOWN, {
-            ...opts,
-            parent: this,
-            fullpath,
-        });
-        if (!this.canReaddir()) {
-            pchild.#type |= ENOENT;
-        }
-        // don't have to update provisional, because if we have real children,
-        // then provisional is set to children.length, otherwise a lower number
-        children.push(pchild);
-        return pchild;
-    }
-    /**
-     * The relative path from the cwd. If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpath()
-     */
-    relative() {
-        if (this.isCWD)
-            return '';
-        if (this.#relative !== undefined) {
-            return this.#relative;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relative = this.name);
-        }
-        const pv = p.relative();
-        return pv + (!pv || !p.parent ? '' : this.sep) + name;
-    }
-    /**
-     * The relative path from the cwd, using / as the path separator.
-     * If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpathPosix()
-     * On posix systems, this is identical to relative().
-     */
-    relativePosix() {
-        if (this.sep === '/')
-            return this.relative();
-        if (this.isCWD)
-            return '';
-        if (this.#relativePosix !== undefined)
-            return this.#relativePosix;
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relativePosix = this.fullpathPosix());
-        }
-        const pv = p.relativePosix();
-        return pv + (!pv || !p.parent ? '' : '/') + name;
-    }
-    /**
-     * The fully resolved path string for this Path entry
-     */
-    fullpath() {
-        if (this.#fullpath !== undefined) {
-            return this.#fullpath;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#fullpath = this.name);
-        }
-        const pv = p.fullpath();
-        const fp = pv + (!p.parent ? '' : this.sep) + name;
-        return (this.#fullpath = fp);
-    }
-    /**
-     * On platforms other than windows, this is identical to fullpath.
-     *
-     * On windows, this is overridden to return the forward-slash form of the
-     * full UNC path.
-     */
-    fullpathPosix() {
-        if (this.#fullpathPosix !== undefined)
-            return this.#fullpathPosix;
-        if (this.sep === '/')
-            return (this.#fullpathPosix = this.fullpath());
-        if (!this.parent) {
-            const p = this.fullpath().replace(/\\/g, '/');
-            if (/^[a-z]:\//i.test(p)) {
-                return (this.#fullpathPosix = `//?/${p}`);
-            }
-            else {
-                return (this.#fullpathPosix = p);
-            }
-        }
-        const p = this.parent;
-        const pfpp = p.fullpathPosix();
-        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
-        return (this.#fullpathPosix = fpp);
-    }
-    /**
-     * Is the Path of an unknown type?
-     *
-     * Note that we might know *something* about it if there has been a previous
-     * filesystem operation, for example that it does not exist, or is not a
-     * link, or whether it has child entries.
-     */
-    isUnknown() {
-        return (this.#type & IFMT) === UNKNOWN;
-    }
-    isType(type) {
-        return this[`is${type}`]();
-    }
-    getType() {
-        return (this.isUnknown() ? 'Unknown'
-            : this.isDirectory() ? 'Directory'
-                : this.isFile() ? 'File'
-                    : this.isSymbolicLink() ? 'SymbolicLink'
-                        : this.isFIFO() ? 'FIFO'
-                            : this.isCharacterDevice() ? 'CharacterDevice'
-                                : this.isBlockDevice() ? 'BlockDevice'
-                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
-                                        : 'Unknown');
-        /* c8 ignore stop */
-    }
-    /**
-     * Is the Path a regular file?
-     */
-    isFile() {
-        return (this.#type & IFMT) === IFREG;
-    }
-    /**
-     * Is the Path a directory?
-     */
-    isDirectory() {
-        return (this.#type & IFMT) === IFDIR;
-    }
-    /**
-     * Is the path a character device?
-     */
-    isCharacterDevice() {
-        return (this.#type & IFMT) === IFCHR;
-    }
-    /**
-     * Is the path a block device?
-     */
-    isBlockDevice() {
-        return (this.#type & IFMT) === IFBLK;
-    }
-    /**
-     * Is the path a FIFO pipe?
-     */
-    isFIFO() {
-        return (this.#type & IFMT) === IFIFO;
-    }
-    /**
-     * Is the path a socket?
-     */
-    isSocket() {
-        return (this.#type & IFMT) === IFSOCK;
-    }
-    /**
-     * Is the path a symbolic link?
-     */
-    isSymbolicLink() {
-        return (this.#type & IFLNK) === IFLNK;
-    }
-    /**
-     * Return the entry if it has been subject of a successful lstat, or
-     * undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* simply
-     * mean that we haven't called lstat on it.
-     */
-    lstatCached() {
-        return this.#type & LSTAT_CALLED ? this : undefined;
-    }
-    /**
-     * Return the cached link target if the entry has been the subject of a
-     * successful readlink, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readlink() has been called at some point.
-     */
-    readlinkCached() {
-        return this.#linkTarget;
-    }
-    /**
-     * Returns the cached realpath target if the entry has been the subject
-     * of a successful realpath, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * realpath() has been called at some point.
-     */
-    realpathCached() {
-        return this.#realpath;
-    }
-    /**
-     * Returns the cached child Path entries array if the entry has been the
-     * subject of a successful readdir(), or [] otherwise.
-     *
-     * Does not read the filesystem, so an empty array *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readdir() has been called recently enough to still be valid.
-     */
-    readdirCached() {
-        const children = this.children();
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
-     * any indication that readlink will definitely fail.
-     *
-     * Returns false if the path is known to not be a symlink, if a previous
-     * readlink failed, or if the entry does not exist.
-     */
-    canReadlink() {
-        if (this.#linkTarget)
-            return true;
-        if (!this.parent)
-            return false;
-        // cases where it cannot possibly succeed
-        const ifmt = this.#type & IFMT;
-        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
-            this.#type & ENOREADLINK ||
-            this.#type & ENOENT);
-    }
-    /**
-     * Return true if readdir has previously been successfully called on this
-     * path, indicating that cachedReaddir() is likely valid.
-     */
-    calledReaddir() {
-        return !!(this.#type & READDIR_CALLED);
-    }
-    /**
-     * Returns true if the path is known to not exist. That is, a previous lstat
-     * or readdir failed to verify its existence when that would have been
-     * expected, or a parent entry was marked either enoent or enotdir.
-     */
-    isENOENT() {
-        return !!(this.#type & ENOENT);
-    }
-    /**
-     * Return true if the path is a match for the given path name.  This handles
-     * case sensitivity and unicode normalization.
-     *
-     * Note: even on case-sensitive systems, it is **not** safe to test the
-     * equality of the `.name` property to determine whether a given pathname
-     * matches, due to unicode normalization mismatches.
-     *
-     * Always use this method instead of testing the `path.name` property
-     * directly.
-     */
-    isNamed(n) {
-        return !this.nocase ?
-            this.#matchName === normalize(n)
-            : this.#matchName === normalizeNocase(n);
-    }
-    /**
-     * Return the Path object corresponding to the target of a symbolic link.
-     *
-     * If the Path is not a symbolic link, or if the readlink call fails for any
-     * reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     */
-    async readlink() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = await this.#fs.promises.readlink(this.fullpath());
-            const linkTarget = (await this.parent.realpath())?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    /**
-     * Synchronous {@link PathBase.readlink}
-     */
-    readlinkSync() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = this.#fs.readlinkSync(this.fullpath());
-            const linkTarget = this.parent.realpathSync()?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    #readdirSuccess(children) {
-        // succeeded, mark readdir called bit
-        this.#type |= READDIR_CALLED;
-        // mark all remaining provisional children as ENOENT
-        for (let p = children.provisional; p < children.length; p++) {
-            const c = children[p];
-            if (c)
-                c.#markENOENT();
-        }
-    }
-    #markENOENT() {
-        // mark as UNKNOWN and ENOENT
-        if (this.#type & ENOENT)
-            return;
-        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
-        this.#markChildrenENOENT();
-    }
-    #markChildrenENOENT() {
-        // all children are provisional and do not exist
-        const children = this.children();
-        children.provisional = 0;
-        for (const p of children) {
-            p.#markENOENT();
-        }
-    }
-    #markENOREALPATH() {
-        this.#type |= ENOREALPATH;
-        this.#markENOTDIR();
-    }
-    // save the information when we know the entry is not a dir
-    #markENOTDIR() {
-        // entry is not a directory, so any children can't exist.
-        // this *should* be impossible, since any children created
-        // after it's been marked ENOTDIR should be marked ENOENT,
-        // so it won't even get to this point.
-        /* c8 ignore start */
-        if (this.#type & ENOTDIR)
-            return;
-        /* c8 ignore stop */
-        let t = this.#type;
-        // this could happen if we stat a dir, then delete it,
-        // then try to read it or one of its children.
-        if ((t & IFMT) === IFDIR)
-            t &= IFMT_UNKNOWN;
-        this.#type = t | ENOTDIR;
-        this.#markChildrenENOENT();
-    }
-    #readdirFail(code = '') {
-        // markENOTDIR and markENOENT also set provisional=0
-        if (code === 'ENOTDIR' || code === 'EPERM') {
-            this.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            this.#markENOENT();
-        }
-        else {
-            this.children().provisional = 0;
-        }
-    }
-    #lstatFail(code = '') {
-        // Windows just raises ENOENT in this case, disable for win CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR') {
-            // already know it has a parent by this point
-            const p = this.parent;
-            p.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            /* c8 ignore stop */
-            this.#markENOENT();
-        }
-    }
-    #readlinkFail(code = '') {
-        let ter = this.#type;
-        ter |= ENOREADLINK;
-        if (code === 'ENOENT')
-            ter |= ENOENT;
-        // windows gets a weird error when you try to readlink a file
-        if (code === 'EINVAL' || code === 'UNKNOWN') {
-            // exists, but not a symlink, we don't know WHAT it is, so remove
-            // all IFMT bits.
-            ter &= IFMT_UNKNOWN;
-        }
-        this.#type = ter;
-        // windows just gets ENOENT in this case.  We do cover the case,
-        // just disabled because it's impossible on Windows CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR' && this.parent) {
-            this.parent.#markENOTDIR();
-        }
-        /* c8 ignore stop */
-    }
-    #readdirAddChild(e, c) {
-        return (this.#readdirMaybePromoteChild(e, c) ||
-            this.#readdirAddNewChild(e, c));
-    }
-    #readdirAddNewChild(e, c) {
-        // alloc new entry at head, so it's never provisional
-        const type = entToType(e);
-        const child = this.newChild(e.name, type, { parent: this });
-        const ifmt = child.#type & IFMT;
-        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
-            child.#type |= ENOTDIR;
-        }
-        c.unshift(child);
-        c.provisional++;
-        return child;
-    }
-    #readdirMaybePromoteChild(e, c) {
-        for (let p = c.provisional; p < c.length; p++) {
-            const pchild = c[p];
-            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
-            if (name !== pchild.#matchName) {
-                continue;
-            }
-            return this.#readdirPromoteChild(e, pchild, p, c);
-        }
-    }
-    #readdirPromoteChild(e, p, index, c) {
-        const v = p.name;
-        // retain any other flags, but set ifmt from dirent
-        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
-        // case sensitivity fixing when we learn the true name.
-        if (v !== e.name)
-            p.name = e.name;
-        // just advance provisional index (potentially off the list),
-        // otherwise we have to splice/pop it out and re-insert at head
-        if (index !== c.provisional) {
-            if (index === c.length - 1)
-                c.pop();
-            else
-                c.splice(index, 1);
-            c.unshift(p);
-        }
-        c.provisional++;
-        return p;
-    }
-    /**
-     * Call lstat() on this Path, and update all known information that can be
-     * determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    /**
-     * synchronous {@link PathBase.lstat}
-     */
-    lstatSync() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    #applyStat(st) {
-        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
-        this.#atime = atime;
-        this.#atimeMs = atimeMs;
-        this.#birthtime = birthtime;
-        this.#birthtimeMs = birthtimeMs;
-        this.#blksize = blksize;
-        this.#blocks = blocks;
-        this.#ctime = ctime;
-        this.#ctimeMs = ctimeMs;
-        this.#dev = dev;
-        this.#gid = gid;
-        this.#ino = ino;
-        this.#mode = mode;
-        this.#mtime = mtime;
-        this.#mtimeMs = mtimeMs;
-        this.#nlink = nlink;
-        this.#rdev = rdev;
-        this.#size = size;
-        this.#uid = uid;
-        const ifmt = entToType(st);
-        // retain any other flags, but set the ifmt
-        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
-        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
-            this.#type |= ENOTDIR;
-        }
-    }
-    #onReaddirCB = [];
-    #readdirCBInFlight = false;
-    #callOnReaddirCB(children) {
-        this.#readdirCBInFlight = false;
-        const cbs = this.#onReaddirCB.slice();
-        this.#onReaddirCB.length = 0;
-        cbs.forEach(cb => cb(null, children));
-    }
-    /**
-     * Standard node-style callback interface to get list of directory entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     *
-     * @param cb The callback called with (er, entries).  Note that the `er`
-     * param is somewhat extraneous, as all readdir() errors are handled and
-     * simply result in an empty set of entries being returned.
-     * @param allowZalgo Boolean indicating that immediately known results should
-     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
-     * zalgo at your peril, the dark pony lord is devious and unforgiving.
-     */
-    readdirCB(cb, allowZalgo = false) {
-        if (!this.canReaddir()) {
-            if (allowZalgo)
-                cb(null, []);
-            else
-                queueMicrotask(() => cb(null, []));
-            return;
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            const c = children.slice(0, children.provisional);
-            if (allowZalgo)
-                cb(null, c);
-            else
-                queueMicrotask(() => cb(null, c));
-            return;
-        }
-        // don't have to worry about zalgo at this point.
-        this.#onReaddirCB.push(cb);
-        if (this.#readdirCBInFlight) {
-            return;
-        }
-        this.#readdirCBInFlight = true;
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
-            if (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            else {
-                // if we didn't get an error, we always get entries.
-                //@ts-ignore
-                for (const e of entries) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            this.#callOnReaddirCB(children.slice(0, children.provisional));
-            return;
-        });
-    }
-    #asyncReaddirInFlight;
-    /**
-     * Return an array of known child entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async readdir() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        if (this.#asyncReaddirInFlight) {
-            await this.#asyncReaddirInFlight;
-        }
-        else {
-            /* c8 ignore start */
-            let resolve = () => { };
-            /* c8 ignore stop */
-            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
-            try {
-                for (const e of await this.#fs.promises.readdir(fullpath, {
-                    withFileTypes: true,
-                })) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            catch (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            this.#asyncReaddirInFlight = undefined;
-            resolve();
-        }
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * synchronous {@link PathBase.readdir}
-     */
-    readdirSync() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        try {
-            for (const e of this.#fs.readdirSync(fullpath, {
-                withFileTypes: true,
-            })) {
-                this.#readdirAddChild(e, children);
-            }
-            this.#readdirSuccess(children);
-        }
-        catch (er) {
-            this.#readdirFail(er.code);
-            children.provisional = 0;
-        }
-        return children.slice(0, children.provisional);
-    }
-    canReaddir() {
-        if (this.#type & ENOCHILD)
-            return false;
-        const ifmt = IFMT & this.#type;
-        // we always set ENOTDIR when setting IFMT, so should be impossible
-        /* c8 ignore start */
-        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
-            return false;
-        }
-        /* c8 ignore stop */
-        return true;
-    }
-    shouldWalk(dirs, walkFilter) {
-        return ((this.#type & IFDIR) === IFDIR &&
-            !(this.#type & ENOCHILD) &&
-            !dirs.has(this) &&
-            (!walkFilter || walkFilter(this)));
-    }
-    /**
-     * Return the Path object corresponding to path as resolved
-     * by realpath(3).
-     *
-     * If the realpath call fails for any reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     * On success, returns a Path object.
-     */
-    async realpath() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = await this.#fs.promises.realpath(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Synchronous {@link realpath}
-     */
-    realpathSync() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = this.#fs.realpathSync(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Internal method to mark this Path object as the scurry cwd,
-     * called by {@link PathScurry#chdir}
-     *
-     * @internal
-     */
-    [setAsCwd](oldCwd) {
-        if (oldCwd === this)
-            return;
-        oldCwd.isCWD = false;
-        this.isCWD = true;
-        const changed = new Set([]);
-        let rp = [];
-        let p = this;
-        while (p && p.parent) {
-            changed.add(p);
-            p.#relative = rp.join(this.sep);
-            p.#relativePosix = rp.join('/');
-            p = p.parent;
-            rp.push('..');
-        }
-        // now un-memoize parents of old cwd
-        p = oldCwd;
-        while (p && p.parent && !changed.has(p)) {
-            p.#relative = undefined;
-            p.#relativePosix = undefined;
-            p = p.parent;
-        }
-    }
-}
-exports.PathBase = PathBase;
-/**
- * Path class used on win32 systems
- *
- * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
- * as the path separator for parsing paths.
- */
-class PathWin32 extends PathBase {
-    /**
-     * Separator for generating path strings.
-     */
-    sep = '\\';
-    /**
-     * Separator for parsing path strings.
-     */
-    splitSep = eitherSep;
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return node_path_1.win32.parse(path).root;
-    }
-    /**
-     * @internal
-     */
-    getRoot(rootPath) {
-        rootPath = uncToDrive(rootPath.toUpperCase());
-        if (rootPath === this.root.name) {
-            return this.root;
-        }
-        // ok, not that one, check if it matches another we know about
-        for (const [compare, root] of Object.entries(this.roots)) {
-            if (this.sameRoot(rootPath, compare)) {
-                return (this.roots[rootPath] = root);
-            }
-        }
-        // otherwise, have to create a new one.
-        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
-    }
-    /**
-     * @internal
-     */
-    sameRoot(rootPath, compare = this.root.name) {
-        // windows can (rarely) have case-sensitive filesystem, but
-        // UNC and drive letters are always case-insensitive, and canonically
-        // represented uppercase.
-        rootPath = rootPath
-            .toUpperCase()
-            .replace(/\//g, '\\')
-            .replace(uncDriveRegexp, '$1\\');
-        return rootPath === compare;
-    }
-}
-exports.PathWin32 = PathWin32;
-/**
- * Path class used on all posix systems.
- *
- * Uses `'/'` as the path separator.
- */
-class PathPosix extends PathBase {
-    /**
-     * separator for parsing path strings
-     */
-    splitSep = '/';
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return path.startsWith('/') ? '/' : '';
-    }
-    /**
-     * @internal
-     */
-    getRoot(_rootPath) {
-        return this.root;
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-}
-exports.PathPosix = PathPosix;
-/**
- * The base class for all PathScurry classes, providing the interface for path
- * resolution and filesystem operations.
- *
- * Typically, you should *not* instantiate this class directly, but rather one
- * of the platform-specific classes, or the exported {@link PathScurry} which
- * defaults to the current platform.
- */
-class PathScurryBase {
-    /**
-     * The root Path entry for the current working directory of this Scurry
-     */
-    root;
-    /**
-     * The string path for the root of this Scurry's current working directory
-     */
-    rootPath;
-    /**
-     * A collection of all roots encountered, referenced by rootPath
-     */
-    roots;
-    /**
-     * The Path entry corresponding to this PathScurry's current working directory.
-     */
-    cwd;
-    #resolveCache;
-    #resolvePosixCache;
-    #children;
-    /**
-     * Perform path comparisons case-insensitively.
-     *
-     * Defaults true on Darwin and Windows systems, false elsewhere.
-     */
-    nocase;
-    #fs;
-    /**
-     * This class should not be instantiated directly.
-     *
-     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
-     *
-     * @internal
-     */
-    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
-        this.#fs = fsFromOption(fs);
-        if (cwd instanceof URL || cwd.startsWith('file://')) {
-            cwd = (0, node_url_1.fileURLToPath)(cwd);
-        }
-        // resolve and split root, and then add to the store.
-        // this is the only time we call path.resolve()
-        const cwdPath = pathImpl.resolve(cwd);
-        this.roots = Object.create(null);
-        this.rootPath = this.parseRootPath(cwdPath);
-        this.#resolveCache = new ResolveCache();
-        this.#resolvePosixCache = new ResolveCache();
-        this.#children = new ChildrenCache(childrenCacheSize);
-        const split = cwdPath.substring(this.rootPath.length).split(sep);
-        // resolve('/') leaves '', splits to [''], we don't want that.
-        if (split.length === 1 && !split[0]) {
-            split.pop();
-        }
-        /* c8 ignore start */
-        if (nocase === undefined) {
-            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
-        }
-        /* c8 ignore stop */
-        this.nocase = nocase;
-        this.root = this.newRoot(this.#fs);
-        this.roots[this.rootPath] = this.root;
-        let prev = this.root;
-        let len = split.length - 1;
-        const joinSep = pathImpl.sep;
-        let abs = this.rootPath;
-        let sawFirst = false;
-        for (const part of split) {
-            const l = len--;
-            prev = prev.child(part, {
-                relative: new Array(l).fill('..').join(joinSep),
-                relativePosix: new Array(l).fill('..').join('/'),
-                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
-            });
-            sawFirst = true;
-        }
-        this.cwd = prev;
-    }
-    /**
-     * Get the depth of a provided path, string, or the cwd
-     */
-    depth(path = this.cwd) {
-        if (typeof path === 'string') {
-            path = this.cwd.resolve(path);
-        }
-        return path.depth();
-    }
-    /**
-     * Return the cache of child entries.  Exposed so subclasses can create
-     * child Path objects in a platform-specific way.
-     *
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolve(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolveCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpath();
-        this.#resolveCache.set(r, result);
-        return result;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string, returning
-     * the posix path.  Identical to .resolve() on posix systems, but on
-     * windows will return a forward-slash separated UNC path.
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolvePosix(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolvePosixCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpathPosix();
-        this.#resolvePosixCache.set(r, result);
-        return result;
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or entry
-     */
-    relative(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relative();
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or
-     * entry, using / as the path delimiter, even on Windows.
-     */
-    relativePosix(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relativePosix();
-    }
-    /**
-     * Return the basename for the provided string or Path object
-     */
-    basename(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.name;
-    }
-    /**
-     * Return the dirname for the provided string or Path object
-     */
-    dirname(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return (entry.parent || entry).fullpath();
-    }
-    async readdir(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else {
-            const p = await entry.readdir();
-            return withFileTypes ? p : p.map(e => e.name);
-        }
-    }
-    readdirSync(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else if (withFileTypes) {
-            return entry.readdirSync();
-        }
-        else {
-            return entry.readdirSync().map(e => e.name);
-        }
-    }
-    /**
-     * Call lstat() on the string or Path object, and update all known
-     * information that can be determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstat();
-    }
-    /**
-     * synchronous {@link PathScurryBase.lstat}
-     */
-    lstatSync(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstatSync();
-    }
-    async readlink(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.readlink();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    readlinkSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.readlinkSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async realpath(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.realpath();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    realpathSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.realpathSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async walk(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const walk = (dir, cb) => {
-            dirs.add(dir);
-            dir.readdirCB((er, entries) => {
-                /* c8 ignore start */
-                if (er) {
-                    return cb(er);
-                }
-                /* c8 ignore stop */
-                let len = entries.length;
-                if (!len)
-                    return cb();
-                const next = () => {
-                    if (--len === 0) {
-                        cb();
-                    }
-                };
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        results.push(withFileTypes ? e : e.fullpath());
-                    }
-                    if (follow && e.isSymbolicLink()) {
-                        e.realpath()
-                            .then(r => (r?.isUnknown() ? r.lstat() : r))
-                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
-                    }
-                    else {
-                        if (e.shouldWalk(dirs, walkFilter)) {
-                            walk(e, next);
-                        }
-                        else {
-                            next();
-                        }
-                    }
-                }
-            }, true); // zalgooooooo
-        };
-        const start = entry;
-        return new Promise((res, rej) => {
-            walk(start, er => {
-                /* c8 ignore start */
-                if (er)
-                    return rej(er);
-                /* c8 ignore stop */
-                res(results);
-            });
-        });
-    }
-    walkSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    results.push(withFileTypes ? e : e.fullpath());
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-        return results;
-    }
-    /**
-     * Support for `for await`
-     *
-     * Alias for {@link PathScurryBase.iterate}
-     *
-     * Note: As of Node 19, this is very slow, compared to other methods of
-     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
-     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
-     */
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-    iterate(entry = this.cwd, options = {}) {
-        // iterating async over the stream is significantly more performant,
-        // especially in the warm-cache scenario, because it buffers up directory
-        // entries in the background instead of waiting for a yield for each one.
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            options = entry;
-            entry = this.cwd;
-        }
-        return this.stream(entry, options)[Symbol.asyncIterator]();
-    }
-    /**
-     * Iterating over a PathScurry performs a synchronous walk.
-     *
-     * Alias for {@link PathScurryBase.iterateSync}
-     */
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    *iterateSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        if (!filter || filter(entry)) {
-            yield withFileTypes ? entry : entry.fullpath();
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    yield withFileTypes ? e : e.fullpath();
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-    }
-    stream(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new minipass_1.Minipass({ objectMode: true });
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const onReaddir = (er, entries, didRealpaths = false) => {
-                    /* c8 ignore start */
-                    if (er)
-                        return results.emit('error', er);
-                    /* c8 ignore stop */
-                    if (follow && !didRealpaths) {
-                        const promises = [];
-                        for (const e of entries) {
-                            if (e.isSymbolicLink()) {
-                                promises.push(e
-                                    .realpath()
-                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
-                            }
-                        }
-                        if (promises.length) {
-                            Promise.all(promises).then(() => onReaddir(null, entries, true));
-                            return;
-                        }
-                    }
-                    for (const e of entries) {
-                        if (e && (!filter || filter(e))) {
-                            if (!results.write(withFileTypes ? e : e.fullpath())) {
-                                paused = true;
-                            }
-                        }
-                    }
-                    processing--;
-                    for (const e of entries) {
-                        const r = e.realpathCached() || e;
-                        if (r.shouldWalk(dirs, walkFilter)) {
-                            queue.push(r);
-                        }
-                    }
-                    if (paused && !results.flowing) {
-                        results.once('drain', process);
-                    }
-                    else if (!sync) {
-                        process();
-                    }
-                };
-                // zalgo containment
-                let sync = true;
-                dir.readdirCB(onReaddir, true);
-                sync = false;
-            }
-        };
-        process();
-        return results;
-    }
-    streamSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new minipass_1.Minipass({ objectMode: true });
-        const dirs = new Set();
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const entries = dir.readdirSync();
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        if (!results.write(withFileTypes ? e : e.fullpath())) {
-                            paused = true;
-                        }
-                    }
-                }
-                processing--;
-                for (const e of entries) {
-                    let r = e;
-                    if (e.isSymbolicLink()) {
-                        if (!(follow && (r = e.realpathSync())))
-                            continue;
-                        if (r.isUnknown())
-                            r.lstatSync();
-                    }
-                    if (r.shouldWalk(dirs, walkFilter)) {
-                        queue.push(r);
-                    }
-                }
-            }
-            if (paused && !results.flowing)
-                results.once('drain', process);
-        };
-        process();
-        return results;
-    }
-    chdir(path = this.cwd) {
-        const oldCwd = this.cwd;
-        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
-        this.cwd[setAsCwd](oldCwd);
-    }
-}
-exports.PathScurryBase = PathScurryBase;
-/**
- * Windows implementation of {@link PathScurryBase}
- *
- * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
- * {@link PathWin32} for Path objects.
- */
-class PathScurryWin32 extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '\\';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
-        this.nocase = nocase;
-        for (let p = this.cwd; p; p = p.parent) {
-            p.nocase = this.nocase;
-        }
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(dir) {
-        // if the path starts with a single separator, it's not a UNC, and we'll
-        // just get separator as the root, and driveFromUNC will return \
-        // In that case, mount \ on the root from the cwd.
-        return node_path_1.win32.parse(dir).root.toUpperCase();
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
-    }
-}
-exports.PathScurryWin32 = PathScurryWin32;
-/**
- * {@link PathScurryBase} implementation for all posix systems other than Darwin.
- *
- * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-class PathScurryPosix extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = false } = opts;
-        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
-        this.nocase = nocase;
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(_dir) {
-        return '/';
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return p.startsWith('/');
-    }
-}
-exports.PathScurryPosix = PathScurryPosix;
-/**
- * {@link PathScurryBase} implementation for Darwin (macOS) systems.
- *
- * Defaults to case-insensitive matching, uses `'/'` for generating path
- * strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-class PathScurryDarwin extends PathScurryPosix {
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, { ...opts, nocase });
-    }
-}
-exports.PathScurryDarwin = PathScurryDarwin;
-/**
- * Default {@link PathBase} implementation for the current platform.
- *
- * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
- */
-exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
-/**
- * Default {@link PathScurryBase} implementation for the current platform.
- *
- * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
- * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
- */
-exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
-    : process.platform === 'darwin' ? PathScurryDarwin
-        : PathScurryPosix;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js
deleted file mode 100644
index 42be74c37ad9d..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/index.js
+++ /dev/null
@@ -1,1981 +0,0 @@
-import { LRUCache } from 'lru-cache';
-import { posix, win32 } from 'node:path';
-import { fileURLToPath } from 'node:url';
-import { lstatSync, readdir as readdirCB, readdirSync, readlinkSync, realpathSync as rps, } from 'fs';
-import * as actualFS from 'node:fs';
-const realpathSync = rps.native;
-// TODO: test perf of fs/promises realpath vs realpathCB,
-// since the promises one uses realpath.native
-import { lstat, readdir, readlink, realpath } from 'node:fs/promises';
-import { Minipass } from 'minipass';
-const defaultFS = {
-    lstatSync,
-    readdir: readdirCB,
-    readdirSync,
-    readlinkSync,
-    realpathSync,
-    promises: {
-        lstat,
-        readdir,
-        readlink,
-        realpath,
-    },
-};
-// if they just gave us require('fs') then use our default
-const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
-    defaultFS
-    : {
-        ...defaultFS,
-        ...fsOption,
-        promises: {
-            ...defaultFS.promises,
-            ...(fsOption.promises || {}),
-        },
-    };
-// turn something like //?/c:/ into c:\
-const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
-const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
-// windows paths are separated by either / or \
-const eitherSep = /[\\\/]/;
-const UNKNOWN = 0; // may not even exist, for all we know
-const IFIFO = 0b0001;
-const IFCHR = 0b0010;
-const IFDIR = 0b0100;
-const IFBLK = 0b0110;
-const IFREG = 0b1000;
-const IFLNK = 0b1010;
-const IFSOCK = 0b1100;
-const IFMT = 0b1111;
-// mask to unset low 4 bits
-const IFMT_UNKNOWN = ~IFMT;
-// set after successfully calling readdir() and getting entries.
-const READDIR_CALLED = 0b0000_0001_0000;
-// set after a successful lstat()
-const LSTAT_CALLED = 0b0000_0010_0000;
-// set if an entry (or one of its parents) is definitely not a dir
-const ENOTDIR = 0b0000_0100_0000;
-// set if an entry (or one of its parents) does not exist
-// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
-const ENOENT = 0b0000_1000_0000;
-// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
-// set if we fail to readlink
-const ENOREADLINK = 0b0001_0000_0000;
-// set if we know realpath() will fail
-const ENOREALPATH = 0b0010_0000_0000;
-const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
-const TYPEMASK = 0b0011_1111_1111;
-const entToType = (s) => s.isFile() ? IFREG
-    : s.isDirectory() ? IFDIR
-        : s.isSymbolicLink() ? IFLNK
-            : s.isCharacterDevice() ? IFCHR
-                : s.isBlockDevice() ? IFBLK
-                    : s.isSocket() ? IFSOCK
-                        : s.isFIFO() ? IFIFO
-                            : UNKNOWN;
-// normalize unicode path names
-const normalizeCache = new Map();
-const normalize = (s) => {
-    const c = normalizeCache.get(s);
-    if (c)
-        return c;
-    const n = s.normalize('NFKD');
-    normalizeCache.set(s, n);
-    return n;
-};
-const normalizeNocaseCache = new Map();
-const normalizeNocase = (s) => {
-    const c = normalizeNocaseCache.get(s);
-    if (c)
-        return c;
-    const n = normalize(s.toLowerCase());
-    normalizeNocaseCache.set(s, n);
-    return n;
-};
-/**
- * An LRUCache for storing resolved path strings or Path objects.
- * @internal
- */
-export class ResolveCache extends LRUCache {
-    constructor() {
-        super({ max: 256 });
-    }
-}
-// In order to prevent blowing out the js heap by allocating hundreds of
-// thousands of Path entries when walking extremely large trees, the "children"
-// in this tree are represented by storing an array of Path entries in an
-// LRUCache, indexed by the parent.  At any time, Path.children() may return an
-// empty array, indicating that it doesn't know about any of its children, and
-// thus has to rebuild that cache.  This is fine, it just means that we don't
-// benefit as much from having the cached entries, but huge directory walks
-// don't blow out the stack, and smaller ones are still as fast as possible.
-//
-//It does impose some complexity when building up the readdir data, because we
-//need to pass a reference to the children array that we started with.
-/**
- * an LRUCache for storing child entries.
- * @internal
- */
-export class ChildrenCache extends LRUCache {
-    constructor(maxSize = 16 * 1024) {
-        super({
-            maxSize,
-            // parent + children
-            sizeCalculation: a => a.length + 1,
-        });
-    }
-}
-const setAsCwd = Symbol('PathScurry setAsCwd');
-/**
- * Path objects are sort of like a super-powered
- * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
- *
- * Each one represents a single filesystem entry on disk, which may or may not
- * exist. It includes methods for reading various types of information via
- * lstat, readlink, and readdir, and caches all information to the greatest
- * degree possible.
- *
- * Note that fs operations that would normally throw will instead return an
- * "empty" value. This is in order to prevent excessive overhead from error
- * stack traces.
- */
-export class PathBase {
-    /**
-     * the basename of this path
-     *
-     * **Important**: *always* test the path name against any test string
-     * usingthe {@link isNamed} method, and not by directly comparing this
-     * string. Otherwise, unicode path strings that the system sees as identical
-     * will not be properly treated as the same path, leading to incorrect
-     * behavior and possible security issues.
-     */
-    name;
-    /**
-     * the Path entry corresponding to the path root.
-     *
-     * @internal
-     */
-    root;
-    /**
-     * All roots found within the current PathScurry family
-     *
-     * @internal
-     */
-    roots;
-    /**
-     * a reference to the parent path, or undefined in the case of root entries
-     *
-     * @internal
-     */
-    parent;
-    /**
-     * boolean indicating whether paths are compared case-insensitively
-     * @internal
-     */
-    nocase;
-    /**
-     * boolean indicating that this path is the current working directory
-     * of the PathScurry collection that contains it.
-     */
-    isCWD = false;
-    // potential default fs override
-    #fs;
-    // Stats fields
-    #dev;
-    get dev() {
-        return this.#dev;
-    }
-    #mode;
-    get mode() {
-        return this.#mode;
-    }
-    #nlink;
-    get nlink() {
-        return this.#nlink;
-    }
-    #uid;
-    get uid() {
-        return this.#uid;
-    }
-    #gid;
-    get gid() {
-        return this.#gid;
-    }
-    #rdev;
-    get rdev() {
-        return this.#rdev;
-    }
-    #blksize;
-    get blksize() {
-        return this.#blksize;
-    }
-    #ino;
-    get ino() {
-        return this.#ino;
-    }
-    #size;
-    get size() {
-        return this.#size;
-    }
-    #blocks;
-    get blocks() {
-        return this.#blocks;
-    }
-    #atimeMs;
-    get atimeMs() {
-        return this.#atimeMs;
-    }
-    #mtimeMs;
-    get mtimeMs() {
-        return this.#mtimeMs;
-    }
-    #ctimeMs;
-    get ctimeMs() {
-        return this.#ctimeMs;
-    }
-    #birthtimeMs;
-    get birthtimeMs() {
-        return this.#birthtimeMs;
-    }
-    #atime;
-    get atime() {
-        return this.#atime;
-    }
-    #mtime;
-    get mtime() {
-        return this.#mtime;
-    }
-    #ctime;
-    get ctime() {
-        return this.#ctime;
-    }
-    #birthtime;
-    get birthtime() {
-        return this.#birthtime;
-    }
-    #matchName;
-    #depth;
-    #fullpath;
-    #fullpathPosix;
-    #relative;
-    #relativePosix;
-    #type;
-    #children;
-    #linkTarget;
-    #realpath;
-    /**
-     * This property is for compatibility with the Dirent class as of
-     * Node v20, where Dirent['parentPath'] refers to the path of the
-     * directory that was passed to readdir. For root entries, it's the path
-     * to the entry itself.
-     */
-    get parentPath() {
-        return (this.parent || this).fullpath();
-    }
-    /**
-     * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
-     * this property refers to the *parent* path, not the path object itself.
-     *
-     * @deprecated
-     */
-    get path() {
-        return this.parentPath;
-    }
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        this.name = name;
-        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
-        this.#type = type & TYPEMASK;
-        this.nocase = nocase;
-        this.roots = roots;
-        this.root = root || this;
-        this.#children = children;
-        this.#fullpath = opts.fullpath;
-        this.#relative = opts.relative;
-        this.#relativePosix = opts.relativePosix;
-        this.parent = opts.parent;
-        if (this.parent) {
-            this.#fs = this.parent.#fs;
-        }
-        else {
-            this.#fs = fsFromOption(opts.fs);
-        }
-    }
-    /**
-     * Returns the depth of the Path object from its root.
-     *
-     * For example, a path at `/foo/bar` would have a depth of 2.
-     */
-    depth() {
-        if (this.#depth !== undefined)
-            return this.#depth;
-        if (!this.parent)
-            return (this.#depth = 0);
-        return (this.#depth = this.parent.depth() + 1);
-    }
-    /**
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Get the Path object referenced by the string path, resolved from this Path
-     */
-    resolve(path) {
-        if (!path) {
-            return this;
-        }
-        const rootPath = this.getRootString(path);
-        const dir = path.substring(rootPath.length);
-        const dirParts = dir.split(this.splitSep);
-        const result = rootPath ?
-            this.getRoot(rootPath).#resolveParts(dirParts)
-            : this.#resolveParts(dirParts);
-        return result;
-    }
-    #resolveParts(dirParts) {
-        let p = this;
-        for (const part of dirParts) {
-            p = p.child(part);
-        }
-        return p;
-    }
-    /**
-     * Returns the cached children Path objects, if still available.  If they
-     * have fallen out of the cache, then returns an empty array, and resets the
-     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
-     * lookup.
-     *
-     * @internal
-     */
-    children() {
-        const cached = this.#children.get(this);
-        if (cached) {
-            return cached;
-        }
-        const children = Object.assign([], { provisional: 0 });
-        this.#children.set(this, children);
-        this.#type &= ~READDIR_CALLED;
-        return children;
-    }
-    /**
-     * Resolves a path portion and returns or creates the child Path.
-     *
-     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
-     * `'..'`.
-     *
-     * This should not be called directly.  If `pathPart` contains any path
-     * separators, it will lead to unsafe undefined behavior.
-     *
-     * Use `Path.resolve()` instead.
-     *
-     * @internal
-     */
-    child(pathPart, opts) {
-        if (pathPart === '' || pathPart === '.') {
-            return this;
-        }
-        if (pathPart === '..') {
-            return this.parent || this;
-        }
-        // find the child
-        const children = this.children();
-        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
-        for (const p of children) {
-            if (p.#matchName === name) {
-                return p;
-            }
-        }
-        // didn't find it, create provisional child, since it might not
-        // actually exist.  If we know the parent isn't a dir, then
-        // in fact it CAN'T exist.
-        const s = this.parent ? this.sep : '';
-        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
-        const pchild = this.newChild(pathPart, UNKNOWN, {
-            ...opts,
-            parent: this,
-            fullpath,
-        });
-        if (!this.canReaddir()) {
-            pchild.#type |= ENOENT;
-        }
-        // don't have to update provisional, because if we have real children,
-        // then provisional is set to children.length, otherwise a lower number
-        children.push(pchild);
-        return pchild;
-    }
-    /**
-     * The relative path from the cwd. If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpath()
-     */
-    relative() {
-        if (this.isCWD)
-            return '';
-        if (this.#relative !== undefined) {
-            return this.#relative;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relative = this.name);
-        }
-        const pv = p.relative();
-        return pv + (!pv || !p.parent ? '' : this.sep) + name;
-    }
-    /**
-     * The relative path from the cwd, using / as the path separator.
-     * If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpathPosix()
-     * On posix systems, this is identical to relative().
-     */
-    relativePosix() {
-        if (this.sep === '/')
-            return this.relative();
-        if (this.isCWD)
-            return '';
-        if (this.#relativePosix !== undefined)
-            return this.#relativePosix;
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relativePosix = this.fullpathPosix());
-        }
-        const pv = p.relativePosix();
-        return pv + (!pv || !p.parent ? '' : '/') + name;
-    }
-    /**
-     * The fully resolved path string for this Path entry
-     */
-    fullpath() {
-        if (this.#fullpath !== undefined) {
-            return this.#fullpath;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#fullpath = this.name);
-        }
-        const pv = p.fullpath();
-        const fp = pv + (!p.parent ? '' : this.sep) + name;
-        return (this.#fullpath = fp);
-    }
-    /**
-     * On platforms other than windows, this is identical to fullpath.
-     *
-     * On windows, this is overridden to return the forward-slash form of the
-     * full UNC path.
-     */
-    fullpathPosix() {
-        if (this.#fullpathPosix !== undefined)
-            return this.#fullpathPosix;
-        if (this.sep === '/')
-            return (this.#fullpathPosix = this.fullpath());
-        if (!this.parent) {
-            const p = this.fullpath().replace(/\\/g, '/');
-            if (/^[a-z]:\//i.test(p)) {
-                return (this.#fullpathPosix = `//?/${p}`);
-            }
-            else {
-                return (this.#fullpathPosix = p);
-            }
-        }
-        const p = this.parent;
-        const pfpp = p.fullpathPosix();
-        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
-        return (this.#fullpathPosix = fpp);
-    }
-    /**
-     * Is the Path of an unknown type?
-     *
-     * Note that we might know *something* about it if there has been a previous
-     * filesystem operation, for example that it does not exist, or is not a
-     * link, or whether it has child entries.
-     */
-    isUnknown() {
-        return (this.#type & IFMT) === UNKNOWN;
-    }
-    isType(type) {
-        return this[`is${type}`]();
-    }
-    getType() {
-        return (this.isUnknown() ? 'Unknown'
-            : this.isDirectory() ? 'Directory'
-                : this.isFile() ? 'File'
-                    : this.isSymbolicLink() ? 'SymbolicLink'
-                        : this.isFIFO() ? 'FIFO'
-                            : this.isCharacterDevice() ? 'CharacterDevice'
-                                : this.isBlockDevice() ? 'BlockDevice'
-                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
-                                        : 'Unknown');
-        /* c8 ignore stop */
-    }
-    /**
-     * Is the Path a regular file?
-     */
-    isFile() {
-        return (this.#type & IFMT) === IFREG;
-    }
-    /**
-     * Is the Path a directory?
-     */
-    isDirectory() {
-        return (this.#type & IFMT) === IFDIR;
-    }
-    /**
-     * Is the path a character device?
-     */
-    isCharacterDevice() {
-        return (this.#type & IFMT) === IFCHR;
-    }
-    /**
-     * Is the path a block device?
-     */
-    isBlockDevice() {
-        return (this.#type & IFMT) === IFBLK;
-    }
-    /**
-     * Is the path a FIFO pipe?
-     */
-    isFIFO() {
-        return (this.#type & IFMT) === IFIFO;
-    }
-    /**
-     * Is the path a socket?
-     */
-    isSocket() {
-        return (this.#type & IFMT) === IFSOCK;
-    }
-    /**
-     * Is the path a symbolic link?
-     */
-    isSymbolicLink() {
-        return (this.#type & IFLNK) === IFLNK;
-    }
-    /**
-     * Return the entry if it has been subject of a successful lstat, or
-     * undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* simply
-     * mean that we haven't called lstat on it.
-     */
-    lstatCached() {
-        return this.#type & LSTAT_CALLED ? this : undefined;
-    }
-    /**
-     * Return the cached link target if the entry has been the subject of a
-     * successful readlink, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readlink() has been called at some point.
-     */
-    readlinkCached() {
-        return this.#linkTarget;
-    }
-    /**
-     * Returns the cached realpath target if the entry has been the subject
-     * of a successful realpath, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * realpath() has been called at some point.
-     */
-    realpathCached() {
-        return this.#realpath;
-    }
-    /**
-     * Returns the cached child Path entries array if the entry has been the
-     * subject of a successful readdir(), or [] otherwise.
-     *
-     * Does not read the filesystem, so an empty array *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readdir() has been called recently enough to still be valid.
-     */
-    readdirCached() {
-        const children = this.children();
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
-     * any indication that readlink will definitely fail.
-     *
-     * Returns false if the path is known to not be a symlink, if a previous
-     * readlink failed, or if the entry does not exist.
-     */
-    canReadlink() {
-        if (this.#linkTarget)
-            return true;
-        if (!this.parent)
-            return false;
-        // cases where it cannot possibly succeed
-        const ifmt = this.#type & IFMT;
-        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
-            this.#type & ENOREADLINK ||
-            this.#type & ENOENT);
-    }
-    /**
-     * Return true if readdir has previously been successfully called on this
-     * path, indicating that cachedReaddir() is likely valid.
-     */
-    calledReaddir() {
-        return !!(this.#type & READDIR_CALLED);
-    }
-    /**
-     * Returns true if the path is known to not exist. That is, a previous lstat
-     * or readdir failed to verify its existence when that would have been
-     * expected, or a parent entry was marked either enoent or enotdir.
-     */
-    isENOENT() {
-        return !!(this.#type & ENOENT);
-    }
-    /**
-     * Return true if the path is a match for the given path name.  This handles
-     * case sensitivity and unicode normalization.
-     *
-     * Note: even on case-sensitive systems, it is **not** safe to test the
-     * equality of the `.name` property to determine whether a given pathname
-     * matches, due to unicode normalization mismatches.
-     *
-     * Always use this method instead of testing the `path.name` property
-     * directly.
-     */
-    isNamed(n) {
-        return !this.nocase ?
-            this.#matchName === normalize(n)
-            : this.#matchName === normalizeNocase(n);
-    }
-    /**
-     * Return the Path object corresponding to the target of a symbolic link.
-     *
-     * If the Path is not a symbolic link, or if the readlink call fails for any
-     * reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     */
-    async readlink() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = await this.#fs.promises.readlink(this.fullpath());
-            const linkTarget = (await this.parent.realpath())?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    /**
-     * Synchronous {@link PathBase.readlink}
-     */
-    readlinkSync() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = this.#fs.readlinkSync(this.fullpath());
-            const linkTarget = this.parent.realpathSync()?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    #readdirSuccess(children) {
-        // succeeded, mark readdir called bit
-        this.#type |= READDIR_CALLED;
-        // mark all remaining provisional children as ENOENT
-        for (let p = children.provisional; p < children.length; p++) {
-            const c = children[p];
-            if (c)
-                c.#markENOENT();
-        }
-    }
-    #markENOENT() {
-        // mark as UNKNOWN and ENOENT
-        if (this.#type & ENOENT)
-            return;
-        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
-        this.#markChildrenENOENT();
-    }
-    #markChildrenENOENT() {
-        // all children are provisional and do not exist
-        const children = this.children();
-        children.provisional = 0;
-        for (const p of children) {
-            p.#markENOENT();
-        }
-    }
-    #markENOREALPATH() {
-        this.#type |= ENOREALPATH;
-        this.#markENOTDIR();
-    }
-    // save the information when we know the entry is not a dir
-    #markENOTDIR() {
-        // entry is not a directory, so any children can't exist.
-        // this *should* be impossible, since any children created
-        // after it's been marked ENOTDIR should be marked ENOENT,
-        // so it won't even get to this point.
-        /* c8 ignore start */
-        if (this.#type & ENOTDIR)
-            return;
-        /* c8 ignore stop */
-        let t = this.#type;
-        // this could happen if we stat a dir, then delete it,
-        // then try to read it or one of its children.
-        if ((t & IFMT) === IFDIR)
-            t &= IFMT_UNKNOWN;
-        this.#type = t | ENOTDIR;
-        this.#markChildrenENOENT();
-    }
-    #readdirFail(code = '') {
-        // markENOTDIR and markENOENT also set provisional=0
-        if (code === 'ENOTDIR' || code === 'EPERM') {
-            this.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            this.#markENOENT();
-        }
-        else {
-            this.children().provisional = 0;
-        }
-    }
-    #lstatFail(code = '') {
-        // Windows just raises ENOENT in this case, disable for win CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR') {
-            // already know it has a parent by this point
-            const p = this.parent;
-            p.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            /* c8 ignore stop */
-            this.#markENOENT();
-        }
-    }
-    #readlinkFail(code = '') {
-        let ter = this.#type;
-        ter |= ENOREADLINK;
-        if (code === 'ENOENT')
-            ter |= ENOENT;
-        // windows gets a weird error when you try to readlink a file
-        if (code === 'EINVAL' || code === 'UNKNOWN') {
-            // exists, but not a symlink, we don't know WHAT it is, so remove
-            // all IFMT bits.
-            ter &= IFMT_UNKNOWN;
-        }
-        this.#type = ter;
-        // windows just gets ENOENT in this case.  We do cover the case,
-        // just disabled because it's impossible on Windows CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR' && this.parent) {
-            this.parent.#markENOTDIR();
-        }
-        /* c8 ignore stop */
-    }
-    #readdirAddChild(e, c) {
-        return (this.#readdirMaybePromoteChild(e, c) ||
-            this.#readdirAddNewChild(e, c));
-    }
-    #readdirAddNewChild(e, c) {
-        // alloc new entry at head, so it's never provisional
-        const type = entToType(e);
-        const child = this.newChild(e.name, type, { parent: this });
-        const ifmt = child.#type & IFMT;
-        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
-            child.#type |= ENOTDIR;
-        }
-        c.unshift(child);
-        c.provisional++;
-        return child;
-    }
-    #readdirMaybePromoteChild(e, c) {
-        for (let p = c.provisional; p < c.length; p++) {
-            const pchild = c[p];
-            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
-            if (name !== pchild.#matchName) {
-                continue;
-            }
-            return this.#readdirPromoteChild(e, pchild, p, c);
-        }
-    }
-    #readdirPromoteChild(e, p, index, c) {
-        const v = p.name;
-        // retain any other flags, but set ifmt from dirent
-        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
-        // case sensitivity fixing when we learn the true name.
-        if (v !== e.name)
-            p.name = e.name;
-        // just advance provisional index (potentially off the list),
-        // otherwise we have to splice/pop it out and re-insert at head
-        if (index !== c.provisional) {
-            if (index === c.length - 1)
-                c.pop();
-            else
-                c.splice(index, 1);
-            c.unshift(p);
-        }
-        c.provisional++;
-        return p;
-    }
-    /**
-     * Call lstat() on this Path, and update all known information that can be
-     * determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    /**
-     * synchronous {@link PathBase.lstat}
-     */
-    lstatSync() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    #applyStat(st) {
-        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
-        this.#atime = atime;
-        this.#atimeMs = atimeMs;
-        this.#birthtime = birthtime;
-        this.#birthtimeMs = birthtimeMs;
-        this.#blksize = blksize;
-        this.#blocks = blocks;
-        this.#ctime = ctime;
-        this.#ctimeMs = ctimeMs;
-        this.#dev = dev;
-        this.#gid = gid;
-        this.#ino = ino;
-        this.#mode = mode;
-        this.#mtime = mtime;
-        this.#mtimeMs = mtimeMs;
-        this.#nlink = nlink;
-        this.#rdev = rdev;
-        this.#size = size;
-        this.#uid = uid;
-        const ifmt = entToType(st);
-        // retain any other flags, but set the ifmt
-        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
-        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
-            this.#type |= ENOTDIR;
-        }
-    }
-    #onReaddirCB = [];
-    #readdirCBInFlight = false;
-    #callOnReaddirCB(children) {
-        this.#readdirCBInFlight = false;
-        const cbs = this.#onReaddirCB.slice();
-        this.#onReaddirCB.length = 0;
-        cbs.forEach(cb => cb(null, children));
-    }
-    /**
-     * Standard node-style callback interface to get list of directory entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     *
-     * @param cb The callback called with (er, entries).  Note that the `er`
-     * param is somewhat extraneous, as all readdir() errors are handled and
-     * simply result in an empty set of entries being returned.
-     * @param allowZalgo Boolean indicating that immediately known results should
-     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
-     * zalgo at your peril, the dark pony lord is devious and unforgiving.
-     */
-    readdirCB(cb, allowZalgo = false) {
-        if (!this.canReaddir()) {
-            if (allowZalgo)
-                cb(null, []);
-            else
-                queueMicrotask(() => cb(null, []));
-            return;
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            const c = children.slice(0, children.provisional);
-            if (allowZalgo)
-                cb(null, c);
-            else
-                queueMicrotask(() => cb(null, c));
-            return;
-        }
-        // don't have to worry about zalgo at this point.
-        this.#onReaddirCB.push(cb);
-        if (this.#readdirCBInFlight) {
-            return;
-        }
-        this.#readdirCBInFlight = true;
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
-            if (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            else {
-                // if we didn't get an error, we always get entries.
-                //@ts-ignore
-                for (const e of entries) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            this.#callOnReaddirCB(children.slice(0, children.provisional));
-            return;
-        });
-    }
-    #asyncReaddirInFlight;
-    /**
-     * Return an array of known child entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async readdir() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        if (this.#asyncReaddirInFlight) {
-            await this.#asyncReaddirInFlight;
-        }
-        else {
-            /* c8 ignore start */
-            let resolve = () => { };
-            /* c8 ignore stop */
-            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
-            try {
-                for (const e of await this.#fs.promises.readdir(fullpath, {
-                    withFileTypes: true,
-                })) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            catch (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            this.#asyncReaddirInFlight = undefined;
-            resolve();
-        }
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * synchronous {@link PathBase.readdir}
-     */
-    readdirSync() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        try {
-            for (const e of this.#fs.readdirSync(fullpath, {
-                withFileTypes: true,
-            })) {
-                this.#readdirAddChild(e, children);
-            }
-            this.#readdirSuccess(children);
-        }
-        catch (er) {
-            this.#readdirFail(er.code);
-            children.provisional = 0;
-        }
-        return children.slice(0, children.provisional);
-    }
-    canReaddir() {
-        if (this.#type & ENOCHILD)
-            return false;
-        const ifmt = IFMT & this.#type;
-        // we always set ENOTDIR when setting IFMT, so should be impossible
-        /* c8 ignore start */
-        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
-            return false;
-        }
-        /* c8 ignore stop */
-        return true;
-    }
-    shouldWalk(dirs, walkFilter) {
-        return ((this.#type & IFDIR) === IFDIR &&
-            !(this.#type & ENOCHILD) &&
-            !dirs.has(this) &&
-            (!walkFilter || walkFilter(this)));
-    }
-    /**
-     * Return the Path object corresponding to path as resolved
-     * by realpath(3).
-     *
-     * If the realpath call fails for any reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     * On success, returns a Path object.
-     */
-    async realpath() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = await this.#fs.promises.realpath(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Synchronous {@link realpath}
-     */
-    realpathSync() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = this.#fs.realpathSync(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Internal method to mark this Path object as the scurry cwd,
-     * called by {@link PathScurry#chdir}
-     *
-     * @internal
-     */
-    [setAsCwd](oldCwd) {
-        if (oldCwd === this)
-            return;
-        oldCwd.isCWD = false;
-        this.isCWD = true;
-        const changed = new Set([]);
-        let rp = [];
-        let p = this;
-        while (p && p.parent) {
-            changed.add(p);
-            p.#relative = rp.join(this.sep);
-            p.#relativePosix = rp.join('/');
-            p = p.parent;
-            rp.push('..');
-        }
-        // now un-memoize parents of old cwd
-        p = oldCwd;
-        while (p && p.parent && !changed.has(p)) {
-            p.#relative = undefined;
-            p.#relativePosix = undefined;
-            p = p.parent;
-        }
-    }
-}
-/**
- * Path class used on win32 systems
- *
- * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
- * as the path separator for parsing paths.
- */
-export class PathWin32 extends PathBase {
-    /**
-     * Separator for generating path strings.
-     */
-    sep = '\\';
-    /**
-     * Separator for parsing path strings.
-     */
-    splitSep = eitherSep;
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return win32.parse(path).root;
-    }
-    /**
-     * @internal
-     */
-    getRoot(rootPath) {
-        rootPath = uncToDrive(rootPath.toUpperCase());
-        if (rootPath === this.root.name) {
-            return this.root;
-        }
-        // ok, not that one, check if it matches another we know about
-        for (const [compare, root] of Object.entries(this.roots)) {
-            if (this.sameRoot(rootPath, compare)) {
-                return (this.roots[rootPath] = root);
-            }
-        }
-        // otherwise, have to create a new one.
-        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
-    }
-    /**
-     * @internal
-     */
-    sameRoot(rootPath, compare = this.root.name) {
-        // windows can (rarely) have case-sensitive filesystem, but
-        // UNC and drive letters are always case-insensitive, and canonically
-        // represented uppercase.
-        rootPath = rootPath
-            .toUpperCase()
-            .replace(/\//g, '\\')
-            .replace(uncDriveRegexp, '$1\\');
-        return rootPath === compare;
-    }
-}
-/**
- * Path class used on all posix systems.
- *
- * Uses `'/'` as the path separator.
- */
-export class PathPosix extends PathBase {
-    /**
-     * separator for parsing path strings
-     */
-    splitSep = '/';
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return path.startsWith('/') ? '/' : '';
-    }
-    /**
-     * @internal
-     */
-    getRoot(_rootPath) {
-        return this.root;
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-}
-/**
- * The base class for all PathScurry classes, providing the interface for path
- * resolution and filesystem operations.
- *
- * Typically, you should *not* instantiate this class directly, but rather one
- * of the platform-specific classes, or the exported {@link PathScurry} which
- * defaults to the current platform.
- */
-export class PathScurryBase {
-    /**
-     * The root Path entry for the current working directory of this Scurry
-     */
-    root;
-    /**
-     * The string path for the root of this Scurry's current working directory
-     */
-    rootPath;
-    /**
-     * A collection of all roots encountered, referenced by rootPath
-     */
-    roots;
-    /**
-     * The Path entry corresponding to this PathScurry's current working directory.
-     */
-    cwd;
-    #resolveCache;
-    #resolvePosixCache;
-    #children;
-    /**
-     * Perform path comparisons case-insensitively.
-     *
-     * Defaults true on Darwin and Windows systems, false elsewhere.
-     */
-    nocase;
-    #fs;
-    /**
-     * This class should not be instantiated directly.
-     *
-     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
-     *
-     * @internal
-     */
-    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
-        this.#fs = fsFromOption(fs);
-        if (cwd instanceof URL || cwd.startsWith('file://')) {
-            cwd = fileURLToPath(cwd);
-        }
-        // resolve and split root, and then add to the store.
-        // this is the only time we call path.resolve()
-        const cwdPath = pathImpl.resolve(cwd);
-        this.roots = Object.create(null);
-        this.rootPath = this.parseRootPath(cwdPath);
-        this.#resolveCache = new ResolveCache();
-        this.#resolvePosixCache = new ResolveCache();
-        this.#children = new ChildrenCache(childrenCacheSize);
-        const split = cwdPath.substring(this.rootPath.length).split(sep);
-        // resolve('/') leaves '', splits to [''], we don't want that.
-        if (split.length === 1 && !split[0]) {
-            split.pop();
-        }
-        /* c8 ignore start */
-        if (nocase === undefined) {
-            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
-        }
-        /* c8 ignore stop */
-        this.nocase = nocase;
-        this.root = this.newRoot(this.#fs);
-        this.roots[this.rootPath] = this.root;
-        let prev = this.root;
-        let len = split.length - 1;
-        const joinSep = pathImpl.sep;
-        let abs = this.rootPath;
-        let sawFirst = false;
-        for (const part of split) {
-            const l = len--;
-            prev = prev.child(part, {
-                relative: new Array(l).fill('..').join(joinSep),
-                relativePosix: new Array(l).fill('..').join('/'),
-                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
-            });
-            sawFirst = true;
-        }
-        this.cwd = prev;
-    }
-    /**
-     * Get the depth of a provided path, string, or the cwd
-     */
-    depth(path = this.cwd) {
-        if (typeof path === 'string') {
-            path = this.cwd.resolve(path);
-        }
-        return path.depth();
-    }
-    /**
-     * Return the cache of child entries.  Exposed so subclasses can create
-     * child Path objects in a platform-specific way.
-     *
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolve(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolveCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpath();
-        this.#resolveCache.set(r, result);
-        return result;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string, returning
-     * the posix path.  Identical to .resolve() on posix systems, but on
-     * windows will return a forward-slash separated UNC path.
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolvePosix(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolvePosixCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpathPosix();
-        this.#resolvePosixCache.set(r, result);
-        return result;
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or entry
-     */
-    relative(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relative();
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or
-     * entry, using / as the path delimiter, even on Windows.
-     */
-    relativePosix(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relativePosix();
-    }
-    /**
-     * Return the basename for the provided string or Path object
-     */
-    basename(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.name;
-    }
-    /**
-     * Return the dirname for the provided string or Path object
-     */
-    dirname(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return (entry.parent || entry).fullpath();
-    }
-    async readdir(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else {
-            const p = await entry.readdir();
-            return withFileTypes ? p : p.map(e => e.name);
-        }
-    }
-    readdirSync(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else if (withFileTypes) {
-            return entry.readdirSync();
-        }
-        else {
-            return entry.readdirSync().map(e => e.name);
-        }
-    }
-    /**
-     * Call lstat() on the string or Path object, and update all known
-     * information that can be determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstat();
-    }
-    /**
-     * synchronous {@link PathScurryBase.lstat}
-     */
-    lstatSync(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstatSync();
-    }
-    async readlink(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.readlink();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    readlinkSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.readlinkSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async realpath(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.realpath();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    realpathSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.realpathSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async walk(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const walk = (dir, cb) => {
-            dirs.add(dir);
-            dir.readdirCB((er, entries) => {
-                /* c8 ignore start */
-                if (er) {
-                    return cb(er);
-                }
-                /* c8 ignore stop */
-                let len = entries.length;
-                if (!len)
-                    return cb();
-                const next = () => {
-                    if (--len === 0) {
-                        cb();
-                    }
-                };
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        results.push(withFileTypes ? e : e.fullpath());
-                    }
-                    if (follow && e.isSymbolicLink()) {
-                        e.realpath()
-                            .then(r => (r?.isUnknown() ? r.lstat() : r))
-                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
-                    }
-                    else {
-                        if (e.shouldWalk(dirs, walkFilter)) {
-                            walk(e, next);
-                        }
-                        else {
-                            next();
-                        }
-                    }
-                }
-            }, true); // zalgooooooo
-        };
-        const start = entry;
-        return new Promise((res, rej) => {
-            walk(start, er => {
-                /* c8 ignore start */
-                if (er)
-                    return rej(er);
-                /* c8 ignore stop */
-                res(results);
-            });
-        });
-    }
-    walkSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    results.push(withFileTypes ? e : e.fullpath());
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-        return results;
-    }
-    /**
-     * Support for `for await`
-     *
-     * Alias for {@link PathScurryBase.iterate}
-     *
-     * Note: As of Node 19, this is very slow, compared to other methods of
-     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
-     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
-     */
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-    iterate(entry = this.cwd, options = {}) {
-        // iterating async over the stream is significantly more performant,
-        // especially in the warm-cache scenario, because it buffers up directory
-        // entries in the background instead of waiting for a yield for each one.
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            options = entry;
-            entry = this.cwd;
-        }
-        return this.stream(entry, options)[Symbol.asyncIterator]();
-    }
-    /**
-     * Iterating over a PathScurry performs a synchronous walk.
-     *
-     * Alias for {@link PathScurryBase.iterateSync}
-     */
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    *iterateSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        if (!filter || filter(entry)) {
-            yield withFileTypes ? entry : entry.fullpath();
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    yield withFileTypes ? e : e.fullpath();
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-    }
-    stream(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new Minipass({ objectMode: true });
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const onReaddir = (er, entries, didRealpaths = false) => {
-                    /* c8 ignore start */
-                    if (er)
-                        return results.emit('error', er);
-                    /* c8 ignore stop */
-                    if (follow && !didRealpaths) {
-                        const promises = [];
-                        for (const e of entries) {
-                            if (e.isSymbolicLink()) {
-                                promises.push(e
-                                    .realpath()
-                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
-                            }
-                        }
-                        if (promises.length) {
-                            Promise.all(promises).then(() => onReaddir(null, entries, true));
-                            return;
-                        }
-                    }
-                    for (const e of entries) {
-                        if (e && (!filter || filter(e))) {
-                            if (!results.write(withFileTypes ? e : e.fullpath())) {
-                                paused = true;
-                            }
-                        }
-                    }
-                    processing--;
-                    for (const e of entries) {
-                        const r = e.realpathCached() || e;
-                        if (r.shouldWalk(dirs, walkFilter)) {
-                            queue.push(r);
-                        }
-                    }
-                    if (paused && !results.flowing) {
-                        results.once('drain', process);
-                    }
-                    else if (!sync) {
-                        process();
-                    }
-                };
-                // zalgo containment
-                let sync = true;
-                dir.readdirCB(onReaddir, true);
-                sync = false;
-            }
-        };
-        process();
-        return results;
-    }
-    streamSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new Minipass({ objectMode: true });
-        const dirs = new Set();
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const entries = dir.readdirSync();
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        if (!results.write(withFileTypes ? e : e.fullpath())) {
-                            paused = true;
-                        }
-                    }
-                }
-                processing--;
-                for (const e of entries) {
-                    let r = e;
-                    if (e.isSymbolicLink()) {
-                        if (!(follow && (r = e.realpathSync())))
-                            continue;
-                        if (r.isUnknown())
-                            r.lstatSync();
-                    }
-                    if (r.shouldWalk(dirs, walkFilter)) {
-                        queue.push(r);
-                    }
-                }
-            }
-            if (paused && !results.flowing)
-                results.once('drain', process);
-        };
-        process();
-        return results;
-    }
-    chdir(path = this.cwd) {
-        const oldCwd = this.cwd;
-        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
-        this.cwd[setAsCwd](oldCwd);
-    }
-}
-/**
- * Windows implementation of {@link PathScurryBase}
- *
- * Defaults to case insensitive, uses `'\\'` to generate path strings.  Uses
- * {@link PathWin32} for Path objects.
- */
-export class PathScurryWin32 extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '\\';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, win32, '\\', { ...opts, nocase });
-        this.nocase = nocase;
-        for (let p = this.cwd; p; p = p.parent) {
-            p.nocase = this.nocase;
-        }
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(dir) {
-        // if the path starts with a single separator, it's not a UNC, and we'll
-        // just get separator as the root, and driveFromUNC will return \
-        // In that case, mount \ on the root from the cwd.
-        return win32.parse(dir).root.toUpperCase();
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
-    }
-}
-/**
- * {@link PathScurryBase} implementation for all posix systems other than Darwin.
- *
- * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-export class PathScurryPosix extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = false } = opts;
-        super(cwd, posix, '/', { ...opts, nocase });
-        this.nocase = nocase;
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(_dir) {
-        return '/';
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return p.startsWith('/');
-    }
-}
-/**
- * {@link PathScurryBase} implementation for Darwin (macOS) systems.
- *
- * Defaults to case-insensitive matching, uses `'/'` for generating path
- * strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-export class PathScurryDarwin extends PathScurryPosix {
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, { ...opts, nocase });
-    }
-}
-/**
- * Default {@link PathBase} implementation for the current platform.
- *
- * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
- */
-export const Path = process.platform === 'win32' ? PathWin32 : PathPosix;
-/**
- * Default {@link PathScurryBase} implementation for the current platform.
- *
- * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
- * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
- */
-export const PathScurry = process.platform === 'win32' ? PathScurryWin32
-    : process.platform === 'darwin' ? PathScurryDarwin
-        : PathScurryPosix;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json b/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/path-scurry/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/@npmcli/package-json/node_modules/path-scurry/package.json b/node_modules/@npmcli/package-json/node_modules/path-scurry/package.json
deleted file mode 100644
index c3cb39dced545..0000000000000
--- a/node_modules/@npmcli/package-json/node_modules/path-scurry/package.json
+++ /dev/null
@@ -1,88 +0,0 @@
-{
-  "name": "path-scurry",
-  "version": "2.0.0",
-  "description": "walk paths fast and efficiently",
-  "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
-  "main": "./dist/commonjs/index.js",
-  "type": "module",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "license": "BlueOak-1.0.0",
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
-    "bench": "bash ./scripts/bench.sh"
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@nodelib/fs.walk": "^2.0.0",
-    "@types/node": "^20.14.10",
-    "mkdirp": "^3.0.0",
-    "prettier": "^3.3.2",
-    "rimraf": "^5.0.8",
-    "tap": "^20.0.3",
-    "ts-node": "^10.9.2",
-    "tshy": "^2.0.1",
-    "typedoc": "^0.26.3",
-    "typescript": "^5.5.3"
-  },
-  "tap": {
-    "typecheck": true
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/path-scurry"
-  },
-  "dependencies": {
-    "lru-cache": "^11.0.0",
-    "minipass": "^7.1.2"
-  },
-  "tshy": {
-    "selfLink": false,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "types": "./dist/commonjs/index.d.ts",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/ansi-styles/index.js b/node_modules/ansi-styles/index.js
index d7bede44b7b6b..eaa7bed6cb1ed 100644
--- a/node_modules/ansi-styles/index.js
+++ b/node_modules/ansi-styles/index.js
@@ -109,7 +109,7 @@ function assembleStyles() {
 	// From https://github.com/Qix-/color-convert/blob/3f0e0d4e92e235796ccb17f6e85c72094a651f49/conversions.js
 	Object.defineProperties(styles, {
 		rgbToAnsi256: {
-			value: (red, green, blue) => {
+			value(red, green, blue) {
 				// We use the extended greyscale palette here, with the exception of
 				// black and white. normal palette only has 4 greyscale shades.
 				if (red === green && green === blue) {
@@ -132,7 +132,7 @@ function assembleStyles() {
 			enumerable: false,
 		},
 		hexToRgb: {
-			value: hex => {
+			value(hex) {
 				const matches = /[a-f\d]{6}|[a-f\d]{3}/i.exec(hex.toString(16));
 				if (!matches) {
 					return [0, 0, 0];
@@ -161,7 +161,7 @@ function assembleStyles() {
 			enumerable: false,
 		},
 		ansi256ToAnsi: {
-			value: code => {
+			value(code) {
 				if (code < 8) {
 					return 30 + code;
 				}
diff --git a/node_modules/ansi-styles/package.json b/node_modules/ansi-styles/package.json
index 6cd3ca5bf95d0..16b508f0f3a04 100644
--- a/node_modules/ansi-styles/package.json
+++ b/node_modules/ansi-styles/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "ansi-styles",
-	"version": "6.2.1",
+	"version": "6.2.3",
 	"description": "ANSI escape codes for styling strings in the terminal",
 	"license": "MIT",
 	"repository": "chalk/ansi-styles",
@@ -46,9 +46,9 @@
 		"text"
 	],
 	"devDependencies": {
-		"ava": "^3.15.0",
+		"ava": "^6.1.3",
 		"svg-term-cli": "^2.1.1",
-		"tsd": "^0.19.0",
-		"xo": "^0.47.0"
+		"tsd": "^0.31.1",
+		"xo": "^0.58.0"
 	}
 }
diff --git a/node_modules/cacache/node_modules/glob/LICENSE b/node_modules/cacache/node_modules/glob/LICENSE
deleted file mode 100644
index ec7df93329abf..0000000000000
--- a/node_modules/cacache/node_modules/glob/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2009-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/glob.js b/node_modules/cacache/node_modules/glob/dist/commonjs/glob.js
deleted file mode 100644
index e1339bbbcf57f..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/glob.js
+++ /dev/null
@@ -1,247 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Glob = void 0;
-const minimatch_1 = require("minimatch");
-const node_url_1 = require("node:url");
-const path_scurry_1 = require("path-scurry");
-const pattern_js_1 = require("./pattern.js");
-const walker_js_1 = require("./walker.js");
-// if no process global, just call it linux.
-// so we default to case-sensitive, / separators
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * An object that can perform glob pattern traversals.
- */
-class Glob {
-    absolute;
-    cwd;
-    root;
-    dot;
-    dotRelative;
-    follow;
-    ignore;
-    magicalBraces;
-    mark;
-    matchBase;
-    maxDepth;
-    nobrace;
-    nocase;
-    nodir;
-    noext;
-    noglobstar;
-    pattern;
-    platform;
-    realpath;
-    scurry;
-    stat;
-    signal;
-    windowsPathsNoEscape;
-    withFileTypes;
-    includeChildMatches;
-    /**
-     * The options provided to the constructor.
-     */
-    opts;
-    /**
-     * An array of parsed immutable {@link Pattern} objects.
-     */
-    patterns;
-    /**
-     * All options are stored as properties on the `Glob` object.
-     *
-     * See {@link GlobOptions} for full options descriptions.
-     *
-     * Note that a previous `Glob` object can be passed as the
-     * `GlobOptions` to another `Glob` instantiation to re-use settings
-     * and caches with a new pattern.
-     *
-     * Traversal functions can be called multiple times to run the walk
-     * again.
-     */
-    constructor(pattern, opts) {
-        /* c8 ignore start */
-        if (!opts)
-            throw new TypeError('glob options required');
-        /* c8 ignore stop */
-        this.withFileTypes = !!opts.withFileTypes;
-        this.signal = opts.signal;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.dotRelative = !!opts.dotRelative;
-        this.nodir = !!opts.nodir;
-        this.mark = !!opts.mark;
-        if (!opts.cwd) {
-            this.cwd = '';
-        }
-        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
-            opts.cwd = (0, node_url_1.fileURLToPath)(opts.cwd);
-        }
-        this.cwd = opts.cwd || '';
-        this.root = opts.root;
-        this.magicalBraces = !!opts.magicalBraces;
-        this.nobrace = !!opts.nobrace;
-        this.noext = !!opts.noext;
-        this.realpath = !!opts.realpath;
-        this.absolute = opts.absolute;
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        this.noglobstar = !!opts.noglobstar;
-        this.matchBase = !!opts.matchBase;
-        this.maxDepth =
-            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
-        this.stat = !!opts.stat;
-        this.ignore = opts.ignore;
-        if (this.withFileTypes && this.absolute !== undefined) {
-            throw new Error('cannot set absolute and withFileTypes:true');
-        }
-        if (typeof pattern === 'string') {
-            pattern = [pattern];
-        }
-        this.windowsPathsNoEscape =
-            !!opts.windowsPathsNoEscape ||
-                opts.allowWindowsEscape ===
-                    false;
-        if (this.windowsPathsNoEscape) {
-            pattern = pattern.map(p => p.replace(/\\/g, '/'));
-        }
-        if (this.matchBase) {
-            if (opts.noglobstar) {
-                throw new TypeError('base matching requires globstar');
-            }
-            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
-        }
-        this.pattern = pattern;
-        this.platform = opts.platform || defaultPlatform;
-        this.opts = { ...opts, platform: this.platform };
-        if (opts.scurry) {
-            this.scurry = opts.scurry;
-            if (opts.nocase !== undefined &&
-                opts.nocase !== opts.scurry.nocase) {
-                throw new Error('nocase option contradicts provided scurry option');
-            }
-        }
-        else {
-            const Scurry = opts.platform === 'win32' ? path_scurry_1.PathScurryWin32
-                : opts.platform === 'darwin' ? path_scurry_1.PathScurryDarwin
-                    : opts.platform ? path_scurry_1.PathScurryPosix
-                        : path_scurry_1.PathScurry;
-            this.scurry = new Scurry(this.cwd, {
-                nocase: opts.nocase,
-                fs: opts.fs,
-            });
-        }
-        this.nocase = this.scurry.nocase;
-        // If you do nocase:true on a case-sensitive file system, then
-        // we need to use regexps instead of strings for non-magic
-        // path portions, because statting `aBc` won't return results
-        // for the file `AbC` for example.
-        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
-        const mmo = {
-            // default nocase based on platform
-            ...opts,
-            dot: this.dot,
-            matchBase: this.matchBase,
-            nobrace: this.nobrace,
-            nocase: this.nocase,
-            nocaseMagicOnly,
-            nocomment: true,
-            noext: this.noext,
-            nonegate: true,
-            optimizationLevel: 2,
-            platform: this.platform,
-            windowsPathsNoEscape: this.windowsPathsNoEscape,
-            debug: !!this.opts.debug,
-        };
-        const mms = this.pattern.map(p => new minimatch_1.Minimatch(p, mmo));
-        const [matchSet, globParts] = mms.reduce((set, m) => {
-            set[0].push(...m.set);
-            set[1].push(...m.globParts);
-            return set;
-        }, [[], []]);
-        this.patterns = matchSet.map((set, i) => {
-            const g = globParts[i];
-            /* c8 ignore start */
-            if (!g)
-                throw new Error('invalid pattern object');
-            /* c8 ignore stop */
-            return new pattern_js_1.Pattern(set, g, 0, this.platform);
-        });
-    }
-    async walk() {
-        // Walkers always return array of Path objects, so we just have to
-        // coerce them into the right shape.  It will have already called
-        // realpath() if the option was set to do so, so we know that's cached.
-        // start out knowing the cwd, at least
-        return [
-            ...(await new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walk()),
-        ];
-    }
-    walkSync() {
-        return [
-            ...new walker_js_1.GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walkSync(),
-        ];
-    }
-    stream() {
-        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).stream();
-    }
-    streamSync() {
-        return new walker_js_1.GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).streamSync();
-    }
-    /**
-     * Default sync iteration function. Returns a Generator that
-     * iterates over the results.
-     */
-    iterateSync() {
-        return this.streamSync()[Symbol.iterator]();
-    }
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    /**
-     * Default async iteration function. Returns an AsyncGenerator that
-     * iterates over the results.
-     */
-    iterate() {
-        return this.stream()[Symbol.asyncIterator]();
-    }
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-}
-exports.Glob = Glob;
-//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/cacache/node_modules/glob/dist/commonjs/has-magic.js
deleted file mode 100644
index 0918bd57e0f1c..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/has-magic.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.hasMagic = void 0;
-const minimatch_1 = require("minimatch");
-/**
- * Return true if the patterns provided contain any magic glob characters,
- * given the options provided.
- *
- * Brace expansion is not considered "magic" unless the `magicalBraces` option
- * is set, as brace expansion just turns one string into an array of strings.
- * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
- * `'xby'` both do not contain any magic glob characters, and it's treated the
- * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
- * is in the options, brace expansion _is_ treated as a pattern having magic.
- */
-const hasMagic = (pattern, options = {}) => {
-    if (!Array.isArray(pattern)) {
-        pattern = [pattern];
-    }
-    for (const p of pattern) {
-        if (new minimatch_1.Minimatch(p, options).hasMagic())
-            return true;
-    }
-    return false;
-};
-exports.hasMagic = hasMagic;
-//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/ignore.js b/node_modules/cacache/node_modules/glob/dist/commonjs/ignore.js
deleted file mode 100644
index 5f1fde0680dea..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/ignore.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-// give it a pattern, and it'll be able to tell you if
-// a given path should be ignored.
-// Ignoring a path ignores its children if the pattern ends in /**
-// Ignores are always parsed in dot:true mode
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Ignore = void 0;
-const minimatch_1 = require("minimatch");
-const pattern_js_1 = require("./pattern.js");
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * Class used to process ignored patterns
- */
-class Ignore {
-    relative;
-    relativeChildren;
-    absolute;
-    absoluteChildren;
-    platform;
-    mmopts;
-    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
-        this.relative = [];
-        this.absolute = [];
-        this.relativeChildren = [];
-        this.absoluteChildren = [];
-        this.platform = platform;
-        this.mmopts = {
-            dot: true,
-            nobrace,
-            nocase,
-            noext,
-            noglobstar,
-            optimizationLevel: 2,
-            platform,
-            nocomment: true,
-            nonegate: true,
-        };
-        for (const ign of ignored)
-            this.add(ign);
-    }
-    add(ign) {
-        // this is a little weird, but it gives us a clean set of optimized
-        // minimatch matchers, without getting tripped up if one of them
-        // ends in /** inside a brace section, and it's only inefficient at
-        // the start of the walk, not along it.
-        // It'd be nice if the Pattern class just had a .test() method, but
-        // handling globstars is a bit of a pita, and that code already lives
-        // in minimatch anyway.
-        // Another way would be if maybe Minimatch could take its set/globParts
-        // as an option, and then we could at least just use Pattern to test
-        // for absolute-ness.
-        // Yet another way, Minimatch could take an array of glob strings, and
-        // a cwd option, and do the right thing.
-        const mm = new minimatch_1.Minimatch(ign, this.mmopts);
-        for (let i = 0; i < mm.set.length; i++) {
-            const parsed = mm.set[i];
-            const globParts = mm.globParts[i];
-            /* c8 ignore start */
-            if (!parsed || !globParts) {
-                throw new Error('invalid pattern object');
-            }
-            // strip off leading ./ portions
-            // https://github.com/isaacs/node-glob/issues/570
-            while (parsed[0] === '.' && globParts[0] === '.') {
-                parsed.shift();
-                globParts.shift();
-            }
-            /* c8 ignore stop */
-            const p = new pattern_js_1.Pattern(parsed, globParts, 0, this.platform);
-            const m = new minimatch_1.Minimatch(p.globString(), this.mmopts);
-            const children = globParts[globParts.length - 1] === '**';
-            const absolute = p.isAbsolute();
-            if (absolute)
-                this.absolute.push(m);
-            else
-                this.relative.push(m);
-            if (children) {
-                if (absolute)
-                    this.absoluteChildren.push(m);
-                else
-                    this.relativeChildren.push(m);
-            }
-        }
-    }
-    ignored(p) {
-        const fullpath = p.fullpath();
-        const fullpaths = `${fullpath}/`;
-        const relative = p.relative() || '.';
-        const relatives = `${relative}/`;
-        for (const m of this.relative) {
-            if (m.match(relative) || m.match(relatives))
-                return true;
-        }
-        for (const m of this.absolute) {
-            if (m.match(fullpath) || m.match(fullpaths))
-                return true;
-        }
-        return false;
-    }
-    childrenIgnored(p) {
-        const fullpath = p.fullpath() + '/';
-        const relative = (p.relative() || '.') + '/';
-        for (const m of this.relativeChildren) {
-            if (m.match(relative))
-                return true;
-        }
-        for (const m of this.absoluteChildren) {
-            if (m.match(fullpath))
-                return true;
-        }
-        return false;
-    }
-}
-exports.Ignore = Ignore;
-//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/index.js b/node_modules/cacache/node_modules/glob/dist/commonjs/index.js
deleted file mode 100644
index 151495d170efa..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/index.js
+++ /dev/null
@@ -1,68 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.glob = exports.sync = exports.iterate = exports.iterateSync = exports.stream = exports.streamSync = exports.Ignore = exports.hasMagic = exports.Glob = exports.unescape = exports.escape = void 0;
-exports.globStreamSync = globStreamSync;
-exports.globStream = globStream;
-exports.globSync = globSync;
-exports.globIterateSync = globIterateSync;
-exports.globIterate = globIterate;
-const minimatch_1 = require("minimatch");
-const glob_js_1 = require("./glob.js");
-const has_magic_js_1 = require("./has-magic.js");
-var minimatch_2 = require("minimatch");
-Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return minimatch_2.escape; } });
-Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return minimatch_2.unescape; } });
-var glob_js_2 = require("./glob.js");
-Object.defineProperty(exports, "Glob", { enumerable: true, get: function () { return glob_js_2.Glob; } });
-var has_magic_js_2 = require("./has-magic.js");
-Object.defineProperty(exports, "hasMagic", { enumerable: true, get: function () { return has_magic_js_2.hasMagic; } });
-var ignore_js_1 = require("./ignore.js");
-Object.defineProperty(exports, "Ignore", { enumerable: true, get: function () { return ignore_js_1.Ignore; } });
-function globStreamSync(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).streamSync();
-}
-function globStream(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).stream();
-}
-function globSync(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).walkSync();
-}
-async function glob_(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).walk();
-}
-function globIterateSync(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).iterateSync();
-}
-function globIterate(pattern, options = {}) {
-    return new glob_js_1.Glob(pattern, options).iterate();
-}
-// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
-exports.streamSync = globStreamSync;
-exports.stream = Object.assign(globStream, { sync: globStreamSync });
-exports.iterateSync = globIterateSync;
-exports.iterate = Object.assign(globIterate, {
-    sync: globIterateSync,
-});
-exports.sync = Object.assign(globSync, {
-    stream: globStreamSync,
-    iterate: globIterateSync,
-});
-exports.glob = Object.assign(glob_, {
-    glob: glob_,
-    globSync,
-    sync: exports.sync,
-    globStream,
-    stream: exports.stream,
-    globStreamSync,
-    streamSync: exports.streamSync,
-    globIterate,
-    iterate: exports.iterate,
-    globIterateSync,
-    iterateSync: exports.iterateSync,
-    Glob: glob_js_1.Glob,
-    hasMagic: has_magic_js_1.hasMagic,
-    escape: minimatch_1.escape,
-    unescape: minimatch_1.unescape,
-});
-exports.glob.glob = exports.glob;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/package.json b/node_modules/cacache/node_modules/glob/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/pattern.js b/node_modules/cacache/node_modules/glob/dist/commonjs/pattern.js
deleted file mode 100644
index f0de35fb5bed9..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/pattern.js
+++ /dev/null
@@ -1,219 +0,0 @@
-"use strict";
-// this is just a very light wrapper around 2 arrays with an offset index
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Pattern = void 0;
-const minimatch_1 = require("minimatch");
-const isPatternList = (pl) => pl.length >= 1;
-const isGlobList = (gl) => gl.length >= 1;
-/**
- * An immutable-ish view on an array of glob parts and their parsed
- * results
- */
-class Pattern {
-    #patternList;
-    #globList;
-    #index;
-    length;
-    #platform;
-    #rest;
-    #globString;
-    #isDrive;
-    #isUNC;
-    #isAbsolute;
-    #followGlobstar = true;
-    constructor(patternList, globList, index, platform) {
-        if (!isPatternList(patternList)) {
-            throw new TypeError('empty pattern list');
-        }
-        if (!isGlobList(globList)) {
-            throw new TypeError('empty glob list');
-        }
-        if (globList.length !== patternList.length) {
-            throw new TypeError('mismatched pattern list and glob list lengths');
-        }
-        this.length = patternList.length;
-        if (index < 0 || index >= this.length) {
-            throw new TypeError('index out of range');
-        }
-        this.#patternList = patternList;
-        this.#globList = globList;
-        this.#index = index;
-        this.#platform = platform;
-        // normalize root entries of absolute patterns on initial creation.
-        if (this.#index === 0) {
-            // c: => ['c:/']
-            // C:/ => ['C:/']
-            // C:/x => ['C:/', 'x']
-            // //host/share => ['//host/share/']
-            // //host/share/ => ['//host/share/']
-            // //host/share/x => ['//host/share/', 'x']
-            // /etc => ['/', 'etc']
-            // / => ['/']
-            if (this.isUNC()) {
-                // '' / '' / 'host' / 'share'
-                const [p0, p1, p2, p3, ...prest] = this.#patternList;
-                const [g0, g1, g2, g3, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = [p0, p1, p2, p3, ''].join('/');
-                const g = [g0, g1, g2, g3, ''].join('/');
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-            else if (this.isDrive() || this.isAbsolute()) {
-                const [p1, ...prest] = this.#patternList;
-                const [g1, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = p1 + '/';
-                const g = g1 + '/';
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-        }
-    }
-    /**
-     * The first entry in the parsed list of patterns
-     */
-    pattern() {
-        return this.#patternList[this.#index];
-    }
-    /**
-     * true if pattern() returns a string
-     */
-    isString() {
-        return typeof this.#patternList[this.#index] === 'string';
-    }
-    /**
-     * true if pattern() returns GLOBSTAR
-     */
-    isGlobstar() {
-        return this.#patternList[this.#index] === minimatch_1.GLOBSTAR;
-    }
-    /**
-     * true if pattern() returns a regexp
-     */
-    isRegExp() {
-        return this.#patternList[this.#index] instanceof RegExp;
-    }
-    /**
-     * The /-joined set of glob parts that make up this pattern
-     */
-    globString() {
-        return (this.#globString =
-            this.#globString ||
-                (this.#index === 0 ?
-                    this.isAbsolute() ?
-                        this.#globList[0] + this.#globList.slice(1).join('/')
-                        : this.#globList.join('/')
-                    : this.#globList.slice(this.#index).join('/')));
-    }
-    /**
-     * true if there are more pattern parts after this one
-     */
-    hasMore() {
-        return this.length > this.#index + 1;
-    }
-    /**
-     * The rest of the pattern after this part, or null if this is the end
-     */
-    rest() {
-        if (this.#rest !== undefined)
-            return this.#rest;
-        if (!this.hasMore())
-            return (this.#rest = null);
-        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
-        this.#rest.#isAbsolute = this.#isAbsolute;
-        this.#rest.#isUNC = this.#isUNC;
-        this.#rest.#isDrive = this.#isDrive;
-        return this.#rest;
-    }
-    /**
-     * true if the pattern represents a //unc/path/ on windows
-     */
-    isUNC() {
-        const pl = this.#patternList;
-        return this.#isUNC !== undefined ?
-            this.#isUNC
-            : (this.#isUNC =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    pl[0] === '' &&
-                    pl[1] === '' &&
-                    typeof pl[2] === 'string' &&
-                    !!pl[2] &&
-                    typeof pl[3] === 'string' &&
-                    !!pl[3]);
-    }
-    // pattern like C:/...
-    // split = ['C:', ...]
-    // XXX: would be nice to handle patterns like `c:*` to test the cwd
-    // in c: for *, but I don't know of a way to even figure out what that
-    // cwd is without actually chdir'ing into it?
-    /**
-     * True if the pattern starts with a drive letter on Windows
-     */
-    isDrive() {
-        const pl = this.#patternList;
-        return this.#isDrive !== undefined ?
-            this.#isDrive
-            : (this.#isDrive =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    this.length > 1 &&
-                    typeof pl[0] === 'string' &&
-                    /^[a-z]:$/i.test(pl[0]));
-    }
-    // pattern = '/' or '/...' or '/x/...'
-    // split = ['', ''] or ['', ...] or ['', 'x', ...]
-    // Drive and UNC both considered absolute on windows
-    /**
-     * True if the pattern is rooted on an absolute path
-     */
-    isAbsolute() {
-        const pl = this.#patternList;
-        return this.#isAbsolute !== undefined ?
-            this.#isAbsolute
-            : (this.#isAbsolute =
-                (pl[0] === '' && pl.length > 1) ||
-                    this.isDrive() ||
-                    this.isUNC());
-    }
-    /**
-     * consume the root of the pattern, and return it
-     */
-    root() {
-        const p = this.#patternList[0];
-        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
-            p
-            : '';
-    }
-    /**
-     * Check to see if the current globstar pattern is allowed to follow
-     * a symbolic link.
-     */
-    checkFollowGlobstar() {
-        return !(this.#index === 0 ||
-            !this.isGlobstar() ||
-            !this.#followGlobstar);
-    }
-    /**
-     * Mark that the current globstar pattern is following a symbolic link
-     */
-    markFollowGlobstar() {
-        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
-            return false;
-        this.#followGlobstar = false;
-        return true;
-    }
-}
-exports.Pattern = Pattern;
-//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/processor.js b/node_modules/cacache/node_modules/glob/dist/commonjs/processor.js
deleted file mode 100644
index ee3bb4397e0b2..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/processor.js
+++ /dev/null
@@ -1,301 +0,0 @@
-"use strict";
-// synchronous utility for filtering entries and calculating subwalks
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Processor = exports.SubWalks = exports.MatchRecord = exports.HasWalkedCache = void 0;
-const minimatch_1 = require("minimatch");
-/**
- * A cache of which patterns have been processed for a given Path
- */
-class HasWalkedCache {
-    store;
-    constructor(store = new Map()) {
-        this.store = store;
-    }
-    copy() {
-        return new HasWalkedCache(new Map(this.store));
-    }
-    hasWalked(target, pattern) {
-        return this.store.get(target.fullpath())?.has(pattern.globString());
-    }
-    storeWalked(target, pattern) {
-        const fullpath = target.fullpath();
-        const cached = this.store.get(fullpath);
-        if (cached)
-            cached.add(pattern.globString());
-        else
-            this.store.set(fullpath, new Set([pattern.globString()]));
-    }
-}
-exports.HasWalkedCache = HasWalkedCache;
-/**
- * A record of which paths have been matched in a given walk step,
- * and whether they only are considered a match if they are a directory,
- * and whether their absolute or relative path should be returned.
- */
-class MatchRecord {
-    store = new Map();
-    add(target, absolute, ifDir) {
-        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
-        const current = this.store.get(target);
-        this.store.set(target, current === undefined ? n : n & current);
-    }
-    // match, absolute, ifdir
-    entries() {
-        return [...this.store.entries()].map(([path, n]) => [
-            path,
-            !!(n & 2),
-            !!(n & 1),
-        ]);
-    }
-}
-exports.MatchRecord = MatchRecord;
-/**
- * A collection of patterns that must be processed in a subsequent step
- * for a given path.
- */
-class SubWalks {
-    store = new Map();
-    add(target, pattern) {
-        if (!target.canReaddir()) {
-            return;
-        }
-        const subs = this.store.get(target);
-        if (subs) {
-            if (!subs.find(p => p.globString() === pattern.globString())) {
-                subs.push(pattern);
-            }
-        }
-        else
-            this.store.set(target, [pattern]);
-    }
-    get(target) {
-        const subs = this.store.get(target);
-        /* c8 ignore start */
-        if (!subs) {
-            throw new Error('attempting to walk unknown path');
-        }
-        /* c8 ignore stop */
-        return subs;
-    }
-    entries() {
-        return this.keys().map(k => [k, this.store.get(k)]);
-    }
-    keys() {
-        return [...this.store.keys()].filter(t => t.canReaddir());
-    }
-}
-exports.SubWalks = SubWalks;
-/**
- * The class that processes patterns for a given path.
- *
- * Handles child entry filtering, and determining whether a path's
- * directory contents must be read.
- */
-class Processor {
-    hasWalkedCache;
-    matches = new MatchRecord();
-    subwalks = new SubWalks();
-    patterns;
-    follow;
-    dot;
-    opts;
-    constructor(opts, hasWalkedCache) {
-        this.opts = opts;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.hasWalkedCache =
-            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
-    }
-    processPatterns(target, patterns) {
-        this.patterns = patterns;
-        const processingSet = patterns.map(p => [target, p]);
-        // map of paths to the magic-starting subwalks they need to walk
-        // first item in patterns is the filter
-        for (let [t, pattern] of processingSet) {
-            this.hasWalkedCache.storeWalked(t, pattern);
-            const root = pattern.root();
-            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
-            // start absolute patterns at root
-            if (root) {
-                t = t.resolve(root === '/' && this.opts.root !== undefined ?
-                    this.opts.root
-                    : root);
-                const rest = pattern.rest();
-                if (!rest) {
-                    this.matches.add(t, true, false);
-                    continue;
-                }
-                else {
-                    pattern = rest;
-                }
-            }
-            if (t.isENOENT())
-                continue;
-            let p;
-            let rest;
-            let changed = false;
-            while (typeof (p = pattern.pattern()) === 'string' &&
-                (rest = pattern.rest())) {
-                const c = t.resolve(p);
-                t = c;
-                pattern = rest;
-                changed = true;
-            }
-            p = pattern.pattern();
-            rest = pattern.rest();
-            if (changed) {
-                if (this.hasWalkedCache.hasWalked(t, pattern))
-                    continue;
-                this.hasWalkedCache.storeWalked(t, pattern);
-            }
-            // now we have either a final string for a known entry,
-            // more strings for an unknown entry,
-            // or a pattern starting with magic, mounted on t.
-            if (typeof p === 'string') {
-                // must not be final entry, otherwise we would have
-                // concatenated it earlier.
-                const ifDir = p === '..' || p === '' || p === '.';
-                this.matches.add(t.resolve(p), absolute, ifDir);
-                continue;
-            }
-            else if (p === minimatch_1.GLOBSTAR) {
-                // if no rest, match and subwalk pattern
-                // if rest, process rest and subwalk pattern
-                // if it's a symlink, but we didn't get here by way of a
-                // globstar match (meaning it's the first time THIS globstar
-                // has traversed a symlink), then we follow it. Otherwise, stop.
-                if (!t.isSymbolicLink() ||
-                    this.follow ||
-                    pattern.checkFollowGlobstar()) {
-                    this.subwalks.add(t, pattern);
-                }
-                const rp = rest?.pattern();
-                const rrest = rest?.rest();
-                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
-                    // only HAS to be a dir if it ends in **/ or **/.
-                    // but ending in ** will match files as well.
-                    this.matches.add(t, absolute, rp === '' || rp === '.');
-                }
-                else {
-                    if (rp === '..') {
-                        // this would mean you're matching **/.. at the fs root,
-                        // and no thanks, I'm not gonna test that specific case.
-                        /* c8 ignore start */
-                        const tp = t.parent || t;
-                        /* c8 ignore stop */
-                        if (!rrest)
-                            this.matches.add(tp, absolute, true);
-                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
-                            this.subwalks.add(tp, rrest);
-                        }
-                    }
-                }
-            }
-            else if (p instanceof RegExp) {
-                this.subwalks.add(t, pattern);
-            }
-        }
-        return this;
-    }
-    subwalkTargets() {
-        return this.subwalks.keys();
-    }
-    child() {
-        return new Processor(this.opts, this.hasWalkedCache);
-    }
-    // return a new Processor containing the subwalks for each
-    // child entry, and a set of matches, and
-    // a hasWalkedCache that's a copy of this one
-    // then we're going to call
-    filterEntries(parent, entries) {
-        const patterns = this.subwalks.get(parent);
-        // put matches and entry walks into the results processor
-        const results = this.child();
-        for (const e of entries) {
-            for (const pattern of patterns) {
-                const absolute = pattern.isAbsolute();
-                const p = pattern.pattern();
-                const rest = pattern.rest();
-                if (p === minimatch_1.GLOBSTAR) {
-                    results.testGlobstar(e, pattern, rest, absolute);
-                }
-                else if (p instanceof RegExp) {
-                    results.testRegExp(e, p, rest, absolute);
-                }
-                else {
-                    results.testString(e, p, rest, absolute);
-                }
-            }
-        }
-        return results;
-    }
-    testGlobstar(e, pattern, rest, absolute) {
-        if (this.dot || !e.name.startsWith('.')) {
-            if (!pattern.hasMore()) {
-                this.matches.add(e, absolute, false);
-            }
-            if (e.canReaddir()) {
-                // if we're in follow mode or it's not a symlink, just keep
-                // testing the same pattern. If there's more after the globstar,
-                // then this symlink consumes the globstar. If not, then we can
-                // follow at most ONE symlink along the way, so we mark it, which
-                // also checks to ensure that it wasn't already marked.
-                if (this.follow || !e.isSymbolicLink()) {
-                    this.subwalks.add(e, pattern);
-                }
-                else if (e.isSymbolicLink()) {
-                    if (rest && pattern.checkFollowGlobstar()) {
-                        this.subwalks.add(e, rest);
-                    }
-                    else if (pattern.markFollowGlobstar()) {
-                        this.subwalks.add(e, pattern);
-                    }
-                }
-            }
-        }
-        // if the NEXT thing matches this entry, then also add
-        // the rest.
-        if (rest) {
-            const rp = rest.pattern();
-            if (typeof rp === 'string' &&
-                // dots and empty were handled already
-                rp !== '..' &&
-                rp !== '' &&
-                rp !== '.') {
-                this.testString(e, rp, rest.rest(), absolute);
-            }
-            else if (rp === '..') {
-                /* c8 ignore start */
-                const ep = e.parent || e;
-                /* c8 ignore stop */
-                this.subwalks.add(ep, rest);
-            }
-            else if (rp instanceof RegExp) {
-                this.testRegExp(e, rp, rest.rest(), absolute);
-            }
-        }
-    }
-    testRegExp(e, p, rest, absolute) {
-        if (!p.test(e.name))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-    testString(e, p, rest, absolute) {
-        // should never happen?
-        if (!e.isNamed(p))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-}
-exports.Processor = Processor;
-//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/commonjs/walker.js b/node_modules/cacache/node_modules/glob/dist/commonjs/walker.js
deleted file mode 100644
index cb15946d9a852..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/commonjs/walker.js
+++ /dev/null
@@ -1,387 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.GlobStream = exports.GlobWalker = exports.GlobUtil = void 0;
-/**
- * Single-use utility classes to provide functionality to the {@link Glob}
- * methods.
- *
- * @module
- */
-const minipass_1 = require("minipass");
-const ignore_js_1 = require("./ignore.js");
-const processor_js_1 = require("./processor.js");
-const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new ignore_js_1.Ignore([ignore], opts)
-    : Array.isArray(ignore) ? new ignore_js_1.Ignore(ignore, opts)
-        : ignore;
-/**
- * basic walking utilities that all the glob walker types use
- */
-class GlobUtil {
-    path;
-    patterns;
-    opts;
-    seen = new Set();
-    paused = false;
-    aborted = false;
-    #onResume = [];
-    #ignore;
-    #sep;
-    signal;
-    maxDepth;
-    includeChildMatches;
-    constructor(patterns, path, opts) {
-        this.patterns = patterns;
-        this.path = path;
-        this.opts = opts;
-        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        if (opts.ignore || !this.includeChildMatches) {
-            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
-            if (!this.includeChildMatches &&
-                typeof this.#ignore.add !== 'function') {
-                const m = 'cannot ignore child matches, ignore lacks add() method.';
-                throw new Error(m);
-            }
-        }
-        // ignore, always set with maxDepth, but it's optional on the
-        // GlobOptions type
-        /* c8 ignore start */
-        this.maxDepth = opts.maxDepth || Infinity;
-        /* c8 ignore stop */
-        if (opts.signal) {
-            this.signal = opts.signal;
-            this.signal.addEventListener('abort', () => {
-                this.#onResume.length = 0;
-            });
-        }
-    }
-    #ignored(path) {
-        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
-    }
-    #childrenIgnored(path) {
-        return !!this.#ignore?.childrenIgnored?.(path);
-    }
-    // backpressure mechanism
-    pause() {
-        this.paused = true;
-    }
-    resume() {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore stop */
-        this.paused = false;
-        let fn = undefined;
-        while (!this.paused && (fn = this.#onResume.shift())) {
-            fn();
-        }
-    }
-    onResume(fn) {
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore start */
-        if (!this.paused) {
-            fn();
-        }
-        else {
-            /* c8 ignore stop */
-            this.#onResume.push(fn);
-        }
-    }
-    // do the requisite realpath/stat checking, and return the path
-    // to add or undefined to filter it out.
-    async matchCheck(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || (await e.realpath());
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? await e.lstat() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = await s.realpath();
-            /* c8 ignore start */
-            if (target && (target.isUnknown() || this.opts.stat)) {
-                await target.lstat();
-            }
-            /* c8 ignore stop */
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchCheckTest(e, ifDir) {
-        return (e &&
-            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
-            (!ifDir || e.canReaddir()) &&
-            (!this.opts.nodir || !e.isDirectory()) &&
-            (!this.opts.nodir ||
-                !this.opts.follow ||
-                !e.isSymbolicLink() ||
-                !e.realpathCached()?.isDirectory()) &&
-            !this.#ignored(e)) ?
-            e
-            : undefined;
-    }
-    matchCheckSync(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || e.realpathSync();
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? e.lstatSync() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = s.realpathSync();
-            if (target && (target?.isUnknown() || this.opts.stat)) {
-                target.lstatSync();
-            }
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchFinish(e, absolute) {
-        if (this.#ignored(e))
-            return;
-        // we know we have an ignore if this is false, but TS doesn't
-        if (!this.includeChildMatches && this.#ignore?.add) {
-            const ign = `${e.relativePosix()}/**`;
-            this.#ignore.add(ign);
-        }
-        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
-        this.seen.add(e);
-        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
-        // ok, we have what we need!
-        if (this.opts.withFileTypes) {
-            this.matchEmit(e);
-        }
-        else if (abs) {
-            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
-            this.matchEmit(abs + mark);
-        }
-        else {
-            const rel = this.opts.posix ? e.relativePosix() : e.relative();
-            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
-                '.' + this.#sep
-                : '';
-            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
-        }
-    }
-    async match(e, absolute, ifDir) {
-        const p = await this.matchCheck(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    matchSync(e, absolute, ifDir) {
-        const p = this.matchCheckSync(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    walkCB(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2(target, patterns, new processor_js_1.Processor(this.opts), cb);
-    }
-    walkCB2(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const childrenCached = t.readdirCached();
-            if (t.calledReaddir())
-                this.walkCB3(t, childrenCached, processor, next);
-            else {
-                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
-            }
-        }
-        next();
-    }
-    walkCB3(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-    walkCBSync(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2Sync(target, patterns, new processor_js_1.Processor(this.opts), cb);
-    }
-    walkCB2Sync(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const children = t.readdirSync();
-            this.walkCB3Sync(t, children, processor, next);
-        }
-        next();
-    }
-    walkCB3Sync(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2Sync(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-}
-exports.GlobUtil = GlobUtil;
-class GlobWalker extends GlobUtil {
-    matches = new Set();
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-    }
-    matchEmit(e) {
-        this.matches.add(e);
-    }
-    async walk() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            await this.path.lstat();
-        }
-        await new Promise((res, rej) => {
-            this.walkCB(this.path, this.patterns, () => {
-                if (this.signal?.aborted) {
-                    rej(this.signal.reason);
-                }
-                else {
-                    res(this.matches);
-                }
-            });
-        });
-        return this.matches;
-    }
-    walkSync() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        // nothing for the callback to do, because this never pauses
-        this.walkCBSync(this.path, this.patterns, () => {
-            if (this.signal?.aborted)
-                throw this.signal.reason;
-        });
-        return this.matches;
-    }
-}
-exports.GlobWalker = GlobWalker;
-class GlobStream extends GlobUtil {
-    results;
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-        this.results = new minipass_1.Minipass({
-            signal: this.signal,
-            objectMode: true,
-        });
-        this.results.on('drain', () => this.resume());
-        this.results.on('resume', () => this.resume());
-    }
-    matchEmit(e) {
-        this.results.write(e);
-        if (!this.results.flowing)
-            this.pause();
-    }
-    stream() {
-        const target = this.path;
-        if (target.isUnknown()) {
-            target.lstat().then(() => {
-                this.walkCB(target, this.patterns, () => this.results.end());
-            });
-        }
-        else {
-            this.walkCB(target, this.patterns, () => this.results.end());
-        }
-        return this.results;
-    }
-    streamSync() {
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        this.walkCBSync(this.path, this.patterns, () => this.results.end());
-        return this.results;
-    }
-}
-exports.GlobStream = GlobStream;
-//# sourceMappingURL=walker.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/bin.d.mts b/node_modules/cacache/node_modules/glob/dist/esm/bin.d.mts
deleted file mode 100644
index 77298e4770817..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/bin.d.mts
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env node
-export {};
-//# sourceMappingURL=bin.d.mts.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/bin.mjs b/node_modules/cacache/node_modules/glob/dist/esm/bin.mjs
deleted file mode 100755
index 553bb79303d90..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/bin.mjs
+++ /dev/null
@@ -1,276 +0,0 @@
-#!/usr/bin/env node
-import { foregroundChild } from 'foreground-child';
-import { existsSync } from 'fs';
-import { jack } from 'jackspeak';
-import { loadPackageJson } from 'package-json-from-dist';
-import { join } from 'path';
-import { globStream } from './index.js';
-const { version } = loadPackageJson(import.meta.url, '../package.json');
-const j = jack({
-    usage: 'glob [options] [ [ ...]]',
-})
-    .description(`
-    Glob v${version}
-
-    Expand the positional glob expression arguments into any matching file
-    system paths found.
-  `)
-    .opt({
-    cmd: {
-        short: 'c',
-        hint: 'command',
-        description: `Run the command provided, passing the glob expression
-                    matches as arguments.`,
-    },
-})
-    .opt({
-    default: {
-        short: 'p',
-        hint: 'pattern',
-        description: `If no positional arguments are provided, glob will use
-                    this pattern`,
-    },
-})
-    .flag({
-    all: {
-        short: 'A',
-        description: `By default, the glob cli command will not expand any
-                    arguments that are an exact match to a file on disk.
-
-                    This prevents double-expanding, in case the shell expands
-                    an argument whose filename is a glob expression.
-
-                    For example, if 'app/*.ts' would match 'app/[id].ts', then
-                    on Windows powershell or cmd.exe, 'glob app/*.ts' will
-                    expand to 'app/[id].ts', as expected. However, in posix
-                    shells such as bash or zsh, the shell will first expand
-                    'app/*.ts' to a list of filenames. Then glob will look
-                    for a file matching 'app/[id].ts' (ie, 'app/i.ts' or
-                    'app/d.ts'), which is unexpected.
-
-                    Setting '--all' prevents this behavior, causing glob
-                    to treat ALL patterns as glob expressions to be expanded,
-                    even if they are an exact match to a file on disk.
-
-                    When setting this option, be sure to enquote arguments
-                    so that the shell will not expand them prior to passing
-                    them to the glob command process.
-      `,
-    },
-    absolute: {
-        short: 'a',
-        description: 'Expand to absolute paths',
-    },
-    'dot-relative': {
-        short: 'd',
-        description: `Prepend './' on relative matches`,
-    },
-    mark: {
-        short: 'm',
-        description: `Append a / on any directories matched`,
-    },
-    posix: {
-        short: 'x',
-        description: `Always resolve to posix style paths, using '/' as the
-                    directory separator, even on Windows. Drive letter
-                    absolute matches on Windows will be expanded to their
-                    full resolved UNC maths, eg instead of 'C:\\foo\\bar',
-                    it will expand to '//?/C:/foo/bar'.
-      `,
-    },
-    follow: {
-        short: 'f',
-        description: `Follow symlinked directories when expanding '**'`,
-    },
-    realpath: {
-        short: 'R',
-        description: `Call 'fs.realpath' on all of the results. In the case
-                    of an entry that cannot be resolved, the entry is
-                    omitted. This incurs a slight performance penalty, of
-                    course, because of the added system calls.`,
-    },
-    stat: {
-        short: 's',
-        description: `Call 'fs.lstat' on all entries, whether required or not
-                    to determine if it's a valid match.`,
-    },
-    'match-base': {
-        short: 'b',
-        description: `Perform a basename-only match if the pattern does not
-                    contain any slash characters. That is, '*.js' would be
-                    treated as equivalent to '**/*.js', matching js files
-                    in all directories.
-      `,
-    },
-    dot: {
-        description: `Allow patterns to match files/directories that start
-                    with '.', even if the pattern does not start with '.'
-      `,
-    },
-    nobrace: {
-        description: 'Do not expand {...} patterns',
-    },
-    nocase: {
-        description: `Perform a case-insensitive match. This defaults to
-                    'true' on macOS and Windows platforms, and false on
-                    all others.
-
-                    Note: 'nocase' should only be explicitly set when it is
-                    known that the filesystem's case sensitivity differs
-                    from the platform default. If set 'true' on
-                    case-insensitive file systems, then the walk may return
-                    more or less results than expected.
-      `,
-    },
-    nodir: {
-        description: `Do not match directories, only files.
-
-                    Note: to *only* match directories, append a '/' at the
-                    end of the pattern.
-      `,
-    },
-    noext: {
-        description: `Do not expand extglob patterns, such as '+(a|b)'`,
-    },
-    noglobstar: {
-        description: `Do not expand '**' against multiple path portions.
-                    Ie, treat it as a normal '*' instead.`,
-    },
-    'windows-path-no-escape': {
-        description: `Use '\\' as a path separator *only*, and *never* as an
-                    escape character. If set, all '\\' characters are
-                    replaced with '/' in the pattern.`,
-    },
-})
-    .num({
-    'max-depth': {
-        short: 'D',
-        description: `Maximum depth to traverse from the current
-                    working directory`,
-    },
-})
-    .opt({
-    cwd: {
-        short: 'C',
-        description: 'Current working directory to execute/match in',
-        default: process.cwd(),
-    },
-    root: {
-        short: 'r',
-        description: `A string path resolved against the 'cwd', which is
-                    used as the starting point for absolute patterns that
-                    start with '/' (but not drive letters or UNC paths
-                    on Windows).
-
-                    Note that this *doesn't* necessarily limit the walk to
-                    the 'root' directory, and doesn't affect the cwd
-                    starting point for non-absolute patterns. A pattern
-                    containing '..' will still be able to traverse out of
-                    the root directory, if it is not an actual root directory
-                    on the filesystem, and any non-absolute patterns will
-                    still be matched in the 'cwd'.
-
-                    To start absolute and non-absolute patterns in the same
-                    path, you can use '--root=' to set it to the empty
-                    string. However, be aware that on Windows systems, a
-                    pattern like 'x:/*' or '//host/share/*' will *always*
-                    start in the 'x:/' or '//host/share/' directory,
-                    regardless of the --root setting.
-      `,
-    },
-    platform: {
-        description: `Defaults to the value of 'process.platform' if
-                    available, or 'linux' if not. Setting --platform=win32
-                    on non-Windows systems may cause strange behavior!`,
-        validOptions: [
-            'aix',
-            'android',
-            'darwin',
-            'freebsd',
-            'haiku',
-            'linux',
-            'openbsd',
-            'sunos',
-            'win32',
-            'cygwin',
-            'netbsd',
-        ],
-    },
-})
-    .optList({
-    ignore: {
-        short: 'i',
-        description: `Glob patterns to ignore`,
-    },
-})
-    .flag({
-    debug: {
-        short: 'v',
-        description: `Output a huge amount of noisy debug information about
-                    patterns as they are parsed and used to match files.`,
-    },
-    version: {
-        short: 'V',
-        description: `Output the version (${version})`,
-    },
-    help: {
-        short: 'h',
-        description: 'Show this usage information',
-    },
-});
-try {
-    const { positionals, values } = j.parse();
-    if (values.version) {
-        console.log(version);
-        process.exit(0);
-    }
-    if (values.help) {
-        console.log(j.usage());
-        process.exit(0);
-    }
-    if (positionals.length === 0 && !values.default)
-        throw 'No patterns provided';
-    if (positionals.length === 0 && values.default)
-        positionals.push(values.default);
-    const patterns = values.all ? positionals : positionals.filter(p => !existsSync(p));
-    const matches = values.all ?
-        []
-        : positionals.filter(p => existsSync(p)).map(p => join(p));
-    const stream = globStream(patterns, {
-        absolute: values.absolute,
-        cwd: values.cwd,
-        dot: values.dot,
-        dotRelative: values['dot-relative'],
-        follow: values.follow,
-        ignore: values.ignore,
-        mark: values.mark,
-        matchBase: values['match-base'],
-        maxDepth: values['max-depth'],
-        nobrace: values.nobrace,
-        nocase: values.nocase,
-        nodir: values.nodir,
-        noext: values.noext,
-        noglobstar: values.noglobstar,
-        platform: values.platform,
-        realpath: values.realpath,
-        root: values.root,
-        stat: values.stat,
-        debug: values.debug,
-        posix: values.posix,
-    });
-    const cmd = values.cmd;
-    if (!cmd) {
-        matches.forEach(m => console.log(m));
-        stream.on('data', f => console.log(f));
-    }
-    else {
-        stream.on('data', f => matches.push(f));
-        stream.on('end', () => foregroundChild(cmd, matches, { shell: true }));
-    }
-}
-catch (e) {
-    console.error(j.usage());
-    console.error(e instanceof Error ? e.message : String(e));
-    process.exit(1);
-}
-//# sourceMappingURL=bin.mjs.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/glob.js b/node_modules/cacache/node_modules/glob/dist/esm/glob.js
deleted file mode 100644
index c9ff3b0036d94..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/glob.js
+++ /dev/null
@@ -1,243 +0,0 @@
-import { Minimatch } from 'minimatch';
-import { fileURLToPath } from 'node:url';
-import { PathScurry, PathScurryDarwin, PathScurryPosix, PathScurryWin32, } from 'path-scurry';
-import { Pattern } from './pattern.js';
-import { GlobStream, GlobWalker } from './walker.js';
-// if no process global, just call it linux.
-// so we default to case-sensitive, / separators
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * An object that can perform glob pattern traversals.
- */
-export class Glob {
-    absolute;
-    cwd;
-    root;
-    dot;
-    dotRelative;
-    follow;
-    ignore;
-    magicalBraces;
-    mark;
-    matchBase;
-    maxDepth;
-    nobrace;
-    nocase;
-    nodir;
-    noext;
-    noglobstar;
-    pattern;
-    platform;
-    realpath;
-    scurry;
-    stat;
-    signal;
-    windowsPathsNoEscape;
-    withFileTypes;
-    includeChildMatches;
-    /**
-     * The options provided to the constructor.
-     */
-    opts;
-    /**
-     * An array of parsed immutable {@link Pattern} objects.
-     */
-    patterns;
-    /**
-     * All options are stored as properties on the `Glob` object.
-     *
-     * See {@link GlobOptions} for full options descriptions.
-     *
-     * Note that a previous `Glob` object can be passed as the
-     * `GlobOptions` to another `Glob` instantiation to re-use settings
-     * and caches with a new pattern.
-     *
-     * Traversal functions can be called multiple times to run the walk
-     * again.
-     */
-    constructor(pattern, opts) {
-        /* c8 ignore start */
-        if (!opts)
-            throw new TypeError('glob options required');
-        /* c8 ignore stop */
-        this.withFileTypes = !!opts.withFileTypes;
-        this.signal = opts.signal;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.dotRelative = !!opts.dotRelative;
-        this.nodir = !!opts.nodir;
-        this.mark = !!opts.mark;
-        if (!opts.cwd) {
-            this.cwd = '';
-        }
-        else if (opts.cwd instanceof URL || opts.cwd.startsWith('file://')) {
-            opts.cwd = fileURLToPath(opts.cwd);
-        }
-        this.cwd = opts.cwd || '';
-        this.root = opts.root;
-        this.magicalBraces = !!opts.magicalBraces;
-        this.nobrace = !!opts.nobrace;
-        this.noext = !!opts.noext;
-        this.realpath = !!opts.realpath;
-        this.absolute = opts.absolute;
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        this.noglobstar = !!opts.noglobstar;
-        this.matchBase = !!opts.matchBase;
-        this.maxDepth =
-            typeof opts.maxDepth === 'number' ? opts.maxDepth : Infinity;
-        this.stat = !!opts.stat;
-        this.ignore = opts.ignore;
-        if (this.withFileTypes && this.absolute !== undefined) {
-            throw new Error('cannot set absolute and withFileTypes:true');
-        }
-        if (typeof pattern === 'string') {
-            pattern = [pattern];
-        }
-        this.windowsPathsNoEscape =
-            !!opts.windowsPathsNoEscape ||
-                opts.allowWindowsEscape ===
-                    false;
-        if (this.windowsPathsNoEscape) {
-            pattern = pattern.map(p => p.replace(/\\/g, '/'));
-        }
-        if (this.matchBase) {
-            if (opts.noglobstar) {
-                throw new TypeError('base matching requires globstar');
-            }
-            pattern = pattern.map(p => (p.includes('/') ? p : `./**/${p}`));
-        }
-        this.pattern = pattern;
-        this.platform = opts.platform || defaultPlatform;
-        this.opts = { ...opts, platform: this.platform };
-        if (opts.scurry) {
-            this.scurry = opts.scurry;
-            if (opts.nocase !== undefined &&
-                opts.nocase !== opts.scurry.nocase) {
-                throw new Error('nocase option contradicts provided scurry option');
-            }
-        }
-        else {
-            const Scurry = opts.platform === 'win32' ? PathScurryWin32
-                : opts.platform === 'darwin' ? PathScurryDarwin
-                    : opts.platform ? PathScurryPosix
-                        : PathScurry;
-            this.scurry = new Scurry(this.cwd, {
-                nocase: opts.nocase,
-                fs: opts.fs,
-            });
-        }
-        this.nocase = this.scurry.nocase;
-        // If you do nocase:true on a case-sensitive file system, then
-        // we need to use regexps instead of strings for non-magic
-        // path portions, because statting `aBc` won't return results
-        // for the file `AbC` for example.
-        const nocaseMagicOnly = this.platform === 'darwin' || this.platform === 'win32';
-        const mmo = {
-            // default nocase based on platform
-            ...opts,
-            dot: this.dot,
-            matchBase: this.matchBase,
-            nobrace: this.nobrace,
-            nocase: this.nocase,
-            nocaseMagicOnly,
-            nocomment: true,
-            noext: this.noext,
-            nonegate: true,
-            optimizationLevel: 2,
-            platform: this.platform,
-            windowsPathsNoEscape: this.windowsPathsNoEscape,
-            debug: !!this.opts.debug,
-        };
-        const mms = this.pattern.map(p => new Minimatch(p, mmo));
-        const [matchSet, globParts] = mms.reduce((set, m) => {
-            set[0].push(...m.set);
-            set[1].push(...m.globParts);
-            return set;
-        }, [[], []]);
-        this.patterns = matchSet.map((set, i) => {
-            const g = globParts[i];
-            /* c8 ignore start */
-            if (!g)
-                throw new Error('invalid pattern object');
-            /* c8 ignore stop */
-            return new Pattern(set, g, 0, this.platform);
-        });
-    }
-    async walk() {
-        // Walkers always return array of Path objects, so we just have to
-        // coerce them into the right shape.  It will have already called
-        // realpath() if the option was set to do so, so we know that's cached.
-        // start out knowing the cwd, at least
-        return [
-            ...(await new GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walk()),
-        ];
-    }
-    walkSync() {
-        return [
-            ...new GlobWalker(this.patterns, this.scurry.cwd, {
-                ...this.opts,
-                maxDepth: this.maxDepth !== Infinity ?
-                    this.maxDepth + this.scurry.cwd.depth()
-                    : Infinity,
-                platform: this.platform,
-                nocase: this.nocase,
-                includeChildMatches: this.includeChildMatches,
-            }).walkSync(),
-        ];
-    }
-    stream() {
-        return new GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).stream();
-    }
-    streamSync() {
-        return new GlobStream(this.patterns, this.scurry.cwd, {
-            ...this.opts,
-            maxDepth: this.maxDepth !== Infinity ?
-                this.maxDepth + this.scurry.cwd.depth()
-                : Infinity,
-            platform: this.platform,
-            nocase: this.nocase,
-            includeChildMatches: this.includeChildMatches,
-        }).streamSync();
-    }
-    /**
-     * Default sync iteration function. Returns a Generator that
-     * iterates over the results.
-     */
-    iterateSync() {
-        return this.streamSync()[Symbol.iterator]();
-    }
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    /**
-     * Default async iteration function. Returns an AsyncGenerator that
-     * iterates over the results.
-     */
-    iterate() {
-        return this.stream()[Symbol.asyncIterator]();
-    }
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-}
-//# sourceMappingURL=glob.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/has-magic.js b/node_modules/cacache/node_modules/glob/dist/esm/has-magic.js
deleted file mode 100644
index ba2321ab868d0..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/has-magic.js
+++ /dev/null
@@ -1,23 +0,0 @@
-import { Minimatch } from 'minimatch';
-/**
- * Return true if the patterns provided contain any magic glob characters,
- * given the options provided.
- *
- * Brace expansion is not considered "magic" unless the `magicalBraces` option
- * is set, as brace expansion just turns one string into an array of strings.
- * So a pattern like `'x{a,b}y'` would return `false`, because `'xay'` and
- * `'xby'` both do not contain any magic glob characters, and it's treated the
- * same as if you had called it on `['xay', 'xby']`. When `magicalBraces:true`
- * is in the options, brace expansion _is_ treated as a pattern having magic.
- */
-export const hasMagic = (pattern, options = {}) => {
-    if (!Array.isArray(pattern)) {
-        pattern = [pattern];
-    }
-    for (const p of pattern) {
-        if (new Minimatch(p, options).hasMagic())
-            return true;
-    }
-    return false;
-};
-//# sourceMappingURL=has-magic.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/ignore.js b/node_modules/cacache/node_modules/glob/dist/esm/ignore.js
deleted file mode 100644
index 539c4a4fdebc4..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/ignore.js
+++ /dev/null
@@ -1,115 +0,0 @@
-// give it a pattern, and it'll be able to tell you if
-// a given path should be ignored.
-// Ignoring a path ignores its children if the pattern ends in /**
-// Ignores are always parsed in dot:true mode
-import { Minimatch } from 'minimatch';
-import { Pattern } from './pattern.js';
-const defaultPlatform = (typeof process === 'object' &&
-    process &&
-    typeof process.platform === 'string') ?
-    process.platform
-    : 'linux';
-/**
- * Class used to process ignored patterns
- */
-export class Ignore {
-    relative;
-    relativeChildren;
-    absolute;
-    absoluteChildren;
-    platform;
-    mmopts;
-    constructor(ignored, { nobrace, nocase, noext, noglobstar, platform = defaultPlatform, }) {
-        this.relative = [];
-        this.absolute = [];
-        this.relativeChildren = [];
-        this.absoluteChildren = [];
-        this.platform = platform;
-        this.mmopts = {
-            dot: true,
-            nobrace,
-            nocase,
-            noext,
-            noglobstar,
-            optimizationLevel: 2,
-            platform,
-            nocomment: true,
-            nonegate: true,
-        };
-        for (const ign of ignored)
-            this.add(ign);
-    }
-    add(ign) {
-        // this is a little weird, but it gives us a clean set of optimized
-        // minimatch matchers, without getting tripped up if one of them
-        // ends in /** inside a brace section, and it's only inefficient at
-        // the start of the walk, not along it.
-        // It'd be nice if the Pattern class just had a .test() method, but
-        // handling globstars is a bit of a pita, and that code already lives
-        // in minimatch anyway.
-        // Another way would be if maybe Minimatch could take its set/globParts
-        // as an option, and then we could at least just use Pattern to test
-        // for absolute-ness.
-        // Yet another way, Minimatch could take an array of glob strings, and
-        // a cwd option, and do the right thing.
-        const mm = new Minimatch(ign, this.mmopts);
-        for (let i = 0; i < mm.set.length; i++) {
-            const parsed = mm.set[i];
-            const globParts = mm.globParts[i];
-            /* c8 ignore start */
-            if (!parsed || !globParts) {
-                throw new Error('invalid pattern object');
-            }
-            // strip off leading ./ portions
-            // https://github.com/isaacs/node-glob/issues/570
-            while (parsed[0] === '.' && globParts[0] === '.') {
-                parsed.shift();
-                globParts.shift();
-            }
-            /* c8 ignore stop */
-            const p = new Pattern(parsed, globParts, 0, this.platform);
-            const m = new Minimatch(p.globString(), this.mmopts);
-            const children = globParts[globParts.length - 1] === '**';
-            const absolute = p.isAbsolute();
-            if (absolute)
-                this.absolute.push(m);
-            else
-                this.relative.push(m);
-            if (children) {
-                if (absolute)
-                    this.absoluteChildren.push(m);
-                else
-                    this.relativeChildren.push(m);
-            }
-        }
-    }
-    ignored(p) {
-        const fullpath = p.fullpath();
-        const fullpaths = `${fullpath}/`;
-        const relative = p.relative() || '.';
-        const relatives = `${relative}/`;
-        for (const m of this.relative) {
-            if (m.match(relative) || m.match(relatives))
-                return true;
-        }
-        for (const m of this.absolute) {
-            if (m.match(fullpath) || m.match(fullpaths))
-                return true;
-        }
-        return false;
-    }
-    childrenIgnored(p) {
-        const fullpath = p.fullpath() + '/';
-        const relative = (p.relative() || '.') + '/';
-        for (const m of this.relativeChildren) {
-            if (m.match(relative))
-                return true;
-        }
-        for (const m of this.absoluteChildren) {
-            if (m.match(fullpath))
-                return true;
-        }
-        return false;
-    }
-}
-//# sourceMappingURL=ignore.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/index.js b/node_modules/cacache/node_modules/glob/dist/esm/index.js
deleted file mode 100644
index e15c1f9c4cb03..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/index.js
+++ /dev/null
@@ -1,55 +0,0 @@
-import { escape, unescape } from 'minimatch';
-import { Glob } from './glob.js';
-import { hasMagic } from './has-magic.js';
-export { escape, unescape } from 'minimatch';
-export { Glob } from './glob.js';
-export { hasMagic } from './has-magic.js';
-export { Ignore } from './ignore.js';
-export function globStreamSync(pattern, options = {}) {
-    return new Glob(pattern, options).streamSync();
-}
-export function globStream(pattern, options = {}) {
-    return new Glob(pattern, options).stream();
-}
-export function globSync(pattern, options = {}) {
-    return new Glob(pattern, options).walkSync();
-}
-async function glob_(pattern, options = {}) {
-    return new Glob(pattern, options).walk();
-}
-export function globIterateSync(pattern, options = {}) {
-    return new Glob(pattern, options).iterateSync();
-}
-export function globIterate(pattern, options = {}) {
-    return new Glob(pattern, options).iterate();
-}
-// aliases: glob.sync.stream() glob.stream.sync() glob.sync() etc
-export const streamSync = globStreamSync;
-export const stream = Object.assign(globStream, { sync: globStreamSync });
-export const iterateSync = globIterateSync;
-export const iterate = Object.assign(globIterate, {
-    sync: globIterateSync,
-});
-export const sync = Object.assign(globSync, {
-    stream: globStreamSync,
-    iterate: globIterateSync,
-});
-export const glob = Object.assign(glob_, {
-    glob: glob_,
-    globSync,
-    sync,
-    globStream,
-    stream,
-    globStreamSync,
-    streamSync,
-    globIterate,
-    iterate,
-    globIterateSync,
-    iterateSync,
-    Glob,
-    hasMagic,
-    escape,
-    unescape,
-});
-glob.glob = glob;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/package.json b/node_modules/cacache/node_modules/glob/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/pattern.js b/node_modules/cacache/node_modules/glob/dist/esm/pattern.js
deleted file mode 100644
index b41defa10c6a3..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/pattern.js
+++ /dev/null
@@ -1,215 +0,0 @@
-// this is just a very light wrapper around 2 arrays with an offset index
-import { GLOBSTAR } from 'minimatch';
-const isPatternList = (pl) => pl.length >= 1;
-const isGlobList = (gl) => gl.length >= 1;
-/**
- * An immutable-ish view on an array of glob parts and their parsed
- * results
- */
-export class Pattern {
-    #patternList;
-    #globList;
-    #index;
-    length;
-    #platform;
-    #rest;
-    #globString;
-    #isDrive;
-    #isUNC;
-    #isAbsolute;
-    #followGlobstar = true;
-    constructor(patternList, globList, index, platform) {
-        if (!isPatternList(patternList)) {
-            throw new TypeError('empty pattern list');
-        }
-        if (!isGlobList(globList)) {
-            throw new TypeError('empty glob list');
-        }
-        if (globList.length !== patternList.length) {
-            throw new TypeError('mismatched pattern list and glob list lengths');
-        }
-        this.length = patternList.length;
-        if (index < 0 || index >= this.length) {
-            throw new TypeError('index out of range');
-        }
-        this.#patternList = patternList;
-        this.#globList = globList;
-        this.#index = index;
-        this.#platform = platform;
-        // normalize root entries of absolute patterns on initial creation.
-        if (this.#index === 0) {
-            // c: => ['c:/']
-            // C:/ => ['C:/']
-            // C:/x => ['C:/', 'x']
-            // //host/share => ['//host/share/']
-            // //host/share/ => ['//host/share/']
-            // //host/share/x => ['//host/share/', 'x']
-            // /etc => ['/', 'etc']
-            // / => ['/']
-            if (this.isUNC()) {
-                // '' / '' / 'host' / 'share'
-                const [p0, p1, p2, p3, ...prest] = this.#patternList;
-                const [g0, g1, g2, g3, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = [p0, p1, p2, p3, ''].join('/');
-                const g = [g0, g1, g2, g3, ''].join('/');
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-            else if (this.isDrive() || this.isAbsolute()) {
-                const [p1, ...prest] = this.#patternList;
-                const [g1, ...grest] = this.#globList;
-                if (prest[0] === '') {
-                    // ends in /
-                    prest.shift();
-                    grest.shift();
-                }
-                const p = p1 + '/';
-                const g = g1 + '/';
-                this.#patternList = [p, ...prest];
-                this.#globList = [g, ...grest];
-                this.length = this.#patternList.length;
-            }
-        }
-    }
-    /**
-     * The first entry in the parsed list of patterns
-     */
-    pattern() {
-        return this.#patternList[this.#index];
-    }
-    /**
-     * true of if pattern() returns a string
-     */
-    isString() {
-        return typeof this.#patternList[this.#index] === 'string';
-    }
-    /**
-     * true of if pattern() returns GLOBSTAR
-     */
-    isGlobstar() {
-        return this.#patternList[this.#index] === GLOBSTAR;
-    }
-    /**
-     * true if pattern() returns a regexp
-     */
-    isRegExp() {
-        return this.#patternList[this.#index] instanceof RegExp;
-    }
-    /**
-     * The /-joined set of glob parts that make up this pattern
-     */
-    globString() {
-        return (this.#globString =
-            this.#globString ||
-                (this.#index === 0 ?
-                    this.isAbsolute() ?
-                        this.#globList[0] + this.#globList.slice(1).join('/')
-                        : this.#globList.join('/')
-                    : this.#globList.slice(this.#index).join('/')));
-    }
-    /**
-     * true if there are more pattern parts after this one
-     */
-    hasMore() {
-        return this.length > this.#index + 1;
-    }
-    /**
-     * The rest of the pattern after this part, or null if this is the end
-     */
-    rest() {
-        if (this.#rest !== undefined)
-            return this.#rest;
-        if (!this.hasMore())
-            return (this.#rest = null);
-        this.#rest = new Pattern(this.#patternList, this.#globList, this.#index + 1, this.#platform);
-        this.#rest.#isAbsolute = this.#isAbsolute;
-        this.#rest.#isUNC = this.#isUNC;
-        this.#rest.#isDrive = this.#isDrive;
-        return this.#rest;
-    }
-    /**
-     * true if the pattern represents a //unc/path/ on windows
-     */
-    isUNC() {
-        const pl = this.#patternList;
-        return this.#isUNC !== undefined ?
-            this.#isUNC
-            : (this.#isUNC =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    pl[0] === '' &&
-                    pl[1] === '' &&
-                    typeof pl[2] === 'string' &&
-                    !!pl[2] &&
-                    typeof pl[3] === 'string' &&
-                    !!pl[3]);
-    }
-    // pattern like C:/...
-    // split = ['C:', ...]
-    // XXX: would be nice to handle patterns like `c:*` to test the cwd
-    // in c: for *, but I don't know of a way to even figure out what that
-    // cwd is without actually chdir'ing into it?
-    /**
-     * True if the pattern starts with a drive letter on Windows
-     */
-    isDrive() {
-        const pl = this.#patternList;
-        return this.#isDrive !== undefined ?
-            this.#isDrive
-            : (this.#isDrive =
-                this.#platform === 'win32' &&
-                    this.#index === 0 &&
-                    this.length > 1 &&
-                    typeof pl[0] === 'string' &&
-                    /^[a-z]:$/i.test(pl[0]));
-    }
-    // pattern = '/' or '/...' or '/x/...'
-    // split = ['', ''] or ['', ...] or ['', 'x', ...]
-    // Drive and UNC both considered absolute on windows
-    /**
-     * True if the pattern is rooted on an absolute path
-     */
-    isAbsolute() {
-        const pl = this.#patternList;
-        return this.#isAbsolute !== undefined ?
-            this.#isAbsolute
-            : (this.#isAbsolute =
-                (pl[0] === '' && pl.length > 1) ||
-                    this.isDrive() ||
-                    this.isUNC());
-    }
-    /**
-     * consume the root of the pattern, and return it
-     */
-    root() {
-        const p = this.#patternList[0];
-        return (typeof p === 'string' && this.isAbsolute() && this.#index === 0) ?
-            p
-            : '';
-    }
-    /**
-     * Check to see if the current globstar pattern is allowed to follow
-     * a symbolic link.
-     */
-    checkFollowGlobstar() {
-        return !(this.#index === 0 ||
-            !this.isGlobstar() ||
-            !this.#followGlobstar);
-    }
-    /**
-     * Mark that the current globstar pattern is following a symbolic link
-     */
-    markFollowGlobstar() {
-        if (this.#index === 0 || !this.isGlobstar() || !this.#followGlobstar)
-            return false;
-        this.#followGlobstar = false;
-        return true;
-    }
-}
-//# sourceMappingURL=pattern.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/processor.js b/node_modules/cacache/node_modules/glob/dist/esm/processor.js
deleted file mode 100644
index f874892ffed0c..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/processor.js
+++ /dev/null
@@ -1,294 +0,0 @@
-// synchronous utility for filtering entries and calculating subwalks
-import { GLOBSTAR } from 'minimatch';
-/**
- * A cache of which patterns have been processed for a given Path
- */
-export class HasWalkedCache {
-    store;
-    constructor(store = new Map()) {
-        this.store = store;
-    }
-    copy() {
-        return new HasWalkedCache(new Map(this.store));
-    }
-    hasWalked(target, pattern) {
-        return this.store.get(target.fullpath())?.has(pattern.globString());
-    }
-    storeWalked(target, pattern) {
-        const fullpath = target.fullpath();
-        const cached = this.store.get(fullpath);
-        if (cached)
-            cached.add(pattern.globString());
-        else
-            this.store.set(fullpath, new Set([pattern.globString()]));
-    }
-}
-/**
- * A record of which paths have been matched in a given walk step,
- * and whether they only are considered a match if they are a directory,
- * and whether their absolute or relative path should be returned.
- */
-export class MatchRecord {
-    store = new Map();
-    add(target, absolute, ifDir) {
-        const n = (absolute ? 2 : 0) | (ifDir ? 1 : 0);
-        const current = this.store.get(target);
-        this.store.set(target, current === undefined ? n : n & current);
-    }
-    // match, absolute, ifdir
-    entries() {
-        return [...this.store.entries()].map(([path, n]) => [
-            path,
-            !!(n & 2),
-            !!(n & 1),
-        ]);
-    }
-}
-/**
- * A collection of patterns that must be processed in a subsequent step
- * for a given path.
- */
-export class SubWalks {
-    store = new Map();
-    add(target, pattern) {
-        if (!target.canReaddir()) {
-            return;
-        }
-        const subs = this.store.get(target);
-        if (subs) {
-            if (!subs.find(p => p.globString() === pattern.globString())) {
-                subs.push(pattern);
-            }
-        }
-        else
-            this.store.set(target, [pattern]);
-    }
-    get(target) {
-        const subs = this.store.get(target);
-        /* c8 ignore start */
-        if (!subs) {
-            throw new Error('attempting to walk unknown path');
-        }
-        /* c8 ignore stop */
-        return subs;
-    }
-    entries() {
-        return this.keys().map(k => [k, this.store.get(k)]);
-    }
-    keys() {
-        return [...this.store.keys()].filter(t => t.canReaddir());
-    }
-}
-/**
- * The class that processes patterns for a given path.
- *
- * Handles child entry filtering, and determining whether a path's
- * directory contents must be read.
- */
-export class Processor {
-    hasWalkedCache;
-    matches = new MatchRecord();
-    subwalks = new SubWalks();
-    patterns;
-    follow;
-    dot;
-    opts;
-    constructor(opts, hasWalkedCache) {
-        this.opts = opts;
-        this.follow = !!opts.follow;
-        this.dot = !!opts.dot;
-        this.hasWalkedCache =
-            hasWalkedCache ? hasWalkedCache.copy() : new HasWalkedCache();
-    }
-    processPatterns(target, patterns) {
-        this.patterns = patterns;
-        const processingSet = patterns.map(p => [target, p]);
-        // map of paths to the magic-starting subwalks they need to walk
-        // first item in patterns is the filter
-        for (let [t, pattern] of processingSet) {
-            this.hasWalkedCache.storeWalked(t, pattern);
-            const root = pattern.root();
-            const absolute = pattern.isAbsolute() && this.opts.absolute !== false;
-            // start absolute patterns at root
-            if (root) {
-                t = t.resolve(root === '/' && this.opts.root !== undefined ?
-                    this.opts.root
-                    : root);
-                const rest = pattern.rest();
-                if (!rest) {
-                    this.matches.add(t, true, false);
-                    continue;
-                }
-                else {
-                    pattern = rest;
-                }
-            }
-            if (t.isENOENT())
-                continue;
-            let p;
-            let rest;
-            let changed = false;
-            while (typeof (p = pattern.pattern()) === 'string' &&
-                (rest = pattern.rest())) {
-                const c = t.resolve(p);
-                t = c;
-                pattern = rest;
-                changed = true;
-            }
-            p = pattern.pattern();
-            rest = pattern.rest();
-            if (changed) {
-                if (this.hasWalkedCache.hasWalked(t, pattern))
-                    continue;
-                this.hasWalkedCache.storeWalked(t, pattern);
-            }
-            // now we have either a final string for a known entry,
-            // more strings for an unknown entry,
-            // or a pattern starting with magic, mounted on t.
-            if (typeof p === 'string') {
-                // must not be final entry, otherwise we would have
-                // concatenated it earlier.
-                const ifDir = p === '..' || p === '' || p === '.';
-                this.matches.add(t.resolve(p), absolute, ifDir);
-                continue;
-            }
-            else if (p === GLOBSTAR) {
-                // if no rest, match and subwalk pattern
-                // if rest, process rest and subwalk pattern
-                // if it's a symlink, but we didn't get here by way of a
-                // globstar match (meaning it's the first time THIS globstar
-                // has traversed a symlink), then we follow it. Otherwise, stop.
-                if (!t.isSymbolicLink() ||
-                    this.follow ||
-                    pattern.checkFollowGlobstar()) {
-                    this.subwalks.add(t, pattern);
-                }
-                const rp = rest?.pattern();
-                const rrest = rest?.rest();
-                if (!rest || ((rp === '' || rp === '.') && !rrest)) {
-                    // only HAS to be a dir if it ends in **/ or **/.
-                    // but ending in ** will match files as well.
-                    this.matches.add(t, absolute, rp === '' || rp === '.');
-                }
-                else {
-                    if (rp === '..') {
-                        // this would mean you're matching **/.. at the fs root,
-                        // and no thanks, I'm not gonna test that specific case.
-                        /* c8 ignore start */
-                        const tp = t.parent || t;
-                        /* c8 ignore stop */
-                        if (!rrest)
-                            this.matches.add(tp, absolute, true);
-                        else if (!this.hasWalkedCache.hasWalked(tp, rrest)) {
-                            this.subwalks.add(tp, rrest);
-                        }
-                    }
-                }
-            }
-            else if (p instanceof RegExp) {
-                this.subwalks.add(t, pattern);
-            }
-        }
-        return this;
-    }
-    subwalkTargets() {
-        return this.subwalks.keys();
-    }
-    child() {
-        return new Processor(this.opts, this.hasWalkedCache);
-    }
-    // return a new Processor containing the subwalks for each
-    // child entry, and a set of matches, and
-    // a hasWalkedCache that's a copy of this one
-    // then we're going to call
-    filterEntries(parent, entries) {
-        const patterns = this.subwalks.get(parent);
-        // put matches and entry walks into the results processor
-        const results = this.child();
-        for (const e of entries) {
-            for (const pattern of patterns) {
-                const absolute = pattern.isAbsolute();
-                const p = pattern.pattern();
-                const rest = pattern.rest();
-                if (p === GLOBSTAR) {
-                    results.testGlobstar(e, pattern, rest, absolute);
-                }
-                else if (p instanceof RegExp) {
-                    results.testRegExp(e, p, rest, absolute);
-                }
-                else {
-                    results.testString(e, p, rest, absolute);
-                }
-            }
-        }
-        return results;
-    }
-    testGlobstar(e, pattern, rest, absolute) {
-        if (this.dot || !e.name.startsWith('.')) {
-            if (!pattern.hasMore()) {
-                this.matches.add(e, absolute, false);
-            }
-            if (e.canReaddir()) {
-                // if we're in follow mode or it's not a symlink, just keep
-                // testing the same pattern. If there's more after the globstar,
-                // then this symlink consumes the globstar. If not, then we can
-                // follow at most ONE symlink along the way, so we mark it, which
-                // also checks to ensure that it wasn't already marked.
-                if (this.follow || !e.isSymbolicLink()) {
-                    this.subwalks.add(e, pattern);
-                }
-                else if (e.isSymbolicLink()) {
-                    if (rest && pattern.checkFollowGlobstar()) {
-                        this.subwalks.add(e, rest);
-                    }
-                    else if (pattern.markFollowGlobstar()) {
-                        this.subwalks.add(e, pattern);
-                    }
-                }
-            }
-        }
-        // if the NEXT thing matches this entry, then also add
-        // the rest.
-        if (rest) {
-            const rp = rest.pattern();
-            if (typeof rp === 'string' &&
-                // dots and empty were handled already
-                rp !== '..' &&
-                rp !== '' &&
-                rp !== '.') {
-                this.testString(e, rp, rest.rest(), absolute);
-            }
-            else if (rp === '..') {
-                /* c8 ignore start */
-                const ep = e.parent || e;
-                /* c8 ignore stop */
-                this.subwalks.add(ep, rest);
-            }
-            else if (rp instanceof RegExp) {
-                this.testRegExp(e, rp, rest.rest(), absolute);
-            }
-        }
-    }
-    testRegExp(e, p, rest, absolute) {
-        if (!p.test(e.name))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-    testString(e, p, rest, absolute) {
-        // should never happen?
-        if (!e.isNamed(p))
-            return;
-        if (!rest) {
-            this.matches.add(e, absolute, false);
-        }
-        else {
-            this.subwalks.add(e, rest);
-        }
-    }
-}
-//# sourceMappingURL=processor.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/glob/dist/esm/walker.js b/node_modules/cacache/node_modules/glob/dist/esm/walker.js
deleted file mode 100644
index 3d68196c4f175..0000000000000
--- a/node_modules/cacache/node_modules/glob/dist/esm/walker.js
+++ /dev/null
@@ -1,381 +0,0 @@
-/**
- * Single-use utility classes to provide functionality to the {@link Glob}
- * methods.
- *
- * @module
- */
-import { Minipass } from 'minipass';
-import { Ignore } from './ignore.js';
-import { Processor } from './processor.js';
-const makeIgnore = (ignore, opts) => typeof ignore === 'string' ? new Ignore([ignore], opts)
-    : Array.isArray(ignore) ? new Ignore(ignore, opts)
-        : ignore;
-/**
- * basic walking utilities that all the glob walker types use
- */
-export class GlobUtil {
-    path;
-    patterns;
-    opts;
-    seen = new Set();
-    paused = false;
-    aborted = false;
-    #onResume = [];
-    #ignore;
-    #sep;
-    signal;
-    maxDepth;
-    includeChildMatches;
-    constructor(patterns, path, opts) {
-        this.patterns = patterns;
-        this.path = path;
-        this.opts = opts;
-        this.#sep = !opts.posix && opts.platform === 'win32' ? '\\' : '/';
-        this.includeChildMatches = opts.includeChildMatches !== false;
-        if (opts.ignore || !this.includeChildMatches) {
-            this.#ignore = makeIgnore(opts.ignore ?? [], opts);
-            if (!this.includeChildMatches &&
-                typeof this.#ignore.add !== 'function') {
-                const m = 'cannot ignore child matches, ignore lacks add() method.';
-                throw new Error(m);
-            }
-        }
-        // ignore, always set with maxDepth, but it's optional on the
-        // GlobOptions type
-        /* c8 ignore start */
-        this.maxDepth = opts.maxDepth || Infinity;
-        /* c8 ignore stop */
-        if (opts.signal) {
-            this.signal = opts.signal;
-            this.signal.addEventListener('abort', () => {
-                this.#onResume.length = 0;
-            });
-        }
-    }
-    #ignored(path) {
-        return this.seen.has(path) || !!this.#ignore?.ignored?.(path);
-    }
-    #childrenIgnored(path) {
-        return !!this.#ignore?.childrenIgnored?.(path);
-    }
-    // backpressure mechanism
-    pause() {
-        this.paused = true;
-    }
-    resume() {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore stop */
-        this.paused = false;
-        let fn = undefined;
-        while (!this.paused && (fn = this.#onResume.shift())) {
-            fn();
-        }
-    }
-    onResume(fn) {
-        if (this.signal?.aborted)
-            return;
-        /* c8 ignore start */
-        if (!this.paused) {
-            fn();
-        }
-        else {
-            /* c8 ignore stop */
-            this.#onResume.push(fn);
-        }
-    }
-    // do the requisite realpath/stat checking, and return the path
-    // to add or undefined to filter it out.
-    async matchCheck(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || (await e.realpath());
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? await e.lstat() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = await s.realpath();
-            /* c8 ignore start */
-            if (target && (target.isUnknown() || this.opts.stat)) {
-                await target.lstat();
-            }
-            /* c8 ignore stop */
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchCheckTest(e, ifDir) {
-        return (e &&
-            (this.maxDepth === Infinity || e.depth() <= this.maxDepth) &&
-            (!ifDir || e.canReaddir()) &&
-            (!this.opts.nodir || !e.isDirectory()) &&
-            (!this.opts.nodir ||
-                !this.opts.follow ||
-                !e.isSymbolicLink() ||
-                !e.realpathCached()?.isDirectory()) &&
-            !this.#ignored(e)) ?
-            e
-            : undefined;
-    }
-    matchCheckSync(e, ifDir) {
-        if (ifDir && this.opts.nodir)
-            return undefined;
-        let rpc;
-        if (this.opts.realpath) {
-            rpc = e.realpathCached() || e.realpathSync();
-            if (!rpc)
-                return undefined;
-            e = rpc;
-        }
-        const needStat = e.isUnknown() || this.opts.stat;
-        const s = needStat ? e.lstatSync() : e;
-        if (this.opts.follow && this.opts.nodir && s?.isSymbolicLink()) {
-            const target = s.realpathSync();
-            if (target && (target?.isUnknown() || this.opts.stat)) {
-                target.lstatSync();
-            }
-        }
-        return this.matchCheckTest(s, ifDir);
-    }
-    matchFinish(e, absolute) {
-        if (this.#ignored(e))
-            return;
-        // we know we have an ignore if this is false, but TS doesn't
-        if (!this.includeChildMatches && this.#ignore?.add) {
-            const ign = `${e.relativePosix()}/**`;
-            this.#ignore.add(ign);
-        }
-        const abs = this.opts.absolute === undefined ? absolute : this.opts.absolute;
-        this.seen.add(e);
-        const mark = this.opts.mark && e.isDirectory() ? this.#sep : '';
-        // ok, we have what we need!
-        if (this.opts.withFileTypes) {
-            this.matchEmit(e);
-        }
-        else if (abs) {
-            const abs = this.opts.posix ? e.fullpathPosix() : e.fullpath();
-            this.matchEmit(abs + mark);
-        }
-        else {
-            const rel = this.opts.posix ? e.relativePosix() : e.relative();
-            const pre = this.opts.dotRelative && !rel.startsWith('..' + this.#sep) ?
-                '.' + this.#sep
-                : '';
-            this.matchEmit(!rel ? '.' + mark : pre + rel + mark);
-        }
-    }
-    async match(e, absolute, ifDir) {
-        const p = await this.matchCheck(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    matchSync(e, absolute, ifDir) {
-        const p = this.matchCheckSync(e, ifDir);
-        if (p)
-            this.matchFinish(p, absolute);
-    }
-    walkCB(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2(target, patterns, new Processor(this.opts), cb);
-    }
-    walkCB2(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const childrenCached = t.readdirCached();
-            if (t.calledReaddir())
-                this.walkCB3(t, childrenCached, processor, next);
-            else {
-                t.readdirCB((_, entries) => this.walkCB3(t, entries, processor, next), true);
-            }
-        }
-        next();
-    }
-    walkCB3(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            tasks++;
-            this.match(m, absolute, ifDir).then(() => next());
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-    walkCBSync(target, patterns, cb) {
-        /* c8 ignore start */
-        if (this.signal?.aborted)
-            cb();
-        /* c8 ignore stop */
-        this.walkCB2Sync(target, patterns, new Processor(this.opts), cb);
-    }
-    walkCB2Sync(target, patterns, processor, cb) {
-        if (this.#childrenIgnored(target))
-            return cb();
-        if (this.signal?.aborted)
-            cb();
-        if (this.paused) {
-            this.onResume(() => this.walkCB2Sync(target, patterns, processor, cb));
-            return;
-        }
-        processor.processPatterns(target, patterns);
-        // done processing.  all of the above is sync, can be abstracted out.
-        // subwalks is a map of paths to the entry filters they need
-        // matches is a map of paths to [absolute, ifDir] tuples.
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const t of processor.subwalkTargets()) {
-            if (this.maxDepth !== Infinity && t.depth() >= this.maxDepth) {
-                continue;
-            }
-            tasks++;
-            const children = t.readdirSync();
-            this.walkCB3Sync(t, children, processor, next);
-        }
-        next();
-    }
-    walkCB3Sync(target, entries, processor, cb) {
-        processor = processor.filterEntries(target, entries);
-        let tasks = 1;
-        const next = () => {
-            if (--tasks === 0)
-                cb();
-        };
-        for (const [m, absolute, ifDir] of processor.matches.entries()) {
-            if (this.#ignored(m))
-                continue;
-            this.matchSync(m, absolute, ifDir);
-        }
-        for (const [target, patterns] of processor.subwalks.entries()) {
-            tasks++;
-            this.walkCB2Sync(target, patterns, processor.child(), next);
-        }
-        next();
-    }
-}
-export class GlobWalker extends GlobUtil {
-    matches = new Set();
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-    }
-    matchEmit(e) {
-        this.matches.add(e);
-    }
-    async walk() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            await this.path.lstat();
-        }
-        await new Promise((res, rej) => {
-            this.walkCB(this.path, this.patterns, () => {
-                if (this.signal?.aborted) {
-                    rej(this.signal.reason);
-                }
-                else {
-                    res(this.matches);
-                }
-            });
-        });
-        return this.matches;
-    }
-    walkSync() {
-        if (this.signal?.aborted)
-            throw this.signal.reason;
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        // nothing for the callback to do, because this never pauses
-        this.walkCBSync(this.path, this.patterns, () => {
-            if (this.signal?.aborted)
-                throw this.signal.reason;
-        });
-        return this.matches;
-    }
-}
-export class GlobStream extends GlobUtil {
-    results;
-    constructor(patterns, path, opts) {
-        super(patterns, path, opts);
-        this.results = new Minipass({
-            signal: this.signal,
-            objectMode: true,
-        });
-        this.results.on('drain', () => this.resume());
-        this.results.on('resume', () => this.resume());
-    }
-    matchEmit(e) {
-        this.results.write(e);
-        if (!this.results.flowing)
-            this.pause();
-    }
-    stream() {
-        const target = this.path;
-        if (target.isUnknown()) {
-            target.lstat().then(() => {
-                this.walkCB(target, this.patterns, () => this.results.end());
-            });
-        }
-        else {
-            this.walkCB(target, this.patterns, () => this.results.end());
-        }
-        return this.results;
-    }
-    streamSync() {
-        if (this.path.isUnknown()) {
-            this.path.lstatSync();
-        }
-        this.walkCBSync(this.path, this.patterns, () => this.results.end());
-        return this.results;
-    }
-}
-//# sourceMappingURL=walker.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/jackspeak/LICENSE.md b/node_modules/cacache/node_modules/jackspeak/LICENSE.md
deleted file mode 100644
index 8cb5cc6e616c0..0000000000000
--- a/node_modules/cacache/node_modules/jackspeak/LICENSE.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules. The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice. If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-**_As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim._**
diff --git a/node_modules/cacache/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/cacache/node_modules/jackspeak/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/cacache/node_modules/jackspeak/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/cacache/node_modules/jackspeak/dist/esm/package.json b/node_modules/cacache/node_modules/jackspeak/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/cacache/node_modules/jackspeak/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/cacache/node_modules/jackspeak/package.json b/node_modules/cacache/node_modules/jackspeak/package.json
deleted file mode 100644
index aa85d230f6d24..0000000000000
--- a/node_modules/cacache/node_modules/jackspeak/package.json
+++ /dev/null
@@ -1,94 +0,0 @@
-{
-  "name": "jackspeak",
-  "version": "4.1.1",
-  "description": "A very strict and proper argument parser.",
-  "tshy": {
-    "main": true,
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.js"
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "build-examples": "for i in examples/*.js ; do node $i -h > ${i/.js/.txt}; done",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "license": "BlueOak-1.0.0",
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/node": "^22.6.0",
-    "prettier": "^3.3.3",
-    "tap": "^21.0.1",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.26.7"
-  },
-  "dependencies": {
-    "@isaacs/cliui": "^8.0.2"
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/jackspeak.git"
-  },
-  "keywords": [
-    "argument",
-    "parser",
-    "args",
-    "option",
-    "flag",
-    "cli",
-    "command",
-    "line",
-    "parse",
-    "parsing"
-  ],
-  "author": "Isaac Z. Schlueter ",
-  "tap": {
-    "typecheck": true
-  },
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/cacache/node_modules/minimatch/LICENSE b/node_modules/cacache/node_modules/minimatch/LICENSE
deleted file mode 100644
index 1493534e60dce..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
deleted file mode 100644
index 5fc86bbd0116c..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertValidPattern = void 0;
-const MAX_PATTERN_LENGTH = 1024 * 64;
-const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-exports.assertValidPattern = assertValidPattern;
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/ast.js
deleted file mode 100644
index 7b2109625eaeb..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/ast.js
+++ /dev/null
@@ -1,592 +0,0 @@
-"use strict";
-// parse a single path portion
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.AST = void 0;
-const brace_expressions_js_1 = require("./brace-expressions.js");
-const unescape_js_1 = require("./unescape.js");
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everthing that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                (0, unescape_js_1.unescape)(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something,but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            (0, unescape_js_1.unescape)(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
-    }
-}
-exports.AST = AST;
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/brace-expressions.js
deleted file mode 100644
index 0e13eefc4cfee..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/brace-expressions.js
+++ /dev/null
@@ -1,152 +0,0 @@
-"use strict";
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseClass = void 0;
-// { <posix class>: [<translation>, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c<more...>] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-exports.parseClass = parseClass;
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/escape.js
deleted file mode 100644
index 02a4f8a8e0a58..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/escape.js
+++ /dev/null
@@ -1,22 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.escape = void 0;
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-exports.escape = escape;
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/index.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/index.js
deleted file mode 100644
index f58fb8616aa9a..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/index.js
+++ /dev/null
@@ -1,1014 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
-const brace_expansion_1 = require("@isaacs/brace-expansion");
-const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
-const ast_js_1 = require("./ast.js");
-const escape_js_1 = require("./escape.js");
-const unescape_js_1 = require("./unescape.js");
-const minimatch = (p, pattern, options = {}) => {
-    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-exports.minimatch = minimatch;
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-exports.minimatch.sep = exports.sep;
-exports.GLOBSTAR = Symbol('globstar **');
-exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
-exports.filter = filter;
-exports.minimatch.filter = exports.filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return exports.minimatch;
-    }
-    const orig = exports.minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: exports.GLOBSTAR,
-    });
-};
-exports.defaults = defaults;
-exports.minimatch.defaults = exports.defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-const braceExpand = (pattern, options = {}) => {
-    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-    // Thanks to Yeting Li  for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return (0, brace_expansion_1.expand)(pattern);
-};
-exports.braceExpand = braceExpand;
-exports.minimatch.braceExpand = exports.braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-exports.makeRe = makeRe;
-exports.minimatch.makeRe = exports.makeRe;
-const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-exports.match = match;
-exports.minimatch.match = exports.match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === exports.GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return (0, exports.braceExpand)(this.pattern, this.options);
-    }
-    parse(pattern) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return exports.GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === exports.GLOBSTAR
-                        ? exports.GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== exports.GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== exports.GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = exports.GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return exports.minimatch.defaults(def).Minimatch;
-    }
-}
-exports.Minimatch = Minimatch;
-/* c8 ignore start */
-var ast_js_2 = require("./ast.js");
-Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
-var escape_js_2 = require("./escape.js");
-Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
-var unescape_js_2 = require("./unescape.js");
-Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
-/* c8 ignore stop */
-exports.minimatch.AST = ast_js_1.AST;
-exports.minimatch.Minimatch = Minimatch;
-exports.minimatch.escape = escape_js_1.escape;
-exports.minimatch.unescape = unescape_js_1.unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/package.json b/node_modules/cacache/node_modules/minimatch/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/cacache/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/cacache/node_modules/minimatch/dist/commonjs/unescape.js
deleted file mode 100644
index 47c36bcee5a02..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/commonjs/unescape.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = void 0;
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-exports.unescape = unescape;
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/cacache/node_modules/minimatch/dist/esm/assert-valid-pattern.js
deleted file mode 100644
index 7b534fc30200b..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/assert-valid-pattern.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const MAX_PATTERN_LENGTH = 1024 * 64;
-export const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/ast.js b/node_modules/cacache/node_modules/minimatch/dist/esm/ast.js
deleted file mode 100644
index 2d2bced6533de..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/ast.js
+++ /dev/null
@@ -1,588 +0,0 @@
-// parse a single path portion
-import { parseClass } from './brace-expressions.js';
-import { unescape } from './unescape.js';
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-export class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everthing that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                unescape(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, unescape(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something,but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            unescape(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = parseClass(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, unescape(glob), !!hasMagic, uflag];
-    }
-}
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/cacache/node_modules/minimatch/dist/esm/brace-expressions.js
deleted file mode 100644
index c629d6ae816e2..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/brace-expressions.js
+++ /dev/null
@@ -1,148 +0,0 @@
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-// { <posix class>: [<translation>, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-export const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/escape.js b/node_modules/cacache/node_modules/minimatch/dist/esm/escape.js
deleted file mode 100644
index 16f7c8c7bdc64..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/escape.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/index.js b/node_modules/cacache/node_modules/minimatch/dist/esm/index.js
deleted file mode 100644
index 790d6c02a2f22..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/index.js
+++ /dev/null
@@ -1,1001 +0,0 @@
-import { expand } from '@isaacs/brace-expansion';
-import { assertValidPattern } from './assert-valid-pattern.js';
-import { AST } from './ast.js';
-import { escape } from './escape.js';
-import { unescape } from './unescape.js';
-export const minimatch = (p, pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep;
-export const GLOBSTAR = Symbol('globstar **');
-minimatch.GLOBSTAR = GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-export const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return minimatch;
-    }
-    const orig = minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: GLOBSTAR,
-    });
-};
-minimatch.defaults = defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-export const braceExpand = (pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // Thanks to Yeting Li <https://github.com/yetingli> for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return expand(pattern);
-};
-minimatch.braceExpand = braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-export const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-minimatch.match = match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-export class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        assertValidPattern(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return braceExpand(this.pattern, this.options);
-    }
-    parse(pattern) {
-        assertValidPattern(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === GLOBSTAR
-                        ? GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== GLOBSTAR || prev === GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return minimatch.defaults(def).Minimatch;
-    }
-}
-/* c8 ignore start */
-export { AST } from './ast.js';
-export { escape } from './escape.js';
-export { unescape } from './unescape.js';
-/* c8 ignore stop */
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/package.json b/node_modules/cacache/node_modules/minimatch/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/cacache/node_modules/minimatch/dist/esm/unescape.js b/node_modules/cacache/node_modules/minimatch/dist/esm/unescape.js
deleted file mode 100644
index 0faf9a2b7306f..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/dist/esm/unescape.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * becuase `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/minimatch/package.json b/node_modules/cacache/node_modules/minimatch/package.json
deleted file mode 100644
index bfa2423f50b5e..0000000000000
--- a/node_modules/cacache/node_modules/minimatch/package.json
+++ /dev/null
@@ -1,79 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
-  "name": "minimatch",
-  "description": "a glob matcher in javascript",
-  "version": "10.0.3",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/minimatch.git"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.2",
-    "@types/node": "^24.0.0",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.3.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "license": "ISC",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js",
-  "dependencies": {
-    "@isaacs/brace-expansion": "^5.0.0"
-  }
-}
diff --git a/node_modules/cacache/node_modules/path-scurry/LICENSE.md b/node_modules/cacache/node_modules/path-scurry/LICENSE.md
deleted file mode 100644
index c5402b9577a8c..0000000000000
--- a/node_modules/cacache/node_modules/path-scurry/LICENSE.md
+++ /dev/null
@@ -1,55 +0,0 @@
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/cacache/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/cacache/node_modules/path-scurry/dist/commonjs/index.js
deleted file mode 100644
index af3e7595f577f..0000000000000
--- a/node_modules/cacache/node_modules/path-scurry/dist/commonjs/index.js
+++ /dev/null
@@ -1,2016 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PathScurry = exports.Path = exports.PathScurryDarwin = exports.PathScurryPosix = exports.PathScurryWin32 = exports.PathScurryBase = exports.PathPosix = exports.PathWin32 = exports.PathBase = exports.ChildrenCache = exports.ResolveCache = void 0;
-const lru_cache_1 = require("lru-cache");
-const node_path_1 = require("node:path");
-const node_url_1 = require("node:url");
-const fs_1 = require("fs");
-const actualFS = __importStar(require("node:fs"));
-const realpathSync = fs_1.realpathSync.native;
-// TODO: test perf of fs/promises realpath vs realpathCB,
-// since the promises one uses realpath.native
-const promises_1 = require("node:fs/promises");
-const minipass_1 = require("minipass");
-const defaultFS = {
-    lstatSync: fs_1.lstatSync,
-    readdir: fs_1.readdir,
-    readdirSync: fs_1.readdirSync,
-    readlinkSync: fs_1.readlinkSync,
-    realpathSync,
-    promises: {
-        lstat: promises_1.lstat,
-        readdir: promises_1.readdir,
-        readlink: promises_1.readlink,
-        realpath: promises_1.realpath,
-    },
-};
-// if they just gave us require('fs') then use our default
-const fsFromOption = (fsOption) => !fsOption || fsOption === defaultFS || fsOption === actualFS ?
-    defaultFS
-    : {
-        ...defaultFS,
-        ...fsOption,
-        promises: {
-            ...defaultFS.promises,
-            ...(fsOption.promises || {}),
-        },
-    };
-// turn something like //?/c:/ into c:\
-const uncDriveRegexp = /^\\\\\?\\([a-z]:)\\?$/i;
-const uncToDrive = (rootPath) => rootPath.replace(/\//g, '\\').replace(uncDriveRegexp, '$1\\');
-// windows paths are separated by either / or \
-const eitherSep = /[\\\/]/;
-const UNKNOWN = 0; // may not even exist, for all we know
-const IFIFO = 0b0001;
-const IFCHR = 0b0010;
-const IFDIR = 0b0100;
-const IFBLK = 0b0110;
-const IFREG = 0b1000;
-const IFLNK = 0b1010;
-const IFSOCK = 0b1100;
-const IFMT = 0b1111;
-// mask to unset low 4 bits
-const IFMT_UNKNOWN = ~IFMT;
-// set after successfully calling readdir() and getting entries.
-const READDIR_CALLED = 0b0000_0001_0000;
-// set after a successful lstat()
-const LSTAT_CALLED = 0b0000_0010_0000;
-// set if an entry (or one of its parents) is definitely not a dir
-const ENOTDIR = 0b0000_0100_0000;
-// set if an entry (or one of its parents) does not exist
-// (can also be set on lstat errors like EACCES or ENAMETOOLONG)
-const ENOENT = 0b0000_1000_0000;
-// cannot have child entries -- also verify &IFMT is either IFDIR or IFLNK
-// set if we fail to readlink
-const ENOREADLINK = 0b0001_0000_0000;
-// set if we know realpath() will fail
-const ENOREALPATH = 0b0010_0000_0000;
-const ENOCHILD = ENOTDIR | ENOENT | ENOREALPATH;
-const TYPEMASK = 0b0011_1111_1111;
-const entToType = (s) => s.isFile() ? IFREG
-    : s.isDirectory() ? IFDIR
-        : s.isSymbolicLink() ? IFLNK
-            : s.isCharacterDevice() ? IFCHR
-                : s.isBlockDevice() ? IFBLK
-                    : s.isSocket() ? IFSOCK
-                        : s.isFIFO() ? IFIFO
-                            : UNKNOWN;
-// normalize unicode path names
-const normalizeCache = new Map();
-const normalize = (s) => {
-    const c = normalizeCache.get(s);
-    if (c)
-        return c;
-    const n = s.normalize('NFKD');
-    normalizeCache.set(s, n);
-    return n;
-};
-const normalizeNocaseCache = new Map();
-const normalizeNocase = (s) => {
-    const c = normalizeNocaseCache.get(s);
-    if (c)
-        return c;
-    const n = normalize(s.toLowerCase());
-    normalizeNocaseCache.set(s, n);
-    return n;
-};
-/**
- * An LRUCache for storing resolved path strings or Path objects.
- * @internal
- */
-class ResolveCache extends lru_cache_1.LRUCache {
-    constructor() {
-        super({ max: 256 });
-    }
-}
-exports.ResolveCache = ResolveCache;
-// In order to prevent blowing out the js heap by allocating hundreds of
-// thousands of Path entries when walking extremely large trees, the "children"
-// in this tree are represented by storing an array of Path entries in an
-// LRUCache, indexed by the parent.  At any time, Path.children() may return an
-// empty array, indicating that it doesn't know about any of its children, and
-// thus has to rebuild that cache.  This is fine, it just means that we don't
-// benefit as much from having the cached entries, but huge directory walks
-// don't blow out the stack, and smaller ones are still as fast as possible.
-//
-//It does impose some complexity when building up the readdir data, because we
-//need to pass a reference to the children array that we started with.
-/**
- * an LRUCache for storing child entries.
- * @internal
- */
-class ChildrenCache extends lru_cache_1.LRUCache {
-    constructor(maxSize = 16 * 1024) {
-        super({
-            maxSize,
-            // parent + children
-            sizeCalculation: a => a.length + 1,
-        });
-    }
-}
-exports.ChildrenCache = ChildrenCache;
-const setAsCwd = Symbol('PathScurry setAsCwd');
-/**
- * Path objects are sort of like a super-powered
- * {@link https://nodejs.org/docs/latest/api/fs.html#class-fsdirent fs.Dirent}
- *
- * Each one represents a single filesystem entry on disk, which may or may not
- * exist. It includes methods for reading various types of information via
- * lstat, readlink, and readdir, and caches all information to the greatest
- * degree possible.
- *
- * Note that fs operations that would normally throw will instead return an
- * "empty" value. This is in order to prevent excessive overhead from error
- * stack traces.
- */
-class PathBase {
-    /**
-     * the basename of this path
-     *
-     * **Important**: *always* test the path name against any test string
-     * usingthe {@link isNamed} method, and not by directly comparing this
-     * string. Otherwise, unicode path strings that the system sees as identical
-     * will not be properly treated as the same path, leading to incorrect
-     * behavior and possible security issues.
-     */
-    name;
-    /**
-     * the Path entry corresponding to the path root.
-     *
-     * @internal
-     */
-    root;
-    /**
-     * All roots found within the current PathScurry family
-     *
-     * @internal
-     */
-    roots;
-    /**
-     * a reference to the parent path, or undefined in the case of root entries
-     *
-     * @internal
-     */
-    parent;
-    /**
-     * boolean indicating whether paths are compared case-insensitively
-     * @internal
-     */
-    nocase;
-    /**
-     * boolean indicating that this path is the current working directory
-     * of the PathScurry collection that contains it.
-     */
-    isCWD = false;
-    // potential default fs override
-    #fs;
-    // Stats fields
-    #dev;
-    get dev() {
-        return this.#dev;
-    }
-    #mode;
-    get mode() {
-        return this.#mode;
-    }
-    #nlink;
-    get nlink() {
-        return this.#nlink;
-    }
-    #uid;
-    get uid() {
-        return this.#uid;
-    }
-    #gid;
-    get gid() {
-        return this.#gid;
-    }
-    #rdev;
-    get rdev() {
-        return this.#rdev;
-    }
-    #blksize;
-    get blksize() {
-        return this.#blksize;
-    }
-    #ino;
-    get ino() {
-        return this.#ino;
-    }
-    #size;
-    get size() {
-        return this.#size;
-    }
-    #blocks;
-    get blocks() {
-        return this.#blocks;
-    }
-    #atimeMs;
-    get atimeMs() {
-        return this.#atimeMs;
-    }
-    #mtimeMs;
-    get mtimeMs() {
-        return this.#mtimeMs;
-    }
-    #ctimeMs;
-    get ctimeMs() {
-        return this.#ctimeMs;
-    }
-    #birthtimeMs;
-    get birthtimeMs() {
-        return this.#birthtimeMs;
-    }
-    #atime;
-    get atime() {
-        return this.#atime;
-    }
-    #mtime;
-    get mtime() {
-        return this.#mtime;
-    }
-    #ctime;
-    get ctime() {
-        return this.#ctime;
-    }
-    #birthtime;
-    get birthtime() {
-        return this.#birthtime;
-    }
-    #matchName;
-    #depth;
-    #fullpath;
-    #fullpathPosix;
-    #relative;
-    #relativePosix;
-    #type;
-    #children;
-    #linkTarget;
-    #realpath;
-    /**
-     * This property is for compatibility with the Dirent class as of
-     * Node v20, where Dirent['parentPath'] refers to the path of the
-     * directory that was passed to readdir. For root entries, it's the path
-     * to the entry itself.
-     */
-    get parentPath() {
-        return (this.parent || this).fullpath();
-    }
-    /**
-     * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
-     * this property refers to the *parent* path, not the path object itself.
-     *
-     * @deprecated
-     */
-    get path() {
-        return this.parentPath;
-    }
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        this.name = name;
-        this.#matchName = nocase ? normalizeNocase(name) : normalize(name);
-        this.#type = type & TYPEMASK;
-        this.nocase = nocase;
-        this.roots = roots;
-        this.root = root || this;
-        this.#children = children;
-        this.#fullpath = opts.fullpath;
-        this.#relative = opts.relative;
-        this.#relativePosix = opts.relativePosix;
-        this.parent = opts.parent;
-        if (this.parent) {
-            this.#fs = this.parent.#fs;
-        }
-        else {
-            this.#fs = fsFromOption(opts.fs);
-        }
-    }
-    /**
-     * Returns the depth of the Path object from its root.
-     *
-     * For example, a path at `/foo/bar` would have a depth of 2.
-     */
-    depth() {
-        if (this.#depth !== undefined)
-            return this.#depth;
-        if (!this.parent)
-            return (this.#depth = 0);
-        return (this.#depth = this.parent.depth() + 1);
-    }
-    /**
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Get the Path object referenced by the string path, resolved from this Path
-     */
-    resolve(path) {
-        if (!path) {
-            return this;
-        }
-        const rootPath = this.getRootString(path);
-        const dir = path.substring(rootPath.length);
-        const dirParts = dir.split(this.splitSep);
-        const result = rootPath ?
-            this.getRoot(rootPath).#resolveParts(dirParts)
-            : this.#resolveParts(dirParts);
-        return result;
-    }
-    #resolveParts(dirParts) {
-        let p = this;
-        for (const part of dirParts) {
-            p = p.child(part);
-        }
-        return p;
-    }
-    /**
-     * Returns the cached children Path objects, if still available.  If they
-     * have fallen out of the cache, then returns an empty array, and resets the
-     * READDIR_CALLED bit, so that future calls to readdir() will require an fs
-     * lookup.
-     *
-     * @internal
-     */
-    children() {
-        const cached = this.#children.get(this);
-        if (cached) {
-            return cached;
-        }
-        const children = Object.assign([], { provisional: 0 });
-        this.#children.set(this, children);
-        this.#type &= ~READDIR_CALLED;
-        return children;
-    }
-    /**
-     * Resolves a path portion and returns or creates the child Path.
-     *
-     * Returns `this` if pathPart is `''` or `'.'`, or `parent` if pathPart is
-     * `'..'`.
-     *
-     * This should not be called directly.  If `pathPart` contains any path
-     * separators, it will lead to unsafe undefined behavior.
-     *
-     * Use `Path.resolve()` instead.
-     *
-     * @internal
-     */
-    child(pathPart, opts) {
-        if (pathPart === '' || pathPart === '.') {
-            return this;
-        }
-        if (pathPart === '..') {
-            return this.parent || this;
-        }
-        // find the child
-        const children = this.children();
-        const name = this.nocase ? normalizeNocase(pathPart) : normalize(pathPart);
-        for (const p of children) {
-            if (p.#matchName === name) {
-                return p;
-            }
-        }
-        // didn't find it, create provisional child, since it might not
-        // actually exist.  If we know the parent isn't a dir, then
-        // in fact it CAN'T exist.
-        const s = this.parent ? this.sep : '';
-        const fullpath = this.#fullpath ? this.#fullpath + s + pathPart : undefined;
-        const pchild = this.newChild(pathPart, UNKNOWN, {
-            ...opts,
-            parent: this,
-            fullpath,
-        });
-        if (!this.canReaddir()) {
-            pchild.#type |= ENOENT;
-        }
-        // don't have to update provisional, because if we have real children,
-        // then provisional is set to children.length, otherwise a lower number
-        children.push(pchild);
-        return pchild;
-    }
-    /**
-     * The relative path from the cwd. If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpath()
-     */
-    relative() {
-        if (this.isCWD)
-            return '';
-        if (this.#relative !== undefined) {
-            return this.#relative;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relative = this.name);
-        }
-        const pv = p.relative();
-        return pv + (!pv || !p.parent ? '' : this.sep) + name;
-    }
-    /**
-     * The relative path from the cwd, using / as the path separator.
-     * If it does not share an ancestor with
-     * the cwd, then this ends up being equivalent to the fullpathPosix()
-     * On posix systems, this is identical to relative().
-     */
-    relativePosix() {
-        if (this.sep === '/')
-            return this.relative();
-        if (this.isCWD)
-            return '';
-        if (this.#relativePosix !== undefined)
-            return this.#relativePosix;
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#relativePosix = this.fullpathPosix());
-        }
-        const pv = p.relativePosix();
-        return pv + (!pv || !p.parent ? '' : '/') + name;
-    }
-    /**
-     * The fully resolved path string for this Path entry
-     */
-    fullpath() {
-        if (this.#fullpath !== undefined) {
-            return this.#fullpath;
-        }
-        const name = this.name;
-        const p = this.parent;
-        if (!p) {
-            return (this.#fullpath = this.name);
-        }
-        const pv = p.fullpath();
-        const fp = pv + (!p.parent ? '' : this.sep) + name;
-        return (this.#fullpath = fp);
-    }
-    /**
-     * On platforms other than windows, this is identical to fullpath.
-     *
-     * On windows, this is overridden to return the forward-slash form of the
-     * full UNC path.
-     */
-    fullpathPosix() {
-        if (this.#fullpathPosix !== undefined)
-            return this.#fullpathPosix;
-        if (this.sep === '/')
-            return (this.#fullpathPosix = this.fullpath());
-        if (!this.parent) {
-            const p = this.fullpath().replace(/\\/g, '/');
-            if (/^[a-z]:\//i.test(p)) {
-                return (this.#fullpathPosix = `//?/${p}`);
-            }
-            else {
-                return (this.#fullpathPosix = p);
-            }
-        }
-        const p = this.parent;
-        const pfpp = p.fullpathPosix();
-        const fpp = pfpp + (!pfpp || !p.parent ? '' : '/') + this.name;
-        return (this.#fullpathPosix = fpp);
-    }
-    /**
-     * Is the Path of an unknown type?
-     *
-     * Note that we might know *something* about it if there has been a previous
-     * filesystem operation, for example that it does not exist, or is not a
-     * link, or whether it has child entries.
-     */
-    isUnknown() {
-        return (this.#type & IFMT) === UNKNOWN;
-    }
-    isType(type) {
-        return this[`is${type}`]();
-    }
-    getType() {
-        return (this.isUnknown() ? 'Unknown'
-            : this.isDirectory() ? 'Directory'
-                : this.isFile() ? 'File'
-                    : this.isSymbolicLink() ? 'SymbolicLink'
-                        : this.isFIFO() ? 'FIFO'
-                            : this.isCharacterDevice() ? 'CharacterDevice'
-                                : this.isBlockDevice() ? 'BlockDevice'
-                                    : /* c8 ignore start */ this.isSocket() ? 'Socket'
-                                        : 'Unknown');
-        /* c8 ignore stop */
-    }
-    /**
-     * Is the Path a regular file?
-     */
-    isFile() {
-        return (this.#type & IFMT) === IFREG;
-    }
-    /**
-     * Is the Path a directory?
-     */
-    isDirectory() {
-        return (this.#type & IFMT) === IFDIR;
-    }
-    /**
-     * Is the path a character device?
-     */
-    isCharacterDevice() {
-        return (this.#type & IFMT) === IFCHR;
-    }
-    /**
-     * Is the path a block device?
-     */
-    isBlockDevice() {
-        return (this.#type & IFMT) === IFBLK;
-    }
-    /**
-     * Is the path a FIFO pipe?
-     */
-    isFIFO() {
-        return (this.#type & IFMT) === IFIFO;
-    }
-    /**
-     * Is the path a socket?
-     */
-    isSocket() {
-        return (this.#type & IFMT) === IFSOCK;
-    }
-    /**
-     * Is the path a symbolic link?
-     */
-    isSymbolicLink() {
-        return (this.#type & IFLNK) === IFLNK;
-    }
-    /**
-     * Return the entry if it has been subject of a successful lstat, or
-     * undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* simply
-     * mean that we haven't called lstat on it.
-     */
-    lstatCached() {
-        return this.#type & LSTAT_CALLED ? this : undefined;
-    }
-    /**
-     * Return the cached link target if the entry has been the subject of a
-     * successful readlink, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readlink() has been called at some point.
-     */
-    readlinkCached() {
-        return this.#linkTarget;
-    }
-    /**
-     * Returns the cached realpath target if the entry has been the subject
-     * of a successful realpath, or undefined otherwise.
-     *
-     * Does not read the filesystem, so an undefined result *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * realpath() has been called at some point.
-     */
-    realpathCached() {
-        return this.#realpath;
-    }
-    /**
-     * Returns the cached child Path entries array if the entry has been the
-     * subject of a successful readdir(), or [] otherwise.
-     *
-     * Does not read the filesystem, so an empty array *could* just mean we
-     * don't have any cached data. Only use it if you are very sure that a
-     * readdir() has been called recently enough to still be valid.
-     */
-    readdirCached() {
-        const children = this.children();
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * Return true if it's worth trying to readlink.  Ie, we don't (yet) have
-     * any indication that readlink will definitely fail.
-     *
-     * Returns false if the path is known to not be a symlink, if a previous
-     * readlink failed, or if the entry does not exist.
-     */
-    canReadlink() {
-        if (this.#linkTarget)
-            return true;
-        if (!this.parent)
-            return false;
-        // cases where it cannot possibly succeed
-        const ifmt = this.#type & IFMT;
-        return !((ifmt !== UNKNOWN && ifmt !== IFLNK) ||
-            this.#type & ENOREADLINK ||
-            this.#type & ENOENT);
-    }
-    /**
-     * Return true if readdir has previously been successfully called on this
-     * path, indicating that cachedReaddir() is likely valid.
-     */
-    calledReaddir() {
-        return !!(this.#type & READDIR_CALLED);
-    }
-    /**
-     * Returns true if the path is known to not exist. That is, a previous lstat
-     * or readdir failed to verify its existence when that would have been
-     * expected, or a parent entry was marked either enoent or enotdir.
-     */
-    isENOENT() {
-        return !!(this.#type & ENOENT);
-    }
-    /**
-     * Return true if the path is a match for the given path name.  This handles
-     * case sensitivity and unicode normalization.
-     *
-     * Note: even on case-sensitive systems, it is **not** safe to test the
-     * equality of the `.name` property to determine whether a given pathname
-     * matches, due to unicode normalization mismatches.
-     *
-     * Always use this method instead of testing the `path.name` property
-     * directly.
-     */
-    isNamed(n) {
-        return !this.nocase ?
-            this.#matchName === normalize(n)
-            : this.#matchName === normalizeNocase(n);
-    }
-    /**
-     * Return the Path object corresponding to the target of a symbolic link.
-     *
-     * If the Path is not a symbolic link, or if the readlink call fails for any
-     * reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     */
-    async readlink() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = await this.#fs.promises.readlink(this.fullpath());
-            const linkTarget = (await this.parent.realpath())?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    /**
-     * Synchronous {@link PathBase.readlink}
-     */
-    readlinkSync() {
-        const target = this.#linkTarget;
-        if (target) {
-            return target;
-        }
-        if (!this.canReadlink()) {
-            return undefined;
-        }
-        /* c8 ignore start */
-        // already covered by the canReadlink test, here for ts grumples
-        if (!this.parent) {
-            return undefined;
-        }
-        /* c8 ignore stop */
-        try {
-            const read = this.#fs.readlinkSync(this.fullpath());
-            const linkTarget = this.parent.realpathSync()?.resolve(read);
-            if (linkTarget) {
-                return (this.#linkTarget = linkTarget);
-            }
-        }
-        catch (er) {
-            this.#readlinkFail(er.code);
-            return undefined;
-        }
-    }
-    #readdirSuccess(children) {
-        // succeeded, mark readdir called bit
-        this.#type |= READDIR_CALLED;
-        // mark all remaining provisional children as ENOENT
-        for (let p = children.provisional; p < children.length; p++) {
-            const c = children[p];
-            if (c)
-                c.#markENOENT();
-        }
-    }
-    #markENOENT() {
-        // mark as UNKNOWN and ENOENT
-        if (this.#type & ENOENT)
-            return;
-        this.#type = (this.#type | ENOENT) & IFMT_UNKNOWN;
-        this.#markChildrenENOENT();
-    }
-    #markChildrenENOENT() {
-        // all children are provisional and do not exist
-        const children = this.children();
-        children.provisional = 0;
-        for (const p of children) {
-            p.#markENOENT();
-        }
-    }
-    #markENOREALPATH() {
-        this.#type |= ENOREALPATH;
-        this.#markENOTDIR();
-    }
-    // save the information when we know the entry is not a dir
-    #markENOTDIR() {
-        // entry is not a directory, so any children can't exist.
-        // this *should* be impossible, since any children created
-        // after it's been marked ENOTDIR should be marked ENOENT,
-        // so it won't even get to this point.
-        /* c8 ignore start */
-        if (this.#type & ENOTDIR)
-            return;
-        /* c8 ignore stop */
-        let t = this.#type;
-        // this could happen if we stat a dir, then delete it,
-        // then try to read it or one of its children.
-        if ((t & IFMT) === IFDIR)
-            t &= IFMT_UNKNOWN;
-        this.#type = t | ENOTDIR;
-        this.#markChildrenENOENT();
-    }
-    #readdirFail(code = '') {
-        // markENOTDIR and markENOENT also set provisional=0
-        if (code === 'ENOTDIR' || code === 'EPERM') {
-            this.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            this.#markENOENT();
-        }
-        else {
-            this.children().provisional = 0;
-        }
-    }
-    #lstatFail(code = '') {
-        // Windows just raises ENOENT in this case, disable for win CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR') {
-            // already know it has a parent by this point
-            const p = this.parent;
-            p.#markENOTDIR();
-        }
-        else if (code === 'ENOENT') {
-            /* c8 ignore stop */
-            this.#markENOENT();
-        }
-    }
-    #readlinkFail(code = '') {
-        let ter = this.#type;
-        ter |= ENOREADLINK;
-        if (code === 'ENOENT')
-            ter |= ENOENT;
-        // windows gets a weird error when you try to readlink a file
-        if (code === 'EINVAL' || code === 'UNKNOWN') {
-            // exists, but not a symlink, we don't know WHAT it is, so remove
-            // all IFMT bits.
-            ter &= IFMT_UNKNOWN;
-        }
-        this.#type = ter;
-        // windows just gets ENOENT in this case.  We do cover the case,
-        // just disabled because it's impossible on Windows CI
-        /* c8 ignore start */
-        if (code === 'ENOTDIR' && this.parent) {
-            this.parent.#markENOTDIR();
-        }
-        /* c8 ignore stop */
-    }
-    #readdirAddChild(e, c) {
-        return (this.#readdirMaybePromoteChild(e, c) ||
-            this.#readdirAddNewChild(e, c));
-    }
-    #readdirAddNewChild(e, c) {
-        // alloc new entry at head, so it's never provisional
-        const type = entToType(e);
-        const child = this.newChild(e.name, type, { parent: this });
-        const ifmt = child.#type & IFMT;
-        if (ifmt !== IFDIR && ifmt !== IFLNK && ifmt !== UNKNOWN) {
-            child.#type |= ENOTDIR;
-        }
-        c.unshift(child);
-        c.provisional++;
-        return child;
-    }
-    #readdirMaybePromoteChild(e, c) {
-        for (let p = c.provisional; p < c.length; p++) {
-            const pchild = c[p];
-            const name = this.nocase ? normalizeNocase(e.name) : normalize(e.name);
-            if (name !== pchild.#matchName) {
-                continue;
-            }
-            return this.#readdirPromoteChild(e, pchild, p, c);
-        }
-    }
-    #readdirPromoteChild(e, p, index, c) {
-        const v = p.name;
-        // retain any other flags, but set ifmt from dirent
-        p.#type = (p.#type & IFMT_UNKNOWN) | entToType(e);
-        // case sensitivity fixing when we learn the true name.
-        if (v !== e.name)
-            p.name = e.name;
-        // just advance provisional index (potentially off the list),
-        // otherwise we have to splice/pop it out and re-insert at head
-        if (index !== c.provisional) {
-            if (index === c.length - 1)
-                c.pop();
-            else
-                c.splice(index, 1);
-            c.unshift(p);
-        }
-        c.provisional++;
-        return p;
-    }
-    /**
-     * Call lstat() on this Path, and update all known information that can be
-     * determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(await this.#fs.promises.lstat(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    /**
-     * synchronous {@link PathBase.lstat}
-     */
-    lstatSync() {
-        if ((this.#type & ENOENT) === 0) {
-            try {
-                this.#applyStat(this.#fs.lstatSync(this.fullpath()));
-                return this;
-            }
-            catch (er) {
-                this.#lstatFail(er.code);
-            }
-        }
-    }
-    #applyStat(st) {
-        const { atime, atimeMs, birthtime, birthtimeMs, blksize, blocks, ctime, ctimeMs, dev, gid, ino, mode, mtime, mtimeMs, nlink, rdev, size, uid, } = st;
-        this.#atime = atime;
-        this.#atimeMs = atimeMs;
-        this.#birthtime = birthtime;
-        this.#birthtimeMs = birthtimeMs;
-        this.#blksize = blksize;
-        this.#blocks = blocks;
-        this.#ctime = ctime;
-        this.#ctimeMs = ctimeMs;
-        this.#dev = dev;
-        this.#gid = gid;
-        this.#ino = ino;
-        this.#mode = mode;
-        this.#mtime = mtime;
-        this.#mtimeMs = mtimeMs;
-        this.#nlink = nlink;
-        this.#rdev = rdev;
-        this.#size = size;
-        this.#uid = uid;
-        const ifmt = entToType(st);
-        // retain any other flags, but set the ifmt
-        this.#type = (this.#type & IFMT_UNKNOWN) | ifmt | LSTAT_CALLED;
-        if (ifmt !== UNKNOWN && ifmt !== IFDIR && ifmt !== IFLNK) {
-            this.#type |= ENOTDIR;
-        }
-    }
-    #onReaddirCB = [];
-    #readdirCBInFlight = false;
-    #callOnReaddirCB(children) {
-        this.#readdirCBInFlight = false;
-        const cbs = this.#onReaddirCB.slice();
-        this.#onReaddirCB.length = 0;
-        cbs.forEach(cb => cb(null, children));
-    }
-    /**
-     * Standard node-style callback interface to get list of directory entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     *
-     * @param cb The callback called with (er, entries).  Note that the `er`
-     * param is somewhat extraneous, as all readdir() errors are handled and
-     * simply result in an empty set of entries being returned.
-     * @param allowZalgo Boolean indicating that immediately known results should
-     * *not* be deferred with `queueMicrotask`. Defaults to `false`. Release
-     * zalgo at your peril, the dark pony lord is devious and unforgiving.
-     */
-    readdirCB(cb, allowZalgo = false) {
-        if (!this.canReaddir()) {
-            if (allowZalgo)
-                cb(null, []);
-            else
-                queueMicrotask(() => cb(null, []));
-            return;
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            const c = children.slice(0, children.provisional);
-            if (allowZalgo)
-                cb(null, c);
-            else
-                queueMicrotask(() => cb(null, c));
-            return;
-        }
-        // don't have to worry about zalgo at this point.
-        this.#onReaddirCB.push(cb);
-        if (this.#readdirCBInFlight) {
-            return;
-        }
-        this.#readdirCBInFlight = true;
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        this.#fs.readdir(fullpath, { withFileTypes: true }, (er, entries) => {
-            if (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            else {
-                // if we didn't get an error, we always get entries.
-                //@ts-ignore
-                for (const e of entries) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            this.#callOnReaddirCB(children.slice(0, children.provisional));
-            return;
-        });
-    }
-    #asyncReaddirInFlight;
-    /**
-     * Return an array of known child entries.
-     *
-     * If the Path cannot or does not contain any children, then an empty array
-     * is returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async readdir() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        if (this.#asyncReaddirInFlight) {
-            await this.#asyncReaddirInFlight;
-        }
-        else {
-            /* c8 ignore start */
-            let resolve = () => { };
-            /* c8 ignore stop */
-            this.#asyncReaddirInFlight = new Promise(res => (resolve = res));
-            try {
-                for (const e of await this.#fs.promises.readdir(fullpath, {
-                    withFileTypes: true,
-                })) {
-                    this.#readdirAddChild(e, children);
-                }
-                this.#readdirSuccess(children);
-            }
-            catch (er) {
-                this.#readdirFail(er.code);
-                children.provisional = 0;
-            }
-            this.#asyncReaddirInFlight = undefined;
-            resolve();
-        }
-        return children.slice(0, children.provisional);
-    }
-    /**
-     * synchronous {@link PathBase.readdir}
-     */
-    readdirSync() {
-        if (!this.canReaddir()) {
-            return [];
-        }
-        const children = this.children();
-        if (this.calledReaddir()) {
-            return children.slice(0, children.provisional);
-        }
-        // else read the directory, fill up children
-        // de-provisionalize any provisional children.
-        const fullpath = this.fullpath();
-        try {
-            for (const e of this.#fs.readdirSync(fullpath, {
-                withFileTypes: true,
-            })) {
-                this.#readdirAddChild(e, children);
-            }
-            this.#readdirSuccess(children);
-        }
-        catch (er) {
-            this.#readdirFail(er.code);
-            children.provisional = 0;
-        }
-        return children.slice(0, children.provisional);
-    }
-    canReaddir() {
-        if (this.#type & ENOCHILD)
-            return false;
-        const ifmt = IFMT & this.#type;
-        // we always set ENOTDIR when setting IFMT, so should be impossible
-        /* c8 ignore start */
-        if (!(ifmt === UNKNOWN || ifmt === IFDIR || ifmt === IFLNK)) {
-            return false;
-        }
-        /* c8 ignore stop */
-        return true;
-    }
-    shouldWalk(dirs, walkFilter) {
-        return ((this.#type & IFDIR) === IFDIR &&
-            !(this.#type & ENOCHILD) &&
-            !dirs.has(this) &&
-            (!walkFilter || walkFilter(this)));
-    }
-    /**
-     * Return the Path object corresponding to path as resolved
-     * by realpath(3).
-     *
-     * If the realpath call fails for any reason, `undefined` is returned.
-     *
-     * Result is cached, and thus may be outdated if the filesystem is mutated.
-     * On success, returns a Path object.
-     */
-    async realpath() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = await this.#fs.promises.realpath(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Synchronous {@link realpath}
-     */
-    realpathSync() {
-        if (this.#realpath)
-            return this.#realpath;
-        if ((ENOREALPATH | ENOREADLINK | ENOENT) & this.#type)
-            return undefined;
-        try {
-            const rp = this.#fs.realpathSync(this.fullpath());
-            return (this.#realpath = this.resolve(rp));
-        }
-        catch (_) {
-            this.#markENOREALPATH();
-        }
-    }
-    /**
-     * Internal method to mark this Path object as the scurry cwd,
-     * called by {@link PathScurry#chdir}
-     *
-     * @internal
-     */
-    [setAsCwd](oldCwd) {
-        if (oldCwd === this)
-            return;
-        oldCwd.isCWD = false;
-        this.isCWD = true;
-        const changed = new Set([]);
-        let rp = [];
-        let p = this;
-        while (p && p.parent) {
-            changed.add(p);
-            p.#relative = rp.join(this.sep);
-            p.#relativePosix = rp.join('/');
-            p = p.parent;
-            rp.push('..');
-        }
-        // now un-memoize parents of old cwd
-        p = oldCwd;
-        while (p && p.parent && !changed.has(p)) {
-            p.#relative = undefined;
-            p.#relativePosix = undefined;
-            p = p.parent;
-        }
-    }
-}
-exports.PathBase = PathBase;
-/**
- * Path class used on win32 systems
- *
- * Uses `'\\'` as the path separator for returned paths, either `'\\'` or `'/'`
- * as the path separator for parsing paths.
- */
-class PathWin32 extends PathBase {
-    /**
-     * Separator for generating path strings.
-     */
-    sep = '\\';
-    /**
-     * Separator for parsing path strings.
-     */
-    splitSep = eitherSep;
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathWin32(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return node_path_1.win32.parse(path).root;
-    }
-    /**
-     * @internal
-     */
-    getRoot(rootPath) {
-        rootPath = uncToDrive(rootPath.toUpperCase());
-        if (rootPath === this.root.name) {
-            return this.root;
-        }
-        // ok, not that one, check if it matches another we know about
-        for (const [compare, root] of Object.entries(this.roots)) {
-            if (this.sameRoot(rootPath, compare)) {
-                return (this.roots[rootPath] = root);
-            }
-        }
-        // otherwise, have to create a new one.
-        return (this.roots[rootPath] = new PathScurryWin32(rootPath, this).root);
-    }
-    /**
-     * @internal
-     */
-    sameRoot(rootPath, compare = this.root.name) {
-        // windows can (rarely) have case-sensitive filesystem, but
-        // UNC and drive letters are always case-insensitive, and canonically
-        // represented uppercase.
-        rootPath = rootPath
-            .toUpperCase()
-            .replace(/\//g, '\\')
-            .replace(uncDriveRegexp, '$1\\');
-        return rootPath === compare;
-    }
-}
-exports.PathWin32 = PathWin32;
-/**
- * Path class used on all posix systems.
- *
- * Uses `'/'` as the path separator.
- */
-class PathPosix extends PathBase {
-    /**
-     * separator for parsing path strings
-     */
-    splitSep = '/';
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    /**
-     * Do not create new Path objects directly.  They should always be accessed
-     * via the PathScurry class or other methods on the Path class.
-     *
-     * @internal
-     */
-    constructor(name, type = UNKNOWN, root, roots, nocase, children, opts) {
-        super(name, type, root, roots, nocase, children, opts);
-    }
-    /**
-     * @internal
-     */
-    getRootString(path) {
-        return path.startsWith('/') ? '/' : '';
-    }
-    /**
-     * @internal
-     */
-    getRoot(_rootPath) {
-        return this.root;
-    }
-    /**
-     * @internal
-     */
-    newChild(name, type = UNKNOWN, opts = {}) {
-        return new PathPosix(name, type, this.root, this.roots, this.nocase, this.childrenCache(), opts);
-    }
-}
-exports.PathPosix = PathPosix;
-/**
- * The base class for all PathScurry classes, providing the interface for path
- * resolution and filesystem operations.
- *
- * Typically, you should *not* instantiate this class directly, but rather one
- * of the platform-specific classes, or the exported {@link PathScurry} which
- * defaults to the current platform.
- */
-class PathScurryBase {
-    /**
-     * The root Path entry for the current working directory of this Scurry
-     */
-    root;
-    /**
-     * The string path for the root of this Scurry's current working directory
-     */
-    rootPath;
-    /**
-     * A collection of all roots encountered, referenced by rootPath
-     */
-    roots;
-    /**
-     * The Path entry corresponding to this PathScurry's current working directory.
-     */
-    cwd;
-    #resolveCache;
-    #resolvePosixCache;
-    #children;
-    /**
-     * Perform path comparisons case-insensitively.
-     *
-     * Defaults true on Darwin and Windows systems, false elsewhere.
-     */
-    nocase;
-    #fs;
-    /**
-     * This class should not be instantiated directly.
-     *
-     * Use PathScurryWin32, PathScurryDarwin, PathScurryPosix, or PathScurry
-     *
-     * @internal
-     */
-    constructor(cwd = process.cwd(), pathImpl, sep, { nocase, childrenCacheSize = 16 * 1024, fs = defaultFS, } = {}) {
-        this.#fs = fsFromOption(fs);
-        if (cwd instanceof URL || cwd.startsWith('file://')) {
-            cwd = (0, node_url_1.fileURLToPath)(cwd);
-        }
-        // resolve and split root, and then add to the store.
-        // this is the only time we call path.resolve()
-        const cwdPath = pathImpl.resolve(cwd);
-        this.roots = Object.create(null);
-        this.rootPath = this.parseRootPath(cwdPath);
-        this.#resolveCache = new ResolveCache();
-        this.#resolvePosixCache = new ResolveCache();
-        this.#children = new ChildrenCache(childrenCacheSize);
-        const split = cwdPath.substring(this.rootPath.length).split(sep);
-        // resolve('/') leaves '', splits to [''], we don't want that.
-        if (split.length === 1 && !split[0]) {
-            split.pop();
-        }
-        /* c8 ignore start */
-        if (nocase === undefined) {
-            throw new TypeError('must provide nocase setting to PathScurryBase ctor');
-        }
-        /* c8 ignore stop */
-        this.nocase = nocase;
-        this.root = this.newRoot(this.#fs);
-        this.roots[this.rootPath] = this.root;
-        let prev = this.root;
-        let len = split.length - 1;
-        const joinSep = pathImpl.sep;
-        let abs = this.rootPath;
-        let sawFirst = false;
-        for (const part of split) {
-            const l = len--;
-            prev = prev.child(part, {
-                relative: new Array(l).fill('..').join(joinSep),
-                relativePosix: new Array(l).fill('..').join('/'),
-                fullpath: (abs += (sawFirst ? '' : joinSep) + part),
-            });
-            sawFirst = true;
-        }
-        this.cwd = prev;
-    }
-    /**
-     * Get the depth of a provided path, string, or the cwd
-     */
-    depth(path = this.cwd) {
-        if (typeof path === 'string') {
-            path = this.cwd.resolve(path);
-        }
-        return path.depth();
-    }
-    /**
-     * Return the cache of child entries.  Exposed so subclasses can create
-     * child Path objects in a platform-specific way.
-     *
-     * @internal
-     */
-    childrenCache() {
-        return this.#children;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolve(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolveCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpath();
-        this.#resolveCache.set(r, result);
-        return result;
-    }
-    /**
-     * Resolve one or more path strings to a resolved string, returning
-     * the posix path.  Identical to .resolve() on posix systems, but on
-     * windows will return a forward-slash separated UNC path.
-     *
-     * Same interface as require('path').resolve.
-     *
-     * Much faster than path.resolve() when called multiple times for the same
-     * path, because the resolved Path objects are cached.  Much slower
-     * otherwise.
-     */
-    resolvePosix(...paths) {
-        // first figure out the minimum number of paths we have to test
-        // we always start at cwd, but any absolutes will bump the start
-        let r = '';
-        for (let i = paths.length - 1; i >= 0; i--) {
-            const p = paths[i];
-            if (!p || p === '.')
-                continue;
-            r = r ? `${p}/${r}` : p;
-            if (this.isAbsolute(p)) {
-                break;
-            }
-        }
-        const cached = this.#resolvePosixCache.get(r);
-        if (cached !== undefined) {
-            return cached;
-        }
-        const result = this.cwd.resolve(r).fullpathPosix();
-        this.#resolvePosixCache.set(r, result);
-        return result;
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or entry
-     */
-    relative(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relative();
-    }
-    /**
-     * find the relative path from the cwd to the supplied path string or
-     * entry, using / as the path delimiter, even on Windows.
-     */
-    relativePosix(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.relativePosix();
-    }
-    /**
-     * Return the basename for the provided string or Path object
-     */
-    basename(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.name;
-    }
-    /**
-     * Return the dirname for the provided string or Path object
-     */
-    dirname(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return (entry.parent || entry).fullpath();
-    }
-    async readdir(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else {
-            const p = await entry.readdir();
-            return withFileTypes ? p : p.map(e => e.name);
-        }
-    }
-    readdirSync(entry = this.cwd, opts = {
-        withFileTypes: true,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true } = opts;
-        if (!entry.canReaddir()) {
-            return [];
-        }
-        else if (withFileTypes) {
-            return entry.readdirSync();
-        }
-        else {
-            return entry.readdirSync().map(e => e.name);
-        }
-    }
-    /**
-     * Call lstat() on the string or Path object, and update all known
-     * information that can be determined.
-     *
-     * Note that unlike `fs.lstat()`, the returned value does not contain some
-     * information, such as `mode`, `dev`, `nlink`, and `ino`.  If that
-     * information is required, you will need to call `fs.lstat` yourself.
-     *
-     * If the Path refers to a nonexistent file, or if the lstat call fails for
-     * any reason, `undefined` is returned.  Otherwise the updated Path object is
-     * returned.
-     *
-     * Results are cached, and thus may be out of date if the filesystem is
-     * mutated.
-     */
-    async lstat(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstat();
-    }
-    /**
-     * synchronous {@link PathScurryBase.lstat}
-     */
-    lstatSync(entry = this.cwd) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        return entry.lstatSync();
-    }
-    async readlink(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.readlink();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    readlinkSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.readlinkSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async realpath(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = await entry.realpath();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    realpathSync(entry = this.cwd, { withFileTypes } = {
-        withFileTypes: false,
-    }) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            withFileTypes = entry.withFileTypes;
-            entry = this.cwd;
-        }
-        const e = entry.realpathSync();
-        return withFileTypes ? e : e?.fullpath();
-    }
-    async walk(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const walk = (dir, cb) => {
-            dirs.add(dir);
-            dir.readdirCB((er, entries) => {
-                /* c8 ignore start */
-                if (er) {
-                    return cb(er);
-                }
-                /* c8 ignore stop */
-                let len = entries.length;
-                if (!len)
-                    return cb();
-                const next = () => {
-                    if (--len === 0) {
-                        cb();
-                    }
-                };
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        results.push(withFileTypes ? e : e.fullpath());
-                    }
-                    if (follow && e.isSymbolicLink()) {
-                        e.realpath()
-                            .then(r => (r?.isUnknown() ? r.lstat() : r))
-                            .then(r => r?.shouldWalk(dirs, walkFilter) ? walk(r, next) : next());
-                    }
-                    else {
-                        if (e.shouldWalk(dirs, walkFilter)) {
-                            walk(e, next);
-                        }
-                        else {
-                            next();
-                        }
-                    }
-                }
-            }, true); // zalgooooooo
-        };
-        const start = entry;
-        return new Promise((res, rej) => {
-            walk(start, er => {
-                /* c8 ignore start */
-                if (er)
-                    return rej(er);
-                /* c8 ignore stop */
-                res(results);
-            });
-        });
-    }
-    walkSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = [];
-        if (!filter || filter(entry)) {
-            results.push(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    results.push(withFileTypes ? e : e.fullpath());
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-        return results;
-    }
-    /**
-     * Support for `for await`
-     *
-     * Alias for {@link PathScurryBase.iterate}
-     *
-     * Note: As of Node 19, this is very slow, compared to other methods of
-     * walking.  Consider using {@link PathScurryBase.stream} if memory overhead
-     * and backpressure are concerns, or {@link PathScurryBase.walk} if not.
-     */
-    [Symbol.asyncIterator]() {
-        return this.iterate();
-    }
-    iterate(entry = this.cwd, options = {}) {
-        // iterating async over the stream is significantly more performant,
-        // especially in the warm-cache scenario, because it buffers up directory
-        // entries in the background instead of waiting for a yield for each one.
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            options = entry;
-            entry = this.cwd;
-        }
-        return this.stream(entry, options)[Symbol.asyncIterator]();
-    }
-    /**
-     * Iterating over a PathScurry performs a synchronous walk.
-     *
-     * Alias for {@link PathScurryBase.iterateSync}
-     */
-    [Symbol.iterator]() {
-        return this.iterateSync();
-    }
-    *iterateSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        if (!filter || filter(entry)) {
-            yield withFileTypes ? entry : entry.fullpath();
-        }
-        const dirs = new Set([entry]);
-        for (const dir of dirs) {
-            const entries = dir.readdirSync();
-            for (const e of entries) {
-                if (!filter || filter(e)) {
-                    yield withFileTypes ? e : e.fullpath();
-                }
-                let r = e;
-                if (e.isSymbolicLink()) {
-                    if (!(follow && (r = e.realpathSync())))
-                        continue;
-                    if (r.isUnknown())
-                        r.lstatSync();
-                }
-                if (r.shouldWalk(dirs, walkFilter)) {
-                    dirs.add(r);
-                }
-            }
-        }
-    }
-    stream(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new minipass_1.Minipass({ objectMode: true });
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const dirs = new Set();
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const onReaddir = (er, entries, didRealpaths = false) => {
-                    /* c8 ignore start */
-                    if (er)
-                        return results.emit('error', er);
-                    /* c8 ignore stop */
-                    if (follow && !didRealpaths) {
-                        const promises = [];
-                        for (const e of entries) {
-                            if (e.isSymbolicLink()) {
-                                promises.push(e
-                                    .realpath()
-                                    .then((r) => r?.isUnknown() ? r.lstat() : r));
-                            }
-                        }
-                        if (promises.length) {
-                            Promise.all(promises).then(() => onReaddir(null, entries, true));
-                            return;
-                        }
-                    }
-                    for (const e of entries) {
-                        if (e && (!filter || filter(e))) {
-                            if (!results.write(withFileTypes ? e : e.fullpath())) {
-                                paused = true;
-                            }
-                        }
-                    }
-                    processing--;
-                    for (const e of entries) {
-                        const r = e.realpathCached() || e;
-                        if (r.shouldWalk(dirs, walkFilter)) {
-                            queue.push(r);
-                        }
-                    }
-                    if (paused && !results.flowing) {
-                        results.once('drain', process);
-                    }
-                    else if (!sync) {
-                        process();
-                    }
-                };
-                // zalgo containment
-                let sync = true;
-                dir.readdirCB(onReaddir, true);
-                sync = false;
-            }
-        };
-        process();
-        return results;
-    }
-    streamSync(entry = this.cwd, opts = {}) {
-        if (typeof entry === 'string') {
-            entry = this.cwd.resolve(entry);
-        }
-        else if (!(entry instanceof PathBase)) {
-            opts = entry;
-            entry = this.cwd;
-        }
-        const { withFileTypes = true, follow = false, filter, walkFilter, } = opts;
-        const results = new minipass_1.Minipass({ objectMode: true });
-        const dirs = new Set();
-        if (!filter || filter(entry)) {
-            results.write(withFileTypes ? entry : entry.fullpath());
-        }
-        const queue = [entry];
-        let processing = 0;
-        const process = () => {
-            let paused = false;
-            while (!paused) {
-                const dir = queue.shift();
-                if (!dir) {
-                    if (processing === 0)
-                        results.end();
-                    return;
-                }
-                processing++;
-                dirs.add(dir);
-                const entries = dir.readdirSync();
-                for (const e of entries) {
-                    if (!filter || filter(e)) {
-                        if (!results.write(withFileTypes ? e : e.fullpath())) {
-                            paused = true;
-                        }
-                    }
-                }
-                processing--;
-                for (const e of entries) {
-                    let r = e;
-                    if (e.isSymbolicLink()) {
-                        if (!(follow && (r = e.realpathSync())))
-                            continue;
-                        if (r.isUnknown())
-                            r.lstatSync();
-                    }
-                    if (r.shouldWalk(dirs, walkFilter)) {
-                        queue.push(r);
-                    }
-                }
-            }
-            if (paused && !results.flowing)
-                results.once('drain', process);
-        };
-        process();
-        return results;
-    }
-    chdir(path = this.cwd) {
-        const oldCwd = this.cwd;
-        this.cwd = typeof path === 'string' ? this.cwd.resolve(path) : path;
-        this.cwd[setAsCwd](oldCwd);
-    }
-}
-exports.PathScurryBase = PathScurryBase;
-/**
- * Windows implementation of {@link PathScurryBase}
- *
- * Defaults to case insensitve, uses `'\\'` to generate path strings.  Uses
- * {@link PathWin32} for Path objects.
- */
-class PathScurryWin32 extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '\\';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, node_path_1.win32, '\\', { ...opts, nocase });
-        this.nocase = nocase;
-        for (let p = this.cwd; p; p = p.parent) {
-            p.nocase = this.nocase;
-        }
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(dir) {
-        // if the path starts with a single separator, it's not a UNC, and we'll
-        // just get separator as the root, and driveFromUNC will return \
-        // In that case, mount \ on the root from the cwd.
-        return node_path_1.win32.parse(dir).root.toUpperCase();
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathWin32(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return (p.startsWith('/') || p.startsWith('\\') || /^[a-z]:(\/|\\)/i.test(p));
-    }
-}
-exports.PathScurryWin32 = PathScurryWin32;
-/**
- * {@link PathScurryBase} implementation for all posix systems other than Darwin.
- *
- * Defaults to case-sensitive matching, uses `'/'` to generate path strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-class PathScurryPosix extends PathScurryBase {
-    /**
-     * separator for generating path strings
-     */
-    sep = '/';
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = false } = opts;
-        super(cwd, node_path_1.posix, '/', { ...opts, nocase });
-        this.nocase = nocase;
-    }
-    /**
-     * @internal
-     */
-    parseRootPath(_dir) {
-        return '/';
-    }
-    /**
-     * @internal
-     */
-    newRoot(fs) {
-        return new PathPosix(this.rootPath, IFDIR, undefined, this.roots, this.nocase, this.childrenCache(), { fs });
-    }
-    /**
-     * Return true if the provided path string is an absolute path
-     */
-    isAbsolute(p) {
-        return p.startsWith('/');
-    }
-}
-exports.PathScurryPosix = PathScurryPosix;
-/**
- * {@link PathScurryBase} implementation for Darwin (macOS) systems.
- *
- * Defaults to case-insensitive matching, uses `'/'` for generating path
- * strings.
- *
- * Uses {@link PathPosix} for Path objects.
- */
-class PathScurryDarwin extends PathScurryPosix {
-    constructor(cwd = process.cwd(), opts = {}) {
-        const { nocase = true } = opts;
-        super(cwd, { ...opts, nocase });
-    }
-}
-exports.PathScurryDarwin = PathScurryDarwin;
-/**
- * Default {@link PathBase} implementation for the current platform.
- *
- * {@link PathWin32} on Windows systems, {@link PathPosix} on all others.
- */
-exports.Path = process.platform === 'win32' ? PathWin32 : PathPosix;
-/**
- * Default {@link PathScurryBase} implementation for the current platform.
- *
- * {@link PathScurryWin32} on Windows systems, {@link PathScurryDarwin} on
- * Darwin (macOS) systems, {@link PathScurryPosix} on all others.
- */
-exports.PathScurry = process.platform === 'win32' ? PathScurryWin32
-    : process.platform === 'darwin' ? PathScurryDarwin
-        : PathScurryPosix;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/cacache/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/cacache/node_modules/path-scurry/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/cacache/node_modules/path-scurry/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/cacache/node_modules/path-scurry/dist/esm/package.json b/node_modules/cacache/node_modules/path-scurry/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/cacache/node_modules/path-scurry/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/glob/dist/esm/bin.mjs b/node_modules/glob/dist/esm/bin.mjs
index 5c7bf1e925610..553bb79303d90 100755
--- a/node_modules/glob/dist/esm/bin.mjs
+++ b/node_modules/glob/dist/esm/bin.mjs
@@ -209,8 +209,10 @@ const j = jack({
         description: `Output a huge amount of noisy debug information about
                     patterns as they are parsed and used to match files.`,
     },
-})
-    .flag({
+    version: {
+        short: 'V',
+        description: `Output the version (${version})`,
+    },
     help: {
         short: 'h',
         description: 'Show this usage information',
@@ -218,6 +220,10 @@ const j = jack({
 });
 try {
     const { positionals, values } = j.parse();
+    if (values.version) {
+        console.log(version);
+        process.exit(0);
+    }
     if (values.help) {
         console.log(j.usage());
         process.exit(0);
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/LICENSE b/node_modules/glob/node_modules/minimatch/LICENSE
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/LICENSE
rename to node_modules/glob/node_modules/minimatch/LICENSE
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/ast.js
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/brace-expressions.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/brace-expressions.js
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/brace-expressions.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/escape.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/escape.js
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/escape.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/index.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/index.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/index.js
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/index.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/package.json b/node_modules/glob/node_modules/minimatch/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/package.json
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/package.json
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/unescape.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/commonjs/unescape.js
rename to node_modules/glob/node_modules/minimatch/dist/commonjs/unescape.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/glob/node_modules/minimatch/dist/esm/assert-valid-pattern.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/assert-valid-pattern.js
rename to node_modules/glob/node_modules/minimatch/dist/esm/assert-valid-pattern.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/ast.js b/node_modules/glob/node_modules/minimatch/dist/esm/ast.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/ast.js
rename to node_modules/glob/node_modules/minimatch/dist/esm/ast.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/glob/node_modules/minimatch/dist/esm/brace-expressions.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/brace-expressions.js
rename to node_modules/glob/node_modules/minimatch/dist/esm/brace-expressions.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/escape.js b/node_modules/glob/node_modules/minimatch/dist/esm/escape.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/escape.js
rename to node_modules/glob/node_modules/minimatch/dist/esm/escape.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/index.js b/node_modules/glob/node_modules/minimatch/dist/esm/index.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/index.js
rename to node_modules/glob/node_modules/minimatch/dist/esm/index.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/package.json b/node_modules/glob/node_modules/minimatch/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/package.json
rename to node_modules/glob/node_modules/minimatch/dist/esm/package.json
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/unescape.js b/node_modules/glob/node_modules/minimatch/dist/esm/unescape.js
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/dist/esm/unescape.js
rename to node_modules/glob/node_modules/minimatch/dist/esm/unescape.js
diff --git a/node_modules/@npmcli/package-json/node_modules/minimatch/package.json b/node_modules/glob/node_modules/minimatch/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/minimatch/package.json
rename to node_modules/glob/node_modules/minimatch/package.json
diff --git a/node_modules/glob/package.json b/node_modules/glob/package.json
index 6d4893b5f327b..7be2c53bd5c9f 100644
--- a/node_modules/glob/package.json
+++ b/node_modules/glob/package.json
@@ -1,11 +1,8 @@
 {
   "author": "Isaac Z. Schlueter  (https://blog.izs.me/)",
-  "publishConfig": {
-    "tag": "legacy-v10"
-  },
   "name": "glob",
   "description": "the most correct and second fastest glob implementation in JavaScript",
-  "version": "10.4.5",
+  "version": "11.0.3",
   "type": "module",
   "tshy": {
     "main": true,
@@ -40,7 +37,7 @@
   "scripts": {
     "preversion": "npm test",
     "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
+    "prepublishOnly": "npm run benchclean; git push origin --follow-tags",
     "prepare": "tshy",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
@@ -48,7 +45,6 @@
     "snap": "tap",
     "format": "prettier --write . --log-level warn",
     "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts",
-    "prepublish": "npm run benchclean",
     "profclean": "rm -f v8.log profile.txt",
     "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts",
     "prebench": "npm run prepare",
@@ -70,23 +66,22 @@
     "endOfLine": "lf"
   },
   "dependencies": {
-    "foreground-child": "^3.1.0",
-    "jackspeak": "^3.1.2",
-    "minimatch": "^9.0.4",
+    "foreground-child": "^3.3.1",
+    "jackspeak": "^4.1.1",
+    "minimatch": "^10.0.3",
     "minipass": "^7.1.2",
     "package-json-from-dist": "^1.0.0",
-    "path-scurry": "^1.11.1"
+    "path-scurry": "^2.0.0"
   },
   "devDependencies": {
-    "@types/node": "^20.11.30",
-    "memfs": "^3.4.13",
+    "@types/node": "^24.0.1",
+    "memfs": "^4.17.2",
     "mkdirp": "^3.0.1",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.7",
-    "sync-content": "^1.0.2",
-    "tap": "^19.0.0",
-    "tshy": "^1.14.0",
-    "typedoc": "^0.25.12"
+    "prettier": "^3.5.3",
+    "rimraf": "^6.0.1",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
   },
   "tap": {
     "before": "test/00-setup.ts"
@@ -95,5 +90,8 @@
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
   },
+  "engines": {
+    "node": "20 || >=22"
+  },
   "module": "./dist/esm/index.js"
 }
diff --git a/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/jackspeak/dist/commonjs/index.js
index f7fc9cb69a2af..543412746cc8f 100644
--- a/node_modules/jackspeak/dist/commonjs/index.js
+++ b/node_modules/jackspeak/dist/commonjs/index.js
@@ -3,23 +3,61 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigType = void 0;
+exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0;
 const node_util_1 = require("node:util");
-const parse_args_js_1 = require("./parse-args.js");
 // it's a tiny API, just cast it inline, it's fine
 //@ts-ignore
 const cliui_1 = __importDefault(require("@isaacs/cliui"));
 const node_path_1 = require("node:path");
-const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80);
+const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+exports.isConfigType = isConfigType;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isValidOption = (v, vo) => !!vo &&
+    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based only
+ * on its `type` and `multiple` property
+ */
+const isConfigOptionOfType = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    (0, exports.isConfigType)(o.type) &&
+    o.type === type &&
+    !!o.multiple === multi;
+exports.isConfigOptionOfType = isConfigOptionOfType;
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based on
+ * it having all valid properties
+ */
+const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi));
+exports.isConfigOption = isConfigOption;
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+const width = Math.min(process?.stdout?.columns ?? 80, 80);
 // indentation spaces from heading level
 const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => {
-    return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-        .join(' ')
-        .trim()
-        .toUpperCase()
-        .replace(/ /g, '_');
-};
+const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+    .join(' ')
+    .trim()
+    .toUpperCase()
+    .replace(/ /g, '_');
 const toEnvVal = (value, delim = '\n') => {
     const str = typeof value === 'string' ? value
         : typeof value === 'boolean' ?
@@ -30,7 +68,7 @@ const toEnvVal = (value, delim = '\n') => {
                     value.map((v) => toEnvVal(v)).join(delim)
                     : /* c8 ignore start */ undefined;
     if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`);
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
     }
     /* c8 ignore stop */
     return str;
@@ -41,256 +79,144 @@ const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
     : type === 'string' ? env
         : type === 'boolean' ? env === '1'
             : +env.trim());
-const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-exports.isConfigType = isConfigType;
 const undefOrType = (v, t) => v === undefined || typeof v === t;
 const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
-const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v);
 // print the value type, for error message reporting
 const valueType = (v) => typeof v === 'string' ? 'string'
     : typeof v === 'boolean' ? 'boolean'
         : typeof v === 'number' ? 'number'
             : Array.isArray(v) ?
-                joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]'
+                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
                 : `${v.type}${v.multiple ? '[]' : ''}`;
 const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
     types[0]
     : `(${types.join('|')})`;
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isConfigOption = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    (0, exports.isConfigType)(o.type) &&
-    o.type === type &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi)) &&
-    !!o.multiple === multi;
-exports.isConfigOption = isConfigOption;
-function num(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'number', false)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: def,
-                wanted: 'number',
-            },
-        });
-    }
-    if (!undefOrTypeArray(validOptions, 'number')) {
-        throw new TypeError('invalid validOptions', {
-            cause: {
-                found: validOptions,
-                wanted: 'number[]',
-            },
-        });
-    }
-    const validate = val ?
-        val
-        : undefined;
-    return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'number',
-        multiple: false,
-    };
-}
-function numList(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'number', true)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: def,
-                wanted: 'number[]',
-            },
-        });
+const validateFieldMeta = (field, fieldMeta) => {
+    if (fieldMeta) {
+        if (field.type !== undefined && field.type !== fieldMeta.type) {
+            throw new TypeError(`invalid type`, {
+                cause: {
+                    found: field.type,
+                    wanted: [fieldMeta.type, undefined],
+                },
+            });
+        }
+        if (field.multiple !== undefined &&
+            !!field.multiple !== fieldMeta.multiple) {
+            throw new TypeError(`invalid multiple`, {
+                cause: {
+                    found: field.multiple,
+                    wanted: [fieldMeta.multiple, undefined],
+                },
+            });
+        }
+        return fieldMeta;
     }
-    if (!undefOrTypeArray(validOptions, 'number')) {
-        throw new TypeError('invalid validOptions', {
+    if (!(0, exports.isConfigType)(field.type)) {
+        throw new TypeError(`invalid type`, {
             cause: {
-                found: validOptions,
-                wanted: 'number[]',
+                found: field.type,
+                wanted: ['string', 'number', 'boolean'],
             },
         });
     }
-    const validate = val ?
-        val
-        : undefined;
     return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'number',
-        multiple: true,
+        type: field.type,
+        multiple: !!field.multiple,
     };
-}
-function opt(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'string', false)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: def,
-                wanted: 'string',
-            },
-        });
-    }
-    if (!undefOrTypeArray(validOptions, 'string')) {
-        throw new TypeError('invalid validOptions', {
-            cause: {
-                found: validOptions,
-                wanted: 'string[]',
-            },
-        });
-    }
-    const validate = val ?
-        val
-        : undefined;
-    return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'string',
-        multiple: false,
+};
+const validateField = (o, type, multiple) => {
+    const validateValidOptions = (def, validOptions) => {
+        if (!undefOrTypeArray(validOptions, type)) {
+            throw new TypeError('invalid validOptions', {
+                cause: {
+                    found: validOptions,
+                    wanted: valueType({ type, multiple: true }),
+                },
+            });
+        }
+        if (def !== undefined && validOptions !== undefined) {
+            const valid = Array.isArray(def) ?
+                def.every(v => validOptions.includes(v))
+                : validOptions.includes(def);
+            if (!valid) {
+                throw new TypeError('invalid default value not in validOptions', {
+                    cause: {
+                        found: def,
+                        wanted: validOptions,
+                    },
+                });
+            }
+        }
     };
-}
-function optList(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'string', true)) {
+    if (o.default !== undefined &&
+        !isValidValue(o.default, type, multiple)) {
         throw new TypeError('invalid default value', {
             cause: {
-                found: def,
-                wanted: 'string[]',
+                found: o.default,
+                wanted: valueType({ type, multiple }),
             },
         });
     }
-    if (!undefOrTypeArray(validOptions, 'string')) {
-        throw new TypeError('invalid validOptions', {
-            cause: {
-                found: validOptions,
-                wanted: 'string[]',
-            },
-        });
+    if ((0, exports.isConfigOptionOfType)(o, 'number', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'number', true)) {
+        validateValidOptions(o.default, o.validOptions);
     }
-    const validate = val ?
-        val
-        : undefined;
-    return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'string',
-        multiple: true,
-    };
-}
-function flag(o = {}) {
-    const { hint, default: def, validate: val, ...rest } = o;
-    delete rest.validOptions;
-    if (def !== undefined && !isValidValue(def, 'boolean', false)) {
-        throw new TypeError('invalid default value');
-    }
-    const validate = val ?
-        val
-        : undefined;
-    if (hint !== undefined) {
-        throw new TypeError('cannot provide hint for flag');
+    else if ((0, exports.isConfigOptionOfType)(o, 'string', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'string', true)) {
+        validateValidOptions(o.default, o.validOptions);
     }
-    return {
-        ...rest,
-        default: def,
-        validate,
-        type: 'boolean',
-        multiple: false,
-    };
-}
-function flagList(o = {}) {
-    const { hint, default: def, validate: val, ...rest } = o;
-    delete rest.validOptions;
-    if (def !== undefined && !isValidValue(def, 'boolean', true)) {
-        throw new TypeError('invalid default value');
-    }
-    const validate = val ?
-        val
-        : undefined;
-    if (hint !== undefined) {
-        throw new TypeError('cannot provide hint for flag list');
+    else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) ||
+        (0, exports.isConfigOptionOfType)(o, 'boolean', true)) {
+        if (o.hint !== undefined) {
+            throw new TypeError('cannot provide hint for flag');
+        }
+        if (o.validOptions !== undefined) {
+            throw new TypeError('cannot provide validOptions for flag');
+        }
     }
-    return {
-        ...rest,
-        default: def,
-        validate,
-        type: 'boolean',
-        multiple: true,
-    };
-}
+    return o;
+};
 const toParseArgsOptionsConfig = (options) => {
-    const c = {};
-    for (const longOption in options) {
-        const config = options[longOption];
-        /* c8 ignore start */
-        if (!config) {
-            throw new Error('config must be an object: ' + longOption);
-        }
-        /* c8 ignore start */
-        if ((0, exports.isConfigOption)(config, 'number', true)) {
-            c[longOption] = {
-                type: 'string',
-                multiple: true,
-                default: config.default?.map(c => String(c)),
-            };
-        }
-        else if ((0, exports.isConfigOption)(config, 'number', false)) {
-            c[longOption] = {
-                type: 'string',
-                multiple: false,
-                default: config.default === undefined ?
-                    undefined
-                    : String(config.default),
-            };
+    return Object.entries(options).reduce((acc, [longOption, o]) => {
+        const p = {
+            type: 'string',
+            multiple: !!o.multiple,
+            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
+        };
+        const setNoBool = () => {
+            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
+                acc[`no-${longOption}`] = {
+                    type: 'boolean',
+                    multiple: !!o.multiple,
+                };
+            }
+        };
+        const setDefault = (def, fn) => {
+            if (def !== undefined) {
+                p.default = fn(def);
+            }
+        };
+        if ((0, exports.isConfigOption)(o, 'number', false)) {
+            setDefault(o.default, String);
         }
-        else {
-            const conf = config;
-            c[longOption] = {
-                type: conf.type,
-                multiple: !!conf.multiple,
-                default: conf.default,
-            };
-        }
-        const clo = c[longOption];
-        if (typeof config.short === 'string') {
-            clo.short = config.short;
-        }
-        if (config.type === 'boolean' &&
-            !longOption.startsWith('no-') &&
-            !options[`no-${longOption}`]) {
-            c[`no-${longOption}`] = {
-                type: 'boolean',
-                multiple: config.multiple,
-            };
-        }
-    }
-    return c;
+        else if ((0, exports.isConfigOption)(o, 'number', true)) {
+            setDefault(o.default, d => d.map(v => String(v)));
+        }
+        else if ((0, exports.isConfigOption)(o, 'string', false) ||
+            (0, exports.isConfigOption)(o, 'string', true)) {
+            setDefault(o.default, v => v);
+        }
+        else if ((0, exports.isConfigOption)(o, 'boolean', false) ||
+            (0, exports.isConfigOption)(o, 'boolean', true)) {
+            p.type = 'boolean';
+            setDefault(o.default, v => v);
+            setNoBool();
+        }
+        acc[longOption] = p;
+        return acc;
+    }, {});
 };
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
 /**
  * Class returned by the {@link jack} function and all configuration
  * definition methods.  This is what gets chained together.
@@ -317,6 +243,30 @@ class Jack {
         this.#configSet = Object.create(null);
         this.#shorts = Object.create(null);
     }
+    /**
+     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
+     * but also including `description` and `short` fields, if set.
+     */
+    get definitions() {
+        return this.#configSet;
+    }
+    /** map of `{ <short>: <long> }` strings for each short name defined */
+    get shorts() {
+        return this.#shorts;
+    }
+    /**
+     * options passed to the {@link Jack} constructor
+     */
+    get jackOptions() {
+        return this.#options;
+    }
+    /**
+     * the data used to generate {@link Jack#usage} and
+     * {@link Jack#usageMarkdown} content.
+     */
+    get usageFields() {
+        return this.#fields;
+    }
     /**
      * Set the default value (which will still be overridden by env or cli)
      * as if from a parsed config file. The optional `source` param, if
@@ -328,16 +278,13 @@ class Jack {
             this.validate(values);
         }
         catch (er) {
-            const e = er;
-            if (source && e && typeof e === 'object') {
-                if (e.cause && typeof e.cause === 'object') {
-                    Object.assign(e.cause, { path: source });
-                }
-                else {
-                    e.cause = { path: source };
-                }
+            if (source && er instanceof Error) {
+                /* c8 ignore next */
+                const cause = typeof er.cause === 'object' ? er.cause : {};
+                er.cause = { ...cause, path: source };
+                Error.captureStackTrace(er, this.setConfigValues);
             }
-            throw e;
+            throw er;
         }
         for (const [field, value] of Object.entries(values)) {
             const my = this.#configSet[field];
@@ -345,7 +292,10 @@ class Jack {
             /* c8 ignore start */
             if (!my) {
                 throw new Error('unexpected field in config set: ' + field, {
-                    cause: { found: field },
+                    cause: {
+                        code: 'JACKSPEAK',
+                        found: field,
+                    },
                 });
             }
             /* c8 ignore stop */
@@ -400,10 +350,9 @@ class Jack {
         if (args === process.argv) {
             args = args.slice(process._eval !== undefined ? 1 : 2);
         }
-        const options = toParseArgsOptionsConfig(this.#configSet);
-        const result = (0, parse_args_js_1.parseArgs)({
+        const result = (0, node_util_1.parseArgs)({
             args,
-            options,
+            options: toParseArgsOptionsConfig(this.#configSet),
             // always strict, but using our own logic
             strict: false,
             allowPositionals: this.#allowPositionals,
@@ -443,6 +392,7 @@ class Jack {
                         `place it at the end of the command after '--', as in ` +
                         `'-- ${token.rawName}'`, {
                         cause: {
+                            code: 'JACKSPEAK',
                             found: token.rawName + (token.value ? `=${token.value}` : ''),
                         },
                     });
@@ -452,6 +402,7 @@ class Jack {
                         if (my.type !== 'boolean') {
                             throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
                                 cause: {
+                                    code: 'JACKSPEAK',
                                     name: token.rawName,
                                     wanted: valueType(my),
                                 },
@@ -461,7 +412,7 @@ class Jack {
                     }
                     else {
                         if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } });
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
                         }
                         if (my.type === 'string') {
                             value = token.value;
@@ -472,6 +423,7 @@ class Jack {
                                 throw new Error(`Invalid value '${token.value}' provided for ` +
                                     `'${token.rawName}' option, expected number`, {
                                     cause: {
+                                        code: 'JACKSPEAK',
                                         name: token.rawName,
                                         found: token.value,
                                         wanted: 'number',
@@ -496,15 +448,12 @@ class Jack {
         for (const [field, value] of Object.entries(p.values)) {
             const valid = this.#configSet[field]?.validate;
             const validOptions = this.#configSet[field]?.validOptions;
-            let cause;
-            if (validOptions && !isValidOption(value, validOptions)) {
-                cause = { name: field, found: value, validOptions: validOptions };
-            }
-            if (valid && !valid(value)) {
-                cause = cause || { name: field, found: value };
-            }
+            const cause = validOptions && !isValidOption(value, validOptions) ?
+                { name: field, found: value, validOptions }
+                : valid && !valid(value) ? { name: field, found: value }
+                    : undefined;
             if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause });
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
             }
         }
         return p;
@@ -520,7 +469,7 @@ class Jack {
         // recurse so we get the core config key we care about.
         this.#noNoFields(yes, val, s);
         if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } });
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
         }
     }
     /**
@@ -530,7 +479,7 @@ class Jack {
     validate(o) {
         if (!o || typeof o !== 'object') {
             throw new Error('Invalid config: not an object', {
-                cause: { found: o },
+                cause: { code: 'JACKSPEAK', found: o },
             });
         }
         const opts = o;
@@ -543,33 +492,27 @@ class Jack {
             const config = this.#configSet[field];
             if (!config) {
                 throw new Error(`Unknown config option: ${field}`, {
-                    cause: { found: field },
+                    cause: { code: 'JACKSPEAK', found: field },
                 });
             }
             if (!isValidValue(value, config.type, !!config.multiple)) {
                 throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
                     cause: {
+                        code: 'JACKSPEAK',
                         name: field,
                         found: value,
                         wanted: valueType(config),
                     },
                 });
             }
-            let cause;
-            if (config.validOptions &&
-                !isValidOption(value, config.validOptions)) {
-                cause = {
-                    name: field,
-                    found: value,
-                    validOptions: config.validOptions,
-                };
-            }
-            if (config.validate && !config.validate(value)) {
-                cause = cause || { name: field, found: value };
-            }
+            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
+                { name: field, found: value, validOptions: config.validOptions }
+                : config.validate && !config.validate(value) ?
+                    { name: field, found: value }
+                    : undefined;
             if (cause) {
                 throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause,
+                    cause: { ...cause, code: 'JACKSPEAK' },
                 });
             }
         }
@@ -603,37 +546,37 @@ class Jack {
      * Add one or more number fields.
      */
     num(fields) {
-        return this.#addFields(fields, num);
+        return this.#addFieldsWith(fields, 'number', false);
     }
     /**
      * Add one or more multiple number fields.
      */
     numList(fields) {
-        return this.#addFields(fields, numList);
+        return this.#addFieldsWith(fields, 'number', true);
     }
     /**
      * Add one or more string option fields.
      */
     opt(fields) {
-        return this.#addFields(fields, opt);
+        return this.#addFieldsWith(fields, 'string', false);
     }
     /**
      * Add one or more multiple string option fields.
      */
     optList(fields) {
-        return this.#addFields(fields, optList);
+        return this.#addFieldsWith(fields, 'string', true);
     }
     /**
      * Add one or more flag fields.
      */
     flag(fields) {
-        return this.#addFields(fields, flag);
+        return this.#addFieldsWith(fields, 'boolean', false);
     }
     /**
      * Add one or more multiple flag fields.
      */
     flagList(fields) {
-        return this.#addFields(fields, flagList);
+        return this.#addFieldsWith(fields, 'boolean', true);
     }
     /**
      * Generic field definition method. Similar to flag/flagList/number/etc,
@@ -641,29 +584,22 @@ class Jack {
      * fields on each one, or Jack won't know how to define them.
      */
     addFields(fields) {
-        const next = this;
-        for (const [name, field] of Object.entries(fields)) {
-            this.#validateName(name, field);
-            next.#fields.push({
-                type: 'config',
-                name,
-                value: field,
-            });
-        }
-        Object.assign(next.#configSet, fields);
-        return next;
+        return this.#addFields(this, fields);
     }
-    #addFields(fields, fn) {
-        const next = this;
+    #addFieldsWith(fields, type, multiple) {
+        return this.#addFields(this, fields, {
+            type,
+            multiple,
+        });
+    }
+    #addFields(next, fields, opt) {
         Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
             this.#validateName(name, field);
-            const option = fn(field);
-            next.#fields.push({
-                type: 'config',
-                name,
-                value: option,
-            });
-            return [name, option];
+            const { type, multiple } = validateFieldMeta(field, opt);
+            const value = { ...field, type, multiple };
+            validateField(value, type, multiple);
+            next.#fields.push({ type: 'config', name, value });
+            return [name, value];
         })));
         return next;
     }
@@ -699,6 +635,7 @@ class Jack {
         if (this.#usage)
             return this.#usage;
         let headingLevel = 1;
+        //@ts-ignore
         const ui = (0, cliui_1.default)({ width });
         const first = this.#fields[0];
         let start = first?.type === 'heading' ? 1 : 0;
@@ -941,6 +878,11 @@ class Jack {
     }
 }
 exports.Jack = Jack;
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+const jack = (options = {}) => new Jack(options);
+exports.jack = jack;
 // Unwrap and un-indent, so we can wrap description
 // strings however makes them look nice in the code.
 const normalize = (s, pre = false) => {
@@ -1002,9 +944,4 @@ const normalizeOneLine = (s, pre = false) => {
         .trim();
     return pre ? `\`${n}\`` : n;
 };
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-const jack = (options = {}) => new Jack(options);
-exports.jack = jack;
 //# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/jackspeak/dist/esm/index.js b/node_modules/jackspeak/dist/esm/index.js
index 78fdfa8155472..b959f5126423c 100644
--- a/node_modules/jackspeak/dist/esm/index.js
+++ b/node_modules/jackspeak/dist/esm/index.js
@@ -1,19 +1,54 @@
-import { inspect } from 'node:util';
-import { parseArgs } from './parse-args.js';
+import { inspect, parseArgs, } from 'node:util';
 // it's a tiny API, just cast it inline, it's fine
 //@ts-ignore
 import cliui from '@isaacs/cliui';
 import { basename } from 'node:path';
-const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80);
+export const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
+    }
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isValidOption = (v, vo) => !!vo &&
+    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based only
+ * on its `type` and `multiple` property
+ */
+export const isConfigOptionOfType = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    isConfigType(o.type) &&
+    o.type === type &&
+    !!o.multiple === multi;
+/**
+ * Determine whether an unknown object is a {@link ConfigOption} based on
+ * it having all valid properties
+ */
+export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi));
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
+const width = Math.min(process?.stdout?.columns ?? 80, 80);
 // indentation spaces from heading level
 const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => {
-    return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-        .join(' ')
-        .trim()
-        .toUpperCase()
-        .replace(/ /g, '_');
-};
+const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+    .join(' ')
+    .trim()
+    .toUpperCase()
+    .replace(/ /g, '_');
 const toEnvVal = (value, delim = '\n') => {
     const str = typeof value === 'string' ? value
         : typeof value === 'boolean' ?
@@ -24,7 +59,7 @@ const toEnvVal = (value, delim = '\n') => {
                     value.map((v) => toEnvVal(v)).join(delim)
                     : /* c8 ignore start */ undefined;
     if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`);
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
     }
     /* c8 ignore stop */
     return str;
@@ -35,254 +70,144 @@ const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
     : type === 'string' ? env
         : type === 'boolean' ? env === '1'
             : +env.trim());
-export const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
 const undefOrType = (v, t) => v === undefined || typeof v === t;
 const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
-const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v);
 // print the value type, for error message reporting
 const valueType = (v) => typeof v === 'string' ? 'string'
     : typeof v === 'boolean' ? 'boolean'
         : typeof v === 'number' ? 'number'
             : Array.isArray(v) ?
-                joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]'
+                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
                 : `${v.type}${v.multiple ? '[]' : ''}`;
 const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
     types[0]
     : `(${types.join('|')})`;
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-export const isConfigOption = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    isConfigType(o.type) &&
-    o.type === type &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi)) &&
-    !!o.multiple === multi;
-function num(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'number', false)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: def,
-                wanted: 'number',
-            },
-        });
-    }
-    if (!undefOrTypeArray(validOptions, 'number')) {
-        throw new TypeError('invalid validOptions', {
-            cause: {
-                found: validOptions,
-                wanted: 'number[]',
-            },
-        });
-    }
-    const validate = val ?
-        val
-        : undefined;
-    return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'number',
-        multiple: false,
-    };
-}
-function numList(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'number', true)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: def,
-                wanted: 'number[]',
-            },
-        });
+const validateFieldMeta = (field, fieldMeta) => {
+    if (fieldMeta) {
+        if (field.type !== undefined && field.type !== fieldMeta.type) {
+            throw new TypeError(`invalid type`, {
+                cause: {
+                    found: field.type,
+                    wanted: [fieldMeta.type, undefined],
+                },
+            });
+        }
+        if (field.multiple !== undefined &&
+            !!field.multiple !== fieldMeta.multiple) {
+            throw new TypeError(`invalid multiple`, {
+                cause: {
+                    found: field.multiple,
+                    wanted: [fieldMeta.multiple, undefined],
+                },
+            });
+        }
+        return fieldMeta;
     }
-    if (!undefOrTypeArray(validOptions, 'number')) {
-        throw new TypeError('invalid validOptions', {
+    if (!isConfigType(field.type)) {
+        throw new TypeError(`invalid type`, {
             cause: {
-                found: validOptions,
-                wanted: 'number[]',
+                found: field.type,
+                wanted: ['string', 'number', 'boolean'],
             },
         });
     }
-    const validate = val ?
-        val
-        : undefined;
     return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'number',
-        multiple: true,
+        type: field.type,
+        multiple: !!field.multiple,
     };
-}
-function opt(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'string', false)) {
-        throw new TypeError('invalid default value', {
-            cause: {
-                found: def,
-                wanted: 'string',
-            },
-        });
-    }
-    if (!undefOrTypeArray(validOptions, 'string')) {
-        throw new TypeError('invalid validOptions', {
-            cause: {
-                found: validOptions,
-                wanted: 'string[]',
-            },
-        });
-    }
-    const validate = val ?
-        val
-        : undefined;
-    return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'string',
-        multiple: false,
+};
+const validateField = (o, type, multiple) => {
+    const validateValidOptions = (def, validOptions) => {
+        if (!undefOrTypeArray(validOptions, type)) {
+            throw new TypeError('invalid validOptions', {
+                cause: {
+                    found: validOptions,
+                    wanted: valueType({ type, multiple: true }),
+                },
+            });
+        }
+        if (def !== undefined && validOptions !== undefined) {
+            const valid = Array.isArray(def) ?
+                def.every(v => validOptions.includes(v))
+                : validOptions.includes(def);
+            if (!valid) {
+                throw new TypeError('invalid default value not in validOptions', {
+                    cause: {
+                        found: def,
+                        wanted: validOptions,
+                    },
+                });
+            }
+        }
     };
-}
-function optList(o = {}) {
-    const { default: def, validate: val, validOptions, ...rest } = o;
-    if (def !== undefined && !isValidValue(def, 'string', true)) {
+    if (o.default !== undefined &&
+        !isValidValue(o.default, type, multiple)) {
         throw new TypeError('invalid default value', {
             cause: {
-                found: def,
-                wanted: 'string[]',
+                found: o.default,
+                wanted: valueType({ type, multiple }),
             },
         });
     }
-    if (!undefOrTypeArray(validOptions, 'string')) {
-        throw new TypeError('invalid validOptions', {
-            cause: {
-                found: validOptions,
-                wanted: 'string[]',
-            },
-        });
+    if (isConfigOptionOfType(o, 'number', false) ||
+        isConfigOptionOfType(o, 'number', true)) {
+        validateValidOptions(o.default, o.validOptions);
     }
-    const validate = val ?
-        val
-        : undefined;
-    return {
-        ...rest,
-        default: def,
-        validate,
-        validOptions,
-        type: 'string',
-        multiple: true,
-    };
-}
-function flag(o = {}) {
-    const { hint, default: def, validate: val, ...rest } = o;
-    delete rest.validOptions;
-    if (def !== undefined && !isValidValue(def, 'boolean', false)) {
-        throw new TypeError('invalid default value');
-    }
-    const validate = val ?
-        val
-        : undefined;
-    if (hint !== undefined) {
-        throw new TypeError('cannot provide hint for flag');
+    else if (isConfigOptionOfType(o, 'string', false) ||
+        isConfigOptionOfType(o, 'string', true)) {
+        validateValidOptions(o.default, o.validOptions);
     }
-    return {
-        ...rest,
-        default: def,
-        validate,
-        type: 'boolean',
-        multiple: false,
-    };
-}
-function flagList(o = {}) {
-    const { hint, default: def, validate: val, ...rest } = o;
-    delete rest.validOptions;
-    if (def !== undefined && !isValidValue(def, 'boolean', true)) {
-        throw new TypeError('invalid default value');
-    }
-    const validate = val ?
-        val
-        : undefined;
-    if (hint !== undefined) {
-        throw new TypeError('cannot provide hint for flag list');
+    else if (isConfigOptionOfType(o, 'boolean', false) ||
+        isConfigOptionOfType(o, 'boolean', true)) {
+        if (o.hint !== undefined) {
+            throw new TypeError('cannot provide hint for flag');
+        }
+        if (o.validOptions !== undefined) {
+            throw new TypeError('cannot provide validOptions for flag');
+        }
     }
-    return {
-        ...rest,
-        default: def,
-        validate,
-        type: 'boolean',
-        multiple: true,
-    };
-}
+    return o;
+};
 const toParseArgsOptionsConfig = (options) => {
-    const c = {};
-    for (const longOption in options) {
-        const config = options[longOption];
-        /* c8 ignore start */
-        if (!config) {
-            throw new Error('config must be an object: ' + longOption);
-        }
-        /* c8 ignore start */
-        if (isConfigOption(config, 'number', true)) {
-            c[longOption] = {
-                type: 'string',
-                multiple: true,
-                default: config.default?.map(c => String(c)),
-            };
-        }
-        else if (isConfigOption(config, 'number', false)) {
-            c[longOption] = {
-                type: 'string',
-                multiple: false,
-                default: config.default === undefined ?
-                    undefined
-                    : String(config.default),
-            };
+    return Object.entries(options).reduce((acc, [longOption, o]) => {
+        const p = {
+            type: 'string',
+            multiple: !!o.multiple,
+            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
+        };
+        const setNoBool = () => {
+            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
+                acc[`no-${longOption}`] = {
+                    type: 'boolean',
+                    multiple: !!o.multiple,
+                };
+            }
+        };
+        const setDefault = (def, fn) => {
+            if (def !== undefined) {
+                p.default = fn(def);
+            }
+        };
+        if (isConfigOption(o, 'number', false)) {
+            setDefault(o.default, String);
         }
-        else {
-            const conf = config;
-            c[longOption] = {
-                type: conf.type,
-                multiple: !!conf.multiple,
-                default: conf.default,
-            };
-        }
-        const clo = c[longOption];
-        if (typeof config.short === 'string') {
-            clo.short = config.short;
-        }
-        if (config.type === 'boolean' &&
-            !longOption.startsWith('no-') &&
-            !options[`no-${longOption}`]) {
-            c[`no-${longOption}`] = {
-                type: 'boolean',
-                multiple: config.multiple,
-            };
-        }
-    }
-    return c;
+        else if (isConfigOption(o, 'number', true)) {
+            setDefault(o.default, d => d.map(v => String(v)));
+        }
+        else if (isConfigOption(o, 'string', false) ||
+            isConfigOption(o, 'string', true)) {
+            setDefault(o.default, v => v);
+        }
+        else if (isConfigOption(o, 'boolean', false) ||
+            isConfigOption(o, 'boolean', true)) {
+            p.type = 'boolean';
+            setDefault(o.default, v => v);
+            setNoBool();
+        }
+        acc[longOption] = p;
+        return acc;
+    }, {});
 };
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
 /**
  * Class returned by the {@link jack} function and all configuration
  * definition methods.  This is what gets chained together.
@@ -309,6 +234,30 @@ export class Jack {
         this.#configSet = Object.create(null);
         this.#shorts = Object.create(null);
     }
+    /**
+     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
+     * but also including `description` and `short` fields, if set.
+     */
+    get definitions() {
+        return this.#configSet;
+    }
+    /** map of `{ <short>: <long> }` strings for each short name defined */
+    get shorts() {
+        return this.#shorts;
+    }
+    /**
+     * options passed to the {@link Jack} constructor
+     */
+    get jackOptions() {
+        return this.#options;
+    }
+    /**
+     * the data used to generate {@link Jack#usage} and
+     * {@link Jack#usageMarkdown} content.
+     */
+    get usageFields() {
+        return this.#fields;
+    }
     /**
      * Set the default value (which will still be overridden by env or cli)
      * as if from a parsed config file. The optional `source` param, if
@@ -320,16 +269,13 @@ export class Jack {
             this.validate(values);
         }
         catch (er) {
-            const e = er;
-            if (source && e && typeof e === 'object') {
-                if (e.cause && typeof e.cause === 'object') {
-                    Object.assign(e.cause, { path: source });
-                }
-                else {
-                    e.cause = { path: source };
-                }
+            if (source && er instanceof Error) {
+                /* c8 ignore next */
+                const cause = typeof er.cause === 'object' ? er.cause : {};
+                er.cause = { ...cause, path: source };
+                Error.captureStackTrace(er, this.setConfigValues);
             }
-            throw e;
+            throw er;
         }
         for (const [field, value] of Object.entries(values)) {
             const my = this.#configSet[field];
@@ -337,7 +283,10 @@ export class Jack {
             /* c8 ignore start */
             if (!my) {
                 throw new Error('unexpected field in config set: ' + field, {
-                    cause: { found: field },
+                    cause: {
+                        code: 'JACKSPEAK',
+                        found: field,
+                    },
                 });
             }
             /* c8 ignore stop */
@@ -392,10 +341,9 @@ export class Jack {
         if (args === process.argv) {
             args = args.slice(process._eval !== undefined ? 1 : 2);
         }
-        const options = toParseArgsOptionsConfig(this.#configSet);
         const result = parseArgs({
             args,
-            options,
+            options: toParseArgsOptionsConfig(this.#configSet),
             // always strict, but using our own logic
             strict: false,
             allowPositionals: this.#allowPositionals,
@@ -435,6 +383,7 @@ export class Jack {
                         `place it at the end of the command after '--', as in ` +
                         `'-- ${token.rawName}'`, {
                         cause: {
+                            code: 'JACKSPEAK',
                             found: token.rawName + (token.value ? `=${token.value}` : ''),
                         },
                     });
@@ -444,6 +393,7 @@ export class Jack {
                         if (my.type !== 'boolean') {
                             throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
                                 cause: {
+                                    code: 'JACKSPEAK',
                                     name: token.rawName,
                                     wanted: valueType(my),
                                 },
@@ -453,7 +403,7 @@ export class Jack {
                     }
                     else {
                         if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } });
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
                         }
                         if (my.type === 'string') {
                             value = token.value;
@@ -464,6 +414,7 @@ export class Jack {
                                 throw new Error(`Invalid value '${token.value}' provided for ` +
                                     `'${token.rawName}' option, expected number`, {
                                     cause: {
+                                        code: 'JACKSPEAK',
                                         name: token.rawName,
                                         found: token.value,
                                         wanted: 'number',
@@ -488,15 +439,12 @@ export class Jack {
         for (const [field, value] of Object.entries(p.values)) {
             const valid = this.#configSet[field]?.validate;
             const validOptions = this.#configSet[field]?.validOptions;
-            let cause;
-            if (validOptions && !isValidOption(value, validOptions)) {
-                cause = { name: field, found: value, validOptions: validOptions };
-            }
-            if (valid && !valid(value)) {
-                cause = cause || { name: field, found: value };
-            }
+            const cause = validOptions && !isValidOption(value, validOptions) ?
+                { name: field, found: value, validOptions }
+                : valid && !valid(value) ? { name: field, found: value }
+                    : undefined;
             if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause });
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
             }
         }
         return p;
@@ -512,7 +460,7 @@ export class Jack {
         // recurse so we get the core config key we care about.
         this.#noNoFields(yes, val, s);
         if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } });
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
         }
     }
     /**
@@ -522,7 +470,7 @@ export class Jack {
     validate(o) {
         if (!o || typeof o !== 'object') {
             throw new Error('Invalid config: not an object', {
-                cause: { found: o },
+                cause: { code: 'JACKSPEAK', found: o },
             });
         }
         const opts = o;
@@ -535,33 +483,27 @@ export class Jack {
             const config = this.#configSet[field];
             if (!config) {
                 throw new Error(`Unknown config option: ${field}`, {
-                    cause: { found: field },
+                    cause: { code: 'JACKSPEAK', found: field },
                 });
             }
             if (!isValidValue(value, config.type, !!config.multiple)) {
                 throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
                     cause: {
+                        code: 'JACKSPEAK',
                         name: field,
                         found: value,
                         wanted: valueType(config),
                     },
                 });
             }
-            let cause;
-            if (config.validOptions &&
-                !isValidOption(value, config.validOptions)) {
-                cause = {
-                    name: field,
-                    found: value,
-                    validOptions: config.validOptions,
-                };
-            }
-            if (config.validate && !config.validate(value)) {
-                cause = cause || { name: field, found: value };
-            }
+            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
+                { name: field, found: value, validOptions: config.validOptions }
+                : config.validate && !config.validate(value) ?
+                    { name: field, found: value }
+                    : undefined;
             if (cause) {
                 throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause,
+                    cause: { ...cause, code: 'JACKSPEAK' },
                 });
             }
         }
@@ -595,37 +537,37 @@ export class Jack {
      * Add one or more number fields.
      */
     num(fields) {
-        return this.#addFields(fields, num);
+        return this.#addFieldsWith(fields, 'number', false);
     }
     /**
      * Add one or more multiple number fields.
      */
     numList(fields) {
-        return this.#addFields(fields, numList);
+        return this.#addFieldsWith(fields, 'number', true);
     }
     /**
      * Add one or more string option fields.
      */
     opt(fields) {
-        return this.#addFields(fields, opt);
+        return this.#addFieldsWith(fields, 'string', false);
     }
     /**
      * Add one or more multiple string option fields.
      */
     optList(fields) {
-        return this.#addFields(fields, optList);
+        return this.#addFieldsWith(fields, 'string', true);
     }
     /**
      * Add one or more flag fields.
      */
     flag(fields) {
-        return this.#addFields(fields, flag);
+        return this.#addFieldsWith(fields, 'boolean', false);
     }
     /**
      * Add one or more multiple flag fields.
      */
     flagList(fields) {
-        return this.#addFields(fields, flagList);
+        return this.#addFieldsWith(fields, 'boolean', true);
     }
     /**
      * Generic field definition method. Similar to flag/flagList/number/etc,
@@ -633,29 +575,22 @@ export class Jack {
      * fields on each one, or Jack won't know how to define them.
      */
     addFields(fields) {
-        const next = this;
-        for (const [name, field] of Object.entries(fields)) {
-            this.#validateName(name, field);
-            next.#fields.push({
-                type: 'config',
-                name,
-                value: field,
-            });
-        }
-        Object.assign(next.#configSet, fields);
-        return next;
+        return this.#addFields(this, fields);
+    }
+    #addFieldsWith(fields, type, multiple) {
+        return this.#addFields(this, fields, {
+            type,
+            multiple,
+        });
     }
-    #addFields(fields, fn) {
-        const next = this;
+    #addFields(next, fields, opt) {
         Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
             this.#validateName(name, field);
-            const option = fn(field);
-            next.#fields.push({
-                type: 'config',
-                name,
-                value: option,
-            });
-            return [name, option];
+            const { type, multiple } = validateFieldMeta(field, opt);
+            const value = { ...field, type, multiple };
+            validateField(value, type, multiple);
+            next.#fields.push({ type: 'config', name, value });
+            return [name, value];
         })));
         return next;
     }
@@ -691,6 +626,7 @@ export class Jack {
         if (this.#usage)
             return this.#usage;
         let headingLevel = 1;
+        //@ts-ignore
         const ui = cliui({ width });
         const first = this.#fields[0];
         let start = first?.type === 'heading' ? 1 : 0;
@@ -932,6 +868,10 @@ export class Jack {
         return `Jack ${inspect(this.toJSON(), options)}`;
     }
 }
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+export const jack = (options = {}) => new Jack(options);
 // Unwrap and un-indent, so we can wrap description
 // strings however makes them look nice in the code.
 const normalize = (s, pre = false) => {
@@ -993,8 +933,4 @@ const normalizeOneLine = (s, pre = false) => {
         .trim();
     return pre ? `\`${n}\`` : n;
 };
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-export const jack = (options = {}) => new Jack(options);
 //# sourceMappingURL=index.js.map
\ No newline at end of file
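The jackspeak 4.x build above replaces the per-type factory helpers (num, numList, opt, optList, flag, flagList) with the shared validateFieldMeta/validateField path and exposes the resolved configuration through new definitions and shorts getters. A minimal sketch of how that chained API is consumed, under the assumption of made-up field names ('verbose', 'port', 'tag') that are not part of this patch:

// Minimal sketch only; 'verbose', 'port', and 'tag' are hypothetical fields.
import { jack } from 'jackspeak'

const j = jack({ envPrefix: 'MYAPP' })
  .flag({ verbose: { short: 'v', description: 'print more output' } })
  .num({ port: { default: 8080, validOptions: [8080, 8443] } })
  .optList({ tag: { short: 't' } })

// New in 4.x: resolved definitions and the short-name map are exposed.
console.log(j.definitions.port.type) // 'number'
console.log(j.shorts)                // { v: 'verbose', t: 'tag' }

const { values, positionals } = j.parse(process.argv)
console.log(values.verbose, values.port, values.tag, positionals)

Boolean fields additionally get an implicit no-<name> counterpart registered for parseArgs (the setNoBool helper above), which is how negated forms such as --no-verbose are accepted.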
diff --git a/node_modules/jackspeak/package.json b/node_modules/jackspeak/package.json
index 51eaabdf35469..aa85d230f6d24 100644
--- a/node_modules/jackspeak/package.json
+++ b/node_modules/jackspeak/package.json
@@ -1,9 +1,6 @@
 {
   "name": "jackspeak",
-  "publishConfig": {
-    "tag": "v3-legacy"
-  },
-  "version": "3.4.3",
+  "version": "4.1.1",
   "description": "A very strict and proper argument parser.",
   "tshy": {
     "main": true,
@@ -58,17 +55,18 @@
     "endOfLine": "lf"
   },
   "devDependencies": {
-    "@types/node": "^20.7.0",
-    "@types/pkgjs__parseargs": "^0.10.1",
-    "prettier": "^3.2.5",
-    "tap": "^18.8.0",
-    "tshy": "^1.14.0",
-    "typedoc": "^0.25.1",
-    "typescript": "^5.2.2"
+    "@types/node": "^22.6.0",
+    "prettier": "^3.3.3",
+    "tap": "^21.0.1",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.26.7"
   },
   "dependencies": {
     "@isaacs/cliui": "^8.0.2"
   },
+  "engines": {
+    "node": "20 || >=22"
+  },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
   },
@@ -89,7 +87,8 @@
     "parsing"
   ],
   "author": "Isaac Z. Schlueter ",
-  "optionalDependencies": {
-    "@pkgjs/parseargs": "^0.11.0"
-  }
+  "tap": {
+    "typecheck": true
+  },
+  "module": "./dist/esm/index.js"
 }
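The same update standardizes thrown errors: every cause object in the new dist code carries code: 'JACKSPEAK' alongside the name/found/wanted details. A hedged sketch of how a consumer might use that to tell configuration errors apart from other failures ('port' is again a made-up field):

// Sketch only; assumes a hypothetical 'port' number field.
import { jack } from 'jackspeak'

const j = jack().num({ port: {} })
try {
  j.validate({ port: 'not-a-number' })
} catch (er) {
  if (er instanceof Error && er.cause?.code === 'JACKSPEAK') {
    // the cause also carries name/found/wanted, per the diff above
    console.error('invalid config:', er.cause.name, '->', er.cause.found)
  } else {
    throw er
  }
}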
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE b/node_modules/node-gyp/node_modules/glob/LICENSE
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/LICENSE
rename to node_modules/node-gyp/node_modules/glob/LICENSE
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/glob.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/glob.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/glob.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/glob.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/has-magic.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/has-magic.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/has-magic.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/has-magic.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/ignore.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/ignore.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/ignore.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/ignore.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/index.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/index.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/index.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/glob/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/commonjs/package.json
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/package.json
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/pattern.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/pattern.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/pattern.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/pattern.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/processor.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/processor.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/processor.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/processor.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/walker.js b/node_modules/node-gyp/node_modules/glob/dist/commonjs/walker.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/commonjs/walker.js
rename to node_modules/node-gyp/node_modules/glob/dist/commonjs/walker.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.d.mts b/node_modules/node-gyp/node_modules/glob/dist/esm/bin.d.mts
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.d.mts
rename to node_modules/node-gyp/node_modules/glob/dist/esm/bin.d.mts
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs b/node_modules/node-gyp/node_modules/glob/dist/esm/bin.mjs
similarity index 98%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs
rename to node_modules/node-gyp/node_modules/glob/dist/esm/bin.mjs
index 553bb79303d90..5c7bf1e925610 100755
--- a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/bin.mjs
+++ b/node_modules/node-gyp/node_modules/glob/dist/esm/bin.mjs
@@ -209,10 +209,8 @@ const j = jack({
         description: `Output a huge amount of noisy debug information about
                     patterns as they are parsed and used to match files.`,
     },
-    version: {
-        short: 'V',
-        description: `Output the version (${version})`,
-    },
+})
+    .flag({
     help: {
         short: 'h',
         description: 'Show this usage information',
@@ -220,10 +218,6 @@ const j = jack({
 });
 try {
     const { positionals, values } = j.parse();
-    if (values.version) {
-        console.log(version);
-        process.exit(0);
-    }
     if (values.help) {
         console.log(j.usage());
         process.exit(0);
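The bin.mjs hunk above drops the built-in version flag and moves help into a chained .flag() block; the surrounding pattern of checking the parsed flag and printing usage() looks roughly like this (the CLI wiring here is illustrative only, not the actual glob bin):

// Illustrative sketch of the help-flag pattern used by the glob CLI above.
import { jack } from 'jackspeak'

const j = jack({ allowPositionals: true })
  .flag({
    help: { short: 'h', description: 'Show this usage information' },
  })

const { values, positionals } = j.parse()
if (values.help) {
  console.log(j.usage())
  process.exit(0)
}
console.log('patterns:', positionals)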
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/glob.js b/node_modules/node-gyp/node_modules/glob/dist/esm/glob.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/glob.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/glob.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/has-magic.js b/node_modules/node-gyp/node_modules/glob/dist/esm/has-magic.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/has-magic.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/has-magic.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/ignore.js b/node_modules/node-gyp/node_modules/glob/dist/esm/ignore.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/ignore.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/ignore.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/index.js b/node_modules/node-gyp/node_modules/glob/dist/esm/index.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/index.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/index.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/package.json b/node_modules/node-gyp/node_modules/glob/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/jackspeak/dist/esm/package.json
rename to node_modules/node-gyp/node_modules/glob/dist/esm/package.json
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/pattern.js b/node_modules/node-gyp/node_modules/glob/dist/esm/pattern.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/pattern.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/pattern.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/processor.js b/node_modules/node-gyp/node_modules/glob/dist/esm/processor.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/processor.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/processor.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/walker.js b/node_modules/node-gyp/node_modules/glob/dist/esm/walker.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/glob/dist/esm/walker.js
rename to node_modules/node-gyp/node_modules/glob/dist/esm/walker.js
diff --git a/node_modules/cacache/node_modules/glob/package.json b/node_modules/node-gyp/node_modules/glob/package.json
similarity index 81%
rename from node_modules/cacache/node_modules/glob/package.json
rename to node_modules/node-gyp/node_modules/glob/package.json
index 7be2c53bd5c9f..6d4893b5f327b 100644
--- a/node_modules/cacache/node_modules/glob/package.json
+++ b/node_modules/node-gyp/node_modules/glob/package.json
@@ -1,8 +1,11 @@
 {
   "author": "Isaac Z. Schlueter  (https://blog.izs.me/)",
+  "publishConfig": {
+    "tag": "legacy-v10"
+  },
   "name": "glob",
   "description": "the most correct and second fastest glob implementation in JavaScript",
-  "version": "11.0.3",
+  "version": "10.4.5",
   "type": "module",
   "tshy": {
     "main": true,
@@ -37,7 +40,7 @@
   "scripts": {
     "preversion": "npm test",
     "postversion": "npm publish",
-    "prepublishOnly": "npm run benchclean; git push origin --follow-tags",
+    "prepublishOnly": "git push origin --follow-tags",
     "prepare": "tshy",
     "pretest": "npm run prepare",
     "presnap": "npm run prepare",
@@ -45,6 +48,7 @@
     "snap": "tap",
     "format": "prettier --write . --log-level warn",
     "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts",
+    "prepublish": "npm run benchclean",
     "profclean": "rm -f v8.log profile.txt",
     "test-regen": "npm run profclean && TEST_REGEN=1 node --no-warnings --loader ts-node/esm test/00-setup.ts",
     "prebench": "npm run prepare",
@@ -66,22 +70,23 @@
     "endOfLine": "lf"
   },
   "dependencies": {
-    "foreground-child": "^3.3.1",
-    "jackspeak": "^4.1.1",
-    "minimatch": "^10.0.3",
+    "foreground-child": "^3.1.0",
+    "jackspeak": "^3.1.2",
+    "minimatch": "^9.0.4",
     "minipass": "^7.1.2",
     "package-json-from-dist": "^1.0.0",
-    "path-scurry": "^2.0.0"
+    "path-scurry": "^1.11.1"
   },
   "devDependencies": {
-    "@types/node": "^24.0.1",
-    "memfs": "^4.17.2",
+    "@types/node": "^20.11.30",
+    "memfs": "^3.4.13",
     "mkdirp": "^3.0.1",
-    "prettier": "^3.5.3",
-    "rimraf": "^6.0.1",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.7",
+    "sync-content": "^1.0.2",
+    "tap": "^19.0.0",
+    "tshy": "^1.14.0",
+    "typedoc": "^0.25.12"
   },
   "tap": {
     "before": "test/00-setup.ts"
@@ -90,8 +95,5 @@
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
   },
-  "engines": {
-    "node": "20 || >=22"
-  },
   "module": "./dist/esm/index.js"
 }
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/LICENSE.md b/node_modules/node-gyp/node_modules/jackspeak/LICENSE.md
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/jackspeak/LICENSE.md
rename to node_modules/node-gyp/node_modules/jackspeak/LICENSE.md
diff --git a/node_modules/cacache/node_modules/jackspeak/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js
similarity index 77%
rename from node_modules/cacache/node_modules/jackspeak/dist/commonjs/index.js
rename to node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js
index 543412746cc8f..f7fc9cb69a2af 100644
--- a/node_modules/cacache/node_modules/jackspeak/dist/commonjs/index.js
+++ b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/index.js
@@ -3,61 +3,23 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigOptionOfType = exports.isConfigType = void 0;
+exports.jack = exports.Jack = exports.isConfigOption = exports.isConfigType = void 0;
 const node_util_1 = require("node:util");
+const parse_args_js_1 = require("./parse-args.js");
 // it's a tiny API, just cast it inline, it's fine
 //@ts-ignore
 const cliui_1 = __importDefault(require("@isaacs/cliui"));
 const node_path_1 = require("node:path");
-const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-exports.isConfigType = isConfigType;
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isValidOption = (v, vo) => !!vo &&
-    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based only
- * on its `type` and `multiple` property
- */
-const isConfigOptionOfType = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    (0, exports.isConfigType)(o.type) &&
-    o.type === type &&
-    !!o.multiple === multi;
-exports.isConfigOptionOfType = isConfigOptionOfType;
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based on
- * it having all valid properties
- */
-const isConfigOption = (o, type, multi) => (0, exports.isConfigOptionOfType)(o, type, multi) &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi));
-exports.isConfigOption = isConfigOption;
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
-const width = Math.min(process?.stdout?.columns ?? 80, 80);
+const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80);
 // indentation spaces from heading level
 const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-    .join(' ')
-    .trim()
-    .toUpperCase()
-    .replace(/ /g, '_');
+const toEnvKey = (pref, key) => {
+    return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+        .join(' ')
+        .trim()
+        .toUpperCase()
+        .replace(/ /g, '_');
+};
 const toEnvVal = (value, delim = '\n') => {
     const str = typeof value === 'string' ? value
         : typeof value === 'boolean' ?
@@ -68,7 +30,7 @@ const toEnvVal = (value, delim = '\n') => {
                     value.map((v) => toEnvVal(v)).join(delim)
                     : /* c8 ignore start */ undefined;
     if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`);
     }
     /* c8 ignore stop */
     return str;
@@ -79,144 +41,256 @@ const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
     : type === 'string' ? env
         : type === 'boolean' ? env === '1'
             : +env.trim());
+const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
+exports.isConfigType = isConfigType;
 const undefOrType = (v, t) => v === undefined || typeof v === t;
 const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v);
 // print the value type, for error message reporting
 const valueType = (v) => typeof v === 'string' ? 'string'
     : typeof v === 'boolean' ? 'boolean'
         : typeof v === 'number' ? 'number'
             : Array.isArray(v) ?
-                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
+                joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]'
                 : `${v.type}${v.multiple ? '[]' : ''}`;
 const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
     types[0]
     : `(${types.join('|')})`;
-const validateFieldMeta = (field, fieldMeta) => {
-    if (fieldMeta) {
-        if (field.type !== undefined && field.type !== fieldMeta.type) {
-            throw new TypeError(`invalid type`, {
-                cause: {
-                    found: field.type,
-                    wanted: [fieldMeta.type, undefined],
-                },
-            });
-        }
-        if (field.multiple !== undefined &&
-            !!field.multiple !== fieldMeta.multiple) {
-            throw new TypeError(`invalid multiple`, {
-                cause: {
-                    found: field.multiple,
-                    wanted: [fieldMeta.multiple, undefined],
-                },
-            });
-        }
-        return fieldMeta;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
     }
-    if (!(0, exports.isConfigType)(field.type)) {
-        throw new TypeError(`invalid type`, {
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+const isConfigOption = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    (0, exports.isConfigType)(o.type) &&
+    o.type === type &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi)) &&
+    !!o.multiple === multi;
+exports.isConfigOption = isConfigOption;
+function num(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', false)) {
+        throw new TypeError('invalid default value', {
             cause: {
-                found: field.type,
-                wanted: ['string', 'number', 'boolean'],
+                found: def,
+                wanted: 'number',
             },
         });
     }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
     return {
-        type: field.type,
-        multiple: !!field.multiple,
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: false,
     };
-};
-const validateField = (o, type, multiple) => {
-    const validateValidOptions = (def, validOptions) => {
-        if (!undefOrTypeArray(validOptions, type)) {
-            throw new TypeError('invalid validOptions', {
-                cause: {
-                    found: validOptions,
-                    wanted: valueType({ type, multiple: true }),
-                },
-            });
-        }
-        if (def !== undefined && validOptions !== undefined) {
-            const valid = Array.isArray(def) ?
-                def.every(v => validOptions.includes(v))
-                : validOptions.includes(def);
-            if (!valid) {
-                throw new TypeError('invalid default value not in validOptions', {
-                    cause: {
-                        found: def,
-                        wanted: validOptions,
-                    },
-                });
-            }
-        }
+}
+function numList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'number[]',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: true,
     };
-    if (o.default !== undefined &&
-        !isValidValue(o.default, type, multiple)) {
+}
+function opt(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', false)) {
         throw new TypeError('invalid default value', {
             cause: {
-                found: o.default,
-                wanted: valueType({ type, multiple }),
+                found: def,
+                wanted: 'string',
             },
         });
     }
-    if ((0, exports.isConfigOptionOfType)(o, 'number', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'number', true)) {
-        validateValidOptions(o.default, o.validOptions);
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
     }
-    else if ((0, exports.isConfigOptionOfType)(o, 'string', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'string', true)) {
-        validateValidOptions(o.default, o.validOptions);
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: false,
+    };
+}
+function optList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'string[]',
+            },
+        });
     }
-    else if ((0, exports.isConfigOptionOfType)(o, 'boolean', false) ||
-        (0, exports.isConfigOptionOfType)(o, 'boolean', true)) {
-        if (o.hint !== undefined) {
-            throw new TypeError('cannot provide hint for flag');
-        }
-        if (o.validOptions !== undefined) {
-            throw new TypeError('cannot provide validOptions for flag');
-        }
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
     }
-    return o;
-};
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: true,
+    };
+}
+function flag(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', false)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: false,
+    };
+}
+function flagList(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', true)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag list');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: true,
+    };
+}
 const toParseArgsOptionsConfig = (options) => {
-    return Object.entries(options).reduce((acc, [longOption, o]) => {
-        const p = {
-            type: 'string',
-            multiple: !!o.multiple,
-            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
-        };
-        const setNoBool = () => {
-            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
-                acc[`no-${longOption}`] = {
-                    type: 'boolean',
-                    multiple: !!o.multiple,
-                };
-            }
-        };
-        const setDefault = (def, fn) => {
-            if (def !== undefined) {
-                p.default = fn(def);
-            }
-        };
-        if ((0, exports.isConfigOption)(o, 'number', false)) {
-            setDefault(o.default, String);
-        }
-        else if ((0, exports.isConfigOption)(o, 'number', true)) {
-            setDefault(o.default, d => d.map(v => String(v)));
-        }
-        else if ((0, exports.isConfigOption)(o, 'string', false) ||
-            (0, exports.isConfigOption)(o, 'string', true)) {
-            setDefault(o.default, v => v);
+    const c = {};
+    for (const longOption in options) {
+        const config = options[longOption];
+        /* c8 ignore start */
+        if (!config) {
+            throw new Error('config must be an object: ' + longOption);
+        }
+        /* c8 ignore start */
+        if ((0, exports.isConfigOption)(config, 'number', true)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: true,
+                default: config.default?.map(c => String(c)),
+            };
+        }
+        else if ((0, exports.isConfigOption)(config, 'number', false)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: false,
+                default: config.default === undefined ?
+                    undefined
+                    : String(config.default),
+            };
         }
-        else if ((0, exports.isConfigOption)(o, 'boolean', false) ||
-            (0, exports.isConfigOption)(o, 'boolean', true)) {
-            p.type = 'boolean';
-            setDefault(o.default, v => v);
-            setNoBool();
-        }
-        acc[longOption] = p;
-        return acc;
-    }, {});
+        else {
+            const conf = config;
+            c[longOption] = {
+                type: conf.type,
+                multiple: !!conf.multiple,
+                default: conf.default,
+            };
+        }
+        const clo = c[longOption];
+        if (typeof config.short === 'string') {
+            clo.short = config.short;
+        }
+        if (config.type === 'boolean' &&
+            !longOption.startsWith('no-') &&
+            !options[`no-${longOption}`]) {
+            c[`no-${longOption}`] = {
+                type: 'boolean',
+                multiple: config.multiple,
+            };
+        }
+    }
+    return c;
 };
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
 /**
  * Class returned by the {@link jack} function and all configuration
  * definition methods.  This is what gets chained together.
@@ -243,30 +317,6 @@ class Jack {
         this.#configSet = Object.create(null);
         this.#shorts = Object.create(null);
     }
-    /**
-     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
-     * but also including `description` and `short` fields, if set.
-     */
-    get definitions() {
-        return this.#configSet;
-    }
-    /** map of `{ <short>: <long> }` strings for each short name defined */
-    get shorts() {
-        return this.#shorts;
-    }
-    /**
-     * options passed to the {@link Jack} constructor
-     */
-    get jackOptions() {
-        return this.#options;
-    }
-    /**
-     * the data used to generate {@link Jack#usage} and
-     * {@link Jack#usageMarkdown} content.
-     */
-    get usageFields() {
-        return this.#fields;
-    }
     /**
      * Set the default value (which will still be overridden by env or cli)
      * as if from a parsed config file. The optional `source` param, if
@@ -278,13 +328,16 @@ class Jack {
             this.validate(values);
         }
         catch (er) {
-            if (source && er instanceof Error) {
-                /* c8 ignore next */
-                const cause = typeof er.cause === 'object' ? er.cause : {};
-                er.cause = { ...cause, path: source };
-                Error.captureStackTrace(er, this.setConfigValues);
+            const e = er;
+            if (source && e && typeof e === 'object') {
+                if (e.cause && typeof e.cause === 'object') {
+                    Object.assign(e.cause, { path: source });
+                }
+                else {
+                    e.cause = { path: source };
+                }
             }
-            throw er;
+            throw e;
         }
         for (const [field, value] of Object.entries(values)) {
             const my = this.#configSet[field];
@@ -292,10 +345,7 @@ class Jack {
             /* c8 ignore start */
             if (!my) {
                 throw new Error('unexpected field in config set: ' + field, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        found: field,
-                    },
+                    cause: { found: field },
                 });
             }
             /* c8 ignore stop */
@@ -350,9 +400,10 @@ class Jack {
         if (args === process.argv) {
             args = args.slice(process._eval !== undefined ? 1 : 2);
         }
-        const result = (0, node_util_1.parseArgs)({
+        const options = toParseArgsOptionsConfig(this.#configSet);
+        const result = (0, parse_args_js_1.parseArgs)({
             args,
-            options: toParseArgsOptionsConfig(this.#configSet),
+            options,
             // always strict, but using our own logic
             strict: false,
             allowPositionals: this.#allowPositionals,
@@ -392,7 +443,6 @@ class Jack {
                         `place it at the end of the command after '--', as in ` +
                         `'-- ${token.rawName}'`, {
                         cause: {
-                            code: 'JACKSPEAK',
                             found: token.rawName + (token.value ? `=${token.value}` : ''),
                         },
                     });
@@ -402,7 +452,6 @@ class Jack {
                         if (my.type !== 'boolean') {
                             throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
                                 cause: {
-                                    code: 'JACKSPEAK',
                                     name: token.rawName,
                                     wanted: valueType(my),
                                 },
@@ -412,7 +461,7 @@ class Jack {
                     }
                     else {
                         if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } });
                         }
                         if (my.type === 'string') {
                             value = token.value;
@@ -423,7 +472,6 @@ class Jack {
                                 throw new Error(`Invalid value '${token.value}' provided for ` +
                                     `'${token.rawName}' option, expected number`, {
                                     cause: {
-                                        code: 'JACKSPEAK',
                                         name: token.rawName,
                                         found: token.value,
                                         wanted: 'number',
@@ -448,12 +496,15 @@ class Jack {
         for (const [field, value] of Object.entries(p.values)) {
             const valid = this.#configSet[field]?.validate;
             const validOptions = this.#configSet[field]?.validOptions;
-            const cause = validOptions && !isValidOption(value, validOptions) ?
-                { name: field, found: value, validOptions }
-                : valid && !valid(value) ? { name: field, found: value }
-                    : undefined;
+            let cause;
+            if (validOptions && !isValidOption(value, validOptions)) {
+                cause = { name: field, found: value, validOptions: validOptions };
+            }
+            if (valid && !valid(value)) {
+                cause = cause || { name: field, found: value };
+            }
             if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause });
             }
         }
         return p;
@@ -469,7 +520,7 @@ class Jack {
         // recurse so we get the core config key we care about.
         this.#noNoFields(yes, val, s);
         if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } });
         }
     }
     /**
@@ -479,7 +530,7 @@ class Jack {
     validate(o) {
         if (!o || typeof o !== 'object') {
             throw new Error('Invalid config: not an object', {
-                cause: { code: 'JACKSPEAK', found: o },
+                cause: { found: o },
             });
         }
         const opts = o;
@@ -492,27 +543,33 @@ class Jack {
             const config = this.#configSet[field];
             if (!config) {
                 throw new Error(`Unknown config option: ${field}`, {
-                    cause: { code: 'JACKSPEAK', found: field },
+                    cause: { found: field },
                 });
             }
             if (!isValidValue(value, config.type, !!config.multiple)) {
                 throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
                     cause: {
-                        code: 'JACKSPEAK',
                         name: field,
                         found: value,
                         wanted: valueType(config),
                     },
                 });
             }
-            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
-                { name: field, found: value, validOptions: config.validOptions }
-                : config.validate && !config.validate(value) ?
-                    { name: field, found: value }
-                    : undefined;
+            let cause;
+            if (config.validOptions &&
+                !isValidOption(value, config.validOptions)) {
+                cause = {
+                    name: field,
+                    found: value,
+                    validOptions: config.validOptions,
+                };
+            }
+            if (config.validate && !config.validate(value)) {
+                cause = cause || { name: field, found: value };
+            }
             if (cause) {
                 throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause: { ...cause, code: 'JACKSPEAK' },
+                    cause,
                 });
             }
         }
@@ -546,37 +603,37 @@ class Jack {
      * Add one or more number fields.
      */
     num(fields) {
-        return this.#addFieldsWith(fields, 'number', false);
+        return this.#addFields(fields, num);
     }
     /**
      * Add one or more multiple number fields.
      */
     numList(fields) {
-        return this.#addFieldsWith(fields, 'number', true);
+        return this.#addFields(fields, numList);
     }
     /**
      * Add one or more string option fields.
      */
     opt(fields) {
-        return this.#addFieldsWith(fields, 'string', false);
+        return this.#addFields(fields, opt);
     }
     /**
      * Add one or more multiple string option fields.
      */
     optList(fields) {
-        return this.#addFieldsWith(fields, 'string', true);
+        return this.#addFields(fields, optList);
     }
     /**
      * Add one or more flag fields.
      */
     flag(fields) {
-        return this.#addFieldsWith(fields, 'boolean', false);
+        return this.#addFields(fields, flag);
     }
     /**
      * Add one or more multiple flag fields.
      */
     flagList(fields) {
-        return this.#addFieldsWith(fields, 'boolean', true);
+        return this.#addFields(fields, flagList);
     }
     /**
      * Generic field definition method. Similar to flag/flagList/number/etc,
@@ -584,22 +641,29 @@ class Jack {
      * fields on each one, or Jack won't know how to define them.
      */
     addFields(fields) {
-        return this.#addFields(this, fields);
-    }
-    #addFieldsWith(fields, type, multiple) {
-        return this.#addFields(this, fields, {
-            type,
-            multiple,
-        });
+        const next = this;
+        for (const [name, field] of Object.entries(fields)) {
+            this.#validateName(name, field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: field,
+            });
+        }
+        Object.assign(next.#configSet, fields);
+        return next;
     }
-    #addFields(next, fields, opt) {
+    #addFields(fields, fn) {
+        const next = this;
         Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
             this.#validateName(name, field);
-            const { type, multiple } = validateFieldMeta(field, opt);
-            const value = { ...field, type, multiple };
-            validateField(value, type, multiple);
-            next.#fields.push({ type: 'config', name, value });
-            return [name, value];
+            const option = fn(field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: option,
+            });
+            return [name, option];
         })));
         return next;
     }
@@ -635,7 +699,6 @@ class Jack {
         if (this.#usage)
             return this.#usage;
         let headingLevel = 1;
-        //@ts-ignore
         const ui = (0, cliui_1.default)({ width });
         const first = this.#fields[0];
         let start = first?.type === 'heading' ? 1 : 0;
@@ -878,11 +941,6 @@ class Jack {
     }
 }
 exports.Jack = Jack;
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-const jack = (options = {}) => new Jack(options);
-exports.jack = jack;
 // Unwrap and un-indent, so we can wrap description
 // strings however makes them look nice in the code.
 const normalize = (s, pre = false) => {
@@ -944,4 +1002,9 @@ const normalizeOneLine = (s, pre = false) => {
         .trim();
     return pre ? `\`${n}\`` : n;
 };
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+const jack = (options = {}) => new Jack(options);
+exports.jack = jack;
 //# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/package.json
rename to node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/package.json
diff --git a/node_modules/jackspeak/dist/commonjs/parse-args.js b/node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js
similarity index 100%
rename from node_modules/jackspeak/dist/commonjs/parse-args.js
rename to node_modules/node-gyp/node_modules/jackspeak/dist/commonjs/parse-args.js
diff --git a/node_modules/cacache/node_modules/jackspeak/dist/esm/index.js b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js
similarity index 77%
rename from node_modules/cacache/node_modules/jackspeak/dist/esm/index.js
rename to node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js
index b959f5126423c..78fdfa8155472 100644
--- a/node_modules/cacache/node_modules/jackspeak/dist/esm/index.js
+++ b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/index.js
@@ -1,54 +1,19 @@
-import { inspect, parseArgs, } from 'node:util';
+import { inspect } from 'node:util';
+import { parseArgs } from './parse-args.js';
 // it's a tiny API, just cast it inline, it's fine
 //@ts-ignore
 import cliui from '@isaacs/cliui';
 import { basename } from 'node:path';
-export const isConfigType = (t) => typeof t === 'string' &&
-    (t === 'string' || t === 'number' || t === 'boolean');
-const isValidValue = (v, type, multi) => {
-    if (multi) {
-        if (!Array.isArray(v))
-            return false;
-        return !v.some((v) => !isValidValue(v, type, false));
-    }
-    if (Array.isArray(v))
-        return false;
-    return typeof v === type;
-};
-const isValidOption = (v, vo) => !!vo &&
-    (Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v));
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based only
- * on its `type` and `multiple` property
- */
-export const isConfigOptionOfType = (o, type, multi) => !!o &&
-    typeof o === 'object' &&
-    isConfigType(o.type) &&
-    o.type === type &&
-    !!o.multiple === multi;
-/**
- * Determine whether an unknown object is a {@link ConfigOption} based on
- * it having all valid properties
- */
-export const isConfigOption = (o, type, multi) => isConfigOptionOfType(o, type, multi) &&
-    undefOrType(o.short, 'string') &&
-    undefOrType(o.description, 'string') &&
-    undefOrType(o.hint, 'string') &&
-    undefOrType(o.validate, 'function') &&
-    (o.type === 'boolean' ?
-        o.validOptions === undefined
-        : undefOrTypeArray(o.validOptions, o.type)) &&
-    (o.default === undefined || isValidValue(o.default, type, multi));
-const isHeading = (r) => r.type === 'heading';
-const isDescription = (r) => r.type === 'description';
-const width = Math.min(process?.stdout?.columns ?? 80, 80);
+const width = Math.min((process && process.stdout && process.stdout.columns) || 80, 80);
 // indentation spaces from heading level
 const indent = (n) => (n - 1) * 2;
-const toEnvKey = (pref, key) => [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
-    .join(' ')
-    .trim()
-    .toUpperCase()
-    .replace(/ /g, '_');
+const toEnvKey = (pref, key) => {
+    return [pref, key.replace(/[^a-zA-Z0-9]+/g, ' ')]
+        .join(' ')
+        .trim()
+        .toUpperCase()
+        .replace(/ /g, '_');
+};
 const toEnvVal = (value, delim = '\n') => {
     const str = typeof value === 'string' ? value
         : typeof value === 'boolean' ?
@@ -59,7 +24,7 @@ const toEnvVal = (value, delim = '\n') => {
                     value.map((v) => toEnvVal(v)).join(delim)
                     : /* c8 ignore start */ undefined;
     if (typeof str !== 'string') {
-        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`, { cause: { code: 'JACKSPEAK' } });
+        throw new Error(`could not serialize value to environment: ${JSON.stringify(value)}`);
     }
     /* c8 ignore stop */
     return str;
@@ -70,144 +35,254 @@ const fromEnvVal = (env, type, multiple, delim = '\n') => (multiple ?
     : type === 'string' ? env
         : type === 'boolean' ? env === '1'
             : +env.trim());
+export const isConfigType = (t) => typeof t === 'string' &&
+    (t === 'string' || t === 'number' || t === 'boolean');
 const undefOrType = (v, t) => v === undefined || typeof v === t;
 const undefOrTypeArray = (v, t) => v === undefined || (Array.isArray(v) && v.every(x => typeof x === t));
+const isValidOption = (v, vo) => Array.isArray(v) ? v.every(x => isValidOption(x, vo)) : vo.includes(v);
 // print the value type, for error message reporting
 const valueType = (v) => typeof v === 'string' ? 'string'
     : typeof v === 'boolean' ? 'boolean'
         : typeof v === 'number' ? 'number'
             : Array.isArray(v) ?
-                `${joinTypes([...new Set(v.map(v => valueType(v)))])}[]`
+                joinTypes([...new Set(v.map(v => valueType(v)))]) + '[]'
                 : `${v.type}${v.multiple ? '[]' : ''}`;
 const joinTypes = (types) => types.length === 1 && typeof types[0] === 'string' ?
     types[0]
     : `(${types.join('|')})`;
-const validateFieldMeta = (field, fieldMeta) => {
-    if (fieldMeta) {
-        if (field.type !== undefined && field.type !== fieldMeta.type) {
-            throw new TypeError(`invalid type`, {
-                cause: {
-                    found: field.type,
-                    wanted: [fieldMeta.type, undefined],
-                },
-            });
-        }
-        if (field.multiple !== undefined &&
-            !!field.multiple !== fieldMeta.multiple) {
-            throw new TypeError(`invalid multiple`, {
-                cause: {
-                    found: field.multiple,
-                    wanted: [fieldMeta.multiple, undefined],
-                },
-            });
-        }
-        return fieldMeta;
+const isValidValue = (v, type, multi) => {
+    if (multi) {
+        if (!Array.isArray(v))
+            return false;
+        return !v.some((v) => !isValidValue(v, type, false));
     }
-    if (!isConfigType(field.type)) {
-        throw new TypeError(`invalid type`, {
+    if (Array.isArray(v))
+        return false;
+    return typeof v === type;
+};
+export const isConfigOption = (o, type, multi) => !!o &&
+    typeof o === 'object' &&
+    isConfigType(o.type) &&
+    o.type === type &&
+    undefOrType(o.short, 'string') &&
+    undefOrType(o.description, 'string') &&
+    undefOrType(o.hint, 'string') &&
+    undefOrType(o.validate, 'function') &&
+    (o.type === 'boolean' ?
+        o.validOptions === undefined
+        : undefOrTypeArray(o.validOptions, o.type)) &&
+    (o.default === undefined || isValidValue(o.default, type, multi)) &&
+    !!o.multiple === multi;
+function num(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', false)) {
+        throw new TypeError('invalid default value', {
             cause: {
-                found: field.type,
-                wanted: ['string', 'number', 'boolean'],
+                found: def,
+                wanted: 'number',
             },
         });
     }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
     return {
-        type: field.type,
-        multiple: !!field.multiple,
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: false,
     };
-};
-const validateField = (o, type, multiple) => {
-    const validateValidOptions = (def, validOptions) => {
-        if (!undefOrTypeArray(validOptions, type)) {
-            throw new TypeError('invalid validOptions', {
-                cause: {
-                    found: validOptions,
-                    wanted: valueType({ type, multiple: true }),
-                },
-            });
-        }
-        if (def !== undefined && validOptions !== undefined) {
-            const valid = Array.isArray(def) ?
-                def.every(v => validOptions.includes(v))
-                : validOptions.includes(def);
-            if (!valid) {
-                throw new TypeError('invalid default value not in validOptions', {
-                    cause: {
-                        found: def,
-                        wanted: validOptions,
-                    },
-                });
-            }
-        }
+}
+function numList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'number', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'number[]',
+            },
+        });
+    }
+    if (!undefOrTypeArray(validOptions, 'number')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'number[]',
+            },
+        });
+    }
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'number',
+        multiple: true,
     };
-    if (o.default !== undefined &&
-        !isValidValue(o.default, type, multiple)) {
+}
+function opt(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', false)) {
         throw new TypeError('invalid default value', {
             cause: {
-                found: o.default,
-                wanted: valueType({ type, multiple }),
+                found: def,
+                wanted: 'string',
             },
         });
     }
-    if (isConfigOptionOfType(o, 'number', false) ||
-        isConfigOptionOfType(o, 'number', true)) {
-        validateValidOptions(o.default, o.validOptions);
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
     }
-    else if (isConfigOptionOfType(o, 'string', false) ||
-        isConfigOptionOfType(o, 'string', true)) {
-        validateValidOptions(o.default, o.validOptions);
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: false,
+    };
+}
+function optList(o = {}) {
+    const { default: def, validate: val, validOptions, ...rest } = o;
+    if (def !== undefined && !isValidValue(def, 'string', true)) {
+        throw new TypeError('invalid default value', {
+            cause: {
+                found: def,
+                wanted: 'string[]',
+            },
+        });
     }
-    else if (isConfigOptionOfType(o, 'boolean', false) ||
-        isConfigOptionOfType(o, 'boolean', true)) {
-        if (o.hint !== undefined) {
-            throw new TypeError('cannot provide hint for flag');
-        }
-        if (o.validOptions !== undefined) {
-            throw new TypeError('cannot provide validOptions for flag');
-        }
+    if (!undefOrTypeArray(validOptions, 'string')) {
+        throw new TypeError('invalid validOptions', {
+            cause: {
+                found: validOptions,
+                wanted: 'string[]',
+            },
+        });
     }
-    return o;
-};
+    const validate = val ?
+        val
+        : undefined;
+    return {
+        ...rest,
+        default: def,
+        validate,
+        validOptions,
+        type: 'string',
+        multiple: true,
+    };
+}
+function flag(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', false)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: false,
+    };
+}
+function flagList(o = {}) {
+    const { hint, default: def, validate: val, ...rest } = o;
+    delete rest.validOptions;
+    if (def !== undefined && !isValidValue(def, 'boolean', true)) {
+        throw new TypeError('invalid default value');
+    }
+    const validate = val ?
+        val
+        : undefined;
+    if (hint !== undefined) {
+        throw new TypeError('cannot provide hint for flag list');
+    }
+    return {
+        ...rest,
+        default: def,
+        validate,
+        type: 'boolean',
+        multiple: true,
+    };
+}
 const toParseArgsOptionsConfig = (options) => {
-    return Object.entries(options).reduce((acc, [longOption, o]) => {
-        const p = {
-            type: 'string',
-            multiple: !!o.multiple,
-            ...(typeof o.short === 'string' ? { short: o.short } : undefined),
-        };
-        const setNoBool = () => {
-            if (!longOption.startsWith('no-') && !options[`no-${longOption}`]) {
-                acc[`no-${longOption}`] = {
-                    type: 'boolean',
-                    multiple: !!o.multiple,
-                };
-            }
-        };
-        const setDefault = (def, fn) => {
-            if (def !== undefined) {
-                p.default = fn(def);
-            }
-        };
-        if (isConfigOption(o, 'number', false)) {
-            setDefault(o.default, String);
-        }
-        else if (isConfigOption(o, 'number', true)) {
-            setDefault(o.default, d => d.map(v => String(v)));
-        }
-        else if (isConfigOption(o, 'string', false) ||
-            isConfigOption(o, 'string', true)) {
-            setDefault(o.default, v => v);
-        }
-        else if (isConfigOption(o, 'boolean', false) ||
-            isConfigOption(o, 'boolean', true)) {
-            p.type = 'boolean';
-            setDefault(o.default, v => v);
-            setNoBool();
+    const c = {};
+    for (const longOption in options) {
+        const config = options[longOption];
+        /* c8 ignore start */
+        if (!config) {
+            throw new Error('config must be an object: ' + longOption);
+        }
+        /* c8 ignore start */
+        if (isConfigOption(config, 'number', true)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: true,
+                default: config.default?.map(c => String(c)),
+            };
+        }
+        else if (isConfigOption(config, 'number', false)) {
+            c[longOption] = {
+                type: 'string',
+                multiple: false,
+                default: config.default === undefined ?
+                    undefined
+                    : String(config.default),
+            };
         }
-        acc[longOption] = p;
-        return acc;
-    }, {});
+        else {
+            const conf = config;
+            c[longOption] = {
+                type: conf.type,
+                multiple: !!conf.multiple,
+                default: conf.default,
+            };
+        }
+        const clo = c[longOption];
+        if (typeof config.short === 'string') {
+            clo.short = config.short;
+        }
+        if (config.type === 'boolean' &&
+            !longOption.startsWith('no-') &&
+            !options[`no-${longOption}`]) {
+            c[`no-${longOption}`] = {
+                type: 'boolean',
+                multiple: config.multiple,
+            };
+        }
+    }
+    return c;
 };
+const isHeading = (r) => r.type === 'heading';
+const isDescription = (r) => r.type === 'description';
 /**
  * Class returned by the {@link jack} function and all configuration
  * definition methods.  This is what gets chained together.
@@ -234,30 +309,6 @@ export class Jack {
         this.#configSet = Object.create(null);
         this.#shorts = Object.create(null);
     }
-    /**
-     * Resulting definitions, suitable to be passed to Node's `util.parseArgs`,
-     * but also including `description` and `short` fields, if set.
-     */
-    get definitions() {
-        return this.#configSet;
-    }
-    /** map of `{ <short>: <long> }` strings for each short name defined */
-    get shorts() {
-        return this.#shorts;
-    }
-    /**
-     * options passed to the {@link Jack} constructor
-     */
-    get jackOptions() {
-        return this.#options;
-    }
-    /**
-     * the data used to generate {@link Jack#usage} and
-     * {@link Jack#usageMarkdown} content.
-     */
-    get usageFields() {
-        return this.#fields;
-    }
     /**
      * Set the default value (which will still be overridden by env or cli)
      * as if from a parsed config file. The optional `source` param, if
@@ -269,13 +320,16 @@ export class Jack {
             this.validate(values);
         }
         catch (er) {
-            if (source && er instanceof Error) {
-                /* c8 ignore next */
-                const cause = typeof er.cause === 'object' ? er.cause : {};
-                er.cause = { ...cause, path: source };
-                Error.captureStackTrace(er, this.setConfigValues);
+            const e = er;
+            if (source && e && typeof e === 'object') {
+                if (e.cause && typeof e.cause === 'object') {
+                    Object.assign(e.cause, { path: source });
+                }
+                else {
+                    e.cause = { path: source };
+                }
             }
-            throw er;
+            throw e;
         }
         for (const [field, value] of Object.entries(values)) {
             const my = this.#configSet[field];
@@ -283,10 +337,7 @@ export class Jack {
             /* c8 ignore start */
             if (!my) {
                 throw new Error('unexpected field in config set: ' + field, {
-                    cause: {
-                        code: 'JACKSPEAK',
-                        found: field,
-                    },
+                    cause: { found: field },
                 });
             }
             /* c8 ignore stop */
@@ -341,9 +392,10 @@ export class Jack {
         if (args === process.argv) {
             args = args.slice(process._eval !== undefined ? 1 : 2);
         }
+        const options = toParseArgsOptionsConfig(this.#configSet);
         const result = parseArgs({
             args,
-            options: toParseArgsOptionsConfig(this.#configSet),
+            options,
             // always strict, but using our own logic
             strict: false,
             allowPositionals: this.#allowPositionals,
@@ -383,7 +435,6 @@ export class Jack {
                         `place it at the end of the command after '--', as in ` +
                         `'-- ${token.rawName}'`, {
                         cause: {
-                            code: 'JACKSPEAK',
                             found: token.rawName + (token.value ? `=${token.value}` : ''),
                         },
                     });
@@ -393,7 +444,6 @@ export class Jack {
                         if (my.type !== 'boolean') {
                             throw new Error(`No value provided for ${token.rawName}, expected ${my.type}`, {
                                 cause: {
-                                    code: 'JACKSPEAK',
                                     name: token.rawName,
                                     wanted: valueType(my),
                                 },
@@ -403,7 +453,7 @@ export class Jack {
                     }
                     else {
                         if (my.type === 'boolean') {
-                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { code: 'JACKSPEAK', found: token } });
+                            throw new Error(`Flag ${token.rawName} does not take a value, received '${token.value}'`, { cause: { found: token } });
                         }
                         if (my.type === 'string') {
                             value = token.value;
@@ -414,7 +464,6 @@ export class Jack {
                                 throw new Error(`Invalid value '${token.value}' provided for ` +
                                     `'${token.rawName}' option, expected number`, {
                                     cause: {
-                                        code: 'JACKSPEAK',
                                         name: token.rawName,
                                         found: token.value,
                                         wanted: 'number',
@@ -439,12 +488,15 @@ export class Jack {
         for (const [field, value] of Object.entries(p.values)) {
             const valid = this.#configSet[field]?.validate;
             const validOptions = this.#configSet[field]?.validOptions;
-            const cause = validOptions && !isValidOption(value, validOptions) ?
-                { name: field, found: value, validOptions }
-                : valid && !valid(value) ? { name: field, found: value }
-                    : undefined;
+            let cause;
+            if (validOptions && !isValidOption(value, validOptions)) {
+                cause = { name: field, found: value, validOptions: validOptions };
+            }
+            if (valid && !valid(value)) {
+                cause = cause || { name: field, found: value };
+            }
             if (cause) {
-                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause: { ...cause, code: 'JACKSPEAK' } });
+                throw new Error(`Invalid value provided for --${field}: ${JSON.stringify(value)}`, { cause });
             }
         }
         return p;
@@ -460,7 +512,7 @@ export class Jack {
         // recurse so we get the core config key we care about.
         this.#noNoFields(yes, val, s);
         if (this.#configSet[yes]?.type === 'boolean') {
-            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { code: 'JACKSPEAK', found: s, wanted: yes } });
+            throw new Error(`do not set '${s}', instead set '${yes}' as desired.`, { cause: { found: s, wanted: yes } });
         }
     }
     /**
@@ -470,7 +522,7 @@ export class Jack {
     validate(o) {
         if (!o || typeof o !== 'object') {
             throw new Error('Invalid config: not an object', {
-                cause: { code: 'JACKSPEAK', found: o },
+                cause: { found: o },
             });
         }
         const opts = o;
@@ -483,27 +535,33 @@ export class Jack {
             const config = this.#configSet[field];
             if (!config) {
                 throw new Error(`Unknown config option: ${field}`, {
-                    cause: { code: 'JACKSPEAK', found: field },
+                    cause: { found: field },
                 });
             }
             if (!isValidValue(value, config.type, !!config.multiple)) {
                 throw new Error(`Invalid value ${valueType(value)} for ${field}, expected ${valueType(config)}`, {
                     cause: {
-                        code: 'JACKSPEAK',
                         name: field,
                         found: value,
                         wanted: valueType(config),
                     },
                 });
             }
-            const cause = config.validOptions && !isValidOption(value, config.validOptions) ?
-                { name: field, found: value, validOptions: config.validOptions }
-                : config.validate && !config.validate(value) ?
-                    { name: field, found: value }
-                    : undefined;
+            let cause;
+            if (config.validOptions &&
+                !isValidOption(value, config.validOptions)) {
+                cause = {
+                    name: field,
+                    found: value,
+                    validOptions: config.validOptions,
+                };
+            }
+            if (config.validate && !config.validate(value)) {
+                cause = cause || { name: field, found: value };
+            }
             if (cause) {
                 throw new Error(`Invalid config value for ${field}: ${value}`, {
-                    cause: { ...cause, code: 'JACKSPEAK' },
+                    cause,
                 });
             }
         }
@@ -537,37 +595,37 @@ export class Jack {
      * Add one or more number fields.
      */
     num(fields) {
-        return this.#addFieldsWith(fields, 'number', false);
+        return this.#addFields(fields, num);
     }
     /**
      * Add one or more multiple number fields.
      */
     numList(fields) {
-        return this.#addFieldsWith(fields, 'number', true);
+        return this.#addFields(fields, numList);
     }
     /**
      * Add one or more string option fields.
      */
     opt(fields) {
-        return this.#addFieldsWith(fields, 'string', false);
+        return this.#addFields(fields, opt);
     }
     /**
      * Add one or more multiple string option fields.
      */
     optList(fields) {
-        return this.#addFieldsWith(fields, 'string', true);
+        return this.#addFields(fields, optList);
     }
     /**
      * Add one or more flag fields.
      */
     flag(fields) {
-        return this.#addFieldsWith(fields, 'boolean', false);
+        return this.#addFields(fields, flag);
     }
     /**
      * Add one or more multiple flag fields.
      */
     flagList(fields) {
-        return this.#addFieldsWith(fields, 'boolean', true);
+        return this.#addFields(fields, flagList);
     }
     /**
      * Generic field definition method. Similar to flag/flagList/number/etc,
@@ -575,22 +633,29 @@ export class Jack {
      * fields on each one, or Jack won't know how to define them.
      */
     addFields(fields) {
-        return this.#addFields(this, fields);
-    }
-    #addFieldsWith(fields, type, multiple) {
-        return this.#addFields(this, fields, {
-            type,
-            multiple,
-        });
+        const next = this;
+        for (const [name, field] of Object.entries(fields)) {
+            this.#validateName(name, field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: field,
+            });
+        }
+        Object.assign(next.#configSet, fields);
+        return next;
     }
-    #addFields(next, fields, opt) {
+    #addFields(fields, fn) {
+        const next = this;
         Object.assign(next.#configSet, Object.fromEntries(Object.entries(fields).map(([name, field]) => {
             this.#validateName(name, field);
-            const { type, multiple } = validateFieldMeta(field, opt);
-            const value = { ...field, type, multiple };
-            validateField(value, type, multiple);
-            next.#fields.push({ type: 'config', name, value });
-            return [name, value];
+            const option = fn(field);
+            next.#fields.push({
+                type: 'config',
+                name,
+                value: option,
+            });
+            return [name, option];
         })));
         return next;
     }
@@ -626,7 +691,6 @@ export class Jack {
         if (this.#usage)
             return this.#usage;
         let headingLevel = 1;
-        //@ts-ignore
         const ui = cliui({ width });
         const first = this.#fields[0];
         let start = first?.type === 'heading' ? 1 : 0;
@@ -868,10 +932,6 @@ export class Jack {
         return `Jack ${inspect(this.toJSON(), options)}`;
     }
 }
-/**
- * Main entry point. Create and return a {@link Jack} object.
- */
-export const jack = (options = {}) => new Jack(options);
 // Unwrap and un-indent, so we can wrap description
 // strings however makes them look nice in the code.
 const normalize = (s, pre = false) => {
@@ -933,4 +993,8 @@ const normalizeOneLine = (s, pre = false) => {
         .trim();
     return pre ? `\`${n}\`` : n;
 };
+/**
+ * Main entry point. Create and return a {@link Jack} object.
+ */
+export const jack = (options = {}) => new Jack(options);
 //# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/package.json b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/esm/package.json
rename to node_modules/node-gyp/node_modules/jackspeak/dist/esm/package.json
diff --git a/node_modules/jackspeak/dist/esm/parse-args.js b/node_modules/node-gyp/node_modules/jackspeak/dist/esm/parse-args.js
similarity index 100%
rename from node_modules/jackspeak/dist/esm/parse-args.js
rename to node_modules/node-gyp/node_modules/jackspeak/dist/esm/parse-args.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json b/node_modules/node-gyp/node_modules/jackspeak/package.json
similarity index 85%
rename from node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json
rename to node_modules/node-gyp/node_modules/jackspeak/package.json
index aa85d230f6d24..51eaabdf35469 100644
--- a/node_modules/@npmcli/map-workspaces/node_modules/jackspeak/package.json
+++ b/node_modules/node-gyp/node_modules/jackspeak/package.json
@@ -1,6 +1,9 @@
 {
   "name": "jackspeak",
-  "version": "4.1.1",
+  "publishConfig": {
+    "tag": "v3-legacy"
+  },
+  "version": "3.4.3",
   "description": "A very strict and proper argument parser.",
   "tshy": {
     "main": true,
@@ -55,18 +58,17 @@
     "endOfLine": "lf"
   },
   "devDependencies": {
-    "@types/node": "^22.6.0",
-    "prettier": "^3.3.3",
-    "tap": "^21.0.1",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.26.7"
+    "@types/node": "^20.7.0",
+    "@types/pkgjs__parseargs": "^0.10.1",
+    "prettier": "^3.2.5",
+    "tap": "^18.8.0",
+    "tshy": "^1.14.0",
+    "typedoc": "^0.25.1",
+    "typescript": "^5.2.2"
   },
   "dependencies": {
     "@isaacs/cliui": "^8.0.2"
   },
-  "engines": {
-    "node": "20 || >=22"
-  },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
   },
@@ -87,8 +89,7 @@
     "parsing"
   ],
   "author": "Isaac Z. Schlueter ",
-  "tap": {
-    "typecheck": true
-  },
-  "module": "./dist/esm/index.js"
+  "optionalDependencies": {
+    "@pkgjs/parseargs": "^0.11.0"
+  }
 }
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/LICENSE.md b/node_modules/node-gyp/node_modules/path-scurry/LICENSE.md
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/path-scurry/LICENSE.md
rename to node_modules/node-gyp/node_modules/path-scurry/LICENSE.md
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
similarity index 99%
rename from node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js
rename to node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
index af3e7595f577f..555de62f04c90 100644
--- a/node_modules/@npmcli/map-workspaces/node_modules/path-scurry/dist/commonjs/index.js
+++ b/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/index.js
@@ -302,8 +302,6 @@ class PathBase {
     /**
      * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
      * this property refers to the *parent* path, not the path object itself.
-     *
-     * @deprecated
      */
     get path() {
         return this.parentPath;
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/glob/dist/commonjs/package.json
rename to node_modules/node-gyp/node_modules/path-scurry/dist/commonjs/package.json
diff --git a/node_modules/cacache/node_modules/path-scurry/dist/esm/index.js b/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
similarity index 99%
rename from node_modules/cacache/node_modules/path-scurry/dist/esm/index.js
rename to node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
index 42be74c37ad9d..3b11b819faece 100644
--- a/node_modules/cacache/node_modules/path-scurry/dist/esm/index.js
+++ b/node_modules/node-gyp/node_modules/path-scurry/dist/esm/index.js
@@ -274,8 +274,6 @@ export class PathBase {
     /**
      * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
      * this property refers to the *parent* path, not the path object itself.
-     *
-     * @deprecated
      */
     get path() {
         return this.parentPath;
diff --git a/node_modules/@npmcli/package-json/node_modules/glob/dist/esm/package.json b/node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/package-json/node_modules/glob/dist/esm/package.json
rename to node_modules/node-gyp/node_modules/path-scurry/dist/esm/package.json
diff --git a/node_modules/cacache/node_modules/path-scurry/package.json b/node_modules/node-gyp/node_modules/path-scurry/package.json
similarity index 77%
rename from node_modules/cacache/node_modules/path-scurry/package.json
rename to node_modules/node-gyp/node_modules/path-scurry/package.json
index c3cb39dced545..e1766157894c8 100644
--- a/node_modules/cacache/node_modules/path-scurry/package.json
+++ b/node_modules/node-gyp/node_modules/path-scurry/package.json
@@ -1,6 +1,6 @@
 {
   "name": "path-scurry",
-  "version": "2.0.0",
+  "version": "1.11.1",
   "description": "walk paths fast and efficiently",
   "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
   "main": "./dist/commonjs/index.js",
@@ -31,7 +31,7 @@
     "presnap": "npm run prepare",
     "test": "tap",
     "snap": "tap",
-    "format": "prettier --write . --log-level warn",
+    "format": "prettier --write . --loglevel warn",
     "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
     "bench": "bash ./scripts/bench.sh"
   },
@@ -48,22 +48,24 @@
     "endOfLine": "lf"
   },
   "devDependencies": {
-    "@nodelib/fs.walk": "^2.0.0",
-    "@types/node": "^20.14.10",
+    "@nodelib/fs.walk": "^1.2.8",
+    "@types/node": "^20.12.11",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
     "mkdirp": "^3.0.0",
-    "prettier": "^3.3.2",
-    "rimraf": "^5.0.8",
-    "tap": "^20.0.3",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.1",
+    "tap": "^18.7.2",
     "ts-node": "^10.9.2",
-    "tshy": "^2.0.1",
-    "typedoc": "^0.26.3",
-    "typescript": "^5.5.3"
+    "tshy": "^1.14.0",
+    "typedoc": "^0.25.12",
+    "typescript": "^5.4.3"
   },
   "tap": {
     "typecheck": true
   },
   "engines": {
-    "node": "20 || >=22"
+    "node": ">=16 || 14 >=14.18"
   },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
@@ -73,8 +75,8 @@
     "url": "git+https://github.com/isaacs/path-scurry"
   },
   "dependencies": {
-    "lru-cache": "^11.0.0",
-    "minipass": "^7.1.2"
+    "lru-cache": "^10.2.0",
+    "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
   },
   "tshy": {
     "selfLink": false,
@@ -83,6 +85,5 @@
       ".": "./src/index.ts"
     }
   },
-  "types": "./dist/commonjs/index.d.ts",
-  "module": "./dist/esm/index.js"
+  "types": "./dist/commonjs/index.d.ts"
 }
diff --git a/node_modules/path-scurry/dist/commonjs/index.js b/node_modules/path-scurry/dist/commonjs/index.js
index 555de62f04c90..af3e7595f577f 100644
--- a/node_modules/path-scurry/dist/commonjs/index.js
+++ b/node_modules/path-scurry/dist/commonjs/index.js
@@ -302,6 +302,8 @@ class PathBase {
     /**
      * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
      * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
      */
     get path() {
         return this.parentPath;
diff --git a/node_modules/path-scurry/dist/esm/index.js b/node_modules/path-scurry/dist/esm/index.js
index 3b11b819faece..42be74c37ad9d 100644
--- a/node_modules/path-scurry/dist/esm/index.js
+++ b/node_modules/path-scurry/dist/esm/index.js
@@ -274,6 +274,8 @@ export class PathBase {
     /**
      * Deprecated alias for Dirent['parentPath'] Somewhat counterintuitively,
      * this property refers to the *parent* path, not the path object itself.
+     *
+     * @deprecated
      */
     get path() {
         return this.parentPath;
diff --git a/node_modules/path-scurry/node_modules/lru-cache/LICENSE b/node_modules/path-scurry/node_modules/lru-cache/LICENSE
deleted file mode 100644
index f785757cd63f8..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2010-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js
deleted file mode 100644
index 0589231885c68..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.js
+++ /dev/null
@@ -1,1546 +0,0 @@
-"use strict";
-/**
- * @module LRUCache
- */
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.LRUCache = void 0;
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
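    /* Editorial annotation, not part of the removed file: when a ttl is given,
     * the tracking installed above backs getRemainingTTL() and the optional
     * ttlAutopurge timers. A small sketch:
     *
     *   const cache = new LRUCache({ max: 10, ttl: 1_000, ttlAutopurge: true })
     *   cache.set('k', 'v')
     *   cache.getRemainingTTL('k')   // roughly 1000 right after set, counting down; 0 for absent keys
     */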
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
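    /* Editorial annotation, not part of the removed file: the generators above
     * (entries/keys/values and their r-prefixed reverse variants) iterate from
     * most to least recently used and skip in-flight background fetches. Sketch,
     * assuming a populated `cache`:
     *
     *   for (const [key, value] of cache) { ... }     // same as cache.entries()
     *   const newestKey = [...cache.keys()][0]        // most recently used key
     *   const coldestPair = [...cache.rentries()][0]  // least recently used [key, value]
     */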
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recency of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        const value = this.#isBackgroundFetch(v)
-            ? v.__staleWhileFetching
-            : v;
-        if (value === undefined)
-            return undefined;
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
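    /* Editorial annotation, not part of the removed file: dump() and load() above
     * form a serializable round trip; entry `start` values are rebased onto
     * Date.now() so remaining TTLs survive it. Sketch (the file path is
     * hypothetical):
     *
     *   const snapshot = cache.dump()    // [[key, entry], ...], least recently used first
     *   fs.writeFileSync('cache.json', JSON.stringify(snapshot))
     *
     *   const restored = new LRUCache({ max: 500, ttl: 60_000 })
     *   restored.load(JSON.parse(fs.readFileSync('cache.json', 'utf8')))
     */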
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
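    /* Editorial annotation, not part of the removed file: per the doc comment
     * above, SetOptions override constructor options for a single set() call, and
     * an `undefined` value is an alias for delete(). Sketch:
     *
     *   cache.set('session', token, { ttl: 5 * 60_000 })   // per-entry TTL override
     *   cache.set('blob', data, { size: data.length })     // explicit size, skips sizeCalculation
     *   cache.set('session', undefined)                     // same as cache.delete('session')
     */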
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
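    /* Editorial annotation, not part of the removed file: has() and peek() above
     * are the non-promoting probes; unlike get(), neither moves the entry to the
     * most-recently-used position, and has() reports stale entries as absent.
     *
     *   if (cache.has('k')) { ... }    // fresh and present; recency untouched
     *   const v = cache.peek('k')      // read without promoting or deleting a stale entry
     */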
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
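    /* Editorial annotation, not part of the removed file: fetch() above combines
     * a cache lookup with a user-supplied fetchMethod, de-duplicating in-flight
     * requests and optionally serving stale values while a refresh runs. Sketch
     * (the URL is hypothetical):
     *
     *   const cache = new LRUCache({
     *     max: 100,
     *     ttl: 30_000,
     *     allowStale: true,    // return the stale value while the refresh is in flight
     *     fetchMethod: async (key, staleValue, { signal }) =>
     *       (await fetch(`https://api.example.com/${key}`, { signal })).json(),
     *   })
     *   const user = await cache.fetch('users/42')   // hit, stale refresh, or miss + fetch
     */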
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
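    /* Editorial annotation, not part of the removed file: memo() above is the
     * synchronous analogue of fetch(), computing and caching values through the
     * memoMethod passed to the constructor. Sketch (`fib` is hypothetical):
     *
     *   const cache = new LRUCache({ max: 1_000, memoMethod: (key) => fib(Number(key)) })
     *   cache.memo('40')   // computed once, then served from the cache
     */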
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-exports.LRUCache = LRUCache;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js
deleted file mode 100644
index ad643b0badc90..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var j=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),I=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);Object.defineProperty(exports,"__esModule",{value:!0});exports.LRUCache=void 0;var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,U=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof U.emitWarning=="function"?U.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},D=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof D>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},D=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=U.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?E:null:null,E=class extends Array{constructor(t){super(t),this.fill(0)}},v,O=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(O,v,!0);let i=new O(t,e);return x(O,v,!1),i}constructor(t,e){if(!j(O,v))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},W=O;v=new WeakMap,I(W,v,!1);var C=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#b;#m;#u;#y;#E;#a;static unsafeExposeInternals(t){return{starts:t.#m,ttls:t.#u,sizes:t.#b,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:b,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:m,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:z}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let 
y=e?k(e):Array;if(!y)throw new Error("invalid max value: "+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#E=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=W.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof b=="function"?(this.#w=b,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!z,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!m,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#U()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let R="LRU_CACHE_UNBOUNDED";V(R)&&(P.add(R),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",R,C))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#U(){let t=new E(this.#g),e=new E(this.#g);this.#u=t,this.#m=e,this.#M=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#v=n=>{e[n]=t[n]!==0?T.now():0},this.#O=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#v=()=>{};#O=()=>{};#M=()=>{};#d=()=>!1;#P(){let t=new E(this.#g);this.#S=0,this.#b=t,this.#z=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#z=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#j(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#j(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#m){let h=this.#u[e],o=this.#m[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#b&&(n.size=this.#b[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#m){h.ttl=this.#u[e];let o=T.now()-this.#m[e];h.start=Math.floor(Date.now()-o)}this.#b&&(h.size=this.#b[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,b=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&b>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,b,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#E&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#z(f),this.#D(f,b,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#U(),this.#u&&(g||this.#M(f,s,n),r&&this.#O(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#E&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#z(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#O(s,n));else return i&&this.#v(n),s&&(s.has="hit",this.#O(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new D,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let m=c;return this.#t[e]===c&&(d===void 0?m.__staleWhileFetching?this.#t[e]=m.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},b=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,m=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!m||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,b),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#E)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof D}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:b=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#E)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let m={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:b,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,m,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let M=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",M&&(a.returnedStale=!0)),M?_.__staleWhileFetching:_.__returned=_}let z=this.#d(p);if(!S&&!z)return a&&(a.fetch="hit"),this.#C(p),s&&this.#v(p),a&&this.#O(a,p),_;let y=this.#x(t,p,m,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=z?"stale":"refresh",L&&z&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#O(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#v(o),r))}else h&&(h.get="miss")}#I(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#I(this.#c[t],this.#l[t]),this.#I(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#z(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#m&&(this.#u.fill(0),this.#m.fill(0)),this.#b&&this.#b.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};exports.LRUCache=C;
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js
deleted file mode 100644
index 555654a57c4d7..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.js
+++ /dev/null
@@ -1,1542 +0,0 @@
-/**
- * @module LRUCache
- */
-const perf = typeof performance === 'object' &&
-    performance &&
-    typeof performance.now === 'function'
-    ? performance
-    : Date;
-const warned = new Set();
-/* c8 ignore start */
-const PROCESS = (typeof process === 'object' && !!process ? process : {});
-/* c8 ignore start */
-const emitWarning = (msg, type, code, fn) => {
-    typeof PROCESS.emitWarning === 'function'
-        ? PROCESS.emitWarning(msg, type, code, fn)
-        : console.error(`[${code}] ${type}: ${msg}`);
-};
-let AC = globalThis.AbortController;
-let AS = globalThis.AbortSignal;
-/* c8 ignore start */
-if (typeof AC === 'undefined') {
-    //@ts-ignore
-    AS = class AbortSignal {
-        onabort;
-        _onabort = [];
-        reason;
-        aborted = false;
-        addEventListener(_, fn) {
-            this._onabort.push(fn);
-        }
-    };
-    //@ts-ignore
-    AC = class AbortController {
-        constructor() {
-            warnACPolyfill();
-        }
-        signal = new AS();
-        abort(reason) {
-            if (this.signal.aborted)
-                return;
-            //@ts-ignore
-            this.signal.reason = reason;
-            //@ts-ignore
-            this.signal.aborted = true;
-            //@ts-ignore
-            for (const fn of this.signal._onabort) {
-                fn(reason);
-            }
-            this.signal.onabort?.(reason);
-        }
-    };
-    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
-    const warnACPolyfill = () => {
-        if (!printACPolyfillWarning)
-            return;
-        printACPolyfillWarning = false;
-        emitWarning('AbortController is not defined. If using lru-cache in ' +
-            'node 14, load an AbortController polyfill from the ' +
-            '`node-abort-controller` package. A minimal polyfill is ' +
-            'provided for use by LRUCache.fetch(), but it should not be ' +
-            'relied upon in other contexts (eg, passing it to other APIs that ' +
-            'use AbortController/AbortSignal might have undesirable effects). ' +
-            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
-    };
-}
-/* c8 ignore stop */
-const shouldWarn = (code) => !warned.has(code);
-const TYPE = Symbol('type');
-const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
-/* c8 ignore start */
-// This is a little bit ridiculous, tbh.
-// The maximum array length is 2^32-1 or thereabouts on most JS impls.
-// And well before that point, you're caching the entire world, I mean,
-// that's ~32GB of just integers for the next/prev links, plus whatever
-// else to hold that many keys and values.  Just filling the memory with
-// zeroes at init time is brutal when you get that big.
-// But why not be complete?
-// Maybe in the future, these limits will have expanded.
-const getUintArray = (max) => !isPosInt(max)
-    ? null
-    : max <= Math.pow(2, 8)
-        ? Uint8Array
-        : max <= Math.pow(2, 16)
-            ? Uint16Array
-            : max <= Math.pow(2, 32)
-                ? Uint32Array
-                : max <= Number.MAX_SAFE_INTEGER
-                    ? ZeroArray
-                    : null;
-/* c8 ignore stop */
-class ZeroArray extends Array {
-    constructor(size) {
-        super(size);
-        this.fill(0);
-    }
-}
-class Stack {
-    heap;
-    length;
-    // private constructor
-    static #constructing = false;
-    static create(max) {
-        const HeapCls = getUintArray(max);
-        if (!HeapCls)
-            return [];
-        Stack.#constructing = true;
-        const s = new Stack(max, HeapCls);
-        Stack.#constructing = false;
-        return s;
-    }
-    constructor(max, HeapCls) {
-        /* c8 ignore start */
-        if (!Stack.#constructing) {
-            throw new TypeError('instantiate Stack using Stack.create(n)');
-        }
-        /* c8 ignore stop */
-        this.heap = new HeapCls(max);
-        this.length = 0;
-    }
-    push(n) {
-        this.heap[this.length++] = n;
-    }
-    pop() {
-        return this.heap[--this.length];
-    }
-}
-/**
- * Default export, the thing you're using this module to get.
- *
- * The `K` and `V` types define the key and value types, respectively. The
- * optional `FC` type defines the type of the `context` object passed to
- * `cache.fetch()` and `cache.memo()`.
- *
- * Keys and values **must not** be `null` or `undefined`.
- *
- * All properties from the options object (with the exception of `max`,
- * `maxSize`, `fetchMethod`, `memoMethod`, `dispose` and `disposeAfter`) are
- * added as normal public members. (The listed options are read-only getters.)
- *
- * Changing any of these will alter the defaults for subsequent method calls.
- */
-export class LRUCache {
-    // options that cannot be changed without disaster
-    #max;
-    #maxSize;
-    #dispose;
-    #disposeAfter;
-    #fetchMethod;
-    #memoMethod;
-    /**
-     * {@link LRUCache.OptionsBase.ttl}
-     */
-    ttl;
-    /**
-     * {@link LRUCache.OptionsBase.ttlResolution}
-     */
-    ttlResolution;
-    /**
-     * {@link LRUCache.OptionsBase.ttlAutopurge}
-     */
-    ttlAutopurge;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnGet}
-     */
-    updateAgeOnGet;
-    /**
-     * {@link LRUCache.OptionsBase.updateAgeOnHas}
-     */
-    updateAgeOnHas;
-    /**
-     * {@link LRUCache.OptionsBase.allowStale}
-     */
-    allowStale;
-    /**
-     * {@link LRUCache.OptionsBase.noDisposeOnSet}
-     */
-    noDisposeOnSet;
-    /**
-     * {@link LRUCache.OptionsBase.noUpdateTTL}
-     */
-    noUpdateTTL;
-    /**
-     * {@link LRUCache.OptionsBase.maxEntrySize}
-     */
-    maxEntrySize;
-    /**
-     * {@link LRUCache.OptionsBase.sizeCalculation}
-     */
-    sizeCalculation;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
-     */
-    noDeleteOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
-     */
-    noDeleteOnStaleGet;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
-     */
-    allowStaleOnFetchAbort;
-    /**
-     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
-     */
-    allowStaleOnFetchRejection;
-    /**
-     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
-     */
-    ignoreFetchAbort;
-    // computed properties
-    #size;
-    #calculatedSize;
-    #keyMap;
-    #keyList;
-    #valList;
-    #next;
-    #prev;
-    #head;
-    #tail;
-    #free;
-    #disposed;
-    #sizes;
-    #starts;
-    #ttls;
-    #hasDispose;
-    #hasFetchMethod;
-    #hasDisposeAfter;
-    /**
-     * Do not call this method unless you need to inspect the
-     * inner workings of the cache.  If anything returned by this
-     * object is modified in any way, strange breakage may occur.
-     *
-     * These fields are private for a reason!
-     *
-     * @internal
-     */
-    static unsafeExposeInternals(c) {
-        return {
-            // properties
-            starts: c.#starts,
-            ttls: c.#ttls,
-            sizes: c.#sizes,
-            keyMap: c.#keyMap,
-            keyList: c.#keyList,
-            valList: c.#valList,
-            next: c.#next,
-            prev: c.#prev,
-            get head() {
-                return c.#head;
-            },
-            get tail() {
-                return c.#tail;
-            },
-            free: c.#free,
-            // methods
-            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
-            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
-            moveToTail: (index) => c.#moveToTail(index),
-            indexes: (options) => c.#indexes(options),
-            rindexes: (options) => c.#rindexes(options),
-            isStale: (index) => c.#isStale(index),
-        };
-    }
-    // Protected read-only members
-    /**
-     * {@link LRUCache.OptionsBase.max} (read-only)
-     */
-    get max() {
-        return this.#max;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.maxSize} (read-only)
-     */
-    get maxSize() {
-        return this.#maxSize;
-    }
-    /**
-     * The total computed size of items in the cache (read-only)
-     */
-    get calculatedSize() {
-        return this.#calculatedSize;
-    }
-    /**
-     * The number of items stored in the cache (read-only)
-     */
-    get size() {
-        return this.#size;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
-     */
-    get fetchMethod() {
-        return this.#fetchMethod;
-    }
-    get memoMethod() {
-        return this.#memoMethod;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.dispose} (read-only)
-     */
-    get dispose() {
-        return this.#dispose;
-    }
-    /**
-     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
-     */
-    get disposeAfter() {
-        return this.#disposeAfter;
-    }
-    constructor(options) {
-        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, memoMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
-        if (max !== 0 && !isPosInt(max)) {
-            throw new TypeError('max option must be a nonnegative integer');
-        }
-        const UintArray = max ? getUintArray(max) : Array;
-        if (!UintArray) {
-            throw new Error('invalid max value: ' + max);
-        }
-        this.#max = max;
-        this.#maxSize = maxSize;
-        this.maxEntrySize = maxEntrySize || this.#maxSize;
-        this.sizeCalculation = sizeCalculation;
-        if (this.sizeCalculation) {
-            if (!this.#maxSize && !this.maxEntrySize) {
-                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
-            }
-            if (typeof this.sizeCalculation !== 'function') {
-                throw new TypeError('sizeCalculation set to non-function');
-            }
-        }
-        if (memoMethod !== undefined &&
-            typeof memoMethod !== 'function') {
-            throw new TypeError('memoMethod must be a function if defined');
-        }
-        this.#memoMethod = memoMethod;
-        if (fetchMethod !== undefined &&
-            typeof fetchMethod !== 'function') {
-            throw new TypeError('fetchMethod must be a function if specified');
-        }
-        this.#fetchMethod = fetchMethod;
-        this.#hasFetchMethod = !!fetchMethod;
-        this.#keyMap = new Map();
-        this.#keyList = new Array(max).fill(undefined);
-        this.#valList = new Array(max).fill(undefined);
-        this.#next = new UintArray(max);
-        this.#prev = new UintArray(max);
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free = Stack.create(max);
-        this.#size = 0;
-        this.#calculatedSize = 0;
-        if (typeof dispose === 'function') {
-            this.#dispose = dispose;
-        }
-        if (typeof disposeAfter === 'function') {
-            this.#disposeAfter = disposeAfter;
-            this.#disposed = [];
-        }
-        else {
-            this.#disposeAfter = undefined;
-            this.#disposed = undefined;
-        }
-        this.#hasDispose = !!this.#dispose;
-        this.#hasDisposeAfter = !!this.#disposeAfter;
-        this.noDisposeOnSet = !!noDisposeOnSet;
-        this.noUpdateTTL = !!noUpdateTTL;
-        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
-        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
-        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
-        this.ignoreFetchAbort = !!ignoreFetchAbort;
-        // NB: maxEntrySize is set to maxSize if it's set
-        if (this.maxEntrySize !== 0) {
-            if (this.#maxSize !== 0) {
-                if (!isPosInt(this.#maxSize)) {
-                    throw new TypeError('maxSize must be a positive integer if specified');
-                }
-            }
-            if (!isPosInt(this.maxEntrySize)) {
-                throw new TypeError('maxEntrySize must be a positive integer if specified');
-            }
-            this.#initializeSizeTracking();
-        }
-        this.allowStale = !!allowStale;
-        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
-        this.updateAgeOnGet = !!updateAgeOnGet;
-        this.updateAgeOnHas = !!updateAgeOnHas;
-        this.ttlResolution =
-            isPosInt(ttlResolution) || ttlResolution === 0
-                ? ttlResolution
-                : 1;
-        this.ttlAutopurge = !!ttlAutopurge;
-        this.ttl = ttl || 0;
-        if (this.ttl) {
-            if (!isPosInt(this.ttl)) {
-                throw new TypeError('ttl must be a positive integer if specified');
-            }
-            this.#initializeTTLTracking();
-        }
-        // do not allow completely unbounded caches
-        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
-            throw new TypeError('At least one of max, maxSize, or ttl is required');
-        }
-        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
-            const code = 'LRU_CACHE_UNBOUNDED';
-            if (shouldWarn(code)) {
-                warned.add(code);
-                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
-                    'result in unbounded memory consumption.';
-                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
-            }
-        }
-    }
-    /**
-     * Return the number of ms left in the item's TTL. If item is not in cache,
-     * returns `0`. Returns `Infinity` if item is in cache without a defined TTL.
-     */
-    getRemainingTTL(key) {
-        return this.#keyMap.has(key) ? Infinity : 0;
-    }
-    #initializeTTLTracking() {
-        const ttls = new ZeroArray(this.#max);
-        const starts = new ZeroArray(this.#max);
-        this.#ttls = ttls;
-        this.#starts = starts;
-        this.#setItemTTL = (index, ttl, start = perf.now()) => {
-            starts[index] = ttl !== 0 ? start : 0;
-            ttls[index] = ttl;
-            if (ttl !== 0 && this.ttlAutopurge) {
-                const t = setTimeout(() => {
-                    if (this.#isStale(index)) {
-                        this.#delete(this.#keyList[index], 'expire');
-                    }
-                }, ttl + 1);
-                // unref() not supported on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-        };
-        this.#updateItemAge = index => {
-            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
-        };
-        this.#statusTTL = (status, index) => {
-            if (ttls[index]) {
-                const ttl = ttls[index];
-                const start = starts[index];
-                /* c8 ignore next */
-                if (!ttl || !start)
-                    return;
-                status.ttl = ttl;
-                status.start = start;
-                status.now = cachedNow || getNow();
-                const age = status.now - start;
-                status.remainingTTL = ttl - age;
-            }
-        };
-        // debounce calls to perf.now() to 1s so we're not hitting
-        // that costly call repeatedly.
-        let cachedNow = 0;
-        const getNow = () => {
-            const n = perf.now();
-            if (this.ttlResolution > 0) {
-                cachedNow = n;
-                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
-                // not available on all platforms
-                /* c8 ignore start */
-                if (t.unref) {
-                    t.unref();
-                }
-                /* c8 ignore stop */
-            }
-            return n;
-        };
-        this.getRemainingTTL = key => {
-            const index = this.#keyMap.get(key);
-            if (index === undefined) {
-                return 0;
-            }
-            const ttl = ttls[index];
-            const start = starts[index];
-            if (!ttl || !start) {
-                return Infinity;
-            }
-            const age = (cachedNow || getNow()) - start;
-            return ttl - age;
-        };
-        this.#isStale = index => {
-            const s = starts[index];
-            const t = ttls[index];
-            return !!t && !!s && (cachedNow || getNow()) - s > t;
-        };
-    }
-    // conditionally set private methods related to TTL
-    #updateItemAge = () => { };
-    #statusTTL = () => { };
-    #setItemTTL = () => { };
-    /* c8 ignore stop */
-    #isStale = () => false;
-    #initializeSizeTracking() {
-        const sizes = new ZeroArray(this.#max);
-        this.#calculatedSize = 0;
-        this.#sizes = sizes;
-        this.#removeItemSize = index => {
-            this.#calculatedSize -= sizes[index];
-            sizes[index] = 0;
-        };
-        this.#requireSize = (k, v, size, sizeCalculation) => {
-            // provisionally accept background fetches.
-            // actual value size will be checked when they return.
-            if (this.#isBackgroundFetch(v)) {
-                return 0;
-            }
-            if (!isPosInt(size)) {
-                if (sizeCalculation) {
-                    if (typeof sizeCalculation !== 'function') {
-                        throw new TypeError('sizeCalculation must be a function');
-                    }
-                    size = sizeCalculation(v, k);
-                    if (!isPosInt(size)) {
-                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
-                    }
-                }
-                else {
-                    throw new TypeError('invalid size value (must be positive integer). ' +
-                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
-                        'or size must be set.');
-                }
-            }
-            return size;
-        };
-        this.#addItemSize = (index, size, status) => {
-            sizes[index] = size;
-            if (this.#maxSize) {
-                const maxSize = this.#maxSize - sizes[index];
-                while (this.#calculatedSize > maxSize) {
-                    this.#evict(true);
-                }
-            }
-            this.#calculatedSize += sizes[index];
-            if (status) {
-                status.entrySize = size;
-                status.totalCalculatedSize = this.#calculatedSize;
-            }
-        };
-    }
-    #removeItemSize = _i => { };
-    #addItemSize = (_i, _s, _st) => { };
-    #requireSize = (_k, _v, size, sizeCalculation) => {
-        if (size || sizeCalculation) {
-            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
-        }
-        return 0;
-    };
-    *#indexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#tail; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#head) {
-                    break;
-                }
-                else {
-                    i = this.#prev[i];
-                }
-            }
-        }
-    }
-    *#rindexes({ allowStale = this.allowStale } = {}) {
-        if (this.#size) {
-            for (let i = this.#head; true;) {
-                if (!this.#isValidIndex(i)) {
-                    break;
-                }
-                if (allowStale || !this.#isStale(i)) {
-                    yield i;
-                }
-                if (i === this.#tail) {
-                    break;
-                }
-                else {
-                    i = this.#next[i];
-                }
-            }
-        }
-    }
-    #isValidIndex(index) {
-        return (index !== undefined &&
-            this.#keyMap.get(this.#keyList[index]) === index);
-    }
-    /**
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from most recently used to least recently used.
-     */
-    *entries() {
-        for (const i of this.#indexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.entries}
-     *
-     * Return a generator yielding `[key, value]` pairs,
-     * in order from least recently used to most recently used.
-     */
-    *rentries() {
-        for (const i of this.#rindexes()) {
-            if (this.#valList[i] !== undefined &&
-                this.#keyList[i] !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield [this.#keyList[i], this.#valList[i]];
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the keys in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *keys() {
-        for (const i of this.#indexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.keys}
-     *
-     * Return a generator yielding the keys in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rkeys() {
-        for (const i of this.#rindexes()) {
-            const k = this.#keyList[i];
-            if (k !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield k;
-            }
-        }
-    }
-    /**
-     * Return a generator yielding the values in the cache,
-     * in order from most recently used to least recently used.
-     */
-    *values() {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Inverse order version of {@link LRUCache.values}
-     *
-     * Return a generator yielding the values in the cache,
-     * in order from least recently used to most recently used.
-     */
-    *rvalues() {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            if (v !== undefined &&
-                !this.#isBackgroundFetch(this.#valList[i])) {
-                yield this.#valList[i];
-            }
-        }
-    }
-    /**
-     * Iterating over the cache itself yields the same results as
-     * {@link LRUCache.entries}
-     */
-    [Symbol.iterator]() {
-        return this.entries();
-    }
-    /**
-     * A String value that is used in the creation of the default string
-     * description of an object. Called by the built-in method
-     * `Object.prototype.toString`.
-     */
-    [Symbol.toStringTag] = 'LRUCache';
-    /**
-     * Find a value for which the supplied fn method returns a truthy value,
-     * similar to `Array.find()`. fn is called as `fn(value, key, cache)`.
-     */
-    find(fn, getOptions = {}) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            if (fn(value, this.#keyList[i], this)) {
-                return this.get(this.#keyList[i], getOptions);
-            }
-        }
-    }
-    /**
-     * Call the supplied function on each item in the cache, in order from most
-     * recently used to least recently used.
-     *
-     * `fn` is called as `fn(value, key, cache)`.
-     *
-     * If `thisp` is provided, function will be called in the `this`-context of
-     * the provided object, or the cache if no `thisp` object is provided.
-     *
-     * Does not update age or recenty of use, or iterate over stale values.
-     */
-    forEach(fn, thisp = this) {
-        for (const i of this.#indexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * The same as {@link LRUCache.forEach} but items are iterated over in
-     * reverse order.  (ie, less recently used items are iterated over first.)
-     */
-    rforEach(fn, thisp = this) {
-        for (const i of this.#rindexes()) {
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined)
-                continue;
-            fn.call(thisp, value, this.#keyList[i], this);
-        }
-    }
-    /**
-     * Delete any stale entries. Returns true if anything was removed,
-     * false otherwise.
-     */
-    purgeStale() {
-        let deleted = false;
-        for (const i of this.#rindexes({ allowStale: true })) {
-            if (this.#isStale(i)) {
-                this.#delete(this.#keyList[i], 'expire');
-                deleted = true;
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Get the extended info about a given entry, to get its value, size, and
-     * TTL info simultaneously. Returns `undefined` if the key is not present.
-     *
-     * Unlike {@link LRUCache#dump}, which is designed to be portable and survive
-     * serialization, the `start` value is always the current timestamp, and the
-     * `ttl` is a calculated remaining time to live (negative if expired).
-     *
-     * Always returns stale values, if their info is found in the cache, so be
-     * sure to check for expirations (ie, a negative {@link LRUCache.Entry#ttl})
-     * if relevant.
-     */
-    info(key) {
-        const i = this.#keyMap.get(key);
-        if (i === undefined)
-            return undefined;
-        const v = this.#valList[i];
-        const value = this.#isBackgroundFetch(v)
-            ? v.__staleWhileFetching
-            : v;
-        if (value === undefined)
-            return undefined;
-        const entry = { value };
-        if (this.#ttls && this.#starts) {
-            const ttl = this.#ttls[i];
-            const start = this.#starts[i];
-            if (ttl && start) {
-                const remain = ttl - (perf.now() - start);
-                entry.ttl = remain;
-                entry.start = Date.now();
-            }
-        }
-        if (this.#sizes) {
-            entry.size = this.#sizes[i];
-        }
-        return entry;
-    }
-    /**
-     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
-     * passed to {@link LRLUCache#load}.
-     *
-     * The `start` fields are calculated relative to a portable `Date.now()`
-     * timestamp, even if `performance.now()` is available.
-     *
-     * Stale entries are always included in the `dump`, even if
-     * {@link LRUCache.OptionsBase.allowStale} is false.
-     *
-     * Note: this returns an actual array, not a generator, so it can be more
-     * easily passed around.
-     */
-    dump() {
-        const arr = [];
-        for (const i of this.#indexes({ allowStale: true })) {
-            const key = this.#keyList[i];
-            const v = this.#valList[i];
-            const value = this.#isBackgroundFetch(v)
-                ? v.__staleWhileFetching
-                : v;
-            if (value === undefined || key === undefined)
-                continue;
-            const entry = { value };
-            if (this.#ttls && this.#starts) {
-                entry.ttl = this.#ttls[i];
-                // always dump the start relative to a portable timestamp
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = perf.now() - this.#starts[i];
-                entry.start = Math.floor(Date.now() - age);
-            }
-            if (this.#sizes) {
-                entry.size = this.#sizes[i];
-            }
-            arr.unshift([key, entry]);
-        }
-        return arr;
-    }
-    /**
-     * Reset the cache and load in the items in entries in the order listed.
-     *
-     * The shape of the resulting cache may be different if the same options are
-     * not used in both caches.
-     *
-     * The `start` fields are assumed to be calculated relative to a portable
-     * `Date.now()` timestamp, even if `performance.now()` is available.
-     */
-    load(arr) {
-        this.clear();
-        for (const [key, entry] of arr) {
-            if (entry.start) {
-                // entry.start is a portable timestamp, but we may be using
-                // node's performance.now(), so calculate the offset, so that
-                // we get the intended remaining TTL, no matter how long it's
-                // been on ice.
-                //
-                // it's ok for this to be a bit slow, it's a rare operation.
-                const age = Date.now() - entry.start;
-                entry.start = perf.now() - age;
-            }
-            this.set(key, entry.value, entry);
-        }
-    }
-    /**
-     * Add a value to the cache.
-     *
-     * Note: if `undefined` is specified as a value, this is an alias for
-     * {@link LRUCache#delete}
-     *
-     * Fields on the {@link LRUCache.SetOptions} options param will override
-     * their corresponding values in the constructor options for the scope
-     * of this single `set()` operation.
-     *
-     * If `start` is provided, then that will set the effective start
-     * time for the TTL calculation. Note that this must be a previous
-     * value of `performance.now()` if supported, or a previous value of
-     * `Date.now()` if not.
-     *
-     * Options object may also include `size`, which will prevent
-     * calling the `sizeCalculation` function and just use the specified
-     * number if it is a positive integer, and `noDisposeOnSet` which
-     * will prevent calling a `dispose` function in the case of
-     * overwrites.
-     *
-     * If the `size` (or return value of `sizeCalculation`) for a given
-     * entry is greater than `maxEntrySize`, then the item will not be
-     * added to the cache.
-     *
-     * Will update the recency of the entry.
-     *
-     * If the value is `undefined`, then this is an alias for
-     * `cache.delete(key)`. `undefined` is never stored in the cache.
-     */
-    set(k, v, setOptions = {}) {
-        if (v === undefined) {
-            this.delete(k);
-            return this;
-        }
-        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
-        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
-        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
-        // if the item doesn't fit, don't do anything
-        // NB: maxEntrySize set to maxSize by default
-        if (this.maxEntrySize && size > this.maxEntrySize) {
-            if (status) {
-                status.set = 'miss';
-                status.maxEntrySizeExceeded = true;
-            }
-            // have to delete, in case something is there already.
-            this.#delete(k, 'set');
-            return this;
-        }
-        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
-        if (index === undefined) {
-            // addition
-            index = (this.#size === 0
-                ? this.#tail
-                : this.#free.length !== 0
-                    ? this.#free.pop()
-                    : this.#size === this.#max
-                        ? this.#evict(false)
-                        : this.#size);
-            this.#keyList[index] = k;
-            this.#valList[index] = v;
-            this.#keyMap.set(k, index);
-            this.#next[this.#tail] = index;
-            this.#prev[index] = this.#tail;
-            this.#tail = index;
-            this.#size++;
-            this.#addItemSize(index, size, status);
-            if (status)
-                status.set = 'add';
-            noUpdateTTL = false;
-        }
-        else {
-            // update
-            this.#moveToTail(index);
-            const oldVal = this.#valList[index];
-            if (v !== oldVal) {
-                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
-                    oldVal.__abortController.abort(new Error('replaced'));
-                    const { __staleWhileFetching: s } = oldVal;
-                    if (s !== undefined && !noDisposeOnSet) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(s, k, 'set');
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([s, k, 'set']);
-                        }
-                    }
-                }
-                else if (!noDisposeOnSet) {
-                    if (this.#hasDispose) {
-                        this.#dispose?.(oldVal, k, 'set');
-                    }
-                    if (this.#hasDisposeAfter) {
-                        this.#disposed?.push([oldVal, k, 'set']);
-                    }
-                }
-                this.#removeItemSize(index);
-                this.#addItemSize(index, size, status);
-                this.#valList[index] = v;
-                if (status) {
-                    status.set = 'replace';
-                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
-                        ? oldVal.__staleWhileFetching
-                        : oldVal;
-                    if (oldValue !== undefined)
-                        status.oldValue = oldValue;
-                }
-            }
-            else if (status) {
-                status.set = 'update';
-            }
-        }
-        if (ttl !== 0 && !this.#ttls) {
-            this.#initializeTTLTracking();
-        }
-        if (this.#ttls) {
-            if (!noUpdateTTL) {
-                this.#setItemTTL(index, ttl, start);
-            }
-            if (status)
-                this.#statusTTL(status, index);
-        }
-        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return this;
-    }
-    /**
-     * Evict the least recently used item, returning its value or
-     * `undefined` if cache is empty.
-     */
-    pop() {
-        try {
-            while (this.#size) {
-                const val = this.#valList[this.#head];
-                this.#evict(true);
-                if (this.#isBackgroundFetch(val)) {
-                    if (val.__staleWhileFetching) {
-                        return val.__staleWhileFetching;
-                    }
-                }
-                else if (val !== undefined) {
-                    return val;
-                }
-            }
-        }
-        finally {
-            if (this.#hasDisposeAfter && this.#disposed) {
-                const dt = this.#disposed;
-                let task;
-                while ((task = dt?.shift())) {
-                    this.#disposeAfter?.(...task);
-                }
-            }
-        }
-    }
-    #evict(free) {
-        const head = this.#head;
-        const k = this.#keyList[head];
-        const v = this.#valList[head];
-        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
-            v.__abortController.abort(new Error('evicted'));
-        }
-        else if (this.#hasDispose || this.#hasDisposeAfter) {
-            if (this.#hasDispose) {
-                this.#dispose?.(v, k, 'evict');
-            }
-            if (this.#hasDisposeAfter) {
-                this.#disposed?.push([v, k, 'evict']);
-            }
-        }
-        this.#removeItemSize(head);
-        // if we aren't about to use the index, then null these out
-        if (free) {
-            this.#keyList[head] = undefined;
-            this.#valList[head] = undefined;
-            this.#free.push(head);
-        }
-        if (this.#size === 1) {
-            this.#head = this.#tail = 0;
-            this.#free.length = 0;
-        }
-        else {
-            this.#head = this.#next[head];
-        }
-        this.#keyMap.delete(k);
-        this.#size--;
-        return head;
-    }
-    /**
-     * Check if a key is in the cache, without updating the recency of use.
-     * Will return false if the item is stale, even though it is technically
-     * in the cache.
-     *
-     * Check if a key is in the cache, without updating the recency of
-     * use. Age is updated if {@link LRUCache.OptionsBase.updateAgeOnHas} is set
-     * to `true` in either the options or the constructor.
-     *
-     * Will return `false` if the item is stale, even though it is technically in
-     * the cache. The difference can be determined (if it matters) by using a
-     * `status` argument, and inspecting the `has` field.
-     *
-     * Will not update item age unless
-     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
-     */
-    has(k, hasOptions = {}) {
-        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v) &&
-                v.__staleWhileFetching === undefined) {
-                return false;
-            }
-            if (!this.#isStale(index)) {
-                if (updateAgeOnHas) {
-                    this.#updateItemAge(index);
-                }
-                if (status) {
-                    status.has = 'hit';
-                    this.#statusTTL(status, index);
-                }
-                return true;
-            }
-            else if (status) {
-                status.has = 'stale';
-                this.#statusTTL(status, index);
-            }
-        }
-        else if (status) {
-            status.has = 'miss';
-        }
-        return false;
-    }
-    /**
-     * Like {@link LRUCache#get} but doesn't update recency or delete stale
-     * items.
-     *
-     * Returns `undefined` if the item is stale, unless
-     * {@link LRUCache.OptionsBase.allowStale} is set.
-     */
-    peek(k, peekOptions = {}) {
-        const { allowStale = this.allowStale } = peekOptions;
-        const index = this.#keyMap.get(k);
-        if (index === undefined ||
-            (!allowStale && this.#isStale(index))) {
-            return;
-        }
-        const v = this.#valList[index];
-        // either stale and allowed, or forcing a refresh of non-stale value
-        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
-    }
-    #backgroundFetch(k, index, options, context) {
-        const v = index === undefined ? undefined : this.#valList[index];
-        if (this.#isBackgroundFetch(v)) {
-            return v;
-        }
-        const ac = new AC();
-        const { signal } = options;
-        // when/if our AC signals, then stop listening to theirs.
-        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
-            signal: ac.signal,
-        });
-        const fetchOpts = {
-            signal: ac.signal,
-            options,
-            context,
-        };
-        const cb = (v, updateCache = false) => {
-            const { aborted } = ac.signal;
-            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
-            if (options.status) {
-                if (aborted && !updateCache) {
-                    options.status.fetchAborted = true;
-                    options.status.fetchError = ac.signal.reason;
-                    if (ignoreAbort)
-                        options.status.fetchAbortIgnored = true;
-                }
-                else {
-                    options.status.fetchResolved = true;
-                }
-            }
-            if (aborted && !ignoreAbort && !updateCache) {
-                return fetchFail(ac.signal.reason);
-            }
-            // either we didn't abort, and are still here, or we did, and ignored
-            const bf = p;
-            if (this.#valList[index] === p) {
-                if (v === undefined) {
-                    if (bf.__staleWhileFetching) {
-                        this.#valList[index] = bf.__staleWhileFetching;
-                    }
-                    else {
-                        this.#delete(k, 'fetch');
-                    }
-                }
-                else {
-                    if (options.status)
-                        options.status.fetchUpdated = true;
-                    this.set(k, v, fetchOpts.options);
-                }
-            }
-            return v;
-        };
-        const eb = (er) => {
-            if (options.status) {
-                options.status.fetchRejected = true;
-                options.status.fetchError = er;
-            }
-            return fetchFail(er);
-        };
-        const fetchFail = (er) => {
-            const { aborted } = ac.signal;
-            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
-            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
-            const noDelete = allowStale || options.noDeleteOnFetchRejection;
-            const bf = p;
-            if (this.#valList[index] === p) {
-                // if we allow stale on fetch rejections, then we need to ensure that
-                // the stale value is not removed from the cache when the fetch fails.
-                const del = !noDelete || bf.__staleWhileFetching === undefined;
-                if (del) {
-                    this.#delete(k, 'fetch');
-                }
-                else if (!allowStaleAborted) {
-                    // still replace the *promise* with the stale value,
-                    // since we are done with the promise at this point.
-                    // leave it untouched if we're still waiting for an
-                    // aborted background fetch that hasn't yet returned.
-                    this.#valList[index] = bf.__staleWhileFetching;
-                }
-            }
-            if (allowStale) {
-                if (options.status && bf.__staleWhileFetching !== undefined) {
-                    options.status.returnedStale = true;
-                }
-                return bf.__staleWhileFetching;
-            }
-            else if (bf.__returned === bf) {
-                throw er;
-            }
-        };
-        const pcall = (res, rej) => {
-            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
-            if (fmp && fmp instanceof Promise) {
-                fmp.then(v => res(v === undefined ? undefined : v), rej);
-            }
-            // ignored, we go until we finish, regardless.
-            // defer check until we are actually aborting,
-            // so fetchMethod can override.
-            ac.signal.addEventListener('abort', () => {
-                if (!options.ignoreFetchAbort ||
-                    options.allowStaleOnFetchAbort) {
-                    res(undefined);
-                    // when it eventually resolves, update the cache.
-                    if (options.allowStaleOnFetchAbort) {
-                        res = v => cb(v, true);
-                    }
-                }
-            });
-        };
-        if (options.status)
-            options.status.fetchDispatched = true;
-        const p = new Promise(pcall).then(cb, eb);
-        const bf = Object.assign(p, {
-            __abortController: ac,
-            __staleWhileFetching: v,
-            __returned: undefined,
-        });
-        if (index === undefined) {
-            // internal, don't expose status.
-            this.set(k, bf, { ...fetchOpts.options, status: undefined });
-            index = this.#keyMap.get(k);
-        }
-        else {
-            this.#valList[index] = bf;
-        }
-        return bf;
-    }
-    #isBackgroundFetch(p) {
-        if (!this.#hasFetchMethod)
-            return false;
-        const b = p;
-        return (!!b &&
-            b instanceof Promise &&
-            b.hasOwnProperty('__staleWhileFetching') &&
-            b.__abortController instanceof AC);
-    }
-    async fetch(k, fetchOptions = {}) {
-        const { 
-        // get options
-        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, 
-        // set options
-        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL, 
-        // fetch exclusive options
-        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
-        if (!this.#hasFetchMethod) {
-            if (status)
-                status.fetch = 'get';
-            return this.get(k, {
-                allowStale,
-                updateAgeOnGet,
-                noDeleteOnStaleGet,
-                status,
-            });
-        }
-        const options = {
-            allowStale,
-            updateAgeOnGet,
-            noDeleteOnStaleGet,
-            ttl,
-            noDisposeOnSet,
-            size,
-            sizeCalculation,
-            noUpdateTTL,
-            noDeleteOnFetchRejection,
-            allowStaleOnFetchRejection,
-            allowStaleOnFetchAbort,
-            ignoreFetchAbort,
-            status,
-            signal,
-        };
-        let index = this.#keyMap.get(k);
-        if (index === undefined) {
-            if (status)
-                status.fetch = 'miss';
-            const p = this.#backgroundFetch(k, index, options, context);
-            return (p.__returned = p);
-        }
-        else {
-            // in cache, maybe already fetching
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                const stale = allowStale && v.__staleWhileFetching !== undefined;
-                if (status) {
-                    status.fetch = 'inflight';
-                    if (stale)
-                        status.returnedStale = true;
-                }
-                return stale ? v.__staleWhileFetching : (v.__returned = v);
-            }
-            // if we force a refresh, that means do NOT serve the cached value,
-            // unless we are already in the process of refreshing the cache.
-            const isStale = this.#isStale(index);
-            if (!forceRefresh && !isStale) {
-                if (status)
-                    status.fetch = 'hit';
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                if (status)
-                    this.#statusTTL(status, index);
-                return v;
-            }
-            // ok, it is stale or a forced refresh, and not already fetching.
-            // refresh the cache.
-            const p = this.#backgroundFetch(k, index, options, context);
-            const hasStale = p.__staleWhileFetching !== undefined;
-            const staleVal = hasStale && allowStale;
-            if (status) {
-                status.fetch = isStale ? 'stale' : 'refresh';
-                if (staleVal && isStale)
-                    status.returnedStale = true;
-            }
-            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
-        }
-    }
-    async forceFetch(k, fetchOptions = {}) {
-        const v = await this.fetch(k, fetchOptions);
-        if (v === undefined)
-            throw new Error('fetch() returned undefined');
-        return v;
-    }
-    memo(k, memoOptions = {}) {
-        const memoMethod = this.#memoMethod;
-        if (!memoMethod) {
-            throw new Error('no memoMethod provided to constructor');
-        }
-        const { context, forceRefresh, ...options } = memoOptions;
-        const v = this.get(k, options);
-        if (!forceRefresh && v !== undefined)
-            return v;
-        const vv = memoMethod(k, v, {
-            options,
-            context,
-        });
-        this.set(k, vv, options);
-        return vv;
-    }
-    /**
-     * Return a value from the cache. Will update the recency of the cache
-     * entry found.
-     *
-     * If the key is not found, get() will return `undefined`.
-     */
-    get(k, getOptions = {}) {
-        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
-        const index = this.#keyMap.get(k);
-        if (index !== undefined) {
-            const value = this.#valList[index];
-            const fetching = this.#isBackgroundFetch(value);
-            if (status)
-                this.#statusTTL(status, index);
-            if (this.#isStale(index)) {
-                if (status)
-                    status.get = 'stale';
-                // delete only if not an in-flight background fetch
-                if (!fetching) {
-                    if (!noDeleteOnStaleGet) {
-                        this.#delete(k, 'expire');
-                    }
-                    if (status && allowStale)
-                        status.returnedStale = true;
-                    return allowStale ? value : undefined;
-                }
-                else {
-                    if (status &&
-                        allowStale &&
-                        value.__staleWhileFetching !== undefined) {
-                        status.returnedStale = true;
-                    }
-                    return allowStale ? value.__staleWhileFetching : undefined;
-                }
-            }
-            else {
-                if (status)
-                    status.get = 'hit';
-                // if we're currently fetching it, we don't actually have it yet
-                // it's not stale, which means this isn't a staleWhileRefetching.
-                // If it's not stale, and fetching, AND has a __staleWhileFetching
-                // value, then that means the user fetched with {forceRefresh:true},
-                // so it's safe to return that value.
-                if (fetching) {
-                    return value.__staleWhileFetching;
-                }
-                this.#moveToTail(index);
-                if (updateAgeOnGet) {
-                    this.#updateItemAge(index);
-                }
-                return value;
-            }
-        }
-        else if (status) {
-            status.get = 'miss';
-        }
-    }
-    #connect(p, n) {
-        this.#prev[n] = p;
-        this.#next[p] = n;
-    }
-    #moveToTail(index) {
-        // if tail already, nothing to do
-        // if head, move head to next[index]
-        // else
-        //   move next[prev[index]] to next[index] (head has no prev)
-        //   move prev[next[index]] to prev[index]
-        // prev[index] = tail
-        // next[tail] = index
-        // tail = index
-        if (index !== this.#tail) {
-            if (index === this.#head) {
-                this.#head = this.#next[index];
-            }
-            else {
-                this.#connect(this.#prev[index], this.#next[index]);
-            }
-            this.#connect(this.#tail, index);
-            this.#tail = index;
-        }
-    }
-    /**
-     * Deletes a key out of the cache.
-     *
-     * Returns true if the key was deleted, false otherwise.
-     */
-    delete(k) {
-        return this.#delete(k, 'delete');
-    }
-    #delete(k, reason) {
-        let deleted = false;
-        if (this.#size !== 0) {
-            const index = this.#keyMap.get(k);
-            if (index !== undefined) {
-                deleted = true;
-                if (this.#size === 1) {
-                    this.#clear(reason);
-                }
-                else {
-                    this.#removeItemSize(index);
-                    const v = this.#valList[index];
-                    if (this.#isBackgroundFetch(v)) {
-                        v.__abortController.abort(new Error('deleted'));
-                    }
-                    else if (this.#hasDispose || this.#hasDisposeAfter) {
-                        if (this.#hasDispose) {
-                            this.#dispose?.(v, k, reason);
-                        }
-                        if (this.#hasDisposeAfter) {
-                            this.#disposed?.push([v, k, reason]);
-                        }
-                    }
-                    this.#keyMap.delete(k);
-                    this.#keyList[index] = undefined;
-                    this.#valList[index] = undefined;
-                    if (index === this.#tail) {
-                        this.#tail = this.#prev[index];
-                    }
-                    else if (index === this.#head) {
-                        this.#head = this.#next[index];
-                    }
-                    else {
-                        const pi = this.#prev[index];
-                        this.#next[pi] = this.#next[index];
-                        const ni = this.#next[index];
-                        this.#prev[ni] = this.#prev[index];
-                    }
-                    this.#size--;
-                    this.#free.push(index);
-                }
-            }
-        }
-        if (this.#hasDisposeAfter && this.#disposed?.length) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-        return deleted;
-    }
-    /**
-     * Clear the cache entirely, throwing away all values.
-     */
-    clear() {
-        return this.#clear('delete');
-    }
-    #clear(reason) {
-        for (const index of this.#rindexes({ allowStale: true })) {
-            const v = this.#valList[index];
-            if (this.#isBackgroundFetch(v)) {
-                v.__abortController.abort(new Error('deleted'));
-            }
-            else {
-                const k = this.#keyList[index];
-                if (this.#hasDispose) {
-                    this.#dispose?.(v, k, reason);
-                }
-                if (this.#hasDisposeAfter) {
-                    this.#disposed?.push([v, k, reason]);
-                }
-            }
-        }
-        this.#keyMap.clear();
-        this.#valList.fill(undefined);
-        this.#keyList.fill(undefined);
-        if (this.#ttls && this.#starts) {
-            this.#ttls.fill(0);
-            this.#starts.fill(0);
-        }
-        if (this.#sizes) {
-            this.#sizes.fill(0);
-        }
-        this.#head = 0;
-        this.#tail = 0;
-        this.#free.length = 0;
-        this.#calculatedSize = 0;
-        this.#size = 0;
-        if (this.#hasDisposeAfter && this.#disposed) {
-            const dt = this.#disposed;
-            let task;
-            while ((task = dt?.shift())) {
-                this.#disposeAfter?.(...task);
-            }
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js
deleted file mode 100644
index 4571d0254e27d..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/index.min.js
+++ /dev/null
@@ -1,2 +0,0 @@
-var G=(l,t,e)=>{if(!t.has(l))throw TypeError("Cannot "+e)};var I=(l,t,e)=>(G(l,t,"read from private field"),e?e.call(l):t.get(l)),j=(l,t,e)=>{if(t.has(l))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(l):t.set(l,e)},x=(l,t,e,i)=>(G(l,t,"write to private field"),i?i.call(l,e):t.set(l,e),e);var T=typeof performance=="object"&&performance&&typeof performance.now=="function"?performance:Date,P=new Set,M=typeof process=="object"&&process?process:{},H=(l,t,e,i)=>{typeof M.emitWarning=="function"?M.emitWarning(l,t,e,i):console.error(`[${e}] ${t}: ${l}`)},W=globalThis.AbortController,N=globalThis.AbortSignal;if(typeof W>"u"){N=class{onabort;_onabort=[];reason;aborted=!1;addEventListener(i,s){this._onabort.push(s)}},W=class{constructor(){t()}signal=new N;abort(i){if(!this.signal.aborted){this.signal.reason=i,this.signal.aborted=!0;for(let s of this.signal._onabort)s(i);this.signal.onabort?.(i)}}};let l=M.env?.LRU_CACHE_IGNORE_AC_WARNING!=="1",t=()=>{l&&(l=!1,H("AbortController is not defined. If using lru-cache in node 14, load an AbortController polyfill from the `node-abort-controller` package. A minimal polyfill is provided for use by LRUCache.fetch(), but it should not be relied upon in other contexts (eg, passing it to other APIs that use AbortController/AbortSignal might have undesirable effects). You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.","NO_ABORT_CONTROLLER","ENOTSUP",t))}}var V=l=>!P.has(l),Y=Symbol("type"),A=l=>l&&l===Math.floor(l)&&l>0&&isFinite(l),k=l=>A(l)?l<=Math.pow(2,8)?Uint8Array:l<=Math.pow(2,16)?Uint16Array:l<=Math.pow(2,32)?Uint32Array:l<=Number.MAX_SAFE_INTEGER?O:null:null,O=class extends Array{constructor(t){super(t),this.fill(0)}},z,E=class{heap;length;static create(t){let e=k(t);if(!e)return[];x(E,z,!0);let i=new E(t,e);return x(E,z,!1),i}constructor(t,e){if(!I(E,z))throw new TypeError("instantiate Stack using Stack.create(n)");this.heap=new e(t),this.length=0}push(t){this.heap[this.length++]=t}pop(){return this.heap[--this.length]}},R=E;z=new WeakMap,j(R,z,!1);var D=class{#g;#f;#p;#w;#R;#W;ttl;ttlResolution;ttlAutopurge;updateAgeOnGet;updateAgeOnHas;allowStale;noDisposeOnSet;noUpdateTTL;maxEntrySize;sizeCalculation;noDeleteOnFetchRejection;noDeleteOnStaleGet;allowStaleOnFetchAbort;allowStaleOnFetchRejection;ignoreFetchAbort;#n;#S;#s;#i;#t;#l;#c;#o;#h;#_;#r;#m;#b;#u;#y;#O;#a;static unsafeExposeInternals(t){return{starts:t.#b,ttls:t.#u,sizes:t.#m,keyMap:t.#s,keyList:t.#i,valList:t.#t,next:t.#l,prev:t.#c,get head(){return t.#o},get tail(){return t.#h},free:t.#_,isBackgroundFetch:e=>t.#e(e),backgroundFetch:(e,i,s,n)=>t.#x(e,i,s,n),moveToTail:e=>t.#C(e),indexes:e=>t.#A(e),rindexes:e=>t.#F(e),isStale:e=>t.#d(e)}}get max(){return this.#g}get maxSize(){return this.#f}get calculatedSize(){return this.#S}get size(){return this.#n}get fetchMethod(){return this.#R}get memoMethod(){return this.#W}get dispose(){return this.#p}get disposeAfter(){return this.#w}constructor(t){let{max:e=0,ttl:i,ttlResolution:s=1,ttlAutopurge:n,updateAgeOnGet:h,updateAgeOnHas:o,allowStale:r,dispose:g,disposeAfter:m,noDisposeOnSet:f,noUpdateTTL:u,maxSize:c=0,maxEntrySize:F=0,sizeCalculation:d,fetchMethod:S,memoMethod:a,noDeleteOnFetchRejection:w,noDeleteOnStaleGet:b,allowStaleOnFetchRejection:p,allowStaleOnFetchAbort:_,ignoreFetchAbort:v}=t;if(e!==0&&!A(e))throw new TypeError("max option must be a nonnegative integer");let y=e?k(e):Array;if(!y)throw new Error("invalid max value: 
"+e);if(this.#g=e,this.#f=c,this.maxEntrySize=F||this.#f,this.sizeCalculation=d,this.sizeCalculation){if(!this.#f&&!this.maxEntrySize)throw new TypeError("cannot set sizeCalculation without setting maxSize or maxEntrySize");if(typeof this.sizeCalculation!="function")throw new TypeError("sizeCalculation set to non-function")}if(a!==void 0&&typeof a!="function")throw new TypeError("memoMethod must be a function if defined");if(this.#W=a,S!==void 0&&typeof S!="function")throw new TypeError("fetchMethod must be a function if specified");if(this.#R=S,this.#O=!!S,this.#s=new Map,this.#i=new Array(e).fill(void 0),this.#t=new Array(e).fill(void 0),this.#l=new y(e),this.#c=new y(e),this.#o=0,this.#h=0,this.#_=R.create(e),this.#n=0,this.#S=0,typeof g=="function"&&(this.#p=g),typeof m=="function"?(this.#w=m,this.#r=[]):(this.#w=void 0,this.#r=void 0),this.#y=!!this.#p,this.#a=!!this.#w,this.noDisposeOnSet=!!f,this.noUpdateTTL=!!u,this.noDeleteOnFetchRejection=!!w,this.allowStaleOnFetchRejection=!!p,this.allowStaleOnFetchAbort=!!_,this.ignoreFetchAbort=!!v,this.maxEntrySize!==0){if(this.#f!==0&&!A(this.#f))throw new TypeError("maxSize must be a positive integer if specified");if(!A(this.maxEntrySize))throw new TypeError("maxEntrySize must be a positive integer if specified");this.#P()}if(this.allowStale=!!r,this.noDeleteOnStaleGet=!!b,this.updateAgeOnGet=!!h,this.updateAgeOnHas=!!o,this.ttlResolution=A(s)||s===0?s:1,this.ttlAutopurge=!!n,this.ttl=i||0,this.ttl){if(!A(this.ttl))throw new TypeError("ttl must be a positive integer if specified");this.#M()}if(this.#g===0&&this.ttl===0&&this.#f===0)throw new TypeError("At least one of max, maxSize, or ttl is required");if(!this.ttlAutopurge&&!this.#g&&!this.#f){let C="LRU_CACHE_UNBOUNDED";V(C)&&(P.add(C),H("TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.","UnboundedCacheWarning",C,D))}}getRemainingTTL(t){return this.#s.has(t)?1/0:0}#M(){let t=new O(this.#g),e=new O(this.#g);this.#u=t,this.#b=e,this.#U=(n,h,o=T.now())=>{if(e[n]=h!==0?o:0,t[n]=h,h!==0&&this.ttlAutopurge){let r=setTimeout(()=>{this.#d(n)&&this.#T(this.#i[n],"expire")},h+1);r.unref&&r.unref()}},this.#z=n=>{e[n]=t[n]!==0?T.now():0},this.#E=(n,h)=>{if(t[h]){let o=t[h],r=e[h];if(!o||!r)return;n.ttl=o,n.start=r,n.now=i||s();let g=n.now-r;n.remainingTTL=o-g}};let i=0,s=()=>{let n=T.now();if(this.ttlResolution>0){i=n;let h=setTimeout(()=>i=0,this.ttlResolution);h.unref&&h.unref()}return n};this.getRemainingTTL=n=>{let h=this.#s.get(n);if(h===void 0)return 0;let o=t[h],r=e[h];if(!o||!r)return 1/0;let g=(i||s())-r;return o-g},this.#d=n=>{let h=e[n],o=t[n];return!!o&&!!h&&(i||s())-h>o}}#z=()=>{};#E=()=>{};#U=()=>{};#d=()=>!1;#P(){let t=new O(this.#g);this.#S=0,this.#m=t,this.#v=e=>{this.#S-=t[e],t[e]=0},this.#G=(e,i,s,n)=>{if(this.#e(i))return 0;if(!A(s))if(n){if(typeof n!="function")throw new TypeError("sizeCalculation must be a function");if(s=n(i,e),!A(s))throw new TypeError("sizeCalculation return invalid (expect positive integer)")}else throw new TypeError("invalid size value (must be positive integer). 
When maxSize or maxEntrySize is used, sizeCalculation or size must be set.");return s},this.#D=(e,i,s)=>{if(t[e]=i,this.#f){let n=this.#f-t[e];for(;this.#S>n;)this.#L(!0)}this.#S+=t[e],s&&(s.entrySize=i,s.totalCalculatedSize=this.#S)}}#v=t=>{};#D=(t,e,i)=>{};#G=(t,e,i,s)=>{if(i||s)throw new TypeError("cannot set size without setting maxSize or maxEntrySize on cache");return 0};*#A({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#h;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#o));)e=this.#c[e]}*#F({allowStale:t=this.allowStale}={}){if(this.#n)for(let e=this.#o;!(!this.#I(e)||((t||!this.#d(e))&&(yield e),e===this.#h));)e=this.#l[e]}#I(t){return t!==void 0&&this.#s.get(this.#i[t])===t}*entries(){for(let t of this.#A())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*rentries(){for(let t of this.#F())this.#t[t]!==void 0&&this.#i[t]!==void 0&&!this.#e(this.#t[t])&&(yield[this.#i[t],this.#t[t]])}*keys(){for(let t of this.#A()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*rkeys(){for(let t of this.#F()){let e=this.#i[t];e!==void 0&&!this.#e(this.#t[t])&&(yield e)}}*values(){for(let t of this.#A())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}*rvalues(){for(let t of this.#F())this.#t[t]!==void 0&&!this.#e(this.#t[t])&&(yield this.#t[t])}[Symbol.iterator](){return this.entries()}[Symbol.toStringTag]="LRUCache";find(t,e={}){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;if(n!==void 0&&t(n,this.#i[i],this))return this.get(this.#i[i],e)}}forEach(t,e=this){for(let i of this.#A()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}rforEach(t,e=this){for(let i of this.#F()){let s=this.#t[i],n=this.#e(s)?s.__staleWhileFetching:s;n!==void 0&&t.call(e,n,this.#i[i],this)}}purgeStale(){let t=!1;for(let e of this.#F({allowStale:!0}))this.#d(e)&&(this.#T(this.#i[e],"expire"),t=!0);return t}info(t){let e=this.#s.get(t);if(e===void 0)return;let i=this.#t[e],s=this.#e(i)?i.__staleWhileFetching:i;if(s===void 0)return;let n={value:s};if(this.#u&&this.#b){let h=this.#u[e],o=this.#b[e];if(h&&o){let r=h-(T.now()-o);n.ttl=r,n.start=Date.now()}}return this.#m&&(n.size=this.#m[e]),n}dump(){let t=[];for(let e of this.#A({allowStale:!0})){let i=this.#i[e],s=this.#t[e],n=this.#e(s)?s.__staleWhileFetching:s;if(n===void 0||i===void 0)continue;let h={value:n};if(this.#u&&this.#b){h.ttl=this.#u[e];let o=T.now()-this.#b[e];h.start=Math.floor(Date.now()-o)}this.#m&&(h.size=this.#m[e]),t.unshift([i,h])}return t}load(t){this.clear();for(let[e,i]of t){if(i.start){let s=Date.now()-i.start;i.start=T.now()-s}this.set(e,i.value,i)}}set(t,e,i={}){if(e===void 0)return this.delete(t),this;let{ttl:s=this.ttl,start:n,noDisposeOnSet:h=this.noDisposeOnSet,sizeCalculation:o=this.sizeCalculation,status:r}=i,{noUpdateTTL:g=this.noUpdateTTL}=i,m=this.#G(t,e,i.size||0,o);if(this.maxEntrySize&&m>this.maxEntrySize)return r&&(r.set="miss",r.maxEntrySizeExceeded=!0),this.#T(t,"set"),this;let f=this.#n===0?void 0:this.#s.get(t);if(f===void 0)f=this.#n===0?this.#h:this.#_.length!==0?this.#_.pop():this.#n===this.#g?this.#L(!1):this.#n,this.#i[f]=t,this.#t[f]=e,this.#s.set(t,f),this.#l[this.#h]=f,this.#c[f]=this.#h,this.#h=f,this.#n++,this.#D(f,m,r),r&&(r.set="add"),g=!1;else{this.#C(f);let u=this.#t[f];if(e!==u){if(this.#O&&this.#e(u)){u.__abortController.abort(new Error("replaced"));let{__staleWhileFetching:c}=u;c!==void 
0&&!h&&(this.#y&&this.#p?.(c,t,"set"),this.#a&&this.#r?.push([c,t,"set"]))}else h||(this.#y&&this.#p?.(u,t,"set"),this.#a&&this.#r?.push([u,t,"set"]));if(this.#v(f),this.#D(f,m,r),this.#t[f]=e,r){r.set="replace";let c=u&&this.#e(u)?u.__staleWhileFetching:u;c!==void 0&&(r.oldValue=c)}}else r&&(r.set="update")}if(s!==0&&!this.#u&&this.#M(),this.#u&&(g||this.#U(f,s,n),r&&this.#E(r,f)),!h&&this.#a&&this.#r){let u=this.#r,c;for(;c=u?.shift();)this.#w?.(...c)}return this}pop(){try{for(;this.#n;){let t=this.#t[this.#o];if(this.#L(!0),this.#e(t)){if(t.__staleWhileFetching)return t.__staleWhileFetching}else if(t!==void 0)return t}}finally{if(this.#a&&this.#r){let t=this.#r,e;for(;e=t?.shift();)this.#w?.(...e)}}}#L(t){let e=this.#o,i=this.#i[e],s=this.#t[e];return this.#O&&this.#e(s)?s.__abortController.abort(new Error("evicted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(s,i,"evict"),this.#a&&this.#r?.push([s,i,"evict"])),this.#v(e),t&&(this.#i[e]=void 0,this.#t[e]=void 0,this.#_.push(e)),this.#n===1?(this.#o=this.#h=0,this.#_.length=0):this.#o=this.#l[e],this.#s.delete(i),this.#n--,e}has(t,e={}){let{updateAgeOnHas:i=this.updateAgeOnHas,status:s}=e,n=this.#s.get(t);if(n!==void 0){let h=this.#t[n];if(this.#e(h)&&h.__staleWhileFetching===void 0)return!1;if(this.#d(n))s&&(s.has="stale",this.#E(s,n));else return i&&this.#z(n),s&&(s.has="hit",this.#E(s,n)),!0}else s&&(s.has="miss");return!1}peek(t,e={}){let{allowStale:i=this.allowStale}=e,s=this.#s.get(t);if(s===void 0||!i&&this.#d(s))return;let n=this.#t[s];return this.#e(n)?n.__staleWhileFetching:n}#x(t,e,i,s){let n=e===void 0?void 0:this.#t[e];if(this.#e(n))return n;let h=new W,{signal:o}=i;o?.addEventListener("abort",()=>h.abort(o.reason),{signal:h.signal});let r={signal:h.signal,options:i,context:s},g=(d,S=!1)=>{let{aborted:a}=h.signal,w=i.ignoreFetchAbort&&d!==void 0;if(i.status&&(a&&!S?(i.status.fetchAborted=!0,i.status.fetchError=h.signal.reason,w&&(i.status.fetchAbortIgnored=!0)):i.status.fetchResolved=!0),a&&!w&&!S)return f(h.signal.reason);let b=c;return this.#t[e]===c&&(d===void 0?b.__staleWhileFetching?this.#t[e]=b.__staleWhileFetching:this.#T(t,"fetch"):(i.status&&(i.status.fetchUpdated=!0),this.set(t,d,r.options))),d},m=d=>(i.status&&(i.status.fetchRejected=!0,i.status.fetchError=d),f(d)),f=d=>{let{aborted:S}=h.signal,a=S&&i.allowStaleOnFetchAbort,w=a||i.allowStaleOnFetchRejection,b=w||i.noDeleteOnFetchRejection,p=c;if(this.#t[e]===c&&(!b||p.__staleWhileFetching===void 0?this.#T(t,"fetch"):a||(this.#t[e]=p.__staleWhileFetching)),w)return i.status&&p.__staleWhileFetching!==void 0&&(i.status.returnedStale=!0),p.__staleWhileFetching;if(p.__returned===p)throw d},u=(d,S)=>{let a=this.#R?.(t,n,r);a&&a instanceof Promise&&a.then(w=>d(w===void 0?void 0:w),S),h.signal.addEventListener("abort",()=>{(!i.ignoreFetchAbort||i.allowStaleOnFetchAbort)&&(d(void 0),i.allowStaleOnFetchAbort&&(d=w=>g(w,!0)))})};i.status&&(i.status.fetchDispatched=!0);let c=new Promise(u).then(g,m),F=Object.assign(c,{__abortController:h,__staleWhileFetching:n,__returned:void 0});return e===void 0?(this.set(t,F,{...r.options,status:void 0}),e=this.#s.get(t)):this.#t[e]=F,F}#e(t){if(!this.#O)return!1;let e=t;return!!e&&e instanceof Promise&&e.hasOwnProperty("__staleWhileFetching")&&e.__abortController instanceof W}async 
fetch(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,ttl:h=this.ttl,noDisposeOnSet:o=this.noDisposeOnSet,size:r=0,sizeCalculation:g=this.sizeCalculation,noUpdateTTL:m=this.noUpdateTTL,noDeleteOnFetchRejection:f=this.noDeleteOnFetchRejection,allowStaleOnFetchRejection:u=this.allowStaleOnFetchRejection,ignoreFetchAbort:c=this.ignoreFetchAbort,allowStaleOnFetchAbort:F=this.allowStaleOnFetchAbort,context:d,forceRefresh:S=!1,status:a,signal:w}=e;if(!this.#O)return a&&(a.fetch="get"),this.get(t,{allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,status:a});let b={allowStale:i,updateAgeOnGet:s,noDeleteOnStaleGet:n,ttl:h,noDisposeOnSet:o,size:r,sizeCalculation:g,noUpdateTTL:m,noDeleteOnFetchRejection:f,allowStaleOnFetchRejection:u,allowStaleOnFetchAbort:F,ignoreFetchAbort:c,status:a,signal:w},p=this.#s.get(t);if(p===void 0){a&&(a.fetch="miss");let _=this.#x(t,p,b,d);return _.__returned=_}else{let _=this.#t[p];if(this.#e(_)){let U=i&&_.__staleWhileFetching!==void 0;return a&&(a.fetch="inflight",U&&(a.returnedStale=!0)),U?_.__staleWhileFetching:_.__returned=_}let v=this.#d(p);if(!S&&!v)return a&&(a.fetch="hit"),this.#C(p),s&&this.#z(p),a&&this.#E(a,p),_;let y=this.#x(t,p,b,d),L=y.__staleWhileFetching!==void 0&&i;return a&&(a.fetch=v?"stale":"refresh",L&&v&&(a.returnedStale=!0)),L?y.__staleWhileFetching:y.__returned=y}}async forceFetch(t,e={}){let i=await this.fetch(t,e);if(i===void 0)throw new Error("fetch() returned undefined");return i}memo(t,e={}){let i=this.#W;if(!i)throw new Error("no memoMethod provided to constructor");let{context:s,forceRefresh:n,...h}=e,o=this.get(t,h);if(!n&&o!==void 0)return o;let r=i(t,o,{options:h,context:s});return this.set(t,r,h),r}get(t,e={}){let{allowStale:i=this.allowStale,updateAgeOnGet:s=this.updateAgeOnGet,noDeleteOnStaleGet:n=this.noDeleteOnStaleGet,status:h}=e,o=this.#s.get(t);if(o!==void 0){let r=this.#t[o],g=this.#e(r);return h&&this.#E(h,o),this.#d(o)?(h&&(h.get="stale"),g?(h&&i&&r.__staleWhileFetching!==void 0&&(h.returnedStale=!0),i?r.__staleWhileFetching:void 0):(n||this.#T(t,"expire"),h&&i&&(h.returnedStale=!0),i?r:void 0)):(h&&(h.get="hit"),g?r.__staleWhileFetching:(this.#C(o),s&&this.#z(o),r))}else h&&(h.get="miss")}#j(t,e){this.#c[e]=t,this.#l[t]=e}#C(t){t!==this.#h&&(t===this.#o?this.#o=this.#l[t]:this.#j(this.#c[t],this.#l[t]),this.#j(this.#h,t),this.#h=t)}delete(t){return this.#T(t,"delete")}#T(t,e){let i=!1;if(this.#n!==0){let s=this.#s.get(t);if(s!==void 0)if(i=!0,this.#n===1)this.#N(e);else{this.#v(s);let n=this.#t[s];if(this.#e(n)?n.__abortController.abort(new Error("deleted")):(this.#y||this.#a)&&(this.#y&&this.#p?.(n,t,e),this.#a&&this.#r?.push([n,t,e])),this.#s.delete(t),this.#i[s]=void 0,this.#t[s]=void 0,s===this.#h)this.#h=this.#c[s];else if(s===this.#o)this.#o=this.#l[s];else{let h=this.#c[s];this.#l[h]=this.#l[s];let o=this.#l[s];this.#c[o]=this.#c[s]}this.#n--,this.#_.push(s)}}if(this.#a&&this.#r?.length){let s=this.#r,n;for(;n=s?.shift();)this.#w?.(...n)}return i}clear(){return this.#N("delete")}#N(t){for(let e of this.#F({allowStale:!0})){let i=this.#t[e];if(this.#e(i))i.__abortController.abort(new Error("deleted"));else{let s=this.#i[e];this.#y&&this.#p?.(i,s,t),this.#a&&this.#r?.push([i,s,t])}}if(this.#s.clear(),this.#t.fill(void 0),this.#i.fill(void 0),this.#u&&this.#b&&(this.#u.fill(0),this.#b.fill(0)),this.#m&&this.#m.fill(0),this.#o=0,this.#h=0,this.#_.length=0,this.#S=0,this.#n=0,this.#a&&this.#r){let 
e=this.#r,i;for(;i=e?.shift();)this.#w?.(...i)}}};export{D as LRUCache};
-//# sourceMappingURL=index.min.js.map
diff --git a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/package.json b/node_modules/path-scurry/node_modules/lru-cache/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/path-scurry/node_modules/lru-cache/package.json b/node_modules/path-scurry/node_modules/lru-cache/package.json
deleted file mode 100644
index f3cd4c0cc53f7..0000000000000
--- a/node_modules/path-scurry/node_modules/lru-cache/package.json
+++ /dev/null
@@ -1,116 +0,0 @@
-{
-  "name": "lru-cache",
-  "publishConfig": {
-    "tag": "legacy-v10"
-  },
-  "description": "A cache object that deletes the least-recently-used items.",
-  "version": "10.4.3",
-  "author": "Isaac Z. Schlueter ",
-  "keywords": [
-    "mru",
-    "lru",
-    "cache"
-  ],
-  "sideEffects": false,
-  "scripts": {
-    "build": "npm run prepare",
-    "prepare": "tshy && bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write .",
-    "typedoc": "typedoc --tsconfig ./.tshy/esm.json ./src/*.ts",
-    "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh",
-    "prebenchmark": "npm run prepare",
-    "benchmark": "make -C benchmark",
-    "preprofile": "npm run prepare",
-    "profile": "make -C benchmark profile"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "tshy": {
-    "exports": {
-      ".": "./src/index.ts",
-      "./min": {
-        "import": {
-          "types": "./dist/esm/index.d.ts",
-          "default": "./dist/esm/index.min.js"
-        },
-        "require": {
-          "types": "./dist/commonjs/index.d.ts",
-          "default": "./dist/commonjs/index.min.js"
-        }
-      }
-    }
-  },
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/node-lru-cache.git"
-  },
-  "devDependencies": {
-    "@types/node": "^20.2.5",
-    "@types/tap": "^15.0.6",
-    "benchmark": "^2.1.4",
-    "esbuild": "^0.17.11",
-    "eslint-config-prettier": "^8.5.0",
-    "marked": "^4.2.12",
-    "mkdirp": "^2.1.5",
-    "prettier": "^2.6.2",
-    "tap": "^20.0.3",
-    "tshy": "^2.0.0",
-    "tslib": "^2.4.0",
-    "typedoc": "^0.25.3",
-    "typescript": "^5.2.2"
-  },
-  "license": "ISC",
-  "files": [
-    "dist"
-  ],
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tap": {
-    "node-arg": [
-      "--expose-gc"
-    ],
-    "plugin": [
-      "@tapjs/clock"
-    ]
-  },
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./min": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.min.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.min.js"
-      }
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js"
-}
diff --git a/node_modules/path-scurry/package.json b/node_modules/path-scurry/package.json
index e1766157894c8..c3cb39dced545 100644
--- a/node_modules/path-scurry/package.json
+++ b/node_modules/path-scurry/package.json
@@ -1,6 +1,6 @@
 {
   "name": "path-scurry",
-  "version": "1.11.1",
+  "version": "2.0.0",
   "description": "walk paths fast and efficiently",
   "author": "Isaac Z. Schlueter  (https://blog.izs.me)",
   "main": "./dist/commonjs/index.js",
@@ -31,7 +31,7 @@
     "presnap": "npm run prepare",
     "test": "tap",
     "snap": "tap",
-    "format": "prettier --write . --loglevel warn",
+    "format": "prettier --write . --log-level warn",
     "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts",
     "bench": "bash ./scripts/bench.sh"
   },
@@ -48,24 +48,22 @@
     "endOfLine": "lf"
   },
   "devDependencies": {
-    "@nodelib/fs.walk": "^1.2.8",
-    "@types/node": "^20.12.11",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
+    "@nodelib/fs.walk": "^2.0.0",
+    "@types/node": "^20.14.10",
     "mkdirp": "^3.0.0",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.1",
-    "tap": "^18.7.2",
+    "prettier": "^3.3.2",
+    "rimraf": "^5.0.8",
+    "tap": "^20.0.3",
     "ts-node": "^10.9.2",
-    "tshy": "^1.14.0",
-    "typedoc": "^0.25.12",
-    "typescript": "^5.4.3"
+    "tshy": "^2.0.1",
+    "typedoc": "^0.26.3",
+    "typescript": "^5.5.3"
   },
   "tap": {
     "typecheck": true
   },
   "engines": {
-    "node": ">=16 || 14 >=14.18"
+    "node": "20 || >=22"
   },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
@@ -75,8 +73,8 @@
     "url": "git+https://github.com/isaacs/path-scurry"
   },
   "dependencies": {
-    "lru-cache": "^10.2.0",
-    "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+    "lru-cache": "^11.0.0",
+    "minipass": "^7.1.2"
   },
   "tshy": {
     "selfLink": false,
@@ -85,5 +83,6 @@
       ".": "./src/index.ts"
     }
   },
-  "types": "./dist/commonjs/index.d.ts"
+  "types": "./dist/commonjs/index.d.ts",
+  "module": "./dist/esm/index.js"
 }
diff --git a/node_modules/wrap-ansi/node_modules/ansi-regex/index.js b/node_modules/wrap-ansi/node_modules/ansi-regex/index.js
index ddfdba39a783a..2cc5ca2419f1b 100644
--- a/node_modules/wrap-ansi/node_modules/ansi-regex/index.js
+++ b/node_modules/wrap-ansi/node_modules/ansi-regex/index.js
@@ -1,10 +1,14 @@
 export default function ansiRegex({onlyFirst = false} = {}) {
 	// Valid string terminator sequences are BEL, ESC\, and 0x9c
 	const ST = '(?:\\u0007|\\u001B\\u005C|\\u009C)';
-	const pattern = [
-		`[\\u001B\\u009B][[\\]()#;?]*(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?${ST})`,
-		'(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]))',
-	].join('|');
+
+	// OSC sequences only: ESC ] ... ST (non-greedy until the first ST)
+	const osc = `(?:\\u001B\\][\\s\\S]*?${ST})`;
+
+	// CSI and related: ESC/C1, optional intermediates, optional params (supports ; and :) then final byte
+	const csi = '[\\u001B\\u009B][[\\]()#;?]*(?:\\d{1,4}(?:[;:]\\d{0,4})*)?[\\dA-PR-TZcf-nq-uy=><~]';
+
+	const pattern = `${osc}|${csi}`;
 
 	return new RegExp(pattern, onlyFirst ? undefined : 'g');
 }
diff --git a/node_modules/wrap-ansi/node_modules/ansi-regex/package.json b/node_modules/wrap-ansi/node_modules/ansi-regex/package.json
index 49f3f61021512..2efe9ebbe66be 100644
--- a/node_modules/wrap-ansi/node_modules/ansi-regex/package.json
+++ b/node_modules/wrap-ansi/node_modules/ansi-regex/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "ansi-regex",
-	"version": "6.1.0",
+	"version": "6.2.2",
 	"description": "Regular expression for matching ANSI escape codes",
 	"license": "MIT",
 	"repository": "chalk/ansi-regex",
diff --git a/node_modules/wrap-ansi/node_modules/strip-ansi/package.json b/node_modules/wrap-ansi/node_modules/strip-ansi/package.json
index e1f455c325b00..2a59216e424fc 100644
--- a/node_modules/wrap-ansi/node_modules/strip-ansi/package.json
+++ b/node_modules/wrap-ansi/node_modules/strip-ansi/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "strip-ansi",
-	"version": "7.1.0",
+	"version": "7.1.2",
 	"description": "Strip ANSI escape codes from a string",
 	"license": "MIT",
 	"repository": "chalk/strip-ansi",
@@ -12,6 +12,8 @@
 	},
 	"type": "module",
 	"exports": "./index.js",
+	"types": "./index.d.ts",
+	"sideEffects": false,
 	"engines": {
 		"node": ">=12"
 	},
diff --git a/package-lock.json b/package-lock.json
index 76d6eff67aa06..700934ca25464 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -102,7 +102,7 @@
         "cli-columns": "^4.0.0",
         "fastest-levenshtein": "^1.0.16",
         "fs-minipass": "^3.0.3",
-        "glob": "^10.4.5",
+        "glob": "^11.0.3",
         "graceful-fs": "^4.2.11",
         "hosted-git-info": "^9.0.0",
         "ini": "^5.0.0",
@@ -2666,6 +2666,8 @@
     },
     "node_modules/@isaacs/cliui": {
       "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
+      "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -2681,7 +2683,9 @@
       }
     },
     "node_modules/@isaacs/cliui/node_modules/ansi-regex": {
-      "version": "6.1.0",
+      "version": "6.2.2",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+      "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -2693,11 +2697,15 @@
     },
     "node_modules/@isaacs/cliui/node_modules/emoji-regex": {
       "version": "9.2.2",
+      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
+      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/@isaacs/cliui/node_modules/string-width": {
       "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
+      "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -2713,7 +2721,9 @@
       }
     },
     "node_modules/@isaacs/cliui/node_modules/strip-ansi": {
-      "version": "7.1.0",
+      "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+      "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -3050,42 +3060,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/map-workspaces/node_modules/glob": {
-      "version": "11.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.3.1",
-        "jackspeak": "^4.1.1",
-        "minimatch": "^10.0.3",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^2.0.0"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/map-workspaces/node_modules/jackspeak": {
-      "version": "4.1.1",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/@npmcli/map-workspaces/node_modules/minimatch": {
       "version": "10.0.3",
       "inBundle": true,
@@ -3100,21 +3074,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/@npmcli/map-workspaces/node_modules/path-scurry": {
-      "version": "2.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^11.0.0",
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/@npmcli/metavuln-calculator": {
       "version": "9.0.2",
       "license": "ISC",
@@ -3170,71 +3129,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/package-json/node_modules/glob": {
-      "version": "11.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.3.1",
-        "jackspeak": "^4.1.1",
-        "minimatch": "^10.0.3",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^2.0.0"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/package-json/node_modules/jackspeak": {
-      "version": "4.1.1",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/package-json/node_modules/minimatch": {
-      "version": "10.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/package-json/node_modules/path-scurry": {
-      "version": "2.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^11.0.0",
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/@npmcli/promise-spawn": {
       "version": "8.0.2",
       "inBundle": true,
@@ -3914,6 +3808,27 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/glob": {
+      "version": "10.4.5",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
+      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "foreground-child": "^3.1.0",
+        "jackspeak": "^3.1.2",
+        "minimatch": "^9.0.4",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^1.11.1"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
       "version": "8.1.0",
       "dev": true,
@@ -3944,6 +3859,22 @@
         "node": ">=16"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/jackspeak": {
+      "version": "3.4.3",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
+      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
+      "dev": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      },
+      "optionalDependencies": {
+        "@pkgjs/parseargs": "^0.11.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/json-parse-even-better-errors": {
       "version": "3.0.2",
       "dev": true,
@@ -4390,6 +4321,23 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/path-scurry": {
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
+      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+      "dev": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^10.2.0",
+        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.18"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/postcss-selector-parser": {
       "version": "6.1.2",
       "dev": true,
@@ -4691,6 +4639,8 @@
     },
     "node_modules/@pkgjs/parseargs": {
       "version": "0.11.0",
+      "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
+      "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
       "inBundle": true,
       "license": "MIT",
       "optional": true,
@@ -5000,7 +4950,9 @@
       }
     },
     "node_modules/ansi-styles": {
-      "version": "6.2.1",
+      "version": "6.2.3",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
+      "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5464,71 +5416,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/cacache/node_modules/glob": {
-      "version": "11.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.3.1",
-        "jackspeak": "^4.1.1",
-        "minimatch": "^10.0.3",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^2.0.0"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/cacache/node_modules/jackspeak": {
-      "version": "4.1.1",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/cacache/node_modules/minimatch": {
-      "version": "10.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/cacache/node_modules/path-scurry": {
-      "version": "2.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^11.0.0",
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/caching-transform": {
       "version": "4.0.0",
       "dev": true,
@@ -6717,6 +6604,8 @@
     },
     "node_modules/eastasianwidth": {
       "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
+      "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
       "inBundle": true,
       "license": "MIT"
     },
@@ -7943,20 +7832,25 @@
       }
     },
     "node_modules/glob": {
-      "version": "10.4.5",
+      "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
+      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "foreground-child": "^3.1.0",
-        "jackspeak": "^3.1.2",
-        "minimatch": "^9.0.4",
+        "foreground-child": "^3.3.1",
+        "jackspeak": "^4.1.1",
+        "minimatch": "^10.0.3",
         "minipass": "^7.1.2",
         "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^1.11.1"
+        "path-scurry": "^2.0.0"
       },
       "bin": {
         "glob": "dist/esm/bin.mjs"
       },
+      "engines": {
+        "node": "20 || >=22"
+      },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
       }
@@ -7972,6 +7866,22 @@
         "node": ">=10.13.0"
       }
     },
+    "node_modules/glob/node_modules/minimatch": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "@isaacs/brace-expansion": "^5.0.0"
+      },
+      "engines": {
+        "node": "20 || >=22"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/global-directory": {
       "version": "4.0.1",
       "dev": true,
@@ -9290,17 +9200,19 @@
       }
     },
     "node_modules/jackspeak": {
-      "version": "3.4.3",
+      "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
+      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
         "@isaacs/cliui": "^8.0.2"
       },
+      "engines": {
+        "node": "20 || >=22"
+      },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
-      },
-      "optionalDependencies": {
-        "@pkgjs/parseargs": "^0.11.0"
       }
     },
     "node_modules/jiti": {
@@ -10924,6 +10836,43 @@
         "node": ">=18"
       }
     },
+    "node_modules/node-gyp/node_modules/glob": {
+      "version": "10.4.5",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
+      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "foreground-child": "^3.1.0",
+        "jackspeak": "^3.1.2",
+        "minimatch": "^9.0.4",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^1.11.1"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/node-gyp/node_modules/jackspeak": {
+      "version": "3.4.3",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
+      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      },
+      "optionalDependencies": {
+        "@pkgjs/parseargs": "^0.11.0"
+      }
+    },
     "node_modules/node-gyp/node_modules/lru-cache": {
       "version": "10.4.3",
       "inBundle": true,
@@ -10964,6 +10913,23 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/node-gyp/node_modules/path-scurry": {
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
+      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+      "inBundle": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^10.2.0",
+        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.18"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/node-gyp/node_modules/tar": {
       "version": "7.4.3",
       "inBundle": true,
@@ -11827,25 +11793,22 @@
       "license": "MIT"
     },
     "node_modules/path-scurry": {
-      "version": "1.11.1",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
+      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
-        "lru-cache": "^10.2.0",
-        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+        "lru-cache": "^11.0.0",
+        "minipass": "^7.1.2"
       },
       "engines": {
-        "node": ">=16 || 14 >=14.18"
+        "node": "20 || >=22"
       },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/path-scurry/node_modules/lru-cache": {
-      "version": "10.4.3",
-      "inBundle": true,
-      "license": "ISC"
-    },
     "node_modules/picocolors": {
       "version": "1.1.1",
       "dev": true,
@@ -12693,6 +12656,67 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/rimraf/node_modules/glob": {
+      "version": "10.4.5",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
+      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "foreground-child": "^3.1.0",
+        "jackspeak": "^3.1.2",
+        "minimatch": "^9.0.4",
+        "minipass": "^7.1.2",
+        "package-json-from-dist": "^1.0.0",
+        "path-scurry": "^1.11.1"
+      },
+      "bin": {
+        "glob": "dist/esm/bin.mjs"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
+    "node_modules/rimraf/node_modules/jackspeak": {
+      "version": "3.4.3",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
+      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
+      "dev": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "@isaacs/cliui": "^8.0.2"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      },
+      "optionalDependencies": {
+        "@pkgjs/parseargs": "^0.11.0"
+      }
+    },
+    "node_modules/rimraf/node_modules/lru-cache": {
+      "version": "10.4.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
+      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+      "dev": true,
+      "license": "ISC"
+    },
+    "node_modules/rimraf/node_modules/path-scurry": {
+      "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
+      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
+      "dev": true,
+      "license": "BlueOak-1.0.0",
+      "dependencies": {
+        "lru-cache": "^10.2.0",
+        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.18"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/rrweb-cssom": {
       "version": "0.7.1",
       "dev": true,
@@ -13289,6 +13313,8 @@
     "node_modules/string-width-cjs": {
       "name": "string-width",
       "version": "4.2.3",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
+      "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -13380,6 +13406,8 @@
     "node_modules/strip-ansi-cjs": {
       "name": "strip-ansi",
       "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+      "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16623,6 +16651,8 @@
     },
     "node_modules/wrap-ansi": {
       "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
+      "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16640,6 +16670,8 @@
     "node_modules/wrap-ansi-cjs": {
       "name": "wrap-ansi",
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16656,6 +16688,8 @@
     },
     "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16669,7 +16703,9 @@
       }
     },
     "node_modules/wrap-ansi/node_modules/ansi-regex": {
-      "version": "6.1.0",
+      "version": "6.2.2",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
+      "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -16681,11 +16717,15 @@
     },
     "node_modules/wrap-ansi/node_modules/emoji-regex": {
       "version": "9.2.2",
+      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
+      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/wrap-ansi/node_modules/string-width": {
       "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
+      "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16701,7 +16741,9 @@
       }
     },
     "node_modules/wrap-ansi/node_modules/strip-ansi": {
-      "version": "7.1.0",
+      "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
+      "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16858,71 +16900,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "smoke-tests/node_modules/glob": {
-      "version": "11.0.3",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.3.1",
-        "jackspeak": "^4.1.1",
-        "minimatch": "^10.0.3",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^2.0.0"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "smoke-tests/node_modules/jackspeak": {
-      "version": "4.1.1",
-      "dev": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "smoke-tests/node_modules/minimatch": {
-      "version": "10.0.3",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "smoke-tests/node_modules/path-scurry": {
-      "version": "2.0.0",
-      "dev": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^11.0.0",
-        "minipass": "^7.1.2"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "smoke-tests/node_modules/rimraf": {
       "version": "6.0.1",
       "dev": true,
diff --git a/package.json b/package.json
index 49bd059ce391a..60e507e310df7 100644
--- a/package.json
+++ b/package.json
@@ -69,7 +69,7 @@
     "cli-columns": "^4.0.0",
     "fastest-levenshtein": "^1.0.16",
     "fs-minipass": "^3.0.3",
-    "glob": "^10.4.5",
+    "glob": "^11.0.3",
     "graceful-fs": "^4.2.11",
     "hosted-git-info": "^9.0.0",
     "ini": "^5.0.0",

From ac334979ab94a52085b81a276c64788fa688e735 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:21:30 -0700
Subject: [PATCH 28/63] deps: mkdirp@3.0.1

---
 node_modules/mkdirp/LICENSE                   |  2 +-
 node_modules/mkdirp/bin/cmd.js                | 68 --------------
 .../mkdirp/dist/cjs/package.json              |  0
 .../mkdirp/dist/cjs/src/bin.js                |  0
 .../mkdirp/dist/cjs/src/find-made.js          |  0
 .../mkdirp/dist/cjs/src/index.js              |  0
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |  0
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |  0
 .../mkdirp/dist/cjs/src/opts-arg.js           |  0
 .../mkdirp/dist/cjs/src/path-arg.js           |  0
 .../mkdirp/dist/cjs/src/use-native.js         |  0
 .../mkdirp/dist/mjs/find-made.js              |  0
 .../mkdirp/dist/mjs/index.js                  |  0
 .../mkdirp/dist/mjs/mkdirp-manual.js          |  0
 .../mkdirp/dist/mjs/mkdirp-native.js          |  0
 .../mkdirp/dist/mjs/opts-arg.js               |  0
 .../mkdirp/dist/mjs/package.json              |  0
 .../mkdirp/dist/mjs/path-arg.js               |  0
 .../mkdirp/dist/mjs/use-native.js             |  0
 node_modules/mkdirp/index.js                  | 31 -------
 node_modules/mkdirp/lib/find-made.js          | 29 ------
 node_modules/mkdirp/lib/mkdirp-manual.js      | 64 -------------
 node_modules/mkdirp/lib/mkdirp-native.js      | 39 --------
 node_modules/mkdirp/lib/opts-arg.js           | 23 -----
 node_modules/mkdirp/lib/path-arg.js           | 29 ------
 node_modules/mkdirp/lib/use-native.js         | 10 --
 node_modules/mkdirp/package.json              | 87 ++++++++++++++----
 .../node-gyp/node_modules/mkdirp/LICENSE      | 21 -----
 .../node-gyp/node_modules/mkdirp/package.json | 91 -------------------
 .../pacote/node_modules/mkdirp/LICENSE        | 21 -----
 .../node_modules/mkdirp/dist/cjs/package.json | 91 -------------------
 .../node_modules/mkdirp/dist/cjs/src/bin.js   | 80 ----------------
 .../mkdirp/dist/cjs/src/find-made.js          | 35 -------
 .../node_modules/mkdirp/dist/cjs/src/index.js | 53 -----------
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      | 79 ----------------
 .../mkdirp/dist/cjs/src/mkdirp-native.js      | 50 ----------
 .../mkdirp/dist/cjs/src/opts-arg.js           | 38 --------
 .../mkdirp/dist/cjs/src/path-arg.js           | 28 ------
 .../mkdirp/dist/cjs/src/use-native.js         | 17 ----
 .../node_modules/mkdirp/dist/mjs/find-made.js | 30 ------
 .../node_modules/mkdirp/dist/mjs/index.js     | 43 ---------
 .../mkdirp/dist/mjs/mkdirp-manual.js          | 75 ---------------
 .../mkdirp/dist/mjs/mkdirp-native.js          | 46 ----------
 .../node_modules/mkdirp/dist/mjs/opts-arg.js  | 34 -------
 .../node_modules/mkdirp/dist/mjs/package.json |  3 -
 .../node_modules/mkdirp/dist/mjs/path-arg.js  | 24 -----
 .../mkdirp/dist/mjs/use-native.js             | 14 ---
 .../pacote/node_modules/mkdirp/package.json   | 91 -------------------
 package-lock.json                             | 63 +++++++------
 49 files changed, 101 insertions(+), 1308 deletions(-)
 delete mode 100755 node_modules/mkdirp/bin/cmd.js
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/package.json (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/bin.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/find-made.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/index.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/mkdirp-manual.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/mkdirp-native.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/opts-arg.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/path-arg.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/cjs/src/use-native.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/find-made.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/index.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/mkdirp-manual.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/mkdirp-native.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/opts-arg.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/package.json (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/path-arg.js (100%)
 rename node_modules/{node-gyp/node_modules => }/mkdirp/dist/mjs/use-native.js (100%)
 delete mode 100644 node_modules/mkdirp/index.js
 delete mode 100644 node_modules/mkdirp/lib/find-made.js
 delete mode 100644 node_modules/mkdirp/lib/mkdirp-manual.js
 delete mode 100644 node_modules/mkdirp/lib/mkdirp-native.js
 delete mode 100644 node_modules/mkdirp/lib/opts-arg.js
 delete mode 100644 node_modules/mkdirp/lib/path-arg.js
 delete mode 100644 node_modules/mkdirp/lib/use-native.js
 delete mode 100644 node_modules/node-gyp/node_modules/mkdirp/LICENSE
 delete mode 100644 node_modules/node-gyp/node_modules/mkdirp/package.json
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json
 delete mode 100755 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js
 delete mode 100644 node_modules/pacote/node_modules/mkdirp/package.json

diff --git a/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE
index 13fcd15f0e0be..0a034db7a73b5 100644
--- a/node_modules/mkdirp/LICENSE
+++ b/node_modules/mkdirp/LICENSE
@@ -1,4 +1,4 @@
-Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
+Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
 
 This project is free software released under the MIT license:
 
diff --git a/node_modules/mkdirp/bin/cmd.js b/node_modules/mkdirp/bin/cmd.js
deleted file mode 100755
index 6e0aa8dc4667b..0000000000000
--- a/node_modules/mkdirp/bin/cmd.js
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env node
-
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
-  Create each supplied directory including any necessary parent directories
-  that don't yet exist.
-
-  If the directory already exists, do nothing.
-
-OPTIONS are:
-
-  -m       If a directory needs to be created, set the mode as an octal
-  --mode=  permission string.
-
-  -v --version   Print the mkdirp version number
-
-  -h --help      Print this helpful banner
-
-  -p --print     Print the first directories created for each path provided
-
-  --manual       Use manual implementation, even if native is available
-`
-
-const dirs = []
-const opts = {}
-let print = false
-let dashdash = false
-let manual = false
-for (const arg of process.argv.slice(2)) {
-  if (dashdash)
-    dirs.push(arg)
-  else if (arg === '--')
-    dashdash = true
-  else if (arg === '--manual')
-    manual = true
-  else if (/^-h/.test(arg) || /^--help/.test(arg)) {
-    console.log(usage())
-    process.exit(0)
-  } else if (arg === '-v' || arg === '--version') {
-    console.log(require('../package.json').version)
-    process.exit(0)
-  } else if (arg === '-p' || arg === '--print') {
-    print = true
-  } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
-    const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
-    if (isNaN(mode)) {
-      console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
-      process.exit(1)
-    }
-    opts.mode = mode
-  } else
-    dirs.push(arg)
-}
-
-const mkdirp = require('../')
-const impl = manual ? mkdirp.manual : mkdirp
-if (dirs.length === 0)
-  console.error(usage())
-
-Promise.all(dirs.map(dir => impl(dir, opts)))
-  .then(made => print ? made.forEach(m => m && console.log(m)) : null)
-  .catch(er => {
-    console.error(er.message)
-    if (er.code)
-      console.error('  code: ' + er.code)
-    process.exit(1)
-  })
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/package.json b/node_modules/mkdirp/dist/cjs/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/package.json
rename to node_modules/mkdirp/dist/cjs/package.json
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/mkdirp/dist/cjs/src/bin.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/bin.js
rename to node_modules/mkdirp/dist/cjs/src/bin.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/mkdirp/dist/cjs/src/find-made.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/find-made.js
rename to node_modules/mkdirp/dist/cjs/src/find-made.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/mkdirp/dist/cjs/src/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/index.js
rename to node_modules/mkdirp/dist/cjs/src/index.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
rename to node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
rename to node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/mkdirp/dist/cjs/src/opts-arg.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/opts-arg.js
rename to node_modules/mkdirp/dist/cjs/src/opts-arg.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/mkdirp/dist/cjs/src/path-arg.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/path-arg.js
rename to node_modules/mkdirp/dist/cjs/src/path-arg.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/mkdirp/dist/cjs/src/use-native.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/cjs/src/use-native.js
rename to node_modules/mkdirp/dist/cjs/src/use-native.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/mkdirp/dist/mjs/find-made.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/find-made.js
rename to node_modules/mkdirp/dist/mjs/find-made.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js b/node_modules/mkdirp/dist/mjs/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/index.js
rename to node_modules/mkdirp/dist/mjs/index.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
rename to node_modules/mkdirp/dist/mjs/mkdirp-manual.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/mkdirp/dist/mjs/mkdirp-native.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/mkdirp-native.js
rename to node_modules/mkdirp/dist/mjs/mkdirp-native.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/mkdirp/dist/mjs/opts-arg.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/opts-arg.js
rename to node_modules/mkdirp/dist/mjs/opts-arg.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/package.json b/node_modules/mkdirp/dist/mjs/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/package.json
rename to node_modules/mkdirp/dist/mjs/package.json
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/mkdirp/dist/mjs/path-arg.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/path-arg.js
rename to node_modules/mkdirp/dist/mjs/path-arg.js
diff --git a/node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/mkdirp/dist/mjs/use-native.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/mkdirp/dist/mjs/use-native.js
rename to node_modules/mkdirp/dist/mjs/use-native.js
diff --git a/node_modules/mkdirp/index.js b/node_modules/mkdirp/index.js
deleted file mode 100644
index ad7a16c9f45d9..0000000000000
--- a/node_modules/mkdirp/index.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const optsArg = require('./lib/opts-arg.js')
-const pathArg = require('./lib/path-arg.js')
-
-const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
-const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
-const {useNative, useNativeSync} = require('./lib/use-native.js')
-
-
-const mkdirp = (path, opts) => {
-  path = pathArg(path)
-  opts = optsArg(opts)
-  return useNative(opts)
-    ? mkdirpNative(path, opts)
-    : mkdirpManual(path, opts)
-}
-
-const mkdirpSync = (path, opts) => {
-  path = pathArg(path)
-  opts = optsArg(opts)
-  return useNativeSync(opts)
-    ? mkdirpNativeSync(path, opts)
-    : mkdirpManualSync(path, opts)
-}
-
-mkdirp.sync = mkdirpSync
-mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
-mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
-mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
-mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
-
-module.exports = mkdirp
diff --git a/node_modules/mkdirp/lib/find-made.js b/node_modules/mkdirp/lib/find-made.js
deleted file mode 100644
index 022e492c085da..0000000000000
--- a/node_modules/mkdirp/lib/find-made.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const {dirname} = require('path')
-
-const findMade = (opts, parent, path = undefined) => {
-  // we never want the 'made' return value to be a root directory
-  if (path === parent)
-    return Promise.resolve()
-
-  return opts.statAsync(parent).then(
-    st => st.isDirectory() ? path : undefined, // will fail later
-    er => er.code === 'ENOENT'
-      ? findMade(opts, dirname(parent), parent)
-      : undefined
-  )
-}
-
-const findMadeSync = (opts, parent, path = undefined) => {
-  if (path === parent)
-    return undefined
-
-  try {
-    return opts.statSync(parent).isDirectory() ? path : undefined
-  } catch (er) {
-    return er.code === 'ENOENT'
-      ? findMadeSync(opts, dirname(parent), parent)
-      : undefined
-  }
-}
-
-module.exports = {findMade, findMadeSync}
diff --git a/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/mkdirp/lib/mkdirp-manual.js
deleted file mode 100644
index 2eb18cd64eb79..0000000000000
--- a/node_modules/mkdirp/lib/mkdirp-manual.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const {dirname} = require('path')
-
-const mkdirpManual = (path, opts, made) => {
-  opts.recursive = false
-  const parent = dirname(path)
-  if (parent === path) {
-    return opts.mkdirAsync(path, opts).catch(er => {
-      // swallowed by recursive implementation on posix systems
-      // any other error is a failure
-      if (er.code !== 'EISDIR')
-        throw er
-    })
-  }
-
-  return opts.mkdirAsync(path, opts).then(() => made || path, er => {
-    if (er.code === 'ENOENT')
-      return mkdirpManual(parent, opts)
-        .then(made => mkdirpManual(path, opts, made))
-    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
-      throw er
-    return opts.statAsync(path).then(st => {
-      if (st.isDirectory())
-        return made
-      else
-        throw er
-    }, () => { throw er })
-  })
-}
-
-const mkdirpManualSync = (path, opts, made) => {
-  const parent = dirname(path)
-  opts.recursive = false
-
-  if (parent === path) {
-    try {
-      return opts.mkdirSync(path, opts)
-    } catch (er) {
-      // swallowed by recursive implementation on posix systems
-      // any other error is a failure
-      if (er.code !== 'EISDIR')
-        throw er
-      else
-        return
-    }
-  }
-
-  try {
-    opts.mkdirSync(path, opts)
-    return made || path
-  } catch (er) {
-    if (er.code === 'ENOENT')
-      return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
-    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
-      throw er
-    try {
-      if (!opts.statSync(path).isDirectory())
-        throw er
-    } catch (_) {
-      throw er
-    }
-  }
-}
-
-module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/mkdirp/lib/mkdirp-native.js
deleted file mode 100644
index c7a6b69800f62..0000000000000
--- a/node_modules/mkdirp/lib/mkdirp-native.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const {dirname} = require('path')
-const {findMade, findMadeSync} = require('./find-made.js')
-const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
-
-const mkdirpNative = (path, opts) => {
-  opts.recursive = true
-  const parent = dirname(path)
-  if (parent === path)
-    return opts.mkdirAsync(path, opts)
-
-  return findMade(opts, path).then(made =>
-    opts.mkdirAsync(path, opts).then(() => made)
-    .catch(er => {
-      if (er.code === 'ENOENT')
-        return mkdirpManual(path, opts)
-      else
-        throw er
-    }))
-}
-
-const mkdirpNativeSync = (path, opts) => {
-  opts.recursive = true
-  const parent = dirname(path)
-  if (parent === path)
-    return opts.mkdirSync(path, opts)
-
-  const made = findMadeSync(opts, path)
-  try {
-    opts.mkdirSync(path, opts)
-    return made
-  } catch (er) {
-    if (er.code === 'ENOENT')
-      return mkdirpManualSync(path, opts)
-    else
-      throw er
-  }
-}
-
-module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/node_modules/mkdirp/lib/opts-arg.js b/node_modules/mkdirp/lib/opts-arg.js
deleted file mode 100644
index 2fa4833faacc7..0000000000000
--- a/node_modules/mkdirp/lib/opts-arg.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const { promisify } = require('util')
-const fs = require('fs')
-const optsArg = opts => {
-  if (!opts)
-    opts = { mode: 0o777, fs }
-  else if (typeof opts === 'object')
-    opts = { mode: 0o777, fs, ...opts }
-  else if (typeof opts === 'number')
-    opts = { mode: opts, fs }
-  else if (typeof opts === 'string')
-    opts = { mode: parseInt(opts, 8), fs }
-  else
-    throw new TypeError('invalid options argument')
-
-  opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
-  opts.mkdirAsync = promisify(opts.mkdir)
-  opts.stat = opts.stat || opts.fs.stat || fs.stat
-  opts.statAsync = promisify(opts.stat)
-  opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
-  opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
-  return opts
-}
-module.exports = optsArg
diff --git a/node_modules/mkdirp/lib/path-arg.js b/node_modules/mkdirp/lib/path-arg.js
deleted file mode 100644
index cc07de5a6f992..0000000000000
--- a/node_modules/mkdirp/lib/path-arg.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
-const { resolve, parse } = require('path')
-const pathArg = path => {
-  if (/\0/.test(path)) {
-    // simulate same failure that node raises
-    throw Object.assign(
-      new TypeError('path must be a string without null bytes'),
-      {
-        path,
-        code: 'ERR_INVALID_ARG_VALUE',
-      }
-    )
-  }
-
-  path = resolve(path)
-  if (platform === 'win32') {
-    const badWinChars = /[*|"<>?:]/
-    const {root} = parse(path)
-    if (badWinChars.test(path.substr(root.length))) {
-      throw Object.assign(new Error('Illegal characters in path.'), {
-        path,
-        code: 'EINVAL',
-      })
-    }
-  }
-
-  return path
-}
-module.exports = pathArg
diff --git a/node_modules/mkdirp/lib/use-native.js b/node_modules/mkdirp/lib/use-native.js
deleted file mode 100644
index 079361de19fd8..0000000000000
--- a/node_modules/mkdirp/lib/use-native.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const fs = require('fs')
-
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
-const versArr = version.replace(/^v/, '').split('.')
-const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
-
-const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
-const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
-
-module.exports = {useNative, useNativeSync}
diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json
index 2913ed09bddd6..f31ac3314d6f6 100644
--- a/node_modules/mkdirp/package.json
+++ b/node_modules/mkdirp/package.json
@@ -1,8 +1,7 @@
 {
   "name": "mkdirp",
   "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "1.0.4",
-  "main": "index.js",
+  "version": "3.0.1",
   "keywords": [
     "mkdir",
     "directory",
@@ -12,33 +11,81 @@
     "recursive",
     "native"
   ],
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
+  "bin": "./dist/cjs/src/bin.js",
+  "main": "./dist/cjs/src/index.js",
+  "module": "./dist/mjs/index.js",
+  "types": "./dist/mjs/index.d.ts",
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/mjs/index.d.ts",
+        "default": "./dist/mjs/index.js"
+      },
+      "require": {
+        "types": "./dist/cjs/src/index.d.ts",
+        "default": "./dist/cjs/src/index.js"
+      }
+    }
   },
+  "files": [
+    "dist"
+  ],
   "scripts": {
-    "test": "tap",
-    "snap": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags"
+    "prepublishOnly": "git push origin --follow-tags",
+    "preprepare": "rm -rf dist",
+    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
+    "postprepare": "bash fixup.sh",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "test": "c8 tap",
+    "snap": "c8 tap",
+    "format": "prettier --write . --loglevel warn",
+    "benchmark": "node benchmark/index.js",
+    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
   },
-  "tap": {
-    "check-coverage": true,
-    "coverage-map": "map.js"
+  "prettier": {
+    "semi": false,
+    "printWidth": 80,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
   },
   "devDependencies": {
-    "require-inject": "^1.4.4",
-    "tap": "^14.10.7"
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.11.9",
+    "@types/tap": "^15.0.7",
+    "c8": "^7.12.0",
+    "eslint-config-prettier": "^8.6.0",
+    "prettier": "^2.8.2",
+    "tap": "^16.3.3",
+    "ts-node": "^10.9.1",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
+  },
+  "tap": {
+    "coverage": false,
+    "node-arg": [
+      "--no-warnings",
+      "--loader",
+      "ts-node/esm"
+    ],
+    "ts": false
+  },
+  "funding": {
+    "url": "https://github.com/sponsors/isaacs"
+  },
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/isaacs/node-mkdirp.git"
   },
-  "bin": "bin/cmd.js",
   "license": "MIT",
   "engines": {
     "node": ">=10"
-  },
-  "files": [
-    "bin",
-    "lib",
-    "index.js"
-  ]
+  }
 }
diff --git a/node_modules/node-gyp/node_modules/mkdirp/LICENSE b/node_modules/node-gyp/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 0a034db7a73b5..0000000000000
--- a/node_modules/node-gyp/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/mkdirp/package.json b/node_modules/node-gyp/node_modules/mkdirp/package.json
deleted file mode 100644
index f31ac3314d6f6..0000000000000
--- a/node_modules/node-gyp/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-  "name": "mkdirp",
-  "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "3.0.1",
-  "keywords": [
-    "mkdir",
-    "directory",
-    "make dir",
-    "make",
-    "dir",
-    "recursive",
-    "native"
-  ],
-  "bin": "./dist/cjs/src/bin.js",
-  "main": "./dist/cjs/src/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/mjs/index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/src/index.d.ts",
-        "default": "./dist/cjs/src/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.11.9",
-    "@types/tap": "^15.0.7",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
-    "prettier": "^2.8.2",
-    "tap": "^16.3.3",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
-  },
-  "license": "MIT",
-  "engines": {
-    "node": ">=10"
-  }
-}
diff --git a/node_modules/pacote/node_modules/mkdirp/LICENSE b/node_modules/pacote/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 0a034db7a73b5..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json b/node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json
deleted file mode 100644
index 9d04a66e16cd9..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-    "name": "mkdirp",
-    "description": "Recursively mkdir, like `mkdir -p`",
-    "version": "3.0.1",
-    "keywords": [
-        "mkdir",
-        "directory",
-        "make dir",
-        "make",
-        "dir",
-        "recursive",
-        "native"
-    ],
-    "bin": "./dist/cjs/src/bin.js",
-    "main": "./dist/cjs/src/index.js",
-    "module": "./dist/mjs/index.js",
-    "types": "./dist/mjs/index.d.ts",
-    "exports": {
-        ".": {
-            "import": {
-                "types": "./dist/mjs/index.d.ts",
-                "default": "./dist/mjs/index.js"
-            },
-            "require": {
-                "types": "./dist/cjs/src/index.d.ts",
-                "default": "./dist/cjs/src/index.js"
-            }
-        }
-    },
-    "files": [
-        "dist"
-    ],
-    "scripts": {
-        "preversion": "npm test",
-        "postversion": "npm publish",
-        "prepublishOnly": "git push origin --follow-tags",
-        "preprepare": "rm -rf dist",
-        "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-        "postprepare": "bash fixup.sh",
-        "pretest": "npm run prepare",
-        "presnap": "npm run prepare",
-        "test": "c8 tap",
-        "snap": "c8 tap",
-        "format": "prettier --write . --loglevel warn",
-        "benchmark": "node benchmark/index.js",
-        "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-    },
-    "prettier": {
-        "semi": false,
-        "printWidth": 80,
-        "tabWidth": 2,
-        "useTabs": false,
-        "singleQuote": true,
-        "jsxSingleQuote": false,
-        "bracketSameLine": true,
-        "arrowParens": "avoid",
-        "endOfLine": "lf"
-    },
-    "devDependencies": {
-        "@types/brace-expansion": "^1.1.0",
-        "@types/node": "^18.11.9",
-        "@types/tap": "^15.0.7",
-        "c8": "^7.12.0",
-        "eslint-config-prettier": "^8.6.0",
-        "prettier": "^2.8.2",
-        "tap": "^16.3.3",
-        "ts-node": "^10.9.1",
-        "typedoc": "^0.23.21",
-        "typescript": "^4.9.3"
-    },
-    "tap": {
-        "coverage": false,
-        "node-arg": [
-            "--no-warnings",
-            "--loader",
-            "ts-node/esm"
-        ],
-        "ts": false
-    },
-    "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-    },
-    "repository": {
-        "type": "git",
-        "url": "https://github.com/isaacs/node-mkdirp.git"
-    },
-    "license": "MIT",
-    "engines": {
-        "node": ">=10"
-    }
-}
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js
deleted file mode 100755
index 757aae1fd96cb..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/bin.js
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env node
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const package_json_1 = require("../package.json");
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
-  Create each supplied directory including any necessary parent directories
-  that don't yet exist.
-
-  If the directory already exists, do nothing.
-
-OPTIONS are:
-
-  -m       If a directory needs to be created, set the mode as an octal
-  --mode=  permission string.
-
-  -v --version   Print the mkdirp version number
-
-  -h --help      Print this helpful banner
-
-  -p --print     Print the first directories created for each path provided
-
-  --manual       Use manual implementation, even if native is available
-`;
-const dirs = [];
-const opts = {};
-let doPrint = false;
-let dashdash = false;
-let manual = false;
-for (const arg of process.argv.slice(2)) {
-    if (dashdash)
-        dirs.push(arg);
-    else if (arg === '--')
-        dashdash = true;
-    else if (arg === '--manual')
-        manual = true;
-    else if (/^-h/.test(arg) || /^--help/.test(arg)) {
-        console.log(usage());
-        process.exit(0);
-    }
-    else if (arg === '-v' || arg === '--version') {
-        console.log(package_json_1.version);
-        process.exit(0);
-    }
-    else if (arg === '-p' || arg === '--print') {
-        doPrint = true;
-    }
-    else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
-        // these don't get covered in CI, but work locally
-        // weird because the tests below show as passing in the output.
-        /* c8 ignore start */
-        const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
-        if (isNaN(mode)) {
-            console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
-            process.exit(1);
-        }
-        /* c8 ignore stop */
-        opts.mode = mode;
-    }
-    else
-        dirs.push(arg);
-}
-const index_js_1 = require("./index.js");
-const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
-if (dirs.length === 0) {
-    console.error(usage());
-}
-// these don't get covered in CI, but work locally
-/* c8 ignore start */
-Promise.all(dirs.map(dir => impl(dir, opts)))
-    .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
-    .catch(er => {
-    console.error(er.message);
-    if (er.code)
-        console.error('  code: ' + er.code);
-    process.exit(1);
-});
-/* c8 ignore stop */
-//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js
deleted file mode 100644
index e831ef27cadc1..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/find-made.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.findMadeSync = exports.findMade = void 0;
-const path_1 = require("path");
-const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    });
-};
-exports.findMade = findMade;
-const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    }
-};
-exports.findMadeSync = findMadeSync;
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js
deleted file mode 100644
index ab9dc62cddda3..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/index.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const mkdirp_native_js_1 = require("./mkdirp-native.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const path_arg_js_1 = require("./path-arg.js");
-const use_native_js_1 = require("./use-native.js");
-/* c8 ignore start */
-var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
-Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
-Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
-var mkdirp_native_js_2 = require("./mkdirp-native.js");
-Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
-Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
-var use_native_js_2 = require("./use-native.js");
-Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
-Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
-/* c8 ignore stop */
-const mkdirpSync = (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNativeSync)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
-};
-exports.mkdirpSync = mkdirpSync;
-exports.sync = exports.mkdirpSync;
-exports.manual = mkdirp_manual_js_1.mkdirpManual;
-exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
-exports.native = mkdirp_native_js_1.mkdirpNative;
-exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
-exports.mkdirp = Object.assign(async (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNative)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
-}, {
-    mkdirpSync: exports.mkdirpSync,
-    mkdirpNative: mkdirp_native_js_1.mkdirpNative,
-    mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
-    mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    sync: exports.mkdirpSync,
-    native: mkdirp_native_js_1.mkdirpNative,
-    nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    manual: mkdirp_manual_js_1.mkdirpManual,
-    manualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    useNative: use_native_js_1.useNative,
-    useNativeSync: use_native_js_1.useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
deleted file mode 100644
index d9bd1d8bb5a49..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
+++ /dev/null
@@ -1,79 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpManual = exports.mkdirpManualSync = void 0;
-const path_1 = require("path");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpManualSync = (path, options, made) => {
-    const parent = (0, path_1.dirname)(path);
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-exports.mkdirpManualSync = mkdirpManualSync;
-exports.mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = false;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: exports.mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
deleted file mode 100644
index 9f00567d7cc20..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
-const path_1 = require("path");
-const find_made_js_1 = require("./find-made.js");
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpNativeSync = (path, options) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = true;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = (0, find_made_js_1.findMadeSync)(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-exports.mkdirpNativeSync = mkdirpNativeSync;
-exports.mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: exports.mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js
deleted file mode 100644
index e8f486c090595..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/opts-arg.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.optsArg = void 0;
-const fs_1 = require("fs");
-const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || fs_1.stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
-    return resolved;
-};
-exports.optsArg = optsArg;
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js
deleted file mode 100644
index a6b457f6e23d5..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/path-arg.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.pathArg = void 0;
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-const path_1 = require("path");
-const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = (0, path_1.resolve)(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = (0, path_1.parse)(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-exports.pathArg = pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js
deleted file mode 100644
index 550b3452688ee..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/cjs/src/use-native.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.useNative = exports.useNativeSync = void 0;
-const fs_1 = require("fs");
-const opts_arg_js_1 = require("./opts-arg.js");
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-exports.useNativeSync = !hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
-exports.useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
-    sync: exports.useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js
deleted file mode 100644
index 3e72fd59a2c1f..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/find-made.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { dirname } from 'path';
-export const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMade(opts, dirname(parent), parent)
-            : undefined;
-    });
-};
-export const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMadeSync(opts, dirname(parent), parent)
-            : undefined;
-    }
-};
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js
deleted file mode 100644
index 0217ecc8cdd83..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-import { optsArg } from './opts-arg.js';
-import { pathArg } from './path-arg.js';
-import { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore start */
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore stop */
-export const mkdirpSync = (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNativeSync(resolved)
-        ? mkdirpNativeSync(path, resolved)
-        : mkdirpManualSync(path, resolved);
-};
-export const sync = mkdirpSync;
-export const manual = mkdirpManual;
-export const manualSync = mkdirpManualSync;
-export const native = mkdirpNative;
-export const nativeSync = mkdirpNativeSync;
-export const mkdirp = Object.assign(async (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNative(resolved)
-        ? mkdirpNative(path, resolved)
-        : mkdirpManual(path, resolved);
-}, {
-    mkdirpSync,
-    mkdirpNative,
-    mkdirpNativeSync,
-    mkdirpManual,
-    mkdirpManualSync,
-    sync: mkdirpSync,
-    native: mkdirpNative,
-    nativeSync: mkdirpNativeSync,
-    manual: mkdirpManual,
-    manualSync: mkdirpManualSync,
-    useNative,
-    useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
deleted file mode 100644
index a4d044e02d3bf..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { dirname } from 'path';
-import { optsArg } from './opts-arg.js';
-export const mkdirpManualSync = (path, options, made) => {
-    const parent = dirname(path);
-    const opts = { ...optsArg(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-export const mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = optsArg(options);
-    opts.recursive = false;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js
deleted file mode 100644
index 99d10a5425dad..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/mkdirp-native.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import { dirname } from 'path';
-import { findMade, findMadeSync } from './find-made.js';
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { optsArg } from './opts-arg.js';
-export const mkdirpNativeSync = (path, options) => {
-    const opts = optsArg(options);
-    opts.recursive = true;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = findMadeSync(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-export const mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...optsArg(options), recursive: true };
-    const parent = dirname(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return findMade(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js
deleted file mode 100644
index d47e2927fee4c..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/opts-arg.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { mkdir, mkdirSync, stat, statSync, } from 'fs';
-export const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
-    return resolved;
-};
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json b/node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js
deleted file mode 100644
index 03539cc5a94f9..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/path-arg.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-import { parse, resolve } from 'path';
-export const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = resolve(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = parse(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js
deleted file mode 100644
index ad2093867eb74..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/dist/mjs/use-native.js
+++ /dev/null
@@ -1,14 +0,0 @@
-import { mkdir, mkdirSync } from 'fs';
-import { optsArg } from './opts-arg.js';
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-export const useNativeSync = !hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdirSync === mkdirSync;
-export const useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdir === mkdir, {
-    sync: useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/mkdirp/package.json b/node_modules/pacote/node_modules/mkdirp/package.json
deleted file mode 100644
index f31ac3314d6f6..0000000000000
--- a/node_modules/pacote/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-  "name": "mkdirp",
-  "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "3.0.1",
-  "keywords": [
-    "mkdir",
-    "directory",
-    "make dir",
-    "make",
-    "dir",
-    "recursive",
-    "native"
-  ],
-  "bin": "./dist/cjs/src/bin.js",
-  "main": "./dist/cjs/src/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/mjs/index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/src/index.d.ts",
-        "default": "./dist/cjs/src/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.11.9",
-    "@types/tap": "^15.0.7",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
-    "prettier": "^2.8.2",
-    "tap": "^16.3.3",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
-  },
-  "license": "MIT",
-  "engines": {
-    "node": ">=10"
-  }
-}
diff --git a/package-lock.json b/package-lock.json
index 700934ca25464..e529358d95de3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -10641,14 +10641,19 @@
       }
     },
     "node_modules/mkdirp": {
-      "version": "1.0.4",
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
+      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
       "inBundle": true,
       "license": "MIT",
       "bin": {
-        "mkdirp": "bin/cmd.js"
+        "mkdirp": "dist/cjs/src/bin.js"
       },
       "engines": {
         "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
       }
     },
     "node_modules/modify-values": {
@@ -10899,20 +10904,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/node-gyp/node_modules/mkdirp": {
-      "version": "3.0.1",
-      "inBundle": true,
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "dist/cjs/src/bin.js"
-      },
-      "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/node-gyp/node_modules/path-scurry": {
       "version": "1.11.1",
       "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
@@ -11647,20 +11638,6 @@
         "node": ">=18"
       }
     },
-    "node_modules/pacote/node_modules/mkdirp": {
-      "version": "3.0.1",
-      "inBundle": true,
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "dist/cjs/src/bin.js"
-      },
-      "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/pacote/node_modules/tar": {
       "version": "7.4.3",
       "inBundle": true,
@@ -14998,6 +14975,19 @@
       "inBundle": true,
       "license": "ISC"
     },
+    "node_modules/tap/node_modules/mkdirp": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
+      "dev": true,
+      "license": "MIT",
+      "bin": {
+        "mkdirp": "bin/cmd.js"
+      },
+      "engines": {
+        "node": ">=10"
+      }
+    },
     "node_modules/tap/node_modules/ms": {
       "version": "2.1.2",
       "dev": true,
@@ -15742,6 +15732,19 @@
         "node": ">=8"
       }
     },
+    "node_modules/tar/node_modules/mkdirp": {
+      "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
+      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
+      "inBundle": true,
+      "license": "MIT",
+      "bin": {
+        "mkdirp": "bin/cmd.js"
+      },
+      "engines": {
+        "node": ">=10"
+      }
+    },
     "node_modules/tcompare": {
       "version": "5.0.7",
       "dev": true,

From 566f1b7b487ad80604c61162ddde769d5ac2b241 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:23:55 -0700
Subject: [PATCH 29/63] deps: minimatch@10.0.3

---
 node_modules/.gitignore                       |   14 +-
 .../node_modules/minimatch/package.json       |   79 --
 .../minimatch/dist/commonjs/index.js          | 1014 -----------------
 .../node_modules/minimatch/dist/esm/index.js  | 1001 ----------------
 .../node_modules/minimatch/LICENSE            |   15 -
 .../dist/commonjs/assert-valid-pattern.js     |   14 -
 .../minimatch/dist/commonjs/ast.js            |  592 ----------
 .../dist/commonjs/brace-expressions.js        |  152 ---
 .../minimatch/dist/commonjs/escape.js         |   22 -
 .../minimatch/dist/commonjs/package.json      |    3 -
 .../minimatch/dist/commonjs/unescape.js       |   24 -
 .../dist/esm/assert-valid-pattern.js          |   10 -
 .../node_modules/minimatch/dist/esm/ast.js    |  588 ----------
 .../minimatch/dist/esm/brace-expressions.js   |  148 ---
 .../node_modules/minimatch/dist/esm/escape.js |   18 -
 .../minimatch/dist/esm/package.json           |    3 -
 .../minimatch/dist/esm/unescape.js            |   20 -
 node_modules/minimatch/dist/commonjs/index.js |    7 +-
 node_modules/minimatch/dist/esm/index.js      |    2 +-
 node_modules/minimatch/package.json           |   31 +-
 .../node_modules/minimatch/LICENSE            |    0
 .../dist/commonjs/assert-valid-pattern.js     |    0
 .../minimatch/dist/commonjs/ast.js            |    0
 .../dist/commonjs/brace-expressions.js        |    0
 .../minimatch/dist/commonjs/escape.js         |    0
 .../minimatch/dist/commonjs/index.js          |    7 +-
 .../minimatch/dist/commonjs/package.json      |    0
 .../minimatch/dist/commonjs/unescape.js       |    0
 .../dist/esm/assert-valid-pattern.js          |    0
 .../node_modules/minimatch/dist/esm/ast.js    |    0
 .../minimatch/dist/esm/brace-expressions.js   |    0
 .../node_modules/minimatch/dist/esm/escape.js |    0
 .../node_modules/minimatch/dist/esm/index.js  |    2 +-
 .../minimatch/dist/esm/package.json           |    0
 .../minimatch/dist/esm/unescape.js            |    0
 .../node_modules/minimatch/package.json       |   31 +-
 node_modules/tar/node_modules/mkdirp/LICENSE  |   21 +
 .../tar/node_modules/mkdirp/bin/cmd.js        |   68 ++
 node_modules/tar/node_modules/mkdirp/index.js |   31 +
 .../tar/node_modules/mkdirp/lib/find-made.js  |   29 +
 .../node_modules/mkdirp/lib/mkdirp-manual.js  |   64 ++
 .../node_modules/mkdirp/lib/mkdirp-native.js  |   39 +
 .../tar/node_modules/mkdirp/lib/opts-arg.js   |   23 +
 .../tar/node_modules/mkdirp/lib/path-arg.js   |   29 +
 .../tar/node_modules/mkdirp/lib/use-native.js |   10 +
 .../tar/node_modules/mkdirp/package.json      |   44 +
 .../node_modules/minimatch/LICENSE            |    0
 .../dist/commonjs/assert-valid-pattern.js     |    0
 .../minimatch/dist/commonjs/ast.js            |    0
 .../dist/commonjs/brace-expressions.js        |    0
 .../minimatch/dist/commonjs/escape.js         |    0
 .../minimatch/dist/commonjs/index.js          |    7 +-
 .../minimatch/dist/commonjs/package.json      |    0
 .../minimatch/dist/commonjs/unescape.js       |    0
 .../dist/esm/assert-valid-pattern.js          |    0
 .../node_modules/minimatch/dist/esm/ast.js    |    0
 .../minimatch/dist/esm/brace-expressions.js   |    0
 .../node_modules/minimatch/dist/esm/escape.js |    0
 .../node_modules/minimatch/dist/esm/index.js  |    2 +-
 .../minimatch/dist/esm/package.json           |    0
 .../minimatch/dist/esm/unescape.js            |    0
 .../node_modules/minimatch/package.json       |   31 +-
 package-lock.json                             |  140 ++-
 package.json                                  |    2 +-
 workspaces/arborist/package.json              |    2 +-
 workspaces/libnpmdiff/package.json            |    2 +-
 66 files changed, 515 insertions(+), 3826 deletions(-)
 delete mode 100644 node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json
 delete mode 100644 node_modules/glob/node_modules/minimatch/dist/commonjs/index.js
 delete mode 100644 node_modules/glob/node_modules/minimatch/dist/esm/index.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/LICENSE
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/ast.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/brace-expressions.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/escape.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/package.json
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/unescape.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/esm/assert-valid-pattern.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/esm/ast.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/esm/brace-expressions.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/esm/escape.js
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/esm/package.json
 delete mode 100644 node_modules/ignore-walk/node_modules/minimatch/dist/esm/unescape.js
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/LICENSE (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/ast.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/brace-expressions.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/escape.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/index.js (99%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/package.json (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/commonjs/unescape.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/esm/assert-valid-pattern.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/esm/ast.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/esm/brace-expressions.js (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/esm/escape.js (100%)
 rename node_modules/{ignore-walk => node-gyp}/node_modules/minimatch/dist/esm/index.js (99%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/esm/package.json (100%)
 rename node_modules/{@npmcli/map-workspaces => node-gyp}/node_modules/minimatch/dist/esm/unescape.js (100%)
 rename node_modules/{glob => node-gyp}/node_modules/minimatch/package.json (78%)
 create mode 100644 node_modules/tar/node_modules/mkdirp/LICENSE
 create mode 100755 node_modules/tar/node_modules/mkdirp/bin/cmd.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/index.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/lib/find-made.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/lib/path-arg.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/lib/use-native.js
 create mode 100644 node_modules/tar/node_modules/mkdirp/package.json
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/LICENSE (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/commonjs/ast.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/commonjs/brace-expressions.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/commonjs/escape.js (100%)
 rename node_modules/{ignore-walk => tuf-js}/node_modules/minimatch/dist/commonjs/index.js (99%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/commonjs/package.json (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/commonjs/unescape.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/esm/assert-valid-pattern.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/esm/ast.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/esm/brace-expressions.js (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/esm/escape.js (100%)
 rename node_modules/{@npmcli/map-workspaces => tuf-js}/node_modules/minimatch/dist/esm/index.js (99%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/esm/package.json (100%)
 rename node_modules/{glob => tuf-js}/node_modules/minimatch/dist/esm/unescape.js (100%)
 rename node_modules/{ignore-walk => tuf-js}/node_modules/minimatch/package.json (78%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index f4705d305a386..12d25ef01bec3 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -23,9 +23,6 @@
 !/@npmcli/git
 !/@npmcli/installed-package-contents
 !/@npmcli/map-workspaces
-!/@npmcli/map-workspaces/node_modules/
-/@npmcli/map-workspaces/node_modules/*
-!/@npmcli/map-workspaces/node_modules/minimatch
 !/@npmcli/metavuln-calculator
 !/@npmcli/name-from-folder
 !/@npmcli/node-gyp
@@ -85,9 +82,6 @@
 !/foreground-child
 !/fs-minipass
 !/glob
-!/glob/node_modules/
-/glob/node_modules/*
-!/glob/node_modules/minimatch
 !/graceful-fs
 !/hosted-git-info
 !/http-cache-semantics
@@ -95,9 +89,6 @@
 !/https-proxy-agent
 !/iconv-lite
 !/ignore-walk
-!/ignore-walk/node_modules/
-/ignore-walk/node_modules/*
-!/ignore-walk/node_modules/minimatch
 !/imurmurhash
 !/ini
 !/init-package-json
@@ -148,7 +139,7 @@
 !/node-gyp/node_modules/jackspeak
 !/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
-!/node-gyp/node_modules/mkdirp
+!/node-gyp/node_modules/minimatch
 !/node-gyp/node_modules/path-scurry
 !/node-gyp/node_modules/tar
 !/node-gyp/node_modules/yallist
@@ -170,7 +161,6 @@
 !/pacote/node_modules/
 /pacote/node_modules/*
 !/pacote/node_modules/chownr
-!/pacote/node_modules/mkdirp
 !/pacote/node_modules/tar
 !/pacote/node_modules/yallist
 !/parse-conflict-json
@@ -222,6 +212,7 @@
 !/tar/node_modules/minizlib/node_modules/
 /tar/node_modules/minizlib/node_modules/*
 !/tar/node_modules/minizlib/node_modules/minipass
+!/tar/node_modules/mkdirp
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
@@ -236,6 +227,7 @@
 !/tuf-js/node_modules/@tufjs/
 /tuf-js/node_modules/@tufjs/*
 !/tuf-js/node_modules/@tufjs/models
+!/tuf-js/node_modules/minimatch
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json b/node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json
deleted file mode 100644
index bfa2423f50b5e..0000000000000
--- a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/package.json
+++ /dev/null
@@ -1,79 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
-  "name": "minimatch",
-  "description": "a glob matcher in javascript",
-  "version": "10.0.3",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/minimatch.git"
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "engines": {
-    "node": "20 || >=22"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.2",
-    "@types/node": "^24.0.0",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.3.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "license": "ISC",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "type": "module",
-  "module": "./dist/esm/index.js",
-  "dependencies": {
-    "@isaacs/brace-expansion": "^5.0.0"
-  }
-}
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/index.js b/node_modules/glob/node_modules/minimatch/dist/commonjs/index.js
deleted file mode 100644
index f58fb8616aa9a..0000000000000
--- a/node_modules/glob/node_modules/minimatch/dist/commonjs/index.js
+++ /dev/null
@@ -1,1014 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
-const brace_expansion_1 = require("@isaacs/brace-expansion");
-const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
-const ast_js_1 = require("./ast.js");
-const escape_js_1 = require("./escape.js");
-const unescape_js_1 = require("./unescape.js");
-const minimatch = (p, pattern, options = {}) => {
-    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-exports.minimatch = minimatch;
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-exports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-exports.minimatch.sep = exports.sep;
-exports.GLOBSTAR = Symbol('globstar **');
-exports.minimatch.GLOBSTAR = exports.GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-const filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);
-exports.filter = filter;
-exports.minimatch.filter = exports.filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return exports.minimatch;
-    }
-    const orig = exports.minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: exports.GLOBSTAR,
-    });
-};
-exports.defaults = defaults;
-exports.minimatch.defaults = exports.defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-const braceExpand = (pattern, options = {}) => {
-    (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-    // Thanks to Yeting Li <https://github.com/yetingli> for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return (0, brace_expansion_1.expand)(pattern);
-};
-exports.braceExpand = braceExpand;
-exports.minimatch.braceExpand = exports.braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-exports.makeRe = makeRe;
-exports.minimatch.makeRe = exports.makeRe;
-const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-exports.match = match;
-exports.minimatch.match = exports.match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/<e>/<rest> -> <pre>/<rest>
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>/<e>/<rest> -> <pre>/<rest>
-    // <pre>/<p>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>/<e>/<rest> -> <pre>/<rest>
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === exports.GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return (0, exports.braceExpand)(this.pattern, this.options);
-    }
-    parse(pattern) {
-        (0, assert_valid_pattern_js_1.assertValidPattern)(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return exports.GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = ast_js_1.AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === exports.GLOBSTAR
-                        ? exports.GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== exports.GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== exports.GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = exports.GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== exports.GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return exports.minimatch.defaults(def).Minimatch;
-    }
-}
-exports.Minimatch = Minimatch;
-/* c8 ignore start */
-var ast_js_2 = require("./ast.js");
-Object.defineProperty(exports, "AST", { enumerable: true, get: function () { return ast_js_2.AST; } });
-var escape_js_2 = require("./escape.js");
-Object.defineProperty(exports, "escape", { enumerable: true, get: function () { return escape_js_2.escape; } });
-var unescape_js_2 = require("./unescape.js");
-Object.defineProperty(exports, "unescape", { enumerable: true, get: function () { return unescape_js_2.unescape; } });
-/* c8 ignore stop */
-exports.minimatch.AST = ast_js_1.AST;
-exports.minimatch.Minimatch = Minimatch;
-exports.minimatch.escape = escape_js_1.escape;
-exports.minimatch.unescape = unescape_js_1.unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/index.js b/node_modules/glob/node_modules/minimatch/dist/esm/index.js
deleted file mode 100644
index 790d6c02a2f22..0000000000000
--- a/node_modules/glob/node_modules/minimatch/dist/esm/index.js
+++ /dev/null
@@ -1,1001 +0,0 @@
-import { expand } from '@isaacs/brace-expansion';
-import { assertValidPattern } from './assert-valid-pattern.js';
-import { AST } from './ast.js';
-import { escape } from './escape.js';
-import { unescape } from './unescape.js';
-export const minimatch = (p, pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // shortcut: comments match nothing.
-    if (!options.nocomment && pattern.charAt(0) === '#') {
-        return false;
-    }
-    return new Minimatch(pattern, options).match(p);
-};
-// Optimized checking for the most common glob patterns.
-const starDotExtRE = /^\*+([^+@!?\*\[\(]*)$/;
-const starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);
-const starDotExtTestDot = (ext) => (f) => f.endsWith(ext);
-const starDotExtTestNocase = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);
-};
-const starDotExtTestNocaseDot = (ext) => {
-    ext = ext.toLowerCase();
-    return (f) => f.toLowerCase().endsWith(ext);
-};
-const starDotStarRE = /^\*+\.\*+$/;
-const starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');
-const starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');
-const dotStarRE = /^\.\*+$/;
-const dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');
-const starRE = /^\*+$/;
-const starTest = (f) => f.length !== 0 && !f.startsWith('.');
-const starTestDot = (f) => f.length !== 0 && f !== '.' && f !== '..';
-const qmarksRE = /^\?+([^+@!?\*\[\(]*)?$/;
-const qmarksTestNocase = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestNocaseDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    if (!ext)
-        return noext;
-    ext = ext.toLowerCase();
-    return (f) => noext(f) && f.toLowerCase().endsWith(ext);
-};
-const qmarksTestDot = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExtDot([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTest = ([$0, ext = '']) => {
-    const noext = qmarksTestNoExt([$0]);
-    return !ext ? noext : (f) => noext(f) && f.endsWith(ext);
-};
-const qmarksTestNoExt = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && !f.startsWith('.');
-};
-const qmarksTestNoExtDot = ([$0]) => {
-    const len = $0.length;
-    return (f) => f.length === len && f !== '.' && f !== '..';
-};
-/* c8 ignore start */
-const defaultPlatform = (typeof process === 'object' && process
-    ? (typeof process.env === 'object' &&
-        process.env &&
-        process.env.__MINIMATCH_TESTING_PLATFORM__) ||
-        process.platform
-    : 'posix');
-const path = {
-    win32: { sep: '\\' },
-    posix: { sep: '/' },
-};
-/* c8 ignore stop */
-export const sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;
-minimatch.sep = sep;
-export const GLOBSTAR = Symbol('globstar **');
-minimatch.GLOBSTAR = GLOBSTAR;
-// any single thing other than /
-// don't need to escape / when using new RegExp()
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// ** when dots are allowed.  Anything goes, except .. and .
-// not (^ or / followed by one or two dots followed by $ or /),
-// followed by anything, any number of times.
-const twoStarDot = '(?:(?!(?:\\/|^)(?:\\.{1,2})($|\\/)).)*?';
-// not a ^ or / followed by a dot,
-// followed by anything, any number of times.
-const twoStarNoDot = '(?:(?!(?:\\/|^)\\.).)*?';
-export const filter = (pattern, options = {}) => (p) => minimatch(p, pattern, options);
-minimatch.filter = filter;
-const ext = (a, b = {}) => Object.assign({}, a, b);
-export const defaults = (def) => {
-    if (!def || typeof def !== 'object' || !Object.keys(def).length) {
-        return minimatch;
-    }
-    const orig = minimatch;
-    const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));
-    return Object.assign(m, {
-        Minimatch: class Minimatch extends orig.Minimatch {
-            constructor(pattern, options = {}) {
-                super(pattern, ext(def, options));
-            }
-            static defaults(options) {
-                return orig.defaults(ext(def, options)).Minimatch;
-            }
-        },
-        AST: class AST extends orig.AST {
-            /* c8 ignore start */
-            constructor(type, parent, options = {}) {
-                super(type, parent, ext(def, options));
-            }
-            /* c8 ignore stop */
-            static fromGlob(pattern, options = {}) {
-                return orig.AST.fromGlob(pattern, ext(def, options));
-            }
-        },
-        unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),
-        escape: (s, options = {}) => orig.escape(s, ext(def, options)),
-        filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),
-        defaults: (options) => orig.defaults(ext(def, options)),
-        makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),
-        braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),
-        match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),
-        sep: orig.sep,
-        GLOBSTAR: GLOBSTAR,
-    });
-};
-minimatch.defaults = defaults;
-// Brace expansion:
-// a{b,c}d -> abd acd
-// a{b,}c -> abc ac
-// a{0..3}d -> a0d a1d a2d a3d
-// a{b,c{d,e}f}g -> abg acdfg acefg
-// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg
-//
-// Invalid sets are not expanded.
-// a{2..}b -> a{2..}b
-// a{b}c -> a{b}c
-export const braceExpand = (pattern, options = {}) => {
-    assertValidPattern(pattern);
-    // Thanks to Yeting Li <https://github.com/yetingli> for
-    // improving this regexp to avoid a ReDOS vulnerability.
-    if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
-        // shortcut. no need to expand.
-        return [pattern];
-    }
-    return expand(pattern);
-};
-minimatch.braceExpand = braceExpand;
-// parse a component of the expanded set.
-// At this point, no pattern may contain "/" in it
-// so we're going to return a 2d array, where each entry is the full
-// pattern, split on '/', and then turned into a regular expression.
-// A regexp is made at the end which joins each array with an
-// escaped /, and another full one which joins each regexp with |.
-//
-// Following the lead of Bash 4.1, note that "**" only has special meaning
-// when it is the *only* thing in a path portion.  Otherwise, any series
-// of * is equivalent to a single *.  Globstar behavior is enabled by
-// default, and can be disabled by setting options.noglobstar.
-export const makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();
-minimatch.makeRe = makeRe;
-export const match = (list, pattern, options = {}) => {
-    const mm = new Minimatch(pattern, options);
-    list = list.filter(f => mm.match(f));
-    if (mm.options.nonull && !list.length) {
-        list.push(pattern);
-    }
-    return list;
-};
-minimatch.match = match;
-// replace stuff like \* with *
-const globMagic = /[?*]|[+@!]\(.*?\)|\[|\]/;
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-export class Minimatch {
-    options;
-    set;
-    pattern;
-    windowsPathsNoEscape;
-    nonegate;
-    negate;
-    comment;
-    empty;
-    preserveMultipleSlashes;
-    partial;
-    globSet;
-    globParts;
-    nocase;
-    isWindows;
-    platform;
-    windowsNoMagicRoot;
-    regexp;
-    constructor(pattern, options = {}) {
-        assertValidPattern(pattern);
-        options = options || {};
-        this.options = options;
-        this.pattern = pattern;
-        this.platform = options.platform || defaultPlatform;
-        this.isWindows = this.platform === 'win32';
-        this.windowsPathsNoEscape =
-            !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;
-        if (this.windowsPathsNoEscape) {
-            this.pattern = this.pattern.replace(/\\/g, '/');
-        }
-        this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;
-        this.regexp = null;
-        this.negate = false;
-        this.nonegate = !!options.nonegate;
-        this.comment = false;
-        this.empty = false;
-        this.partial = !!options.partial;
-        this.nocase = !!this.options.nocase;
-        this.windowsNoMagicRoot =
-            options.windowsNoMagicRoot !== undefined
-                ? options.windowsNoMagicRoot
-                : !!(this.isWindows && this.nocase);
-        this.globSet = [];
-        this.globParts = [];
-        this.set = [];
-        // make the set of regexps etc.
-        this.make();
-    }
-    hasMagic() {
-        if (this.options.magicalBraces && this.set.length > 1) {
-            return true;
-        }
-        for (const pattern of this.set) {
-            for (const part of pattern) {
-                if (typeof part !== 'string')
-                    return true;
-            }
-        }
-        return false;
-    }
-    debug(..._) { }
-    make() {
-        const pattern = this.pattern;
-        const options = this.options;
-        // empty patterns and comments match nothing.
-        if (!options.nocomment && pattern.charAt(0) === '#') {
-            this.comment = true;
-            return;
-        }
-        if (!pattern) {
-            this.empty = true;
-            return;
-        }
-        // step 1: figure out negation, etc.
-        this.parseNegate();
-        // step 2: expand braces
-        this.globSet = [...new Set(this.braceExpand())];
-        if (options.debug) {
-            this.debug = (...args) => console.error(...args);
-        }
-        this.debug(this.pattern, this.globSet);
-        // step 3: now we have a set, so turn each one into a series of
-        // path-portion matching patterns.
-        // These will be regexps, except in the case of "**", which is
-        // set to the GLOBSTAR object for globstar behavior,
-        // and will not contain any / characters
-        //
-        // First, we preprocess to make the glob pattern sets a bit simpler
-        // and deduped.  There are some perf-killing patterns that can cause
-        // problems with a glob walk, but we can simplify them down a bit.
-        const rawGlobParts = this.globSet.map(s => this.slashSplit(s));
-        this.globParts = this.preprocess(rawGlobParts);
-        this.debug(this.pattern, this.globParts);
-        // glob --> regexps
-        let set = this.globParts.map((s, _, __) => {
-            if (this.isWindows && this.windowsNoMagicRoot) {
-                // check if it's a drive or unc path.
-                const isUNC = s[0] === '' &&
-                    s[1] === '' &&
-                    (s[2] === '?' || !globMagic.test(s[2])) &&
-                    !globMagic.test(s[3]);
-                const isDrive = /^[a-z]:/i.test(s[0]);
-                if (isUNC) {
-                    return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];
-                }
-                else if (isDrive) {
-                    return [s[0], ...s.slice(1).map(ss => this.parse(ss))];
-                }
-            }
-            return s.map(ss => this.parse(ss));
-        });
-        this.debug(this.pattern, set);
-        // filter out everything that didn't compile properly.
-        this.set = set.filter(s => s.indexOf(false) === -1);
-        // do not treat the ? in UNC paths as magic
-        if (this.isWindows) {
-            for (let i = 0; i < this.set.length; i++) {
-                const p = this.set[i];
-                if (p[0] === '' &&
-                    p[1] === '' &&
-                    this.globParts[i][2] === '?' &&
-                    typeof p[3] === 'string' &&
-                    /^[a-z]:$/i.test(p[3])) {
-                    p[2] = '?';
-                }
-            }
-        }
-        this.debug(this.pattern, this.set);
-    }
-    // various transforms to equivalent pattern sets that are
-    // faster to process in a filesystem walk.  The goal is to
-    // eliminate what we can, and push all ** patterns as far
-    // to the right as possible, even if it increases the number
-    // of patterns that we have to process.
-    preprocess(globParts) {
-        // if we're not in globstar mode, then turn all ** into *
-        if (this.options.noglobstar) {
-            for (let i = 0; i < globParts.length; i++) {
-                for (let j = 0; j < globParts[i].length; j++) {
-                    if (globParts[i][j] === '**') {
-                        globParts[i][j] = '*';
-                    }
-                }
-            }
-        }
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            // aggressive optimization for the purpose of fs walking
-            globParts = this.firstPhasePreProcess(globParts);
-            globParts = this.secondPhasePreProcess(globParts);
-        }
-        else if (optimizationLevel >= 1) {
-            // just basic optimizations to remove some .. parts
-            globParts = this.levelOneOptimize(globParts);
-        }
-        else {
-            // just collapse multiple ** portions into one
-            globParts = this.adjascentGlobstarOptimize(globParts);
-        }
-        return globParts;
-    }
-    // just get rid of adjascent ** portions
-    adjascentGlobstarOptimize(globParts) {
-        return globParts.map(parts => {
-            let gs = -1;
-            while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                let i = gs;
-                while (parts[i + 1] === '**') {
-                    i++;
-                }
-                if (i !== gs) {
-                    parts.splice(gs, i - gs);
-                }
-            }
-            return parts;
-        });
-    }
-    // get rid of adjascent ** and resolve .. portions
-    levelOneOptimize(globParts) {
-        return globParts.map(parts => {
-            parts = parts.reduce((set, part) => {
-                const prev = set[set.length - 1];
-                if (part === '**' && prev === '**') {
-                    return set;
-                }
-                if (part === '..') {
-                    if (prev && prev !== '..' && prev !== '.' && prev !== '**') {
-                        set.pop();
-                        return set;
-                    }
-                }
-                set.push(part);
-                return set;
-            }, []);
-            return parts.length === 0 ? [''] : parts;
-        });
-    }
-    levelTwoFileOptimize(parts) {
-        if (!Array.isArray(parts)) {
-            parts = this.slashSplit(parts);
-        }
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>// -> <pre>/
-            if (!this.preserveMultipleSlashes) {
-                for (let i = 1; i < parts.length - 1; i++) {
-                    const p = parts[i];
-                    // don't squeeze out UNC patterns
-                    if (i === 1 && p === '' && parts[0] === '')
-                        continue;
-                    if (p === '.' || p === '') {
-                        didSomething = true;
-                        parts.splice(i, 1);
-                        i--;
-                    }
-                }
-                if (parts[0] === '.' &&
-                    parts.length === 2 &&
-                    (parts[1] === '.' || parts[1] === '')) {
-                    didSomething = true;
-                    parts.pop();
-                }
-            }
-            // <pre>/<p>/../<rest> -> <pre>/<rest>
-            let dd = 0;
-            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                const p = parts[dd - 1];
-                if (p && p !== '.' && p !== '..' && p !== '**') {
-                    didSomething = true;
-                    parts.splice(dd - 1, 2);
-                    dd -= 2;
-                }
-            }
-        } while (didSomething);
-        return parts.length === 0 ? [''] : parts;
-    }
-    // First phase: single-pattern processing
-    // <pre> is 1 or more portions
-    // <rest> is 1 or more portions
-    // <p> is any portion other than ., .., '', or **
-    // <e> is . or ''
-    //
-    // **/.. is *brutal* for filesystem walking performance, because
-    // it effectively resets the recursive walk each time it occurs,
-    // and ** cannot be reduced out by a .. pattern part like a regexp
-    // or most strings (other than .., ., and '') can be.
-    //
-    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-    // <pre>// -> <pre>/
-    // <pre>/<e>/../<rest> -> <pre>/<rest>
-    // **/**/<rest> -> **/<rest>
-    //
-    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow
-    // this WOULD be allowed if ** did follow symlinks, or * didn't
-    firstPhasePreProcess(globParts) {
-        let didSomething = false;
-        do {
-            didSomething = false;
-            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}
-            for (let parts of globParts) {
-                let gs = -1;
-                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {
-                    let gss = gs;
-                    while (parts[gss + 1] === '**') {
-                        // <pre>/**/**/<rest> -> <pre>/**/<rest>
-                        gss++;
-                    }
-                    // eg, if gs is 2 and gss is 4, that means we have 3 **
-                    // parts, and can remove 2 of them.
-                    if (gss > gs) {
-                        parts.splice(gs + 1, gss - gs);
-                    }
-                    let next = parts[gs + 1];
-                    const p = parts[gs + 2];
-                    const p2 = parts[gs + 3];
-                    if (next !== '..')
-                        continue;
-                    if (!p ||
-                        p === '.' ||
-                        p === '..' ||
-                        !p2 ||
-                        p2 === '.' ||
-                        p2 === '..') {
-                        continue;
-                    }
-                    didSomething = true;
-                    // edit parts in place, and push the new one
-                    parts.splice(gs, 1);
-                    const other = parts.slice(0);
-                    other[gs] = '**';
-                    globParts.push(other);
-                    gs--;
-                }
-                // <pre>// -> <pre>/
-                if (!this.preserveMultipleSlashes) {
-                    for (let i = 1; i < parts.length - 1; i++) {
-                        const p = parts[i];
-                        // don't squeeze out UNC patterns
-                        if (i === 1 && p === '' && parts[0] === '')
-                            continue;
-                        if (p === '.' || p === '') {
-                            didSomething = true;
-                            parts.splice(i, 1);
-                            i--;
-                        }
-                    }
-                    if (parts[0] === '.' &&
-                        parts.length === 2 &&
-                        (parts[1] === '.' || parts[1] === '')) {
-                        didSomething = true;
-                        parts.pop();
-                    }
-                }
-                // <pre>/<p>/../<rest> -> <pre>/<rest>
-                let dd = 0;
-                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {
-                    const p = parts[dd - 1];
-                    if (p && p !== '.' && p !== '..' && p !== '**') {
-                        didSomething = true;
-                        const needDot = dd === 1 && parts[dd + 1] === '**';
-                        const splin = needDot ? ['.'] : [];
-                        parts.splice(dd - 1, 2, ...splin);
-                        if (parts.length === 0)
-                            parts.push('');
-                        dd -= 2;
-                    }
-                }
-            }
-        } while (didSomething);
-        return globParts;
-    }
-    // second phase: multi-pattern dedupes
-    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>
-    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>
-    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>
-    //
-    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>
-    // ^-- not valid because ** doens't follow symlinks
-    secondPhasePreProcess(globParts) {
-        for (let i = 0; i < globParts.length - 1; i++) {
-            for (let j = i + 1; j < globParts.length; j++) {
-                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);
-                if (matched) {
-                    globParts[i] = [];
-                    globParts[j] = matched;
-                    break;
-                }
-            }
-        }
-        return globParts.filter(gs => gs.length);
-    }
-    partsMatch(a, b, emptyGSMatch = false) {
-        let ai = 0;
-        let bi = 0;
-        let result = [];
-        let which = '';
-        while (ai < a.length && bi < b.length) {
-            if (a[ai] === b[bi]) {
-                result.push(which === 'b' ? b[bi] : a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {
-                result.push(a[ai]);
-                ai++;
-            }
-            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {
-                result.push(b[bi]);
-                bi++;
-            }
-            else if (a[ai] === '*' &&
-                b[bi] &&
-                (this.options.dot || !b[bi].startsWith('.')) &&
-                b[bi] !== '**') {
-                if (which === 'b')
-                    return false;
-                which = 'a';
-                result.push(a[ai]);
-                ai++;
-                bi++;
-            }
-            else if (b[bi] === '*' &&
-                a[ai] &&
-                (this.options.dot || !a[ai].startsWith('.')) &&
-                a[ai] !== '**') {
-                if (which === 'a')
-                    return false;
-                which = 'b';
-                result.push(b[bi]);
-                ai++;
-                bi++;
-            }
-            else {
-                return false;
-            }
-        }
-        // if we fall out of the loop, it means they two are identical
-        // as long as their lengths match
-        return a.length === b.length && result;
-    }
-    parseNegate() {
-        if (this.nonegate)
-            return;
-        const pattern = this.pattern;
-        let negate = false;
-        let negateOffset = 0;
-        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
-            negate = !negate;
-            negateOffset++;
-        }
-        if (negateOffset)
-            this.pattern = pattern.slice(negateOffset);
-        this.negate = negate;
-    }
-    // set partial to true to test if, for example,
-    // "/a/b" matches the start of "/*/b/*/d"
-    // Partial means, if you run out of file before you run
-    // out of pattern, then that's fine, as long as all
-    // the parts match.
-    matchOne(file, pattern, partial = false) {
-        const options = this.options;
-        // UNC paths like //?/X:/... can match X:/... and vice versa
-        // Drive letters in absolute drive or unc paths are always compared
-        // case-insensitively.
-        if (this.isWindows) {
-            const fileDrive = typeof file[0] === 'string' && /^[a-z]:$/i.test(file[0]);
-            const fileUNC = !fileDrive &&
-                file[0] === '' &&
-                file[1] === '' &&
-                file[2] === '?' &&
-                /^[a-z]:$/i.test(file[3]);
-            const patternDrive = typeof pattern[0] === 'string' && /^[a-z]:$/i.test(pattern[0]);
-            const patternUNC = !patternDrive &&
-                pattern[0] === '' &&
-                pattern[1] === '' &&
-                pattern[2] === '?' &&
-                typeof pattern[3] === 'string' &&
-                /^[a-z]:$/i.test(pattern[3]);
-            const fdi = fileUNC ? 3 : fileDrive ? 0 : undefined;
-            const pdi = patternUNC ? 3 : patternDrive ? 0 : undefined;
-            if (typeof fdi === 'number' && typeof pdi === 'number') {
-                const [fd, pd] = [file[fdi], pattern[pdi]];
-                if (fd.toLowerCase() === pd.toLowerCase()) {
-                    pattern[pdi] = fd;
-                    if (pdi > fdi) {
-                        pattern = pattern.slice(pdi);
-                    }
-                    else if (fdi > pdi) {
-                        file = file.slice(fdi);
-                    }
-                }
-            }
-        }
-        // resolve and reduce . and .. portions in the file as well.
-        // dont' need to do the second phase, because it's only one string[]
-        const { optimizationLevel = 1 } = this.options;
-        if (optimizationLevel >= 2) {
-            file = this.levelTwoFileOptimize(file);
-        }
-        this.debug('matchOne', this, { file, pattern });
-        this.debug('matchOne', file.length, pattern.length);
-        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {
-            this.debug('matchOne loop');
-            var p = pattern[pi];
-            var f = file[fi];
-            this.debug(pattern, p, f);
-            // should be impossible.
-            // some invalid regexp stuff in the set.
-            /* c8 ignore start */
-            if (p === false) {
-                return false;
-            }
-            /* c8 ignore stop */
-            if (p === GLOBSTAR) {
-                this.debug('GLOBSTAR', [pattern, p, f]);
-                // "**"
-                // a/**/b/**/c would match the following:
-                // a/b/x/y/z/c
-                // a/x/y/z/b/c
-                // a/b/x/b/x/c
-                // a/b/c
-                // To do this, take the rest of the pattern after
-                // the **, and see if it would match the file remainder.
-                // If so, return success.
-                // If not, the ** "swallows" a segment, and try again.
-                // This is recursively awful.
-                //
-                // a/**/b/**/c matching a/b/x/y/z/c
-                // - a matches a
-                // - doublestar
-                //   - matchOne(b/x/y/z/c, b/**/c)
-                //     - b matches b
-                //     - doublestar
-                //       - matchOne(x/y/z/c, c) -> no
-                //       - matchOne(y/z/c, c) -> no
-                //       - matchOne(z/c, c) -> no
-                //       - matchOne(c, c) yes, hit
-                var fr = fi;
-                var pr = pi + 1;
-                if (pr === pl) {
-                    this.debug('** at the end');
-                    // a ** at the end will just swallow the rest.
-                    // We have found a match.
-                    // however, it will not swallow /.x, unless
-                    // options.dot is set.
-                    // . and .. are *never* matched by **, for explosively
-                    // exponential reasons.
-                    for (; fi < fl; fi++) {
-                        if (file[fi] === '.' ||
-                            file[fi] === '..' ||
-                            (!options.dot && file[fi].charAt(0) === '.'))
-                            return false;
-                    }
-                    return true;
-                }
-                // ok, let's see if we can swallow whatever we can.
-                while (fr < fl) {
-                    var swallowee = file[fr];
-                    this.debug('\nglobstar while', file, fr, pattern, pr, swallowee);
-                    // XXX remove this slice.  Just pass the start index.
-                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
-                        this.debug('globstar found match!', fr, fl, swallowee);
-                        // found a match.
-                        return true;
-                    }
-                    else {
-                        // can't swallow "." or ".." ever.
-                        // can only swallow ".foo" when explicitly asked.
-                        if (swallowee === '.' ||
-                            swallowee === '..' ||
-                            (!options.dot && swallowee.charAt(0) === '.')) {
-                            this.debug('dot detected!', file, fr, pattern, pr);
-                            break;
-                        }
-                        // ** swallows a segment, and continue.
-                        this.debug('globstar swallow a segment, and continue');
-                        fr++;
-                    }
-                }
-                // no match was found.
-                // However, in partial mode, we can't say this is necessarily over.
-                /* c8 ignore start */
-                if (partial) {
-                    // ran out of file
-                    this.debug('\n>>> no match, partial?', file, fr, pattern, pr);
-                    if (fr === fl) {
-                        return true;
-                    }
-                }
-                /* c8 ignore stop */
-                return false;
-            }
-            // something other than **
-            // non-magic patterns just have to match exactly
-            // patterns with magic have been turned into regexps.
-            let hit;
-            if (typeof p === 'string') {
-                hit = f === p;
-                this.debug('string match', p, f, hit);
-            }
-            else {
-                hit = p.test(f);
-                this.debug('pattern match', p, f, hit);
-            }
-            if (!hit)
-                return false;
-        }
-        // Note: ending in / means that we'll get a final ""
-        // at the end of the pattern.  This can only match a
-        // corresponding "" at the end of the file.
-        // If the file ends in /, then it can only match a
-        // a pattern that ends in /, unless the pattern just
-        // doesn't have any more for it. But, a/b/ should *not*
-        // match "a/b/*", even though "" matches against the
-        // [^/]*? pattern, except in partial mode, where it might
-        // simply not be reached yet.
-        // However, a/b/ should still satisfy a/*
-        // now either we fell off the end of the pattern, or we're done.
-        if (fi === fl && pi === pl) {
-            // ran out of pattern and filename at the same time.
-            // an exact hit!
-            return true;
-        }
-        else if (fi === fl) {
-            // ran out of file, but still had pattern left.
-            // this is ok if we're doing the match as part of
-            // a glob fs traversal.
-            return partial;
-        }
-        else if (pi === pl) {
-            // ran out of pattern, still have file left.
-            // this is only acceptable if we're on the very last
-            // empty segment of a file with a trailing slash.
-            // a/* should match a/b/
-            return fi === fl - 1 && file[fi] === '';
-            /* c8 ignore start */
-        }
-        else {
-            // should be unreachable.
-            throw new Error('wtf?');
-        }
-        /* c8 ignore stop */
-    }
-    braceExpand() {
-        return braceExpand(this.pattern, this.options);
-    }
-    parse(pattern) {
-        assertValidPattern(pattern);
-        const options = this.options;
-        // shortcuts
-        if (pattern === '**')
-            return GLOBSTAR;
-        if (pattern === '')
-            return '';
-        // far and away, the most common glob pattern parts are
-        // *, *.*, and *.  Add a fast check method for those.
-        let m;
-        let fastTest = null;
-        if ((m = pattern.match(starRE))) {
-            fastTest = options.dot ? starTestDot : starTest;
-        }
-        else if ((m = pattern.match(starDotExtRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? starDotExtTestNocaseDot
-                    : starDotExtTestNocase
-                : options.dot
-                    ? starDotExtTestDot
-                    : starDotExtTest)(m[1]);
-        }
-        else if ((m = pattern.match(qmarksRE))) {
-            fastTest = (options.nocase
-                ? options.dot
-                    ? qmarksTestNocaseDot
-                    : qmarksTestNocase
-                : options.dot
-                    ? qmarksTestDot
-                    : qmarksTest)(m);
-        }
-        else if ((m = pattern.match(starDotStarRE))) {
-            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;
-        }
-        else if ((m = pattern.match(dotStarRE))) {
-            fastTest = dotStarTest;
-        }
-        const re = AST.fromGlob(pattern, this.options).toMMPattern();
-        if (fastTest && typeof re === 'object') {
-            // Avoids overriding in frozen environments
-            Reflect.defineProperty(re, 'test', { value: fastTest });
-        }
-        return re;
-    }
-    makeRe() {
-        if (this.regexp || this.regexp === false)
-            return this.regexp;
-        // at this point, this.set is a 2d array of partial
-        // pattern strings, or "**".
-        //
-        // It's better to use .match().  This function shouldn't
-        // be used, really, but it's pretty convenient sometimes,
-        // when you just want to work with a regex.
-        const set = this.set;
-        if (!set.length) {
-            this.regexp = false;
-            return this.regexp;
-        }
-        const options = this.options;
-        const twoStar = options.noglobstar
-            ? star
-            : options.dot
-                ? twoStarDot
-                : twoStarNoDot;
-        const flags = new Set(options.nocase ? ['i'] : []);
-        // regexpify non-globstar patterns
-        // if ** is only item, then we just do one twoStar
-        // if ** is first, and there are more, prepend (\/|twoStar\/)? to next
-        // if ** is last, append (\/twoStar|) to previous
-        // if ** is in the middle, append (\/|\/twoStar\/) to previous
-        // then filter out GLOBSTAR symbols
-        let re = set
-            .map(pattern => {
-            const pp = pattern.map(p => {
-                if (p instanceof RegExp) {
-                    for (const f of p.flags.split(''))
-                        flags.add(f);
-                }
-                return typeof p === 'string'
-                    ? regExpEscape(p)
-                    : p === GLOBSTAR
-                        ? GLOBSTAR
-                        : p._src;
-            });
-            pp.forEach((p, i) => {
-                const next = pp[i + 1];
-                const prev = pp[i - 1];
-                if (p !== GLOBSTAR || prev === GLOBSTAR) {
-                    return;
-                }
-                if (prev === undefined) {
-                    if (next !== undefined && next !== GLOBSTAR) {
-                        pp[i + 1] = '(?:\\/|' + twoStar + '\\/)?' + next;
-                    }
-                    else {
-                        pp[i] = twoStar;
-                    }
-                }
-                else if (next === undefined) {
-                    pp[i - 1] = prev + '(?:\\/|' + twoStar + ')?';
-                }
-                else if (next !== GLOBSTAR) {
-                    pp[i - 1] = prev + '(?:\\/|\\/' + twoStar + '\\/)' + next;
-                    pp[i + 1] = GLOBSTAR;
-                }
-            });
-            return pp.filter(p => p !== GLOBSTAR).join('/');
-        })
-            .join('|');
-        // need to wrap in parens if we had more than one thing with |,
-        // otherwise only the first will be anchored to ^ and the last to $
-        const [open, close] = set.length > 1 ? ['(?:', ')'] : ['', ''];
-        // must match entire pattern
-        // ending in a * or ** will make it less strict.
-        re = '^' + open + re + close + '$';
-        // can match anything, as long as it's not this.
-        if (this.negate)
-            re = '^(?!' + re + ').+$';
-        try {
-            this.regexp = new RegExp(re, [...flags].join(''));
-            /* c8 ignore start */
-        }
-        catch (ex) {
-            // should be impossible
-            this.regexp = false;
-        }
-        /* c8 ignore stop */
-        return this.regexp;
-    }
-    slashSplit(p) {
-        // if p starts with // on windows, we preserve that
-        // so that UNC paths aren't broken.  Otherwise, any number of
-        // / characters are coalesced into one, unless
-        // preserveMultipleSlashes is set to true.
-        if (this.preserveMultipleSlashes) {
-            return p.split('/');
-        }
-        else if (this.isWindows && /^\/\/[^\/]+/.test(p)) {
-            // add an extra '' for the one we lose
-            return ['', ...p.split(/\/+/)];
-        }
-        else {
-            return p.split(/\/+/);
-        }
-    }
-    match(f, partial = this.partial) {
-        this.debug('match', f, this.pattern);
-        // short-circuit in the case of busted things.
-        // comments, etc.
-        if (this.comment) {
-            return false;
-        }
-        if (this.empty) {
-            return f === '';
-        }
-        if (f === '/' && partial) {
-            return true;
-        }
-        const options = this.options;
-        // windows: need to use /, not \
-        if (this.isWindows) {
-            f = f.split('\\').join('/');
-        }
-        // treat the test path as a set of pathparts.
-        const ff = this.slashSplit(f);
-        this.debug(this.pattern, 'split', ff);
-        // just ONE of the pattern sets in this.set needs to match
-        // in order for it to be valid.  If negating, then just one
-        // match means that we have failed.
-        // Either way, return on the first hit.
-        const set = this.set;
-        this.debug(this.pattern, 'set', set);
-        // Find the basename of the path by looking for the last non-empty segment
-        let filename = ff[ff.length - 1];
-        if (!filename) {
-            for (let i = ff.length - 2; !filename && i >= 0; i--) {
-                filename = ff[i];
-            }
-        }
-        for (let i = 0; i < set.length; i++) {
-            const pattern = set[i];
-            let file = ff;
-            if (options.matchBase && pattern.length === 1) {
-                file = [filename];
-            }
-            const hit = this.matchOne(file, pattern, partial);
-            if (hit) {
-                if (options.flipNegate) {
-                    return true;
-                }
-                return !this.negate;
-            }
-        }
-        // didn't get any hits.  this is success if it's a negative
-        // pattern, failure otherwise.
-        if (options.flipNegate) {
-            return false;
-        }
-        return this.negate;
-    }
-    static defaults(def) {
-        return minimatch.defaults(def).Minimatch;
-    }
-}
-/* c8 ignore start */
-export { AST } from './ast.js';
-export { escape } from './escape.js';
-export { unescape } from './unescape.js';
-/* c8 ignore stop */
-minimatch.AST = AST;
-minimatch.Minimatch = Minimatch;
-minimatch.escape = escape;
-minimatch.unescape = unescape;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/ignore-walk/node_modules/minimatch/LICENSE b/node_modules/ignore-walk/node_modules/minimatch/LICENSE
deleted file mode 100644
index 1493534e60dce..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
deleted file mode 100644
index 5fc86bbd0116c..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.assertValidPattern = void 0;
-const MAX_PATTERN_LENGTH = 1024 * 64;
-const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-exports.assertValidPattern = assertValidPattern;
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/ast.js
deleted file mode 100644
index 7b2109625eaeb..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/ast.js
+++ /dev/null
@@ -1,592 +0,0 @@
-"use strict";
-// parse a single path portion
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.AST = void 0;
-const brace_expressions_js_1 = require("./brace-expressions.js");
-const unescape_js_1 = require("./unescape.js");
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everthing that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                (0, unescape_js_1.unescape)(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, (0, unescape_js_1.unescape)(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something, but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            (0, unescape_js_1.unescape)(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, (0, unescape_js_1.unescape)(glob), !!hasMagic, uflag];
-    }
-}
-exports.AST = AST;
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
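The deleted commonjs ast.js above is the parser that turns a glob into an AST and then into a matcher via toMMPattern(). For reviewers, here is a minimal sketch of that entry point through the top-level minimatch export; the sample pattern is illustrative and not part of this patch:

```js
// minimal sketch, assuming the top-level `minimatch` package is available;
// AST.fromGlob() and toMMPattern() are the entry points from the deleted ast.js
const { AST } = require('minimatch')

const ast = AST.fromGlob('src/+(lib|bin)/*.js', {})
const compiled = ast.toMMPattern()
// toMMPattern() returns a RegExp when the pattern has magic,
// or the plain unescaped string when it does not
console.log(compiled instanceof RegExp ? compiled.source : compiled)
```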
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/brace-expressions.js
deleted file mode 100644
index 0e13eefc4cfee..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/brace-expressions.js
+++ /dev/null
@@ -1,152 +0,0 @@
-"use strict";
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseClass = void 0;
-// { <posix class>: [<translation>, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-exports.parseClass = parseClass;
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
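The brace-expressions.js removed above translates POSIX character classes into Unicode property escapes (and poisons the class with `$.` when it cannot match anything). A hedged sketch of the observable behaviour through the public API, with illustrative inputs:

```js
// minimal sketch, assuming the top-level `minimatch` package; the posix-class
// translation comes from the deleted brace-expressions.js above
const { minimatch } = require('minimatch')

minimatch('file7.txt', 'file[[:digit:]].txt')  // expected true: '7' matches \p{Nd}
minimatch('fileX.txt', 'file[[:digit:]].txt')  // expected false
```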
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/escape.js
deleted file mode 100644
index 02a4f8a8e0a58..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/escape.js
+++ /dev/null
@@ -1,22 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.escape = void 0;
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-exports.escape = escape;
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
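escape.js, deleted above, implements the two quoting modes described in its doc comment. A minimal sketch via the re-exported top-level function; the input string is illustrative:

```js
// minimal sketch of the two modes implemented by the deleted escape.js
const { escape } = require('minimatch')

escape('a*(b).js')
// default mode: magic chars are backslash-escaped -> a\*\(b\).js
escape('a*(b).js', { windowsPathsNoEscape: true })
// windows mode: magic chars are wrapped in [] instead -> a[*][(]b[)].js
```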
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/package.json b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/unescape.js
deleted file mode 100644
index 47c36bcee5a02..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/unescape.js
+++ /dev/null
@@ -1,24 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.unescape = void 0;
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * because `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-exports.unescape = unescape;
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
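unescape.js, deleted above, is the inverse operation; the sketch below shows the difference between the two modes using the top-level re-export (inputs are illustrative):

```js
// minimal sketch of unescape() from the deleted file above
const { unescape } = require('minimatch')

unescape('\\*.js')   // '*.js'  (backslash escape removed)
unescape('[*].js')   // '*.js'  (square-brace escape removed)
unescape('[*].js', { windowsPathsNoEscape: true })  // '*.js' (only brace escapes removed in this mode)
```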
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/assert-valid-pattern.js
deleted file mode 100644
index 7b534fc30200b..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/assert-valid-pattern.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const MAX_PATTERN_LENGTH = 1024 * 64;
-export const assertValidPattern = (pattern) => {
-    if (typeof pattern !== 'string') {
-        throw new TypeError('invalid pattern');
-    }
-    if (pattern.length > MAX_PATTERN_LENGTH) {
-        throw new TypeError('pattern is too long');
-    }
-};
-//# sourceMappingURL=assert-valid-pattern.js.map
\ No newline at end of file
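assert-valid-pattern.js, removed above, caps patterns at 64 KiB and rejects non-strings before any parsing happens. A hedged sketch of how that guard surfaces through the top-level API:

```js
// minimal sketch; the 64 KiB limit comes from the deleted assert-valid-pattern.js
import { minimatch } from 'minimatch'

// expected to throw TypeError('pattern is too long')
minimatch('file.txt', 'x'.repeat(64 * 1024 + 1))
```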
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/ast.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/ast.js
deleted file mode 100644
index 2d2bced6533de..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/ast.js
+++ /dev/null
@@ -1,588 +0,0 @@
-// parse a single path portion
-import { parseClass } from './brace-expressions.js';
-import { unescape } from './unescape.js';
-const types = new Set(['!', '?', '+', '*', '@']);
-const isExtglobType = (c) => types.has(c);
-// Patterns that get prepended to bind to the start of either the
-// entire string, or just a single path portion, to prevent dots
-// and/or traversal patterns, when needed.
-// Exts don't need the ^ or / bit, because the root binds that already.
-const startNoTraversal = '(?!(?:^|/)\\.\\.?(?:$|/))';
-const startNoDot = '(?!\\.)';
-// characters that indicate a start of pattern needs the "no dots" bit,
-// because a dot *might* be matched. ( is not in the list, because in
-// the case of a child extglob, it will handle the prevention itself.
-const addPatternStart = new Set(['[', '.']);
-// cases where traversal is A-OK, no dot prevention needed
-const justDots = new Set(['..', '.']);
-const reSpecials = new Set('().*{}+?[]^$\\!');
-const regExpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// any single thing other than /
-const qmark = '[^/]';
-// * => any number of characters
-const star = qmark + '*?';
-// use + when we need to ensure that *something* matches, because the * is
-// the only thing in the path portion.
-const starNoEmpty = qmark + '+?';
-// remove the \ chars that we added if we end up doing a nonmagic compare
-// const deslash = (s: string) => s.replace(/\\(.)/g, '$1')
-export class AST {
-    type;
-    #root;
-    #hasMagic;
-    #uflag = false;
-    #parts = [];
-    #parent;
-    #parentIndex;
-    #negs;
-    #filledNegs = false;
-    #options;
-    #toString;
-    // set to true if it's an extglob with no children
-    // (which really means one child of '')
-    #emptyExt = false;
-    constructor(type, parent, options = {}) {
-        this.type = type;
-        // extglobs are inherently magical
-        if (type)
-            this.#hasMagic = true;
-        this.#parent = parent;
-        this.#root = this.#parent ? this.#parent.#root : this;
-        this.#options = this.#root === this ? options : this.#root.#options;
-        this.#negs = this.#root === this ? [] : this.#root.#negs;
-        if (type === '!' && !this.#root.#filledNegs)
-            this.#negs.push(this);
-        this.#parentIndex = this.#parent ? this.#parent.#parts.length : 0;
-    }
-    get hasMagic() {
-        /* c8 ignore start */
-        if (this.#hasMagic !== undefined)
-            return this.#hasMagic;
-        /* c8 ignore stop */
-        for (const p of this.#parts) {
-            if (typeof p === 'string')
-                continue;
-            if (p.type || p.hasMagic)
-                return (this.#hasMagic = true);
-        }
-        // note: will be undefined until we generate the regexp src and find out
-        return this.#hasMagic;
-    }
-    // reconstructs the pattern
-    toString() {
-        if (this.#toString !== undefined)
-            return this.#toString;
-        if (!this.type) {
-            return (this.#toString = this.#parts.map(p => String(p)).join(''));
-        }
-        else {
-            return (this.#toString =
-                this.type + '(' + this.#parts.map(p => String(p)).join('|') + ')');
-        }
-    }
-    #fillNegs() {
-        /* c8 ignore start */
-        if (this !== this.#root)
-            throw new Error('should only call on root');
-        if (this.#filledNegs)
-            return this;
-        /* c8 ignore stop */
-        // call toString() once to fill this out
-        this.toString();
-        this.#filledNegs = true;
-        let n;
-        while ((n = this.#negs.pop())) {
-            if (n.type !== '!')
-                continue;
-            // walk up the tree, appending everything that comes AFTER parentIndex
-            let p = n;
-            let pp = p.#parent;
-            while (pp) {
-                for (let i = p.#parentIndex + 1; !pp.type && i < pp.#parts.length; i++) {
-                    for (const part of n.#parts) {
-                        /* c8 ignore start */
-                        if (typeof part === 'string') {
-                            throw new Error('string part in extglob AST??');
-                        }
-                        /* c8 ignore stop */
-                        part.copyIn(pp.#parts[i]);
-                    }
-                }
-                p = pp;
-                pp = p.#parent;
-            }
-        }
-        return this;
-    }
-    push(...parts) {
-        for (const p of parts) {
-            if (p === '')
-                continue;
-            /* c8 ignore start */
-            if (typeof p !== 'string' && !(p instanceof AST && p.#parent === this)) {
-                throw new Error('invalid part: ' + p);
-            }
-            /* c8 ignore stop */
-            this.#parts.push(p);
-        }
-    }
-    toJSON() {
-        const ret = this.type === null
-            ? this.#parts.slice().map(p => (typeof p === 'string' ? p : p.toJSON()))
-            : [this.type, ...this.#parts.map(p => p.toJSON())];
-        if (this.isStart() && !this.type)
-            ret.unshift([]);
-        if (this.isEnd() &&
-            (this === this.#root ||
-                (this.#root.#filledNegs && this.#parent?.type === '!'))) {
-            ret.push({});
-        }
-        return ret;
-    }
-    isStart() {
-        if (this.#root === this)
-            return true;
-        // if (this.type) return !!this.#parent?.isStart()
-        if (!this.#parent?.isStart())
-            return false;
-        if (this.#parentIndex === 0)
-            return true;
-        // if everything AHEAD of this is a negation, then it's still the "start"
-        const p = this.#parent;
-        for (let i = 0; i < this.#parentIndex; i++) {
-            const pp = p.#parts[i];
-            if (!(pp instanceof AST && pp.type === '!')) {
-                return false;
-            }
-        }
-        return true;
-    }
-    isEnd() {
-        if (this.#root === this)
-            return true;
-        if (this.#parent?.type === '!')
-            return true;
-        if (!this.#parent?.isEnd())
-            return false;
-        if (!this.type)
-            return this.#parent?.isEnd();
-        // if not root, it'll always have a parent
-        /* c8 ignore start */
-        const pl = this.#parent ? this.#parent.#parts.length : 0;
-        /* c8 ignore stop */
-        return this.#parentIndex === pl - 1;
-    }
-    copyIn(part) {
-        if (typeof part === 'string')
-            this.push(part);
-        else
-            this.push(part.clone(this));
-    }
-    clone(parent) {
-        const c = new AST(this.type, parent);
-        for (const p of this.#parts) {
-            c.copyIn(p);
-        }
-        return c;
-    }
-    static #parseAST(str, ast, pos, opt) {
-        let escaping = false;
-        let inBrace = false;
-        let braceStart = -1;
-        let braceNeg = false;
-        if (ast.type === null) {
-            // outside of a extglob, append until we find a start
-            let i = pos;
-            let acc = '';
-            while (i < str.length) {
-                const c = str.charAt(i++);
-                // still accumulate escapes at this point, but we do ignore
-                // starts that are escaped
-                if (escaping || c === '\\') {
-                    escaping = !escaping;
-                    acc += c;
-                    continue;
-                }
-                if (inBrace) {
-                    if (i === braceStart + 1) {
-                        if (c === '^' || c === '!') {
-                            braceNeg = true;
-                        }
-                    }
-                    else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                        inBrace = false;
-                    }
-                    acc += c;
-                    continue;
-                }
-                else if (c === '[') {
-                    inBrace = true;
-                    braceStart = i;
-                    braceNeg = false;
-                    acc += c;
-                    continue;
-                }
-                if (!opt.noext && isExtglobType(c) && str.charAt(i) === '(') {
-                    ast.push(acc);
-                    acc = '';
-                    const ext = new AST(c, ast);
-                    i = AST.#parseAST(str, ext, i, opt);
-                    ast.push(ext);
-                    continue;
-                }
-                acc += c;
-            }
-            ast.push(acc);
-            return i;
-        }
-        // some kind of extglob, pos is at the (
-        // find the next | or )
-        let i = pos + 1;
-        let part = new AST(null, ast);
-        const parts = [];
-        let acc = '';
-        while (i < str.length) {
-            const c = str.charAt(i++);
-            // still accumulate escapes at this point, but we do ignore
-            // starts that are escaped
-            if (escaping || c === '\\') {
-                escaping = !escaping;
-                acc += c;
-                continue;
-            }
-            if (inBrace) {
-                if (i === braceStart + 1) {
-                    if (c === '^' || c === '!') {
-                        braceNeg = true;
-                    }
-                }
-                else if (c === ']' && !(i === braceStart + 2 && braceNeg)) {
-                    inBrace = false;
-                }
-                acc += c;
-                continue;
-            }
-            else if (c === '[') {
-                inBrace = true;
-                braceStart = i;
-                braceNeg = false;
-                acc += c;
-                continue;
-            }
-            if (isExtglobType(c) && str.charAt(i) === '(') {
-                part.push(acc);
-                acc = '';
-                const ext = new AST(c, part);
-                part.push(ext);
-                i = AST.#parseAST(str, ext, i, opt);
-                continue;
-            }
-            if (c === '|') {
-                part.push(acc);
-                acc = '';
-                parts.push(part);
-                part = new AST(null, ast);
-                continue;
-            }
-            if (c === ')') {
-                if (acc === '' && ast.#parts.length === 0) {
-                    ast.#emptyExt = true;
-                }
-                part.push(acc);
-                acc = '';
-                ast.push(...parts, part);
-                return i;
-            }
-            acc += c;
-        }
-        // unfinished extglob
-        // if we got here, it was a malformed extglob! not an extglob, but
-        // maybe something else in there.
-        ast.type = null;
-        ast.#hasMagic = undefined;
-        ast.#parts = [str.substring(pos - 1)];
-        return i;
-    }
-    static fromGlob(pattern, options = {}) {
-        const ast = new AST(null, undefined, options);
-        AST.#parseAST(pattern, ast, 0, options);
-        return ast;
-    }
-    // returns the regular expression if there's magic, or the unescaped
-    // string if not.
-    toMMPattern() {
-        // should only be called on root
-        /* c8 ignore start */
-        if (this !== this.#root)
-            return this.#root.toMMPattern();
-        /* c8 ignore stop */
-        const glob = this.toString();
-        const [re, body, hasMagic, uflag] = this.toRegExpSource();
-        // if we're in nocase mode, and not nocaseMagicOnly, then we do
-        // still need a regular expression if we have to case-insensitively
-        // match capital/lowercase characters.
-        const anyMagic = hasMagic ||
-            this.#hasMagic ||
-            (this.#options.nocase &&
-                !this.#options.nocaseMagicOnly &&
-                glob.toUpperCase() !== glob.toLowerCase());
-        if (!anyMagic) {
-            return body;
-        }
-        const flags = (this.#options.nocase ? 'i' : '') + (uflag ? 'u' : '');
-        return Object.assign(new RegExp(`^${re}$`, flags), {
-            _src: re,
-            _glob: glob,
-        });
-    }
-    get options() {
-        return this.#options;
-    }
-    // returns the string match, the regexp source, whether there's magic
-    // in the regexp (so a regular expression is required) and whether or
-    // not the uflag is needed for the regular expression (for posix classes)
-    // TODO: instead of injecting the start/end at this point, just return
-    // the BODY of the regexp, along with the start/end portions suitable
-    // for binding the start/end in either a joined full-path makeRe context
-    // (where we bind to (^|/), or a standalone matchPart context (where
-    // we bind to ^, and not /).  Otherwise slashes get duped!
-    //
-    // In part-matching mode, the start is:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: ^(?!\.\.?$)
-    // - if dots allowed or not possible: ^
-    // - if dots possible and not allowed: ^(?!\.)
-    // end is:
-    // - if not isEnd(): nothing
-    // - else: $
-    //
-    // In full-path matching mode, we put the slash at the START of the
-    // pattern, so start is:
-    // - if first pattern: same as part-matching mode
-    // - if not isStart(): nothing
-    // - if traversal possible, but not allowed: /(?!\.\.?(?:$|/))
-    // - if dots allowed or not possible: /
-    // - if dots possible and not allowed: /(?!\.)
-    // end is:
-    // - if last pattern, same as part-matching mode
-    // - else nothing
-    //
-    // Always put the (?:$|/) on negated tails, though, because that has to be
-    // there to bind the end of the negated pattern portion, and it's easier to
-    // just stick it in now rather than try to inject it later in the middle of
-    // the pattern.
-    //
-    // We can just always return the same end, and leave it up to the caller
-    // to know whether it's going to be used joined or in parts.
-    // And, if the start is adjusted slightly, can do the same there:
-    // - if not isStart: nothing
-    // - if traversal possible, but not allowed: (?:/|^)(?!\.\.?$)
-    // - if dots allowed or not possible: (?:/|^)
-    // - if dots possible and not allowed: (?:/|^)(?!\.)
-    //
-    // But it's better to have a simpler binding without a conditional, for
-    // performance, so probably better to return both start options.
-    //
-    // Then the caller just ignores the end if it's not the first pattern,
-    // and the start always gets applied.
-    //
-    // But that's always going to be $ if it's the ending pattern, or nothing,
-    // so the caller can just attach $ at the end of the pattern when building.
-    //
-    // So the todo is:
-    // - better detect what kind of start is needed
-    // - return both flavors of starting pattern
-    // - attach $ at the end of the pattern when creating the actual RegExp
-    //
-    // Ah, but wait, no, that all only applies to the root when the first pattern
-    // is not an extglob. If the first pattern IS an extglob, then we need all
-    // that dot prevention biz to live in the extglob portions, because eg
-    // +(*|.x*) can match .xy but not .yx.
-    //
-    // So, return the two flavors if it's #root and the first child is not an
-    // AST, otherwise leave it to the child AST to handle it, and there,
-    // use the (?:^|/) style of start binding.
-    //
-    // Even simplified further:
-    // - Since the start for a join is eg /(?!\.) and the start for a part
-    // is ^(?!\.), we can just prepend (?!\.) to the pattern (either root
-    // or start or whatever) and prepend ^ or / at the Regexp construction.
-    toRegExpSource(allowDot) {
-        const dot = allowDot ?? !!this.#options.dot;
-        if (this.#root === this)
-            this.#fillNegs();
-        if (!this.type) {
-            const noEmpty = this.isStart() && this.isEnd();
-            const src = this.#parts
-                .map(p => {
-                const [re, _, hasMagic, uflag] = typeof p === 'string'
-                    ? AST.#parseGlob(p, this.#hasMagic, noEmpty)
-                    : p.toRegExpSource(allowDot);
-                this.#hasMagic = this.#hasMagic || hasMagic;
-                this.#uflag = this.#uflag || uflag;
-                return re;
-            })
-                .join('');
-            let start = '';
-            if (this.isStart()) {
-                if (typeof this.#parts[0] === 'string') {
-                    // this is the string that will match the start of the pattern,
-                    // so we need to protect against dots and such.
-                    // '.' and '..' cannot match unless the pattern is that exactly,
-                    // even if it starts with . or dot:true is set.
-                    const dotTravAllowed = this.#parts.length === 1 && justDots.has(this.#parts[0]);
-                    if (!dotTravAllowed) {
-                        const aps = addPatternStart;
-                        // check if we have a possibility of matching . or ..,
-                        // and prevent that.
-                        const needNoTrav = 
-                        // dots are allowed, and the pattern starts with [ or .
-                        (dot && aps.has(src.charAt(0))) ||
-                            // the pattern starts with \., and then [ or .
-                            (src.startsWith('\\.') && aps.has(src.charAt(2))) ||
-                            // the pattern starts with \.\., and then [ or .
-                            (src.startsWith('\\.\\.') && aps.has(src.charAt(4)));
-                        // no need to prevent dots if it can't match a dot, or if a
-                        // sub-pattern will be preventing it anyway.
-                        const needNoDot = !dot && !allowDot && aps.has(src.charAt(0));
-                        start = needNoTrav ? startNoTraversal : needNoDot ? startNoDot : '';
-                    }
-                }
-            }
-            // append the "end of path portion" pattern to negation tails
-            let end = '';
-            if (this.isEnd() &&
-                this.#root.#filledNegs &&
-                this.#parent?.type === '!') {
-                end = '(?:$|\\/)';
-            }
-            const final = start + src + end;
-            return [
-                final,
-                unescape(src),
-                (this.#hasMagic = !!this.#hasMagic),
-                this.#uflag,
-            ];
-        }
-        // We need to calculate the body *twice* if it's a repeat pattern
-        // at the start, once in nodot mode, then again in dot mode, so a
-        // pattern like *(?) can match 'x.y'
-        const repeated = this.type === '*' || this.type === '+';
-        // some kind of extglob
-        const start = this.type === '!' ? '(?:(?!(?:' : '(?:';
-        let body = this.#partsToRegExp(dot);
-        if (this.isStart() && this.isEnd() && !body && this.type !== '!') {
-            // invalid extglob, has to at least be *something* present, if it's
-            // the entire path portion.
-            const s = this.toString();
-            this.#parts = [s];
-            this.type = null;
-            this.#hasMagic = undefined;
-            return [s, unescape(this.toString()), false, false];
-        }
-        // XXX abstract out this map method
-        let bodyDotAllowed = !repeated || allowDot || dot || !startNoDot
-            ? ''
-            : this.#partsToRegExp(true);
-        if (bodyDotAllowed === body) {
-            bodyDotAllowed = '';
-        }
-        if (bodyDotAllowed) {
-            body = `(?:${body})(?:${bodyDotAllowed})*?`;
-        }
-        // an empty !() is exactly equivalent to a starNoEmpty
-        let final = '';
-        if (this.type === '!' && this.#emptyExt) {
-            final = (this.isStart() && !dot ? startNoDot : '') + starNoEmpty;
-        }
-        else {
-            const close = this.type === '!'
-                ? // !() must match something, but !(x) can match ''
-                    '))' +
-                        (this.isStart() && !dot && !allowDot ? startNoDot : '') +
-                        star +
-                        ')'
-                : this.type === '@'
-                    ? ')'
-                    : this.type === '?'
-                        ? ')?'
-                        : this.type === '+' && bodyDotAllowed
-                            ? ')'
-                            : this.type === '*' && bodyDotAllowed
-                                ? `)?`
-                                : `)${this.type}`;
-            final = start + body + close;
-        }
-        return [
-            final,
-            unescape(body),
-            (this.#hasMagic = !!this.#hasMagic),
-            this.#uflag,
-        ];
-    }
-    #partsToRegExp(dot) {
-        return this.#parts
-            .map(p => {
-            // extglob ASTs should only contain parent ASTs
-            /* c8 ignore start */
-            if (typeof p === 'string') {
-                throw new Error('string type in extglob ast??');
-            }
-            /* c8 ignore stop */
-            // can ignore hasMagic, because extglobs are already always magic
-            const [re, _, _hasMagic, uflag] = p.toRegExpSource(dot);
-            this.#uflag = this.#uflag || uflag;
-            return re;
-        })
-            .filter(p => !(this.isStart() && this.isEnd()) || !!p)
-            .join('|');
-    }
-    static #parseGlob(glob, hasMagic, noEmpty = false) {
-        let escaping = false;
-        let re = '';
-        let uflag = false;
-        for (let i = 0; i < glob.length; i++) {
-            const c = glob.charAt(i);
-            if (escaping) {
-                escaping = false;
-                re += (reSpecials.has(c) ? '\\' : '') + c;
-                continue;
-            }
-            if (c === '\\') {
-                if (i === glob.length - 1) {
-                    re += '\\\\';
-                }
-                else {
-                    escaping = true;
-                }
-                continue;
-            }
-            if (c === '[') {
-                const [src, needUflag, consumed, magic] = parseClass(glob, i);
-                if (consumed) {
-                    re += src;
-                    uflag = uflag || needUflag;
-                    i += consumed - 1;
-                    hasMagic = hasMagic || magic;
-                    continue;
-                }
-            }
-            if (c === '*') {
-                if (noEmpty && glob === '*')
-                    re += starNoEmpty;
-                else
-                    re += star;
-                hasMagic = true;
-                continue;
-            }
-            if (c === '?') {
-                re += qmark;
-                hasMagic = true;
-                continue;
-            }
-            re += regExpEscape(c);
-        }
-        return [re, unescape(glob), !!hasMagic, uflag];
-    }
-}
-//# sourceMappingURL=ast.js.map
\ No newline at end of file
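The esm ast.js removed above also contains the nocase handling in toMMPattern(): even a pattern with no glob magic still compiles to a RegExp when case-insensitive matching could change the result. A minimal sketch of that branch, with illustrative values:

```js
// minimal sketch of the nocase branch in toMMPattern() from the deleted esm ast.js
import { AST } from 'minimatch'

const compiled = AST.fromGlob('readme.md', { nocase: true }).toMMPattern()
// 'readme.md' has no magic, but nocase forces a RegExp with the 'i' flag
// so that 'README.md' still matches
console.log(compiled instanceof RegExp && compiled.flags.includes('i'))  // expected true
```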
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/brace-expressions.js
deleted file mode 100644
index c629d6ae816e2..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/brace-expressions.js
+++ /dev/null
@@ -1,148 +0,0 @@
-// translate the various posix character classes into unicode properties
-// this works across all unicode locales
-// { <posix class>: [<translation>, /u flag required, negated]
-const posixClasses = {
-    '[:alnum:]': ['\\p{L}\\p{Nl}\\p{Nd}', true],
-    '[:alpha:]': ['\\p{L}\\p{Nl}', true],
-    '[:ascii:]': ['\\x' + '00-\\x' + '7f', false],
-    '[:blank:]': ['\\p{Zs}\\t', true],
-    '[:cntrl:]': ['\\p{Cc}', true],
-    '[:digit:]': ['\\p{Nd}', true],
-    '[:graph:]': ['\\p{Z}\\p{C}', true, true],
-    '[:lower:]': ['\\p{Ll}', true],
-    '[:print:]': ['\\p{C}', true],
-    '[:punct:]': ['\\p{P}', true],
-    '[:space:]': ['\\p{Z}\\t\\r\\n\\v\\f', true],
-    '[:upper:]': ['\\p{Lu}', true],
-    '[:word:]': ['\\p{L}\\p{Nl}\\p{Nd}\\p{Pc}', true],
-    '[:xdigit:]': ['A-Fa-f0-9', false],
-};
-// only need to escape a few things inside of brace expressions
-// escapes: [ \ ] -
-const braceEscape = (s) => s.replace(/[[\]\\-]/g, '\\$&');
-// escape all regexp magic characters
-const regexpEscape = (s) => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&');
-// everything has already been escaped, we just have to join
-const rangesToString = (ranges) => ranges.join('');
-// takes a glob string at a posix brace expression, and returns
-// an equivalent regular expression source, and boolean indicating
-// whether the /u flag needs to be applied, and the number of chars
-// consumed to parse the character class.
-// This also removes out of order ranges, and returns ($.) if the
-// entire class just no good.
-export const parseClass = (glob, position) => {
-    const pos = position;
-    /* c8 ignore start */
-    if (glob.charAt(pos) !== '[') {
-        throw new Error('not in a brace expression');
-    }
-    /* c8 ignore stop */
-    const ranges = [];
-    const negs = [];
-    let i = pos + 1;
-    let sawStart = false;
-    let uflag = false;
-    let escaping = false;
-    let negate = false;
-    let endPos = pos;
-    let rangeStart = '';
-    WHILE: while (i < glob.length) {
-        const c = glob.charAt(i);
-        if ((c === '!' || c === '^') && i === pos + 1) {
-            negate = true;
-            i++;
-            continue;
-        }
-        if (c === ']' && sawStart && !escaping) {
-            endPos = i + 1;
-            break;
-        }
-        sawStart = true;
-        if (c === '\\') {
-            if (!escaping) {
-                escaping = true;
-                i++;
-                continue;
-            }
-            // escaped \ char, fall through and treat like normal char
-        }
-        if (c === '[' && !escaping) {
-            // either a posix class, a collation equivalent, or just a [
-            for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {
-                if (glob.startsWith(cls, i)) {
-                    // invalid, [a-[] is fine, but not [a-[:alpha]]
-                    if (rangeStart) {
-                        return ['$.', false, glob.length - pos, true];
-                    }
-                    i += cls.length;
-                    if (neg)
-                        negs.push(unip);
-                    else
-                        ranges.push(unip);
-                    uflag = uflag || u;
-                    continue WHILE;
-                }
-            }
-        }
-        // now it's just a normal character, effectively
-        escaping = false;
-        if (rangeStart) {
-            // throw this range away if it's not valid, but others
-            // can still match.
-            if (c > rangeStart) {
-                ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));
-            }
-            else if (c === rangeStart) {
-                ranges.push(braceEscape(c));
-            }
-            rangeStart = '';
-            i++;
-            continue;
-        }
-        // now might be the start of a range.
-        // can be either c-d or c-] or c] or c] at this point
-        if (glob.startsWith('-]', i + 1)) {
-            ranges.push(braceEscape(c + '-'));
-            i += 2;
-            continue;
-        }
-        if (glob.startsWith('-', i + 1)) {
-            rangeStart = c;
-            i += 2;
-            continue;
-        }
-        // not the start of a range, just a single character
-        ranges.push(braceEscape(c));
-        i++;
-    }
-    if (endPos < i) {
-        // didn't see the end of the class, not a valid class,
-        // but might still be valid as a literal match.
-        return ['', false, 0, false];
-    }
-    // if we got no ranges and no negates, then we have a range that
-    // cannot possibly match anything, and that poisons the whole glob
-    if (!ranges.length && !negs.length) {
-        return ['$.', false, glob.length - pos, true];
-    }
-    // if we got one positive range, and it's a single character, then that's
-    // not actually a magic pattern, it's just that one literal character.
-    // we should not treat that as "magic", we should just return the literal
-    // character. [_] is a perfectly valid way to escape glob magic chars.
-    if (negs.length === 0 &&
-        ranges.length === 1 &&
-        /^\\?.$/.test(ranges[0]) &&
-        !negate) {
-        const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];
-        return [regexpEscape(r), false, endPos - pos, false];
-    }
-    const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';
-    const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';
-    const comb = ranges.length && negs.length
-        ? '(' + sranges + '|' + snegs + ')'
-        : ranges.length
-            ? sranges
-            : snegs;
-    return [comb, uflag, endPos - pos, true];
-};
-//# sourceMappingURL=brace-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/escape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/escape.js
deleted file mode 100644
index 16f7c8c7bdc64..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/escape.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Escape all magic characters in a glob pattern.
- *
- * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}
- * option is used, then characters are escaped by wrapping in `[]`, because
- * a magic character wrapped in a character class can only be satisfied by
- * that exact character.  In this mode, `\` is _not_ escaped, because it is
- * not interpreted as a magic character, but instead as a path separator.
- */
-export const escape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    // don't need to escape +@! because we escape the parens
-    // that make those magic, and escaping ! as [!] isn't valid,
-    // because [!]] is a valid glob class meaning not ']'.
-    return windowsPathsNoEscape
-        ? s.replace(/[?*()[\]]/g, '[$&]')
-        : s.replace(/[?*()[\]\\]/g, '\\$&');
-};
-//# sourceMappingURL=escape.js.map
\ No newline at end of file
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/package.json b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/unescape.js b/node_modules/ignore-walk/node_modules/minimatch/dist/esm/unescape.js
deleted file mode 100644
index 0faf9a2b7306f..0000000000000
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/unescape.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * Un-escape a string that has been escaped with {@link escape}.
- *
- * If the {@link windowsPathsNoEscape} option is used, then square-brace
- * escapes are removed, but not backslash escapes.  For example, it will turn
- * the string `'[*]'` into `*`, but it will not turn `'\\*'` into `'*'`,
- * because `\` is a path separator in `windowsPathsNoEscape` mode.
- *
- * When `windowsPathsNoEscape` is not set, then both brace escapes and
- * backslash escapes are removed.
- *
- * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped
- * or unescaped.
- */
-export const unescape = (s, { windowsPathsNoEscape = false, } = {}) => {
-    return windowsPathsNoEscape
-        ? s.replace(/\[([^\/\\])\]/g, '$1')
-        : s.replace(/((?!\\).|^)\[([^\/\\])\]/g, '$1$2').replace(/\\([^\/])/g, '$1');
-};
-//# sourceMappingURL=unescape.js.map
\ No newline at end of file
diff --git a/node_modules/minimatch/dist/commonjs/index.js b/node_modules/minimatch/dist/commonjs/index.js
index 64a0f1f833222..f58fb8616aa9a 100644
--- a/node_modules/minimatch/dist/commonjs/index.js
+++ b/node_modules/minimatch/dist/commonjs/index.js
@@ -1,10 +1,7 @@
 "use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
-const brace_expansion_1 = __importDefault(require("brace-expansion"));
+const brace_expansion_1 = require("@isaacs/brace-expansion");
 const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
 const ast_js_1 = require("./ast.js");
 const escape_js_1 = require("./escape.js");
@@ -157,7 +154,7 @@ const braceExpand = (pattern, options = {}) => {
         // shortcut. no need to expand.
         return [pattern];
     }
-    return (0, brace_expansion_1.default)(pattern);
+    return (0, brace_expansion_1.expand)(pattern);
 };
 exports.braceExpand = braceExpand;
 exports.minimatch.braceExpand = exports.braceExpand;
diff --git a/node_modules/minimatch/dist/esm/index.js b/node_modules/minimatch/dist/esm/index.js
index 84b577b0472cb..790d6c02a2f22 100644
--- a/node_modules/minimatch/dist/esm/index.js
+++ b/node_modules/minimatch/dist/esm/index.js
@@ -1,4 +1,4 @@
-import expand from 'brace-expansion';
+import { expand } from '@isaacs/brace-expansion';
 import { assertValidPattern } from './assert-valid-pattern.js';
 import { AST } from './ast.js';
 import { escape } from './escape.js';
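The two hunks above switch the vendored minimatch 10.x from the default export of brace-expansion to the named `expand` export of `@isaacs/brace-expansion`, which is what `braceExpand()` now delegates to. A minimal sketch of the new dependency surface; the sample pattern and its expansion are illustrative:

```js
// minimal sketch; `expand` is the named export the patched index.js now imports
import { expand } from '@isaacs/brace-expansion'

console.log(expand('a{b,c}d'))  // expected ['abd', 'acd']
```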
diff --git a/node_modules/minimatch/package.json b/node_modules/minimatch/package.json
index 01fc48ecfd6a9..bfa2423f50b5e 100644
--- a/node_modules/minimatch/package.json
+++ b/node_modules/minimatch/package.json
@@ -2,7 +2,7 @@
   "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
   "name": "minimatch",
   "description": "a glob matcher in javascript",
-  "version": "9.0.5",
+  "version": "10.0.3",
   "repository": {
     "type": "git",
     "url": "git://github.com/isaacs/minimatch.git"
@@ -50,23 +50,16 @@
     "endOfLine": "lf"
   },
   "engines": {
-    "node": ">=16 || 14 >=14.17"
-  },
-  "dependencies": {
-    "brace-expansion": "^2.0.1"
+    "node": "20 || >=22"
   },
   "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.15.11",
-    "@types/tap": "^15.0.8",
-    "eslint-config-prettier": "^8.6.0",
-    "mkdirp": "1",
-    "prettier": "^2.8.2",
-    "tap": "^18.7.2",
-    "ts-node": "^10.9.1",
-    "tshy": "^1.12.0",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
+    "@types/brace-expansion": "^1.1.2",
+    "@types/node": "^24.0.0",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.3.2",
+    "tap": "^21.1.0",
+    "tshy": "^3.0.2",
+    "typedoc": "^0.28.5"
   },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
@@ -78,5 +71,9 @@
       ".": "./src/index.ts"
     }
   },
-  "type": "module"
+  "type": "module",
+  "module": "./dist/esm/index.js",
+  "dependencies": {
+    "@isaacs/brace-expansion": "^5.0.0"
+  }
 }
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/LICENSE b/node_modules/node-gyp/node_modules/minimatch/LICENSE
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/LICENSE
rename to node_modules/node-gyp/node_modules/minimatch/LICENSE
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/ast.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/ast.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/ast.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/brace-expressions.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/brace-expressions.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/brace-expressions.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/escape.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/escape.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/escape.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/index.js
similarity index 99%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/index.js
index f58fb8616aa9a..64a0f1f833222 100644
--- a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/index.js
+++ b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/index.js
@@ -1,7 +1,10 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
-const brace_expansion_1 = require("@isaacs/brace-expansion");
+const brace_expansion_1 = __importDefault(require("brace-expansion"));
 const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
 const ast_js_1 = require("./ast.js");
 const escape_js_1 = require("./escape.js");
@@ -154,7 +157,7 @@ const braceExpand = (pattern, options = {}) => {
         // shortcut. no need to expand.
         return [pattern];
     }
-    return (0, brace_expansion_1.expand)(pattern);
+    return (0, brace_expansion_1.default)(pattern);
 };
 exports.braceExpand = braceExpand;
 exports.minimatch.braceExpand = exports.braceExpand;
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/package.json b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/package.json
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/package.json
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/node-gyp/node_modules/minimatch/dist/commonjs/unescape.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/commonjs/unescape.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/commonjs/unescape.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/assert-valid-pattern.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/assert-valid-pattern.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/assert-valid-pattern.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/ast.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/ast.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/ast.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/ast.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/brace-expressions.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/brace-expressions.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/brace-expressions.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/escape.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/escape.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/escape.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/escape.js
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/index.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/index.js
similarity index 99%
rename from node_modules/ignore-walk/node_modules/minimatch/dist/esm/index.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/index.js
index 790d6c02a2f22..84b577b0472cb 100644
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/esm/index.js
+++ b/node_modules/node-gyp/node_modules/minimatch/dist/esm/index.js
@@ -1,4 +1,4 @@
-import { expand } from '@isaacs/brace-expansion';
+import expand from 'brace-expansion';
 import { assertValidPattern } from './assert-valid-pattern.js';
 import { AST } from './ast.js';
 import { escape } from './escape.js';
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/package.json b/node_modules/node-gyp/node_modules/minimatch/dist/esm/package.json
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/package.json
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/package.json
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/unescape.js b/node_modules/node-gyp/node_modules/minimatch/dist/esm/unescape.js
similarity index 100%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/unescape.js
rename to node_modules/node-gyp/node_modules/minimatch/dist/esm/unescape.js
diff --git a/node_modules/glob/node_modules/minimatch/package.json b/node_modules/node-gyp/node_modules/minimatch/package.json
similarity index 78%
rename from node_modules/glob/node_modules/minimatch/package.json
rename to node_modules/node-gyp/node_modules/minimatch/package.json
index bfa2423f50b5e..01fc48ecfd6a9 100644
--- a/node_modules/glob/node_modules/minimatch/package.json
+++ b/node_modules/node-gyp/node_modules/minimatch/package.json
@@ -2,7 +2,7 @@
   "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
   "name": "minimatch",
   "description": "a glob matcher in javascript",
-  "version": "10.0.3",
+  "version": "9.0.5",
   "repository": {
     "type": "git",
     "url": "git://github.com/isaacs/minimatch.git"
@@ -50,16 +50,23 @@
     "endOfLine": "lf"
   },
   "engines": {
-    "node": "20 || >=22"
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
   },
   "devDependencies": {
-    "@types/brace-expansion": "^1.1.2",
-    "@types/node": "^24.0.0",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.3.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^18.7.2",
+    "ts-node": "^10.9.1",
+    "tshy": "^1.12.0",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
   },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
@@ -71,9 +78,5 @@
       ".": "./src/index.ts"
     }
   },
-  "type": "module",
-  "module": "./dist/esm/index.js",
-  "dependencies": {
-    "@isaacs/brace-expansion": "^5.0.0"
-  }
+  "type": "module"
 }
diff --git a/node_modules/tar/node_modules/mkdirp/LICENSE b/node_modules/tar/node_modules/mkdirp/LICENSE
new file mode 100644
index 0000000000000..13fcd15f0e0be
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/LICENSE
@@ -0,0 +1,21 @@
+Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
+
+This project is free software released under the MIT license:
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/tar/node_modules/mkdirp/bin/cmd.js b/node_modules/tar/node_modules/mkdirp/bin/cmd.js
new file mode 100755
index 0000000000000..6e0aa8dc4667b
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/bin/cmd.js
@@ -0,0 +1,68 @@
+#!/usr/bin/env node
+
+const usage = () => `
+usage: mkdirp [DIR1,DIR2..] {OPTIONS}
+
+  Create each supplied directory including any necessary parent directories
+  that don't yet exist.
+
+  If the directory already exists, do nothing.
+
+OPTIONS are:
+
+  -m       If a directory needs to be created, set the mode as an octal
+  --mode=  permission string.
+
+  -v --version   Print the mkdirp version number
+
+  -h --help      Print this helpful banner
+
+  -p --print     Print the first directories created for each path provided
+
+  --manual       Use manual implementation, even if native is available
+`
+
+const dirs = []
+const opts = {}
+let print = false
+let dashdash = false
+let manual = false
+for (const arg of process.argv.slice(2)) {
+  if (dashdash)
+    dirs.push(arg)
+  else if (arg === '--')
+    dashdash = true
+  else if (arg === '--manual')
+    manual = true
+  else if (/^-h/.test(arg) || /^--help/.test(arg)) {
+    console.log(usage())
+    process.exit(0)
+  } else if (arg === '-v' || arg === '--version') {
+    console.log(require('../package.json').version)
+    process.exit(0)
+  } else if (arg === '-p' || arg === '--print') {
+    print = true
+  } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
+    const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
+    if (isNaN(mode)) {
+      console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
+      process.exit(1)
+    }
+    opts.mode = mode
+  } else
+    dirs.push(arg)
+}
+
+const mkdirp = require('../')
+const impl = manual ? mkdirp.manual : mkdirp
+if (dirs.length === 0)
+  console.error(usage())
+
+Promise.all(dirs.map(dir => impl(dir, opts)))
+  .then(made => print ? made.forEach(m => m && console.log(m)) : null)
+  .catch(er => {
+    console.error(er.message)
+    if (er.code)
+      console.error('  code: ' + er.code)
+    process.exit(1)
+  })
diff --git a/node_modules/tar/node_modules/mkdirp/index.js b/node_modules/tar/node_modules/mkdirp/index.js
new file mode 100644
index 0000000000000..ad7a16c9f45d9
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/index.js
@@ -0,0 +1,31 @@
+const optsArg = require('./lib/opts-arg.js')
+const pathArg = require('./lib/path-arg.js')
+
+const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
+const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
+const {useNative, useNativeSync} = require('./lib/use-native.js')
+
+
+const mkdirp = (path, opts) => {
+  path = pathArg(path)
+  opts = optsArg(opts)
+  return useNative(opts)
+    ? mkdirpNative(path, opts)
+    : mkdirpManual(path, opts)
+}
+
+const mkdirpSync = (path, opts) => {
+  path = pathArg(path)
+  opts = optsArg(opts)
+  return useNativeSync(opts)
+    ? mkdirpNativeSync(path, opts)
+    : mkdirpManualSync(path, opts)
+}
+
+mkdirp.sync = mkdirpSync
+mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
+mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
+mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
+mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
+
+module.exports = mkdirp
diff --git a/node_modules/tar/node_modules/mkdirp/lib/find-made.js b/node_modules/tar/node_modules/mkdirp/lib/find-made.js
new file mode 100644
index 0000000000000..022e492c085da
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/lib/find-made.js
@@ -0,0 +1,29 @@
+const {dirname} = require('path')
+
+const findMade = (opts, parent, path = undefined) => {
+  // we never want the 'made' return value to be a root directory
+  if (path === parent)
+    return Promise.resolve()
+
+  return opts.statAsync(parent).then(
+    st => st.isDirectory() ? path : undefined, // will fail later
+    er => er.code === 'ENOENT'
+      ? findMade(opts, dirname(parent), parent)
+      : undefined
+  )
+}
+
+const findMadeSync = (opts, parent, path = undefined) => {
+  if (path === parent)
+    return undefined
+
+  try {
+    return opts.statSync(parent).isDirectory() ? path : undefined
+  } catch (er) {
+    return er.code === 'ENOENT'
+      ? findMadeSync(opts, dirname(parent), parent)
+      : undefined
+  }
+}
+
+module.exports = {findMade, findMadeSync}
diff --git a/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
new file mode 100644
index 0000000000000..2eb18cd64eb79
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
@@ -0,0 +1,64 @@
+const {dirname} = require('path')
+
+const mkdirpManual = (path, opts, made) => {
+  opts.recursive = false
+  const parent = dirname(path)
+  if (parent === path) {
+    return opts.mkdirAsync(path, opts).catch(er => {
+      // swallowed by recursive implementation on posix systems
+      // any other error is a failure
+      if (er.code !== 'EISDIR')
+        throw er
+    })
+  }
+
+  return opts.mkdirAsync(path, opts).then(() => made || path, er => {
+    if (er.code === 'ENOENT')
+      return mkdirpManual(parent, opts)
+        .then(made => mkdirpManual(path, opts, made))
+    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
+      throw er
+    return opts.statAsync(path).then(st => {
+      if (st.isDirectory())
+        return made
+      else
+        throw er
+    }, () => { throw er })
+  })
+}
+
+const mkdirpManualSync = (path, opts, made) => {
+  const parent = dirname(path)
+  opts.recursive = false
+
+  if (parent === path) {
+    try {
+      return opts.mkdirSync(path, opts)
+    } catch (er) {
+      // swallowed by recursive implementation on posix systems
+      // any other error is a failure
+      if (er.code !== 'EISDIR')
+        throw er
+      else
+        return
+    }
+  }
+
+  try {
+    opts.mkdirSync(path, opts)
+    return made || path
+  } catch (er) {
+    if (er.code === 'ENOENT')
+      return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
+    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
+      throw er
+    try {
+      if (!opts.statSync(path).isDirectory())
+        throw er
+    } catch (_) {
+      throw er
+    }
+  }
+}
+
+module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
new file mode 100644
index 0000000000000..c7a6b69800f62
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
@@ -0,0 +1,39 @@
+const {dirname} = require('path')
+const {findMade, findMadeSync} = require('./find-made.js')
+const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
+
+const mkdirpNative = (path, opts) => {
+  opts.recursive = true
+  const parent = dirname(path)
+  if (parent === path)
+    return opts.mkdirAsync(path, opts)
+
+  return findMade(opts, path).then(made =>
+    opts.mkdirAsync(path, opts).then(() => made)
+    .catch(er => {
+      if (er.code === 'ENOENT')
+        return mkdirpManual(path, opts)
+      else
+        throw er
+    }))
+}
+
+const mkdirpNativeSync = (path, opts) => {
+  opts.recursive = true
+  const parent = dirname(path)
+  if (parent === path)
+    return opts.mkdirSync(path, opts)
+
+  const made = findMadeSync(opts, path)
+  try {
+    opts.mkdirSync(path, opts)
+    return made
+  } catch (er) {
+    if (er.code === 'ENOENT')
+      return mkdirpManualSync(path, opts)
+    else
+      throw er
+  }
+}
+
+module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js b/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
new file mode 100644
index 0000000000000..2fa4833faacc7
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
@@ -0,0 +1,23 @@
+const { promisify } = require('util')
+const fs = require('fs')
+const optsArg = opts => {
+  if (!opts)
+    opts = { mode: 0o777, fs }
+  else if (typeof opts === 'object')
+    opts = { mode: 0o777, fs, ...opts }
+  else if (typeof opts === 'number')
+    opts = { mode: opts, fs }
+  else if (typeof opts === 'string')
+    opts = { mode: parseInt(opts, 8), fs }
+  else
+    throw new TypeError('invalid options argument')
+
+  opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
+  opts.mkdirAsync = promisify(opts.mkdir)
+  opts.stat = opts.stat || opts.fs.stat || fs.stat
+  opts.statAsync = promisify(opts.stat)
+  opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
+  opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
+  return opts
+}
+module.exports = optsArg
diff --git a/node_modules/tar/node_modules/mkdirp/lib/path-arg.js b/node_modules/tar/node_modules/mkdirp/lib/path-arg.js
new file mode 100644
index 0000000000000..cc07de5a6f992
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/lib/path-arg.js
@@ -0,0 +1,29 @@
+const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
+const { resolve, parse } = require('path')
+const pathArg = path => {
+  if (/\0/.test(path)) {
+    // simulate same failure that node raises
+    throw Object.assign(
+      new TypeError('path must be a string without null bytes'),
+      {
+        path,
+        code: 'ERR_INVALID_ARG_VALUE',
+      }
+    )
+  }
+
+  path = resolve(path)
+  if (platform === 'win32') {
+    const badWinChars = /[*|"<>?:]/
+    const {root} = parse(path)
+    if (badWinChars.test(path.substr(root.length))) {
+      throw Object.assign(new Error('Illegal characters in path.'), {
+        path,
+        code: 'EINVAL',
+      })
+    }
+  }
+
+  return path
+}
+module.exports = pathArg
diff --git a/node_modules/tar/node_modules/mkdirp/lib/use-native.js b/node_modules/tar/node_modules/mkdirp/lib/use-native.js
new file mode 100644
index 0000000000000..079361de19fd8
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/lib/use-native.js
@@ -0,0 +1,10 @@
+const fs = require('fs')
+
+const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
+const versArr = version.replace(/^v/, '').split('.')
+const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
+
+const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
+const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
+
+module.exports = {useNative, useNativeSync}
diff --git a/node_modules/tar/node_modules/mkdirp/package.json b/node_modules/tar/node_modules/mkdirp/package.json
new file mode 100644
index 0000000000000..2913ed09bddd6
--- /dev/null
+++ b/node_modules/tar/node_modules/mkdirp/package.json
@@ -0,0 +1,44 @@
+{
+  "name": "mkdirp",
+  "description": "Recursively mkdir, like `mkdir -p`",
+  "version": "1.0.4",
+  "main": "index.js",
+  "keywords": [
+    "mkdir",
+    "directory",
+    "make dir",
+    "make",
+    "dir",
+    "recursive",
+    "native"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "https://github.com/isaacs/node-mkdirp.git"
+  },
+  "scripts": {
+    "test": "tap",
+    "snap": "tap",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --follow-tags"
+  },
+  "tap": {
+    "check-coverage": true,
+    "coverage-map": "map.js"
+  },
+  "devDependencies": {
+    "require-inject": "^1.4.4",
+    "tap": "^14.10.7"
+  },
+  "bin": "bin/cmd.js",
+  "license": "MIT",
+  "engines": {
+    "node": ">=10"
+  },
+  "files": [
+    "bin",
+    "lib",
+    "index.js"
+  ]
+}
diff --git a/node_modules/glob/node_modules/minimatch/LICENSE b/node_modules/tuf-js/node_modules/minimatch/LICENSE
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/LICENSE
rename to node_modules/tuf-js/node_modules/minimatch/LICENSE
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/ast.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/commonjs/ast.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/ast.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/brace-expressions.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/commonjs/brace-expressions.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/brace-expressions.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/escape.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/commonjs/escape.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/escape.js
diff --git a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/index.js b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/index.js
similarity index 99%
rename from node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/index.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/index.js
index f58fb8616aa9a..64a0f1f833222 100644
--- a/node_modules/ignore-walk/node_modules/minimatch/dist/commonjs/index.js
+++ b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/index.js
@@ -1,7 +1,10 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.unescape = exports.escape = exports.AST = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;
-const brace_expansion_1 = require("@isaacs/brace-expansion");
+const brace_expansion_1 = __importDefault(require("brace-expansion"));
 const assert_valid_pattern_js_1 = require("./assert-valid-pattern.js");
 const ast_js_1 = require("./ast.js");
 const escape_js_1 = require("./escape.js");
@@ -154,7 +157,7 @@ const braceExpand = (pattern, options = {}) => {
         // shortcut. no need to expand.
         return [pattern];
     }
-    return (0, brace_expansion_1.expand)(pattern);
+    return (0, brace_expansion_1.default)(pattern);
 };
 exports.braceExpand = braceExpand;
 exports.minimatch.braceExpand = exports.braceExpand;
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/package.json b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/package.json
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/commonjs/package.json
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/package.json
diff --git a/node_modules/glob/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/unescape.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/commonjs/unescape.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/commonjs/unescape.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/tuf-js/node_modules/minimatch/dist/esm/assert-valid-pattern.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/esm/assert-valid-pattern.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/assert-valid-pattern.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/ast.js b/node_modules/tuf-js/node_modules/minimatch/dist/esm/ast.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/esm/ast.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/ast.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/tuf-js/node_modules/minimatch/dist/esm/brace-expressions.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/esm/brace-expressions.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/brace-expressions.js
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/escape.js b/node_modules/tuf-js/node_modules/minimatch/dist/esm/escape.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/esm/escape.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/escape.js
diff --git a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js b/node_modules/tuf-js/node_modules/minimatch/dist/esm/index.js
similarity index 99%
rename from node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/index.js
index 790d6c02a2f22..84b577b0472cb 100644
--- a/node_modules/@npmcli/map-workspaces/node_modules/minimatch/dist/esm/index.js
+++ b/node_modules/tuf-js/node_modules/minimatch/dist/esm/index.js
@@ -1,4 +1,4 @@
-import { expand } from '@isaacs/brace-expansion';
+import expand from 'brace-expansion';
 import { assertValidPattern } from './assert-valid-pattern.js';
 import { AST } from './ast.js';
 import { escape } from './escape.js';
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/package.json b/node_modules/tuf-js/node_modules/minimatch/dist/esm/package.json
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/esm/package.json
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/package.json
diff --git a/node_modules/glob/node_modules/minimatch/dist/esm/unescape.js b/node_modules/tuf-js/node_modules/minimatch/dist/esm/unescape.js
similarity index 100%
rename from node_modules/glob/node_modules/minimatch/dist/esm/unescape.js
rename to node_modules/tuf-js/node_modules/minimatch/dist/esm/unescape.js
diff --git a/node_modules/ignore-walk/node_modules/minimatch/package.json b/node_modules/tuf-js/node_modules/minimatch/package.json
similarity index 78%
rename from node_modules/ignore-walk/node_modules/minimatch/package.json
rename to node_modules/tuf-js/node_modules/minimatch/package.json
index bfa2423f50b5e..01fc48ecfd6a9 100644
--- a/node_modules/ignore-walk/node_modules/minimatch/package.json
+++ b/node_modules/tuf-js/node_modules/minimatch/package.json
@@ -2,7 +2,7 @@
   "author": "Isaac Z. Schlueter  (http://blog.izs.me)",
   "name": "minimatch",
   "description": "a glob matcher in javascript",
-  "version": "10.0.3",
+  "version": "9.0.5",
   "repository": {
     "type": "git",
     "url": "git://github.com/isaacs/minimatch.git"
@@ -50,16 +50,23 @@
     "endOfLine": "lf"
   },
   "engines": {
-    "node": "20 || >=22"
+    "node": ">=16 || 14 >=14.17"
+  },
+  "dependencies": {
+    "brace-expansion": "^2.0.1"
   },
   "devDependencies": {
-    "@types/brace-expansion": "^1.1.2",
-    "@types/node": "^24.0.0",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.3.2",
-    "tap": "^21.1.0",
-    "tshy": "^3.0.2",
-    "typedoc": "^0.28.5"
+    "@types/brace-expansion": "^1.1.0",
+    "@types/node": "^18.15.11",
+    "@types/tap": "^15.0.8",
+    "eslint-config-prettier": "^8.6.0",
+    "mkdirp": "1",
+    "prettier": "^2.8.2",
+    "tap": "^18.7.2",
+    "ts-node": "^10.9.1",
+    "tshy": "^1.12.0",
+    "typedoc": "^0.23.21",
+    "typescript": "^4.9.3"
   },
   "funding": {
     "url": "https://github.com/sponsors/isaacs"
@@ -71,9 +78,5 @@
       ".": "./src/index.ts"
     }
   },
-  "type": "module",
-  "module": "./dist/esm/index.js",
-  "dependencies": {
-    "@isaacs/brace-expansion": "^5.0.0"
-  }
+  "type": "module"
 }
diff --git a/package-lock.json b/package-lock.json
index e529358d95de3..91d0751d03ec1 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -120,7 +120,7 @@
         "libnpmteam": "^8.0.1",
         "libnpmversion": "^8.0.1",
         "make-fetch-happen": "^15.0.2",
-        "minimatch": "^9.0.5",
+        "minimatch": "^10.0.3",
         "minipass": "^7.1.1",
         "minipass-pipeline": "^1.2.4",
         "ms": "^2.1.2",
@@ -3060,20 +3060,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/map-workspaces/node_modules/minimatch": {
-      "version": "10.0.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/@npmcli/metavuln-calculator": {
       "version": "9.0.2",
       "license": "ISC",
@@ -3912,6 +3898,22 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/minimatch": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/minipass-fetch": {
       "version": "3.0.5",
       "dev": true,
@@ -4741,6 +4743,22 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/@tufjs/models/node_modules/minimatch": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/@tufjs/repo-mock": {
       "version": "3.0.1",
       "dev": true,
@@ -7866,22 +7884,6 @@
         "node": ">=10.13.0"
       }
     },
-    "node_modules/glob/node_modules/minimatch": {
-      "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/global-directory": {
       "version": "4.0.1",
       "dev": true,
@@ -8380,22 +8382,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/ignore-walk/node_modules/minimatch": {
-      "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/brace-expansion": "^5.0.0"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/import-fresh": {
       "version": "3.3.1",
       "dev": true,
@@ -10484,14 +10470,16 @@
       }
     },
     "node_modules/minimatch": {
-      "version": "9.0.5",
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
+      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "brace-expansion": "^2.0.1"
+        "@isaacs/brace-expansion": "^5.0.0"
       },
       "engines": {
-        "node": ">=16 || 14 >=14.17"
+        "node": "20 || >=22"
       },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
@@ -10904,6 +10892,22 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/node-gyp/node_modules/minimatch": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/node-gyp/node_modules/path-scurry": {
       "version": "1.11.1",
       "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
@@ -12677,6 +12681,22 @@
       "dev": true,
       "license": "ISC"
     },
+    "node_modules/rimraf/node_modules/minimatch": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/rimraf/node_modules/path-scurry": {
       "version": "1.11.1",
       "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
@@ -16008,6 +16028,22 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "node_modules/tuf-js/node_modules/minimatch": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
+      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "inBundle": true,
+      "license": "ISC",
+      "dependencies": {
+        "brace-expansion": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=16 || 14 >=14.17"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/isaacs"
+      }
+    },
     "node_modules/tunnel": {
       "version": "0.0.6",
       "dev": true,
@@ -16943,7 +16979,7 @@
         "hosted-git-info": "^9.0.0",
         "json-stringify-nice": "^1.1.4",
         "lru-cache": "^11.2.1",
-        "minimatch": "^9.0.4",
+        "minimatch": "^10.0.3",
         "nopt": "^8.0.0",
         "npm-install-checks": "^7.1.0",
         "npm-package-arg": "^13.0.0",
@@ -17027,7 +17063,7 @@
         "@npmcli/installed-package-contents": "^3.0.0",
         "binary-extensions": "^3.0.0",
         "diff": "^7.0.0",
-        "minimatch": "^9.0.4",
+        "minimatch": "^10.0.3",
         "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
         "tar": "^6.2.1"
diff --git a/package.json b/package.json
index 60e507e310df7..eea618c7628f3 100644
--- a/package.json
+++ b/package.json
@@ -87,7 +87,7 @@
     "libnpmteam": "^8.0.1",
     "libnpmversion": "^8.0.1",
     "make-fetch-happen": "^15.0.2",
-    "minimatch": "^9.0.5",
+    "minimatch": "^10.0.3",
     "minipass": "^7.1.1",
     "minipass-pipeline": "^1.2.4",
     "ms": "^2.1.2",
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index ba306144941c8..3788403162f0c 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -20,7 +20,7 @@
     "hosted-git-info": "^9.0.0",
     "json-stringify-nice": "^1.1.4",
     "lru-cache": "^11.2.1",
-    "minimatch": "^9.0.4",
+    "minimatch": "^10.0.3",
     "nopt": "^8.0.0",
     "npm-install-checks": "^7.1.0",
     "npm-package-arg": "^13.0.0",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index f1ef61ca4cc62..f04552f4f3c9e 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -51,7 +51,7 @@
     "@npmcli/installed-package-contents": "^3.0.0",
     "binary-extensions": "^3.0.0",
     "diff": "^7.0.0",
-    "minimatch": "^9.0.4",
+    "minimatch": "^10.0.3",
     "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",
     "tar": "^6.2.1"

From 9f9146f99c638361aed606a67156854c7cf2c2cf Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:26:07 -0700
Subject: [PATCH 30/63] chore: @tufjs/repo-mock@4.0.0

---
 node_modules/.gitignore                       | 10 ++--
 .../node_modules => }/@tufjs/models/LICENSE   |  0
 .../@tufjs/models/dist/base.js                |  0
 .../@tufjs/models/dist/delegations.js         |  0
 .../@tufjs/models/dist/error.js               |  0
 .../@tufjs/models/dist/file.js                |  0
 .../@tufjs/models/dist/index.js               |  0
 .../@tufjs/models/dist/key.js                 |  0
 .../@tufjs/models/dist/metadata.js            |  0
 .../@tufjs/models/dist/role.js                |  0
 .../@tufjs/models/dist/root.js                |  0
 .../@tufjs/models/dist/signature.js           |  0
 .../@tufjs/models/dist/snapshot.js            |  0
 .../@tufjs/models/dist/targets.js             |  0
 .../@tufjs/models/dist/timestamp.js           |  0
 .../@tufjs/models/dist/utils/guard.js         |  0
 .../@tufjs/models/dist/utils/index.js         |  0
 .../@tufjs/models/dist/utils/key.js           |  0
 .../@tufjs/models/dist/utils/oid.js           |  0
 .../@tufjs/models/dist/utils/types.js         |  0
 .../@tufjs/models/dist/utils/verify.js        |  0
 .../models}/node_modules/minimatch/LICENSE    |  0
 .../dist/commonjs/assert-valid-pattern.js     |  0
 .../minimatch/dist/commonjs/ast.js            |  0
 .../dist/commonjs/brace-expressions.js        |  0
 .../minimatch/dist/commonjs/escape.js         |  0
 .../minimatch/dist/commonjs/index.js          |  0
 .../minimatch/dist/commonjs/package.json      |  0
 .../minimatch/dist/commonjs/unescape.js       |  0
 .../dist/esm/assert-valid-pattern.js          |  0
 .../node_modules/minimatch/dist/esm/ast.js    |  0
 .../minimatch/dist/esm/brace-expressions.js   |  0
 .../node_modules/minimatch/dist/esm/escape.js |  0
 .../node_modules/minimatch/dist/esm/index.js  |  0
 .../minimatch/dist/esm/package.json           |  0
 .../minimatch/dist/esm/unescape.js            |  0
 .../node_modules/minimatch/package.json       |  0
 .../@tufjs/models/package.json                |  0
 package-lock.json                             | 48 +++++--------------
 package.json                                  |  2 +-
 40 files changed, 17 insertions(+), 43 deletions(-)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/LICENSE (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/base.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/delegations.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/error.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/file.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/index.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/key.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/metadata.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/role.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/root.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/signature.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/snapshot.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/targets.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/timestamp.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/utils/guard.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/utils/index.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/utils/key.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/utils/oid.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/utils/types.js (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/dist/utils/verify.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/LICENSE (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/ast.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/brace-expressions.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/escape.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/index.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/package.json (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/commonjs/unescape.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/assert-valid-pattern.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/ast.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/brace-expressions.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/escape.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/index.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/package.json (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/dist/esm/unescape.js (100%)
 rename node_modules/{tuf-js => @tufjs/models}/node_modules/minimatch/package.json (100%)
 rename node_modules/{tuf-js/node_modules => }/@tufjs/models/package.json (100%)

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 12d25ef01bec3..34ea99e02a122 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -45,6 +45,10 @@
 !/@tufjs/
 /@tufjs/*
 !/@tufjs/canonical-json
+!/@tufjs/models
+!/@tufjs/models/node_modules/
+/@tufjs/models/node_modules/*
+!/@tufjs/models/node_modules/minimatch
 !/abbrev
 !/agent-base
 !/ansi-regex
@@ -222,12 +226,6 @@
 !/tinyglobby/node_modules/picomatch
 !/treeverse
 !/tuf-js
-!/tuf-js/node_modules/
-/tuf-js/node_modules/*
-!/tuf-js/node_modules/@tufjs/
-/tuf-js/node_modules/@tufjs/*
-!/tuf-js/node_modules/@tufjs/models
-!/tuf-js/node_modules/minimatch
 !/unique-filename
 !/unique-slug
 !/util-deprecate
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/LICENSE b/node_modules/@tufjs/models/LICENSE
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/LICENSE
rename to node_modules/@tufjs/models/LICENSE
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js b/node_modules/@tufjs/models/dist/base.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/base.js
rename to node_modules/@tufjs/models/dist/base.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js b/node_modules/@tufjs/models/dist/delegations.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/delegations.js
rename to node_modules/@tufjs/models/dist/delegations.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js b/node_modules/@tufjs/models/dist/error.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/error.js
rename to node_modules/@tufjs/models/dist/error.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js b/node_modules/@tufjs/models/dist/file.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/file.js
rename to node_modules/@tufjs/models/dist/file.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js b/node_modules/@tufjs/models/dist/index.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/index.js
rename to node_modules/@tufjs/models/dist/index.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js b/node_modules/@tufjs/models/dist/key.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/key.js
rename to node_modules/@tufjs/models/dist/key.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js b/node_modules/@tufjs/models/dist/metadata.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/metadata.js
rename to node_modules/@tufjs/models/dist/metadata.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js b/node_modules/@tufjs/models/dist/role.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/role.js
rename to node_modules/@tufjs/models/dist/role.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js b/node_modules/@tufjs/models/dist/root.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/root.js
rename to node_modules/@tufjs/models/dist/root.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js b/node_modules/@tufjs/models/dist/signature.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/signature.js
rename to node_modules/@tufjs/models/dist/signature.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js b/node_modules/@tufjs/models/dist/snapshot.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/snapshot.js
rename to node_modules/@tufjs/models/dist/snapshot.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js b/node_modules/@tufjs/models/dist/targets.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/targets.js
rename to node_modules/@tufjs/models/dist/targets.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js b/node_modules/@tufjs/models/dist/timestamp.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/timestamp.js
rename to node_modules/@tufjs/models/dist/timestamp.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js b/node_modules/@tufjs/models/dist/utils/guard.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/guard.js
rename to node_modules/@tufjs/models/dist/utils/guard.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js b/node_modules/@tufjs/models/dist/utils/index.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/index.js
rename to node_modules/@tufjs/models/dist/utils/index.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js b/node_modules/@tufjs/models/dist/utils/key.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/key.js
rename to node_modules/@tufjs/models/dist/utils/key.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js b/node_modules/@tufjs/models/dist/utils/oid.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/oid.js
rename to node_modules/@tufjs/models/dist/utils/oid.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js b/node_modules/@tufjs/models/dist/utils/types.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/types.js
rename to node_modules/@tufjs/models/dist/utils/types.js
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js b/node_modules/@tufjs/models/dist/utils/verify.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/dist/utils/verify.js
rename to node_modules/@tufjs/models/dist/utils/verify.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/LICENSE b/node_modules/@tufjs/models/node_modules/minimatch/LICENSE
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/LICENSE
rename to node_modules/@tufjs/models/node_modules/minimatch/LICENSE
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/assert-valid-pattern.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/ast.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/ast.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/ast.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/brace-expressions.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/brace-expressions.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/brace-expressions.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/escape.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/escape.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/escape.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/index.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/index.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/index.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/package.json b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/package.json
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/package.json
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/commonjs/unescape.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/commonjs/unescape.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/commonjs/unescape.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/assert-valid-pattern.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/assert-valid-pattern.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/assert-valid-pattern.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/ast.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/ast.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/ast.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/brace-expressions.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/brace-expressions.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/brace-expressions.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/escape.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/escape.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/escape.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/index.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/index.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/index.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/package.json b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/package.json
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/package.json
diff --git a/node_modules/tuf-js/node_modules/minimatch/dist/esm/unescape.js b/node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/dist/esm/unescape.js
rename to node_modules/@tufjs/models/node_modules/minimatch/dist/esm/unescape.js
diff --git a/node_modules/tuf-js/node_modules/minimatch/package.json b/node_modules/@tufjs/models/node_modules/minimatch/package.json
similarity index 100%
rename from node_modules/tuf-js/node_modules/minimatch/package.json
rename to node_modules/@tufjs/models/node_modules/minimatch/package.json
diff --git a/node_modules/tuf-js/node_modules/@tufjs/models/package.json b/node_modules/@tufjs/models/package.json
similarity index 100%
rename from node_modules/tuf-js/node_modules/@tufjs/models/package.json
rename to node_modules/@tufjs/models/package.json
diff --git a/package-lock.json b/package-lock.json
index 91d0751d03ec1..dc05938570499 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -162,7 +162,7 @@
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/template-oss": "4.24.4",
-        "@tufjs/repo-mock": "^3.0.1",
+        "@tufjs/repo-mock": "^4.0.0",
         "ajv": "^8.12.0",
         "ajv-formats": "^2.1.1",
         "ajv-formats-draft2019": "^1.6.1",
@@ -4732,22 +4732,24 @@
       }
     },
     "node_modules/@tufjs/models": {
-      "version": "3.0.1",
-      "dev": true,
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
+      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
+      "inBundle": true,
       "license": "MIT",
       "dependencies": {
         "@tufjs/canonical-json": "2.0.0",
         "minimatch": "^9.0.5"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@tufjs/models/node_modules/minimatch": {
       "version": "9.0.5",
       "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
       "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
-      "dev": true,
+      "inBundle": true,
       "license": "ISC",
       "dependencies": {
         "brace-expansion": "^2.0.1"
@@ -4760,15 +4762,17 @@
       }
     },
     "node_modules/@tufjs/repo-mock": {
-      "version": "3.0.1",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-4.0.0.tgz",
+      "integrity": "sha512-Z/w5mFJC26ZbrGYduDkWzGCxui9rSXkJqWROSOhaLk8s+PcVAv/W03nOBqpcfbgMVLYVYtMYaopoGSuC1mbNsQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@tufjs/models": "3.0.1",
+        "@tufjs/models": "4.0.0",
         "nock": "^13.5.5"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^20.17.0 || >=22.9.0"
       }
     },
     "node_modules/@types/conventional-commits-parser": {
@@ -16016,34 +16020,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/tuf-js/node_modules/@tufjs/models": {
-      "version": "4.0.0",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/canonical-json": "2.0.0",
-        "minimatch": "^9.0.5"
-      },
-      "engines": {
-        "node": "^20.17.0 || >=22.9.0"
-      }
-    },
-    "node_modules/tuf-js/node_modules/minimatch": {
-      "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "brace-expansion": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/tunnel": {
       "version": "0.0.6",
       "dev": true,
diff --git a/package.json b/package.json
index eea618c7628f3..3b97bec446757 100644
--- a/package.json
+++ b/package.json
@@ -193,7 +193,7 @@
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/template-oss": "4.24.4",
-    "@tufjs/repo-mock": "^3.0.1",
+    "@tufjs/repo-mock": "^4.0.0",
     "ajv": "^8.12.0",
     "ajv-formats": "^2.1.1",
     "ajv-formats-draft2019": "^1.6.1",

From d4eef14dcdc30ef3a09e88180168b649ea82d72e Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:27:45 -0700
Subject: [PATCH 31/63] deps: rimraf@6.0.1

---
 package-lock.json | 103 ++++------------------------------------------
 package.json      |   2 +-
 2 files changed, 8 insertions(+), 97 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index dc05938570499..db83f35d53b13 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -173,7 +173,7 @@
         "remark": "^14.0.2",
         "remark-gfm": "^3.0.1",
         "remark-github": "^11.2.4",
-        "rimraf": "^5.0.5",
+        "rimraf": "^6.0.1",
         "spawk": "^1.7.1",
         "tap": "^16.3.9"
       },
@@ -12628,91 +12628,20 @@
       }
     },
     "node_modules/rimraf": {
-      "version": "5.0.10",
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.0.1.tgz",
+      "integrity": "sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "glob": "^10.3.7"
+        "glob": "^11.0.0",
+        "package-json-from-dist": "^1.0.0"
       },
       "bin": {
         "rimraf": "dist/esm/bin.mjs"
       },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/rimraf/node_modules/glob": {
-      "version": "10.4.5",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
-      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.1.0",
-        "jackspeak": "^3.1.2",
-        "minimatch": "^9.0.4",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^1.11.1"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/rimraf/node_modules/jackspeak": {
-      "version": "3.4.3",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
-      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
-      "dev": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      },
-      "optionalDependencies": {
-        "@pkgjs/parseargs": "^0.11.0"
-      }
-    },
-    "node_modules/rimraf/node_modules/lru-cache": {
-      "version": "10.4.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
-      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
-      "dev": true,
-      "license": "ISC"
-    },
-    "node_modules/rimraf/node_modules/minimatch": {
-      "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "brace-expansion": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/rimraf/node_modules/path-scurry": {
-      "version": "1.11.1",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
-      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
-      "dev": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^10.2.0",
-        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
-      },
       "engines": {
-        "node": ">=16 || 14 >=14.18"
+        "node": "20 || >=22"
       },
       "funding": {
         "url": "https://github.com/sponsors/isaacs"
@@ -16915,24 +16844,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "smoke-tests/node_modules/rimraf": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "glob": "^11.0.0",
-        "package-json-from-dist": "^1.0.0"
-      },
-      "bin": {
-        "rimraf": "dist/esm/bin.mjs"
-      },
-      "engines": {
-        "node": "20 || >=22"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "workspaces/arborist": {
       "name": "@npmcli/arborist",
       "version": "9.1.4",
diff --git a/package.json b/package.json
index 3b97bec446757..926f9909b613b 100644
--- a/package.json
+++ b/package.json
@@ -204,7 +204,7 @@
     "remark": "^14.0.2",
     "remark-gfm": "^3.0.1",
     "remark-github": "^11.2.4",
-    "rimraf": "^5.0.5",
+    "rimraf": "^6.0.1",
     "spawk": "^1.7.1",
     "tap": "^16.3.9"
   },

From dfd034eaf9c8fac8c40276aab42c65e2736158c8 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:30:34 -0700
Subject: [PATCH 32/63] deps: @npmcli/promise-spawn@8.0.3

---
 node_modules/.gitignore                       |   3 +-
 .../@npmcli/promise-spawn/lib/index.js        |   2 +-
 .../@npmcli/promise-spawn/package.json        |   6 +-
 node_modules/isexe/LICENSE                    |  15 --
 node_modules/isexe/index.js                   |  57 -----
 node_modules/isexe/mode.js                    |  41 ----
 node_modules/isexe/package.json               |  31 ---
 node_modules/isexe/test/basic.js              | 221 ------------------
 node_modules/isexe/windows.js                 |  42 ----
 package-lock.json                             |   8 +-
 package.json                                  |   2 +-
 11 files changed, 13 insertions(+), 415 deletions(-)
 delete mode 100644 node_modules/isexe/LICENSE
 delete mode 100644 node_modules/isexe/index.js
 delete mode 100644 node_modules/isexe/mode.js
 delete mode 100644 node_modules/isexe/package.json
 delete mode 100644 node_modules/isexe/test/basic.js
 delete mode 100644 node_modules/isexe/windows.js

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 34ea99e02a122..3729ec7a958fa 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -72,6 +72,7 @@
 !/cross-spawn
 !/cross-spawn/node_modules/
 /cross-spawn/node_modules/*
+!/cross-spawn/node_modules/isexe
 !/cross-spawn/node_modules/which
 !/cssesc
 !/debug
@@ -100,7 +101,6 @@
 !/ip-regex
 !/is-cidr
 !/is-fullwidth-code-point
-!/isexe
 !/jackspeak
 !/jsbn
 !/json-parse-even-better-errors
@@ -217,6 +217,7 @@
 /tar/node_modules/minizlib/node_modules/*
 !/tar/node_modules/minizlib/node_modules/minipass
 !/tar/node_modules/mkdirp
+!/tar/node_modules/yallist
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
diff --git a/node_modules/@npmcli/promise-spawn/lib/index.js b/node_modules/@npmcli/promise-spawn/lib/index.js
index aa7b55d8f038d..1faf62c9157df 100644
--- a/node_modules/@npmcli/promise-spawn/lib/index.js
+++ b/node_modules/@npmcli/promise-spawn/lib/index.js
@@ -70,7 +70,7 @@ const spawnWithShell = (cmd, args, opts, extra) => {
   // ahead of time so that we can escape arguments properly. we don't need coverage here.
   if (command === true) {
     // istanbul ignore next
-    command = process.platform === 'win32' ? process.env.ComSpec : 'sh'
+    command = process.platform === 'win32' ? (process.env.ComSpec || 'cmd.exe') : 'sh'
   }
 
   const options = { ...opts, shell: false }
diff --git a/node_modules/@npmcli/promise-spawn/package.json b/node_modules/@npmcli/promise-spawn/package.json
index f5fb026be50e8..1436659a44612 100644
--- a/node_modules/@npmcli/promise-spawn/package.json
+++ b/node_modules/@npmcli/promise-spawn/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@npmcli/promise-spawn",
-  "version": "8.0.2",
+  "version": "8.0.3",
   "files": [
     "bin/",
     "lib/"
@@ -33,7 +33,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.4",
+    "@npmcli/template-oss": "4.25.0",
     "spawk": "^1.7.1",
     "tap": "^16.0.1"
   },
@@ -42,7 +42,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.25.0",
     "publish": true
   },
   "dependencies": {
diff --git a/node_modules/isexe/LICENSE b/node_modules/isexe/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/isexe/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/isexe/index.js b/node_modules/isexe/index.js
deleted file mode 100644
index 553fb32b119bd..0000000000000
--- a/node_modules/isexe/index.js
+++ /dev/null
@@ -1,57 +0,0 @@
-var fs = require('fs')
-var core
-if (process.platform === 'win32' || global.TESTING_WINDOWS) {
-  core = require('./windows.js')
-} else {
-  core = require('./mode.js')
-}
-
-module.exports = isexe
-isexe.sync = sync
-
-function isexe (path, options, cb) {
-  if (typeof options === 'function') {
-    cb = options
-    options = {}
-  }
-
-  if (!cb) {
-    if (typeof Promise !== 'function') {
-      throw new TypeError('callback not provided')
-    }
-
-    return new Promise(function (resolve, reject) {
-      isexe(path, options || {}, function (er, is) {
-        if (er) {
-          reject(er)
-        } else {
-          resolve(is)
-        }
-      })
-    })
-  }
-
-  core(path, options || {}, function (er, is) {
-    // ignore EACCES because that just means we aren't allowed to run it
-    if (er) {
-      if (er.code === 'EACCES' || options && options.ignoreErrors) {
-        er = null
-        is = false
-      }
-    }
-    cb(er, is)
-  })
-}
-
-function sync (path, options) {
-  // my kingdom for a filtered catch
-  try {
-    return core.sync(path, options || {})
-  } catch (er) {
-    if (options && options.ignoreErrors || er.code === 'EACCES') {
-      return false
-    } else {
-      throw er
-    }
-  }
-}
diff --git a/node_modules/isexe/mode.js b/node_modules/isexe/mode.js
deleted file mode 100644
index 1995ea4a06aec..0000000000000
--- a/node_modules/isexe/mode.js
+++ /dev/null
@@ -1,41 +0,0 @@
-module.exports = isexe
-isexe.sync = sync
-
-var fs = require('fs')
-
-function isexe (path, options, cb) {
-  fs.stat(path, function (er, stat) {
-    cb(er, er ? false : checkStat(stat, options))
-  })
-}
-
-function sync (path, options) {
-  return checkStat(fs.statSync(path), options)
-}
-
-function checkStat (stat, options) {
-  return stat.isFile() && checkMode(stat, options)
-}
-
-function checkMode (stat, options) {
-  var mod = stat.mode
-  var uid = stat.uid
-  var gid = stat.gid
-
-  var myUid = options.uid !== undefined ?
-    options.uid : process.getuid && process.getuid()
-  var myGid = options.gid !== undefined ?
-    options.gid : process.getgid && process.getgid()
-
-  var u = parseInt('100', 8)
-  var g = parseInt('010', 8)
-  var o = parseInt('001', 8)
-  var ug = u | g
-
-  var ret = (mod & o) ||
-    (mod & g) && gid === myGid ||
-    (mod & u) && uid === myUid ||
-    (mod & ug) && myUid === 0
-
-  return ret
-}
diff --git a/node_modules/isexe/package.json b/node_modules/isexe/package.json
deleted file mode 100644
index e452689442f20..0000000000000
--- a/node_modules/isexe/package.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
-  "name": "isexe",
-  "version": "2.0.0",
-  "description": "Minimal module to check if a file is executable.",
-  "main": "index.js",
-  "directories": {
-    "test": "test"
-  },
-  "devDependencies": {
-    "mkdirp": "^0.5.1",
-    "rimraf": "^2.5.0",
-    "tap": "^10.3.0"
-  },
-  "scripts": {
-    "test": "tap test/*.js --100",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --all; git push origin --tags"
-  },
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/isexe.git"
-  },
-  "keywords": [],
-  "bugs": {
-    "url": "https://github.com/isaacs/isexe/issues"
-  },
-  "homepage": "https://github.com/isaacs/isexe#readme"
-}
diff --git a/node_modules/isexe/test/basic.js b/node_modules/isexe/test/basic.js
deleted file mode 100644
index d926df64b9024..0000000000000
--- a/node_modules/isexe/test/basic.js
+++ /dev/null
@@ -1,221 +0,0 @@
-var t = require('tap')
-var fs = require('fs')
-var path = require('path')
-var fixture = path.resolve(__dirname, 'fixtures')
-var meow = fixture + '/meow.cat'
-var mine = fixture + '/mine.cat'
-var ours = fixture + '/ours.cat'
-var fail = fixture + '/fail.false'
-var noent = fixture + '/enoent.exe'
-var mkdirp = require('mkdirp')
-var rimraf = require('rimraf')
-
-var isWindows = process.platform === 'win32'
-var hasAccess = typeof fs.access === 'function'
-var winSkip = isWindows && 'windows'
-var accessSkip = !hasAccess && 'no fs.access function'
-var hasPromise = typeof Promise === 'function'
-var promiseSkip = !hasPromise && 'no global Promise'
-
-function reset () {
-  delete require.cache[require.resolve('../')]
-  return require('../')
-}
-
-t.test('setup fixtures', function (t) {
-  rimraf.sync(fixture)
-  mkdirp.sync(fixture)
-  fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n')
-  fs.chmodSync(meow, parseInt('0755', 8))
-  fs.writeFileSync(fail, '#!/usr/bin/env false\n')
-  fs.chmodSync(fail, parseInt('0644', 8))
-  fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n')
-  fs.chmodSync(mine, parseInt('0744', 8))
-  fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n')
-  fs.chmodSync(ours, parseInt('0754', 8))
-  t.end()
-})
-
-t.test('promise', { skip: promiseSkip }, function (t) {
-  var isexe = reset()
-  t.test('meow async', function (t) {
-    isexe(meow).then(function (is) {
-      t.ok(is)
-      t.end()
-    })
-  })
-  t.test('fail async', function (t) {
-    isexe(fail).then(function (is) {
-      t.notOk(is)
-      t.end()
-    })
-  })
-  t.test('noent async', function (t) {
-    isexe(noent).catch(function (er) {
-      t.ok(er)
-      t.end()
-    })
-  })
-  t.test('noent ignore async', function (t) {
-    isexe(noent, { ignoreErrors: true }).then(function (is) {
-      t.notOk(is)
-      t.end()
-    })
-  })
-  t.end()
-})
-
-t.test('no promise', function (t) {
-  global.Promise = null
-  var isexe = reset()
-  t.throws('try to meow a promise', function () {
-    isexe(meow)
-  })
-  t.end()
-})
-
-t.test('access', { skip: accessSkip || winSkip }, function (t) {
-  runTest(t)
-})
-
-t.test('mode', { skip: winSkip }, function (t) {
-  delete fs.access
-  delete fs.accessSync
-  var isexe = reset()
-  t.ok(isexe.sync(ours, { uid: 0, gid: 0 }))
-  t.ok(isexe.sync(mine, { uid: 0, gid: 0 }))
-  runTest(t)
-})
-
-t.test('windows', function (t) {
-  global.TESTING_WINDOWS = true
-  var pathExt = '.EXE;.CAT;.CMD;.COM'
-  t.test('pathExt option', function (t) {
-    runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' })
-  })
-  t.test('pathExt env', function (t) {
-    process.env.PATHEXT = pathExt
-    runTest(t)
-  })
-  t.test('no pathExt', function (t) {
-    // with a pathExt of '', any filename is fine.
-    // so the "fail" one would still pass.
-    runTest(t, { pathExt: '', skipFail: true })
-  })
-  t.test('pathext with empty entry', function (t) {
-    // with a pathExt of '', any filename is fine.
-    // so the "fail" one would still pass.
-    runTest(t, { pathExt: ';' + pathExt, skipFail: true })
-  })
-  t.end()
-})
-
-t.test('cleanup', function (t) {
-  rimraf.sync(fixture)
-  t.end()
-})
-
-function runTest (t, options) {
-  var isexe = reset()
-
-  var optionsIgnore = Object.create(options || {})
-  optionsIgnore.ignoreErrors = true
-
-  if (!options || !options.skipFail) {
-    t.notOk(isexe.sync(fail, options))
-  }
-  t.notOk(isexe.sync(noent, optionsIgnore))
-  if (!options) {
-    t.ok(isexe.sync(meow))
-  } else {
-    t.ok(isexe.sync(meow, options))
-  }
-
-  t.ok(isexe.sync(mine, options))
-  t.ok(isexe.sync(ours, options))
-  t.throws(function () {
-    isexe.sync(noent, options)
-  })
-
-  t.test('meow async', function (t) {
-    if (!options) {
-      isexe(meow, function (er, is) {
-        if (er) {
-          throw er
-        }
-        t.ok(is)
-        t.end()
-      })
-    } else {
-      isexe(meow, options, function (er, is) {
-        if (er) {
-          throw er
-        }
-        t.ok(is)
-        t.end()
-      })
-    }
-  })
-
-  t.test('mine async', function (t) {
-    isexe(mine, options, function (er, is) {
-      if (er) {
-        throw er
-      }
-      t.ok(is)
-      t.end()
-    })
-  })
-
-  t.test('ours async', function (t) {
-    isexe(ours, options, function (er, is) {
-      if (er) {
-        throw er
-      }
-      t.ok(is)
-      t.end()
-    })
-  })
-
-  if (!options || !options.skipFail) {
-    t.test('fail async', function (t) {
-      isexe(fail, options, function (er, is) {
-        if (er) {
-          throw er
-        }
-        t.notOk(is)
-        t.end()
-      })
-    })
-  }
-
-  t.test('noent async', function (t) {
-    isexe(noent, options, function (er, is) {
-      t.ok(er)
-      t.notOk(is)
-      t.end()
-    })
-  })
-
-  t.test('noent ignore async', function (t) {
-    isexe(noent, optionsIgnore, function (er, is) {
-      if (er) {
-        throw er
-      }
-      t.notOk(is)
-      t.end()
-    })
-  })
-
-  t.test('directory is not executable', function (t) {
-    isexe(__dirname, options, function (er, is) {
-      if (er) {
-        throw er
-      }
-      t.notOk(is)
-      t.end()
-    })
-  })
-
-  t.end()
-}
diff --git a/node_modules/isexe/windows.js b/node_modules/isexe/windows.js
deleted file mode 100644
index 34996734d8ef3..0000000000000
--- a/node_modules/isexe/windows.js
+++ /dev/null
@@ -1,42 +0,0 @@
-module.exports = isexe
-isexe.sync = sync
-
-var fs = require('fs')
-
-function checkPathExt (path, options) {
-  var pathext = options.pathExt !== undefined ?
-    options.pathExt : process.env.PATHEXT
-
-  if (!pathext) {
-    return true
-  }
-
-  pathext = pathext.split(';')
-  if (pathext.indexOf('') !== -1) {
-    return true
-  }
-  for (var i = 0; i < pathext.length; i++) {
-    var p = pathext[i].toLowerCase()
-    if (p && path.substr(-p.length).toLowerCase() === p) {
-      return true
-    }
-  }
-  return false
-}
-
-function checkStat (stat, path, options) {
-  if (!stat.isSymbolicLink() && !stat.isFile()) {
-    return false
-  }
-  return checkPathExt(path, options)
-}
-
-function isexe (path, options, cb) {
-  fs.stat(path, function (er, stat) {
-    cb(er, er ? false : checkStat(stat, path, options))
-  })
-}
-
-function sync (path, options) {
-  return checkStat(fs.statSync(path), path, options)
-}
diff --git a/package-lock.json b/package-lock.json
index db83f35d53b13..b996ef59ed876 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -90,7 +90,7 @@
         "@npmcli/fs": "^4.0.0",
         "@npmcli/map-workspaces": "^5.0.0",
         "@npmcli/package-json": "^7.0.1",
-        "@npmcli/promise-spawn": "^8.0.2",
+        "@npmcli/promise-spawn": "^8.0.3",
         "@npmcli/redact": "^3.2.2",
         "@npmcli/run-script": "^10.0.0",
         "@sigstore/tuf": "^4.0.0",
@@ -3116,7 +3116,9 @@
       }
     },
     "node_modules/@npmcli/promise-spawn": {
-      "version": "8.0.2",
+      "version": "8.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz",
+      "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -16767,6 +16769,8 @@
     },
     "node_modules/yallist": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
       "inBundle": true,
       "license": "ISC"
     },
diff --git a/package.json b/package.json
index 926f9909b613b..7492b8730e67d 100644
--- a/package.json
+++ b/package.json
@@ -57,7 +57,7 @@
     "@npmcli/fs": "^4.0.0",
     "@npmcli/map-workspaces": "^5.0.0",
     "@npmcli/package-json": "^7.0.1",
-    "@npmcli/promise-spawn": "^8.0.2",
+    "@npmcli/promise-spawn": "^8.0.3",
     "@npmcli/redact": "^3.2.2",
     "@npmcli/run-script": "^10.0.0",
     "@sigstore/tuf": "^4.0.0",

From 34bafd153f20954b5f8efdbf068fe1ec384ab489 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:50:27 -0700
Subject: [PATCH 33/63] deps: node-gyp@11.4.2

---
 .../node-gyp/.release-please-manifest.json    |   2 +-
 node_modules/node-gyp/addon.gypi              |   4 +-
 .../gyp/.release-please-manifest.json         |   2 +-
 .../node-gyp/gyp/pylib/gyp/MSVSNew.py         |  94 +-
 .../node-gyp/gyp/pylib/gyp/MSVSProject.py     | 104 +--
 .../node-gyp/gyp/pylib/gyp/MSVSSettings.py    | 239 +++---
 .../gyp/pylib/gyp/MSVSSettings_test.py        |  86 +-
 .../node-gyp/gyp/pylib/gyp/MSVSToolFile.py    |  24 +-
 .../node-gyp/gyp/pylib/gyp/MSVSUserFile.py    |  38 +-
 .../node-gyp/gyp/pylib/gyp/MSVSUtil.py        | 100 +--
 .../node-gyp/gyp/pylib/gyp/MSVSVersion.py     | 140 +--
 .../node-gyp/gyp/pylib/gyp/__init__.py        |  79 +-
 node_modules/node-gyp/gyp/pylib/gyp/common.py | 136 +--
 .../node-gyp/gyp/pylib/gyp/common_test.py     |  85 +-
 .../node-gyp/gyp/pylib/gyp/easy_xml.py        | 111 +--
 .../node-gyp/gyp/pylib/gyp/easy_xml_test.py   |   2 +-
 .../gyp/pylib/gyp/generator/analyzer.py       | 128 +--
 .../gyp/pylib/gyp/generator/android.py        |  31 +-
 .../node-gyp/gyp/pylib/gyp/generator/cmake.py | 133 ++-
 .../gyp/generator/dump_dependency_json.py     |   2 +-
 .../gyp/pylib/gyp/generator/eclipse.py        |  35 +-
 .../node-gyp/gyp/pylib/gyp/generator/gypd.py  |   1 -
 .../node-gyp/gyp/pylib/gyp/generator/gypsh.py |   1 -
 .../node-gyp/gyp/pylib/gyp/generator/make.py  |  74 +-
 .../node-gyp/gyp/pylib/gyp/generator/msvs.py  | 808 +++++++++---------
 .../gyp/pylib/gyp/generator/msvs_test.py      |   2 +-
 .../node-gyp/gyp/pylib/gyp/generator/ninja.py |  50 +-
 .../gyp/pylib/gyp/generator/ninja_test.py     |   2 +-
 .../node-gyp/gyp/pylib/gyp/generator/xcode.py |  32 +-
 .../gyp/pylib/gyp/generator/xcode_test.py     |   2 +-
 node_modules/node-gyp/gyp/pylib/gyp/input.py  | 313 ++++---
 .../node-gyp/gyp/pylib/gyp/mac_tool.py        | 196 +++--
 .../node-gyp/gyp/pylib/gyp/msvs_emulation.py  |  33 +-
 .../node-gyp/gyp/pylib/gyp/simple_copy.py     |   4 +-
 .../node-gyp/gyp/pylib/gyp/win_tool.py        |  36 +-
 .../node-gyp/gyp/pylib/gyp/xcode_emulation.py | 365 ++++----
 .../node-gyp/gyp/pylib/gyp/xcode_ninja.py     |  44 +-
 .../node-gyp/gyp/pylib/gyp/xcodeproj_file.py  | 608 +++++++------
 .../node-gyp/gyp/pylib/gyp/xml_fix.py         |   1 -
 .../node-gyp/gyp/pylib/packaging/_elffile.py  |   3 +-
 .../node-gyp/gyp/pylib/packaging/markers.py   |   3 +-
 .../node-gyp/gyp/pylib/packaging/metadata.py  |   3 +-
 node_modules/node-gyp/gyp/pyproject.toml      |   3 +-
 node_modules/node-gyp/gyp/test_gyp.py         |   5 +-
 node_modules/node-gyp/lib/install.js          |  20 +-
 node_modules/node-gyp/lib/node-gyp.js         |  49 +-
 node_modules/node-gyp/package.json            |   2 +-
 package-lock.json                             |   6 +-
 package.json                                  |   2 +-
 49 files changed, 2113 insertions(+), 2130 deletions(-)

diff --git a/node_modules/node-gyp/.release-please-manifest.json b/node_modules/node-gyp/.release-please-manifest.json
index f098464b1facd..a94451c9e1342 100644
--- a/node_modules/node-gyp/.release-please-manifest.json
+++ b/node_modules/node-gyp/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-    ".": "11.2.0"
+    ".": "11.4.2"
 }
diff --git a/node_modules/node-gyp/addon.gypi b/node_modules/node-gyp/addon.gypi
index b4ac369acb4f1..4f112df81c771 100644
--- a/node_modules/node-gyp/addon.gypi
+++ b/node_modules/node-gyp/addon.gypi
@@ -179,7 +179,7 @@
           '-loleaut32.lib',
           '-luuid.lib',
           '-lodbc32.lib',
-          '-lDelayImp.lib',
+          '-ldelayimp.lib',
           '-l"<(node_lib_file)"'
         ],
         'msvs_disabled_warnings': [
@@ -195,7 +195,7 @@
           '_FILE_OFFSET_BITS=64'
         ],
       }],
-      [ 'OS in "freebsd openbsd netbsd solaris android" or \
+      [ 'OS in "freebsd openbsd netbsd solaris android openharmony" or \
          (OS=="linux" and target_arch!="ia32")', {
         'cflags': [ '-fPIC' ],
       }],
diff --git a/node_modules/node-gyp/gyp/.release-please-manifest.json b/node_modules/node-gyp/gyp/.release-please-manifest.json
index 589cd4553e1bd..bdb726346fc28 100644
--- a/node_modules/node-gyp/gyp/.release-please-manifest.json
+++ b/node_modules/node-gyp/gyp/.release-please-manifest.json
@@ -1,3 +1,3 @@
 {
-    ".": "0.20.0"
+    ".": "0.20.4"
 }
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
index bc0e93d07f890..f8e4993d94cdf 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSNew.py
@@ -32,18 +32,18 @@ def cmp(x, y):
 def MakeGuid(name, seed="msvs_new"):
     """Returns a GUID for the specified target name.
 
-  Args:
-    name: Target name.
-    seed: Seed for MD5 hash.
-  Returns:
-    A GUID-line string calculated from the name and seed.
-
-  This generates something which looks like a GUID, but depends only on the
-  name and seed.  This means the same name/seed will always generate the same
-  GUID, so that projects and solutions which refer to each other can explicitly
-  determine the GUID to refer to explicitly.  It also means that the GUID will
-  not change when the project for a target is rebuilt.
-  """
+    Args:
+      name: Target name.
+      seed: Seed for MD5 hash.
+    Returns:
+      A GUID-line string calculated from the name and seed.
+
+    This generates something which looks like a GUID, but depends only on the
+    name and seed.  This means the same name/seed will always generate the same
+    GUID, so that projects and solutions which refer to each other can explicitly
+    determine the GUID to refer to explicitly.  It also means that the GUID will
+    not change when the project for a target is rebuilt.
+    """
     # Calculate a MD5 signature for the seed and name.
     d = hashlib.md5((str(seed) + str(name)).encode("utf-8")).hexdigest().upper()
     # Convert most of the signature to GUID form (discard the rest)
@@ -78,15 +78,15 @@ class MSVSFolder(MSVSSolutionEntry):
     def __init__(self, path, name=None, entries=None, guid=None, items=None):
         """Initializes the folder.
 
-    Args:
-      path: Full path to the folder.
-      name: Name of the folder.
-      entries: List of folder entries to nest inside this folder.  May contain
-          Folder or Project objects.  May be None, if the folder is empty.
-      guid: GUID to use for folder, if not None.
-      items: List of solution items to include in the folder project.  May be
-          None, if the folder does not directly contain items.
-    """
+        Args:
+          path: Full path to the folder.
+          name: Name of the folder.
+          entries: List of folder entries to nest inside this folder.  May contain
+              Folder or Project objects.  May be None, if the folder is empty.
+          guid: GUID to use for folder, if not None.
+          items: List of solution items to include in the folder project.  May be
+              None, if the folder does not directly contain items.
+        """
         if name:
             self.name = name
         else:
@@ -128,19 +128,19 @@ def __init__(
     ):
         """Initializes the project.
 
-    Args:
-      path: Absolute path to the project file.
-      name: Name of project.  If None, the name will be the same as the base
-          name of the project file.
-      dependencies: List of other Project objects this project is dependent
-          upon, if not None.
-      guid: GUID to use for project, if not None.
-      spec: Dictionary specifying how to build this project.
-      build_file: Filename of the .gyp file that the vcproj file comes from.
-      config_platform_overrides: optional dict of configuration platforms to
-          used in place of the default for this target.
-      fixpath_prefix: the path used to adjust the behavior of _fixpath
-    """
+        Args:
+          path: Absolute path to the project file.
+          name: Name of project.  If None, the name will be the same as the base
+              name of the project file.
+          dependencies: List of other Project objects this project is dependent
+              upon, if not None.
+          guid: GUID to use for project, if not None.
+          spec: Dictionary specifying how to build this project.
+          build_file: Filename of the .gyp file that the vcproj file comes from.
+          config_platform_overrides: optional dict of configuration platforms to
+              used in place of the default for this target.
+          fixpath_prefix: the path used to adjust the behavior of _fixpath
+        """
         self.path = path
         self.guid = guid
         self.spec = spec
@@ -195,16 +195,16 @@ def __init__(
     ):
         """Initializes the solution.
 
-    Args:
-      path: Path to solution file.
-      version: Format version to emit.
-      entries: List of entries in solution.  May contain Folder or Project
-          objects.  May be None, if the folder is empty.
-      variants: List of build variant strings.  If none, a default list will
-          be used.
-      websiteProperties: Flag to decide if the website properties section
-          is generated.
-    """
+        Args:
+          path: Path to solution file.
+          version: Format version to emit.
+          entries: List of entries in solution.  May contain Folder or Project
+              objects.  May be None, if the folder is empty.
+          variants: List of build variant strings.  If none, a default list will
+              be used.
+          websiteProperties: Flag to decide if the website properties section
+              is generated.
+        """
         self.path = path
         self.websiteProperties = websiteProperties
         self.version = version
@@ -230,9 +230,9 @@ def __init__(
     def Write(self, writer=gyp.common.WriteOnDiff):
         """Writes the solution file to disk.
 
-    Raises:
-      IndexError: An entry appears multiple times.
-    """
+        Raises:
+          IndexError: An entry appears multiple times.
+        """
         # Walk the entry tree and collect all the folders and projects.
         all_entries = set()
         entries_to_check = self.entries[:]
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
index 339d27d4029fc..17bb2bbdb8a55 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSProject.py
@@ -15,19 +15,19 @@ class Tool:
     def __init__(self, name, attrs=None):
         """Initializes the tool.
 
-    Args:
-      name: Tool name.
-      attrs: Dict of tool attributes; may be None.
-    """
+        Args:
+          name: Tool name.
+          attrs: Dict of tool attributes; may be None.
+        """
         self._attrs = attrs or {}
         self._attrs["Name"] = name
 
     def _GetSpecification(self):
         """Creates an element for the tool.
 
-    Returns:
-      A new xml.dom.Element for the tool.
-    """
+        Returns:
+          A new xml.dom.Element for the tool.
+        """
         return ["Tool", self._attrs]
 
 
@@ -37,10 +37,10 @@ class Filter:
     def __init__(self, name, contents=None):
         """Initializes the folder.
 
-    Args:
-      name: Filter (folder) name.
-      contents: List of filenames and/or Filter objects contained.
-    """
+        Args:
+          name: Filter (folder) name.
+          contents: List of filenames and/or Filter objects contained.
+        """
         self.name = name
         self.contents = list(contents or [])
 
@@ -54,13 +54,13 @@ class Writer:
     def __init__(self, project_path, version, name, guid=None, platforms=None):
         """Initializes the project.
 
-    Args:
-      project_path: Path to the project file.
-      version: Format version to emit.
-      name: Name of the project.
-      guid: GUID to use for project, if not None.
-      platforms: Array of string, the supported platforms.  If null, ['Win32']
-    """
+        Args:
+          project_path: Path to the project file.
+          version: Format version to emit.
+          name: Name of the project.
+          guid: GUID to use for project, if not None.
+          platforms: Array of string, the supported platforms.  If null, ['Win32']
+        """
         self.project_path = project_path
         self.version = version
         self.name = name
@@ -84,21 +84,21 @@ def __init__(self, project_path, version, name, guid=None, platforms=None):
     def AddToolFile(self, path):
         """Adds a tool file to the project.
 
-    Args:
-      path: Relative path from project to tool file.
-    """
+        Args:
+          path: Relative path from project to tool file.
+        """
         self.tool_files_section.append(["ToolFile", {"RelativePath": path}])
 
     def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
         """Returns the specification for a configuration.
 
-    Args:
-      config_type: Type of configuration node.
-      config_name: Configuration name.
-      attrs: Dict of configuration attributes; may be None.
-      tools: List of tools (strings or Tool objects); may be None.
-    Returns:
-    """
+        Args:
+          config_type: Type of configuration node.
+          config_name: Configuration name.
+          attrs: Dict of configuration attributes; may be None.
+          tools: List of tools (strings or Tool objects); may be None.
+        Returns:
+        """
         # Handle defaults
         if not attrs:
             attrs = {}
@@ -122,23 +122,23 @@ def _GetSpecForConfiguration(self, config_type, config_name, attrs, tools):
     def AddConfig(self, name, attrs=None, tools=None):
         """Adds a configuration to the project.
 
-    Args:
-      name: Configuration name.
-      attrs: Dict of configuration attributes; may be None.
-      tools: List of tools (strings or Tool objects); may be None.
-    """
+        Args:
+          name: Configuration name.
+          attrs: Dict of configuration attributes; may be None.
+          tools: List of tools (strings or Tool objects); may be None.
+        """
         spec = self._GetSpecForConfiguration("Configuration", name, attrs, tools)
         self.configurations_section.append(spec)
 
     def _AddFilesToNode(self, parent, files):
         """Adds files and/or filters to the parent node.
 
-    Args:
-      parent: Destination node
-      files: A list of Filter objects and/or relative paths to files.
+        Args:
+          parent: Destination node
+          files: A list of Filter objects and/or relative paths to files.
 
-    Will call itself recursively, if the files list contains Filter objects.
-    """
+        Will call itself recursively, if the files list contains Filter objects.
+        """
         for f in files:
             if isinstance(f, Filter):
                 node = ["Filter", {"Name": f.name}]
@@ -151,13 +151,13 @@ def _AddFilesToNode(self, parent, files):
     def AddFiles(self, files):
         """Adds files to the project.
 
-    Args:
-      files: A list of Filter objects and/or relative paths to files.
+        Args:
+          files: A list of Filter objects and/or relative paths to files.
 
-    This makes a copy of the file/filter tree at the time of this call.  If you
-    later add files to a Filter object which was passed into a previous call
-    to AddFiles(), it will not be reflected in this project.
-    """
+        This makes a copy of the file/filter tree at the time of this call.  If you
+        later add files to a Filter object which was passed into a previous call
+        to AddFiles(), it will not be reflected in this project.
+        """
         self._AddFilesToNode(self.files_section, files)
         # TODO(rspangler) This also doesn't handle adding files to an existing
         # filter.  That is, it doesn't merge the trees.
@@ -165,15 +165,15 @@ def AddFiles(self, files):
     def AddFileConfig(self, path, config, attrs=None, tools=None):
         """Adds a configuration to a file.
 
-    Args:
-      path: Relative path to the file.
-      config: Name of configuration to add.
-      attrs: Dict of configuration attributes; may be None.
-      tools: List of tools (strings or Tool objects); may be None.
+        Args:
+          path: Relative path to the file.
+          config: Name of configuration to add.
+          attrs: Dict of configuration attributes; may be None.
+          tools: List of tools (strings or Tool objects); may be None.
 
-    Raises:
-      ValueError: Relative path does not match any file added via AddFiles().
-    """
+        Raises:
+          ValueError: Relative path does not match any file added via AddFiles().
+        """
         # Find the file node with the right relative path
         parent = self.files_dict.get(path)
         if not parent:
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
index fea6e672865bf..155fc3a1cbc69 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings.py
@@ -35,10 +35,10 @@
 class _Tool:
     """Represents a tool used by MSVS or MSBuild.
 
-  Attributes:
-      msvs_name: The name of the tool in MSVS.
-      msbuild_name: The name of the tool in MSBuild.
-  """
+    Attributes:
+        msvs_name: The name of the tool in MSVS.
+        msbuild_name: The name of the tool in MSBuild.
+    """
 
     def __init__(self, msvs_name, msbuild_name):
         self.msvs_name = msvs_name
@@ -48,11 +48,11 @@ def __init__(self, msvs_name, msbuild_name):
 def _AddTool(tool):
     """Adds a tool to the four dictionaries used to process settings.
 
-  This only defines the tool.  Each setting also needs to be added.
+    This only defines the tool.  Each setting also needs to be added.
 
-  Args:
-    tool: The _Tool object to be added.
-  """
+    Args:
+      tool: The _Tool object to be added.
+    """
     _msvs_validators[tool.msvs_name] = {}
     _msbuild_validators[tool.msbuild_name] = {}
     _msvs_to_msbuild_converters[tool.msvs_name] = {}
@@ -70,35 +70,35 @@ class _Type:
     def ValidateMSVS(self, value):
         """Verifies that the value is legal for MSVS.
 
-    Args:
-      value: the value to check for this type.
+        Args:
+          value: the value to check for this type.
 
-    Raises:
-      ValueError if value is not valid for MSVS.
-    """
+        Raises:
+          ValueError if value is not valid for MSVS.
+        """
 
     def ValidateMSBuild(self, value):
         """Verifies that the value is legal for MSBuild.
 
-    Args:
-      value: the value to check for this type.
+        Args:
+          value: the value to check for this type.
 
-    Raises:
-      ValueError if value is not valid for MSBuild.
-    """
+        Raises:
+          ValueError if value is not valid for MSBuild.
+        """
 
     def ConvertToMSBuild(self, value):
         """Returns the MSBuild equivalent of the MSVS value given.
 
-    Args:
-      value: the MSVS value to convert.
+        Args:
+          value: the MSVS value to convert.
 
-    Returns:
-      the MSBuild equivalent.
+        Returns:
+          the MSBuild equivalent.
 
-    Raises:
-      ValueError if value is not valid.
-    """
+        Raises:
+          ValueError if value is not valid.
+        """
         return value
 
 
@@ -178,15 +178,15 @@ def ConvertToMSBuild(self, value):
 class _Enumeration(_Type):
     """Type of settings that is an enumeration.
 
-  In MSVS, the values are indexes like '0', '1', and '2'.
-  MSBuild uses text labels that are more representative, like 'Win32'.
+    In MSVS, the values are indexes like '0', '1', and '2'.
+    MSBuild uses text labels that are more representative, like 'Win32'.
 
-  Constructor args:
-    label_list: an array of MSBuild labels that correspond to the MSVS index.
-        In the rare cases where MSVS has skipped an index value, None is
-        used in the array to indicate the unused spot.
-    new: an array of labels that are new to MSBuild.
-  """
+    Constructor args:
+      label_list: an array of MSBuild labels that correspond to the MSVS index.
+          In the rare cases where MSVS has skipped an index value, None is
+          used in the array to indicate the unused spot.
+      new: an array of labels that are new to MSBuild.
+    """
 
     def __init__(self, label_list, new=None):
         _Type.__init__(self)
@@ -234,23 +234,23 @@ def ConvertToMSBuild(self, value):
 def _Same(tool, name, setting_type):
     """Defines a setting that has the same name in MSVS and MSBuild.
 
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    name: the name of the setting.
-    setting_type: the type of this setting.
-  """
+    Args:
+      tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+      name: the name of the setting.
+      setting_type: the type of this setting.
+    """
     _Renamed(tool, name, name, setting_type)
 
 
 def _Renamed(tool, msvs_name, msbuild_name, setting_type):
     """Defines a setting for which the name has changed.
 
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    msvs_name: the name of the MSVS setting.
-    msbuild_name: the name of the MSBuild setting.
-    setting_type: the type of this setting.
-  """
+    Args:
+      tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+      msvs_name: the name of the MSVS setting.
+      msbuild_name: the name of the MSBuild setting.
+      setting_type: the type of this setting.
+    """
 
     def _Translate(value, msbuild_settings):
         msbuild_tool_settings = _GetMSBuildToolSettings(msbuild_settings, tool)
@@ -272,13 +272,13 @@ def _MovedAndRenamed(
 ):
     """Defines a setting that may have moved to a new section.
 
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    msvs_settings_name: the MSVS name of the setting.
-    msbuild_tool_name: the name of the MSBuild tool to place the setting under.
-    msbuild_settings_name: the MSBuild name of the setting.
-    setting_type: the type of this setting.
-  """
+    Args:
+      tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+      msvs_settings_name: the MSVS name of the setting.
+      msbuild_tool_name: the name of the MSBuild tool to place the setting under.
+      msbuild_settings_name: the MSBuild name of the setting.
+      setting_type: the type of this setting.
+    """
 
     def _Translate(value, msbuild_settings):
         tool_settings = msbuild_settings.setdefault(msbuild_tool_name, {})
@@ -293,11 +293,11 @@ def _Translate(value, msbuild_settings):
 def _MSVSOnly(tool, name, setting_type):
     """Defines a setting that is only found in MSVS.
 
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    name: the name of the setting.
-    setting_type: the type of this setting.
-  """
+    Args:
+      tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+      name: the name of the setting.
+      setting_type: the type of this setting.
+    """
 
     def _Translate(unused_value, unused_msbuild_settings):
         # Since this is for MSVS only settings, no translation will happen.
@@ -310,11 +310,11 @@ def _Translate(unused_value, unused_msbuild_settings):
 def _MSBuildOnly(tool, name, setting_type):
     """Defines a setting that is only found in MSBuild.
 
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    name: the name of the setting.
-    setting_type: the type of this setting.
-  """
+    Args:
+      tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+      name: the name of the setting.
+      setting_type: the type of this setting.
+    """
 
     def _Translate(value, msbuild_settings):
         # Let msbuild-only properties get translated as-is from msvs_settings.
@@ -328,11 +328,11 @@ def _Translate(value, msbuild_settings):
 def _ConvertedToAdditionalOption(tool, msvs_name, flag):
     """Defines a setting that's handled via a command line option in MSBuild.
 
-  Args:
-    tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
-    msvs_name: the name of the MSVS setting that if 'true' becomes a flag
-    flag: the flag to insert at the end of the AdditionalOptions
-  """
+    Args:
+      tool: a dictionary that gives the names of the tool for MSVS and MSBuild.
+      msvs_name: the name of the MSVS setting that if 'true' becomes a flag
+      flag: the flag to insert at the end of the AdditionalOptions
+    """
 
     def _Translate(value, msbuild_settings):
         if value == "true":
@@ -384,20 +384,19 @@ def _Translate(value, msbuild_settings):
 def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
     """Verify that 'setting' is valid if it is generated from an exclusion list.
 
-  If the setting appears to be generated from an exclusion list, the root name
-  is checked.
+    If the setting appears to be generated from an exclusion list, the root name
+    is checked.
 
-  Args:
-      setting:   A string that is the setting name to validate
-      settings:  A dictionary where the keys are valid settings
-      error_msg: The message to emit in the event of error
-      stderr:    The stream receiving the error messages.
-  """
+    Args:
+        setting:   A string that is the setting name to validate
+        settings:  A dictionary where the keys are valid settings
+        error_msg: The message to emit in the event of error
+        stderr:    The stream receiving the error messages.
+    """
     # This may be unrecognized because it's an exclusion list. If the
     # setting name has the _excluded suffix, then check the root name.
     unrecognized = True
-    m = re.match(_EXCLUDED_SUFFIX_RE, setting)
-    if m:
+    if m := re.match(_EXCLUDED_SUFFIX_RE, setting):
         root_setting = m.group(1)
         unrecognized = root_setting not in settings
 
@@ -409,11 +408,11 @@ def _ValidateExclusionSetting(setting, settings, error_msg, stderr=sys.stderr):
 def FixVCMacroSlashes(s):
     """Replace macros which have excessive following slashes.
 
-  These macros are known to have a built-in trailing slash. Furthermore, many
-  scripts hiccup on processing paths with extra slashes in the middle.
+    These macros are known to have a built-in trailing slash. Furthermore, many
+    scripts hiccup on processing paths with extra slashes in the middle.
 
-  This list is probably not exhaustive.  Add as needed.
-  """
+    This list is probably not exhaustive.  Add as needed.
+    """
     if "$" in s:
         s = fix_vc_macro_slashes_regex.sub(r"\1", s)
     return s
@@ -422,8 +421,8 @@ def FixVCMacroSlashes(s):
 def ConvertVCMacrosToMSBuild(s):
     """Convert the MSVS macros found in the string to the MSBuild equivalent.
 
-  This list is probably not exhaustive.  Add as needed.
-  """
+    This list is probably not exhaustive.  Add as needed.
+    """
     if "$" in s:
         replace_map = {
             "$(ConfigurationName)": "$(Configuration)",
@@ -445,16 +444,16 @@ def ConvertVCMacrosToMSBuild(s):
 def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
     """Converts MSVS settings (VS2008 and earlier) to MSBuild settings (VS2010+).
 
-  Args:
-      msvs_settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
+    Args:
+        msvs_settings: A dictionary.  The key is the tool name.  The values are
+            themselves dictionaries of settings and their values.
+        stderr: The stream receiving the error messages.
 
-  Returns:
-      A dictionary of MSBuild settings.  The key is either the MSBuild tool name
-      or the empty string (for the global settings).  The values are themselves
-      dictionaries of settings and their values.
-  """
+    Returns:
+        A dictionary of MSBuild settings.  The key is either the MSBuild tool name
+        or the empty string (for the global settings).  The values are themselves
+        dictionaries of settings and their values.
+    """
     msbuild_settings = {}
     for msvs_tool_name, msvs_tool_settings in msvs_settings.items():
         if msvs_tool_name in _msvs_to_msbuild_converters:
@@ -493,36 +492,36 @@ def ConvertToMSBuildSettings(msvs_settings, stderr=sys.stderr):
 def ValidateMSVSSettings(settings, stderr=sys.stderr):
     """Validates that the names of the settings are valid for MSVS.
 
-  Args:
-      settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-  """
+    Args:
+        settings: A dictionary.  The key is the tool name.  The values are
+            themselves dictionaries of settings and their values.
+        stderr: The stream receiving the error messages.
+    """
     _ValidateSettings(_msvs_validators, settings, stderr)
 
 
 def ValidateMSBuildSettings(settings, stderr=sys.stderr):
     """Validates that the names of the settings are valid for MSBuild.
 
-  Args:
-      settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-  """
+    Args:
+        settings: A dictionary.  The key is the tool name.  The values are
+            themselves dictionaries of settings and their values.
+        stderr: The stream receiving the error messages.
+    """
     _ValidateSettings(_msbuild_validators, settings, stderr)
 
 
 def _ValidateSettings(validators, settings, stderr):
     """Validates that the settings are valid for MSBuild or MSVS.
 
-  We currently only validate the names of the settings, not their values.
+    We currently only validate the names of the settings, not their values.
 
-  Args:
-      validators: A dictionary of tools and their validators.
-      settings: A dictionary.  The key is the tool name.  The values are
-          themselves dictionaries of settings and their values.
-      stderr: The stream receiving the error messages.
-  """
+    Args:
+        validators: A dictionary of tools and their validators.
+        settings: A dictionary.  The key is the tool name.  The values are
+            themselves dictionaries of settings and their values.
+        stderr: The stream receiving the error messages.
+    """
     for tool_name in settings:
         if tool_name in validators:
             tool_validators = validators[tool_name]
@@ -638,7 +637,9 @@ def _ValidateSettings(validators, settings, stderr):
     ),
 )  # /RTC1
 _Same(
-    _compile, "BrowseInformation", _Enumeration(["false", "true", "true"])  # /FR
+    _compile,
+    "BrowseInformation",
+    _Enumeration(["false", "true", "true"]),  # /FR
 )  # /Fr
 _Same(
     _compile,
@@ -696,7 +697,9 @@ def _ValidateSettings(validators, settings, stderr):
     _Enumeration(["false", "Sync", "Async"], new=["SyncCThrow"]),  # /EHsc  # /EHa
 )  # /EHs
 _Same(
-    _compile, "FavorSizeOrSpeed", _Enumeration(["Neither", "Speed", "Size"])  # /Ot
+    _compile,
+    "FavorSizeOrSpeed",
+    _Enumeration(["Neither", "Speed", "Size"]),  # /Ot
 )  # /Os
 _Same(
     _compile,
@@ -909,7 +912,9 @@ def _ValidateSettings(validators, settings, stderr):
 )  # /MACHINE:X64
 
 _Same(
-    _link, "AssemblyDebug", _Enumeration(["", "true", "false"])  # /ASSEMBLYDEBUG
+    _link,
+    "AssemblyDebug",
+    _Enumeration(["", "true", "false"]),  # /ASSEMBLYDEBUG
 )  # /ASSEMBLYDEBUG:DISABLE
 _Same(
     _link,
@@ -1159,17 +1164,23 @@ def _ValidateSettings(validators, settings, stderr):
 _MSBuildOnly(_midl, "ApplicationConfigurationMode", _boolean)  # /app_config
 _MSBuildOnly(_midl, "ClientStubFile", _file_name)  # /cstub
 _MSBuildOnly(
-    _midl, "GenerateClientFiles", _Enumeration([], new=["Stub", "None"])  # /client stub
+    _midl,
+    "GenerateClientFiles",
+    _Enumeration([], new=["Stub", "None"]),  # /client stub
 )  # /client none
 _MSBuildOnly(
-    _midl, "GenerateServerFiles", _Enumeration([], new=["Stub", "None"])  # /client stub
+    _midl,
+    "GenerateServerFiles",
+    _Enumeration([], new=["Stub", "None"]),  # /client stub
 )  # /client none
 _MSBuildOnly(_midl, "LocaleID", _integer)  # /lcid DECIMAL
 _MSBuildOnly(_midl, "ServerStubFile", _file_name)  # /sstub
 _MSBuildOnly(_midl, "SuppressCompilerWarnings", _boolean)  # /no_warn
 _MSBuildOnly(_midl, "TrackerLogDirectory", _folder_name)
 _MSBuildOnly(
-    _midl, "TypeLibFormat", _Enumeration([], new=["NewFormat", "OldFormat"])  # /newtlb
+    _midl,
+    "TypeLibFormat",
+    _Enumeration([], new=["NewFormat", "OldFormat"]),  # /newtlb
 )  # /oldtlb
 
 
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
index 0504728d994ca..0e661995fbcd9 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSSettings_test.py
@@ -1143,47 +1143,47 @@ def testConvertToMSBuildSettings_full_synthetic(self):
     def testConvertToMSBuildSettings_actual(self):
         """Tests the conversion of an actual project.
 
-    A VS2008 project with most of the options defined was created through the
-    VS2008 IDE.  It was then converted to VS2010.  The tool settings found in
-    the .vcproj and .vcxproj files were converted to the two dictionaries
-    msvs_settings and expected_msbuild_settings.
+        A VS2008 project with most of the options defined was created through the
+        VS2008 IDE.  It was then converted to VS2010.  The tool settings found in
+        the .vcproj and .vcxproj files were converted to the two dictionaries
+        msvs_settings and expected_msbuild_settings.
 
-    Note that for many settings, the VS2010 converter adds macros like
-    %(AdditionalIncludeDirectories) to make sure than inherited values are
-    included.  Since the Gyp projects we generate do not use inheritance,
-    we removed these macros.  They were:
-        ClCompile:
-            AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)'
-            AdditionalOptions:  ' %(AdditionalOptions)'
-            AdditionalUsingDirectories:  ';%(AdditionalUsingDirectories)'
-            DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
-            ForcedIncludeFiles:  ';%(ForcedIncludeFiles)',
-            ForcedUsingFiles:  ';%(ForcedUsingFiles)',
-            PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
-            UndefinePreprocessorDefinitions:
-                ';%(UndefinePreprocessorDefinitions)',
-        Link:
-            AdditionalDependencies:  ';%(AdditionalDependencies)',
-            AdditionalLibraryDirectories:  ';%(AdditionalLibraryDirectories)',
-            AdditionalManifestDependencies:
-                ';%(AdditionalManifestDependencies)',
-            AdditionalOptions:  ' %(AdditionalOptions)',
-            AddModuleNamesToAssembly:  ';%(AddModuleNamesToAssembly)',
-            AssemblyLinkResource:  ';%(AssemblyLinkResource)',
-            DelayLoadDLLs:  ';%(DelayLoadDLLs)',
-            EmbedManagedResourceFile:  ';%(EmbedManagedResourceFile)',
-            ForceSymbolReferences:  ';%(ForceSymbolReferences)',
-            IgnoreSpecificDefaultLibraries:
-                ';%(IgnoreSpecificDefaultLibraries)',
-        ResourceCompile:
-            AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)',
-            AdditionalOptions:  ' %(AdditionalOptions)',
-            PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
-        Manifest:
-            AdditionalManifestFiles:  ';%(AdditionalManifestFiles)',
-            AdditionalOptions:  ' %(AdditionalOptions)',
-            InputResourceManifests:  ';%(InputResourceManifests)',
-    """
+        Note that for many settings, the VS2010 converter adds macros like
+        %(AdditionalIncludeDirectories) to make sure than inherited values are
+        included.  Since the Gyp projects we generate do not use inheritance,
+        we removed these macros.  They were:
+            ClCompile:
+                AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)'
+                AdditionalOptions:  ' %(AdditionalOptions)'
+                AdditionalUsingDirectories:  ';%(AdditionalUsingDirectories)'
+                DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
+                ForcedIncludeFiles:  ';%(ForcedIncludeFiles)',
+                ForcedUsingFiles:  ';%(ForcedUsingFiles)',
+                PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
+                UndefinePreprocessorDefinitions:
+                    ';%(UndefinePreprocessorDefinitions)',
+            Link:
+                AdditionalDependencies:  ';%(AdditionalDependencies)',
+                AdditionalLibraryDirectories:  ';%(AdditionalLibraryDirectories)',
+                AdditionalManifestDependencies:
+                    ';%(AdditionalManifestDependencies)',
+                AdditionalOptions:  ' %(AdditionalOptions)',
+                AddModuleNamesToAssembly:  ';%(AddModuleNamesToAssembly)',
+                AssemblyLinkResource:  ';%(AssemblyLinkResource)',
+                DelayLoadDLLs:  ';%(DelayLoadDLLs)',
+                EmbedManagedResourceFile:  ';%(EmbedManagedResourceFile)',
+                ForceSymbolReferences:  ';%(ForceSymbolReferences)',
+                IgnoreSpecificDefaultLibraries:
+                    ';%(IgnoreSpecificDefaultLibraries)',
+            ResourceCompile:
+                AdditionalIncludeDirectories:  ';%(AdditionalIncludeDirectories)',
+                AdditionalOptions:  ' %(AdditionalOptions)',
+                PreprocessorDefinitions:  ';%(PreprocessorDefinitions)',
+            Manifest:
+                AdditionalManifestFiles:  ';%(AdditionalManifestFiles)',
+                AdditionalOptions:  ' %(AdditionalOptions)',
+                InputResourceManifests:  ';%(InputResourceManifests)',
+        """
         msvs_settings = {
             "VCCLCompilerTool": {
                 "AdditionalIncludeDirectories": "dir1",
@@ -1346,8 +1346,7 @@ def testConvertToMSBuildSettings_actual(self):
                 "EmbedManifest": "false",
                 "GenerateCatalogFiles": "true",
                 "InputResourceManifests": "asfsfdafs",
-                "ManifestResourceFile":
-                    "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf",
+                "ManifestResourceFile": "$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf",  # noqa: E501
                 "OutputManifestFile": "$(TargetPath).manifestdfs",
                 "RegistrarScriptFile": "sdfsfd",
                 "ReplacementsFile": "sdffsd",
@@ -1531,8 +1530,7 @@ def testConvertToMSBuildSettings_actual(self):
                 "LinkIncremental": "",
             },
             "ManifestResourceCompile": {
-                "ResourceOutputFileName":
-                    "$(IntDir)$(TargetFileName).embed.manifest.resfdsf"
+                "ResourceOutputFileName": "$(IntDir)$(TargetFileName).embed.manifest.resfdsf"  # noqa: E501
             },
         }
         self.maxDiff = 9999  # on failure display a long diff
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
index 901ba84588589..61ca37c12d09d 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSToolFile.py
@@ -13,10 +13,10 @@ class Writer:
     def __init__(self, tool_file_path, name):
         """Initializes the tool file.
 
-    Args:
-      tool_file_path: Path to the tool file.
-      name: Name of the tool file.
-    """
+        Args:
+          tool_file_path: Path to the tool file.
+          name: Name of the tool file.
+        """
         self.tool_file_path = tool_file_path
         self.name = name
         self.rules_section = ["Rules"]
@@ -26,14 +26,14 @@ def AddCustomBuildRule(
     ):
         """Adds a rule to the tool file.
 
-    Args:
-      name: Name of the rule.
-      description: Description of the rule.
-      cmd: Command line of the rule.
-      additional_dependencies: other files which may trigger the rule.
-      outputs: outputs of the rule.
-      extensions: extensions handled by the rule.
-    """
+        Args:
+          name: Name of the rule.
+          description: Description of the rule.
+          cmd: Command line of the rule.
+          additional_dependencies: other files which may trigger the rule.
+          outputs: outputs of the rule.
+          extensions: extensions handled by the rule.
+        """
         rule = [
             "CustomBuildRule",
             {
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
index 23d3e16953c43..b93613bd1d2e4 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py
@@ -15,11 +15,11 @@
 
 def _FindCommandInPath(command):
     """If there are no slashes in the command given, this function
-     searches the PATH env to find the given command, and converts it
-     to an absolute path.  We have to do this because MSVS is looking
-     for an actual file to launch a debugger on, not just a command
-     line.  Note that this happens at GYP time, so anything needing to
-     be built needs to have a full path."""
+    searches the PATH env to find the given command, and converts it
+    to an absolute path.  We have to do this because MSVS is looking
+    for an actual file to launch a debugger on, not just a command
+    line.  Note that this happens at GYP time, so anything needing to
+    be built needs to have a full path."""
     if "/" in command or "\\" in command:
         # If the command already has path elements (either relative or
         # absolute), then assume it is constructed properly.
@@ -58,11 +58,11 @@ class Writer:
     def __init__(self, user_file_path, version, name):
         """Initializes the user file.
 
-    Args:
-      user_file_path: Path to the user file.
-      version: Version info.
-      name: Name of the user file.
-    """
+        Args:
+          user_file_path: Path to the user file.
+          version: Version info.
+          name: Name of the user file.
+        """
         self.user_file_path = user_file_path
         self.version = version
         self.name = name
@@ -71,9 +71,9 @@ def __init__(self, user_file_path, version, name):
     def AddConfig(self, name):
         """Adds a configuration to the project.
 
-    Args:
-      name: Configuration name.
-    """
+        Args:
+          name: Configuration name.
+        """
         self.configurations[name] = ["Configuration", {"Name": name}]
 
     def AddDebugSettings(
@@ -81,12 +81,12 @@ def AddDebugSettings(
     ):
         """Adds a DebugSettings node to the user file for a particular config.
 
-    Args:
-      command: command line to run.  First element in the list is the
-        executable.  All elements of the command will be quoted if
-        necessary.
-      working_directory: other files which may trigger the rule. (optional)
-    """
+        Args:
+          command: command line to run.  First element in the list is the
+            executable.  All elements of the command will be quoted if
+            necessary.
+          working_directory: other files which may trigger the rule. (optional)
+        """
         command = _QuoteWin32CommandLineArgs(command)
 
         abs_command = _FindCommandInPath(command[0])
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
index 27647f11d0746..5a1b4ae3198d6 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSUtil.py
@@ -29,13 +29,13 @@ def _GetLargePdbShimCcPath():
 def _DeepCopySomeKeys(in_dict, keys):
     """Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
 
-  Arguments:
-    in_dict: The dictionary to copy.
-    keys: The keys to be copied. If a key is in this list and doesn't exist in
-        |in_dict| this is not an error.
-  Returns:
-    The partially deep-copied dictionary.
-  """
+    Arguments:
+      in_dict: The dictionary to copy.
+      keys: The keys to be copied. If a key is in this list and doesn't exist in
+          |in_dict| this is not an error.
+    Returns:
+      The partially deep-copied dictionary.
+    """
     d = {}
     for key in keys:
         if key not in in_dict:
@@ -47,12 +47,12 @@ def _DeepCopySomeKeys(in_dict, keys):
 def _SuffixName(name, suffix):
     """Add a suffix to the end of a target.
 
-  Arguments:
-    name: name of the target (foo#target)
-    suffix: the suffix to be added
-  Returns:
-    Target name with suffix added (foo_suffix#target)
-  """
+    Arguments:
+      name: name of the target (foo#target)
+      suffix: the suffix to be added
+    Returns:
+      Target name with suffix added (foo_suffix#target)
+    """
     parts = name.rsplit("#", 1)
     parts[0] = f"{parts[0]}_{suffix}"
     return "#".join(parts)
@@ -61,24 +61,24 @@ def _SuffixName(name, suffix):
 def _ShardName(name, number):
     """Add a shard number to the end of a target.
 
-  Arguments:
-    name: name of the target (foo#target)
-    number: shard number
-  Returns:
-    Target name with shard added (foo_1#target)
-  """
+    Arguments:
+      name: name of the target (foo#target)
+      number: shard number
+    Returns:
+      Target name with shard added (foo_1#target)
+    """
     return _SuffixName(name, str(number))
 
 
 def ShardTargets(target_list, target_dicts):
     """Shard some targets apart to work around the linkers limits.
 
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-  Returns:
-    Tuple of the new sharded versions of the inputs.
-  """
+    Arguments:
+      target_list: List of target pairs: 'base/base.gyp:base'.
+      target_dicts: Dict of target properties keyed on target pair.
+    Returns:
+      Tuple of the new sharded versions of the inputs.
+    """
     # Gather the targets to shard, and how many pieces.
     targets_to_shard = {}
     for t in target_dicts:
@@ -128,22 +128,22 @@ def ShardTargets(target_list, target_dicts):
 
 def _GetPdbPath(target_dict, config_name, vars):
     """Returns the path to the PDB file that will be generated by a given
-  configuration.
-
-  The lookup proceeds as follows:
-    - Look for an explicit path in the VCLinkerTool configuration block.
-    - Look for an 'msvs_large_pdb_path' variable.
-    - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
-      specified.
-    - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
-
-  Arguments:
-    target_dict: The target dictionary to be searched.
-    config_name: The name of the configuration of interest.
-    vars: A dictionary of common GYP variables with generator-specific values.
-  Returns:
-    The path of the corresponding PDB file.
-  """
+    configuration.
+
+    The lookup proceeds as follows:
+      - Look for an explicit path in the VCLinkerTool configuration block.
+      - Look for an 'msvs_large_pdb_path' variable.
+      - Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
+        specified.
+      - Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
+
+    Arguments:
+      target_dict: The target dictionary to be searched.
+      config_name: The name of the configuration of interest.
+      vars: A dictionary of common GYP variables with generator-specific values.
+    Returns:
+      The path of the corresponding PDB file.
+    """
     config = target_dict["configurations"][config_name]
     msvs = config.setdefault("msvs_settings", {})
 
@@ -168,16 +168,16 @@ def _GetPdbPath(target_dict, config_name, vars):
 def InsertLargePdbShims(target_list, target_dicts, vars):
     """Insert a shim target that forces the linker to use 4KB pagesize PDBs.
 
-  This is a workaround for targets with PDBs greater than 1GB in size, the
-  limit for the 1KB pagesize PDBs created by the linker by default.
+    This is a workaround for targets with PDBs greater than 1GB in size, the
+    limit for the 1KB pagesize PDBs created by the linker by default.
 
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    vars: A dictionary of common GYP variables with generator-specific values.
-  Returns:
-    Tuple of the shimmed version of the inputs.
-  """
+    Arguments:
+      target_list: List of target pairs: 'base/base.gyp:base'.
+      target_dicts: Dict of target properties keyed on target pair.
+      vars: A dictionary of common GYP variables with generator-specific values.
+    Returns:
+      Tuple of the shimmed version of the inputs.
+    """
     # Determine which targets need shimming.
     targets_to_shim = []
     for t in target_dicts:
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
index 93f48bc05c8dc..09baf44b2b0f8 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/MSVSVersion.py
@@ -76,17 +76,17 @@ def Path(self):
         return self.path
 
     def ToolPath(self, tool):
-        """Returns the path to a given compiler tool. """
+        """Returns the path to a given compiler tool."""
         return os.path.normpath(os.path.join(self.path, "VC/bin", tool))
 
     def DefaultToolset(self):
         """Returns the msbuild toolset version that will be used in the absence
-    of a user override."""
+        of a user override."""
         return self.default_toolset
 
     def _SetupScriptInternal(self, target_arch):
         """Returns a command (with arguments) to be used to set up the
-    environment."""
+        environment."""
         assert target_arch in ("x86", "x64"), "target_arch not supported"
         # If WindowsSDKDir is set and SetEnv.Cmd exists then we are using the
         # depot_tools build tools and should run SetEnv.Cmd to set up the
@@ -154,16 +154,16 @@ def SetupScript(self, target_arch):
 def _RegistryQueryBase(sysdir, key, value):
     """Use reg.exe to read a particular key.
 
-  While ideally we might use the win32 module, we would like gyp to be
-  python neutral, so for instance cygwin python lacks this module.
+    While ideally we might use the win32 module, we would like gyp to be
+    python neutral, so for instance cygwin python lacks this module.
 
-  Arguments:
-    sysdir: The system subdirectory to attempt to launch reg.exe from.
-    key: The registry key to read from.
-    value: The particular value to read.
-  Return:
-    stdout from reg.exe, or None for failure.
-  """
+    Arguments:
+      sysdir: The system subdirectory to attempt to launch reg.exe from.
+      key: The registry key to read from.
+      value: The particular value to read.
+    Return:
+      stdout from reg.exe, or None for failure.
+    """
     # Skip if not on Windows or Python Win32 setup issue
     if sys.platform not in ("win32", "cygwin"):
         return None
@@ -184,20 +184,20 @@ def _RegistryQueryBase(sysdir, key, value):
 def _RegistryQuery(key, value=None):
     r"""Use reg.exe to read a particular key through _RegistryQueryBase.
 
-  First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
-  that fails, it falls back to System32.  Sysnative is available on Vista and
-  up and available on Windows Server 2003 and XP through KB patch 942589. Note
-  that Sysnative will always fail if using 64-bit python due to it being a
-  virtual directory and System32 will work correctly in the first place.
+    First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If
+    that fails, it falls back to System32.  Sysnative is available on Vista and
+    up and available on Windows Server 2003 and XP through KB patch 942589. Note
+    that Sysnative will always fail if using 64-bit python due to it being a
+    virtual directory and System32 will work correctly in the first place.
 
-  KB 942589 - http://support.microsoft.com/kb/942589/en-us.
+    KB 942589 - http://support.microsoft.com/kb/942589/en-us.
 
-  Arguments:
-    key: The registry key.
-    value: The particular registry value to read (optional).
-  Return:
-    stdout from reg.exe, or None for failure.
-  """
+    Arguments:
+      key: The registry key.
+      value: The particular registry value to read (optional).
+    Return:
+      stdout from reg.exe, or None for failure.
+    """
     text = None
     try:
         text = _RegistryQueryBase("Sysnative", key, value)
@@ -212,14 +212,15 @@ def _RegistryQuery(key, value=None):
 def _RegistryGetValueUsingWinReg(key, value):
     """Use the _winreg module to obtain the value of a registry key.
 
-  Args:
-    key: The registry key.
-    value: The particular registry value to read.
-  Return:
-    contents of the registry key's value, or None on failure.  Throws
-    ImportError if winreg is unavailable.
-  """
-    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx
+    Args:
+      key: The registry key.
+      value: The particular registry value to read.
+    Return:
+      contents of the registry key's value, or None on failure.  Throws
+      ImportError if winreg is unavailable.
+    """
+    from winreg import HKEY_LOCAL_MACHINE, OpenKey, QueryValueEx  # noqa: PLC0415
+
     try:
         root, subkey = key.split("\\", 1)
         assert root == "HKLM"  # Only need HKLM for now.
@@ -232,17 +233,17 @@ def _RegistryGetValueUsingWinReg(key, value):
 def _RegistryGetValue(key, value):
     """Use _winreg or reg.exe to obtain the value of a registry key.
 
-  Using _winreg is preferable because it solves an issue on some corporate
-  environments where access to reg.exe is locked down. However, we still need
-  to fallback to reg.exe for the case where the _winreg module is not available
-  (for example in cygwin python).
-
-  Args:
-    key: The registry key.
-    value: The particular registry value to read.
-  Return:
-    contents of the registry key's value, or None on failure.
-  """
+    Using _winreg is preferable because it solves an issue on some corporate
+    environments where access to reg.exe is locked down. However, we still need
+    to fallback to reg.exe for the case where the _winreg module is not available
+    (for example in cygwin python).
+
+    Args:
+      key: The registry key.
+      value: The particular registry value to read.
+    Return:
+      contents of the registry key's value, or None on failure.
+    """
     try:
         return _RegistryGetValueUsingWinReg(key, value)
     except ImportError:
@@ -262,10 +263,10 @@ def _RegistryGetValue(key, value):
 def _CreateVersion(name, path, sdk_based=False):
     """Sets up MSVS project generation.
 
-  Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
-  autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
-  passed in that doesn't match a value in versions python will throw a error.
-  """
+    Setup is based off the GYP_MSVS_VERSION environment variable or whatever is
+    autodetected if GYP_MSVS_VERSION is not explicitly specified. If a version is
+    passed in that doesn't match a value in versions python will throw a error.
+    """
     if path:
         path = os.path.normpath(path)
     versions = {
@@ -435,22 +436,22 @@ def _ConvertToCygpath(path):
 def _DetectVisualStudioVersions(versions_to_check, force_express):
     """Collect the list of installed visual studio versions.
 
-  Returns:
-    A list of visual studio versions installed in descending order of
-    usage preference.
-    Base this on the registry and a quick check if devenv.exe exists.
-    Possibilities are:
-      2005(e) - Visual Studio 2005 (8)
-      2008(e) - Visual Studio 2008 (9)
-      2010(e) - Visual Studio 2010 (10)
-      2012(e) - Visual Studio 2012 (11)
-      2013(e) - Visual Studio 2013 (12)
-      2015    - Visual Studio 2015 (14)
-      2017    - Visual Studio 2017 (15)
-      2019    - Visual Studio 2019 (16)
-      2022    - Visual Studio 2022 (17)
-    Where (e) is e for express editions of MSVS and blank otherwise.
-  """
+    Returns:
+      A list of visual studio versions installed in descending order of
+      usage preference.
+      Base this on the registry and a quick check if devenv.exe exists.
+      Possibilities are:
+        2005(e) - Visual Studio 2005 (8)
+        2008(e) - Visual Studio 2008 (9)
+        2010(e) - Visual Studio 2010 (10)
+        2012(e) - Visual Studio 2012 (11)
+        2013(e) - Visual Studio 2013 (12)
+        2015    - Visual Studio 2015 (14)
+        2017    - Visual Studio 2017 (15)
+        2019    - Visual Studio 2019 (16)
+        2022    - Visual Studio 2022 (17)
+      Where (e) is e for express editions of MSVS and blank otherwise.
+    """
     version_to_year = {
         "8.0": "2005",
         "9.0": "2008",
@@ -527,11 +528,11 @@ def _DetectVisualStudioVersions(versions_to_check, force_express):
 def SelectVisualStudioVersion(version="auto", allow_fallback=True):
     """Select which version of Visual Studio projects to generate.
 
-  Arguments:
-    version: Hook to allow caller to force a particular version (vs auto).
-  Returns:
-    An object representing a visual studio project format version.
-  """
+    Arguments:
+      version: Hook to allow caller to force a particular version (vs auto).
+    Returns:
+      An object representing a visual studio project format version.
+    """
     # In auto mode, check environment variable for override.
     if version == "auto":
         version = os.environ.get("GYP_MSVS_VERSION", "auto")
@@ -552,8 +553,7 @@ def SelectVisualStudioVersion(version="auto", allow_fallback=True):
         "2019": ("16.0",),
         "2022": ("17.0",),
     }
-    override_path = os.environ.get("GYP_MSVS_OVERRIDE_PATH")
-    if override_path:
+    if override_path := os.environ.get("GYP_MSVS_OVERRIDE_PATH"):
         msvs_version = os.environ.get("GYP_MSVS_VERSION")
         if not msvs_version:
             raise ValueError(
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
index 77800661a48c0..3a70cf076c8b4 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/__init__.py
@@ -25,19 +25,21 @@
 DEBUG_VARIABLES = "variables"
 DEBUG_INCLUDES = "includes"
 
+
 def EscapeForCString(string: bytes | str) -> str:
     if isinstance(string, str):
-        string = string.encode(encoding='utf8')
+        string = string.encode(encoding="utf8")
 
-    backslash_or_double_quote = {ord('\\'), ord('"')}
-    result = ''
+    backslash_or_double_quote = {ord("\\"), ord('"')}
+    result = ""
     for char in string:
         if char in backslash_or_double_quote or not 32 <= char < 127:
-            result += '\\%03o' % char
+            result += "\\%03o" % char
         else:
             result += chr(char)
     return result
 
+
 def DebugOutput(mode, message, *args):
     if "all" in gyp.debug or mode in gyp.debug:
         ctx = ("unknown", 0, "unknown")
@@ -76,11 +78,11 @@ def Load(
     circular_check=True,
 ):
     """
-  Loads one or more specified build files.
-  default_variables and includes will be copied before use.
-  Returns the generator for the specified format and the
-  data returned by loading the specified build files.
-  """
+    Loads one or more specified build files.
+    default_variables and includes will be copied before use.
+    Returns the generator for the specified format and the
+    data returned by loading the specified build files.
+    """
     if params is None:
         params = {}
 
@@ -114,7 +116,7 @@ def Load(
     # These parameters are passed in order (as opposed to by key)
     # because ActivePython cannot handle key parameters to __import__.
     generator = __import__(generator_name, globals(), locals(), generator_name)
-    for (key, val) in generator.generator_default_variables.items():
+    for key, val in generator.generator_default_variables.items():
         default_variables.setdefault(key, val)
 
     output_dir = params["options"].generator_output or params["options"].toplevel_dir
@@ -184,10 +186,10 @@ def Load(
 
 def NameValueListToDict(name_value_list):
     """
-  Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
-  of the pairs.  If a string is simply NAME, then the value in the dictionary
-  is set to True.  If VALUE can be converted to an integer, it is.
-  """
+    Takes an array of strings of the form 'NAME=VALUE' and creates a dictionary
+    of the pairs.  If a string is simply NAME, then the value in the dictionary
+    is set to True.  If VALUE can be converted to an integer, it is.
+    """
     result = {}
     for item in name_value_list:
         tokens = item.split("=", 1)
@@ -220,13 +222,13 @@ def FormatOpt(opt, value):
 def RegenerateAppendFlag(flag, values, predicate, env_name, options):
     """Regenerate a list of command line flags, for an option of action='append'.
 
-  The |env_name|, if given, is checked in the environment and used to generate
-  an initial list of options, then the options that were specified on the
-  command line (given in |values|) are appended.  This matches the handling of
-  environment variables and command line flags where command line flags override
-  the environment, while not requiring the environment to be set when the flags
-  are used again.
-  """
+    The |env_name|, if given, is checked in the environment and used to generate
+    an initial list of options, then the options that were specified on the
+    command line (given in |values|) are appended.  This matches the handling of
+    environment variables and command line flags where command line flags override
+    the environment, while not requiring the environment to be set when the flags
+    are used again.
+    """
     flags = []
     if options.use_environment and env_name:
         for flag_value in ShlexEnv(env_name):
@@ -242,14 +244,14 @@ def RegenerateAppendFlag(flag, values, predicate, env_name, options):
 
 def RegenerateFlags(options):
     """Given a parsed options object, and taking the environment variables into
-  account, returns a list of flags that should regenerate an equivalent options
-  object (even in the absence of the environment variables.)
+    account, returns a list of flags that should regenerate an equivalent options
+    object (even in the absence of the environment variables.)
 
-  Any path options will be normalized relative to depth.
+    Any path options will be normalized relative to depth.
 
-  The format flag is not included, as it is assumed the calling generator will
-  set that as appropriate.
-  """
+    The format flag is not included, as it is assumed the calling generator will
+    set that as appropriate.
+    """
 
     def FixPath(path):
         path = gyp.common.FixIfRelativePath(path, options.depth)
@@ -307,15 +309,15 @@ def __init__(self, usage):
     def add_argument(self, *args, **kw):
         """Add an option to the parser.
 
-    This accepts the same arguments as ArgumentParser.add_argument, plus the
-    following:
-      regenerate: can be set to False to prevent this option from being included
-                  in regeneration.
-      env_name: name of environment variable that additional values for this
-                option come from.
-      type: adds type='path', to tell the regenerator that the values of
-            this option need to be made relative to options.depth
-    """
+        This accepts the same arguments as ArgumentParser.add_argument, plus the
+        following:
+          regenerate: can be set to False to prevent this option from being included
+                      in regeneration.
+          env_name: name of environment variable that additional values for this
+                    option come from.
+          type: adds type='path', to tell the regenerator that the values of
+                this option need to be made relative to options.depth
+        """
         env_name = kw.pop("env_name", None)
         if "dest" in kw and kw.pop("regenerate", True):
             dest = kw["dest"]
@@ -343,7 +345,7 @@ def parse_args(self, *args):
 
 def gyp_main(args):
     my_name = os.path.basename(sys.argv[0])
-    usage = "usage: %(prog)s [options ...] [build_file ...]"
+    usage = "%(prog)s [options ...] [build_file ...]"
 
     parser = RegeneratableOptionParser(usage=usage.replace("%s", "%(prog)s"))
     parser.add_argument(
@@ -489,7 +491,8 @@ def gyp_main(args):
 
     options, build_files_arg = parser.parse_args(args)
     if options.version:
-        import pkg_resources
+        import pkg_resources  # noqa: PLC0415
+
         print(f"v{pkg_resources.get_distribution('gyp-next').version}")
         return 0
     build_files = build_files_arg
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common.py b/node_modules/node-gyp/gyp/pylib/gyp/common.py
index fbf1024fc3831..223ce47b0032f 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/common.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/common.py
@@ -31,9 +31,8 @@ def __call__(self, *args):
 
 class GypError(Exception):
     """Error class representing an error, which is to be presented
-  to the user.  The main entry point will catch and display this.
-  """
-
+    to the user.  The main entry point will catch and display this.
+    """
 
 
 def ExceptionAppend(e, msg):
@@ -48,9 +47,9 @@ def ExceptionAppend(e, msg):
 
 def FindQualifiedTargets(target, qualified_list):
     """
-  Given a list of qualified targets, return the qualified targets for the
-  specified |target|.
-  """
+    Given a list of qualified targets, return the qualified targets for the
+    specified |target|.
+    """
     return [t for t in qualified_list if ParseQualifiedTarget(t)[1] == target]
 
 
@@ -115,7 +114,7 @@ def BuildFile(fully_qualified_target):
 
 def GetEnvironFallback(var_list, default):
     """Look up a key in the environment, with fallback to secondary keys
-  and finally falling back to a default value."""
+    and finally falling back to a default value."""
     for var in var_list:
         if var in os.environ:
             return os.environ[var]
@@ -178,11 +177,11 @@ def RelativePath(path, relative_to, follow_path_symlink=True):
 @memoize
 def InvertRelativePath(path, toplevel_dir=None):
     """Given a path like foo/bar that is relative to toplevel_dir, return
-  the inverse relative path back to the toplevel_dir.
+    the inverse relative path back to the toplevel_dir.
 
-  E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
-  should always produce the empty string, unless the path contains symlinks.
-  """
+    E.g. os.path.normpath(os.path.join(path, InvertRelativePath(path)))
+    should always produce the empty string, unless the path contains symlinks.
+    """
     if not path:
         return path
     toplevel_dir = "." if toplevel_dir is None else toplevel_dir
@@ -262,12 +261,12 @@ def UnrelativePath(path, relative_to):
 def EncodePOSIXShellArgument(argument):
     """Encodes |argument| suitably for consumption by POSIX shells.
 
-  argument may be quoted and escaped as necessary to ensure that POSIX shells
-  treat the returned value as a literal representing the argument passed to
-  this function.  Parameter (variable) expansions beginning with $ are allowed
-  to remain intact without escaping the $, to allow the argument to contain
-  references to variables to be expanded by the shell.
-  """
+    argument may be quoted and escaped as necessary to ensure that POSIX shells
+    treat the returned value as a literal representing the argument passed to
+    this function.  Parameter (variable) expansions beginning with $ are allowed
+    to remain intact without escaping the $, to allow the argument to contain
+    references to variables to be expanded by the shell.
+    """
 
     if not isinstance(argument, str):
         argument = str(argument)
@@ -282,9 +281,9 @@ def EncodePOSIXShellArgument(argument):
 def EncodePOSIXShellList(list):
     """Encodes |list| suitably for consumption by POSIX shells.
 
-  Returns EncodePOSIXShellArgument for each item in list, and joins them
-  together using the space character as an argument separator.
-  """
+    Returns EncodePOSIXShellArgument for each item in list, and joins them
+    together using the space character as an argument separator.
+    """
 
     encoded_arguments = []
     for argument in list:
@@ -312,14 +311,12 @@ def DeepDependencyTargets(target_dicts, roots):
 
 
 def BuildFileTargets(target_list, build_file):
-    """From a target_list, returns the subset from the specified build_file.
-  """
+    """From a target_list, returns the subset from the specified build_file."""
     return [p for p in target_list if BuildFile(p) == build_file]
 
 
 def AllTargets(target_list, target_dicts, build_file):
-    """Returns all targets (direct and dependencies) for the specified build_file.
-  """
+    """Returns all targets (direct and dependencies) for the specified build_file."""
     bftargets = BuildFileTargets(target_list, build_file)
     deptargets = DeepDependencyTargets(target_dicts, bftargets)
     return bftargets + deptargets
@@ -328,12 +325,12 @@ def AllTargets(target_list, target_dicts, build_file):
 def WriteOnDiff(filename):
     """Write to a file only if the new contents differ.
 
-  Arguments:
-    filename: name of the file to potentially write to.
-  Returns:
-    A file like object which will write to temporary file and only overwrite
-    the target if it differs (on close).
-  """
+    Arguments:
+      filename: name of the file to potentially write to.
+    Returns:
+      A file like object which will write to temporary file and only overwrite
+      the target if it differs (on close).
+    """
 
     class Writer:
         """Wrapper around file which only covers the target if it differs."""
@@ -421,8 +418,10 @@ def EnsureDirExists(path):
     except OSError:
         pass
 
-def GetCrossCompilerPredefines():  # -> dict
+
+def GetCompilerPredefines():  # -> dict
     cmd = []
+    defines = {}
 
     # shlex.split() will eat '\' in posix mode, but
     # setting posix=False will preserve extra '"' cause CreateProcess fail on Windows
@@ -439,7 +438,7 @@ def replace_sep(s):
         if CXXFLAGS := os.environ.get("CXXFLAGS"):
             cmd += shlex.split(replace_sep(CXXFLAGS))
     else:
-        return {}
+        return defines
 
     if sys.platform == "win32":
         fd, input = tempfile.mkstemp(suffix=".c")
@@ -447,20 +446,34 @@ def replace_sep(s):
         try:
             os.close(fd)
             stdout = subprocess.run(
-                real_cmd, shell=True,
-                capture_output=True, check=True
+                real_cmd, shell=True, capture_output=True, check=True
             ).stdout
+        except subprocess.CalledProcessError as e:
+            print(
+                "Warning: failed to get compiler predefines\n"
+                "cmd: %s\n"
+                "status: %d" % (e.cmd, e.returncode),
+                file=sys.stderr,
+            )
+            return defines
         finally:
             os.unlink(input)
     else:
         input = "/dev/null"
         real_cmd = [*cmd, "-dM", "-E", "-x", "c", input]
-        stdout = subprocess.run(
-            real_cmd, shell=False,
-            capture_output=True, check=True
-        ).stdout
+        try:
+            stdout = subprocess.run(
+                real_cmd, shell=False, capture_output=True, check=True
+            ).stdout
+        except subprocess.CalledProcessError as e:
+            print(
+                "Warning: failed to get compiler predefines\n"
+                "cmd: %s\n"
+                "status: %d" % (e.cmd, e.returncode),
+                file=sys.stderr,
+            )
+            return defines
 
-    defines = {}
     lines = stdout.decode("utf-8").replace("\r\n", "\n").split("\n")
     for line in lines:
         if (line or "").startswith("#define "):
@@ -468,6 +481,7 @@ def replace_sep(s):
             defines[key] = " ".join(value)
     return defines
 
+
 def GetFlavorByPlatform():
     """Returns |params.flavor| if it's set, the system's default flavor else."""
     flavors = {
@@ -495,11 +509,12 @@ def GetFlavorByPlatform():
 
     return "linux"
 
+
 def GetFlavor(params):
     if "flavor" in params:
         return params["flavor"]
 
-    defines = GetCrossCompilerPredefines()
+    defines = GetCompilerPredefines()
     if "__EMSCRIPTEN__" in defines:
         return "emscripten"
     if "__wasm__" in defines:
@@ -510,7 +525,7 @@ def GetFlavor(params):
 
 def CopyTool(flavor, out_path, generator_flags={}):
     """Finds (flock|mac|win)_tool.gyp in the gyp directory and copies it
-  to |out_path|."""
+    to |out_path|."""
     # aix and solaris just need flock emulation. mac and win use more complicated
     # support scripts.
     prefix = {
@@ -566,7 +581,8 @@ def uniquer(seq, idfun=lambda x: x):
 
 
 # Based on http://code.activestate.com/recipes/576694/.
-class OrderedSet(MutableSet):
+class OrderedSet(MutableSet):  # noqa: PLW1641
+    # TODO (cclauss): Fix eq-without-hash ruff rule PLW1641
     def __init__(self, iterable=None):
         self.end = end = []
         end += [None, end, end]  # sentinel node for doubly linked list
@@ -644,24 +660,24 @@ def __str__(self):
 def TopologicallySorted(graph, get_edges):
     r"""Topologically sort based on a user provided edge definition.
 
-  Args:
-    graph: A list of node names.
-    get_edges: A function mapping from node name to a hashable collection
-               of node names which this node has outgoing edges to.
-  Returns:
-    A list containing all of the node in graph in topological order.
-    It is assumed that calling get_edges once for each node and caching is
-    cheaper than repeatedly calling get_edges.
-  Raises:
-    CycleError in the event of a cycle.
-  Example:
-    graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
-    def GetEdges(node):
-      return re.findall(r'\$\(([^))]\)', graph[node])
-    print TopologicallySorted(graph.keys(), GetEdges)
-    ==>
-    ['a', 'c', b']
-  """
+    Args:
+      graph: A list of node names.
+      get_edges: A function mapping from node name to a hashable collection
+                 of node names which this node has outgoing edges to.
+    Returns:
+      A list containing all of the node in graph in topological order.
+      It is assumed that calling get_edges once for each node and caching is
+      cheaper than repeatedly calling get_edges.
+    Raises:
+      CycleError in the event of a cycle.
+    Example:
+      graph = {'a': '$(b) $(c)', 'b': 'hi', 'c': '$(b)'}
+      def GetEdges(node):
+        return re.findall(r'\$\(([^))]\)', graph[node])
+      print TopologicallySorted(graph.keys(), GetEdges)
+      ==>
+      ['a', 'c', b']
+    """
     get_edges = memoize(get_edges)
     visited = set()
     visiting = set()
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
index bd7172afaf369..b5988816c04a2 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/common_test.py
@@ -7,6 +7,7 @@
 """Unit tests for the common.py file."""
 
 import os
+import subprocess
 import sys
 import unittest
 from unittest.mock import MagicMock, patch
@@ -27,8 +28,12 @@ def test_Valid(self):
         def GetEdge(node):
             return tuple(graph[node])
 
-        assert gyp.common.TopologicallySorted(
-            graph.keys(), GetEdge) == ["a", "c", "d", "b"]
+        assert gyp.common.TopologicallySorted(graph.keys(), GetEdge) == [
+            "a",
+            "c",
+            "d",
+            "b",
+        ]
 
     def test_Cycle(self):
         """Test that an exception is thrown on a cyclic graph."""
@@ -85,89 +90,97 @@ def decode(self, encoding):
     @patch("os.close")
     @patch("os.unlink")
     @patch("tempfile.mkstemp")
-    def test_GetCrossCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close):
+    def test_GetCompilerPredefines(self, mock_mkstemp, mock_unlink, mock_close):
         mock_close.return_value = None
         mock_unlink.return_value = None
         mock_mkstemp.return_value = (0, "temp.c")
 
-        def mock_run(env, defines_stdout, expected_cmd):
+        def mock_run(env, defines_stdout, expected_cmd, throws=False):
             with patch("subprocess.run") as mock_run:
-                mock_process = MagicMock()
-                mock_process.returncode = 0
-                mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout)
-                mock_run.return_value = mock_process
                 expected_input = "temp.c" if sys.platform == "win32" else "/dev/null"
+                if throws:
+                    mock_run.side_effect = subprocess.CalledProcessError(
+                        returncode=1,
+                        cmd=[*expected_cmd, "-dM", "-E", "-x", "c", expected_input],
+                    )
+                else:
+                    mock_process = MagicMock()
+                    mock_process.returncode = 0
+                    mock_process.stdout = TestGetFlavor.MockCommunicate(defines_stdout)
+                    mock_run.return_value = mock_process
                 with patch.dict(os.environ, env):
-                    defines = gyp.common.GetCrossCompilerPredefines()
+                    try:
+                        defines = gyp.common.GetCompilerPredefines()
+                    except Exception as e:
+                        self.fail(f"GetCompilerPredefines raised an exception: {e}")
                     flavor = gyp.common.GetFlavor({})
-                if env.get("CC_target"):
+                if env.get("CC_target") or env.get("CC"):
                     mock_run.assert_called_with(
-                        [
-                            *expected_cmd,
-                            "-dM", "-E", "-x", "c", expected_input
-                        ],
+                        [*expected_cmd, "-dM", "-E", "-x", "c", expected_input],
                         shell=sys.platform == "win32",
-                        capture_output=True, check=True)
+                        capture_output=True,
+                        check=True,
+                    )
                 return [defines, flavor]
 
+        [defines0, _] = mock_run({"CC": "cl.exe"}, "", ["cl.exe"], True)
+        assert defines0 == {}
+
         [defines1, _] = mock_run({}, "", [])
         assert defines1 == {}
 
         [defines2, flavor2] = mock_run(
-            { "CC_target": "/opt/wasi-sdk/bin/clang" },
+            {"CC_target": "/opt/wasi-sdk/bin/clang"},
             "#define __wasm__ 1\n#define __wasi__ 1\n",
-            ["/opt/wasi-sdk/bin/clang"]
+            ["/opt/wasi-sdk/bin/clang"],
         )
-        assert defines2 == { "__wasm__": "1", "__wasi__": "1" }
+        assert defines2 == {"__wasm__": "1", "__wasi__": "1"}
         assert flavor2 == "wasi"
 
         [defines3, flavor3] = mock_run(
-            { "CC_target": "/opt/wasi-sdk/bin/clang --target=wasm32" },
+            {"CC_target": "/opt/wasi-sdk/bin/clang --target=wasm32"},
             "#define __wasm__ 1\n",
-            ["/opt/wasi-sdk/bin/clang", "--target=wasm32"]
+            ["/opt/wasi-sdk/bin/clang", "--target=wasm32"],
         )
-        assert defines3 == { "__wasm__": "1" }
+        assert defines3 == {"__wasm__": "1"}
         assert flavor3 == "wasm"
 
         [defines4, flavor4] = mock_run(
-            { "CC_target": "/emsdk/upstream/emscripten/emcc" },
+            {"CC_target": "/emsdk/upstream/emscripten/emcc"},
             "#define __EMSCRIPTEN__ 1\n",
-            ["/emsdk/upstream/emscripten/emcc"]
+            ["/emsdk/upstream/emscripten/emcc"],
         )
-        assert defines4 == { "__EMSCRIPTEN__": "1" }
+        assert defines4 == {"__EMSCRIPTEN__": "1"}
         assert flavor4 == "emscripten"
 
         # Test path which include white space
         [defines5, flavor5] = mock_run(
             {
-                "CC_target": "\"/Users/Toyo Li/wasi-sdk/bin/clang\" -O3",
-                "CFLAGS": "--target=wasm32-wasi-threads -pthread"
+                "CC_target": '"/Users/Toyo Li/wasi-sdk/bin/clang" -O3',
+                "CFLAGS": "--target=wasm32-wasi-threads -pthread",
             },
             "#define __wasm__ 1\n#define __wasi__ 1\n#define _REENTRANT 1\n",
             [
                 "/Users/Toyo Li/wasi-sdk/bin/clang",
                 "-O3",
                 "--target=wasm32-wasi-threads",
-                "-pthread"
-            ]
+                "-pthread",
+            ],
         )
-        assert defines5 == {
-            "__wasm__": "1",
-            "__wasi__": "1",
-            "_REENTRANT": "1"
-        }
+        assert defines5 == {"__wasm__": "1", "__wasi__": "1", "_REENTRANT": "1"}
         assert flavor5 == "wasi"
 
         original_sep = os.sep
         os.sep = "\\"
         [defines6, flavor6] = mock_run(
-            { "CC_target": "\"C:\\Program Files\\wasi-sdk\\clang.exe\"" },
+            {"CC_target": '"C:\\Program Files\\wasi-sdk\\clang.exe"'},
             "#define __wasm__ 1\n#define __wasi__ 1\n",
-            ["C:/Program Files/wasi-sdk/clang.exe"]
+            ["C:/Program Files/wasi-sdk/clang.exe"],
         )
         os.sep = original_sep
-        assert defines6 == { "__wasm__": "1", "__wasi__": "1" }
+        assert defines6 == {"__wasm__": "1", "__wasi__": "1"}
         assert flavor6 == "wasi"
 
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
index e4d2f82b68741..a5d95153eca72 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml.py
@@ -10,43 +10,43 @@
 
 
 def XmlToString(content, encoding="utf-8", pretty=False):
-    """ Writes the XML content to disk, touching the file only if it has changed.
-
-  Visual Studio files have a lot of pre-defined structures.  This function makes
-  it easy to represent these structures as Python data structures, instead of
-  having to create a lot of function calls.
-
-  Each XML element of the content is represented as a list composed of:
-  1. The name of the element, a string,
-  2. The attributes of the element, a dictionary (optional), and
-  3+. The content of the element, if any.  Strings are simple text nodes and
-      lists are child elements.
-
-  Example 1:
-      <test/>
-  becomes
-      ['test']
-
-  Example 2:
-      <myelement a='value1' b='value2'>
-         <childtype>This is</childtype>
-         <childtype>it!</childtype>
-      </myelement>
-
-  becomes
-      ['myelement', {'a':'value1', 'b':'value2'},
-         ['childtype', 'This is'],
-         ['childtype', 'it!'],
-      ]
-
-  Args:
-    content:  The structured content to be converted.
-    encoding: The encoding to report on the first XML line.
-    pretty: True if we want pretty printing with indents and new lines.
-
-  Returns:
-    The XML content as a string.
-  """
+    """Writes the XML content to disk, touching the file only if it has changed.
+
+    Visual Studio files have a lot of pre-defined structures.  This function makes
+    it easy to represent these structures as Python data structures, instead of
+    having to create a lot of function calls.
+
+    Each XML element of the content is represented as a list composed of:
+    1. The name of the element, a string,
+    2. The attributes of the element, a dictionary (optional), and
+    3+. The content of the element, if any.  Strings are simple text nodes and
+        lists are child elements.
+
+    Example 1:
+        <test/>
+    becomes
+        ['test']
+
+    Example 2:
+        <myelement a='value1' b='value2'>
+           <childtype>This is</childtype>
+           <childtype>it!</childtype>
+        </myelement>
+
+    becomes
+        ['myelement', {'a':'value1', 'b':'value2'},
+           ['childtype', 'This is'],
+           ['childtype', 'it!'],
+        ]
+
+    Args:
+      content:  The structured content to be converted.
+      encoding: The encoding to report on the first XML line.
+      pretty: True if we want pretty printing with indents and new lines.
+
+    Returns:
+      The XML content as a string.
+    """
     # We create a huge list of all the elements of the file.
     xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
     if pretty:
@@ -58,14 +58,14 @@ def XmlToString(content, encoding="utf-8", pretty=False):
 
 
 def _ConstructContentList(xml_parts, specification, pretty, level=0):
-    """ Appends the XML parts corresponding to the specification.
-
-  Args:
-    xml_parts: A list of XML parts to be appended to.
-    specification:  The specification of the element.  See EasyXml docs.
-    pretty: True if we want pretty printing with indents and new lines.
-    level: Indentation level.
-  """
+    """Appends the XML parts corresponding to the specification.
+
+    Args:
+      xml_parts: A list of XML parts to be appended to.
+      specification:  The specification of the element.  See EasyXml docs.
+      pretty: True if we want pretty printing with indents and new lines.
+      level: Indentation level.
+    """
     # The first item in a specification is the name of the element.
     if pretty:
         indentation = "  " * level
@@ -107,16 +107,17 @@ def _ConstructContentList(xml_parts, specification, pretty, level=0):
         xml_parts.append("/>%s" % new_line)
 
 
-def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
-                      win32=(sys.platform == "win32")):
-    """ Writes the XML content to disk, touching the file only if it has changed.
+def WriteXmlIfChanged(
+    content, path, encoding="utf-8", pretty=False, win32=(sys.platform == "win32")
+):
+    """Writes the XML content to disk, touching the file only if it has changed.
 
-  Args:
-    content:  The structured content to be written.
-    path: Location of the file.
-    encoding: The encoding to report on the first line of the XML file.
-    pretty: True if we want pretty printing with indents and new lines.
-  """
+    Args:
+      content:  The structured content to be written.
+      path: Location of the file.
+      encoding: The encoding to report on the first line of the XML file.
+      pretty: True if we want pretty printing with indents and new lines.
+    """
     xml_string = XmlToString(content, encoding, pretty)
     if win32 and os.linesep != "\r\n":
         xml_string = xml_string.replace("\n", "\r\n")
@@ -157,7 +158,7 @@ def WriteXmlIfChanged(content, path, encoding="utf-8", pretty=False,
 
 
 def _XmlEscape(value, attr=False):
-    """ Escape a string for inclusion in XML."""
+    """Escape a string for inclusion in XML."""
 
     def replace(match):
         m = match.string[match.start() : match.end()]
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
index bb97b802c5955..29f5dad5a6e90 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py
@@ -4,7 +4,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-""" Unit tests for the easy_xml.py file. """
+"""Unit tests for the easy_xml.py file."""
 
 import unittest
 from io import StringIO
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
index cb18742cd8df6..420c4e49ebc19 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/analyzer.py
@@ -62,7 +62,6 @@
 then the "all" target includes "b1" and "b2".
 """
 
-
 import json
 import os
 import posixpath
@@ -130,8 +129,8 @@ def _ToGypPath(path):
 
 def _ResolveParent(path, base_path_components):
     """Resolves |path|, which starts with at least one '../'. Returns an empty
-  string if the path shouldn't be considered. See _AddSources() for a
-  description of |base_path_components|."""
+    string if the path shouldn't be considered. See _AddSources() for a
+    description of |base_path_components|."""
     depth = 0
     while path.startswith("../"):
         depth += 1
@@ -151,11 +150,11 @@ def _ResolveParent(path, base_path_components):
 
 def _AddSources(sources, base_path, base_path_components, result):
     """Extracts valid sources from |sources| and adds them to |result|. Each
-  source file is relative to |base_path|, but may contain '..'. To make
-  resolving '..' easier |base_path_components| contains each of the
-  directories in |base_path|. Additionally each source may contain variables.
-  Such sources are ignored as it is assumed dependencies on them are expressed
-  and tracked in some other means."""
+    source file is relative to |base_path|, but may contain '..'. To make
+    resolving '..' easier |base_path_components| contains each of the
+    directories in |base_path|. Additionally each source may contain variables.
+    Such sources are ignored as it is assumed dependencies on them are expressed
+    and tracked in some other means."""
     # NOTE: gyp paths are always posix style.
     for source in sources:
         if not len(source) or source.startswith(("!!!", "$")):
@@ -218,23 +217,23 @@ def _ExtractSources(target, target_dict, toplevel_dir):
 
 class Target:
     """Holds information about a particular target:
-  deps: set of Targets this Target depends upon. This is not recursive, only the
-    direct dependent Targets.
-  match_status: one of the MatchStatus values.
-  back_deps: set of Targets that have a dependency on this Target.
-  visited: used during iteration to indicate whether we've visited this target.
-    This is used for two iterations, once in building the set of Targets and
-    again in _GetBuildTargets().
-  name: fully qualified name of the target.
-  requires_build: True if the target type is such that it needs to be built.
-    See _DoesTargetTypeRequireBuild for details.
-  added_to_compile_targets: used when determining if the target was added to the
-    set of targets that needs to be built.
-  in_roots: true if this target is a descendant of one of the root nodes.
-  is_executable: true if the type of target is executable.
-  is_static_library: true if the type of target is static_library.
-  is_or_has_linked_ancestor: true if the target does a link (eg executable), or
-    if there is a target in back_deps that does a link."""
+    deps: set of Targets this Target depends upon. This is not recursive, only the
+      direct dependent Targets.
+    match_status: one of the MatchStatus values.
+    back_deps: set of Targets that have a dependency on this Target.
+    visited: used during iteration to indicate whether we've visited this target.
+      This is used for two iterations, once in building the set of Targets and
+      again in _GetBuildTargets().
+    name: fully qualified name of the target.
+    requires_build: True if the target type is such that it needs to be built.
+      See _DoesTargetTypeRequireBuild for details.
+    added_to_compile_targets: used when determining if the target was added to the
+      set of targets that needs to be built.
+    in_roots: true if this target is a descendant of one of the root nodes.
+    is_executable: true if the type of target is executable.
+    is_static_library: true if the type of target is static_library.
+    is_or_has_linked_ancestor: true if the target does a link (eg executable), or
+      if there is a target in back_deps that does a link."""
 
     def __init__(self, name):
         self.deps = set()
@@ -254,8 +253,8 @@ def __init__(self, name):
 
 class Config:
     """Details what we're looking for
-  files: set of files to search for
-  targets: see file description for details."""
+    files: set of files to search for
+    targets: see file description for details."""
 
     def __init__(self):
         self.files = []
@@ -265,7 +264,7 @@ def __init__(self):
 
     def Init(self, params):
         """Initializes Config. This is a separate method as it raises an exception
-    if there is a parse error."""
+        if there is a parse error."""
         generator_flags = params.get("generator_flags", {})
         config_path = generator_flags.get("config_path", None)
         if not config_path:
@@ -289,8 +288,8 @@ def Init(self, params):
 
 def _WasBuildFileModified(build_file, data, files, toplevel_dir):
     """Returns true if the build file |build_file| is either in |files| or
-  one of the files included by |build_file| is in |files|. |toplevel_dir| is
-  the root of the source tree."""
+    one of the files included by |build_file| is in |files|. |toplevel_dir| is
+    the root of the source tree."""
     if _ToLocalPath(toplevel_dir, _ToGypPath(build_file)) in files:
         if debug:
             print("gyp file modified", build_file)
@@ -319,8 +318,8 @@ def _WasBuildFileModified(build_file, data, files, toplevel_dir):
 
 def _GetOrCreateTargetByName(targets, target_name):
     """Creates or returns the Target at targets[target_name]. If there is no
-  Target for |target_name| one is created. Returns a tuple of whether a new
-  Target was created and the Target."""
+    Target for |target_name| one is created. Returns a tuple of whether a new
+    Target was created and the Target."""
     if target_name in targets:
         return False, targets[target_name]
     target = Target(target_name)
@@ -340,13 +339,13 @@ def _DoesTargetTypeRequireBuild(target_dict):
 
 def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build_files):
     """Returns a tuple of the following:
-  . A dictionary mapping from fully qualified name to Target.
-  . A list of the targets that have a source file in |files|.
-  . Targets that constitute the 'all' target. See description at top of file
-    for details on the 'all' target.
-  This sets the |match_status| of the targets that contain any of the source
-  files in |files| to MATCH_STATUS_MATCHES.
-  |toplevel_dir| is the root of the source tree."""
+    . A dictionary mapping from fully qualified name to Target.
+    . A list of the targets that have a source file in |files|.
+    . Targets that constitute the 'all' target. See description at top of file
+      for details on the 'all' target.
+    This sets the |match_status| of the targets that contain any of the source
+    files in |files| to MATCH_STATUS_MATCHES.
+    |toplevel_dir| is the root of the source tree."""
     # Maps from target name to Target.
     name_to_target = {}
 
@@ -379,9 +378,10 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build
         target_type = target_dicts[target_name]["type"]
         target.is_executable = target_type == "executable"
         target.is_static_library = target_type == "static_library"
-        target.is_or_has_linked_ancestor = (
-            target_type in {"executable", "shared_library"}
-        )
+        target.is_or_has_linked_ancestor = target_type in {
+            "executable",
+            "shared_library",
+        }
 
         build_file = gyp.common.ParseQualifiedTarget(target_name)[0]
         if build_file not in build_file_in_files:
@@ -427,9 +427,9 @@ def _GenerateTargets(data, target_list, target_dicts, toplevel_dir, files, build
 
 def _GetUnqualifiedToTargetMapping(all_targets, to_find):
     """Returns a tuple of the following:
-  . mapping (dictionary) from unqualified name to Target for all the
-    Targets in |to_find|.
-  . any target names not found. If this is empty all targets were found."""
+    . mapping (dictionary) from unqualified name to Target for all the
+      Targets in |to_find|.
+    . any target names not found. If this is empty all targets were found."""
     result = {}
     if not to_find:
         return {}, []
@@ -446,15 +446,15 @@ def _GetUnqualifiedToTargetMapping(all_targets, to_find):
 
 def _DoesTargetDependOnMatchingTargets(target):
     """Returns true if |target| or any of its dependencies is one of the
-  targets containing the files supplied as input to analyzer. This updates
-  |matches| of the Targets as it recurses.
-  target: the Target to look for."""
+    targets containing the files supplied as input to analyzer. This updates
+    |matches| of the Targets as it recurses.
+    target: the Target to look for."""
     if target.match_status == MATCH_STATUS_DOESNT_MATCH:
         return False
-    if (
-        target.match_status in {MATCH_STATUS_MATCHES,
-                                MATCH_STATUS_MATCHES_BY_DEPENDENCY}
-    ):
+    if target.match_status in {
+        MATCH_STATUS_MATCHES,
+        MATCH_STATUS_MATCHES_BY_DEPENDENCY,
+    }:
         return True
     for dep in target.deps:
         if _DoesTargetDependOnMatchingTargets(dep):
@@ -467,9 +467,9 @@ def _DoesTargetDependOnMatchingTargets(target):
 
 def _GetTargetsDependingOnMatchingTargets(possible_targets):
     """Returns the list of Targets in |possible_targets| that depend (either
-  directly on indirectly) on at least one of the targets containing the files
-  supplied as input to analyzer.
-  possible_targets: targets to search from."""
+    directly on indirectly) on at least one of the targets containing the files
+    supplied as input to analyzer.
+    possible_targets: targets to search from."""
     found = []
     print("Targets that matched by dependency:")
     for target in possible_targets:
@@ -480,11 +480,11 @@ def _GetTargetsDependingOnMatchingTargets(possible_targets):
 
 def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
     """Recurses through all targets that depend on |target|, adding all targets
-  that need to be built (and are in |roots|) to |result|.
-  roots: set of root targets.
-  add_if_no_ancestor: If true and there are no ancestors of |target| then add
-  |target| to |result|. |target| must still be in |roots|.
-  result: targets that need to be built are added here."""
+    that need to be built (and are in |roots|) to |result|.
+    roots: set of root targets.
+    add_if_no_ancestor: If true and there are no ancestors of |target| then add
+    |target| to |result|. |target| must still be in |roots|.
+    result: targets that need to be built are added here."""
     if target.visited:
         return
 
@@ -537,8 +537,8 @@ def _AddCompileTargets(target, roots, add_if_no_ancestor, result):
 
 def _GetCompileTargets(matching_targets, supplied_targets):
     """Returns the set of Targets that require a build.
-  matching_targets: targets that changed and need to be built.
-  supplied_targets: set of targets supplied to analyzer to search from."""
+    matching_targets: targets that changed and need to be built.
+    supplied_targets: set of targets supplied to analyzer to search from."""
     result = set()
     for target in matching_targets:
         print("finding compile targets for match", target.name)
@@ -592,7 +592,7 @@ def _WriteOutput(params, **values):
 
 def _WasGypIncludeFileModified(params, files):
     """Returns true if one of the files in |files| is in the set of included
-  files."""
+    files."""
     if params["options"].includes:
         for include in params["options"].includes:
             if _ToGypPath(os.path.normpath(include)) in files:
@@ -608,7 +608,7 @@ def _NamesNotIn(names, mapping):
 
 def _LookupTargets(names, mapping):
     """Returns a list of the mapping[name] for each value in |names| that is in
-  |mapping|."""
+    |mapping|."""
     return [mapping[name] for name in names if name in mapping]
 
 
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
index 5ebe58bb556d8..cfc0681f6bb04 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/android.py
@@ -177,9 +177,7 @@ def Write(
             self.WriteLn("LOCAL_IS_HOST_MODULE := true")
             self.WriteLn("LOCAL_MULTILIB := $(GYP_HOST_MULTILIB)")
         elif sdk_version > 0:
-            self.WriteLn(
-                "LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)"
-            )
+            self.WriteLn("LOCAL_MODULE_TARGET_ARCH := $(TARGET_$(GYP_VAR_PREFIX)ARCH)")
             self.WriteLn("LOCAL_SDK_VERSION := %s" % sdk_version)
 
         # Grab output directories; needed for Actions and Rules.
@@ -588,7 +586,8 @@ def WriteSources(self, spec, configs, extra_sources):
         local_files = []
         for source in sources:
             (root, ext) = os.path.splitext(source)
-            if ("$(gyp_shared_intermediate_dir)" in source
+            if (
+                "$(gyp_shared_intermediate_dir)" in source
                 or "$(gyp_intermediate_dir)" in source
                 or (IsCPPExtension(ext) and ext != local_cpp_extension)
             ):
@@ -734,8 +733,7 @@ def ComputeOutput(self, spec):
         elif self.toolset == "host":
             path = (
                 "$(call intermediates-dir-for,%s,%s,true,,"
-                "$(GYP_HOST_VAR_PREFIX))"
-                % (self.android_class, self.android_module)
+                "$(GYP_HOST_VAR_PREFIX))" % (self.android_class, self.android_module)
             )
         else:
             path = (
@@ -900,8 +898,7 @@ def WriteTarget(
         if self.type != "none":
             self.WriteTargetFlags(spec, configs, link_deps)
 
-        settings = spec.get("aosp_build_settings", {})
-        if settings:
+        if settings := spec.get("aosp_build_settings", {}):
             self.WriteLn("### Set directly by aosp_build_settings.")
             for k, v in settings.items():
                 if isinstance(v, list):
@@ -1002,9 +999,9 @@ def LocalPathify(self, path):
         # - i.e. that the resulting path is still inside the project tree. The
         # path may legitimately have ended up containing just $(LOCAL_PATH), though,
         # so we don't look for a slash.
-        assert local_path.startswith(
-            "$(LOCAL_PATH)"
-        ), f"Path {path} attempts to escape from gyp path {self.path} !)"
+        assert local_path.startswith("$(LOCAL_PATH)"), (
+            f"Path {path} attempts to escape from gyp path {self.path} !)"
+        )
         return local_path
 
     def ExpandInputRoot(self, template, expansion, dirname):
@@ -1046,9 +1043,9 @@ def CalculateMakefilePath(build_file, base_name):
         base_path = gyp.common.RelativePath(os.path.dirname(build_file), options.depth)
         # We write the file in the base_path directory.
         output_file = os.path.join(options.depth, base_path, base_name)
-        assert (
-            not options.generator_output
-        ), "The Android backend does not support options.generator_output."
+        assert not options.generator_output, (
+            "The Android backend does not support options.generator_output."
+        )
         base_path = gyp.common.RelativePath(
             os.path.dirname(build_file), options.toplevel_dir
         )
@@ -1068,9 +1065,9 @@ def CalculateMakefilePath(build_file, base_name):
 
     makefile_name = "GypAndroid" + options.suffix + ".mk"
     makefile_path = os.path.join(options.toplevel_dir, makefile_name)
-    assert (
-        not options.generator_output
-    ), "The Android backend does not support options.generator_output."
+    assert not options.generator_output, (
+        "The Android backend does not support options.generator_output."
+    )
     gyp.common.EnsureDirExists(makefile_path)
     root_makefile = open(makefile_path, "w")
 
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
index e69103e1b9ba3..dc9ea39acb7fc 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/cmake.py
@@ -28,7 +28,6 @@
 CMakeLists.txt file.
 """
 
-
 import multiprocessing
 import os
 import signal
@@ -97,11 +96,11 @@ def Linkable(filename):
 def NormjoinPathForceCMakeSource(base_path, rel_path):
     """Resolves rel_path against base_path and returns the result.
 
-  If rel_path is an absolute path it is returned unchanged.
-  Otherwise it is resolved against base_path and normalized.
-  If the result is a relative path, it is forced to be relative to the
-  CMakeLists.txt.
-  """
+    If rel_path is an absolute path it is returned unchanged.
+    Otherwise it is resolved against base_path and normalized.
+    If the result is a relative path, it is forced to be relative to the
+    CMakeLists.txt.
+    """
     if os.path.isabs(rel_path):
         return rel_path
     if any(rel_path.startswith(var) for var in FULL_PATH_VARS):
@@ -114,10 +113,10 @@ def NormjoinPathForceCMakeSource(base_path, rel_path):
 
 def NormjoinPath(base_path, rel_path):
     """Resolves rel_path against base_path and returns the result.
-  TODO: what is this really used for?
-  If rel_path begins with '$' it is returned unchanged.
-  Otherwise it is resolved against base_path if relative, then normalized.
-  """
+    TODO: what is this really used for?
+    If rel_path begins with '$' it is returned unchanged.
+    Otherwise it is resolved against base_path if relative, then normalized.
+    """
     if rel_path.startswith("$") and not rel_path.startswith("${configuration}"):
         return rel_path
     return os.path.normpath(os.path.join(base_path, rel_path))
@@ -126,19 +125,19 @@ def NormjoinPath(base_path, rel_path):
 def CMakeStringEscape(a):
     """Escapes the string 'a' for use inside a CMake string.
 
-  This means escaping
-  '\' otherwise it may be seen as modifying the next character
-  '"' otherwise it will end the string
-  ';' otherwise the string becomes a list
+    This means escaping
+    '\' otherwise it may be seen as modifying the next character
+    '"' otherwise it will end the string
+    ';' otherwise the string becomes a list
 
-  The following do not need to be escaped
-  '#' when the lexer is in string state, this does not start a comment
+    The following do not need to be escaped
+    '#' when the lexer is in string state, this does not start a comment
 
-  The following are yet unknown
-  '$' generator variables (like ${obj}) must not be escaped,
-      but text $ should be escaped
-      what is wanted is to know which $ come from generator variables
-  """
+    The following are yet unknown
+    '$' generator variables (like ${obj}) must not be escaped,
+        but text $ should be escaped
+        what is wanted is to know which $ come from generator variables
+    """
     return a.replace("\\", "\\\\").replace(";", "\\;").replace('"', '\\"')
 
 
@@ -237,25 +236,25 @@ def __init__(self, command, modifier, property_modifier):
 def StringToCMakeTargetName(a):
     """Converts the given string 'a' to a valid CMake target name.
 
-  All invalid characters are replaced by '_'.
-  Invalid for cmake: ' ', '/', '(', ')', '"'
-  Invalid for make: ':'
-  Invalid for unknown reasons but cause failures: '.'
-  """
+    All invalid characters are replaced by '_'.
+    Invalid for cmake: ' ', '/', '(', ')', '"'
+    Invalid for make: ':'
+    Invalid for unknown reasons but cause failures: '.'
+    """
     return a.translate(_maketrans(' /():."', "_______"))
 
 
 def WriteActions(target_name, actions, extra_sources, extra_deps, path_to_gyp, output):
     """Write CMake for the 'actions' in the target.
 
-  Args:
-    target_name: the name of the CMake target being generated.
-    actions: the Gyp 'actions' dict for this target.
-    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-    extra_deps: [<cmake_taget>] to append with generated targets.
-    path_to_gyp: relative path from CMakeLists.txt being generated to
-        the Gyp file in which the target being generated is defined.
-  """
+    Args:
+      target_name: the name of the CMake target being generated.
+      actions: the Gyp 'actions' dict for this target.
+      extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+      extra_deps: [<cmake_taget>] to append with generated targets.
+      path_to_gyp: relative path from CMakeLists.txt being generated to
+          the Gyp file in which the target being generated is defined.
+    """
     for action in actions:
         action_name = StringToCMakeTargetName(action["action_name"])
         action_target_name = f"{target_name}__{action_name}"
@@ -337,14 +336,14 @@ def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
 def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, output):
     """Write CMake for the 'rules' in the target.
 
-  Args:
-    target_name: the name of the CMake target being generated.
-    actions: the Gyp 'actions' dict for this target.
-    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
-    extra_deps: [<cmake_taget>] to append with generated targets.
-    path_to_gyp: relative path from CMakeLists.txt being generated to
-        the Gyp file in which the target being generated is defined.
-  """
+    Args:
+      target_name: the name of the CMake target being generated.
+      actions: the Gyp 'actions' dict for this target.
+      extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
+      extra_deps: [<cmake_taget>] to append with generated targets.
+      path_to_gyp: relative path from CMakeLists.txt being generated to
+          the Gyp file in which the target being generated is defined.
+    """
     for rule in rules:
         rule_name = StringToCMakeTargetName(target_name + "__" + rule["rule_name"])
 
@@ -455,13 +454,13 @@ def WriteRules(target_name, rules, extra_sources, extra_deps, path_to_gyp, outpu
 def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
     """Write CMake for the 'copies' in the target.
 
-  Args:
-    target_name: the name of the CMake target being generated.
-    actions: the Gyp 'actions' dict for this target.
-    extra_deps: [<cmake_taget>] to append with generated targets.
-    path_to_gyp: relative path from CMakeLists.txt being generated to
-        the Gyp file in which the target being generated is defined.
-  """
+    Args:
+      target_name: the name of the CMake target being generated.
+      actions: the Gyp 'actions' dict for this target.
+      extra_deps: [<cmake_taget>] to append with generated targets.
+      path_to_gyp: relative path from CMakeLists.txt being generated to
+          the Gyp file in which the target being generated is defined.
+    """
     copy_name = target_name + "__copies"
 
     # CMake gets upset with custom targets with OUTPUT which specify no output.
@@ -585,23 +584,23 @@ def CreateCMakeTargetFullName(qualified_target):
 class CMakeNamer:
     """Converts Gyp target names into CMake target names.
 
-  CMake requires that target names be globally unique. One way to ensure
-  this is to fully qualify the names of the targets. Unfortunately, this
-  ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
-  of just "chrome". If this generator were only interested in building, it
-  would be possible to fully qualify all target names, then create
-  unqualified target names which depend on all qualified targets which
-  should have had that name. This is more or less what the 'make' generator
-  does with aliases. However, one goal of this generator is to create CMake
-  files for use with IDEs, and fully qualified names are not as user
-  friendly.
+    CMake requires that target names be globally unique. One way to ensure
+    this is to fully qualify the names of the targets. Unfortunately, this
+    ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
+    of just "chrome". If this generator were only interested in building, it
+    would be possible to fully qualify all target names, then create
+    unqualified target names which depend on all qualified targets which
+    should have had that name. This is more or less what the 'make' generator
+    does with aliases. However, one goal of this generator is to create CMake
+    files for use with IDEs, and fully qualified names are not as user
+    friendly.
 
-  Since target name collision is rare, we do the above only when required.
+    Since target name collision is rare, we do the above only when required.
 
-  Toolset variants are always qualified from the base, as this is required for
-  building. However, it also makes sense for an IDE, as it is possible for
-  defines to be different.
-  """
+    Toolset variants are always qualified from the base, as this is required for
+    building. However, it also makes sense for an IDE, as it is possible for
+    defines to be different.
+    """
 
     def __init__(self, target_list):
         self.cmake_target_base_names_conflicting = set()
@@ -810,8 +809,7 @@ def WriteTarget(
     # link directories to targets defined after it is called.
     # As a result, link_directories must come before the target definition.
     # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
-    library_dirs = config.get("library_dirs")
-    if library_dirs is not None:
+    if (library_dirs := config.get("library_dirs")) is not None:
         output.write("link_directories(")
         for library_dir in library_dirs:
             output.write(" ")
@@ -1295,8 +1293,7 @@ def CallGenerateOutputForConfig(arglist):
 
 
 def GenerateOutput(target_list, target_dicts, data, params):
-    user_config = params.get("generator_flags", {}).get("config", None)
-    if user_config:
+    if user_config := params.get("generator_flags", {}).get("config", None):
         GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
     else:
         config_names = target_dicts[target_list[0]]["configurations"]
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
index e41c72d71070a..c919674024e69 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/dump_dependency_json.py
@@ -56,7 +56,7 @@ def CalculateVariables(default_variables, params):
 
 def CalculateGeneratorInputInfo(params):
     """Calculate the generator specific info that gets fed to input (called by
-  gyp)."""
+    gyp)."""
     generator_flags = params.get("generator_flags", {})
     if generator_flags.get("adjust_static_libraries", False):
         global generator_wants_static_library_dependencies_adjusted
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
index ed6daa91bac3e..685cd08c964b9 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/eclipse.py
@@ -69,7 +69,7 @@ def CalculateVariables(default_variables, params):
 
 def CalculateGeneratorInputInfo(params):
     """Calculate the generator specific info that gets fed to input (called by
-  gyp)."""
+    gyp)."""
     generator_flags = params.get("generator_flags", {})
     if generator_flags.get("adjust_static_libraries", False):
         global generator_wants_static_library_dependencies_adjusted
@@ -86,10 +86,10 @@ def GetAllIncludeDirectories(
 ):
     """Calculate the set of include directories to be used.
 
-  Returns:
-    A list including all the include_dir's specified for every target followed
-    by any include directories that were added as cflag compiler options.
-  """
+    Returns:
+      A list including all the include_dir's specified for every target followed
+      by any include directories that were added as cflag compiler options.
+    """
 
     gyp_includes_set = set()
     compiler_includes_list = []
@@ -178,11 +178,11 @@ def GetAllIncludeDirectories(
 def GetCompilerPath(target_list, data, options):
     """Determine a command that can be used to invoke the compiler.
 
-  Returns:
-    If this is a gyp project that has explicit make settings, try to determine
-    the compiler from that.  Otherwise, see if a compiler was specified via the
-    CC_target environment variable.
-  """
+    Returns:
+      If this is a gyp project that has explicit make settings, try to determine
+      the compiler from that.  Otherwise, see if a compiler was specified via the
+      CC_target environment variable.
+    """
     # First, see if the compiler is configured in make's settings.
     build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
     make_global_settings_dict = data[build_file].get("make_global_settings", {})
@@ -202,10 +202,10 @@ def GetCompilerPath(target_list, data, options):
 def GetAllDefines(target_list, target_dicts, data, config_name, params, compiler_path):
     """Calculate the defines for a project.
 
-  Returns:
-    A dict that includes explicit defines declared in gyp files along with all
-    of the default defines that the compiler uses.
-  """
+    Returns:
+      A dict that includes explicit defines declared in gyp files along with all
+      of the default defines that the compiler uses.
+    """
 
     # Get defines declared in the gyp files.
     all_defines = {}
@@ -373,8 +373,8 @@ def GenerateClasspathFile(
     target_list, target_dicts, toplevel_dir, toplevel_build, out_name
 ):
     """Generates a classpath file suitable for symbol navigation and code
-  completion of Java code (such as in Android projects) by finding all
-  .java and .jar files used as action inputs."""
+    completion of Java code (such as in Android projects) by finding all
+    .java and .jar files used as action inputs."""
     gyp.common.EnsureDirExists(out_name)
     result = ET.Element("classpath")
 
@@ -451,8 +451,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
     if params["options"].generator_output:
         raise NotImplementedError("--generator_output not implemented for eclipse")
 
-    user_config = params.get("generator_flags", {}).get("config", None)
-    if user_config:
+    if user_config := params.get("generator_flags", {}).get("config", None):
         GenerateOutputForConfig(target_list, target_dicts, data, params, user_config)
     else:
         config_names = target_dicts[target_list[0]]["configurations"]
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
index a0aa6d9245c81..3c70b81fd2562 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypd.py
@@ -30,7 +30,6 @@
 to change.
 """
 
-
 import pprint
 
 import gyp.common
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
index 36a05deb7eb8b..72d22ff32b92d 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/gypsh.py
@@ -13,7 +13,6 @@
 The expected usage is "gyp -f gypsh -D OS=desired_os".
 """
 
-
 import code
 import sys
 
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
index e860479069aba..1f0995718b59b 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/make.py
@@ -78,7 +78,7 @@ def CalculateVariables(default_variables, params):
 
         # Copy additional generator configuration data from Xcode, which is shared
         # by the Mac Make generator.
-        import gyp.generator.xcode as xcode_generator
+        import gyp.generator.xcode as xcode_generator  # noqa: PLC0415
 
         global generator_additional_non_configuration_keys
         generator_additional_non_configuration_keys = getattr(
@@ -218,7 +218,7 @@ def CalculateGeneratorInputInfo(params):
 
 quiet_cmd_solink_module = SOLINK_MODULE($(TOOLSET)) $@
 cmd_solink_module = $(LINK.$(TOOLSET)) -bundle $(GYP_LDFLAGS) $(LDFLAGS.$(TOOLSET)) -o $@ $(filter-out FORCE_DO_CMD, $^) $(LIBS)
-""" % {'python': sys.executable}  # noqa: E501
+""" % {"python": sys.executable}  # noqa: E501
 
 LINK_COMMANDS_ANDROID = """\
 quiet_cmd_alink = AR($(TOOLSET)) $@
@@ -443,21 +443,27 @@ def CalculateGeneratorInputInfo(params):
 define fixup_dep
 # The depfile may not exist if the input file didn't have any #includes.
 touch $(depfile).raw
-# Fixup path as in (1).""" +
-    (r"""
+# Fixup path as in (1)."""
+    + (
+        r"""
 sed -e "s|^$(notdir $@)|$@|" -re 's/\\\\([^$$])/\/\1/g' $(depfile).raw >> $(depfile)"""
-    if sys.platform == 'win32' else r"""
-sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)""") +
-    r"""
+        if sys.platform == "win32"
+        else r"""
+sed -e "s|^$(notdir $@)|$@|" $(depfile).raw >> $(depfile)"""
+    )
+    + r"""
 # Add extra rules as in (2).
 # We remove slashes and replace spaces with new lines;
 # remove blank lines;
-# delete the first line and append a colon to the remaining lines.""" +
-    ("""
+# delete the first line and append a colon to the remaining lines."""
+    + (
+        """
 sed -e 's/\\\\\\\\$$//' -e 's/\\\\\\\\/\\//g' -e 'y| |\\n|' $(depfile).raw |\\"""
-    if sys.platform == 'win32' else """
-sed -e 's|\\\\||' -e 'y| |\\n|' $(depfile).raw |\\""") +
-    r"""
+        if sys.platform == "win32"
+        else """
+sed -e 's|\\\\||' -e 'y| |\\n|' $(depfile).raw |\\"""
+    )
+    + r"""
   grep -v '^$$'                             |\
   sed -e 1d -e 's|$$|:|'                     \
     >> $(depfile)
@@ -616,7 +622,7 @@ def CalculateGeneratorInputInfo(params):
 
 quiet_cmd_infoplist = INFOPLIST $@
 cmd_infoplist = $(CC.$(TOOLSET)) -E -P -Wno-trigraphs -x c $(INFOPLIST_DEFINES) "$<" -o "$@"
-""" % {'python': sys.executable}  # noqa: E501
+""" % {"python": sys.executable}  # noqa: E501
 
 
 def WriteRootHeaderSuffixRules(writer):
@@ -733,11 +739,13 @@ def QuoteIfNecessary(string):
         string = '"' + string.replace('"', '\\"') + '"'
     return string
 
+
 def replace_sep(string):
-    if sys.platform == 'win32':
-        string = string.replace('\\\\', '/').replace('\\', '/')
+    if sys.platform == "win32":
+        string = string.replace("\\\\", "/").replace("\\", "/")
     return string
 
+
 def StringToMakefileVariable(string):
     """Convert a string to a value that is acceptable as a make variable name."""
     return re.sub("[^a-zA-Z0-9_]", "_", string)
@@ -1439,9 +1447,7 @@ def WriteSources(
 
         for obj in objs:
             assert " " not in obj, "Spaces in object filenames not supported (%s)" % obj
-        self.WriteLn(
-            "# Add to the list of files we specially track dependencies for."
-        )
+        self.WriteLn("# Add to the list of files we specially track dependencies for.")
         self.WriteLn("all_deps += $(OBJS)")
         self.WriteLn()
 
@@ -1465,8 +1471,7 @@ def WriteSources(
                 order_only=True,
             )
 
-        pchdeps = precompiled_header.GetObjDependencies(compilable, objs)
-        if pchdeps:
+        if pchdeps := precompiled_header.GetObjDependencies(compilable, objs):
             self.WriteLn("# Dependencies from obj files to their precompiled headers")
             for source, obj, gch in pchdeps:
                 self.WriteLn(f"{obj}: {gch}")
@@ -1499,7 +1504,8 @@ def WriteSources(
                     "$(OBJS): GYP_OBJCFLAGS := "
                     "$(DEFS_$(BUILDTYPE)) "
                     "$(INCS_$(BUILDTYPE)) "
-                    "%s " % precompiled_header.GetInclude("m")
+                    "%s "
+                    % precompiled_header.GetInclude("m")
                     + "$(CFLAGS_$(BUILDTYPE)) "
                     "$(CFLAGS_C_$(BUILDTYPE)) "
                     "$(CFLAGS_OBJC_$(BUILDTYPE))"
@@ -1508,7 +1514,8 @@ def WriteSources(
                     "$(OBJS): GYP_OBJCXXFLAGS := "
                     "$(DEFS_$(BUILDTYPE)) "
                     "$(INCS_$(BUILDTYPE)) "
-                    "%s " % precompiled_header.GetInclude("mm")
+                    "%s "
+                    % precompiled_header.GetInclude("mm")
                     + "$(CFLAGS_$(BUILDTYPE)) "
                     "$(CFLAGS_CC_$(BUILDTYPE)) "
                     "$(CFLAGS_OBJCC_$(BUILDTYPE))"
@@ -1600,8 +1607,7 @@ def ComputeOutputBasename(self, spec):
 
         target_prefix = spec.get("product_prefix", target_prefix)
         target = spec.get("product_name", target)
-        product_ext = spec.get("product_extension")
-        if product_ext:
+        if product_ext := spec.get("product_extension"):
             target_ext = "." + product_ext
 
         return target_prefix + target + target_ext
@@ -1882,7 +1888,7 @@ def WriteTarget(
                 self.flavor not in ("mac", "openbsd", "netbsd", "win")
                 and not self.is_standalone_static_library
             ):
-                if self.flavor in ("linux", "android"):
+                if self.flavor in ("linux", "android", "openharmony"):
                     self.WriteMakeRule(
                         [self.output_binary],
                         link_deps,
@@ -1896,7 +1902,7 @@ def WriteTarget(
                         part_of_all,
                         postbuilds=postbuilds,
                     )
-            elif self.flavor in ("linux", "android"):
+            elif self.flavor in ("linux", "android", "openharmony"):
                 self.WriteMakeRule(
                     [self.output_binary],
                     link_deps,
@@ -2383,11 +2389,15 @@ def WriteAutoRegenerationRule(params, root_makefile, makefile_name, build_files)
         % {
             "makefile_name": makefile_name,
             "deps": replace_sep(
-                " ".join(SourceifyAndQuoteSpaces(bf) for bf in build_files)
+                " ".join(sorted(SourceifyAndQuoteSpaces(bf) for bf in build_files))
+            ),
+            "cmd": replace_sep(
+                gyp.common.EncodePOSIXShellList(
+                    [gyp_binary, "-fmake"]
+                    + gyp.RegenerateFlags(options)
+                    + build_files_args
+                )
             ),
-            "cmd": replace_sep(gyp.common.EncodePOSIXShellList(
-                [gyp_binary, "-fmake"] + gyp.RegenerateFlags(options) + build_files_args
-            )),
         }
     )
 
@@ -2460,8 +2470,8 @@ def CalculateMakefilePath(build_file, base_name):
     # wasm-ld doesn't support --start-group/--end-group
     link_commands = LINK_COMMANDS_LINUX
     if flavor in ["wasi", "wasm"]:
-        link_commands = link_commands.replace(' -Wl,--start-group', '').replace(
-            ' -Wl,--end-group', ''
+        link_commands = link_commands.replace(" -Wl,--start-group", "").replace(
+            " -Wl,--end-group", ""
         )
 
     CC_target = replace_sep(GetEnvironFallback(("CC_target", "CC"), "$(CC)"))
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
index b4aea2e69a193..3b258ee8f395e 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py
@@ -136,15 +136,15 @@ def _GetDomainAndUserName():
 def _NormalizedSource(source):
     """Normalize the path.
 
-  But not if that gets rid of a variable, as this may expand to something
-  larger than one directory.
+    But not if that gets rid of a variable, as this may expand to something
+    larger than one directory.
 
-  Arguments:
-      source: The path to be normalize.d
+    Arguments:
+        source: The path to be normalize.d
 
-  Returns:
-      The normalized path.
-  """
+    Returns:
+        The normalized path.
+    """
     normalized = os.path.normpath(source)
     if source.count("$") == normalized.count("$"):
         source = normalized
@@ -154,11 +154,11 @@ def _NormalizedSource(source):
 def _FixPath(path, separator="\\"):
     """Convert paths to a form that will make sense in a vcproj file.
 
-  Arguments:
-    path: The path to convert, may contain / etc.
-  Returns:
-    The path with all slashes made into backslashes.
-  """
+    Arguments:
+      path: The path to convert, may contain / etc.
+    Returns:
+      The path with all slashes made into backslashes.
+    """
     if (
         fixpath_prefix
         and path
@@ -179,11 +179,11 @@ def _FixPath(path, separator="\\"):
 
 def _IsWindowsAbsPath(path):
     """
-  On Cygwin systems Python needs a little help determining if a path
-  is an absolute Windows path or not, so that
-  it does not treat those as relative, which results in bad paths like:
-  '..\\C:\\\\some_source_code_file.cc'
-  """
+    On Cygwin systems Python needs a little help determining if a path
+    is an absolute Windows path or not, so that
+    it does not treat those as relative, which results in bad paths like:
+    '..\\C:\\\\some_source_code_file.cc'
+    """
     return path.startswith(("c:", "C:"))
 
 
@@ -197,22 +197,22 @@ def _ConvertSourcesToFilterHierarchy(
 ):
     """Converts a list split source file paths into a vcproj folder hierarchy.
 
-  Arguments:
-    sources: A list of source file paths split.
-    prefix: A list of source file path layers meant to apply to each of sources.
-    excluded: A set of excluded files.
-    msvs_version: A MSVSVersion object.
-
-  Returns:
-    A hierarchy of filenames and MSVSProject.Filter objects that matches the
-    layout of the source tree.
-    For example:
-    _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
-                                     prefix=['joe'])
-    -->
-    [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
-     MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
-  """
+    Arguments:
+      sources: A list of source file paths split.
+      prefix: A list of source file path layers meant to apply to each of sources.
+      excluded: A set of excluded files.
+      msvs_version: A MSVSVersion object.
+
+    Returns:
+      A hierarchy of filenames and MSVSProject.Filter objects that matches the
+      layout of the source tree.
+      For example:
+      _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
+                                       prefix=['joe'])
+      -->
+      [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
+       MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
+    """
     if not prefix:
         prefix = []
     result = []
@@ -361,7 +361,6 @@ def _ConfigWindowsTargetPlatformVersion(config_data, version):
 def _BuildCommandLineForRuleRaw(
     spec, cmd, cygwin_shell, has_input_path, quote_cmd, do_setup_env
 ):
-
     if [x for x in cmd if "$(InputDir)" in x]:
         input_dir_preamble = (
             "set INPUTDIR=$(InputDir)\n"
@@ -425,8 +424,7 @@ def _BuildCommandLineForRuleRaw(
         # Return the path with forward slashes because the command using it might
         # not support backslashes.
         arguments = [
-            i if (i[:1] in "/-" or "=" in i) else _FixPath(i, "/")
-            for i in cmd[1:]
+            i if (i[:1] in "/-" or "=" in i) else _FixPath(i, "/") for i in cmd[1:]
         ]
         arguments = [i.replace("$(InputDir)", "%INPUTDIR%") for i in arguments]
         arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
@@ -459,17 +457,17 @@ def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
 def _AddActionStep(actions_dict, inputs, outputs, description, command):
     """Merge action into an existing list of actions.
 
-  Care must be taken so that actions which have overlapping inputs either don't
-  get assigned to the same input, or get collapsed into one.
-
-  Arguments:
-    actions_dict: dictionary keyed on input name, which maps to a list of
-      dicts describing the actions attached to that input file.
-    inputs: list of inputs
-    outputs: list of outputs
-    description: description of the action
-    command: command line to execute
-  """
+    Care must be taken so that actions which have overlapping inputs either don't
+    get assigned to the same input, or get collapsed into one.
+
+    Arguments:
+      actions_dict: dictionary keyed on input name, which maps to a list of
+        dicts describing the actions attached to that input file.
+      inputs: list of inputs
+      outputs: list of outputs
+      description: description of the action
+      command: command line to execute
+    """
     # Require there to be at least one input (call sites will ensure this).
     assert inputs
 
@@ -496,15 +494,15 @@ def _AddCustomBuildToolForMSVS(
 ):
     """Add a custom build tool to execute something.
 
-  Arguments:
-    p: the target project
-    spec: the target project dict
-    primary_input: input file to attach the build tool to
-    inputs: list of inputs
-    outputs: list of outputs
-    description: description of the action
-    cmd: command line to execute
-  """
+    Arguments:
+      p: the target project
+      spec: the target project dict
+      primary_input: input file to attach the build tool to
+      inputs: list of inputs
+      outputs: list of outputs
+      description: description of the action
+      cmd: command line to execute
+    """
     inputs = _FixPaths(inputs)
     outputs = _FixPaths(outputs)
     tool = MSVSProject.Tool(
@@ -526,12 +524,12 @@ def _AddCustomBuildToolForMSVS(
 def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
     """Add actions accumulated into an actions_dict, merging as needed.
 
-  Arguments:
-    p: the target project
-    spec: the target project dict
-    actions_dict: dictionary keyed on input name, which maps to a list of
-        dicts describing the actions attached to that input file.
-  """
+    Arguments:
+      p: the target project
+      spec: the target project dict
+      actions_dict: dictionary keyed on input name, which maps to a list of
+          dicts describing the actions attached to that input file.
+    """
     for primary_input in actions_dict:
         inputs = OrderedSet()
         outputs = OrderedSet()
@@ -559,12 +557,12 @@ def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
 def _RuleExpandPath(path, input_file):
     """Given the input file to which a rule applied, string substitute a path.
 
-  Arguments:
-    path: a path to string expand
-    input_file: the file to which the rule applied.
-  Returns:
-    The string substituted path.
-  """
+    Arguments:
+      path: a path to string expand
+      input_file: the file to which the rule applied.
+    Returns:
+      The string substituted path.
+    """
     path = path.replace(
         "$(InputName)", os.path.splitext(os.path.split(input_file)[1])[0]
     )
@@ -580,24 +578,24 @@ def _RuleExpandPath(path, input_file):
 def _FindRuleTriggerFiles(rule, sources):
     """Find the list of files which a particular rule applies to.
 
-  Arguments:
-    rule: the rule in question
-    sources: the set of all known source files for this project
-  Returns:
-    The list of sources that trigger a particular rule.
-  """
+    Arguments:
+      rule: the rule in question
+      sources: the set of all known source files for this project
+    Returns:
+      The list of sources that trigger a particular rule.
+    """
     return rule.get("rule_sources", [])
 
 
 def _RuleInputsAndOutputs(rule, trigger_file):
     """Find the inputs and outputs generated by a rule.
 
-  Arguments:
-    rule: the rule in question.
-    trigger_file: the main trigger for this rule.
-  Returns:
-    The pair of (inputs, outputs) involved in this rule.
-  """
+    Arguments:
+      rule: the rule in question.
+      trigger_file: the main trigger for this rule.
+    Returns:
+      The pair of (inputs, outputs) involved in this rule.
+    """
     raw_inputs = _FixPaths(rule.get("inputs", []))
     raw_outputs = _FixPaths(rule.get("outputs", []))
     inputs = OrderedSet()
@@ -613,13 +611,13 @@ def _RuleInputsAndOutputs(rule, trigger_file):
 def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
     """Generate a native rules file.
 
-  Arguments:
-    p: the target project
-    rules: the set of rules to include
-    output_dir: the directory in which the project/gyp resides
-    spec: the project dict
-    options: global generator options
-  """
+    Arguments:
+      p: the target project
+      rules: the set of rules to include
+      output_dir: the directory in which the project/gyp resides
+      spec: the project dict
+      options: global generator options
+    """
     rules_filename = "{}{}.rules".format(spec["target_name"], options.suffix)
     rules_file = MSVSToolFile.Writer(
         os.path.join(output_dir, rules_filename), spec["target_name"]
@@ -658,14 +656,14 @@ def _Cygwinify(path):
 def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to_add):
     """Generate an external makefile to do a set of rules.
 
-  Arguments:
-    rules: the list of rules to include
-    output_dir: path containing project and gyp files
-    spec: project specification data
-    sources: set of sources known
-    options: global generator options
-    actions_to_add: The list of actions we will add to.
-  """
+    Arguments:
+      rules: the list of rules to include
+      output_dir: path containing project and gyp files
+      spec: project specification data
+      sources: set of sources known
+      options: global generator options
+      actions_to_add: The list of actions we will add to.
+    """
     filename = "{}_rules{}.mk".format(spec["target_name"], options.suffix)
     mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
     # Find cygwin style versions of some paths.
@@ -743,17 +741,17 @@ def _GenerateExternalRules(rules, output_dir, spec, sources, options, actions_to
 def _EscapeEnvironmentVariableExpansion(s):
     """Escapes % characters.
 
-  Escapes any % characters so that Windows-style environment variable
-  expansions will leave them alone.
-  See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
-  to understand why we have to do this.
+    Escapes any % characters so that Windows-style environment variable
+    expansions will leave them alone.
+    See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
+    to understand why we have to do this.
 
-  Args:
-      s: The string to be escaped.
+    Args:
+        s: The string to be escaped.
 
-  Returns:
-      The escaped string.
-  """
+    Returns:
+        The escaped string.
+    """
     s = s.replace("%", "%%")
     return s
 
@@ -764,17 +762,17 @@ def _EscapeEnvironmentVariableExpansion(s):
 def _EscapeCommandLineArgumentForMSVS(s):
     """Escapes a Windows command-line argument.
 
-  So that the Win32 CommandLineToArgv function will turn the escaped result back
-  into the original string.
-  See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
-  ("Parsing C++ Command-Line Arguments") to understand why we have to do
-  this.
+    So that the Win32 CommandLineToArgv function will turn the escaped result back
+    into the original string.
+    See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
+    ("Parsing C++ Command-Line Arguments") to understand why we have to do
+    this.
 
-  Args:
-      s: the string to be escaped.
-  Returns:
-      the escaped string.
-  """
+    Args:
+        s: the string to be escaped.
+    Returns:
+        the escaped string.
+    """
 
     def _Replace(match):
         # For a literal quote, CommandLineToArgv requires an odd number of
@@ -795,24 +793,24 @@ def _Replace(match):
 def _EscapeVCProjCommandLineArgListItem(s):
     """Escapes command line arguments for MSVS.
 
-  The VCProj format stores string lists in a single string using commas and
-  semi-colons as separators, which must be quoted if they are to be
-  interpreted literally. However, command-line arguments may already have
-  quotes, and the VCProj parser is ignorant of the backslash escaping
-  convention used by CommandLineToArgv, so the command-line quotes and the
-  VCProj quotes may not be the same quotes. So to store a general
-  command-line argument in a VCProj list, we need to parse the existing
-  quoting according to VCProj's convention and quote any delimiters that are
-  not already quoted by that convention. The quotes that we add will also be
-  seen by CommandLineToArgv, so if backslashes precede them then we also have
-  to escape those backslashes according to the CommandLineToArgv
-  convention.
-
-  Args:
-      s: the string to be escaped.
-  Returns:
-      the escaped string.
-  """
+    The VCProj format stores string lists in a single string using commas and
+    semi-colons as separators, which must be quoted if they are to be
+    interpreted literally. However, command-line arguments may already have
+    quotes, and the VCProj parser is ignorant of the backslash escaping
+    convention used by CommandLineToArgv, so the command-line quotes and the
+    VCProj quotes may not be the same quotes. So to store a general
+    command-line argument in a VCProj list, we need to parse the existing
+    quoting according to VCProj's convention and quote any delimiters that are
+    not already quoted by that convention. The quotes that we add will also be
+    seen by CommandLineToArgv, so if backslashes precede them then we also have
+    to escape those backslashes according to the CommandLineToArgv
+    convention.
+
+    Args:
+        s: the string to be escaped.
+    Returns:
+        the escaped string.
+    """
 
     def _Replace(match):
         # For a non-literal quote, CommandLineToArgv requires an even number of
@@ -896,15 +894,15 @@ def _GenerateRulesForMSVS(
 ):
     """Generate all the rules for a particular project.
 
-  Arguments:
-    p: the project
-    output_dir: directory to emit rules to
-    options: global options passed to the generator
-    spec: the specification for this project
-    sources: the set of all known source files in this project
-    excluded_sources: the set of sources excluded from normal processing
-    actions_to_add: deferred list of actions to add in
-  """
+    Arguments:
+      p: the project
+      output_dir: directory to emit rules to
+      options: global options passed to the generator
+      spec: the specification for this project
+      sources: the set of all known source files in this project
+      excluded_sources: the set of sources excluded from normal processing
+      actions_to_add: deferred list of actions to add in
+    """
     rules = spec.get("rules", [])
     rules_native = [r for r in rules if not int(r.get("msvs_external_rule", 0))]
     rules_external = [r for r in rules if int(r.get("msvs_external_rule", 0))]
@@ -946,12 +944,12 @@ def _AdjustSourcesForRules(rules, sources, excluded_sources, is_msbuild):
 def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
     """Take inputs with actions attached out of the list of exclusions.
 
-  Arguments:
-    excluded_sources: list of source files not to be built.
-    actions_to_add: dict of actions keyed on source file they're attached to.
-  Returns:
-    excluded_sources with files that have actions attached removed.
-  """
+    Arguments:
+      excluded_sources: list of source files not to be built.
+      actions_to_add: dict of actions keyed on source file they're attached to.
+    Returns:
+      excluded_sources with files that have actions attached removed.
+    """
     must_keep = OrderedSet(_FixPaths(actions_to_add.keys()))
     return [s for s in excluded_sources if s not in must_keep]
 
@@ -963,14 +961,14 @@ def _GetDefaultConfiguration(spec):
 def _GetGuidOfProject(proj_path, spec):
     """Get the guid for the project.
 
-  Arguments:
-    proj_path: Path of the vcproj or vcxproj file to generate.
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    the guid.
-  Raises:
-    ValueError: if the specified GUID is invalid.
-  """
+    Arguments:
+      proj_path: Path of the vcproj or vcxproj file to generate.
+      spec: The target dictionary containing the properties of the target.
+    Returns:
+      the guid.
+    Raises:
+      ValueError: if the specified GUID is invalid.
+    """
     # Pluck out the default configuration.
     default_config = _GetDefaultConfiguration(spec)
     # Decide the guid of the project.
@@ -989,13 +987,13 @@ def _GetGuidOfProject(proj_path, spec):
 def _GetMsbuildToolsetOfProject(proj_path, spec, version):
     """Get the platform toolset for the project.
 
-  Arguments:
-    proj_path: Path of the vcproj or vcxproj file to generate.
-    spec: The target dictionary containing the properties of the target.
-    version: The MSVSVersion object.
-  Returns:
-    the platform toolset string or None.
-  """
+    Arguments:
+      proj_path: Path of the vcproj or vcxproj file to generate.
+      spec: The target dictionary containing the properties of the target.
+      version: The MSVSVersion object.
+    Returns:
+      the platform toolset string or None.
+    """
     # Pluck out the default configuration.
     default_config = _GetDefaultConfiguration(spec)
     toolset = default_config.get("msbuild_toolset")
@@ -1009,14 +1007,14 @@ def _GetMsbuildToolsetOfProject(proj_path, spec, version):
 def _GenerateProject(project, options, version, generator_flags, spec):
     """Generates a vcproj file.
 
-  Arguments:
-    project: the MSVSProject object.
-    options: global generator options.
-    version: the MSVSVersion object.
-    generator_flags: dict of generator-specific flags.
-  Returns:
-    A list of source files that cannot be found on disk.
-  """
+    Arguments:
+      project: the MSVSProject object.
+      options: global generator options.
+      version: the MSVSVersion object.
+      generator_flags: dict of generator-specific flags.
+    Returns:
+      A list of source files that cannot be found on disk.
+    """
     default_config = _GetDefaultConfiguration(project.spec)
 
     # Skip emitting anything if told to with msvs_existing_vcproj option.
@@ -1032,12 +1030,12 @@ def _GenerateProject(project, options, version, generator_flags, spec):
 def _GenerateMSVSProject(project, options, version, generator_flags):
     """Generates a .vcproj file.  It may create .rules and .user files too.
 
-  Arguments:
-    project: The project object we will generate the file for.
-    options: Global options passed to the generator.
-    version: The VisualStudioVersion object.
-    generator_flags: dict of generator-specific flags.
-  """
+    Arguments:
+      project: The project object we will generate the file for.
+      options: Global options passed to the generator.
+      version: The VisualStudioVersion object.
+      generator_flags: dict of generator-specific flags.
+    """
     spec = project.spec
     gyp.common.EnsureDirExists(project.path)
 
@@ -1094,11 +1092,11 @@ def _GenerateMSVSProject(project, options, version, generator_flags):
 def _GetUniquePlatforms(spec):
     """Returns the list of unique platforms for this spec, e.g ['win32', ...].
 
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The MSVSUserFile object created.
-  """
+    Arguments:
+      spec: The target dictionary containing the properties of the target.
+    Returns:
+      The list of unique platforms for this spec.
+    """
     # Gather list of unique platforms.
     platforms = OrderedSet()
     for configuration in spec["configurations"]:
@@ -1110,14 +1108,14 @@ def _GetUniquePlatforms(spec):
 def _CreateMSVSUserFile(proj_path, version, spec):
     """Generates a .user file for the user running this Gyp program.
 
-  Arguments:
-    proj_path: The path of the project file being created.  The .user file
-               shares the same path (with an appropriate suffix).
-    version: The VisualStudioVersion object.
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The MSVSUserFile object created.
-  """
+    Arguments:
+      proj_path: The path of the project file being created.  The .user file
+                 shares the same path (with an appropriate suffix).
+      version: The VisualStudioVersion object.
+      spec: The target dictionary containing the properties of the target.
+    Returns:
+      The MSVSUserFile object created.
+    """
     (domain, username) = _GetDomainAndUserName()
     vcuser_filename = ".".join([proj_path, domain, username, "user"])
     user_file = MSVSUserFile.Writer(vcuser_filename, version, spec["target_name"])
@@ -1127,14 +1125,14 @@ def _CreateMSVSUserFile(proj_path, version, spec):
 def _GetMSVSConfigurationType(spec, build_file):
     """Returns the configuration type for this project.
 
-  It's a number defined by Microsoft.  May raise an exception.
+    It's a number defined by Microsoft.  May raise an exception.
 
-  Args:
-      spec: The target dictionary containing the properties of the target.
-      build_file: The path of the gyp file.
-  Returns:
-      An integer, the configuration type.
-  """
+    Args:
+        spec: The target dictionary containing the properties of the target.
+        build_file: The path of the gyp file.
+    Returns:
+        An integer, the configuration type.
+    """
     try:
         config_type = {
             "executable": "1",  # .exe
@@ -1161,17 +1159,17 @@ def _GetMSVSConfigurationType(spec, build_file):
 def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
     """Adds a configuration to the MSVS project.
 
-  Many settings in a vcproj file are specific to a configuration.  This
-  function the main part of the vcproj file that's configuration specific.
-
-  Arguments:
-    p: The target project being generated.
-    spec: The target dictionary containing the properties of the target.
-    config_type: The configuration type, a number as defined by Microsoft.
-    config_name: The name of the configuration.
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  """
+    Many settings in a vcproj file are specific to a configuration.  This
+    function generates the main part of the vcproj file that's configuration specific.
+
+    Arguments:
+      p: The target project being generated.
+      spec: The target dictionary containing the properties of the target.
+      config_type: The configuration type, a number as defined by Microsoft.
+      config_name: The name of the configuration.
+      config: The dictionary that defines the special processing to be done
+              for this configuration.
+    """
     # Get the information for this configuration
     include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config)
     libraries = _GetLibraries(spec)
@@ -1251,12 +1249,12 @@ def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
 def _GetIncludeDirs(config):
     """Returns the list of directories to be used for #include directives.
 
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  Returns:
-    The list of directory paths.
-  """
+    Arguments:
+      config: The dictionary that defines the special processing to be done
+              for this configuration.
+    Returns:
+      The list of directory paths.
+    """
     # TODO(bradnelson): include_dirs should really be flexible enough not to
     #                   require this sort of thing.
     include_dirs = config.get("include_dirs", []) + config.get(
@@ -1275,12 +1273,12 @@ def _GetIncludeDirs(config):
 def _GetLibraryDirs(config):
     """Returns the list of directories to be used for library search paths.
 
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  Returns:
-    The list of directory paths.
-  """
+    Arguments:
+      config: The dictionary that defines the special processing to be done
+              for this configuration.
+    Returns:
+      The list of directory paths.
+    """
 
     library_dirs = config.get("library_dirs", [])
     library_dirs = _FixPaths(library_dirs)
@@ -1290,11 +1288,11 @@ def _GetLibraryDirs(config):
 def _GetLibraries(spec):
     """Returns the list of libraries for this configuration.
 
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    The list of directory paths.
-  """
+    Arguments:
+      spec: The target dictionary containing the properties of the target.
+    Returns:
+      The list of libraries.
+    """
     libraries = spec.get("libraries", [])
     # Strip out -l, as it is not used on windows (but is needed so we can pass
     # in libraries that are assumed to be in the default library path).
@@ -1316,14 +1314,14 @@ def _GetLibraries(spec):
 def _GetOutputFilePathAndTool(spec, msbuild):
     """Returns the path and tool to use for this target.
 
-  Figures out the path of the file this spec will create and the name of
-  the VC tool that will create it.
+    Figures out the path of the file this spec will create and the name of
+    the VC tool that will create it.
 
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    A triple of (file path, name of the vc tool, name of the msbuild tool)
-  """
+    Arguments:
+      spec: The target dictionary containing the properties of the target.
+    Returns:
+      A triple of (file path, name of the vc tool, name of the msbuild tool)
+    """
     # Select a name for the output file.
     out_file = ""
     vc_tool = ""
@@ -1355,17 +1353,16 @@ def _GetOutputFilePathAndTool(spec, msbuild):
 def _GetOutputTargetExt(spec):
     """Returns the extension for this target, including the dot
 
-  If product_extension is specified, set target_extension to this to avoid
-  MSB8012, returns None otherwise. Ignores any target_extension settings in
-  the input files.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-  Returns:
-    A string with the extension, or None
-  """
-    target_extension = spec.get("product_extension")
-    if target_extension:
+    If product_extension is specified, the target extension is set to it to
+    avoid MSB8012; otherwise returns None. Ignores any target_extension
+    settings in the input files.
+
+    Arguments:
+      spec: The target dictionary containing the properties of the target.
+    Returns:
+      A string with the extension, or None
+    """
+    if target_extension := spec.get("product_extension"):
         return "." + target_extension
     return None
 
@@ -1373,12 +1370,12 @@ def _GetOutputTargetExt(spec):
 def _GetDefines(config):
     """Returns the list of preprocessor definitions for this configuration.
 
-  Arguments:
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-  Returns:
-    The list of preprocessor definitions.
-  """
+    Arguments:
+      config: The dictionary that defines the special processing to be done
+              for this configuration.
+    Returns:
+      The list of preprocessor definitions.
+    """
     defines = []
     for d in config.get("defines", []):
         fd = "=".join([str(dpart) for dpart in d]) if isinstance(d, list) else str(d)
@@ -1412,11 +1409,11 @@ def _GetModuleDefinition(spec):
 def _ConvertToolsToExpectedForm(tools):
     """Convert tools to a form expected by Visual Studio.
 
-  Arguments:
-    tools: A dictionary of settings; the tool name is the key.
-  Returns:
-    A list of Tool objects.
-  """
+    Arguments:
+      tools: A dictionary of settings; the tool name is the key.
+    Returns:
+      A list of Tool objects.
+    """
     tool_list = []
     for tool, settings in tools.items():
         # Collapse settings with lists.
@@ -1439,15 +1436,15 @@ def _ConvertToolsToExpectedForm(tools):
 def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
     """Add to the project file the configuration specified by config.
 
-  Arguments:
-    p: The target project being generated.
-    spec: the target project dict.
-    tools: A dictionary of settings; the tool name is the key.
-    config: The dictionary that defines the special processing to be done
-            for this configuration.
-    config_type: The configuration type, a number as defined by Microsoft.
-    config_name: The name of the configuration.
-  """
+    Arguments:
+      p: The target project being generated.
+      spec: the target project dict.
+      tools: A dictionary of settings; the tool name is the key.
+      config: The dictionary that defines the special processing to be done
+              for this configuration.
+      config_type: The configuration type, a number as defined by Microsoft.
+      config_name: The name of the configuration.
+    """
     attributes = _GetMSVSAttributes(spec, config, config_type)
     # Add in this configuration.
     tool_list = _ConvertToolsToExpectedForm(tools)
@@ -1488,18 +1485,18 @@ def _AddNormalizedSources(sources_set, sources_array):
 def _PrepareListOfSources(spec, generator_flags, gyp_file):
     """Prepare list of sources and excluded sources.
 
-  Besides the sources specified directly in the spec, adds the gyp file so
-  that a change to it will cause a re-compile. Also adds appropriate sources
-  for actions and copies. Assumes later stage will un-exclude files which
-  have custom build steps attached.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    gyp_file: The name of the gyp file.
-  Returns:
-    A pair of (list of sources, list of excluded sources).
-    The sources will be relative to the gyp file.
-  """
+    Besides the sources specified directly in the spec, adds the gyp file so
+    that a change to it will cause a re-compile. Also adds appropriate sources
+    for actions and copies. Assumes later stage will un-exclude files which
+    have custom build steps attached.
+
+    Arguments:
+      spec: The target dictionary containing the properties of the target.
+      gyp_file: The name of the gyp file.
+    Returns:
+      A pair of (list of sources, list of excluded sources).
+      The sources will be relative to the gyp file.
+    """
     sources = OrderedSet()
     _AddNormalizedSources(sources, spec.get("sources", []))
     excluded_sources = OrderedSet()
@@ -1529,19 +1526,19 @@ def _AdjustSourcesAndConvertToFilterHierarchy(
 ):
     """Adjusts the list of sources and excluded sources.
 
-  Also converts the sets to lists.
-
-  Arguments:
-    spec: The target dictionary containing the properties of the target.
-    options: Global generator options.
-    gyp_dir: The path to the gyp file being processed.
-    sources: A set of sources to be included for this project.
-    excluded_sources: A set of sources to be excluded for this project.
-    version: A MSVSVersion object.
-  Returns:
-    A trio of (list of sources, list of excluded sources,
-               path of excluded IDL file)
-  """
+    Also converts the sets to lists.
+
+    Arguments:
+      spec: The target dictionary containing the properties of the target.
+      options: Global generator options.
+      gyp_dir: The path to the gyp file being processed.
+      sources: A set of sources to be included for this project.
+      excluded_sources: A set of sources to be excluded for this project.
+      version: A MSVSVersion object.
+    Returns:
+      A trio of (list of sources, list of excluded sources,
+                 path of excluded IDL file)
+    """
     # Exclude excluded sources coming into the generator.
     excluded_sources.update(OrderedSet(spec.get("sources_excluded", [])))
     # Add excluded sources into sources for good measure.
@@ -1837,8 +1834,11 @@ def _CollapseSingles(parent, node):
     # Recursively explorer the tree of dicts looking for projects which are
     # the sole item in a folder which has the same name as the project. Bring
     # such projects up one level.
-    if (isinstance(node, dict) and len(node) == 1 and
-        next(iter(node)) == parent + ".vcproj"):
+    if (
+        isinstance(node, dict)
+        and len(node) == 1
+        and next(iter(node)) == parent + ".vcproj"
+    ):
         return node[next(iter(node))]
     if not isinstance(node, dict):
         return node
@@ -1907,14 +1907,14 @@ def _GetPlatformOverridesOfProject(spec):
 def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
     """Create a MSVSProject object for the targets found in target list.
 
-  Arguments:
-    target_list: the list of targets to generate project objects for.
-    target_dicts: the dictionary of specifications.
-    options: global generator options.
-    msvs_version: the MSVSVersion object.
-  Returns:
-    A set of created projects, keyed by target.
-  """
+    Arguments:
+      target_list: the list of targets to generate project objects for.
+      target_dicts: the dictionary of specifications.
+      options: global generator options.
+      msvs_version: the MSVSVersion object.
+    Returns:
+      A set of created projects, keyed by target.
+    """
     global fixpath_prefix
     # Generate each project.
     projects = {}
@@ -1958,15 +1958,15 @@ def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
 def _InitNinjaFlavor(params, target_list, target_dicts):
     """Initialize targets for the ninja flavor.
 
-  This sets up the necessary variables in the targets to generate msvs projects
-  that use ninja as an external builder. The variables in the spec are only set
-  if they have not been set. This allows individual specs to override the
-  default values initialized here.
-  Arguments:
-    params: Params provided to the generator.
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-  """
+    This sets up the necessary variables in the targets to generate msvs projects
+    that use ninja as an external builder. The variables in the spec are only set
+    if they have not been set. This allows individual specs to override the
+    default values initialized here.
+    Arguments:
+      params: Params provided to the generator.
+      target_list: List of target pairs: 'base/base.gyp:base'.
+      target_dicts: Dict of target properties keyed on target pair.
+    """
     for qualified_target in target_list:
         spec = target_dicts[qualified_target]
         if spec.get("msvs_external_builder"):
@@ -2077,12 +2077,12 @@ def CalculateGeneratorInputInfo(params):
 def GenerateOutput(target_list, target_dicts, data, params):
     """Generate .sln and .vcproj files.
 
-  This is the entry point for this generator.
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    data: Dictionary containing per .gyp data.
-  """
+    This is the entry point for this generator.
+    Arguments:
+      target_list: List of target pairs: 'base/base.gyp:base'.
+      target_dicts: Dict of target properties keyed on target pair.
+      data: Dictionary containing per .gyp data.
+    """
     global fixpath_prefix
 
     options = params["options"]
@@ -2176,14 +2176,14 @@ def _GenerateMSBuildFiltersFile(
 ):
     """Generate the filters file.
 
-  This file is used by Visual Studio to organize the presentation of source
-  files into folders.
+    This file is used by Visual Studio to organize the presentation of source
+    files into folders.
 
-  Arguments:
-      filters_path: The path of the file to be created.
-      source_files: The hierarchical structure of all the sources.
-      extension_to_rule_name: A dictionary mapping file extensions to rules.
-  """
+    Arguments:
+        filters_path: The path of the file to be created.
+        source_files: The hierarchical structure of all the sources.
+        extension_to_rule_name: A dictionary mapping file extensions to rules.
+    """
     filter_group = []
     source_group = []
     _AppendFiltersForMSBuild(
@@ -2224,14 +2224,14 @@ def _AppendFiltersForMSBuild(
 ):
     """Creates the list of filters and sources to be added in the filter file.
 
-  Args:
-      parent_filter_name: The name of the filter under which the sources are
-          found.
-      sources: The hierarchy of filters and sources to process.
-      extension_to_rule_name: A dictionary mapping file extensions to rules.
-      filter_group: The list to which filter entries will be appended.
-      source_group: The list to which source entries will be appended.
-  """
+    Args:
+        parent_filter_name: The name of the filter under which the sources are
+            found.
+        sources: The hierarchy of filters and sources to process.
+        extension_to_rule_name: A dictionary mapping file extensions to rules.
+        filter_group: The list to which filter entries will be appended.
+        source_group: The list to which source entries will be appended.
+    """
     for source in sources:
         if isinstance(source, MSVSProject.Filter):
             # We have a sub-filter.  Create the name of that sub-filter.
@@ -2275,13 +2275,13 @@ def _MapFileToMsBuildSourceType(
 ):
     """Returns the group and element type of the source file.
 
-  Arguments:
-      source: The source file name.
-      extension_to_rule_name: A dictionary mapping file extensions to rules.
+    Arguments:
+        source: The source file name.
+        extension_to_rule_name: A dictionary mapping file extensions to rules.
 
-  Returns:
-      A pair of (group this file should be part of, the label of element)
-  """
+    Returns:
+        A pair of (group this file should be part of, the label of element)
+    """
     _, ext = os.path.splitext(source)
     ext = ext.lower()
     if ext in extension_to_rule_name:
@@ -2369,22 +2369,22 @@ def _GenerateRulesForMSBuild(
 class MSBuildRule:
     """Used to store information used to generate an MSBuild rule.
 
-  Attributes:
-    rule_name: The rule name, sanitized to use in XML.
-    target_name: The name of the target.
-    after_targets: The name of the AfterTargets element.
-    before_targets: The name of the BeforeTargets element.
-    depends_on: The name of the DependsOn element.
-    compute_output: The name of the ComputeOutput element.
-    dirs_to_make: The name of the DirsToMake element.
-    inputs: The name of the _inputs element.
-    tlog: The name of the _tlog element.
-    extension: The extension this rule applies to.
-    description: The message displayed when this rule is invoked.
-    additional_dependencies: A string listing additional dependencies.
-    outputs: The outputs of this rule.
-    command: The command used to run the rule.
-  """
+    Attributes:
+      rule_name: The rule name, sanitized to use in XML.
+      target_name: The name of the target.
+      after_targets: The name of the AfterTargets element.
+      before_targets: The name of the BeforeTargets element.
+      depends_on: The name of the DependsOn element.
+      compute_output: The name of the ComputeOutput element.
+      dirs_to_make: The name of the DirsToMake element.
+      inputs: The name of the _inputs element.
+      tlog: The name of the _tlog element.
+      extension: The extension this rule applies to.
+      description: The message displayed when this rule is invoked.
+      additional_dependencies: A string listing additional dependencies.
+      outputs: The outputs of this rule.
+      command: The command used to run the rule.
+    """
 
     def __init__(self, rule, spec):
         self.display_name = rule["rule_name"]
@@ -2909,7 +2909,7 @@ def _GetConfigurationCondition(name, settings, spec):
 
 def _GetMSBuildProjectConfigurations(configurations, spec):
     group = ["ItemGroup", {"Label": "ProjectConfigurations"}]
-    for (name, settings) in sorted(configurations.items()):
+    for name, settings in sorted(configurations.items()):
         configuration, platform = _GetConfigurationAndPlatform(name, settings, spec)
         designation = f"{configuration}|{platform}"
         group.append(
@@ -3003,10 +3003,11 @@ def _GetMSBuildConfigurationDetails(spec, build_file):
         vctools_version = msbuild_attributes.get("VCToolsVersion")
         config_type = msbuild_attributes.get("ConfigurationType")
         _AddConditionalProperty(properties, condition, "ConfigurationType", config_type)
-        spectre_mitigation = msbuild_attributes.get('SpectreMitigation')
+        spectre_mitigation = msbuild_attributes.get("SpectreMitigation")
         if spectre_mitigation:
-            _AddConditionalProperty(properties, condition, "SpectreMitigation",
-                                    spectre_mitigation)
+            _AddConditionalProperty(
+                properties, condition, "SpectreMitigation", spectre_mitigation
+            )
         if config_type == "Driver":
             _AddConditionalProperty(properties, condition, "DriverType", "WDM")
             _AddConditionalProperty(
@@ -3166,8 +3167,7 @@ def _GetMSBuildAttributes(spec, config, build_file):
         "windows_driver": "Link",
         "static_library": "Lib",
     }
-    msbuild_tool = msbuild_tool_map.get(spec["type"])
-    if msbuild_tool:
+    if msbuild_tool := msbuild_tool_map.get(spec["type"]):
         msbuild_settings = config["finalized_msbuild_settings"]
         out_file = msbuild_settings[msbuild_tool].get("OutputFile")
         if out_file:
@@ -3184,8 +3184,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
     # there are actions.
     # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
     new_paths = []
-    cygwin_dirs = spec.get("msvs_cygwin_dirs", ["."])[0]
-    if cygwin_dirs:
+    if cygwin_dirs := spec.get("msvs_cygwin_dirs", ["."])[0]:
         cyg_path = "$(MSBuildProjectDirectory)\\%s\\bin\\" % _FixPath(cygwin_dirs)
         new_paths.append(cyg_path)
         # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
@@ -3196,7 +3195,7 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
             new_paths = "$(ExecutablePath);" + ";".join(new_paths)
 
     properties = {}
-    for (name, configuration) in sorted(configurations.items()):
+    for name, configuration in sorted(configurations.items()):
         condition = _GetConfigurationCondition(name, configuration, spec)
         attributes = _GetMSBuildAttributes(spec, configuration, build_file)
         msbuild_settings = configuration["finalized_msbuild_settings"]
@@ -3235,14 +3234,14 @@ def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
 def _AddConditionalProperty(properties, condition, name, value):
     """Adds a property / conditional value pair to a dictionary.
 
-  Arguments:
-    properties: The dictionary to be modified.  The key is the name of the
-        property.  The value is itself a dictionary; its key is the value and
-        the value a list of condition for which this value is true.
-    condition: The condition under which the named property has the value.
-    name: The name of the property.
-    value: The value of the property.
-  """
+    Arguments:
+      properties: The dictionary to be modified.  The key is the name of the
+          property.  The value is itself a dictionary; its key is the value and
+          the value a list of conditions for which this value is true.
+      condition: The condition under which the named property has the value.
+      name: The name of the property.
+      value: The value of the property.
+    """
     if name not in properties:
         properties[name] = {}
     values = properties[name]
@@ -3259,13 +3258,13 @@ def _AddConditionalProperty(properties, condition, name, value):
 def _GetMSBuildPropertyGroup(spec, label, properties):
     """Returns a PropertyGroup definition for the specified properties.
 
-  Arguments:
-    spec: The target project dict.
-    label: An optional label for the PropertyGroup.
-    properties: The dictionary to be converted.  The key is the name of the
-        property.  The value is itself a dictionary; its key is the value and
-        the value a list of condition for which this value is true.
-  """
+    Arguments:
+      spec: The target project dict.
+      label: An optional label for the PropertyGroup.
+      properties: The dictionary to be converted.  The key is the name of the
+          property.  The value is itself a dictionary; its key is the value and
+          the value a list of conditions for which this value is true.
+    """
     group = ["PropertyGroup"]
     if label:
         group.append({"Label": label})
@@ -3314,7 +3313,7 @@ def GetEdges(node):
 
 def _GetMSBuildToolSettingsSections(spec, configurations):
     groups = []
-    for (name, configuration) in sorted(configurations.items()):
+    for name, configuration in sorted(configurations.items()):
         msbuild_settings = configuration["finalized_msbuild_settings"]
         group = [
             "ItemDefinitionGroup",
@@ -3370,7 +3369,6 @@ def _FinalizeMSBuildSettings(spec, configuration):
     prebuild = configuration.get("msvs_prebuild")
     postbuild = configuration.get("msvs_postbuild")
     def_file = _GetModuleDefinition(spec)
-    precompiled_header = configuration.get("msvs_precompiled_header")
 
     # Add the information to the appropriate tool
     # TODO(jeanluc) We could optimize and generate these settings only if
@@ -3408,11 +3406,11 @@ def _FinalizeMSBuildSettings(spec, configuration):
         msbuild_settings, "ClCompile", "DisableSpecificWarnings", disabled_warnings
     )
     # Turn on precompiled headers if appropriate.
-    if precompiled_header:
+    if precompiled_header := configuration.get("msvs_precompiled_header"):
         # While MSVC works with just file name eg. "v8_pch.h", ClangCL requires
         # the full path eg. "tools/msvs/pch/v8_pch.h" to find the file.
         # P.S. Only ClangCL defines msbuild_toolset, for MSVC it is None.
-        if configuration.get("msbuild_toolset") != 'ClangCL':
+        if configuration.get("msbuild_toolset") != "ClangCL":
             precompiled_header = os.path.split(precompiled_header)[1]
         _ToolAppend(msbuild_settings, "ClCompile", "PrecompiledHeader", "Use")
         _ToolAppend(
@@ -3474,16 +3472,16 @@ def _GetValueFormattedForMSBuild(tool_name, name, value):
 def _VerifySourcesExist(sources, root_dir):
     """Verifies that all source files exist on disk.
 
-  Checks that all regular source files, i.e. not created at run time,
-  exist on disk.  Missing files cause needless recompilation but no otherwise
-  visible errors.
+    Checks that all regular source files, i.e. not created at run time,
+    exist on disk.  Missing files cause needless recompilation but no otherwise
+    visible errors.
 
-  Arguments:
-    sources: A recursive list of Filter/file names.
-    root_dir: The root directory for the relative path names.
-  Returns:
-    A list of source files that cannot be found on disk.
-  """
+    Arguments:
+      sources: A recursive list of Filter/file names.
+      root_dir: The root directory for the relative path names.
+    Returns:
+      A list of source files that cannot be found on disk.
+    """
     missing_sources = []
     for source in sources:
         if isinstance(source, MSVSProject.Filter):
@@ -3568,17 +3566,13 @@ def _AddSources2(
                 detail.append(["ExcludedFromBuild", "true"])
             else:
                 for config_name, configuration in sorted(excluded_configurations):
-                    condition = _GetConfigurationCondition(
-                        config_name, configuration
-                    )
+                    condition = _GetConfigurationCondition(config_name, configuration)
                     detail.append(
                         ["ExcludedFromBuild", {"Condition": condition}, "true"]
                     )
             # Add precompile if needed
             for config_name, configuration in spec["configurations"].items():
-                precompiled_source = configuration.get(
-                    "msvs_precompiled_source", ""
-                )
+                precompiled_source = configuration.get("msvs_precompiled_source", "")
                 if precompiled_source != "":
                     precompiled_source = _FixPath(precompiled_source)
                     if not extensions_excluded_from_precompile:
@@ -3826,15 +3820,15 @@ def _GenerateMSBuildProject(project, options, version, generator_flags, spec):
 def _GetMSBuildExternalBuilderTargets(spec):
     """Return a list of MSBuild targets for external builders.
 
-  The "Build" and "Clean" targets are always generated.  If the spec contains
-  'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
-  be generated, to support building selected C/C++ files.
+    The "Build" and "Clean" targets are always generated.  If the spec contains
+    'msvs_external_builder_clcompile_cmd', then the "ClCompile" target will also
+    be generated, to support building selected C/C++ files.
 
-  Arguments:
-    spec: The gyp target spec.
-  Returns:
-    List of MSBuild 'Target' specs.
-  """
+    Arguments:
+      spec: The gyp target spec.
+    Returns:
+      List of MSBuild 'Target' specs.
+    """
     build_cmd = _BuildCommandLineForRuleRaw(
         spec, spec["msvs_external_builder_build_cmd"], False, False, False, False
     )
@@ -3882,14 +3876,14 @@ def _GetMSBuildExtensionTargets(targets_files_of_rules):
 def _GenerateActionsForMSBuild(spec, actions_to_add):
     """Add actions accumulated into an actions_to_add, merging as needed.
 
-  Arguments:
-    spec: the target project dict
-    actions_to_add: dictionary keyed on input name, which maps to a list of
-        dicts describing the actions attached to that input file.
+    Arguments:
+      spec: the target project dict
+      actions_to_add: dictionary keyed on input name, which maps to a list of
+          dicts describing the actions attached to that input file.
 
-  Returns:
-    A pair of (action specification, the sources handled by this action).
-  """
+    Returns:
+      A pair of (action specification, the sources handled by this action).
+    """
     sources_handled_by_action = OrderedSet()
     actions_spec = []
     for primary_input, actions in actions_to_add.items():
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
index 8cea3d1479e3b..e3c4758696c40 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs_test.py
@@ -3,7 +3,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-""" Unit tests for the msvs.py file. """
+"""Unit tests for the msvs.py file."""
 
 import unittest
 from io import StringIO
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
index b7ac823d1490d..bc9ddd26545e9 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja.py
@@ -5,6 +5,7 @@
 
 import collections
 import copy
+import ctypes
 import hashlib
 import json
 import multiprocessing
@@ -263,8 +264,7 @@ def ExpandSpecial(self, path, product_dir=None):
         dir.
         """
 
-        PRODUCT_DIR = "$!PRODUCT_DIR"
-        if PRODUCT_DIR in path:
+        if (PRODUCT_DIR := "$!PRODUCT_DIR") in path:
             if product_dir:
                 path = path.replace(PRODUCT_DIR, product_dir)
             else:
@@ -272,8 +272,7 @@ def ExpandSpecial(self, path, product_dir=None):
                 path = path.replace(PRODUCT_DIR + "\\", "")
                 path = path.replace(PRODUCT_DIR, ".")
 
-        INTERMEDIATE_DIR = "$!INTERMEDIATE_DIR"
-        if INTERMEDIATE_DIR in path:
+        if (INTERMEDIATE_DIR := "$!INTERMEDIATE_DIR") in path:
             int_dir = self.GypPathToUniqueOutput("gen")
             # GypPathToUniqueOutput generates a path relative to the product dir,
             # so insert product_dir in front if it is provided.
@@ -1304,7 +1303,7 @@ def WritePchTargets(self, ninja_file, pch_commands):
             ninja_file.build(gch, cmd, input, variables=[(var_name, lang_flag)])
 
     def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
-        """Write out a link step. Fills out target.binary. """
+        """Write out a link step. Fills out target.binary."""
         if self.flavor != "mac" or len(self.archs) == 1:
             return self.WriteLinkForArch(
                 self.ninja, spec, config_name, config, link_deps, compile_deps
@@ -1348,7 +1347,7 @@ def WriteLink(self, spec, config_name, config, link_deps, compile_deps):
     def WriteLinkForArch(
         self, ninja_file, spec, config_name, config, link_deps, compile_deps, arch=None
     ):
-        """Write out a link step. Fills out target.binary. """
+        """Write out a link step. Fills out target.binary."""
         command = {
             "executable": "link",
             "loadable_module": "solink_module",
@@ -1756,11 +1755,9 @@ def GetPostbuildCommand(self, spec, output, output_binary, is_command_start):
             + " && ".join([ninja_syntax.escape(command) for command in postbuilds])
         )
         command_string = (
-            commands
-            + "); G=$$?; "
+            commands + "); G=$$?; "
             # Remove the final output if any postbuild failed.
-            "((exit $$G) || rm -rf %s) " % output
-            + "&& exit $$G)"
+            "((exit $$G) || rm -rf %s) " % output + "&& exit $$G)"
         )
         if is_command_start:
             return "(" + command_string + " && "
@@ -1949,7 +1946,8 @@ def WriteNewNinjaRule(
                 )
             else:
                 rspfile_content = gyp.msvs_emulation.EncodeRspFileList(
-                    args, win_shell_flags.quote)
+                    args, win_shell_flags.quote
+                )
             command = (
                 "%s gyp-win-tool action-wrapper $arch " % sys.executable
                 + rspfile
@@ -1995,7 +1993,7 @@ def CalculateVariables(default_variables, params):
 
         # Copy additional generator configuration data from Xcode, which is shared
         # by the Mac Ninja generator.
-        import gyp.generator.xcode as xcode_generator
+        import gyp.generator.xcode as xcode_generator  # noqa: PLC0415
 
         generator_additional_non_configuration_keys = getattr(
             xcode_generator, "generator_additional_non_configuration_keys", []
@@ -2018,7 +2016,7 @@ def CalculateVariables(default_variables, params):
 
         # Copy additional generator configuration data from VS, which is shared
         # by the Windows Ninja generator.
-        import gyp.generator.msvs as msvs_generator
+        import gyp.generator.msvs as msvs_generator  # noqa: PLC0415
 
         generator_additional_non_configuration_keys = getattr(
             msvs_generator, "generator_additional_non_configuration_keys", []
@@ -2075,20 +2073,17 @@ def OpenOutput(path, mode="w"):
 
 
 def CommandWithWrapper(cmd, wrappers, prog):
-    wrapper = wrappers.get(cmd, "")
-    if wrapper:
+    if wrapper := wrappers.get(cmd, ""):
         return wrapper + " " + prog
     return prog
 
 
 def GetDefaultConcurrentLinks():
     """Returns a best-guess for a number of concurrent links."""
-    pool_size = int(os.environ.get("GYP_LINK_CONCURRENCY") or 0)
-    if pool_size:
+    if pool_size := int(os.environ.get("GYP_LINK_CONCURRENCY") or 0):
         return pool_size
 
     if sys.platform in ("win32", "cygwin"):
-        import ctypes
 
         class MEMORYSTATUSEX(ctypes.Structure):
             _fields_ = [
@@ -2109,8 +2104,8 @@ class MEMORYSTATUSEX(ctypes.Structure):
 
         # VS 2015 uses 20% more working set than VS 2013 and can consume all RAM
         # on a 64 GiB machine.
-        mem_limit = max(1, stat.ullTotalPhys // (5 * (2 ** 30)))  # total / 5GiB
-        hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX") or 2 ** 32))
+        mem_limit = max(1, stat.ullTotalPhys // (5 * (2**30)))  # total / 5GiB
+        hard_cap = max(1, int(os.environ.get("GYP_LINK_CONCURRENCY_MAX") or 2**32))
         return min(mem_limit, hard_cap)
     elif sys.platform.startswith("linux"):
         if os.path.exists("/proc/meminfo"):
@@ -2121,14 +2116,14 @@ class MEMORYSTATUSEX(ctypes.Structure):
                     if not match:
                         continue
                     # Allow 8Gb per link on Linux because Gold is quite memory hungry
-                    return max(1, int(match.group(1)) // (8 * (2 ** 20)))
+                    return max(1, int(match.group(1)) // (8 * (2**20)))
         return 1
     elif sys.platform == "darwin":
         try:
             avail_bytes = int(subprocess.check_output(["sysctl", "-n", "hw.memsize"]))
             # A static library debug build of Chromium's unit_tests takes ~2.7GB, so
             # 4GB per ld process allows for some more bloat.
-            return max(1, avail_bytes // (4 * (2 ** 30)))  # total / 4GB
+            return max(1, avail_bytes // (4 * (2**30)))  # total / 4GB
         except subprocess.CalledProcessError:
             return 1
     else:
@@ -2305,8 +2300,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
             key_prefix = re.sub(r"\.HOST$", ".host", key_prefix)
             wrappers[key_prefix] = os.path.join(build_to_root, value)
 
-    mac_toolchain_dir = generator_flags.get("mac_toolchain_dir", None)
-    if mac_toolchain_dir:
+    if mac_toolchain_dir := generator_flags.get("mac_toolchain_dir", None):
         wrappers["LINK"] = "export DEVELOPER_DIR='%s' &&" % mac_toolchain_dir
 
     if flavor == "win":
@@ -2417,8 +2411,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
             "cc_s",
             description="CC $out",
             command=(
-                "$cc $defines $includes $cflags $cflags_c "
-                "$cflags_pch_c -c $in -o $out"
+                "$cc $defines $includes $cflags $cflags_c $cflags_pch_c -c $in -o $out"
             ),
         )
         master_ninja.rule(
@@ -2529,8 +2522,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
             "solink",
             description="SOLINK $lib",
             restat=True,
-            command=mtime_preserving_solink_base
-            % {"suffix": "@$link_file_list"},
+            command=mtime_preserving_solink_base % {"suffix": "@$link_file_list"},
             rspfile="$link_file_list",
             rspfile_content=(
                 "-Wl,--whole-archive $in $solibs -Wl,--no-whole-archive $libs"
@@ -2715,7 +2707,7 @@ def GenerateOutputForConfig(target_list, target_dicts, data, params, config_name
             command="$env %(python)s gyp-mac-tool compile-ios-framework-header-map "
             "$out $framework $in && $env %(python)s gyp-mac-tool "
             "copy-ios-framework-headers $framework $copy_headers"
-            % {'python': sys.executable},
+            % {"python": sys.executable},
         )
         master_ninja.rule(
             "mac_tool",
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
index 581b14595e143..616bc7aaf015a 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/ninja_test.py
@@ -4,7 +4,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-""" Unit tests for the ninja.py file. """
+"""Unit tests for the ninja.py file."""
 
 import sys
 import unittest
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
index cdf11c3b27b1d..8e05657961fe9 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
@@ -564,12 +564,12 @@ def AddHeaderToTarget(header, pbxp, xct, is_public):
 def ExpandXcodeVariables(string, expansions):
     """Expands Xcode-style $(VARIABLES) in string per the expansions dict.
 
-  In some rare cases, it is appropriate to expand Xcode variables when a
-  project file is generated.  For any substring $(VAR) in string, if VAR is a
-  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
-  Any $(VAR) substring in string for which VAR is not a key in the expansions
-  dict will remain in the returned string.
-  """
+    In some rare cases, it is appropriate to expand Xcode variables when a
+    project file is generated.  For any substring $(VAR) in string, if VAR is a
+    key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
+    Any $(VAR) substring in string for which VAR is not a key in the expansions
+    dict will remain in the returned string.
+    """
 
     matches = _xcode_variable_re.findall(string)
     if matches is None:
@@ -592,9 +592,9 @@ def ExpandXcodeVariables(string, expansions):
 
 def EscapeXcodeDefine(s):
     """We must escape the defines that we give to XCode so that it knows not to
-     split on spaces and to respect backslash and quote literals. However, we
-     must not quote the define, or Xcode will incorrectly interpret variables
-     especially $(inherited)."""
+    split on spaces and to respect backslash and quote literals. However, we
+    must not quote the define, or Xcode will incorrectly interpret variables
+    especially $(inherited)."""
     return re.sub(_xcode_define_re, r"\\\1", s)
 
 
@@ -679,9 +679,9 @@ def GenerateOutput(target_list, target_dicts, data, params):
             project_attributes["BuildIndependentTargetsInParallel"] = "YES"
         if upgrade_check_project_version:
             project_attributes["LastUpgradeCheck"] = upgrade_check_project_version
-            project_attributes[
-                "LastTestingUpgradeCheck"
-            ] = upgrade_check_project_version
+            project_attributes["LastTestingUpgradeCheck"] = (
+                upgrade_check_project_version
+            )
             project_attributes["LastSwiftUpdateCheck"] = upgrade_check_project_version
         pbxp.SetProperty("attributes", project_attributes)
 
@@ -734,8 +734,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
             "loadable_module+xcuitest": "com.apple.product-type.bundle.ui-testing",
             "shared_library+bundle": "com.apple.product-type.framework",
             "executable+extension+bundle": "com.apple.product-type.app-extension",
-            "executable+watch+extension+bundle":
-                "com.apple.product-type.watchkit-extension",
+            "executable+watch+extension+bundle": "com.apple.product-type.watchkit-extension",  # noqa: E501
             "executable+watch+bundle": "com.apple.product-type.application.watchapp",
             "mac_kernel_extension+bundle": "com.apple.product-type.kernel-extension",
         }
@@ -780,8 +779,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
                 type_bundle_key += "+watch+extension+bundle"
             elif is_watch_app:
                 assert is_bundle, (
-                    "ios_watch_app flag requires mac_bundle "
-                    "(target %s)" % target_name
+                    "ios_watch_app flag requires mac_bundle (target %s)" % target_name
                 )
                 type_bundle_key += "+watch+bundle"
             elif is_bundle:
@@ -1103,7 +1101,7 @@ def GenerateOutput(target_list, target_dicts, data, params):
                         eol = " \\"
                     makefile.write(f"    {concrete_output}{eol}\n")
 
-                for (rule_source, concrete_outputs, message, action) in zip(
+                for rule_source, concrete_outputs, message, action in zip(
                     rule["rule_sources"],
                     concrete_outputs_by_rule_source,
                     messages,
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
index b0b51a08a6db4..bfd8c587a3175 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode_test.py
@@ -4,7 +4,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 
-""" Unit tests for the xcode.py file. """
+"""Unit tests for the xcode.py file."""
 
 import sys
 import unittest
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/input.py b/node_modules/node-gyp/gyp/pylib/gyp/input.py
index 994bf6625fb81..4965ff1571c73 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/input.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/input.py
@@ -139,21 +139,21 @@ def IsPathSection(section):
 def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
     """Return a list of all build files included into build_file_path.
 
-  The returned list will contain build_file_path as well as all other files
-  that it included, either directly or indirectly.  Note that the list may
-  contain files that were included into a conditional section that evaluated
-  to false and was not merged into build_file_path's dict.
+    The returned list will contain build_file_path as well as all other files
+    that it included, either directly or indirectly.  Note that the list may
+    contain files that were included into a conditional section that evaluated
+    to false and was not merged into build_file_path's dict.
 
-  aux_data is a dict containing a key for each build file or included build
-  file.  Those keys provide access to dicts whose "included" keys contain
-  lists of all other files included by the build file.
+    aux_data is a dict containing a key for each build file or included build
+    file.  Those keys provide access to dicts whose "included" keys contain
+    lists of all other files included by the build file.
 
-  included should be left at its default None value by external callers.  It
-  is used for recursion.
+    included should be left at its default None value by external callers.  It
+    is used for recursion.
 
-  The returned list will not contain any duplicate entries.  Each build file
-  in the list will be relative to the current directory.
-  """
+    The returned list will not contain any duplicate entries.  Each build file
+    in the list will be relative to the current directory.
+    """
 
     if included is None:
         included = []
@@ -171,10 +171,10 @@ def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
 
 def CheckedEval(file_contents):
     """Return the eval of a gyp file.
-  The gyp file is restricted to dictionaries and lists only, and
-  repeated keys are not allowed.
-  Note that this is slower than eval() is.
-  """
+    The gyp file is restricted to dictionaries and lists only, and
+    repeated keys are not allowed.
+    Note that this is slower than eval() is.
+    """
 
     syntax_tree = ast.parse(file_contents)
     assert isinstance(syntax_tree, ast.Module)
@@ -508,9 +508,9 @@ def CallLoadTargetBuildFile(
 ):
     """Wrapper around LoadTargetBuildFile for parallel processing.
 
-     This wrapper is used when LoadTargetBuildFile is executed in
-     a worker process.
-  """
+    This wrapper is used when LoadTargetBuildFile is executed in
+    a worker process.
+    """
 
     try:
         signal.signal(signal.SIGINT, signal.SIG_IGN)
@@ -559,10 +559,10 @@ class ParallelProcessingError(Exception):
 class ParallelState:
     """Class to keep track of state when processing input files in parallel.
 
-  If build files are loaded in parallel, use this to keep track of
-  state during farming out and processing parallel jobs. It's stored
-  in a global so that the callback function can have access to it.
-  """
+    If build files are loaded in parallel, use this to keep track of
+    state during farming out and processing parallel jobs. It's stored
+    in a global so that the callback function can have access to it.
+    """
 
     def __init__(self):
         # The multiprocessing pool.
@@ -584,8 +584,7 @@ def __init__(self):
         self.error = False
 
     def LoadTargetBuildFileCallback(self, result):
-        """Handle the results of running LoadTargetBuildFile in another process.
-    """
+        """Handle the results of running LoadTargetBuildFile in another process."""
         self.condition.acquire()
         if not result:
             self.error = True
@@ -692,8 +691,8 @@ def FindEnclosingBracketGroup(input_str):
 def IsStrCanonicalInt(string):
     """Returns True if |string| is in its canonical integer form.
 
-  The canonical form is such that str(int(string)) == string.
-  """
+    The canonical form is such that str(int(string)) == string.
+    """
     if isinstance(string, str):
         # This function is called a lot so for maximum performance, avoid
         # involving regexps which would otherwise make the code much
@@ -870,8 +869,9 @@ def ExpandVariables(input, phase, variables, build_file):
         # This works around actions/rules which have more inputs than will
         # fit on the command line.
         if file_list:
-            contents_list = (contents if isinstance(contents, list)
-                             else contents.split(" "))
+            contents_list = (
+                contents if isinstance(contents, list) else contents.split(" ")
+            )
             replacement = contents_list[0]
             if os.path.isabs(replacement):
                 raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
@@ -934,7 +934,6 @@ def ExpandVariables(input, phase, variables, build_file):
                         os.chdir(build_file_dir)
                     sys.path.append(os.getcwd())
                     try:
-
                         parsed_contents = shlex.split(contents)
                         try:
                             py_module = __import__(parsed_contents[0])
@@ -965,7 +964,7 @@ def ExpandVariables(input, phase, variables, build_file):
                             stdout=subprocess.PIPE,
                             shell=use_shell,
                             cwd=build_file_dir,
-                            check=False
+                            check=False,
                         )
                     except Exception as e:
                         raise GypError(
@@ -1003,9 +1002,7 @@ def ExpandVariables(input, phase, variables, build_file):
                 # ],
                 replacement = []
             else:
-                raise GypError(
-                    "Undefined variable " + contents + " in " + build_file
-                )
+                raise GypError("Undefined variable " + contents + " in " + build_file)
         else:
             replacement = variables[contents]
 
@@ -1114,7 +1111,7 @@ def ExpandVariables(input, phase, variables, build_file):
 
 def EvalCondition(condition, conditions_key, phase, variables, build_file):
     """Returns the dict that should be used or None if the result was
-  that nothing should be used."""
+    that nothing should be used."""
     if not isinstance(condition, list):
         raise GypError(conditions_key + " must be a list")
     if len(condition) < 2:
@@ -1159,7 +1156,7 @@ def EvalCondition(condition, conditions_key, phase, variables, build_file):
 
 def EvalSingleCondition(cond_expr, true_dict, false_dict, phase, variables, build_file):
     """Returns true_dict if cond_expr evaluates to true, and false_dict
-  otherwise."""
+    otherwise."""
     # Do expansions on the condition itself.  Since the condition can naturally
     # contain variable references without needing to resort to GYP expansion
     # syntax, this is of dubious value for variables, but someone might want to
@@ -1289,10 +1286,10 @@ def ProcessVariablesAndConditionsInDict(
 ):
     """Handle all variable and command expansion and conditional evaluation.
 
-  This function is the public entry point for all variable expansions and
-  conditional evaluations.  The variables_in dictionary will not be modified
-  by this function.
-  """
+    This function is the public entry point for all variable expansions and
+    conditional evaluations.  The variables_in dictionary will not be modified
+    by this function.
+    """
 
     # Make a copy of the variables_in dict that can be modified during the
     # loading of automatics and the loading of the variables dict.
@@ -1441,15 +1438,15 @@ def ProcessVariablesAndConditionsInList(the_list, phase, variables, build_file):
 def BuildTargetsDict(data):
     """Builds a dict mapping fully-qualified target names to their target dicts.
 
-  |data| is a dict mapping loaded build files by pathname relative to the
-  current directory.  Values in |data| are build file contents.  For each
-  |data| value with a "targets" key, the value of the "targets" key is taken
-  as a list containing target dicts.  Each target's fully-qualified name is
-  constructed from the pathname of the build file (|data| key) and its
-  "target_name" property.  These fully-qualified names are used as the keys
-  in the returned dict.  These keys provide access to the target dicts,
-  the dicts in the "targets" lists.
-  """
+    |data| is a dict mapping loaded build files by pathname relative to the
+    current directory.  Values in |data| are build file contents.  For each
+    |data| value with a "targets" key, the value of the "targets" key is taken
+    as a list containing target dicts.  Each target's fully-qualified name is
+    constructed from the pathname of the build file (|data| key) and its
+    "target_name" property.  These fully-qualified names are used as the keys
+    in the returned dict.  These keys provide access to the target dicts,
+    the dicts in the "targets" lists.
+    """
 
     targets = {}
     for build_file in data["target_build_files"]:
@@ -1467,13 +1464,13 @@ def BuildTargetsDict(data):
 def QualifyDependencies(targets):
     """Make dependency links fully-qualified relative to the current directory.
 
-  |targets| is a dict mapping fully-qualified target names to their target
-  dicts.  For each target in this dict, keys known to contain dependency
-  links are examined, and any dependencies referenced will be rewritten
-  so that they are fully-qualified and relative to the current directory.
-  All rewritten dependencies are suitable for use as keys to |targets| or a
-  similar dict.
-  """
+    |targets| is a dict mapping fully-qualified target names to their target
+    dicts.  For each target in this dict, keys known to contain dependency
+    links are examined, and any dependencies referenced will be rewritten
+    so that they are fully-qualified and relative to the current directory.
+    All rewritten dependencies are suitable for use as keys to |targets| or a
+    similar dict.
+    """
 
     all_dependency_sections = [
         dep + op for dep in dependency_sections for op in ("", "!", "/")
@@ -1516,18 +1513,18 @@ def QualifyDependencies(targets):
 def ExpandWildcardDependencies(targets, data):
     """Expands dependencies specified as build_file:*.
 
-  For each target in |targets|, examines sections containing links to other
-  targets.  If any such section contains a link of the form build_file:*, it
-  is taken as a wildcard link, and is expanded to list each target in
-  build_file.  The |data| dict provides access to build file dicts.
+    For each target in |targets|, examines sections containing links to other
+    targets.  If any such section contains a link of the form build_file:*, it
+    is taken as a wildcard link, and is expanded to list each target in
+    build_file.  The |data| dict provides access to build file dicts.
 
-  Any target that does not wish to be included by wildcard can provide an
-  optional "suppress_wildcard" key in its target dict.  When present and
-  true, a wildcard dependency link will not include such targets.
+    Any target that does not wish to be included by wildcard can provide an
+    optional "suppress_wildcard" key in its target dict.  When present and
+    true, a wildcard dependency link will not include such targets.
 
-  All dependency names, including the keys to |targets| and the values in each
-  dependency list, must be qualified when this function is called.
-  """
+    All dependency names, including the keys to |targets| and the values in each
+    dependency list, must be qualified when this function is called.
+    """
 
     for target, target_dict in targets.items():
         target_build_file = gyp.common.BuildFile(target)
@@ -1573,14 +1570,10 @@ def ExpandWildcardDependencies(targets, data):
                     if int(dependency_target_dict.get("suppress_wildcard", False)):
                         continue
                     dependency_target_name = dependency_target_dict["target_name"]
-                    if (
-                        dependency_target not in {"*", dependency_target_name}
-                    ):
+                    if dependency_target not in {"*", dependency_target_name}:
                         continue
                     dependency_target_toolset = dependency_target_dict["toolset"]
-                    if (
-                        dependency_toolset not in {"*", dependency_target_toolset}
-                    ):
+                    if dependency_toolset not in {"*", dependency_target_toolset}:
                         continue
                     dependency = gyp.common.QualifiedTarget(
                         dependency_build_file,
@@ -1601,7 +1594,7 @@ def Unify(items):
 
 def RemoveDuplicateDependencies(targets):
     """Makes sure every dependency appears only once in all targets's dependency
-  lists."""
+    lists."""
     for target_name, target_dict in targets.items():
         for dependency_key in dependency_sections:
             dependencies = target_dict.get(dependency_key, [])
@@ -1617,25 +1610,21 @@ def Filter(items, item):
 
 def RemoveSelfDependencies(targets):
     """Remove self dependencies from targets that have the prune_self_dependency
-  variable set."""
+    variable set."""
     for target_name, target_dict in targets.items():
         for dependency_key in dependency_sections:
             dependencies = target_dict.get(dependency_key, [])
             if dependencies:
                 for t in dependencies:
                     if t == target_name and (
-                        targets[t]
-                        .get("variables", {})
-                        .get("prune_self_dependency", 0)
+                        targets[t].get("variables", {}).get("prune_self_dependency", 0)
                     ):
-                        target_dict[dependency_key] = Filter(
-                            dependencies, target_name
-                        )
+                        target_dict[dependency_key] = Filter(dependencies, target_name)
 
 
 def RemoveLinkDependenciesFromNoneTargets(targets):
     """Remove dependencies having the 'link_dependency' attribute from the 'none'
-  targets."""
+    targets."""
     for target_name, target_dict in targets.items():
         for dependency_key in dependency_sections:
             dependencies = target_dict.get(dependency_key, [])
@@ -1651,11 +1640,11 @@ def RemoveLinkDependenciesFromNoneTargets(targets):
 class DependencyGraphNode:
     """
 
-  Attributes:
-    ref: A reference to an object that this DependencyGraphNode represents.
-    dependencies: List of DependencyGraphNodes on which this one depends.
-    dependents: List of DependencyGraphNodes that depend on this one.
-  """
+    Attributes:
+      ref: A reference to an object that this DependencyGraphNode represents.
+      dependencies: List of DependencyGraphNodes on which this one depends.
+      dependents: List of DependencyGraphNodes that depend on this one.
+    """
 
     class CircularException(GypError):
         pass
@@ -1721,8 +1710,8 @@ def ExtractNodeRef(node):
 
     def FindCycles(self):
         """
-    Returns a list of cycles in the graph, where each cycle is its own list.
-    """
+        Returns a list of cycles in the graph, where each cycle is its own list.
+        """
         results = []
         visited = set()
 
@@ -1753,21 +1742,21 @@ def DirectDependencies(self, dependencies=None):
 
     def _AddImportedDependencies(self, targets, dependencies=None):
         """Given a list of direct dependencies, adds indirect dependencies that
-    other dependencies have declared to export their settings.
-
-    This method does not operate on self.  Rather, it operates on the list
-    of dependencies in the |dependencies| argument.  For each dependency in
-    that list, if any declares that it exports the settings of one of its
-    own dependencies, those dependencies whose settings are "passed through"
-    are added to the list.  As new items are added to the list, they too will
-    be processed, so it is possible to import settings through multiple levels
-    of dependencies.
-
-    This method is not terribly useful on its own, it depends on being
-    "primed" with a list of direct dependencies such as one provided by
-    DirectDependencies.  DirectAndImportedDependencies is intended to be the
-    public entry point.
-    """
+        other dependencies have declared to export their settings.
+
+        This method does not operate on self.  Rather, it operates on the list
+        of dependencies in the |dependencies| argument.  For each dependency in
+        that list, if any declares that it exports the settings of one of its
+        own dependencies, those dependencies whose settings are "passed through"
+        are added to the list.  As new items are added to the list, they too will
+        be processed, so it is possible to import settings through multiple levels
+        of dependencies.
+
+        This method is not terribly useful on its own, it depends on being
+        "primed" with a list of direct dependencies such as one provided by
+        DirectDependencies.  DirectAndImportedDependencies is intended to be the
+        public entry point.
+        """
 
         if dependencies is None:
             dependencies = []
@@ -1795,9 +1784,9 @@ def _AddImportedDependencies(self, targets, dependencies=None):
 
     def DirectAndImportedDependencies(self, targets, dependencies=None):
         """Returns a list of a target's direct dependencies and all indirect
-    dependencies that a dependency has advertised settings should be exported
-    through the dependency for.
-    """
+        dependencies that a dependency has advertised settings should be exported
+        through the dependency for.
+        """
 
         dependencies = self.DirectDependencies(dependencies)
         return self._AddImportedDependencies(targets, dependencies)
@@ -1823,19 +1812,19 @@ def _LinkDependenciesInternal(
         self, targets, include_shared_libraries, dependencies=None, initial=True
     ):
         """Returns an OrderedSet of dependency targets that are linked
-    into this target.
+        into this target.
 
-    This function has a split personality, depending on the setting of
-    |initial|.  Outside callers should always leave |initial| at its default
-    setting.
+        This function has a split personality, depending on the setting of
+        |initial|.  Outside callers should always leave |initial| at its default
+        setting.
 
-    When adding a target to the list of dependencies, this function will
-    recurse into itself with |initial| set to False, to collect dependencies
-    that are linked into the linkable target for which the list is being built.
+        When adding a target to the list of dependencies, this function will
+        recurse into itself with |initial| set to False, to collect dependencies
+        that are linked into the linkable target for which the list is being built.
 
-    If |include_shared_libraries| is False, the resulting dependencies will not
-    include shared_library targets that are linked into this target.
-    """
+        If |include_shared_libraries| is False, the resulting dependencies will not
+        include shared_library targets that are linked into this target.
+        """
         if dependencies is None:
             # Using a list to get ordered output and a set to do fast "is it
             # already added" checks.
@@ -1917,9 +1906,9 @@ def _LinkDependenciesInternal(
 
     def DependenciesForLinkSettings(self, targets):
         """
-    Returns a list of dependency targets whose link_settings should be merged
-    into this target.
-    """
+        Returns a list of dependency targets whose link_settings should be merged
+        into this target.
+        """
 
         # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
         # link_settings are propagated.  So for now, we will allow it, unless the
@@ -1932,8 +1921,8 @@ def DependenciesForLinkSettings(self, targets):
 
     def DependenciesToLinkAgainst(self, targets):
         """
-    Returns a list of dependency targets that are linked into this target.
-    """
+        Returns a list of dependency targets that are linked into this target.
+        """
         return self._LinkDependenciesInternal(targets, True)
 
 
@@ -2446,7 +2435,7 @@ def SetUpConfigurations(target, target_dict):
 
     merged_configurations = {}
     configs = target_dict["configurations"]
-    for (configuration, old_configuration_dict) in configs.items():
+    for configuration, old_configuration_dict in configs.items():
         # Skip abstract configurations (saves work only).
         if old_configuration_dict.get("abstract"):
             continue
@@ -2454,7 +2443,7 @@ def SetUpConfigurations(target, target_dict):
         # Get the inheritance relationship right by making a copy of the target
         # dict.
         new_configuration_dict = {}
-        for (key, target_val) in target_dict.items():
+        for key, target_val in target_dict.items():
             key_ext = key[-1:]
             key_base = key[:-1] if key_ext in key_suffixes else key
             if key_base not in non_configuration_keys:
@@ -2502,25 +2491,25 @@ def SetUpConfigurations(target, target_dict):
 def ProcessListFiltersInDict(name, the_dict):
     """Process regular expression and exclusion-based filters on lists.
 
-  An exclusion list is in a dict key named with a trailing "!", like
-  "sources!".  Every item in such a list is removed from the associated
-  main list, which in this example, would be "sources".  Removed items are
-  placed into a "sources_excluded" list in the dict.
-
-  Regular expression (regex) filters are contained in dict keys named with a
-  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
-  filters in a dict take the form:
-    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
-                  ['include', '_mac\\.cc$'] ],
-  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
-  _win.cc.  The second filter then includes all files ending in _mac.cc that
-  are now or were once in the "sources" list.  Items matching an "exclude"
-  filter are subject to the same processing as would occur if they were listed
-  by name in an exclusion list (ending in "!").  Items matching an "include"
-  filter are brought back into the main list if previously excluded by an
-  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
-  patterns can still cause items to be excluded after matching an "include".
-  """
+    An exclusion list is in a dict key named with a trailing "!", like
+    "sources!".  Every item in such a list is removed from the associated
+    main list, which in this example, would be "sources".  Removed items are
+    placed into a "sources_excluded" list in the dict.
+
+    Regular expression (regex) filters are contained in dict keys named with a
+    trailing "/", such as "sources/" to operate on the "sources" list.  Regex
+    filters in a dict take the form:
+      'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
+                    ['include', '_mac\\.cc$'] ],
+    The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
+    _win.cc.  The second filter then includes all files ending in _mac.cc that
+    are now or were once in the "sources" list.  Items matching an "exclude"
+    filter are subject to the same processing as would occur if they were listed
+    by name in an exclusion list (ending in "!").  Items matching an "include"
+    filter are brought back into the main list if previously excluded by an
+    exclusion list or exclusion regex filter.  Subsequent matching "exclude"
+    patterns can still cause items to be excluded after matching an "include".
+    """
 
     # Look through the dictionary for any lists whose keys end in "!" or "/".
     # These are lists that will be treated as exclude lists and regular
@@ -2682,12 +2671,12 @@ def ProcessListFiltersInList(name, the_list):
 def ValidateTargetType(target, target_dict):
     """Ensures the 'type' field on the target is one of the known types.
 
-  Arguments:
-    target: string, name of target.
-    target_dict: dict, target spec.
+    Arguments:
+      target: string, name of target.
+      target_dict: dict, target spec.
 
-  Raises an exception on error.
-  """
+    Raises an exception on error.
+    """
     VALID_TARGET_TYPES = (
         "executable",
         "loadable_module",
@@ -2715,14 +2704,14 @@ def ValidateTargetType(target, target_dict):
 
 def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
     """Ensures that the rules sections in target_dict are valid and consistent,
-  and determines which sources they apply to.
+    and determines which sources they apply to.
 
-  Arguments:
-    target: string, name of target.
-    target_dict: dict, target spec containing "rules" and "sources" lists.
-    extra_sources_for_rules: a list of keys to scan for rule matches in
-        addition to 'sources'.
-  """
+    Arguments:
+      target: string, name of target.
+      target_dict: dict, target spec containing "rules" and "sources" lists.
+      extra_sources_for_rules: a list of keys to scan for rule matches in
+          addition to 'sources'.
+    """
 
     # Dicts to map between values found in rules' 'rule_name' and 'extension'
     # keys and the rule dicts themselves.
@@ -2734,9 +2723,7 @@ def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
         # Make sure that there's no conflict among rule names and extensions.
         rule_name = rule["rule_name"]
         if rule_name in rule_names:
-            raise GypError(
-                f"rule {rule_name} exists in duplicate, target {target}"
-            )
+            raise GypError(f"rule {rule_name} exists in duplicate, target {target}")
         rule_names[rule_name] = rule
 
         rule_extension = rule["extension"]
@@ -2835,8 +2822,7 @@ def ValidateActionsInTarget(target, target_dict, build_file):
 
 
 def TurnIntIntoStrInDict(the_dict):
-    """Given dict the_dict, recursively converts all integers into strings.
-  """
+    """Given dict the_dict, recursively converts all integers into strings."""
     # Use items instead of iteritems because there's no need to try to look at
     # reinserted keys and their associated values.
     for k, v in the_dict.items():
@@ -2854,8 +2840,7 @@ def TurnIntIntoStrInDict(the_dict):
 
 
 def TurnIntIntoStrInList(the_list):
-    """Given list the_list, recursively converts all integers into strings.
-  """
+    """Given list the_list, recursively converts all integers into strings."""
     for index, item in enumerate(the_list):
         if isinstance(item, int):
             the_list[index] = str(item)
@@ -2902,9 +2887,9 @@ def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, dat
 def VerifyNoCollidingTargets(targets):
     """Verify that no two targets in the same directory share the same name.
 
-  Arguments:
-    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
-  """
+    Arguments:
+      targets: A list of targets in the form 'path/to/file.gyp:target_name'.
+    """
     # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
     used = {}
     for target in targets:
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
index 70aab4f1787f4..3710178e110ae 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
@@ -8,7 +8,6 @@
 These functions are executed via gyp-mac-tool when using the Makefile generator.
 """
 
-
 import fcntl
 import fnmatch
 import glob
@@ -25,14 +24,13 @@
 
 def main(args):
     executor = MacTool()
-    exit_code = executor.Dispatch(args)
-    if exit_code is not None:
+    if (exit_code := executor.Dispatch(args)) is not None:
         sys.exit(exit_code)
 
 
 class MacTool:
     """This class performs all the Mac tooling steps. The methods can either be
-  executed directly, or dispatched from an argument list."""
+    executed directly, or dispatched from an argument list."""
 
     def Dispatch(self, args):
         """Dispatches a string command to a method."""
@@ -48,7 +46,7 @@ def _CommandifyName(self, name_string):
 
     def ExecCopyBundleResource(self, source, dest, convert_to_binary):
         """Copies a resource file to the bundle/Resources directory, performing any
-    necessary compilation on each resource."""
+        necessary compilation on each resource."""
         convert_to_binary = convert_to_binary == "True"
         extension = os.path.splitext(source)[1].lower()
         if os.path.isdir(source):
@@ -142,7 +140,7 @@ def _CopyStringsFile(self, source, dest):
         #     CFPropertyListCreateFromXMLData(): Old-style plist parser: missing
         #     semicolon in dictionary.
         # on invalid files. Do the same kind of validation.
-        import CoreFoundation
+        import CoreFoundation  # noqa: PLC0415
 
         with open(source, "rb") as in_file:
             s = in_file.read()
@@ -156,15 +154,15 @@ def _CopyStringsFile(self, source, dest):
 
     def _DetectInputEncoding(self, file_name):
         """Reads the first few bytes from file_name and tries to guess the text
-    encoding. Returns None as a guess if it can't detect it."""
+        encoding. Returns None as a guess if it can't detect it."""
         with open(file_name, "rb") as fp:
             try:
                 header = fp.read(3)
             except Exception:
                 return None
-        if header.startswith((b"\xFE\xFF", b"\xFF\xFE")):
+        if header.startswith((b"\xfe\xff", b"\xff\xfe")):
             return "UTF-16"
-        elif header.startswith(b"\xEF\xBB\xBF"):
+        elif header.startswith(b"\xef\xbb\xbf"):
             return "UTF-8"
         else:
             return None
@@ -255,7 +253,7 @@ def ExecFlock(self, lockfile, *cmd_list):
 
     def ExecFilterLibtool(self, *cmd_list):
         """Calls libtool and filters out '/path/to/libtool: file: foo.o has no
-    symbols'."""
+        symbols'."""
         libtool_re = re.compile(
             r"^.*libtool: (?:for architecture: \S* )?file: .* has no symbols$"
         )
@@ -304,7 +302,7 @@ def ExecPackageIosFramework(self, framework):
 
     def ExecPackageFramework(self, framework, version):
         """Takes a path to Something.framework and the Current version of that and
-    sets up all the symlinks."""
+        sets up all the symlinks."""
         # Find the name of the binary based on the part before the ".framework".
         binary = os.path.basename(framework).split(".")[0]
 
@@ -333,7 +331,7 @@ def ExecPackageFramework(self, framework, version):
 
     def _Relink(self, dest, link):
         """Creates a symlink to |dest| named |link|. If |link| already exists,
-    it is overwritten."""
+        it is overwritten."""
         if os.path.lexists(link):
             os.remove(link)
         os.symlink(dest, link)
@@ -358,14 +356,14 @@ def ExecCopyIosFrameworkHeaders(self, framework, *copy_headers):
     def ExecCompileXcassets(self, keys, *inputs):
         """Compiles multiple .xcassets files into a single .car file.
 
-    This invokes 'actool' to compile all the inputs .xcassets files. The
-    |keys| arguments is a json-encoded dictionary of extra arguments to
-    pass to 'actool' when the asset catalogs contains an application icon
-    or a launch image.
+        This invokes 'actool' to compile all the inputs .xcassets files. The
+        |keys| arguments is a json-encoded dictionary of extra arguments to
+        pass to 'actool' when the asset catalogs contains an application icon
+        or a launch image.
 
-    Note that 'actool' does not create the Assets.car file if the asset
-    catalogs does not contains imageset.
-    """
+        Note that 'actool' does not create the Assets.car file if the asset
+        catalogs does not contains imageset.
+        """
         command_line = [
             "xcrun",
             "actool",
@@ -438,13 +436,13 @@ def ExecMergeInfoPlist(self, output, *inputs):
     def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
         """Code sign a bundle.
 
-    This function tries to code sign an iOS bundle, following the same
-    algorithm as Xcode:
-      1. pick the provisioning profile that best match the bundle identifier,
-         and copy it into the bundle as embedded.mobileprovision,
-      2. copy Entitlements.plist from user or SDK next to the bundle,
-      3. code sign the bundle.
-    """
+        This function tries to code sign an iOS bundle, following the same
+        algorithm as Xcode:
+          1. pick the provisioning profile that best match the bundle identifier,
+             and copy it into the bundle as embedded.mobileprovision,
+          2. copy Entitlements.plist from user or SDK next to the bundle,
+          3. code sign the bundle.
+        """
         substitutions, overrides = self._InstallProvisioningProfile(
             provisioning, self._GetCFBundleIdentifier()
         )
@@ -463,16 +461,16 @@ def ExecCodeSignBundle(self, key, entitlements, provisioning, path, preserve):
     def _InstallProvisioningProfile(self, profile, bundle_identifier):
         """Installs embedded.mobileprovision into the bundle.
 
-    Args:
-      profile: string, optional, short name of the .mobileprovision file
-        to use, if empty or the file is missing, the best file installed
-        will be used
-      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+        Args:
+          profile: string, optional, short name of the .mobileprovision file
+            to use, if empty or the file is missing, the best file installed
+            will be used
+          bundle_identifier: string, value of CFBundleIdentifier from Info.plist
 
-    Returns:
-      A tuple containing two dictionary: variables substitutions and values
-      to overrides when generating the entitlements file.
-    """
+        Returns:
+          A tuple containing two dictionary: variables substitutions and values
+          to overrides when generating the entitlements file.
+        """
         source_path, provisioning_data, team_id = self._FindProvisioningProfile(
             profile, bundle_identifier
         )
@@ -488,24 +486,24 @@ def _InstallProvisioningProfile(self, profile, bundle_identifier):
     def _FindProvisioningProfile(self, profile, bundle_identifier):
         """Finds the .mobileprovision file to use for signing the bundle.
 
-    Checks all the installed provisioning profiles (or if the user specified
-    the PROVISIONING_PROFILE variable, only consult it) and select the most
-    specific that correspond to the bundle identifier.
+        Checks all the installed provisioning profiles (or if the user specified
+        the PROVISIONING_PROFILE variable, only consult it) and select the most
+        specific that correspond to the bundle identifier.
 
-    Args:
-      profile: string, optional, short name of the .mobileprovision file
-        to use, if empty or the file is missing, the best file installed
-        will be used
-      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+        Args:
+          profile: string, optional, short name of the .mobileprovision file
+            to use, if empty or the file is missing, the best file installed
+            will be used
+          bundle_identifier: string, value of CFBundleIdentifier from Info.plist
 
-    Returns:
-      A tuple of the path to the selected provisioning profile, the data of
-      the embedded plist in the provisioning profile and the team identifier
-      to use for code signing.
+        Returns:
+          A tuple of the path to the selected provisioning profile, the data of
+          the embedded plist in the provisioning profile and the team identifier
+          to use for code signing.
 
-    Raises:
-      SystemExit: if no .mobileprovision can be used to sign the bundle.
-    """
+        Raises:
+          SystemExit: if no .mobileprovision can be used to sign the bundle.
+        """
         profiles_dir = os.path.join(
             os.environ["HOME"], "Library", "MobileDevice", "Provisioning Profiles"
         )
@@ -553,12 +551,12 @@ def _FindProvisioningProfile(self, profile, bundle_identifier):
     def _LoadProvisioningProfile(self, profile_path):
         """Extracts the plist embedded in a provisioning profile.
 
-    Args:
-      profile_path: string, path to the .mobileprovision file
+        Args:
+          profile_path: string, path to the .mobileprovision file
 
-    Returns:
-      Content of the plist embedded in the provisioning profile as a dictionary.
-    """
+        Returns:
+          Content of the plist embedded in the provisioning profile as a dictionary.
+        """
         with tempfile.NamedTemporaryFile() as temp:
             subprocess.check_call(
                 ["security", "cms", "-D", "-i", profile_path, "-o", temp.name]
@@ -581,16 +579,16 @@ def _MergePlist(self, merged_plist, plist):
     def _LoadPlistMaybeBinary(self, plist_path):
         """Loads into a memory a plist possibly encoded in binary format.
 
-    This is a wrapper around plistlib.readPlist that tries to convert the
-    plist to the XML format if it can't be parsed (assuming that it is in
-    the binary format).
+        This is a wrapper around plistlib.readPlist that tries to convert the
+        plist to the XML format if it can't be parsed (assuming that it is in
+        the binary format).
 
-    Args:
-      plist_path: string, path to a plist file, in XML or binary format
+        Args:
+          plist_path: string, path to a plist file, in XML or binary format
 
-    Returns:
-      Content of the plist as a dictionary.
-    """
+        Returns:
+          Content of the plist as a dictionary.
+        """
         try:
             # First, try to read the file using plistlib that only supports XML,
             # and if an exception is raised, convert a temporary copy to XML and
@@ -606,13 +604,13 @@ def _LoadPlistMaybeBinary(self, plist_path):
     def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
         """Constructs a dictionary of variable substitutions for Entitlements.plist.
 
-    Args:
-      bundle_identifier: string, value of CFBundleIdentifier from Info.plist
-      app_identifier_prefix: string, value for AppIdentifierPrefix
+        Args:
+          bundle_identifier: string, value of CFBundleIdentifier from Info.plist
+          app_identifier_prefix: string, value for AppIdentifierPrefix
 
-    Returns:
-      Dictionary of substitutions to apply when generating Entitlements.plist.
-    """
+        Returns:
+          Dictionary of substitutions to apply when generating Entitlements.plist.
+        """
         return {
             "CFBundleIdentifier": bundle_identifier,
             "AppIdentifierPrefix": app_identifier_prefix,
@@ -621,9 +619,9 @@ def _GetSubstitutions(self, bundle_identifier, app_identifier_prefix):
     def _GetCFBundleIdentifier(self):
         """Extracts CFBundleIdentifier value from Info.plist in the bundle.
 
-    Returns:
-      Value of CFBundleIdentifier in the Info.plist located in the bundle.
-    """
+        Returns:
+          Value of CFBundleIdentifier in the Info.plist located in the bundle.
+        """
         info_plist_path = os.path.join(
             os.environ["TARGET_BUILD_DIR"], os.environ["INFOPLIST_PATH"]
         )
@@ -633,19 +631,19 @@ def _GetCFBundleIdentifier(self):
     def _InstallEntitlements(self, entitlements, substitutions, overrides):
         """Generates and install the ${BundleName}.xcent entitlements file.
 
-    Expands variables "$(variable)" pattern in the source entitlements file,
-    add extra entitlements defined in the .mobileprovision file and the copy
-    the generated plist to "${BundlePath}.xcent".
+        Expands variables "$(variable)" pattern in the source entitlements file,
+        add extra entitlements defined in the .mobileprovision file and the copy
+        the generated plist to "${BundlePath}.xcent".
 
-    Args:
-      entitlements: string, optional, path to the Entitlements.plist template
-        to use, defaults to "${SDKROOT}/Entitlements.plist"
-      substitutions: dictionary, variable substitutions
-      overrides: dictionary, values to add to the entitlements
+        Args:
+          entitlements: string, optional, path to the Entitlements.plist template
+            to use, defaults to "${SDKROOT}/Entitlements.plist"
+          substitutions: dictionary, variable substitutions
+          overrides: dictionary, values to add to the entitlements
 
-    Returns:
-      Path to the generated entitlements file.
-    """
+        Returns:
+          Path to the generated entitlements file.
+        """
         source_path = entitlements
         target_path = os.path.join(
             os.environ["BUILT_PRODUCTS_DIR"], os.environ["PRODUCT_NAME"] + ".xcent"
@@ -665,15 +663,15 @@ def _InstallEntitlements(self, entitlements, substitutions, overrides):
     def _ExpandVariables(self, data, substitutions):
         """Expands variables "$(variable)" in data.
 
-    Args:
-      data: object, can be either string, list or dictionary
-      substitutions: dictionary, variable substitutions to perform
+        Args:
+          data: object, can be either string, list or dictionary
+          substitutions: dictionary, variable substitutions to perform
 
-    Returns:
-      Copy of data where each references to "$(variable)" has been replaced
-      by the corresponding value found in substitutions, or left intact if
-      the key was not found.
-    """
+        Returns:
+          Copy of data where each references to "$(variable)" has been replaced
+          by the corresponding value found in substitutions, or left intact if
+          the key was not found.
+        """
         if isinstance(data, str):
             for key, value in substitutions.items():
                 data = data.replace("$(%s)" % key, value)
@@ -692,15 +690,15 @@ def NextGreaterPowerOf2(x):
 def WriteHmap(output_name, filelist):
     """Generates a header map based on |filelist|.
 
-  Per Mark Mentovai:
-    A header map is structured essentially as a hash table, keyed by names used
-    in #includes, and providing pathnames to the actual files.
+    Per Mark Mentovai:
+      A header map is structured essentially as a hash table, keyed by names used
+      in #includes, and providing pathnames to the actual files.
 
-  The implementation below and the comment above comes from inspecting:
-    http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
-  while also looking at the implementation in clang in:
-    https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
-  """
+    The implementation below and the comment above comes from inspecting:
+      http://www.opensource.apple.com/source/distcc/distcc-2503/distcc_dist/include_server/headermap.py?txt
+    while also looking at the implementation in clang in:
+      https://llvm.org/svn/llvm-project/cfe/trunk/lib/Lex/HeaderMap.cpp
+    """
     magic = 1751998832
     version = 1
     _reserved = 0
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
index ace0cae5ebff2..7c461a8fdf72d 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
@@ -74,8 +74,7 @@ def EncodeRspFileList(args, quote_cmd):
         program = call + " " + os.path.normpath(program)
     else:
         program = os.path.normpath(args[0])
-    return (program + " "
-            + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:]))
+    return program + " " + " ".join(QuoteForRspFile(arg, quote_cmd) for arg in args[1:])
 
 
 def _GenericRetrieve(root, default, path):
@@ -247,9 +246,7 @@ def GetExtension(self):
         the target type.
         """
         ext = self.spec.get("product_extension", None)
-        if ext:
-            return ext
-        return gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
+        return ext or gyp.MSVSUtil.TARGET_TYPE_EXT.get(self.spec["type"], "")
 
     def GetVSMacroEnv(self, base_to_build=None, config=None):
         """Get a dict of variables mapping internal VS macro names to their gyp
@@ -625,8 +622,7 @@ def GetDefFile(self, gyp_to_build_path):
     def _GetDefFileAsLdflags(self, ldflags, gyp_to_build_path):
         """.def files get implicitly converted to a ModuleDefinitionFile for the
         linker in the VS generator. Emulate that behaviour here."""
-        def_file = self.GetDefFile(gyp_to_build_path)
-        if def_file:
+        if def_file := self.GetDefFile(gyp_to_build_path):
             ldflags.append('/DEF:"%s"' % def_file)
 
     def GetPGDName(self, config, expand_special):
@@ -674,14 +670,11 @@ def GetLdflags(
         )
         ld("DelayLoadDLLs", prefix="/DELAYLOAD:")
         ld("TreatLinkerWarningAsErrors", prefix="/WX", map={"true": "", "false": ":NO"})
-        out = self.GetOutputName(config, expand_special)
-        if out:
+        if out := self.GetOutputName(config, expand_special):
             ldflags.append("/OUT:" + out)
-        pdb = self.GetPDBName(config, expand_special, output_name + ".pdb")
-        if pdb:
+        if pdb := self.GetPDBName(config, expand_special, output_name + ".pdb"):
             ldflags.append("/PDB:" + pdb)
-        pgd = self.GetPGDName(config, expand_special)
-        if pgd:
+        if pgd := self.GetPGDName(config, expand_special):
             ldflags.append("/PGD:" + pgd)
         map_file = self.GetMapFileName(config, expand_special)
         ld("GenerateMapFile", map={"true": "/MAP:" + map_file if map_file else "/MAP"})
@@ -940,14 +933,17 @@ def GetRuleShellFlags(self, rule):
         includes whether it should run under cygwin (msvs_cygwin_shell), and
         whether the commands should be quoted (msvs_quote_cmd)."""
         # If the variable is unset, or set to 1 we use cygwin
-        cygwin = int(rule.get("msvs_cygwin_shell",
-                              self.spec.get("msvs_cygwin_shell", 1))) != 0
+        cygwin = (
+            int(rule.get("msvs_cygwin_shell", self.spec.get("msvs_cygwin_shell", 1)))
+            != 0
+        )
         # Default to quoting. There's only a few special instances where the
         # target command uses non-standard command line parsing and handle quotes
         # and quote escaping differently.
         quote_cmd = int(rule.get("msvs_quote_cmd", 1))
-        assert quote_cmd != 0 or cygwin != 1, \
-               "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
+        assert quote_cmd != 0 or cygwin != 1, (
+            "msvs_quote_cmd=0 only applicable for msvs_cygwin_shell=0"
+        )
         return MsvsSettings.RuleShellFlags(cygwin, quote_cmd)
 
     def _HasExplicitRuleForExtension(self, spec, extension):
@@ -1135,8 +1131,7 @@ def _ExtractImportantEnvironment(output_of_set):
     for required in ("SYSTEMROOT", "TEMP", "TMP"):
         if required not in env:
             raise Exception(
-                'Environment variable "%s" '
-                "required to be set to valid path" % required
+                'Environment variable "%s" required to be set to valid path' % required
             )
     return env
 
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
index 729cec0636273..8b026642fc5ef 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/simple_copy.py
@@ -17,8 +17,8 @@ class Error(Exception):
 
 def deepcopy(x):
     """Deep copy operation on gyp objects such as strings, ints, dicts
-  and lists. More than twice as fast as copy.deepcopy but much less
-  generic."""
+    and lists. More than twice as fast as copy.deepcopy but much less
+    generic."""
 
     try:
         return _deepcopy_dispatch[type(x)](x)
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
index 7e647f40a84c5..43665577bddda 100755
--- a/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
@@ -9,7 +9,6 @@
 These functions are executed via gyp-win-tool when using the ninja generator.
 """
 
-
 import os
 import re
 import shutil
@@ -27,18 +26,17 @@
 
 def main(args):
     executor = WinTool()
-    exit_code = executor.Dispatch(args)
-    if exit_code is not None:
+    if (exit_code := executor.Dispatch(args)) is not None:
         sys.exit(exit_code)
 
 
 class WinTool:
     """This class performs all the Windows tooling steps. The methods can either
-  be executed directly, or dispatched from an argument list."""
+    be executed directly, or dispatched from an argument list."""
 
     def _UseSeparateMspdbsrv(self, env, args):
         """Allows to use a unique instance of mspdbsrv.exe per linker instead of a
-    shared one."""
+        shared one."""
         if len(args) < 1:
             raise Exception("Not enough arguments")
 
@@ -115,9 +113,9 @@ def _on_error(fn, path, excinfo):
 
     def ExecLinkWrapper(self, arch, use_separate_mspdbsrv, *args):
         """Filter diagnostic output from link that looks like:
-    '   Creating library ui.dll.lib and object ui.dll.exp'
-    This happens when there are exports from the dll or exe.
-    """
+        '   Creating library ui.dll.lib and object ui.dll.exp'
+        This happens when there are exports from the dll or exe.
+        """
         env = self._GetEnv(arch)
         if use_separate_mspdbsrv == "True":
             self._UseSeparateMspdbsrv(env, args)
@@ -159,10 +157,10 @@ def ExecLinkWithManifests(
         mt,
         rc,
         intermediate_manifest,
-        *manifests
+        *manifests,
     ):
         """A wrapper for handling creating a manifest resource and then executing
-    a link command."""
+        a link command."""
         # The 'normal' way to do manifests is to have link generate a manifest
         # based on gathering dependencies from the object files, then merge that
         # manifest with other manifests supplied as sources, convert the merged
@@ -246,8 +244,8 @@ def dump(filename):
 
     def ExecManifestWrapper(self, arch, *args):
         """Run manifest tool with environment set. Strip out undesirable warning
-    (some XML blocks are recognized by the OS loader, but not the manifest
-    tool)."""
+        (some XML blocks are recognized by the OS loader, but not the manifest
+        tool)."""
         env = self._GetEnv(arch)
         popen = subprocess.Popen(
             args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
@@ -260,8 +258,8 @@ def ExecManifestWrapper(self, arch, *args):
 
     def ExecManifestToRc(self, arch, *args):
         """Creates a resource file pointing a SxS assembly manifest.
-    |args| is tuple containing path to resource file, path to manifest file
-    and resource name which can be "1" (for executables) or "2" (for DLLs)."""
+        |args| is tuple containing path to resource file, path to manifest file
+        and resource name which can be "1" (for executables) or "2" (for DLLs)."""
         manifest_path, resource_path, resource_name = args
         with open(resource_path, "w") as output:
             output.write(
@@ -271,8 +269,8 @@ def ExecManifestToRc(self, arch, *args):
 
     def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl, *flags):
         """Filter noisy filenames output from MIDL compile step that isn't
-    quietable via command line flags.
-    """
+        quietable via command line flags.
+        """
         args = (
             ["midl", "/nologo"]
             + list(flags)
@@ -328,7 +326,7 @@ def ExecAsmWrapper(self, arch, *args):
 
     def ExecRcWrapper(self, arch, *args):
         """Filter logo banner from invocations of rc.exe. Older versions of RC
-    don't support the /nologo flag."""
+        don't support the /nologo flag."""
         env = self._GetEnv(arch)
         popen = subprocess.Popen(
             args, shell=True, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT
@@ -345,7 +343,7 @@ def ExecRcWrapper(self, arch, *args):
 
     def ExecActionWrapper(self, arch, rspfile, *dir):
         """Runs an action command line from a response file using the environment
-    for |arch|. If |dir| is supplied, use that as the working directory."""
+        for |arch|. If |dir| is supplied, use that as the working directory."""
         env = self._GetEnv(arch)
         # TODO(scottmg): This is a temporary hack to get some specific variables
         # through to actions that are set after gyp-time. http://crbug.com/333738.
@@ -358,7 +356,7 @@ def ExecActionWrapper(self, arch, rspfile, *dir):
 
     def ExecClCompile(self, project_dir, selected_files):
         """Executed by msvs-ninja projects when the 'ClCompile' target is used to
-    build selected C/C++ files."""
+        build selected C/C++ files."""
         project_dir = os.path.relpath(project_dir, BASE_DIR)
         selected_files = selected_files.split(";")
         ninja_targets = [
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
index 85a63dfd7ae0e..192a523529fdd 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_emulation.py
@@ -7,7 +7,6 @@
 other build systems, such as make and ninja.
 """
 
-
 import copy
 import os
 import os.path
@@ -31,7 +30,7 @@
 
 def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
     """Constructs a dictionary with expansion for $(ARCHS_STANDARD) variable,
-  and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
+    and optionally for $(ARCHS_STANDARD_INCLUDING_64_BIT)."""
     mapping = {"$(ARCHS_STANDARD)": archs}
     if archs_including_64_bit:
         mapping["$(ARCHS_STANDARD_INCLUDING_64_BIT)"] = archs_including_64_bit
@@ -40,10 +39,10 @@ def XcodeArchsVariableMapping(archs, archs_including_64_bit=None):
 
 class XcodeArchsDefault:
     """A class to resolve ARCHS variable from xcode_settings, resolving Xcode
-  macros and implementing filtering by VALID_ARCHS. The expansion of macros
-  depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
-  on the version of Xcode.
-  """
+    macros and implementing filtering by VALID_ARCHS. The expansion of macros
+    depends on the SDKROOT used ("macosx", "iphoneos", "iphonesimulator") and
+    on the version of Xcode.
+    """
 
     # Match variable like $(ARCHS_STANDARD).
     variable_pattern = re.compile(r"\$\([a-zA-Z_][a-zA-Z0-9_]*\)$")
@@ -82,8 +81,8 @@ def _ExpandArchs(self, archs, sdkroot):
 
     def ActiveArchs(self, archs, valid_archs, sdkroot):
         """Expands variables references in ARCHS, and filter by VALID_ARCHS if it
-    is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
-    values present in VALID_ARCHS are kept)."""
+        is defined (if not set, Xcode accept any value in ARCHS, otherwise, only
+        values present in VALID_ARCHS are kept)."""
         expanded_archs = self._ExpandArchs(archs or self._default, sdkroot or "")
         if valid_archs:
             filtered_archs = []
@@ -96,24 +95,24 @@ def ActiveArchs(self, archs, valid_archs, sdkroot):
 
 def GetXcodeArchsDefault():
     """Returns the |XcodeArchsDefault| object to use to expand ARCHS for the
-  installed version of Xcode. The default values used by Xcode for ARCHS
-  and the expansion of the variables depends on the version of Xcode used.
+    installed version of Xcode. The default values used by Xcode for ARCHS
+    and the expansion of the variables depends on the version of Xcode used.
 
-  For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
-  uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
-  $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
-  and deprecated with Xcode 5.1.
+    For all version anterior to Xcode 5.0 or posterior to Xcode 5.1 included
+    uses $(ARCHS_STANDARD) if ARCHS is unset, while Xcode 5.0 to 5.0.2 uses
+    $(ARCHS_STANDARD_INCLUDING_64_BIT). This variable was added to Xcode 5.0
+    and deprecated with Xcode 5.1.
 
-  For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
-  architecture as part of $(ARCHS_STANDARD) and default to only building it.
+    For "macosx" SDKROOT, all version starting with Xcode 5.0 includes 64-bit
+    architecture as part of $(ARCHS_STANDARD) and default to only building it.
 
-  For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
-  of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
-  are also part of $(ARCHS_STANDARD).
+    For "iphoneos" and "iphonesimulator" SDKROOT, 64-bit architectures are part
+    of $(ARCHS_STANDARD_INCLUDING_64_BIT) from Xcode 5.0. From Xcode 5.1, they
+    are also part of $(ARCHS_STANDARD).
 
-  All these rules are coded in the construction of the |XcodeArchsDefault|
-  object to use depending on the version of Xcode detected. The object is
-  for performance reason."""
+    All these rules are coded in the construction of the |XcodeArchsDefault|
+    object to use depending on the version of Xcode detected. The object is
+    for performance reason."""
     global XCODE_ARCHS_DEFAULT_CACHE
     if XCODE_ARCHS_DEFAULT_CACHE:
         return XCODE_ARCHS_DEFAULT_CACHE
@@ -190,8 +189,8 @@ def __init__(self, spec):
 
     def _ConvertConditionalKeys(self, configname):
         """Converts or warns on conditional keys.  Xcode supports conditional keys,
-    such as CODE_SIGN_IDENTITY[sdk=iphoneos*].  This is a partial implementation
-    with some keys converted while the rest force a warning."""
+        such as CODE_SIGN_IDENTITY[sdk=iphoneos*].  This is a partial implementation
+        with some keys converted while the rest force a warning."""
         settings = self.xcode_settings[configname]
         conditional_keys = [key for key in settings if key.endswith("]")]
         for key in conditional_keys:
@@ -256,13 +255,13 @@ def _IsIosWatchApp(self):
 
     def GetFrameworkVersion(self):
         """Returns the framework version of the current target. Only valid for
-    bundles."""
+        bundles."""
         assert self._IsBundle()
         return self.GetPerTargetSetting("FRAMEWORK_VERSION", default="A")
 
     def GetWrapperExtension(self):
         """Returns the bundle extension (.app, .framework, .plugin, etc).  Only
-    valid for bundles."""
+        valid for bundles."""
         assert self._IsBundle()
         if self.spec["type"] in ("loadable_module", "shared_library"):
             default_wrapper_extension = {
@@ -297,13 +296,13 @@ def GetFullProductName(self):
 
     def GetWrapperName(self):
         """Returns the directory name of the bundle represented by this target.
-    Only valid for bundles."""
+        Only valid for bundles."""
         assert self._IsBundle()
         return self.GetProductName() + self.GetWrapperExtension()
 
     def GetBundleContentsFolderPath(self):
         """Returns the qualified path to the bundle's contents folder. E.g.
-    Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
+        Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
         if self.isIOS:
             return self.GetWrapperName()
         assert self._IsBundle()
@@ -317,7 +316,7 @@ def GetBundleContentsFolderPath(self):
 
     def GetBundleResourceFolder(self):
         """Returns the qualified path to the bundle's resource folder. E.g.
-    Chromium.app/Contents/Resources. Only valid for bundles."""
+        Chromium.app/Contents/Resources. Only valid for bundles."""
         assert self._IsBundle()
         if self.isIOS:
             return self.GetBundleContentsFolderPath()
@@ -325,7 +324,7 @@ def GetBundleResourceFolder(self):
 
     def GetBundleExecutableFolderPath(self):
         """Returns the qualified path to the bundle's executables folder. E.g.
-    Chromium.app/Contents/MacOS. Only valid for bundles."""
+        Chromium.app/Contents/MacOS. Only valid for bundles."""
         assert self._IsBundle()
         if self.spec["type"] in ("shared_library") or self.isIOS:
             return self.GetBundleContentsFolderPath()
@@ -334,25 +333,25 @@ def GetBundleExecutableFolderPath(self):
 
     def GetBundleJavaFolderPath(self):
         """Returns the qualified path to the bundle's Java resource folder.
-    E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
+        E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles."""
         assert self._IsBundle()
         return os.path.join(self.GetBundleResourceFolder(), "Java")
 
     def GetBundleFrameworksFolderPath(self):
         """Returns the qualified path to the bundle's frameworks folder. E.g,
-    Chromium.app/Contents/Frameworks. Only valid for bundles."""
+        Chromium.app/Contents/Frameworks. Only valid for bundles."""
         assert self._IsBundle()
         return os.path.join(self.GetBundleContentsFolderPath(), "Frameworks")
 
     def GetBundleSharedFrameworksFolderPath(self):
         """Returns the qualified path to the bundle's frameworks folder. E.g,
-    Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
+        Chromium.app/Contents/SharedFrameworks. Only valid for bundles."""
         assert self._IsBundle()
         return os.path.join(self.GetBundleContentsFolderPath(), "SharedFrameworks")
 
     def GetBundleSharedSupportFolderPath(self):
         """Returns the qualified path to the bundle's shared support folder. E.g,
-    Chromium.app/Contents/SharedSupport. Only valid for bundles."""
+        Chromium.app/Contents/SharedSupport. Only valid for bundles."""
         assert self._IsBundle()
         if self.spec["type"] == "shared_library":
             return self.GetBundleResourceFolder()
@@ -361,19 +360,19 @@ def GetBundleSharedSupportFolderPath(self):
 
     def GetBundlePlugInsFolderPath(self):
         """Returns the qualified path to the bundle's plugins folder. E.g,
-    Chromium.app/Contents/PlugIns. Only valid for bundles."""
+        Chromium.app/Contents/PlugIns. Only valid for bundles."""
         assert self._IsBundle()
         return os.path.join(self.GetBundleContentsFolderPath(), "PlugIns")
 
     def GetBundleXPCServicesFolderPath(self):
         """Returns the qualified path to the bundle's XPC services folder. E.g,
-    Chromium.app/Contents/XPCServices. Only valid for bundles."""
+        Chromium.app/Contents/XPCServices. Only valid for bundles."""
         assert self._IsBundle()
         return os.path.join(self.GetBundleContentsFolderPath(), "XPCServices")
 
     def GetBundlePlistPath(self):
         """Returns the qualified path to the bundle's plist file. E.g.
-    Chromium.app/Contents/Info.plist. Only valid for bundles."""
+        Chromium.app/Contents/Info.plist. Only valid for bundles."""
         assert self._IsBundle()
         if (
             self.spec["type"] in ("executable", "loadable_module")
@@ -439,7 +438,7 @@ def GetMachOType(self):
 
     def _GetBundleBinaryPath(self):
         """Returns the name of the bundle binary of by this target.
-    E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
+        E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
         assert self._IsBundle()
         return os.path.join(
             self.GetBundleExecutableFolderPath(), self.GetExecutableName()
@@ -470,14 +469,14 @@ def _GetStandaloneExecutablePrefix(self):
 
     def _GetStandaloneBinaryPath(self):
         """Returns the name of the non-bundle binary represented by this target.
-    E.g. hello_world. Only valid for non-bundles."""
+        E.g. hello_world. Only valid for non-bundles."""
         assert not self._IsBundle()
         assert self.spec["type"] in {
             "executable",
             "shared_library",
             "static_library",
             "loadable_module",
-        }, ("Unexpected type %s" % self.spec["type"])
+        }, "Unexpected type %s" % self.spec["type"]
         target = self.spec["target_name"]
         if self.spec["type"] in {"loadable_module", "shared_library", "static_library"}:
             if target[:3] == "lib":
@@ -490,7 +489,7 @@ def _GetStandaloneBinaryPath(self):
 
     def GetExecutableName(self):
         """Returns the executable name of the bundle represented by this target.
-    E.g. Chromium."""
+        E.g. Chromium."""
         if self._IsBundle():
             return self.spec.get("product_name", self.spec["target_name"])
         else:
@@ -498,7 +497,7 @@ def GetExecutableName(self):
 
     def GetExecutablePath(self):
         """Returns the qualified path to the primary executable of the bundle
-    represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
+        represented by this target. E.g. Chromium.app/Contents/MacOS/Chromium."""
         if self._IsBundle():
             return self._GetBundleBinaryPath()
         else:
@@ -521,7 +520,7 @@ def _GetSdkVersionInfoItem(self, sdk, infoitem):
         # most sensible route and should still do the right thing.
         try:
             return GetStdoutQuiet(["xcrun", "--sdk", sdk, infoitem])
-        except GypError:
+        except (GypError, OSError):
             pass
 
     def _SdkRoot(self, configname):
@@ -568,7 +567,7 @@ def _AppendPlatformVersionMinFlags(self, lst):
 
     def GetCflags(self, configname, arch=None):
         """Returns flags that need to be added to .c, .cc, .m, and .mm
-    compilations."""
+        compilations."""
         # This functions (and the similar ones below) do not offer complete
         # emulation of all xcode_settings keys. They're implemented on demand.
 
@@ -863,7 +862,7 @@ def GetInstallName(self):
 
     def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
         """Checks if ldflag contains a filename and if so remaps it from
-    gyp-directory-relative to build-directory-relative."""
+        gyp-directory-relative to build-directory-relative."""
         # This list is expanded on demand.
         # They get matched as:
         #   -exported_symbols_list file
@@ -895,13 +894,13 @@ def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
     def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
         """Returns flags that need to be passed to the linker.
 
-    Args:
-        configname: The name of the configuration to get ld flags for.
-        product_dir: The directory where products such static and dynamic
-            libraries are placed. This is added to the library search path.
-        gyp_to_build_path: A function that converts paths relative to the
-            current gyp file to paths relative to the build directory.
-    """
+        Args:
+            configname: The name of the configuration to get ld flags for.
+            product_dir: The directory where products such static and dynamic
+                libraries are placed. This is added to the library search path.
+            gyp_to_build_path: A function that converts paths relative to the
+                current gyp file to paths relative to the build directory.
+        """
         self.configname = configname
         ldflags = []
 
@@ -1001,9 +1000,9 @@ def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
     def GetLibtoolflags(self, configname):
         """Returns flags that need to be passed to the static linker.
 
-    Args:
-        configname: The name of the configuration to get ld flags for.
-    """
+        Args:
+            configname: The name of the configuration to get ld flags for.
+        """
         self.configname = configname
         libtoolflags = []
 
@@ -1016,7 +1015,7 @@ def GetLibtoolflags(self, configname):
 
     def GetPerTargetSettings(self):
         """Gets a list of all the per-target settings. This will only fetch keys
-    whose values are the same across all configurations."""
+        whose values are the same across all configurations."""
         first_pass = True
         result = {}
         for configname in sorted(self.xcode_settings.keys()):
@@ -1039,7 +1038,7 @@ def GetPerConfigSetting(self, setting, configname, default=None):
 
     def GetPerTargetSetting(self, setting, default=None):
         """Tries to get xcode_settings.setting from spec. Assumes that the setting
-       has the same value in all configurations and throws otherwise."""
+        has the same value in all configurations and throws otherwise."""
         is_first_pass = True
         result = None
         for configname in sorted(self.xcode_settings.keys()):
@@ -1057,15 +1056,14 @@ def GetPerTargetSetting(self, setting, default=None):
 
     def _GetStripPostbuilds(self, configname, output_binary, quiet):
         """Returns a list of shell commands that contain the shell commands
-    necessary to strip this target's binary. These should be run as postbuilds
-    before the actual postbuilds run."""
+        necessary to strip this target's binary. These should be run as postbuilds
+        before the actual postbuilds run."""
         self.configname = configname
 
         result = []
         if self._Test("DEPLOYMENT_POSTPROCESSING", "YES", default="NO") and self._Test(
             "STRIP_INSTALLED_PRODUCT", "YES", default="NO"
         ):
-
             default_strip_style = "debugging"
             if (
                 self.spec["type"] == "loadable_module" or self._IsIosAppExtension()
@@ -1092,8 +1090,8 @@ def _GetStripPostbuilds(self, configname, output_binary, quiet):
 
     def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
         """Returns a list of shell commands that contain the shell commands
-    necessary to massage this target's debug information. These should be run
-    as postbuilds before the actual postbuilds run."""
+        necessary to massage this target's debug information. These should be run
+        as postbuilds before the actual postbuilds run."""
         self.configname = configname
 
         # For static libraries, no dSYMs are created.
@@ -1114,7 +1112,7 @@ def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
 
     def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
         """Returns a list of shell commands that contain the shell commands
-    to run as postbuilds for this target, before the actual postbuilds."""
+        to run as postbuilds for this target, before the actual postbuilds."""
         # dSYMs need to build before stripping happens.
         return self._GetDebugInfoPostbuilds(
             configname, output, output_binary, quiet
@@ -1122,11 +1120,10 @@ def _GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
 
     def _GetIOSPostbuilds(self, configname, output_binary):
         """Return a shell command to codesign the iOS output binary so it can
-    be deployed to a device.  This should be run as the very last step of the
-    build."""
+        be deployed to a device.  This should be run as the very last step of the
+        build."""
         if not (
-            (self.isIOS
-            and (self.spec["type"] == "executable" or self._IsXCTest()))
+            (self.isIOS and (self.spec["type"] == "executable" or self._IsXCTest()))
             or self.IsIosFramework()
         ):
             return []
@@ -1240,7 +1237,7 @@ def AddImplicitPostbuilds(
         self, configname, output, output_binary, postbuilds=[], quiet=False
     ):
         """Returns a list of shell commands that should run before and after
-    |postbuilds|."""
+        |postbuilds|."""
         assert output_binary is not None
         pre = self._GetTargetPostbuilds(configname, output, output_binary, quiet)
         post = self._GetIOSPostbuilds(configname, output_binary)
@@ -1276,8 +1273,8 @@ def _AdjustLibrary(self, library, config_name=None):
 
     def AdjustLibraries(self, libraries, config_name=None):
         """Transforms entries like 'Cocoa.framework' in libraries into entries like
-    '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
-    """
+        '-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
+        """
         libraries = [self._AdjustLibrary(library, config_name) for library in libraries]
         return libraries
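
The docstring above gives the intended mapping for AdjustLibraries. As a standalone illustration of just that mapping (the real _AdjustLibrary also rewrites paths and consults config_name, which this sketch ignores; adjust_library_sketch is a made-up name):

    import os

    def adjust_library_sketch(library):
        # Illustrative only: 'Cocoa.framework' -> '-framework Cocoa',
        # 'libcrypto.dylib' -> '-lcrypto'; anything else passes through.
        name, ext = os.path.splitext(library)
        base = os.path.basename(name)
        if ext == ".framework":
            return "-framework " + base
        if ext == ".dylib" and base.startswith("lib"):
            return "-l" + base[3:]
        return library

    assert adjust_library_sketch("Cocoa.framework") == "-framework Cocoa"
    assert adjust_library_sketch("libcrypto.dylib") == "-lcrypto"
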
 
@@ -1342,20 +1339,19 @@ def GetExtraPlistItems(self, configname=None):
     def _DefaultSdkRoot(self):
         """Returns the default SDKROOT to use.
 
-    Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
-    project, then the environment variable was empty. Starting with this
-    version, Xcode uses the name of the newest SDK installed.
-    """
+        Prior to version 5.0.0, if SDKROOT was not explicitly set in the Xcode
+        project, then the environment variable was empty. Starting with this
+        version, Xcode uses the name of the newest SDK installed.
+        """
         xcode_version, _ = XcodeVersion()
         if xcode_version < "0500":
             return ""
         default_sdk_path = self._XcodeSdkPath("")
-        default_sdk_root = XcodeSettings._sdk_root_cache.get(default_sdk_path)
-        if default_sdk_root:
+        if default_sdk_root := XcodeSettings._sdk_root_cache.get(default_sdk_path):
             return default_sdk_root
         try:
             all_sdks = GetStdout(["xcodebuild", "-showsdks"])
-        except GypError:
+        except (GypError, OSError):
             # If xcodebuild fails, there will be no valid SDKs
             return ""
         for line in all_sdks.splitlines():
@@ -1371,39 +1367,39 @@ def _DefaultSdkRoot(self):
 class MacPrefixHeader:
     """A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
 
-  This feature consists of several pieces:
-  * If GCC_PREFIX_HEADER is present, all compilations in that project get an
-    additional |-include path_to_prefix_header| cflag.
-  * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
-    instead compiled, and all other compilations in the project get an
-    additional |-include path_to_compiled_header| instead.
-    + Compiled prefix headers have the extension gch. There is one gch file for
-      every language used in the project (c, cc, m, mm), since gch files for
-      different languages aren't compatible.
-    + gch files themselves are built with the target's normal cflags, but they
-      obviously don't get the |-include| flag. Instead, they need a -x flag that
-      describes their language.
-    + All o files in the target need to depend on the gch file, to make sure
-      it's built before any o file is built.
-
-  This class helps with some of these tasks, but it needs help from the build
-  system for writing dependencies to the gch files, for writing build commands
-  for the gch files, and for figuring out the location of the gch files.
-  """
+    This feature consists of several pieces:
+    * If GCC_PREFIX_HEADER is present, all compilations in that project get an
+      additional |-include path_to_prefix_header| cflag.
+    * If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
+      instead compiled, and all other compilations in the project get an
+      additional |-include path_to_compiled_header| instead.
+      + Compiled prefix headers have the extension gch. There is one gch file for
+        every language used in the project (c, cc, m, mm), since gch files for
+        different languages aren't compatible.
+      + gch files themselves are built with the target's normal cflags, but they
+        obviously don't get the |-include| flag. Instead, they need a -x flag that
+        describes their language.
+      + All o files in the target need to depend on the gch file, to make sure
+        it's built before any o file is built.
+
+    This class helps with some of these tasks, but it needs help from the build
+    system for writing dependencies to the gch files, for writing build commands
+    for the gch files, and for figuring out the location of the gch files.
+    """
 
     def __init__(
         self, xcode_settings, gyp_path_to_build_path, gyp_path_to_build_output
     ):
         """If xcode_settings is None, all methods on this class are no-ops.
 
-    Args:
-        gyp_path_to_build_path: A function that takes a gyp-relative path,
-            and returns a path relative to the build directory.
-        gyp_path_to_build_output: A function that takes a gyp-relative path and
-            a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
-            to where the output of precompiling that path for that language
-            should be placed (without the trailing '.gch').
-    """
+        Args:
+            gyp_path_to_build_path: A function that takes a gyp-relative path,
+                and returns a path relative to the build directory.
+            gyp_path_to_build_output: A function that takes a gyp-relative path and
+                a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
+                to where the output of precompiling that path for that language
+                should be placed (without the trailing '.gch').
+        """
         # This doesn't support per-configuration prefix headers. Good enough
         # for now.
         self.header = None
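
The class docstring above describes the two modes of GCC_PREFIX_HEADER emulation: a plain -include of the header, or precompilation into per-language gch files that are themselves built with an -x language flag. A rough sketch of those flag choices, following the docstring rather than the class (prefix_cflags_sketch and gch_build_flags_sketch are hypothetical names, and the -x spellings are the usual clang/GCC ones, assumed here):

    # Per-language "-x" names, assumed; one gch per language, as the docstring notes.
    LANG_FLAGS = {
        "c": "c-header",
        "cc": "c++-header",
        "m": "objective-c-header",
        "mm": "objective-c++-header",
    }

    def prefix_cflags_sketch(header, precompile, compiled_header):
        if not header:
            return []
        # Ordinary compilations include either the raw header or its compiled form.
        return ["-include", compiled_header if precompile else header]

    def gch_build_flags_sketch(lang):
        # The gch itself gets no -include; it needs its language spelled out.
        return ["-x", LANG_FLAGS[lang]]

    print(prefix_cflags_sketch("prefix.h", True, "out/prefix.h.gch"))  # ['-include', 'out/prefix.h.gch']
    print(gch_build_flags_sketch("mm"))                                # ['-x', 'objective-c++-header']
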
@@ -1448,9 +1444,9 @@ def _Gch(self, lang, arch):
 
     def GetObjDependencies(self, sources, objs, arch=None):
         """Given a list of source files and the corresponding object files, returns
-    a list of (source, object, gch) tuples, where |gch| is the build-directory
-    relative path to the gch file each object file depends on.  |compilable[i]|
-    has to be the source file belonging to |objs[i]|."""
+        a list of (source, object, gch) tuples, where |gch| is the build-directory
+        relative path to the gch file each object file depends on.  |compilable[i]|
+        has to be the source file belonging to |objs[i]|."""
         if not self.header or not self.compile_headers:
             return []
 
@@ -1471,8 +1467,8 @@ def GetObjDependencies(self, sources, objs, arch=None):
 
     def GetPchBuildCommands(self, arch=None):
         """Returns [(path_to_gch, language_flag, language, header)].
-    |path_to_gch| and |header| are relative to the build directory.
-    """
+        |path_to_gch| and |header| are relative to the build directory.
+        """
         if not self.header or not self.compile_headers:
             return []
         return [
@@ -1509,7 +1505,8 @@ def XcodeVersion():
             raise GypError("xcodebuild returned unexpected results")
         version = version_list[0].split()[-1]  # Last word on first line
         build = version_list[-1].split()[-1]  # Last word on last line
-    except GypError:  # Xcode not installed so look for XCode Command Line Tools
+    except (GypError, OSError):
+        # Xcode not installed so look for XCode Command Line Tools
         version = CLTVersion()  # macOS Catalina returns 11.0.0.0.1.1567737322
         if not version:
             raise GypError("No Xcode or CLT version detected!")
@@ -1542,21 +1539,21 @@ def CLTVersion():
         try:
             output = GetStdout(["/usr/sbin/pkgutil", "--pkg-info", key])
             return re.search(regex, output).groupdict()["version"]
-        except GypError:
+        except (GypError, OSError):
             continue
 
     regex = re.compile(r"Command Line Tools for Xcode\s+(?P\S+)")
     try:
         output = GetStdout(["/usr/sbin/softwareupdate", "--history"])
         return re.search(regex, output).groupdict()["version"]
-    except GypError:
+    except (GypError, OSError):
         return None
 
 
 def GetStdoutQuiet(cmdlist):
     """Returns the content of standard output returned by invoking |cmdlist|.
-  Ignores the stderr.
-  Raises |GypError| if the command return with a non-zero return code."""
+    Ignores the stderr.
+    Raises |GypError| if the command return with a non-zero return code."""
     job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out = job.communicate()[0].decode("utf-8")
     if job.returncode != 0:
@@ -1566,7 +1563,7 @@ def GetStdoutQuiet(cmdlist):
 
 def GetStdout(cmdlist):
     """Returns the content of standard output returned by invoking |cmdlist|.
-  Raises |GypError| if the command return with a non-zero return code."""
+    Raises |GypError| if the command return with a non-zero return code."""
     job = subprocess.Popen(cmdlist, stdout=subprocess.PIPE)
     out = job.communicate()[0].decode("utf-8")
     if job.returncode != 0:
@@ -1577,9 +1574,9 @@ def GetStdout(cmdlist):
 
 def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
     """Merges the global xcode_settings dictionary into each configuration of the
-  target represented by spec. For keys that are both in the global and the local
-  xcode_settings dict, the local key gets precedence.
-  """
+    target represented by spec. For keys that are both in the global and the local
+    xcode_settings dict, the local key gets precedence.
+    """
     # The xcode generator special-cases global xcode_settings and does something
     # that amounts to merging in the global xcode_settings into each local
     # xcode_settings dict.
@@ -1594,9 +1591,9 @@ def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
 def IsMacBundle(flavor, spec):
     """Returns if |spec| should be treated as a bundle.
 
-  Bundles are directories with a certain subdirectory structure, instead of
-  just a single file. Bundle rules do not produce a binary but also package
-  resources into that directory."""
+    Bundles are directories with a certain subdirectory structure, instead of
+    just a single file. Bundle rules do not produce a binary but also package
+    resources into that directory."""
     is_mac_bundle = (
         int(spec.get("mac_xctest_bundle", 0)) != 0
         or int(spec.get("mac_xcuitest_bundle", 0)) != 0
@@ -1613,14 +1610,14 @@ def IsMacBundle(flavor, spec):
 
 def GetMacBundleResources(product_dir, xcode_settings, resources):
     """Yields (output, resource) pairs for every resource in |resources|.
-  Only call this for mac bundle targets.
-
-  Args:
-      product_dir: Path to the directory containing the output bundle,
-          relative to the build directory.
-      xcode_settings: The XcodeSettings of the current target.
-      resources: A list of bundle resources, relative to the build directory.
-  """
+    Only call this for mac bundle targets.
+
+    Args:
+        product_dir: Path to the directory containing the output bundle,
+            relative to the build directory.
+        xcode_settings: The XcodeSettings of the current target.
+        resources: A list of bundle resources, relative to the build directory.
+    """
     dest = os.path.join(product_dir, xcode_settings.GetBundleResourceFolder())
     for res in resources:
         output = dest
@@ -1651,24 +1648,24 @@ def GetMacBundleResources(product_dir, xcode_settings, resources):
 
 def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
     """Returns (info_plist, dest_plist, defines, extra_env), where:
-  * |info_plist| is the source plist path, relative to the
-    build directory,
-  * |dest_plist| is the destination plist path, relative to the
-    build directory,
-  * |defines| is a list of preprocessor defines (empty if the plist
-    shouldn't be preprocessed,
-  * |extra_env| is a dict of env variables that should be exported when
-    invoking |mac_tool copy-info-plist|.
-
-  Only call this for mac bundle targets.
-
-  Args:
-      product_dir: Path to the directory containing the output bundle,
-          relative to the build directory.
-      xcode_settings: The XcodeSettings of the current target.
-      gyp_to_build_path: A function that converts paths relative to the
-          current gyp file to paths relative to the build directory.
-  """
+    * |info_plist| is the source plist path, relative to the
+      build directory,
+    * |dest_plist| is the destination plist path, relative to the
+      build directory,
+    * |defines| is a list of preprocessor defines (empty if the plist
+      shouldn't be preprocessed,
+    * |extra_env| is a dict of env variables that should be exported when
+      invoking |mac_tool copy-info-plist|.
+
+    Only call this for mac bundle targets.
+
+    Args:
+        product_dir: Path to the directory containing the output bundle,
+            relative to the build directory.
+        xcode_settings: The XcodeSettings of the current target.
+        gyp_to_build_path: A function that converts paths relative to the
+            current gyp file to paths relative to the build directory.
+    """
     info_plist = xcode_settings.GetPerTargetSetting("INFOPLIST_FILE")
     if not info_plist:
         return None, None, [], {}
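
The reflowed docstring documents the 4-tuple that GetMacInfoPlist returns and that |defines| is empty when no preprocessing is needed. A hedged usage sketch; plan_info_plist is a made-up name, and the import assumes gyp's pylib directory is on sys.path, which is how node-gyp runs it:

    from gyp.xcode_emulation import GetMacInfoPlist  # assumes gyp/pylib on sys.path

    def plan_info_plist(product_dir, xcode_settings, gyp_path_to_build_path):
        info_plist, dest_plist, defines, extra_env = GetMacInfoPlist(
            product_dir, xcode_settings, gyp_path_to_build_path)
        if info_plist is None:
            return []  # target has no INFOPLIST_FILE
        steps = []
        if defines:
            # Non-empty defines means the source plist is preprocessed first.
            steps.append(("preprocess", info_plist, defines))
        steps.append(("copy-info-plist", info_plist, dest_plist, extra_env))
        return steps
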
@@ -1706,18 +1703,18 @@ def _GetXcodeEnv(
     xcode_settings, built_products_dir, srcroot, configuration, additional_settings=None
 ):
     """Return the environment variables that Xcode would set. See
-  http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
-  for a full list.
-
-  Args:
-      xcode_settings: An XcodeSettings object. If this is None, this function
-          returns an empty dict.
-      built_products_dir: Absolute path to the built products dir.
-      srcroot: Absolute path to the source root.
-      configuration: The build configuration name.
-      additional_settings: An optional dict with more values to add to the
-          result.
-  """
+    http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
+    for a full list.
+
+    Args:
+        xcode_settings: An XcodeSettings object. If this is None, this function
+            returns an empty dict.
+        built_products_dir: Absolute path to the built products dir.
+        srcroot: Absolute path to the source root.
+        configuration: The build configuration name.
+        additional_settings: An optional dict with more values to add to the
+            result.
+    """
 
     if not xcode_settings:
         return {}
@@ -1771,27 +1768,25 @@ def _GetXcodeEnv(
         )
         env["CONTENTS_FOLDER_PATH"] = xcode_settings.GetBundleContentsFolderPath()
         env["EXECUTABLE_FOLDER_PATH"] = xcode_settings.GetBundleExecutableFolderPath()
-        env[
-            "UNLOCALIZED_RESOURCES_FOLDER_PATH"
-        ] = xcode_settings.GetBundleResourceFolder()
+        env["UNLOCALIZED_RESOURCES_FOLDER_PATH"] = (
+            xcode_settings.GetBundleResourceFolder()
+        )
         env["JAVA_FOLDER_PATH"] = xcode_settings.GetBundleJavaFolderPath()
         env["FRAMEWORKS_FOLDER_PATH"] = xcode_settings.GetBundleFrameworksFolderPath()
-        env[
-            "SHARED_FRAMEWORKS_FOLDER_PATH"
-        ] = xcode_settings.GetBundleSharedFrameworksFolderPath()
-        env[
-            "SHARED_SUPPORT_FOLDER_PATH"
-        ] = xcode_settings.GetBundleSharedSupportFolderPath()
+        env["SHARED_FRAMEWORKS_FOLDER_PATH"] = (
+            xcode_settings.GetBundleSharedFrameworksFolderPath()
+        )
+        env["SHARED_SUPPORT_FOLDER_PATH"] = (
+            xcode_settings.GetBundleSharedSupportFolderPath()
+        )
         env["PLUGINS_FOLDER_PATH"] = xcode_settings.GetBundlePlugInsFolderPath()
         env["XPCSERVICES_FOLDER_PATH"] = xcode_settings.GetBundleXPCServicesFolderPath()
         env["INFOPLIST_PATH"] = xcode_settings.GetBundlePlistPath()
         env["WRAPPER_NAME"] = xcode_settings.GetWrapperName()
 
-    install_name = xcode_settings.GetInstallName()
-    if install_name:
+    if install_name := xcode_settings.GetInstallName():
         env["LD_DYLIB_INSTALL_NAME"] = install_name
-    install_name_base = xcode_settings.GetInstallNameBase()
-    if install_name_base:
+    if install_name_base := xcode_settings.GetInstallNameBase():
         env["DYLIB_INSTALL_NAME_BASE"] = install_name_base
     xcode_version, _ = XcodeVersion()
     if xcode_version >= "0500" and not env.get("SDKROOT"):
@@ -1819,8 +1814,8 @@ def _GetXcodeEnv(
 
 def _NormalizeEnvVarReferences(str):
     """Takes a string containing variable references in the form ${FOO}, $(FOO),
-  or $FOO, and returns a string with all variable references in the form ${FOO}.
-  """
+    or $FOO, and returns a string with all variable references in the form ${FOO}.
+    """
     # $FOO -> ${FOO}
     str = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", str)
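
The hunk shows the $FOO case; the rest of _NormalizeEnvVarReferences handles $(FOO) the same way. A self-contained sketch of the documented contract (not the module's exact regexes):

    import re

    def normalize_env_var_references_sketch(s):
        s = re.sub(r"\$([a-zA-Z_][a-zA-Z0-9_]*)", r"${\1}", s)      # $FOO   -> ${FOO}
        s = re.sub(r"\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)", r"${\1}", s)  # $(FOO) -> ${FOO}
        return s

    assert (normalize_env_var_references_sketch("$(SRCROOT)/x $FOO ${BAR}")
            == "${SRCROOT}/x ${FOO} ${BAR}")
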
 
@@ -1836,9 +1831,9 @@ def _NormalizeEnvVarReferences(str):
 
 def ExpandEnvVars(string, expansions):
     """Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
-  expansions list. If the variable expands to something that references
-  another variable, this variable is expanded as well if it's in env --
-  until no variables present in env are left."""
+    expansions list. If the variable expands to something that references
+    another variable, this variable is expanded as well if it's in env --
+    until no variables present in env are left."""
     for k, v in reversed(expansions):
         string = string.replace("${" + k + "}", v)
         string = string.replace("$(" + k + ")", v)
@@ -1848,11 +1843,11 @@ def ExpandEnvVars(string, expansions):
 
 def _TopologicallySortedEnvVarKeys(env):
     """Takes a dict |env| whose values are strings that can refer to other keys,
-  for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
-  env such that key2 is after key1 in L if env[key2] refers to env[key1].
+    for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
+    env such that key2 is after key1 in L if env[key2] refers to env[key1].
 
-  Throws an Exception in case of dependency cycles.
-  """
+    Throws an Exception in case of dependency cycles.
+    """
     # Since environment variables can refer to other variables, the evaluation
     # order is important. Below is the logic to compute the dependency graph
     # and sort it.
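
Together, the two docstrings above say that values may reference other keys and that expansion only works if referenced keys are expanded first. A tiny demonstration of that ordering requirement, in plain Python rather than the module's graph walk (env, expand, and the hard-coded order below are all made up for the example):

    env = {"foo": "$(bar)/bin", "bar": "/tmp/build"}

    def expand(value, expansions):
        for k, v in expansions:
            value = value.replace("${" + k + "}", v).replace("$(" + k + ")", v)
        return value

    # 'bar' must come before 'foo', which is exactly the ordering that
    # _TopologicallySortedEnvVarKeys derives from the references.
    expansions = []
    for key in ["bar", "foo"]:
        expansions.append((key, expand(env[key], expansions)))

    print(dict(expansions))  # {'bar': '/tmp/build', 'foo': '/tmp/build/bin'}
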
@@ -1893,7 +1888,7 @@ def GetSortedXcodeEnv(
 
 def GetSpecPostbuildCommands(spec, quiet=False):
     """Returns the list of postbuilds explicitly defined on |spec|, in a form
-  executable by a shell."""
+    executable by a shell."""
     postbuilds = []
     for postbuild in spec.get("postbuilds", []):
         if not quiet:
@@ -1907,7 +1902,7 @@ def GetSpecPostbuildCommands(spec, quiet=False):
 
 def _HasIOSTarget(targets):
     """Returns true if any target contains the iOS specific key
-  IPHONEOS_DEPLOYMENT_TARGET."""
+    IPHONEOS_DEPLOYMENT_TARGET."""
     for target_dict in targets.values():
         for config in target_dict["configurations"].values():
             if config.get("xcode_settings", {}).get("IPHONEOS_DEPLOYMENT_TARGET"):
@@ -1917,7 +1912,7 @@ def _HasIOSTarget(targets):
 
 def _AddIOSDeviceConfigurations(targets):
     """Clone all targets and append -iphoneos to the name. Configure these targets
-  to build for iOS devices and use correct architectures for those builds."""
+    to build for iOS devices and use correct architectures for those builds."""
     for target_dict in targets.values():
         toolset = target_dict["toolset"]
         configs = target_dict["configurations"]
@@ -1933,7 +1928,7 @@ def _AddIOSDeviceConfigurations(targets):
 
 def CloneConfigurationForDeviceAndEmulator(target_dicts):
     """If |target_dicts| contains any iOS targets, automatically create -iphoneos
-  targets for iOS device builds."""
+    targets for iOS device builds."""
     if _HasIOSTarget(target_dicts):
         return _AddIOSDeviceConfigurations(target_dicts)
     return target_dicts
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
index cac1af56f7bfb..1a97a06c51d9f 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcode_ninja.py
@@ -21,7 +21,7 @@
 
 
 def _WriteWorkspace(main_gyp, sources_gyp, params):
-    """ Create a workspace to wrap main and sources gyp paths. """
+    """Create a workspace to wrap main and sources gyp paths."""
     (build_file_root, build_file_ext) = os.path.splitext(main_gyp)
     workspace_path = build_file_root + ".xcworkspace"
     options = params["options"]
@@ -57,7 +57,7 @@ def _WriteWorkspace(main_gyp, sources_gyp, params):
 
 
 def _TargetFromSpec(old_spec, params):
-    """ Create fake target for xcode-ninja wrapper. """
+    """Create fake target for xcode-ninja wrapper."""
     # Determine ninja top level build dir (e.g. /path/to/out).
     ninja_toplevel = None
     jobs = 0
@@ -70,12 +70,11 @@ def _TargetFromSpec(old_spec, params):
 
     target_name = old_spec.get("target_name")
     product_name = old_spec.get("product_name", target_name)
-    product_extension = old_spec.get("product_extension")
 
     ninja_target = {}
     ninja_target["target_name"] = target_name
     ninja_target["product_name"] = product_name
-    if product_extension:
+    if product_extension := old_spec.get("product_extension"):
         ninja_target["product_extension"] = product_extension
     ninja_target["toolset"] = old_spec.get("toolset")
     ninja_target["default_configuration"] = old_spec.get("default_configuration")
@@ -103,9 +102,9 @@ def _TargetFromSpec(old_spec, params):
                     new_xcode_settings[key] = old_xcode_settings[key]
 
             ninja_target["configurations"][config] = {}
-            ninja_target["configurations"][config][
-                "xcode_settings"
-            ] = new_xcode_settings
+            ninja_target["configurations"][config]["xcode_settings"] = (
+                new_xcode_settings
+            )
 
     ninja_target["mac_bundle"] = old_spec.get("mac_bundle", 0)
     ninja_target["mac_xctest_bundle"] = old_spec.get("mac_xctest_bundle", 0)
@@ -138,13 +137,13 @@ def _TargetFromSpec(old_spec, params):
 def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
     """Limit targets for Xcode wrapper.
 
-  Xcode sometimes performs poorly with too many targets, so only include
-  proper executable targets, with filters to customize.
-  Arguments:
-    target_extras: Regular expression to always add, matching any target.
-    executable_target_pattern: Regular expression limiting executable targets.
-    spec: Specifications for target.
-  """
+    Xcode sometimes performs poorly with too many targets, so only include
+    proper executable targets, with filters to customize.
+    Arguments:
+      target_extras: Regular expression to always add, matching any target.
+      executable_target_pattern: Regular expression limiting executable targets.
+      spec: Specifications for target.
+    """
     target_name = spec.get("target_name")
     # Always include targets matching target_extras.
     if target_extras is not None and re.search(target_extras, target_name):
@@ -155,7 +154,6 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
         spec.get("type", "") == "executable"
         and spec.get("product_extension", "") != "bundle"
     ):
-
         # If there is a filter and the target does not match, exclude the target.
         if executable_target_pattern is not None:
             if not re.search(executable_target_pattern, target_name):
@@ -167,14 +165,14 @@ def IsValidTargetForWrapper(target_extras, executable_target_pattern, spec):
 def CreateWrapper(target_list, target_dicts, data, params):
     """Initialize targets for the ninja wrapper.
 
-  This sets up the necessary variables in the targets to generate Xcode projects
-  that use ninja as an external builder.
-  Arguments:
-    target_list: List of target pairs: 'base/base.gyp:base'.
-    target_dicts: Dict of target properties keyed on target pair.
-    data: Dict of flattened build files keyed on gyp path.
-    params: Dict of global options for gyp.
-  """
+    This sets up the necessary variables in the targets to generate Xcode projects
+    that use ninja as an external builder.
+    Arguments:
+      target_list: List of target pairs: 'base/base.gyp:base'.
+      target_dicts: Dict of target properties keyed on target pair.
+      data: Dict of flattened build files keyed on gyp path.
+      params: Dict of global options for gyp.
+    """
     orig_gyp = params["build_files"][0]
     for gyp_name, gyp_dict in data.items():
         if gyp_name == orig_gyp:
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
index be17ef946dce3..11e2be0737223 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xcodeproj_file.py
@@ -176,15 +176,14 @@ def cmp(x, y):
 def SourceTreeAndPathFromPath(input_path):
     """Given input_path, returns a tuple with sourceTree and path values.
 
-  Examples:
-    input_path     (source_tree, output_path)
-    '$(VAR)/path'  ('VAR', 'path')
-    '$(VAR)'       ('VAR', None)
-    'path'         (None, 'path')
-  """
-
-    source_group_match = _path_leading_variable.match(input_path)
-    if source_group_match:
+    Examples:
+      input_path     (source_tree, output_path)
+      '$(VAR)/path'  ('VAR', 'path')
+      '$(VAR)'       ('VAR', None)
+      'path'         (None, 'path')
+    """
+
+    if source_group_match := _path_leading_variable.match(input_path):
         source_tree = source_group_match.group(1)
         output_path = source_group_match.group(3)  # This may be None.
     else:
@@ -201,70 +200,70 @@ def ConvertVariablesToShellSyntax(input_string):
 class XCObject:
     """The abstract base of all class types used in Xcode project files.
 
-  Class variables:
-    _schema: A dictionary defining the properties of this class.  The keys to
-             _schema are string property keys as used in project files.  Values
-             are a list of four or five elements:
-             [ is_list, property_type, is_strong, is_required, default ]
-             is_list: True if the property described is a list, as opposed
-                      to a single element.
-             property_type: The type to use as the value of the property,
-                            or if is_list is True, the type to use for each
-                            element of the value's list.  property_type must
-                            be an XCObject subclass, or one of the built-in
-                            types str, int, or dict.
-             is_strong: If property_type is an XCObject subclass, is_strong
-                        is True to assert that this class "owns," or serves
-                        as parent, to the property value (or, if is_list is
-                        True, values).  is_strong must be False if
-                        property_type is not an XCObject subclass.
-             is_required: True if the property is required for the class.
-                          Note that is_required being True does not preclude
-                          an empty string ("", in the case of property_type
-                          str) or list ([], in the case of is_list True) from
-                          being set for the property.
-             default: Optional.  If is_required is True, default may be set
-                      to provide a default value for objects that do not supply
-                      their own value.  If is_required is True and default
-                      is not provided, users of the class must supply their own
-                      value for the property.
-             Note that although the values of the array are expressed in
-             boolean terms, subclasses provide values as integers to conserve
-             horizontal space.
-    _should_print_single_line: False in XCObject.  Subclasses whose objects
-                               should be written to the project file in the
-                               alternate single-line format, such as
-                               PBXFileReference and PBXBuildFile, should
-                               set this to True.
-    _encode_transforms: Used by _EncodeString to encode unprintable characters.
-                        The index into this list is the ordinal of the
-                        character to transform; each value is a string
-                        used to represent the character in the output.  XCObject
-                        provides an _encode_transforms list suitable for most
-                        XCObject subclasses.
-    _alternate_encode_transforms: Provided for subclasses that wish to use
-                                  the alternate encoding rules.  Xcode seems
-                                  to use these rules when printing objects in
-                                  single-line format.  Subclasses that desire
-                                  this behavior should set _encode_transforms
-                                  to _alternate_encode_transforms.
-    _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
-                to construct this object's ID.  Most classes that need custom
-                hashing behavior should do it by overriding Hashables,
-                but in some cases an object's parent may wish to push a
-                hashable value into its child, and it can do so by appending
-                to _hashables.
-  Attributes:
-    id: The object's identifier, a 24-character uppercase hexadecimal string.
-        Usually, objects being created should not set id until the entire
-        project file structure is built.  At that point, UpdateIDs() should
-        be called on the root object to assign deterministic values for id to
-        each object in the tree.
-    parent: The object's parent.  This is set by a parent XCObject when a child
-            object is added to it.
-    _properties: The object's property dictionary.  An object's properties are
-                 described by its class' _schema variable.
-  """
+    Class variables:
+      _schema: A dictionary defining the properties of this class.  The keys to
+               _schema are string property keys as used in project files.  Values
+               are a list of four or five elements:
+               [ is_list, property_type, is_strong, is_required, default ]
+               is_list: True if the property described is a list, as opposed
+                        to a single element.
+               property_type: The type to use as the value of the property,
+                              or if is_list is True, the type to use for each
+                              element of the value's list.  property_type must
+                              be an XCObject subclass, or one of the built-in
+                              types str, int, or dict.
+               is_strong: If property_type is an XCObject subclass, is_strong
+                          is True to assert that this class "owns," or serves
+                          as parent, to the property value (or, if is_list is
+                          True, values).  is_strong must be False if
+                          property_type is not an XCObject subclass.
+               is_required: True if the property is required for the class.
+                            Note that is_required being True does not preclude
+                            an empty string ("", in the case of property_type
+                            str) or list ([], in the case of is_list True) from
+                            being set for the property.
+               default: Optional.  If is_required is True, default may be set
+                        to provide a default value for objects that do not supply
+                        their own value.  If is_required is True and default
+                        is not provided, users of the class must supply their own
+                        value for the property.
+               Note that although the values of the array are expressed in
+               boolean terms, subclasses provide values as integers to conserve
+               horizontal space.
+      _should_print_single_line: False in XCObject.  Subclasses whose objects
+                                 should be written to the project file in the
+                                 alternate single-line format, such as
+                                 PBXFileReference and PBXBuildFile, should
+                                 set this to True.
+      _encode_transforms: Used by _EncodeString to encode unprintable characters.
+                          The index into this list is the ordinal of the
+                          character to transform; each value is a string
+                          used to represent the character in the output.  XCObject
+                          provides an _encode_transforms list suitable for most
+                          XCObject subclasses.
+      _alternate_encode_transforms: Provided for subclasses that wish to use
+                                    the alternate encoding rules.  Xcode seems
+                                    to use these rules when printing objects in
+                                    single-line format.  Subclasses that desire
+                                    this behavior should set _encode_transforms
+                                    to _alternate_encode_transforms.
+      _hashables: A list of XCObject subclasses that can be hashed by ComputeIDs
+                  to construct this object's ID.  Most classes that need custom
+                  hashing behavior should do it by overriding Hashables,
+                  but in some cases an object's parent may wish to push a
+                  hashable value into its child, and it can do so by appending
+                  to _hashables.
+    Attributes:
+      id: The object's identifier, a 24-character uppercase hexadecimal string.
+          Usually, objects being created should not set id until the entire
+          project file structure is built.  At that point, UpdateIDs() should
+          be called on the root object to assign deterministic values for id to
+          each object in the tree.
+      parent: The object's parent.  This is set by a parent XCObject when a child
+              object is added to it.
+      _properties: The object's property dictionary.  An object's properties are
+                   described by its class' _schema variable.
+    """
 
     _schema = {}
     _should_print_single_line = False
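
The reflowed docstring spells out the five-slot schema entries ([is_list, property_type, is_strong, is_required, default], with integers standing in for the booleans). A self-contained sketch with entirely made-up property names and a toy required-property check (the real checks live in UpdateProperties and VerifyHasRequiredProperties):

    _example_schema = {
        "exampleName": [0, str, 0, 0],        # optional single string, weak
        "exampleTags": [1, str, 0, 1, []],    # required list of strings, default []
    }

    def check_required_sketch(schema, properties):
        missing = [key for key, attrs in schema.items()
                   if attrs[3] and key not in properties]
        if missing:
            raise KeyError("missing required properties: " + ", ".join(missing))

    check_required_sketch(_example_schema, {"exampleTags": ["a", "b"]})  # passes
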
@@ -306,12 +305,12 @@ def __repr__(self):
     def Copy(self):
         """Make a copy of this object.
 
-    The new object will have its own copy of lists and dicts.  Any XCObject
-    objects owned by this object (marked "strong") will be copied in the
-    new object, even those found in lists.  If this object has any weak
-    references to other XCObjects, the same references are added to the new
-    object without making a copy.
-    """
+        The new object will have its own copy of lists and dicts.  Any XCObject
+        objects owned by this object (marked "strong") will be copied in the
+        new object, even those found in lists.  If this object has any weak
+        references to other XCObjects, the same references are added to the new
+        object without making a copy.
+        """
 
         that = self.__class__(id=self.id, parent=self.parent)
         for key, value in self._properties.items():
@@ -360,9 +359,9 @@ def Copy(self):
     def Name(self):
         """Return the name corresponding to an object.
 
-    Not all objects necessarily need to be nameable, and not all that do have
-    a "name" property.  Override as needed.
-    """
+        Not all objects necessarily need to be nameable, and not all that do have
+        a "name" property.  Override as needed.
+        """
 
         # If the schema indicates that "name" is required, try to access the
         # property even if it doesn't exist.  This will result in a KeyError
@@ -378,20 +377,19 @@ def Name(self):
     def Comment(self):
         """Return a comment string for the object.
 
-    Most objects just use their name as the comment, but PBXProject uses
-    different values.
+        Most objects just use their name as the comment, but PBXProject uses
+        different values.
 
-    The returned comment is not escaped and does not have any comment marker
-    strings applied to it.
-    """
+        The returned comment is not escaped and does not have any comment marker
+        strings applied to it.
+        """
 
         return self.Name()
 
     def Hashables(self):
         hashables = [self.__class__.__name__]
 
-        name = self.Name()
-        if name is not None:
+        if (name := self.Name()) is not None:
             hashables.append(name)
 
         hashables.extend(self._hashables)
@@ -404,26 +402,26 @@ def HashablesForChild(self):
     def ComputeIDs(self, recursive=True, overwrite=True, seed_hash=None):
         """Set "id" properties deterministically.
 
-    An object's "id" property is set based on a hash of its class type and
-    name, as well as the class type and name of all ancestor objects.  As
-    such, it is only advisable to call ComputeIDs once an entire project file
-    tree is built.
+        An object's "id" property is set based on a hash of its class type and
+        name, as well as the class type and name of all ancestor objects.  As
+        such, it is only advisable to call ComputeIDs once an entire project file
+        tree is built.
 
-    If recursive is True, recurse into all descendant objects and update their
-    hashes.
+        If recursive is True, recurse into all descendant objects and update their
+        hashes.
 
-    If overwrite is True, any existing value set in the "id" property will be
-    replaced.
-    """
+        If overwrite is True, any existing value set in the "id" property will be
+        replaced.
+        """
 
         def _HashUpdate(hash, data):
             """Update hash with data's length and contents.
 
-      If the hash were updated only with the value of data, it would be
-      possible for clowns to induce collisions by manipulating the names of
-      their objects.  By adding the length, it's exceedingly less likely that
-      ID collisions will be encountered, intentionally or not.
-      """
+            If the hash were updated only with the value of data, it would be
+            possible for clowns to induce collisions by manipulating the names of
+            their objects.  By adding the length, it's exceedingly less likely that
+            ID collisions will be encountered, intentionally or not.
+            """
 
             hash.update(struct.pack(">i", len(data)))
             if isinstance(data, str):
@@ -466,8 +464,7 @@ def _HashUpdate(hash, data):
             self.id = "%08X%08X%08X" % tuple(id_ints)
 
     def EnsureNoIDCollisions(self):
-        """Verifies that no two objects have the same ID.  Checks all descendants.
-    """
+        """Verifies that no two objects have the same ID.  Checks all descendants."""
 
         ids = {}
         descendants = self.Descendants()
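
The nested _HashUpdate docstring above explains why each value's length is mixed into the hash before its contents. A small demonstration of the collision that the length prefix avoids (hashlib.sha1 is used here purely for the demo; the struct.pack(">i", ...) framing matches the line shown above):

    import hashlib
    import struct

    def digest(parts, length_prefixed):
        h = hashlib.sha1()
        for part in parts:
            data = part.encode("utf-8")
            if length_prefixed:
                h.update(struct.pack(">i", len(data)))
            h.update(data)
        return h.hexdigest()

    # Without the prefix, differently split names hash identically:
    assert digest(["ab", "c"], False) == digest(["a", "bc"], False)
    # With it, the split is part of the hash:
    assert digest(["ab", "c"], True) != digest(["a", "bc"], True)
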
@@ -500,8 +497,8 @@ def Children(self):
 
     def Descendants(self):
         """Returns a list of all of this object's descendants, including this
-    object.
-    """
+        object.
+        """
 
         children = self.Children()
         descendants = [self]
@@ -517,8 +514,8 @@ def PBXProjectAncestor(self):
 
     def _EncodeComment(self, comment):
         """Encodes a comment to be placed in the project file output, mimicking
-    Xcode behavior.
-    """
+        Xcode behavior.
+        """
 
         # This mimics Xcode behavior by wrapping the comment in "/*" and "*/".  If
         # the string already contains a "*/", it is turned into "(*)/".  This keeps
@@ -545,8 +542,8 @@ def _EncodeTransform(self, match):
 
     def _EncodeString(self, value):
         """Encodes a string to be placed in the project file output, mimicking
-    Xcode behavior.
-    """
+        Xcode behavior.
+        """
 
         # Use quotation marks when any character outside of the range A-Z, a-z, 0-9,
         # $ (dollar sign), . (period), and _ (underscore) is present.  Also use
@@ -587,18 +584,18 @@ def _XCPrint(self, file, tabs, line):
 
     def _XCPrintableValue(self, tabs, value, flatten_list=False):
         """Returns a representation of value that may be printed in a project file,
-    mimicking Xcode's behavior.
+        mimicking Xcode's behavior.
 
-    _XCPrintableValue can handle str and int values, XCObjects (which are
-    made printable by returning their id property), and list and dict objects
-    composed of any of the above types.  When printing a list or dict, and
-    _should_print_single_line is False, the tabs parameter is used to determine
-    how much to indent the lines corresponding to the items in the list or
-    dict.
+        _XCPrintableValue can handle str and int values, XCObjects (which are
+        made printable by returning their id property), and list and dict objects
+        composed of any of the above types.  When printing a list or dict, and
+        _should_print_single_line is False, the tabs parameter is used to determine
+        how much to indent the lines corresponding to the items in the list or
+        dict.
 
-    If flatten_list is True, single-element lists will be transformed into
-    strings.
-    """
+        If flatten_list is True, single-element lists will be transformed into
+        strings.
+        """
 
         printable = ""
         comment = None
@@ -659,12 +656,12 @@ def _XCPrintableValue(self, tabs, value, flatten_list=False):
 
     def _XCKVPrint(self, file, tabs, key, value):
         """Prints a key and value, members of an XCObject's _properties dictionary,
-    to file.
+        to file.
 
-    tabs is an int identifying the indentation level.  If the class'
-    _should_print_single_line variable is True, tabs is ignored and the
-    key-value pair will be followed by a space instead of a newline.
-    """
+        tabs is an int identifying the indentation level.  If the class'
+        _should_print_single_line variable is True, tabs is ignored and the
+        key-value pair will be followed by a space instead of a newline.
+        """
 
         if self._should_print_single_line:
             printable = ""
@@ -722,8 +719,8 @@ def _XCKVPrint(self, file, tabs, key, value):
 
     def Print(self, file=sys.stdout):
         """Prints a reprentation of this object to file, adhering to Xcode output
-    formatting.
-    """
+        formatting.
+        """
 
         self.VerifyHasRequiredProperties()
 
@@ -761,15 +758,15 @@ def Print(self, file=sys.stdout):
     def UpdateProperties(self, properties, do_copy=False):
         """Merge the supplied properties into the _properties dictionary.
 
-    The input properties must adhere to the class schema or a KeyError or
-    TypeError exception will be raised.  If adding an object of an XCObject
-    subclass and the schema indicates a strong relationship, the object's
-    parent will be set to this object.
+        The input properties must adhere to the class schema or a KeyError or
+        TypeError exception will be raised.  If adding an object of an XCObject
+        subclass and the schema indicates a strong relationship, the object's
+        parent will be set to this object.
 
-    If do_copy is True, then lists, dicts, strong-owned XCObjects, and
-    strong-owned XCObjects in lists will be copied instead of having their
-    references added.
-    """
+        If do_copy is True, then lists, dicts, strong-owned XCObjects, and
+        strong-owned XCObjects in lists will be copied instead of having their
+        references added.
+        """
 
         if properties is None:
             return
@@ -910,8 +907,8 @@ def AppendProperty(self, key, value):
 
     def VerifyHasRequiredProperties(self):
         """Ensure that all properties identified as required by the schema are
-    set.
-    """
+        set.
+        """
 
         # TODO(mark): A stronger verification mechanism is needed.  Some
         # subclasses need to perform validation beyond what the schema can enforce.
@@ -922,7 +919,7 @@ def VerifyHasRequiredProperties(self):
 
     def _SetDefaultsFromSchema(self):
         """Assign object default values according to the schema.  This will not
-    overwrite properties that have already been set."""
+        overwrite properties that have already been set."""
 
         defaults = {}
         for property, attributes in self._schema.items():
@@ -944,7 +941,7 @@ def _SetDefaultsFromSchema(self):
 
 class XCHierarchicalElement(XCObject):
     """Abstract base for PBXGroup and PBXFileReference.  Not represented in a
-  project file."""
+    project file."""
 
     # TODO(mark): Do name and path belong here?  Probably so.
     # If path is set and name is not, name may have a default value.  Name will
@@ -1010,27 +1007,27 @@ def Name(self):
     def Hashables(self):
         """Custom hashables for XCHierarchicalElements.
 
-    XCHierarchicalElements are special.  Generally, their hashes shouldn't
-    change if the paths don't change.  The normal XCObject implementation of
-    Hashables adds a hashable for each object, which means that if
-    the hierarchical structure changes (possibly due to changes caused when
-    TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
-    the hashes will change.  For example, if a project file initially contains
-    a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent
-    a/b.  If someone later adds a/f2 to the project file, a/b can no longer be
-    collapsed, and f1 winds up with parent b and grandparent a.  That would
-    be sufficient to change f1's hash.
-
-    To counteract this problem, hashables for all XCHierarchicalElements except
-    for the main group (which has neither a name nor a path) are taken to be
-    just the set of path components.  Because hashables are inherited from
-    parents, this provides assurance that a/b/f1 has the same set of hashables
-    whether its parent is b or a/b.
-
-    The main group is a special case.  As it is permitted to have no name or
-    path, it is permitted to use the standard XCObject hash mechanism.  This
-    is not considered a problem because there can be only one main group.
-    """
+        XCHierarchicalElements are special.  Generally, their hashes shouldn't
+        change if the paths don't change.  The normal XCObject implementation of
+        Hashables adds a hashable for each object, which means that if
+        the hierarchical structure changes (possibly due to changes caused when
+        TakeOverOnlyChild runs and encounters slight changes in the hierarchy),
+        the hashes will change.  For example, if a project file initially contains
+        a/b/f1 and a/b becomes collapsed into a/b, f1 will have a single parent
+        a/b.  If someone later adds a/f2 to the project file, a/b can no longer be
+        collapsed, and f1 winds up with parent b and grandparent a.  That would
+        be sufficient to change f1's hash.
+
+        To counteract this problem, hashables for all XCHierarchicalElements except
+        for the main group (which has neither a name nor a path) are taken to be
+        just the set of path components.  Because hashables are inherited from
+        parents, this provides assurance that a/b/f1 has the same set of hashables
+        whether its parent is b or a/b.
+
+        The main group is a special case.  As it is permitted to have no name or
+        path, it is permitted to use the standard XCObject hash mechanism.  This
+        is not considered a problem because there can be only one main group.
+        """
 
         if self == self.PBXProjectAncestor()._properties["mainGroup"]:
             # super
@@ -1051,8 +1048,7 @@ def Hashables(self):
         # including paths with a sourceTree, they'll still inherit their parents'
         # hashables, even though the paths aren't relative to their parents.  This
         # is not expected to be much of a problem in practice.
-        path = self.PathFromSourceTreeAndPath()
-        if path is not None:
+        if (path := self.PathFromSourceTreeAndPath()) is not None:
             components = path.split(posixpath.sep)
             for component in components:
                 hashables.append(self.__class__.__name__ + ".path")
@@ -1160,12 +1156,12 @@ def FullPath(self):
 
 class PBXGroup(XCHierarchicalElement):
     """
-  Attributes:
-    _children_by_path: Maps pathnames of children of this PBXGroup to the
-      actual child XCHierarchicalElement objects.
-    _variant_children_by_name_and_path: Maps (name, path) tuples of
-      PBXVariantGroup children to the actual child PBXVariantGroup objects.
-  """
+    Attributes:
+      _children_by_path: Maps pathnames of children of this PBXGroup to the
+        actual child XCHierarchicalElement objects.
+      _variant_children_by_name_and_path: Maps (name, path) tuples of
+        PBXVariantGroup children to the actual child PBXVariantGroup objects.
+    """
 
     _schema = XCHierarchicalElement._schema.copy()
     _schema.update(
@@ -1284,20 +1280,20 @@ def GetChildByRemoteObject(self, remote_object):
     def AddOrGetFileByPath(self, path, hierarchical):
         """Returns an existing or new file reference corresponding to path.
 
-    If hierarchical is True, this method will create or use the necessary
-    hierarchical group structure corresponding to path.  Otherwise, it will
-    look in and create an item in the current group only.
+        If hierarchical is True, this method will create or use the necessary
+        hierarchical group structure corresponding to path.  Otherwise, it will
+        look in and create an item in the current group only.
 
-    If an existing matching reference is found, it is returned, otherwise, a
-    new one will be created, added to the correct group, and returned.
+        If an existing matching reference is found, it is returned, otherwise, a
+        new one will be created, added to the correct group, and returned.
 
-    If path identifies a directory by virtue of carrying a trailing slash,
-    this method returns a PBXFileReference of "folder" type.  If path
-    identifies a variant, by virtue of it identifying a file inside a directory
-    with an ".lproj" extension, this method returns a PBXVariantGroup
-    containing the variant named by path, and possibly other variants.  For
-    all other paths, a "normal" PBXFileReference will be returned.
-    """
+        If path identifies a directory by virtue of carrying a trailing slash,
+        this method returns a PBXFileReference of "folder" type.  If path
+        identifies a variant, by virtue of it identifying a file inside a directory
+        with an ".lproj" extension, this method returns a PBXVariantGroup
+        containing the variant named by path, and possibly other variants.  For
+        all other paths, a "normal" PBXFileReference will be returned.
+        """
 
         # Adding or getting a directory?  Directories end with a trailing slash.
         is_dir = False
@@ -1382,15 +1378,15 @@ def AddOrGetFileByPath(self, path, hierarchical):
     def AddOrGetVariantGroupByNameAndPath(self, name, path):
         """Returns an existing or new PBXVariantGroup for name and path.
 
-    If a PBXVariantGroup identified by the name and path arguments is already
-    present as a child of this object, it is returned.  Otherwise, a new
-    PBXVariantGroup with the correct properties is created, added as a child,
-    and returned.
+        If a PBXVariantGroup identified by the name and path arguments is already
+        present as a child of this object, it is returned.  Otherwise, a new
+        PBXVariantGroup with the correct properties is created, added as a child,
+        and returned.
 
-    This method will generally be called by AddOrGetFileByPath, which knows
-    when to create a variant group based on the structure of the pathnames
-    passed to it.
-    """
+        This method will generally be called by AddOrGetFileByPath, which knows
+        when to create a variant group based on the structure of the pathnames
+        passed to it.
+        """
 
         key = (name, path)
         if key in self._variant_children_by_name_and_path:
@@ -1408,19 +1404,19 @@ def AddOrGetVariantGroupByNameAndPath(self, name, path):
 
     def TakeOverOnlyChild(self, recurse=False):
         """If this PBXGroup has only one child and it's also a PBXGroup, take
-    it over by making all of its children this object's children.
-
-    This function will continue to take over only children when those children
-    are groups.  If there are three PBXGroups representing a, b, and c, with
-    c inside b and b inside a, and a and b have no other children, this will
-    result in a taking over both b and c, forming a PBXGroup for a/b/c.
-
-    If recurse is True, this function will recurse into children and ask them
-    to collapse themselves by taking over only children as well.  Assuming
-    an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
-    (d1, d2, and f are files, the rest are groups), recursion will result in
-    a group for a/b/c containing a group for d3/e.
-    """
+        it over by making all of its children this object's children.
+
+        This function will continue to take over only children when those children
+        are groups.  If there are three PBXGroups representing a, b, and c, with
+        c inside b and b inside a, and a and b have no other children, this will
+        result in a taking over both b and c, forming a PBXGroup for a/b/c.
+
+        If recurse is True, this function will recurse into children and ask them
+        to collapse themselves by taking over only children as well.  Assuming
+        an example hierarchy with files at a/b/c/d1, a/b/c/d2, and a/b/c/d3/e/f
+        (d1, d2, and f are files, the rest are groups), recursion will result in
+        a group for a/b/c containing a group for d3/e.
+        """
 
         # At this stage, check that child class types are PBXGroup exactly,
         # instead of using isinstance.  The only subclass of PBXGroup,
@@ -1719,16 +1715,16 @@ def DefaultConfiguration(self):
 
     def HasBuildSetting(self, key):
         """Determines the state of a build setting in all XCBuildConfiguration
-    child objects.
+        child objects.
 
-    If all child objects have key in their build settings, and the value is the
-    same in all child objects, returns 1.
+        If all child objects have key in their build settings, and the value is the
+        same in all child objects, returns 1.
 
-    If no child objects have the key in their build settings, returns 0.
+        If no child objects have the key in their build settings, returns 0.
 
-    If some, but not all, child objects have the key in their build settings,
-    or if any children have different values for the key, returns -1.
-    """
+        If some, but not all, child objects have the key in their build settings,
+        or if any children have different values for the key, returns -1.
+        """
 
         has = None
         value = None
@@ -1754,9 +1750,9 @@ def HasBuildSetting(self, key):
     def GetBuildSetting(self, key):
         """Gets the build setting for key.
 
-    All child XCConfiguration objects must have the same value set for the
-    setting, or a ValueError will be raised.
-    """
+        All child XCConfiguration objects must have the same value set for the
+        setting, or a ValueError will be raised.
+        """
 
         # TODO(mark): This is wrong for build settings that are lists.  The list
         # contents should be compared (and a list copy returned?)
@@ -1773,31 +1769,30 @@ def GetBuildSetting(self, key):
 
     def SetBuildSetting(self, key, value):
         """Sets the build setting for key to value in all child
-    XCBuildConfiguration objects.
-    """
+        XCBuildConfiguration objects.
+        """
 
         for configuration in self._properties["buildConfigurations"]:
             configuration.SetBuildSetting(key, value)
 
     def AppendBuildSetting(self, key, value):
         """Appends value to the build setting for key, which is treated as a list,
-    in all child XCBuildConfiguration objects.
-    """
+        in all child XCBuildConfiguration objects.
+        """
 
         for configuration in self._properties["buildConfigurations"]:
             configuration.AppendBuildSetting(key, value)
 
     def DelBuildSetting(self, key):
         """Deletes the build setting key from all child XCBuildConfiguration
-    objects.
-    """
+        objects.
+        """
 
         for configuration in self._properties["buildConfigurations"]:
             configuration.DelBuildSetting(key)
 
     def SetBaseConfiguration(self, value):
-        """Sets the build configuration in all child XCBuildConfiguration objects.
-    """
+        """Sets the build configuration in all child XCBuildConfiguration objects."""
 
         for configuration in self._properties["buildConfigurations"]:
             configuration.SetBaseConfiguration(value)
@@ -1837,14 +1832,14 @@ def Hashables(self):
 
 class XCBuildPhase(XCObject):
     """Abstract base for build phase classes.  Not represented in a project
-  file.
+    file.
 
-  Attributes:
-    _files_by_path: A dict mapping each path of a child in the files list by
-      path (keys) to the corresponding PBXBuildFile children (values).
-    _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
-      to the corresponding PBXBuildFile children (values).
-  """
+    Attributes:
+      _files_by_path: A dict mapping each path of a child in the files list by
+        path (keys) to the corresponding PBXBuildFile children (values).
+      _files_by_xcfilelikeelement: A dict mapping each XCFileLikeElement (keys)
+        to the corresponding PBXBuildFile children (values).
+    """
 
     # TODO(mark): Some build phase types, like PBXShellScriptBuildPhase, don't
     # actually have a "files" list.  XCBuildPhase should not have "files" but
@@ -1883,8 +1878,8 @@ def FileGroup(self, path):
     def _AddPathToDict(self, pbxbuildfile, path):
         """Adds path to the dict tracking paths belonging to this build phase.
 
-    If the path is already a member of this build phase, raises an exception.
-    """
+        If the path is already a member of this build phase, raises an exception.
+        """
 
         if path in self._files_by_path:
             raise ValueError("Found multiple build files with path " + path)
@@ -1893,28 +1888,28 @@ def _AddPathToDict(self, pbxbuildfile, path):
     def _AddBuildFileToDicts(self, pbxbuildfile, path=None):
         """Maintains the _files_by_path and _files_by_xcfilelikeelement dicts.
 
-    If path is specified, then it is the path that is being added to the
-    phase, and pbxbuildfile must contain either a PBXFileReference directly
-    referencing that path, or it must contain a PBXVariantGroup that itself
-    contains a PBXFileReference referencing the path.
-
-    If path is not specified, either the PBXFileReference's path or the paths
-    of all children of the PBXVariantGroup are taken as being added to the
-    phase.
-
-    If the path is already present in the phase, raises an exception.
-
-    If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
-    are already present in the phase, referenced by a different PBXBuildFile
-    object, raises an exception.  This does not raise an exception when
-    a PBXFileReference or PBXVariantGroup reappear and are referenced by the
-    same PBXBuildFile that has already introduced them, because in the case
-    of PBXVariantGroup objects, they may correspond to multiple paths that are
-    not all added simultaneously.  When this situation occurs, the path needs
-    to be added to _files_by_path, but nothing needs to change in
-    _files_by_xcfilelikeelement, and the caller should have avoided adding
-    the PBXBuildFile if it is already present in the list of children.
-    """
+        If path is specified, then it is the path that is being added to the
+        phase, and pbxbuildfile must contain either a PBXFileReference directly
+        referencing that path, or it must contain a PBXVariantGroup that itself
+        contains a PBXFileReference referencing the path.
+
+        If path is not specified, either the PBXFileReference's path or the paths
+        of all children of the PBXVariantGroup are taken as being added to the
+        phase.
+
+        If the path is already present in the phase, raises an exception.
+
+        If the PBXFileReference or PBXVariantGroup referenced by pbxbuildfile
+        are already present in the phase, referenced by a different PBXBuildFile
+        object, raises an exception.  This does not raise an exception when
+        a PBXFileReference or PBXVariantGroup reappear and are referenced by the
+        same PBXBuildFile that has already introduced them, because in the case
+        of PBXVariantGroup objects, they may correspond to multiple paths that are
+        not all added simultaneously.  When this situation occurs, the path needs
+        to be added to _files_by_path, but nothing needs to change in
+        _files_by_xcfilelikeelement, and the caller should have avoided adding
+        the PBXBuildFile if it is already present in the list of children.
+        """
 
         xcfilelikeelement = pbxbuildfile._properties["fileRef"]
 
@@ -2105,12 +2100,11 @@ def FileGroup(self, path):
     def SetDestination(self, path):
         """Set the dstSubfolderSpec and dstPath properties from path.
 
-    path may be specified in the same notation used for XCHierarchicalElements,
-    specifically, "$(DIR)/path".
-    """
+        path may be specified in the same notation used for XCHierarchicalElements,
+        specifically, "$(DIR)/path".
+        """
 
-        path_tree_match = self.path_tree_re.search(path)
-        if path_tree_match:
+        if path_tree_match := self.path_tree_re.search(path):
             path_tree = path_tree_match.group(1)
             if path_tree in self.path_tree_first_to_subfolder:
                 subfolder = self.path_tree_first_to_subfolder[path_tree]
@@ -2182,9 +2176,7 @@ def SetDestination(self, path):
             subfolder = 0
             relative_path = path[1:]
         else:
-            raise ValueError(
-                f"Can't use path {path} in a {self.__class__.__name__}"
-            )
+            raise ValueError(f"Can't use path {path} in a {self.__class__.__name__}")
 
         self._properties["dstPath"] = relative_path
         self._properties["dstSubfolderSpec"] = subfolder
@@ -2534,9 +2526,9 @@ def __init__(
                 # loadable modules, but there's precedent: Python loadable modules on
                 # Mac OS X use an .so extension.
                 if self._properties["productType"] == "com.googlecode.gyp.xcode.bundle":
-                    self._properties[
-                        "productType"
-                    ] = "com.apple.product-type.library.dynamic"
+                    self._properties["productType"] = (
+                        "com.apple.product-type.library.dynamic"
+                    )
                     self.SetBuildSetting("MACH_O_TYPE", "mh_bundle")
                     self.SetBuildSetting("DYLIB_CURRENT_VERSION", "")
                     self.SetBuildSetting("DYLIB_COMPATIBILITY_VERSION", "")
@@ -2544,9 +2536,10 @@ def __init__(
                         force_extension = suffix[1:]
 
                 if (
-                    self._properties["productType"] in {
+                    self._properties["productType"]
+                    in {
                         "com.apple.product-type-bundle.unit.test",
-                        "com.apple.product-type-bundle.ui-testing"
+                        "com.apple.product-type-bundle.ui-testing",
                     }
                 ) and force_extension is None:
                     force_extension = suffix[1:]
@@ -2698,10 +2691,8 @@ def AddDependency(self, other):
                 other._properties["productType"] == static_library_type
                 or (
                     (
-                        other._properties["productType"] in {
-                            shared_library_type,
-                            framework_type
-                        }
+                        other._properties["productType"]
+                        in {shared_library_type, framework_type}
                     )
                     and (
                         (not other.HasBuildSetting("MACH_O_TYPE"))
@@ -2710,7 +2701,6 @@ def AddDependency(self, other):
                 )
             )
         ):
-
             file_ref = other.GetProperty("productReference")
 
             pbxproject = self.PBXProjectAncestor()
@@ -2736,13 +2726,13 @@ class PBXProject(XCContainerPortal):
     # PBXContainerItemProxy.
     """
 
-  Attributes:
-    path: "sample.xcodeproj".  TODO(mark) Document me!
-    _other_pbxprojects: A dictionary, keyed by other PBXProject objects.  Each
-                        value is a reference to the dict in the
-                        projectReferences list associated with the keyed
-                        PBXProject.
-  """
+    Attributes:
+      path: "sample.xcodeproj".  TODO(mark) Document me!
+      _other_pbxprojects: A dictionary, keyed by other PBXProject objects.  Each
+                          value is a reference to the dict in the
+                          projectReferences list associated with the keyed
+                          PBXProject.
+    """
 
     _schema = XCContainerPortal._schema.copy()
     _schema.update(
@@ -2837,17 +2827,17 @@ def ProjectsGroup(self):
     def RootGroupForPath(self, path):
         """Returns a PBXGroup child of this object to which path should be added.
 
-    This method is intended to choose between SourceGroup and
-    IntermediatesGroup on the basis of whether path is present in a source
-    directory or an intermediates directory.  For the purposes of this
-    determination, any path located within a derived file directory such as
-    PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
-    directory.
+        This method is intended to choose between SourceGroup and
+        IntermediatesGroup on the basis of whether path is present in a source
+        directory or an intermediates directory.  For the purposes of this
+        determination, any path located within a derived file directory such as
+        PROJECT_DERIVED_FILE_DIR is treated as being in an intermediates
+        directory.
 
-    The returned value is a two-element tuple.  The first element is the
-    PBXGroup, and the second element specifies whether that group should be
-    organized hierarchically (True) or as a single flat list (False).
-    """
+        The returned value is a two-element tuple.  The first element is the
+        PBXGroup, and the second element specifies whether that group should be
+        organized hierarchically (True) or as a single flat list (False).
+        """
 
         # TODO(mark): make this a class variable and bind to self on call?
         # Also, this list is nowhere near exhaustive.
@@ -2873,11 +2863,11 @@ def RootGroupForPath(self, path):
 
     def AddOrGetFileInRootGroup(self, path):
         """Returns a PBXFileReference corresponding to path in the correct group
-    according to RootGroupForPath's heuristics.
+        according to RootGroupForPath's heuristics.
 
-    If an existing PBXFileReference for path exists, it will be returned.
-    Otherwise, one will be created and returned.
-    """
+        If an existing PBXFileReference for path exists, it will be returned.
+        Otherwise, one will be created and returned.
+        """
 
         (group, hierarchical) = self.RootGroupForPath(path)
         return group.AddOrGetFileByPath(path, hierarchical)
@@ -2927,17 +2917,17 @@ def SortGroups(self):
 
     def AddOrGetProjectReference(self, other_pbxproject):
         """Add a reference to another project file (via PBXProject object) to this
-    one.
+        one.
 
-    Returns [ProductGroup, ProjectRef].  ProductGroup is a PBXGroup object in
-    this project file that contains a PBXReferenceProxy object for each
-    product of each PBXNativeTarget in the other project file.  ProjectRef is
-    a PBXFileReference to the other project file.
+        Returns [ProductGroup, ProjectRef].  ProductGroup is a PBXGroup object in
+        this project file that contains a PBXReferenceProxy object for each
+        product of each PBXNativeTarget in the other project file.  ProjectRef is
+        a PBXFileReference to the other project file.
 
-    If this project file already references the other project file, the
-    existing ProductGroup and ProjectRef are returned.  The ProductGroup will
-    still be updated if necessary.
-    """
+        If this project file already references the other project file, the
+        existing ProductGroup and ProjectRef are returned.  The ProductGroup will
+        still be updated if necessary.
+        """
 
         if "projectReferences" not in self._properties:
             self._properties["projectReferences"] = []
@@ -2989,7 +2979,7 @@ def AddOrGetProjectReference(self, other_pbxproject):
             # Xcode seems to sort this list case-insensitively
             self._properties["projectReferences"] = sorted(
                 self._properties["projectReferences"],
-                key=lambda x: x["ProjectRef"].Name().lower()
+                key=lambda x: x["ProjectRef"].Name().lower(),
             )
         else:
             # The link already exists.  Pull out the relevant data.
@@ -3014,11 +3004,8 @@ def _AllSymrootsUnique(self, target, inherit_unique_symroot):
         # define an explicit value for 'SYMROOT'.
         symroots = self._DefinedSymroots(target)
         for s in self._DefinedSymroots(target):
-            if (
-                (s is not None
-                and not self._IsUniqueSymrootForTarget(s))
-                or (s is None
-                and not inherit_unique_symroot)
+            if (s is not None and not self._IsUniqueSymrootForTarget(s)) or (
+                s is None and not inherit_unique_symroot
             ):
                 return False
         return True if symroots else inherit_unique_symroot
@@ -3122,7 +3109,8 @@ def CompareProducts(x, y, remote_products):
             product_group._properties["children"] = sorted(
                 product_group._properties["children"],
                 key=cmp_to_key(
-                    lambda x, y, rp=remote_products: CompareProducts(x, y, rp)),
+                    lambda x, y, rp=remote_products: CompareProducts(x, y, rp)
+                ),
             )
 
 
@@ -3156,9 +3144,7 @@ def Print(self, file=sys.stdout):
             self._XCPrint(file, 0, "{ ")
         else:
             self._XCPrint(file, 0, "{\n")
-        for property, value in sorted(
-            self._properties.items()
-        ):
+        for property, value in sorted(self._properties.items()):
             if property == "objects":
                 self._PrintObjects(file)
             else:
@@ -3184,9 +3170,7 @@ def _PrintObjects(self, file):
         for class_name in sorted(objects_by_class):
             self._XCPrint(file, 0, "\n")
             self._XCPrint(file, 0, "/* Begin " + class_name + " section */\n")
-            for object in sorted(
-                objects_by_class[class_name], key=attrgetter("id")
-            ):
+            for object in sorted(objects_by_class[class_name], key=attrgetter("id")):
                 object.Print(file)
             self._XCPrint(file, 0, "/* End " + class_name + " section */\n")
 
diff --git a/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py b/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
index 530196366946d..d7e3b5a95604f 100644
--- a/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
+++ b/node_modules/node-gyp/gyp/pylib/gyp/xml_fix.py
@@ -9,7 +9,6 @@
 TODO(bradnelson): Consider dropping this when we drop XP support.
 """
 
-
 import xml.dom.minidom
 
 
diff --git a/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py b/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
index 6fb19b30bb53c..cb33e10556ba1 100644
--- a/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
+++ b/node_modules/node-gyp/gyp/pylib/packaging/_elffile.py
@@ -48,8 +48,7 @@ def __init__(self, f: IO[bytes]) -> None:
             ident = self._read("16B")
         except struct.error:
             raise ELFInvalid("unable to parse identification")
-        magic = bytes(ident[:4])
-        if magic != b"\x7fELF":
+        if (magic := bytes(ident[:4])) != b"\x7fELF":
             raise ELFInvalid(f"invalid magic: {magic!r}")
 
         self.capacity = ident[4]  # Format for program header (bitness).
diff --git a/node_modules/node-gyp/gyp/pylib/packaging/markers.py b/node_modules/node-gyp/gyp/pylib/packaging/markers.py
index 8b98fca7233be..7e4d150208eec 100644
--- a/node_modules/node-gyp/gyp/pylib/packaging/markers.py
+++ b/node_modules/node-gyp/gyp/pylib/packaging/markers.py
@@ -166,8 +166,7 @@ def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
 
 def format_full_version(info: "sys._version_info") -> str:
     version = "{0.major}.{0.minor}.{0.micro}".format(info)
-    kind = info.releaselevel
-    if kind != "final":
+    if (kind := info.releaselevel) != "final":
         version += kind[0] + str(info.serial)
     return version
 
diff --git a/node_modules/node-gyp/gyp/pylib/packaging/metadata.py b/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
index 23bb564f3d5ff..43f5c5b30df97 100644
--- a/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
+++ b/node_modules/node-gyp/gyp/pylib/packaging/metadata.py
@@ -591,8 +591,7 @@ def _process_description_content_type(self, value: str) -> str:
                 f"{{field}} must be one of {list(content_types)}, not {value!r}"
             )
 
-        charset = parameters.get("charset", "UTF-8")
-        if charset != "UTF-8":
+        if (charset := parameters.get("charset", "UTF-8")) != "UTF-8":
             raise self._invalid_metadata(
                 f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
             )
diff --git a/node_modules/node-gyp/gyp/pyproject.toml b/node_modules/node-gyp/gyp/pyproject.toml
index 537308731fe54..3a029c4fc5140 100644
--- a/node_modules/node-gyp/gyp/pyproject.toml
+++ b/node_modules/node-gyp/gyp/pyproject.toml
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "gyp-next"
-version = "0.20.0"
+version = "0.20.4"
 authors = [
   { name="Node.js contributors", email="ryzokuken@disroot.org" },
 ]
@@ -39,7 +39,6 @@ gyp = "gyp:script_main"
 [tool.ruff]
 extend-exclude = ["pylib/packaging"]
 line-length = 88
-target-version = "py37"
 
 [tool.ruff.lint]
 select = [
diff --git a/node_modules/node-gyp/gyp/test_gyp.py b/node_modules/node-gyp/gyp/test_gyp.py
index b7bb956b8ed58..70c81ae8ca3bf 100755
--- a/node_modules/node-gyp/gyp/test_gyp.py
+++ b/node_modules/node-gyp/gyp/test_gyp.py
@@ -5,7 +5,6 @@
 
 """gyptest.py -- test runner for GYP tests."""
 
-
 import argparse
 import os
 import platform
@@ -148,13 +147,13 @@ def print_configuration_info():
     print("Test configuration:")
     if sys.platform == "darwin":
         sys.path.append(os.path.abspath("test/lib"))
-        import TestMac
+        import TestMac  # noqa: PLC0415
 
         print(f"  Mac {platform.mac_ver()[0]} {platform.mac_ver()[2]}")
         print(f"  Xcode {TestMac.Xcode.Version()}")
     elif sys.platform == "win32":
         sys.path.append(os.path.abspath("pylib"))
-        import gyp.MSVSVersion
+        import gyp.MSVSVersion  # noqa: PLC0415
 
         print("  Win %s %s\n" % platform.win32_ver()[0:2])
         print("  MSVS %s" % gyp.MSVSVersion.SelectVisualStudioVersion().Description())
diff --git a/node_modules/node-gyp/lib/install.js b/node_modules/node-gyp/lib/install.js
index 90be86c822c8f..ee4adb1e67fcd 100644
--- a/node_modules/node-gyp/lib/install.js
+++ b/node_modules/node-gyp/lib/install.js
@@ -200,10 +200,10 @@ async function install (gyp, argv) {
     // download the tarball and extract!
    // Omitted on Windows if only new node.lib is required
 
-    // on Windows there can be file errors from tar if parallel installs
+    // there can be file errors from tar if parallel installs
     // are happening (not uncommon with multiple native modules) so
     // extract the tarball to a temp directory first and then copy over
-    const tarExtractDir = win ? await fs.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-')) : devDir
+    const tarExtractDir = await fs.mkdtemp(path.join(os.tmpdir(), 'node-gyp-tmp-'))
 
     try {
       if (shouldDownloadTarball) {
@@ -277,17 +277,13 @@ async function install (gyp, argv) {
       }
 
       // copy over the files from the temp tarball extract directory to devDir
-      if (tarExtractDir !== devDir) {
-        await copyDirectory(tarExtractDir, devDir)
-      }
+      await copyDirectory(tarExtractDir, devDir)
     } finally {
-      if (tarExtractDir !== devDir) {
-        try {
-          // try to cleanup temp dir
-          await fs.rm(tarExtractDir, { recursive: true, maxRetries: 3 })
-        } catch {
-          log.warn('failed to clean up temp tarball extract directory')
-        }
+      try {
+        // try to cleanup temp dir
+        await fs.rm(tarExtractDir, { recursive: true, maxRetries: 3 })
+      } catch {
+        log.warn('failed to clean up temp tarball extract directory')
       }
     }
 
diff --git a/node_modules/node-gyp/lib/node-gyp.js b/node_modules/node-gyp/lib/node-gyp.js
index 5e25bf996f8b2..dafce99d49e35 100644
--- a/node_modules/node-gyp/lib/node-gyp.js
+++ b/node_modules/node-gyp/lib/node-gyp.js
@@ -122,31 +122,42 @@ class Gyp extends EventEmitter {
     }
 
     // support for inheriting config env variables from npm
-    const npmConfigPrefix = 'npm_config_'
-    Object.keys(process.env).forEach((name) => {
-      if (name.indexOf(npmConfigPrefix) !== 0) {
-        return
-      }
-      const val = process.env[name]
-      if (name === npmConfigPrefix + 'loglevel') {
-        log.logger.level = val
-      } else {
+    // npm will set environment variables in the following forms:
+    // - `npm_config_` for values from npm's own config. Setting arbitrary
+    //   options on npm's config was deprecated in npm v11 but node-gyp still
+    //   supports it for backwards compatibility.
+    //   See https://github.com/nodejs/node-gyp/issues/3156
+    // - `npm_package_config_node_gyp_` for values from the `config` object
+    //   in package.json. This is the preferred way to set options for node-gyp
+    //   since npm v11. The `node_gyp_` prefix is used to avoid conflicts with
+    //   other tools.
+    // The `npm_package_config_node_gyp_` prefix will take precedence over
+    // `npm_config_` keys.
+    const npmConfigPrefix = /^npm_config_/i
+    const npmPackageConfigPrefix = /^npm_package_config_node_gyp_/i
+
+    const configEnvKeys = Object.keys(process.env)
+      .filter((k) => npmConfigPrefix.test(k) || npmPackageConfigPrefix.test(k))
+      // sort so that npm_package_config_node_gyp_ keys come last and will override
+      .sort((a) => npmConfigPrefix.test(a) ? -1 : 1)
+
+    for (const key of configEnvKeys) {
       // add the user-defined options to the config
-        name = name.substring(npmConfigPrefix.length)
-        // gyp@741b7f1 enters an infinite loop when it encounters
-        // zero-length options so ensure those don't get through.
-        if (name) {
+      const name = npmConfigPrefix.test(key)
+        ? key.replace(npmConfigPrefix, '')
+        : key.replace(npmPackageConfigPrefix, '')
+      // gyp@741b7f1 enters an infinite loop when it encounters
+      // zero-length options so ensure those don't get through.
+      if (name) {
         // convert names like force_process_config to force-process-config
-          if (name.includes('_')) {
-            name = name.replace(/_/g, '-')
-          }
-          this.opts[name] = val
-        }
+        // and convert to lowercase
+        this.opts[name.replaceAll('_', '-').toLowerCase()] = process.env[key]
       }
-    })
+    }
 
     if (this.opts.loglevel) {
       log.logger.level = this.opts.loglevel
+      delete this.opts.loglevel
     }
     log.resume()
   }
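
The rewritten block above recognises two env prefixes and applies `npm_package_config_node_gyp_*` values after `npm_config_*` ones so they take precedence, normalising option names to lowercase, dash-separated keys. A simplified sketch of that mapping (a hypothetical helper mirroring the loop above, not a node-gyp export):

```js
const npmConfigPrefix = /^npm_config_/i
const npmPackageConfigPrefix = /^npm_package_config_node_gyp_/i

function collectOpts (env) {
  const opts = {}
  const apply = (prefix) => {
    for (const key of Object.keys(env)) {
      if (!prefix.test(key)) continue
      const name = key.replace(prefix, '')
      // zero-length names would send gyp into an infinite loop, so skip them
      if (!name) continue
      // force_process_config -> force-process-config, and lowercase the key
      opts[name.replaceAll('_', '-').toLowerCase()] = env[key]
    }
  }
  apply(npmConfigPrefix) // npm's own config, applied first
  apply(npmPackageConfigPrefix) // package.json "config" values applied last, so they win
  return opts
}

// collectOpts({ npm_config_jobs: '4', npm_package_config_node_gyp_jobs: '8' })
// -> { jobs: '8' }
```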
diff --git a/node_modules/node-gyp/package.json b/node_modules/node-gyp/package.json
index f69a022ef3d12..018391bd38c47 100644
--- a/node_modules/node-gyp/package.json
+++ b/node_modules/node-gyp/package.json
@@ -11,7 +11,7 @@
     "bindings",
     "gyp"
   ],
-  "version": "11.2.0",
+  "version": "11.4.2",
   "installVersion": 11,
   "author": "Nathan Rajlich  (http://tootallnate.net)",
   "repository": {
diff --git a/package-lock.json b/package-lock.json
index b996ef59ed876..168042e76d03b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -124,7 +124,7 @@
         "minipass": "^7.1.1",
         "minipass-pipeline": "^1.2.4",
         "ms": "^2.1.2",
-        "node-gyp": "^11.2.0",
+        "node-gyp": "^11.4.2",
         "nopt": "^8.1.0",
         "normalize-package-data": "^8.0.0",
         "npm-audit-report": "^6.0.0",
@@ -10766,7 +10766,9 @@
       }
     },
     "node_modules/node-gyp": {
-      "version": "11.2.0",
+      "version": "11.4.2",
+      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-11.4.2.tgz",
+      "integrity": "sha512-3gD+6zsrLQH7DyYOUIutaauuXrcyxeTPyQuZQCQoNPZMHMMS5m4y0xclNpvYzoK3VNzuyxT6eF4mkIL4WSZ1eQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
diff --git a/package.json b/package.json
index 7492b8730e67d..68cacc8d773d5 100644
--- a/package.json
+++ b/package.json
@@ -91,7 +91,7 @@
     "minipass": "^7.1.1",
     "minipass-pipeline": "^1.2.4",
     "ms": "^2.1.2",
-    "node-gyp": "^11.2.0",
+    "node-gyp": "^11.4.2",
     "nopt": "^8.1.0",
     "normalize-package-data": "^8.0.0",
     "npm-audit-report": "^6.0.0",

From 9519f189a427eb0a56c846379fdd92ff95078a5b Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:51:44 -0700
Subject: [PATCH 34/63] deps: npm-install-checks@7.1.2

---
 .../npm-install-checks/lib/dev-engines.js     |  6 ++---
 node_modules/npm-install-checks/package.json  |  6 ++---
 package-lock.json                             |  6 +++--
 package.json                                  |  2 +-
 .../test/lib/commands/install.js.test.cjs     | 22 +++++++++----------
 5 files changed, 22 insertions(+), 20 deletions(-)

diff --git a/node_modules/npm-install-checks/lib/dev-engines.js b/node_modules/npm-install-checks/lib/dev-engines.js
index ac5a182330d3b..2c483349ae70a 100644
--- a/node_modules/npm-install-checks/lib/dev-engines.js
+++ b/node_modules/npm-install-checks/lib/dev-engines.js
@@ -90,14 +90,14 @@ function checkDependency (wanted, current, opts) {
 /** checks devEngines package property and returns array of warnings / errors */
 function checkDevEngines (wanted, current = {}, opts = {}) {
   if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) {
-    throw new Error(`Invalid non-object value for devEngines`)
+    throw new Error(`Invalid non-object value for "devEngines"`)
   }
 
   const errors = []
 
   for (const engine of Object.keys(wanted)) {
     if (!recognizedEngines.includes(engine)) {
-      throw new Error(`Invalid property "${engine}"`)
+      throw new Error(`Invalid property "devEngines.${engine}"`)
     }
     const dependencyAsAuthored = wanted[engine]
     const dependencies = [dependencyAsAuthored].flat()
@@ -125,7 +125,7 @@ function checkDevEngines (wanted, current = {}, opts = {}) {
         onFail = 'error'
       }
 
-      const err = Object.assign(new Error(`Invalid engine "${engine}"`), {
+      const err = Object.assign(new Error(`Invalid devEngines.${engine}`), {
         errors: depErrors,
         engine,
         isWarn: onFail === 'warn',
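
With this change, `checkDevEngines` failures name the offending `package.json` field (`Invalid devEngines.runtime`) rather than the bare engine name. A hedged usage sketch; the shape of the `current` argument is inferred from the snapshot output further down and should be treated as an assumption:

```js
const { checkDevEngines } = require('npm-install-checks')

// "devEngines" as a package author might write it in package.json; the
// values mirror the refreshed snapshots below
const wanted = {
  runtime: { name: 'nondescript', onFail: 'warn' },
}

// current environment, keyed per engine -- inferred shape, not documented API
const current = {
  runtime: { name: 'node', version: 'v1337.0.0' },
}

const problems = checkDevEngines(wanted, current)
// each entry is an Error; with 7.1.2 its message reads
// "Invalid devEngines.runtime" instead of 'Invalid engine "runtime"'
console.log(problems.map((err) => err.message))
```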
diff --git a/node_modules/npm-install-checks/package.json b/node_modules/npm-install-checks/package.json
index 967f5f659b2fa..28a23354bdbfe 100644
--- a/node_modules/npm-install-checks/package.json
+++ b/node_modules/npm-install-checks/package.json
@@ -1,6 +1,6 @@
 {
   "name": "npm-install-checks",
-  "version": "7.1.1",
+  "version": "7.1.2",
   "description": "Check the engines and platform fields in package.json",
   "main": "lib/index.js",
   "dependencies": {
@@ -8,7 +8,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.0",
-    "@npmcli/template-oss": "4.23.4",
+    "@npmcli/template-oss": "4.25.0",
     "tap": "^16.0.1"
   },
   "scripts": {
@@ -40,7 +40,7 @@
   "author": "GitHub Inc.",
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.23.4",
+    "version": "4.25.0",
     "publish": "true"
   },
   "tap": {
diff --git a/package-lock.json b/package-lock.json
index 168042e76d03b..3a77ab432ae4a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -128,7 +128,7 @@
         "nopt": "^8.1.0",
         "normalize-package-data": "^8.0.0",
         "npm-audit-report": "^6.0.0",
-        "npm-install-checks": "^7.1.1",
+        "npm-install-checks": "^7.1.2",
         "npm-package-arg": "^13.0.0",
         "npm-pick-manifest": "^11.0.1",
         "npm-profile": "^12.0.0",
@@ -11037,7 +11037,9 @@
       }
     },
     "node_modules/npm-install-checks": {
-      "version": "7.1.1",
+      "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
+      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
diff --git a/package.json b/package.json
index 68cacc8d773d5..865f53536461e 100644
--- a/package.json
+++ b/package.json
@@ -95,7 +95,7 @@
     "nopt": "^8.1.0",
     "normalize-package-data": "^8.0.0",
     "npm-audit-report": "^6.0.0",
-    "npm-install-checks": "^7.1.1",
+    "npm-install-checks": "^7.1.2",
     "npm-package-arg": "^13.0.0",
     "npm-pick-manifest": "^11.0.1",
     "npm-profile": "^12.0.0",
diff --git a/tap-snapshots/test/lib/commands/install.js.test.cjs b/tap-snapshots/test/lib/commands/install.js.test.cjs
index dd07bce07de7f..3c9fa9bbec447 100644
--- a/tap-snapshots/test/lib/commands/install.js.test.cjs
+++ b/tap-snapshots/test/lib/commands/install.js.test.cjs
@@ -16,7 +16,7 @@ verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
 verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
 silly logfile done cleaning log files
 warn EBADDEVENGINES The developer of this package has specified the following through devEngines
-warn EBADDEVENGINES Invalid engine "runtime"
+warn EBADDEVENGINES Invalid devEngines.runtime
 warn EBADDEVENGINES Invalid semver version "0.0.1" does not match "v1337.0.0" for "runtime"
 warn EBADDEVENGINES {
 warn EBADDEVENGINES   current: { name: 'node', version: 'v1337.0.0' },
@@ -132,14 +132,14 @@ verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
 verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
 silly logfile done cleaning log files
 verbose stack Error: The developer of this package has specified the following through devEngines
-verbose stack Invalid engine "runtime"
+verbose stack Invalid devEngines.runtime
 verbose stack Invalid name "nondescript" does not match "node" for "runtime"
 verbose stack     at Install.checkDevEngines ({CWD}/lib/base-cmd.js:181:27)
 verbose stack     at MockNpm.#exec ({CWD}/lib/npm.js:252:7)
 verbose stack     at MockNpm.exec ({CWD}/lib/npm.js:208:9)
 error code EBADDEVENGINES
 error EBADDEVENGINES The developer of this package has specified the following through devEngines
-error EBADDEVENGINES Invalid engine "runtime"
+error EBADDEVENGINES Invalid devEngines.runtime
 error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
 error EBADDEVENGINES {
 error EBADDEVENGINES   current: { name: 'node', version: 'v1337.0.0' },
@@ -158,13 +158,13 @@ verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
 verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
 silly logfile done cleaning log files
 warn EBADDEVENGINES The developer of this package has specified the following through devEngines
-warn EBADDEVENGINES Invalid engine "runtime"
+warn EBADDEVENGINES Invalid devEngines.runtime
 warn EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
 warn EBADDEVENGINES {
 warn EBADDEVENGINES   current: { name: 'node', version: 'v1337.0.0' },
 warn EBADDEVENGINES   required: { name: 'nondescript', onFail: 'warn' }
 warn EBADDEVENGINES }
-warn EBADDEVENGINES Invalid engine "cpu"
+warn EBADDEVENGINES Invalid devEngines.cpu
 warn EBADDEVENGINES Invalid name "risv" does not match "x86" for "cpu"
 warn EBADDEVENGINES {
 warn EBADDEVENGINES   current: { name: 'x86' },
@@ -190,21 +190,21 @@ verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
 verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
 silly logfile done cleaning log files
 warn EBADDEVENGINES The developer of this package has specified the following through devEngines
-warn EBADDEVENGINES Invalid engine "cpu"
+warn EBADDEVENGINES Invalid devEngines.cpu
 warn EBADDEVENGINES Invalid name "risv" does not match "x86" for "cpu"
 warn EBADDEVENGINES {
 warn EBADDEVENGINES   current: { name: 'x86' },
 warn EBADDEVENGINES   required: { name: 'risv', onFail: 'warn' }
 warn EBADDEVENGINES }
 verbose stack Error: The developer of this package has specified the following through devEngines
-verbose stack Invalid engine "runtime"
+verbose stack Invalid devEngines.runtime
 verbose stack Invalid name "nondescript" does not match "node" for "runtime"
 verbose stack     at Install.checkDevEngines ({CWD}/lib/base-cmd.js:181:27)
 verbose stack     at MockNpm.#exec ({CWD}/lib/npm.js:252:7)
 verbose stack     at MockNpm.exec ({CWD}/lib/npm.js:208:9)
 error code EBADDEVENGINES
 error EBADDEVENGINES The developer of this package has specified the following through devEngines
-error EBADDEVENGINES Invalid engine "runtime"
+error EBADDEVENGINES Invalid devEngines.runtime
 error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
 error EBADDEVENGINES {
 error EBADDEVENGINES   current: { name: 'node', version: 'v1337.0.0' },
@@ -223,14 +223,14 @@ verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
 verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
 silly logfile done cleaning log files
 verbose stack Error: The developer of this package has specified the following through devEngines
-verbose stack Invalid engine "runtime"
+verbose stack Invalid devEngines.runtime
 verbose stack Invalid name "nondescript" does not match "node" for "runtime"
 verbose stack     at Install.checkDevEngines ({CWD}/lib/base-cmd.js:181:27)
 verbose stack     at MockNpm.#exec ({CWD}/lib/npm.js:252:7)
 verbose stack     at MockNpm.exec ({CWD}/lib/npm.js:208:9)
 error code EBADDEVENGINES
 error EBADDEVENGINES The developer of this package has specified the following through devEngines
-error EBADDEVENGINES Invalid engine "runtime"
+error EBADDEVENGINES Invalid devEngines.runtime
 error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
 error EBADDEVENGINES {
 error EBADDEVENGINES   current: { name: 'node', version: 'v1337.0.0' },
@@ -250,7 +250,7 @@ verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
 warn using --force Recommended protections disabled.
 silly logfile done cleaning log files
 warn EBADDEVENGINES The developer of this package has specified the following through devEngines
-warn EBADDEVENGINES Invalid engine "runtime"
+warn EBADDEVENGINES Invalid devEngines.runtime
 warn EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
 warn EBADDEVENGINES {
 warn EBADDEVENGINES   current: { name: 'node', version: 'v1337.0.0' },

From 6a392f36312b71cc4b0e71c25b4c95f47d1eeaf8 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 10:55:14 -0700
Subject: [PATCH 35/63] deps: tinyglobby@0.2.15

---
 .../tinyglobby/dist/{index.js => index.cjs}   | 207 ++++++++++++------
 node_modules/tinyglobby/dist/index.d.cts      | 147 +++++++++++++
 node_modules/tinyglobby/dist/index.d.mts      | 157 ++++++++++---
 node_modules/tinyglobby/dist/index.mjs        | 196 ++++++++++++-----
 .../node_modules/fdir/dist/api/async.js       |  19 --
 .../node_modules/fdir/dist/api/counter.js     |  27 ---
 .../fdir/dist/api/functions/get-array.js      |  13 --
 .../fdir/dist/api/functions/group-files.js    |  11 -
 .../dist/api/functions/invoke-callback.js     |  57 -----
 .../fdir/dist/api/functions/join-path.js      |  36 ---
 .../fdir/dist/api/functions/push-directory.js |  37 ----
 .../fdir/dist/api/functions/push-file.js      |  33 ---
 .../dist/api/functions/resolve-symlink.js     |  67 ------
 .../fdir/dist/api/functions/walk-directory.js |  40 ----
 .../node_modules/fdir/dist/api/queue.js       |  29 ---
 .../node_modules/fdir/dist/api/sync.js        |   9 -
 .../node_modules/fdir/dist/api/walker.js      | 129 -----------
 .../fdir/dist/builder/api-builder.js          |  23 --
 .../node_modules/fdir/dist/builder/index.js   | 136 ------------
 .../node_modules/fdir/dist/index.cjs          |  46 ++--
 .../node_modules/fdir/dist/index.d.cts        |  25 ++-
 .../node_modules/fdir/dist/index.d.mts        |  25 ++-
 .../node_modules/fdir/dist/index.js           |  20 --
 .../node_modules/fdir/dist/index.mjs          |  36 ++-
 .../node_modules/fdir/dist/types.js           |   2 -
 .../node_modules/fdir/dist/utils.js           |  37 ----
 .../tinyglobby/node_modules/fdir/package.json |  23 +-
 node_modules/tinyglobby/package.json          |  48 ++--
 package-lock.json                             |  17 +-
 29 files changed, 720 insertions(+), 932 deletions(-)
 rename node_modules/tinyglobby/dist/{index.js => index.cjs} (57%)
 create mode 100644 node_modules/tinyglobby/dist/index.d.cts
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/async.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/index.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/types.js
 delete mode 100644 node_modules/tinyglobby/node_modules/fdir/dist/utils.js

diff --git a/node_modules/tinyglobby/dist/index.js b/node_modules/tinyglobby/dist/index.cjs
similarity index 57%
rename from node_modules/tinyglobby/dist/index.js
rename to node_modules/tinyglobby/dist/index.cjs
index 1e05d89e7ebf1..e5cb03ccec9ac 100644
--- a/node_modules/tinyglobby/dist/index.js
+++ b/node_modules/tinyglobby/dist/index.cjs
@@ -21,39 +21,49 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 }) : target, mod));
 
 //#endregion
-const path = __toESM(require("path"));
-const fdir = __toESM(require("fdir"));
-const picomatch = __toESM(require("picomatch"));
+let fs = require("fs");
+fs = __toESM(fs);
+let path = require("path");
+path = __toESM(path);
+let url = require("url");
+url = __toESM(url);
+let fdir = require("fdir");
+fdir = __toESM(fdir);
+let picomatch = require("picomatch");
+picomatch = __toESM(picomatch);
 
 //#region src/utils.ts
+const isReadonlyArray = Array.isArray;
+const isWin = process.platform === "win32";
 const ONLY_PARENT_DIRECTORIES = /^(\/?\.\.)+$/;
-function getPartialMatcher(patterns, options) {
+function getPartialMatcher(patterns, options = {}) {
 	const patternsCount = patterns.length;
 	const patternsParts = Array(patternsCount);
-	const regexes = Array(patternsCount);
+	const matchers = Array(patternsCount);
+	const globstarEnabled = !options.noglobstar;
 	for (let i = 0; i < patternsCount; i++) {
 		const parts = splitPattern(patterns[i]);
 		patternsParts[i] = parts;
 		const partsCount = parts.length;
-		const partRegexes = Array(partsCount);
-		for (let j = 0; j < partsCount; j++) partRegexes[j] = picomatch.default.makeRe(parts[j], options);
-		regexes[i] = partRegexes;
+		const partMatchers = Array(partsCount);
+		for (let j = 0; j < partsCount; j++) partMatchers[j] = (0, picomatch.default)(parts[j], options);
+		matchers[i] = partMatchers;
 	}
 	return (input) => {
 		const inputParts = input.split("/");
 		if (inputParts[0] === ".." && ONLY_PARENT_DIRECTORIES.test(input)) return true;
 		for (let i = 0; i < patterns.length; i++) {
 			const patternParts = patternsParts[i];
-			const regex = regexes[i];
+			const matcher = matchers[i];
 			const inputPatternCount = inputParts.length;
 			const minParts = Math.min(inputPatternCount, patternParts.length);
 			let j = 0;
 			while (j < minParts) {
 				const part = patternParts[j];
 				if (part.includes("/")) return true;
-				const match = regex[j].test(inputParts[j]);
+				const match = matcher[j](inputParts[j]);
 				if (!match) break;
-				if (part === "**") return true;
+				if (globstarEnabled && part === "**") return true;
 				j++;
 			}
 			if (j === inputPatternCount) return true;
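
The hunk above swaps the precompiled per-segment regexes for per-segment picomatch matchers and only short-circuits on `**` when globstar is enabled. A rough, self-contained illustration of the per-segment partial-matching idea (option handling from the real code is omitted; `makePartialMatcher` is an illustrative name, not a tinyglobby export):

```js
const picomatch = require('picomatch')

function makePartialMatcher (pattern) {
  // split the pattern into path segments, as splitPattern does above
  const parts = picomatch.scan(pattern, { parts: true }).parts || [pattern]
  const matchers = parts.map((part) => picomatch(part))
  return (inputPath) => {
    const segments = inputPath.split('/')
    for (let i = 0; i < Math.min(segments.length, parts.length); i++) {
      // a globstar segment means anything below here could still match
      if (parts[i] === '**') return true
      if (!matchers[i](segments[i])) return false
    }
    // every compared segment matched, so keep descending
    return true
  }
}

// e.g. with the pattern 'src/**/*.ts':
//   makePartialMatcher('src/**/*.ts')('src')          -> true  (keep crawling)
//   makePartialMatcher('src/**/*.ts')('node_modules') -> false (prune)
```

The crawler uses this kind of check in its `exclude` filter, so whole directory trees that can never satisfy a pattern are skipped without stat-ing their contents.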
@@ -61,13 +71,43 @@ function getPartialMatcher(patterns, options) {
 		return false;
 	};
 }
+/* node:coverage ignore next 2 */
+const WIN32_ROOT_DIR = /^[A-Z]:\/$/i;
+const isRoot = isWin ? (p) => WIN32_ROOT_DIR.test(p) : (p) => p === "/";
+function buildFormat(cwd, root, absolute) {
+	if (cwd === root || root.startsWith(`${cwd}/`)) {
+		if (absolute) {
+			const start = isRoot(cwd) ? cwd.length : cwd.length + 1;
+			return (p, isDir) => p.slice(start, isDir ? -1 : void 0) || ".";
+		}
+		const prefix = root.slice(cwd.length + 1);
+		if (prefix) return (p, isDir) => {
+			if (p === ".") return prefix;
+			const result = `${prefix}/${p}`;
+			return isDir ? result.slice(0, -1) : result;
+		};
+		return (p, isDir) => isDir && p !== "." ? p.slice(0, -1) : p;
+	}
+	if (absolute) return (p) => path.posix.relative(cwd, p) || ".";
+	return (p) => path.posix.relative(cwd, `${root}/${p}`) || ".";
+}
+function buildRelative(cwd, root) {
+	if (root.startsWith(`${cwd}/`)) {
+		const prefix = root.slice(cwd.length + 1);
+		return (p) => `${prefix}/${p}`;
+	}
+	return (p) => {
+		const result = path.posix.relative(cwd, `${root}/${p}`);
+		if (p.endsWith("/") && result !== "") return `${result}/`;
+		return result || ".";
+	};
+}
 const splitPatternOptions = { parts: true };
 function splitPattern(path$2) {
 	var _result$parts;
 	const result = picomatch.default.scan(path$2, splitPatternOptions);
 	return ((_result$parts = result.parts) === null || _result$parts === void 0 ? void 0 : _result$parts.length) ? result.parts : [path$2];
 }
-const isWin = process.platform === "win32";
 const ESCAPED_WIN32_BACKSLASHES = /\\(?![()[\]{}!+@])/g;
 function convertPosixPathToPattern(path$2) {
 	return escapePosixPath(path$2);
@@ -75,19 +115,42 @@ function convertPosixPathToPattern(path$2) {
 function convertWin32PathToPattern(path$2) {
 	return escapeWin32Path(path$2).replace(ESCAPED_WIN32_BACKSLASHES, "/");
 }
+/**
+* Converts a path to a pattern depending on the platform.
+* Identical to {@link escapePath} on POSIX systems.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#convertPathToPattern}
+*/
+/* node:coverage ignore next 3 */
 const convertPathToPattern = isWin ? convertWin32PathToPattern : convertPosixPathToPattern;
 const POSIX_UNESCAPED_GLOB_SYMBOLS = /(? path$2.replace(POSIX_UNESCAPED_GLOB_SYMBOLS, "\\$&");
 const escapeWin32Path = (path$2) => path$2.replace(WIN32_UNESCAPED_GLOB_SYMBOLS, "\\$&");
+/**
+* Escapes a path's special characters depending on the platform.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#escapePath}
+*/
+/* node:coverage ignore next */
 const escapePath = isWin ? escapeWin32Path : escapePosixPath;
+/**
+* Checks if a pattern has dynamic parts.
+*
+* Has a few minor differences with [`fast-glob`](https://github.com/mrmlnc/fast-glob) for better accuracy:
+*
+* - Doesn't necessarily return `false` on patterns that include `\`.
+* - Returns `true` if the pattern includes parentheses, regardless of them representing one single pattern or not.
+* - Returns `true` for unfinished glob extensions i.e. `(h`, `+(h`.
+* - Returns `true` for unfinished brace expansions as long as they include `,` or `..`.
+*
+* @see {@link https://superchupu.dev/tinyglobby/documentation#isDynamicPattern}
+*/
 function isDynamicPattern(pattern, options) {
 	if ((options === null || options === void 0 ? void 0 : options.caseSensitiveMatch) === false) return true;
 	const scan = picomatch.default.scan(pattern);
 	return scan.isGlob || scan.negated;
 }
 function log(...tasks) {
-	console.log(`[tinyglobby ${new Date().toLocaleTimeString("es")}]`, ...tasks);
+	console.log(`[tinyglobby ${(/* @__PURE__ */ new Date()).toLocaleTimeString("es")}]`, ...tasks);
 }
 
 //#endregion
@@ -134,13 +197,12 @@ function normalizePattern(pattern, expandDirectories, cwd, props, isIgnore) {
 		}
 		props.depthOffset = newCommonPath.length;
 		props.commonPath = newCommonPath;
-		props.root = newCommonPath.length > 0 ? path.default.posix.join(cwd, ...newCommonPath) : cwd;
+		props.root = newCommonPath.length > 0 ? path.posix.join(cwd, ...newCommonPath) : cwd;
 	}
 	return result;
 }
-function processPatterns({ patterns, ignore = [], expandDirectories = true }, cwd, props) {
+function processPatterns({ patterns = ["**/*"], ignore = [], expandDirectories = true }, cwd, props) {
 	if (typeof patterns === "string") patterns = [patterns];
-	else if (!patterns) patterns = ["**/*"];
 	if (typeof ignore === "string") ignore = [ignore];
 	const matchPatterns = [];
 	const ignorePatterns = [];
@@ -158,66 +220,88 @@ function processPatterns({ patterns, ignore = [], expandDirectories = true }, cw
 		ignore: ignorePatterns
 	};
 }
-function getRelativePath(path$2, cwd, root) {
-	return path.posix.relative(cwd, `${root}/${path$2}`) || ".";
-}
-function processPath(path$2, cwd, root, isDirectory, absolute) {
-	const relativePath = absolute ? path$2.slice(root === "/" ? 1 : root.length + 1) || "." : path$2;
-	if (root === cwd) return isDirectory && relativePath !== "." ? relativePath.slice(0, -1) : relativePath;
-	return getRelativePath(relativePath, cwd, root);
-}
-function formatPaths(paths, cwd, root) {
+function formatPaths(paths, relative) {
 	for (let i = paths.length - 1; i >= 0; i--) {
 		const path$2 = paths[i];
-		paths[i] = getRelativePath(path$2, cwd, root) + (!path$2 || path$2.endsWith("/") ? "/" : "");
+		paths[i] = relative(path$2);
 	}
 	return paths;
 }
-function crawl(options, cwd, sync) {
-	if (process.env.TINYGLOBBY_DEBUG) options.debug = true;
-	if (options.debug) log("globbing with options:", options, "cwd:", cwd);
-	if (Array.isArray(options.patterns) && options.patterns.length === 0) return sync ? [] : Promise.resolve([]);
+function normalizeCwd(cwd) {
+	if (!cwd) return process.cwd().replace(BACKSLASHES, "/");
+	if (cwd instanceof URL) return (0, url.fileURLToPath)(cwd).replace(BACKSLASHES, "/");
+	return path.default.resolve(cwd).replace(BACKSLASHES, "/");
+}
+function getCrawler(patterns, inputOptions = {}) {
+	const options = process.env.TINYGLOBBY_DEBUG ? {
+		...inputOptions,
+		debug: true
+	} : inputOptions;
+	const cwd = normalizeCwd(options.cwd);
+	if (options.debug) log("globbing with:", {
+		patterns,
+		options,
+		cwd
+	});
+	if (Array.isArray(patterns) && patterns.length === 0) return [{
+		sync: () => [],
+		withPromise: async () => []
+	}, false];
 	const props = {
 		root: cwd,
 		commonPath: null,
 		depthOffset: 0
 	};
-	const processed = processPatterns(options, cwd, props);
-	const nocase = options.caseSensitiveMatch === false;
+	const processed = processPatterns({
+		...options,
+		patterns
+	}, cwd, props);
 	if (options.debug) log("internal processing patterns:", processed);
-	const matcher = (0, picomatch.default)(processed.match, {
+	const matchOptions = {
 		dot: options.dot,
-		nocase,
+		nobrace: options.braceExpansion === false,
+		nocase: options.caseSensitiveMatch === false,
+		noextglob: options.extglob === false,
+		noglobstar: options.globstar === false,
+		posix: true
+	};
+	const matcher = (0, picomatch.default)(processed.match, {
+		...matchOptions,
 		ignore: processed.ignore
 	});
-	const ignore = (0, picomatch.default)(processed.ignore, {
-		dot: options.dot,
-		nocase
-	});
-	const partialMatcher = getPartialMatcher(processed.match, {
-		dot: options.dot,
-		nocase
-	});
+	const ignore = (0, picomatch.default)(processed.ignore, matchOptions);
+	const partialMatcher = getPartialMatcher(processed.match, matchOptions);
+	const format = buildFormat(cwd, props.root, options.absolute);
+	const formatExclude = options.absolute ? format : buildFormat(cwd, props.root, true);
 	const fdirOptions = {
 		filters: [options.debug ? (p, isDirectory) => {
-			const path$2 = processPath(p, cwd, props.root, isDirectory, options.absolute);
+			const path$2 = format(p, isDirectory);
 			const matches = matcher(path$2);
 			if (matches) log(`matched ${path$2}`);
 			return matches;
-		} : (p, isDirectory) => matcher(processPath(p, cwd, props.root, isDirectory, options.absolute))],
+		} : (p, isDirectory) => matcher(format(p, isDirectory))],
 		exclude: options.debug ? (_, p) => {
-			const relativePath = processPath(p, cwd, props.root, true, true);
+			const relativePath = formatExclude(p, true);
 			const skipped = relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
 			if (skipped) log(`skipped ${p}`);
 			else log(`crawling ${p}`);
 			return skipped;
 		} : (_, p) => {
-			const relativePath = processPath(p, cwd, props.root, true, true);
+			const relativePath = formatExclude(p, true);
 			return relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
 		},
+		fs: options.fs ? {
+			readdir: options.fs.readdir || fs.default.readdir,
+			readdirSync: options.fs.readdirSync || fs.default.readdirSync,
+			realpath: options.fs.realpath || fs.default.realpath,
+			realpathSync: options.fs.realpathSync || fs.default.realpathSync,
+			stat: options.fs.stat || fs.default.stat,
+			statSync: options.fs.statSync || fs.default.statSync
+		} : void 0,
 		pathSeparator: "/",
 		relativePaths: true,
-		resolveSymlinks: true
+		resolveSymlinks: true,
+		signal: options.signal
 	};
 	if (options.deep !== void 0) fdirOptions.maxDepth = Math.round(options.deep - props.depthOffset);
 	if (options.absolute) {
@@ -236,27 +320,26 @@ function crawl(options, cwd, sync) {
 	props.root = props.root.replace(BACKSLASHES, "");
 	const root = props.root;
 	if (options.debug) log("internal properties:", props);
-	const api = new fdir.fdir(fdirOptions).crawl(root);
-	if (cwd === root || options.absolute) return sync ? api.sync() : api.withPromise();
-	return sync ? formatPaths(api.sync(), cwd, root) : api.withPromise().then((paths) => formatPaths(paths, cwd, root));
+	const relative = cwd !== root && !options.absolute && buildRelative(cwd, props.root);
+	return [new fdir.fdir(fdirOptions).crawl(root), relative];
 }
 async function glob(patternsOrOptions, options) {
 	if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
-	const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
-		...options,
-		patterns: patternsOrOptions
-	} : patternsOrOptions;
-	const cwd = opts.cwd ? path.default.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
-	return crawl(opts, cwd, false);
+	const isModern = isReadonlyArray(patternsOrOptions) || typeof patternsOrOptions === "string";
+	const opts = isModern ? options : patternsOrOptions;
+	const patterns = isModern ? patternsOrOptions : patternsOrOptions.patterns;
+	const [crawler, relative] = getCrawler(patterns, opts);
+	if (!relative) return crawler.withPromise();
+	return formatPaths(await crawler.withPromise(), relative);
 }
 function globSync(patternsOrOptions, options) {
 	if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
-	const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
-		...options,
-		patterns: patternsOrOptions
-	} : patternsOrOptions;
-	const cwd = opts.cwd ? path.default.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
-	return crawl(opts, cwd, true);
+	const isModern = isReadonlyArray(patternsOrOptions) || typeof patternsOrOptions === "string";
+	const opts = isModern ? options : patternsOrOptions;
+	const patterns = isModern ? patternsOrOptions : patternsOrOptions.patterns;
+	const [crawler, relative] = getCrawler(patterns, opts);
+	if (!relative) return crawler.sync();
+	return formatPaths(crawler.sync(), relative);
 }
 
 //#endregion
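
For context on the API shape this hunk settles on: `glob` and `globSync` now take the patterns as the first argument, a bare options object (with the deprecated `patterns` field) is still accepted, and supplying patterns both ways throws. A minimal usage sketch, not part of the patch; the file patterns are illustrative:

    import { glob, globSync } from "tinyglobby";

    // modern form: patterns first, options second
    const files = await glob(["src/**/*.js"], { ignore: ["**/*.test.js"] });

    // deprecated form: a single options object carrying `patterns`
    const same = globSync({ patterns: ["src/**/*.js"], ignore: ["**/*.test.js"] });

    // mixing both forms throws: "Cannot pass patterns as both an argument and an option"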
diff --git a/node_modules/tinyglobby/dist/index.d.cts b/node_modules/tinyglobby/dist/index.d.cts
new file mode 100644
index 0000000000000..9d67dae260a76
--- /dev/null
+++ b/node_modules/tinyglobby/dist/index.d.cts
@@ -0,0 +1,147 @@
+import { FSLike } from "fdir";
+
+//#region src/utils.d.ts
+
+/**
+* Converts a path to a pattern depending on the platform.
+* Identical to {@link escapePath} on POSIX systems.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#convertPathToPattern}
+*/
+declare const convertPathToPattern: (path: string) => string;
+/**
+* Escapes a path's special characters depending on the platform.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#escapePath}
+*/
+declare const escapePath: (path: string) => string;
+/**
+* Checks if a pattern has dynamic parts.
+*
+* Has a few minor differences with [`fast-glob`](https://github.com/mrmlnc/fast-glob) for better accuracy:
+*
+* - Doesn't necessarily return `false` on patterns that include `\`.
+* - Returns `true` if the pattern includes parentheses, regardless of them representing one single pattern or not.
+* - Returns `true` for unfinished glob extensions i.e. `(h`, `+(h`.
+* - Returns `true` for unfinished brace expansions as long as they include `,` or `..`.
+*
+* @see {@link https://superchupu.dev/tinyglobby/documentation#isDynamicPattern}
+*/
+declare function isDynamicPattern(pattern: string, options?: {
+  caseSensitiveMatch: boolean;
+}): boolean;
+//#endregion
+//#region src/index.d.ts
+interface GlobOptions {
+  /**
+  * Whether to return absolute paths. Disable to have relative paths.
+  * @default false
+  */
+  absolute?: boolean;
+  /**
+  * Enables support for brace expansion syntax, like `{a,b}` or `{1..9}`.
+  * @default true
+  */
+  braceExpansion?: boolean;
+  /**
+  * Whether to match in case-sensitive mode.
+  * @default true
+  */
+  caseSensitiveMatch?: boolean;
+  /**
+  * The working directory in which to search. Results will be returned relative to this directory, unless
+  * {@link absolute} is set.
+  *
+  * It is important to avoid globbing outside this directory when possible, even with absolute paths enabled,
+  * as doing so can harm performance due to having to recalculate relative paths.
+  * @default process.cwd()
+  */
+  cwd?: string | URL;
+  /**
+  * Logs useful debug information. Meant for development purposes. Logs can change at any time.
+  * @default false
+  */
+  debug?: boolean;
+  /**
+  * Maximum directory depth to crawl.
+  * @default Infinity
+  */
+  deep?: number;
+  /**
+  * Whether to return entries that start with a dot, like `.gitignore` or `.prettierrc`.
+  * @default false
+  */
+  dot?: boolean;
+  /**
+  * Whether to automatically expand directory patterns.
+  *
+  * Important to disable if migrating from [`fast-glob`](https://github.com/mrmlnc/fast-glob).
+  * @default true
+  */
+  expandDirectories?: boolean;
+  /**
+  * Enables support for extglobs, like `+(pattern)`.
+  * @default true
+  */
+  extglob?: boolean;
+  /**
+  * Whether to traverse and include symbolic links. Can slightly affect performance.
+  * @default true
+  */
+  followSymbolicLinks?: boolean;
+  /**
+  * An object that overrides `node:fs` functions.
+  * @default import('node:fs')
+  */
+  fs?: FileSystemAdapter;
+  /**
+  * Enables support for matching nested directories with globstars (`**`).
+  * If `false`, `**` behaves exactly like `*`.
+  * @default true
+  */
+  globstar?: boolean;
+  /**
+  * Glob patterns to exclude from the results.
+  * @default []
+  */
+  ignore?: string | readonly string[];
+  /**
+  * Enable to only return directories.
+  * If `true`, disables {@link onlyFiles}.
+  * @default false
+  */
+  onlyDirectories?: boolean;
+  /**
+  * Enable to only return files.
+  * @default true
+  */
+  onlyFiles?: boolean;
+  /**
+  * @deprecated Provide patterns as the first argument instead.
+  */
+  patterns?: string | readonly string[];
+  /**
+  * An `AbortSignal` to abort crawling the file system.
+  * @default undefined
+  */
+  signal?: AbortSignal;
+}
+type FileSystemAdapter = Partial<FSLike>;
+/**
+* Asynchronously match files following a glob pattern.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#glob}
+*/
+declare function glob(patterns: string | readonly string[], options?: Omit<GlobOptions, "patterns">): Promise<string[]>;
+/**
+* @deprecated Provide patterns as the first argument instead.
+*/
+declare function glob(options: GlobOptions): Promise<string[]>;
+/**
+* Synchronously match files following a glob pattern.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#globSync}
+*/
+declare function globSync(patterns: string | readonly string[], options?: Omit<GlobOptions, "patterns">): string[];
+/**
+* @deprecated Provide patterns as the first argument instead.
+*/
+declare function globSync(options: GlobOptions): string[];
+//#endregion
+export { FileSystemAdapter, GlobOptions, convertPathToPattern, escapePath, glob, globSync, isDynamicPattern };
\ No newline at end of file
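
The declaration file above documents several options the previous typings did not expose. A hedged sketch of the new knobs, based only on the `GlobOptions` interface in this file; the cwd URL and the 5-second timeout are invented for illustration:

    import { glob } from "tinyglobby";

    const controller = new AbortController();
    setTimeout(() => controller.abort(), 5000); // illustrative cap on crawl time

    const entries = await glob("**/*.js", {
      cwd: new URL("./", import.meta.url), // cwd may now be a URL
      braceExpansion: false,               // treat `{a,b}` literally
      globstar: true,                      // `**` matches nested directories (the default)
      signal: controller.signal,           // abort crawling the file system
    });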
diff --git a/node_modules/tinyglobby/dist/index.d.mts b/node_modules/tinyglobby/dist/index.d.mts
index d8b8ef7cf0516..9d67dae260a76 100644
--- a/node_modules/tinyglobby/dist/index.d.mts
+++ b/node_modules/tinyglobby/dist/index.d.mts
@@ -1,46 +1,147 @@
+import { FSLike } from "fdir";
+
 //#region src/utils.d.ts
 
+/**
+* Converts a path to a pattern depending on the platform.
+* Identical to {@link escapePath} on POSIX systems.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#convertPathToPattern}
+*/
 declare const convertPathToPattern: (path: string) => string;
+/**
+* Escapes a path's special characters depending on the platform.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#escapePath}
+*/
 declare const escapePath: (path: string) => string;
-// #endregion
-// #region isDynamicPattern
-/*
-Has a few minor differences with `fast-glob` for better accuracy:
-
-Doesn't necessarily return false on patterns that include `\\`.
-
-Returns true if the pattern includes parentheses,
-regardless of them representing one single pattern or not.
-
-Returns true for unfinished glob extensions i.e. `(h`, `+(h`.
-
-Returns true for unfinished brace expansions as long as they include `,` or `..`.
+/**
+* Checks if a pattern has dynamic parts.
+*
+* Has a few minor differences with [`fast-glob`](https://github.com/mrmlnc/fast-glob) for better accuracy:
+*
+* - Doesn't necessarily return `false` on patterns that include `\`.
+* - Returns `true` if the pattern includes parentheses, regardless of them representing one single pattern or not.
+* - Returns `true` for unfinished glob extensions i.e. `(h`, `+(h`.
+* - Returns `true` for unfinished brace expansions as long as they include `,` or `..`.
+*
+* @see {@link https://superchupu.dev/tinyglobby/documentation#isDynamicPattern}
 */
 declare function isDynamicPattern(pattern: string, options?: {
   caseSensitiveMatch: boolean;
-}): boolean; //#endregion
+}): boolean;
+//#endregion
 //#region src/index.d.ts
-
-// #endregion
-// #region log
 interface GlobOptions {
+  /**
+  * Whether to return absolute paths. Disable to have relative paths.
+  * @default false
+  */
   absolute?: boolean;
-  cwd?: string;
-  patterns?: string | string[];
-  ignore?: string | string[];
-  dot?: boolean;
-  deep?: number;
-  followSymbolicLinks?: boolean;
+  /**
+  * Enables support for brace expansion syntax, like `{a,b}` or `{1..9}`.
+  * @default true
+  */
+  braceExpansion?: boolean;
+  /**
+  * Whether to match in case-sensitive mode.
+  * @default true
+  */
   caseSensitiveMatch?: boolean;
+  /**
+  * The working directory in which to search. Results will be returned relative to this directory, unless
+  * {@link absolute} is set.
+  *
+  * It is important to avoid globbing outside this directory when possible, even with absolute paths enabled,
+  * as doing so can harm performance due to having to recalculate relative paths.
+  * @default process.cwd()
+  */
+  cwd?: string | URL;
+  /**
+  * Logs useful debug information. Meant for development purposes. Logs can change at any time.
+  * @default false
+  */
+  debug?: boolean;
+  /**
+  * Maximum directory depth to crawl.
+  * @default Infinity
+  */
+  deep?: number;
+  /**
+  * Whether to return entries that start with a dot, like `.gitignore` or `.prettierrc`.
+  * @default false
+  */
+  dot?: boolean;
+  /**
+  * Whether to automatically expand directory patterns.
+  *
+  * Important to disable if migrating from [`fast-glob`](https://github.com/mrmlnc/fast-glob).
+  * @default true
+  */
   expandDirectories?: boolean;
+  /**
+  * Enables support for extglobs, like `+(pattern)`.
+  * @default true
+  */
+  extglob?: boolean;
+  /**
+  * Whether to traverse and include symbolic links. Can slightly affect performance.
+  * @default true
+  */
+  followSymbolicLinks?: boolean;
+  /**
+  * An object that overrides `node:fs` functions.
+  * @default import('node:fs')
+  */
+  fs?: FileSystemAdapter;
+  /**
+  * Enables support for matching nested directories with globstars (`**`).
+  * If `false`, `**` behaves exactly like `*`.
+  * @default true
+  */
+  globstar?: boolean;
+  /**
+  * Glob patterns to exclude from the results.
+  * @default []
+  */
+  ignore?: string | readonly string[];
+  /**
+  * Enable to only return directories.
+  * If `true`, disables {@link onlyFiles}.
+  * @default false
+  */
   onlyDirectories?: boolean;
+  /**
+  * Enable to only return files.
+  * @default true
+  */
   onlyFiles?: boolean;
-  debug?: boolean;
+  /**
+  * @deprecated Provide patterns as the first argument instead.
+  */
+  patterns?: string | readonly string[];
+  /**
+  * An `AbortSignal` to abort crawling the file system.
+  * @default undefined
+  */
+  signal?: AbortSignal;
 }
-declare function glob(patterns: string | string[], options?: Omit<GlobOptions, "patterns">): Promise<string[]>;
+type FileSystemAdapter = Partial<FSLike>;
+/**
+* Asynchronously match files following a glob pattern.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#glob}
+*/
+declare function glob(patterns: string | readonly string[], options?: Omit<GlobOptions, "patterns">): Promise<string[]>;
+/**
+* @deprecated Provide patterns as the first argument instead.
+*/
 declare function glob(options: GlobOptions): Promise<string[]>;
-declare function globSync(patterns: string | string[], options?: Omit<GlobOptions, "patterns">): string[];
+/**
+* Synchronously match files following a glob pattern.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#globSync}
+*/
+declare function globSync(patterns: string | readonly string[], options?: Omit<GlobOptions, "patterns">): string[];
+/**
+* @deprecated Provide patterns as the first argument instead.
+*/
 declare function globSync(options: GlobOptions): string[];
-
 //#endregion
-export { GlobOptions, convertPathToPattern, escapePath, glob, globSync, isDynamicPattern };
\ No newline at end of file
+export { FileSystemAdapter, GlobOptions, convertPathToPattern, escapePath, glob, globSync, isDynamicPattern };
\ No newline at end of file
diff --git a/node_modules/tinyglobby/dist/index.mjs b/node_modules/tinyglobby/dist/index.mjs
index f04903f5b1a76..4f41787d8bc4b 100644
--- a/node_modules/tinyglobby/dist/index.mjs
+++ b/node_modules/tinyglobby/dist/index.mjs
@@ -1,36 +1,41 @@
+import nativeFs from "fs";
 import path, { posix } from "path";
+import { fileURLToPath } from "url";
 import { fdir } from "fdir";
 import picomatch from "picomatch";
 
 //#region src/utils.ts
+const isReadonlyArray = Array.isArray;
+const isWin = process.platform === "win32";
 const ONLY_PARENT_DIRECTORIES = /^(\/?\.\.)+$/;
-function getPartialMatcher(patterns, options) {
+function getPartialMatcher(patterns, options = {}) {
 	const patternsCount = patterns.length;
 	const patternsParts = Array(patternsCount);
-	const regexes = Array(patternsCount);
+	const matchers = Array(patternsCount);
+	const globstarEnabled = !options.noglobstar;
 	for (let i = 0; i < patternsCount; i++) {
 		const parts = splitPattern(patterns[i]);
 		patternsParts[i] = parts;
 		const partsCount = parts.length;
-		const partRegexes = Array(partsCount);
-		for (let j = 0; j < partsCount; j++) partRegexes[j] = picomatch.makeRe(parts[j], options);
-		regexes[i] = partRegexes;
+		const partMatchers = Array(partsCount);
+		for (let j = 0; j < partsCount; j++) partMatchers[j] = picomatch(parts[j], options);
+		matchers[i] = partMatchers;
 	}
 	return (input) => {
 		const inputParts = input.split("/");
 		if (inputParts[0] === ".." && ONLY_PARENT_DIRECTORIES.test(input)) return true;
 		for (let i = 0; i < patterns.length; i++) {
 			const patternParts = patternsParts[i];
-			const regex = regexes[i];
+			const matcher = matchers[i];
 			const inputPatternCount = inputParts.length;
 			const minParts = Math.min(inputPatternCount, patternParts.length);
 			let j = 0;
 			while (j < minParts) {
 				const part = patternParts[j];
 				if (part.includes("/")) return true;
-				const match = regex[j].test(inputParts[j]);
+				const match = matcher[j](inputParts[j]);
 				if (!match) break;
-				if (part === "**") return true;
+				if (globstarEnabled && part === "**") return true;
 				j++;
 			}
 			if (j === inputPatternCount) return true;
@@ -38,13 +43,43 @@ function getPartialMatcher(patterns, options) {
 		return false;
 	};
 }
+/* node:coverage ignore next 2 */
+const WIN32_ROOT_DIR = /^[A-Z]:\/$/i;
+const isRoot = isWin ? (p) => WIN32_ROOT_DIR.test(p) : (p) => p === "/";
+function buildFormat(cwd, root, absolute) {
+	if (cwd === root || root.startsWith(`${cwd}/`)) {
+		if (absolute) {
+			const start = isRoot(cwd) ? cwd.length : cwd.length + 1;
+			return (p, isDir) => p.slice(start, isDir ? -1 : void 0) || ".";
+		}
+		const prefix = root.slice(cwd.length + 1);
+		if (prefix) return (p, isDir) => {
+			if (p === ".") return prefix;
+			const result = `${prefix}/${p}`;
+			return isDir ? result.slice(0, -1) : result;
+		};
+		return (p, isDir) => isDir && p !== "." ? p.slice(0, -1) : p;
+	}
+	if (absolute) return (p) => posix.relative(cwd, p) || ".";
+	return (p) => posix.relative(cwd, `${root}/${p}`) || ".";
+}
+function buildRelative(cwd, root) {
+	if (root.startsWith(`${cwd}/`)) {
+		const prefix = root.slice(cwd.length + 1);
+		return (p) => `${prefix}/${p}`;
+	}
+	return (p) => {
+		const result = posix.relative(cwd, `${root}/${p}`);
+		if (p.endsWith("/") && result !== "") return `${result}/`;
+		return result || ".";
+	};
+}
 const splitPatternOptions = { parts: true };
 function splitPattern(path$1) {
 	var _result$parts;
 	const result = picomatch.scan(path$1, splitPatternOptions);
 	return ((_result$parts = result.parts) === null || _result$parts === void 0 ? void 0 : _result$parts.length) ? result.parts : [path$1];
 }
-const isWin = process.platform === "win32";
 const ESCAPED_WIN32_BACKSLASHES = /\\(?![()[\]{}!+@])/g;
 function convertPosixPathToPattern(path$1) {
 	return escapePosixPath(path$1);
@@ -52,19 +87,42 @@ function convertPosixPathToPattern(path$1) {
 function convertWin32PathToPattern(path$1) {
 	return escapeWin32Path(path$1).replace(ESCAPED_WIN32_BACKSLASHES, "/");
 }
+/**
+* Converts a path to a pattern depending on the platform.
+* Identical to {@link escapePath} on POSIX systems.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#convertPathToPattern}
+*/
+/* node:coverage ignore next 3 */
 const convertPathToPattern = isWin ? convertWin32PathToPattern : convertPosixPathToPattern;
 const POSIX_UNESCAPED_GLOB_SYMBOLS = /(?<!\\)([()[\]{}*?|]|^!|[!+@](?=\()|\\(?![()[\]{}!*+?@|]))/g;
 const WIN32_UNESCAPED_GLOB_SYMBOLS = /(?<!\\)([()[\]{}]|^!|[!+@](?=\())/g;
 const escapePosixPath = (path$1) => path$1.replace(POSIX_UNESCAPED_GLOB_SYMBOLS, "\\$&");
 const escapeWin32Path = (path$1) => path$1.replace(WIN32_UNESCAPED_GLOB_SYMBOLS, "\\$&");
+/**
+* Escapes a path's special characters depending on the platform.
+* @see {@link https://superchupu.dev/tinyglobby/documentation#escapePath}
+*/
+/* node:coverage ignore next */
 const escapePath = isWin ? escapeWin32Path : escapePosixPath;
+/**
+* Checks if a pattern has dynamic parts.
+*
+* Has a few minor differences with [`fast-glob`](https://github.com/mrmlnc/fast-glob) for better accuracy:
+*
+* - Doesn't necessarily return `false` on patterns that include `\`.
+* - Returns `true` if the pattern includes parentheses, regardless of them representing one single pattern or not.
+* - Returns `true` for unfinished glob extensions i.e. `(h`, `+(h`.
+* - Returns `true` for unfinished brace expansions as long as they include `,` or `..`.
+*
+* @see {@link https://superchupu.dev/tinyglobby/documentation#isDynamicPattern}
+*/
 function isDynamicPattern(pattern, options) {
 	if ((options === null || options === void 0 ? void 0 : options.caseSensitiveMatch) === false) return true;
 	const scan = picomatch.scan(pattern);
 	return scan.isGlob || scan.negated;
 }
 function log(...tasks) {
-	console.log(`[tinyglobby ${new Date().toLocaleTimeString("es")}]`, ...tasks);
+	console.log(`[tinyglobby ${(/* @__PURE__ */ new Date()).toLocaleTimeString("es")}]`, ...tasks);
 }
 
 //#endregion
@@ -111,13 +169,12 @@ function normalizePattern(pattern, expandDirectories, cwd, props, isIgnore) {
 		}
 		props.depthOffset = newCommonPath.length;
 		props.commonPath = newCommonPath;
-		props.root = newCommonPath.length > 0 ? path.posix.join(cwd, ...newCommonPath) : cwd;
+		props.root = newCommonPath.length > 0 ? posix.join(cwd, ...newCommonPath) : cwd;
 	}
 	return result;
 }
-function processPatterns({ patterns, ignore = [], expandDirectories = true }, cwd, props) {
+function processPatterns({ patterns = ["**/*"], ignore = [], expandDirectories = true }, cwd, props) {
 	if (typeof patterns === "string") patterns = [patterns];
-	else if (!patterns) patterns = ["**/*"];
 	if (typeof ignore === "string") ignore = [ignore];
 	const matchPatterns = [];
 	const ignorePatterns = [];
@@ -135,66 +192,88 @@ function processPatterns({ patterns, ignore = [], expandDirectories = true }, cw
 		ignore: ignorePatterns
 	};
 }
-function getRelativePath(path$1, cwd, root) {
-	return posix.relative(cwd, `${root}/${path$1}`) || ".";
-}
-function processPath(path$1, cwd, root, isDirectory, absolute) {
-	const relativePath = absolute ? path$1.slice(root === "/" ? 1 : root.length + 1) || "." : path$1;
-	if (root === cwd) return isDirectory && relativePath !== "." ? relativePath.slice(0, -1) : relativePath;
-	return getRelativePath(relativePath, cwd, root);
-}
-function formatPaths(paths, cwd, root) {
+function formatPaths(paths, relative) {
 	for (let i = paths.length - 1; i >= 0; i--) {
 		const path$1 = paths[i];
-		paths[i] = getRelativePath(path$1, cwd, root) + (!path$1 || path$1.endsWith("/") ? "/" : "");
+		paths[i] = relative(path$1);
 	}
 	return paths;
 }
-function crawl(options, cwd, sync) {
-	if (process.env.TINYGLOBBY_DEBUG) options.debug = true;
-	if (options.debug) log("globbing with options:", options, "cwd:", cwd);
-	if (Array.isArray(options.patterns) && options.patterns.length === 0) return sync ? [] : Promise.resolve([]);
+function normalizeCwd(cwd) {
+	if (!cwd) return process.cwd().replace(BACKSLASHES, "/");
+	if (cwd instanceof URL) return fileURLToPath(cwd).replace(BACKSLASHES, "/");
+	return path.resolve(cwd).replace(BACKSLASHES, "/");
+}
+function getCrawler(patterns, inputOptions = {}) {
+	const options = process.env.TINYGLOBBY_DEBUG ? {
+		...inputOptions,
+		debug: true
+	} : inputOptions;
+	const cwd = normalizeCwd(options.cwd);
+	if (options.debug) log("globbing with:", {
+		patterns,
+		options,
+		cwd
+	});
+	if (Array.isArray(patterns) && patterns.length === 0) return [{
+		sync: () => [],
+		withPromise: async () => []
+	}, false];
 	const props = {
 		root: cwd,
 		commonPath: null,
 		depthOffset: 0
 	};
-	const processed = processPatterns(options, cwd, props);
-	const nocase = options.caseSensitiveMatch === false;
+	const processed = processPatterns({
+		...options,
+		patterns
+	}, cwd, props);
 	if (options.debug) log("internal processing patterns:", processed);
-	const matcher = picomatch(processed.match, {
+	const matchOptions = {
 		dot: options.dot,
-		nocase,
+		nobrace: options.braceExpansion === false,
+		nocase: options.caseSensitiveMatch === false,
+		noextglob: options.extglob === false,
+		noglobstar: options.globstar === false,
+		posix: true
+	};
+	const matcher = picomatch(processed.match, {
+		...matchOptions,
 		ignore: processed.ignore
 	});
-	const ignore = picomatch(processed.ignore, {
-		dot: options.dot,
-		nocase
-	});
-	const partialMatcher = getPartialMatcher(processed.match, {
-		dot: options.dot,
-		nocase
-	});
+	const ignore = picomatch(processed.ignore, matchOptions);
+	const partialMatcher = getPartialMatcher(processed.match, matchOptions);
+	const format = buildFormat(cwd, props.root, options.absolute);
+	const formatExclude = options.absolute ? format : buildFormat(cwd, props.root, true);
 	const fdirOptions = {
 		filters: [options.debug ? (p, isDirectory) => {
-			const path$1 = processPath(p, cwd, props.root, isDirectory, options.absolute);
+			const path$1 = format(p, isDirectory);
 			const matches = matcher(path$1);
 			if (matches) log(`matched ${path$1}`);
 			return matches;
-		} : (p, isDirectory) => matcher(processPath(p, cwd, props.root, isDirectory, options.absolute))],
+		} : (p, isDirectory) => matcher(format(p, isDirectory))],
 		exclude: options.debug ? (_, p) => {
-			const relativePath = processPath(p, cwd, props.root, true, true);
+			const relativePath = formatExclude(p, true);
 			const skipped = relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
 			if (skipped) log(`skipped ${p}`);
 			else log(`crawling ${p}`);
 			return skipped;
 		} : (_, p) => {
-			const relativePath = processPath(p, cwd, props.root, true, true);
+			const relativePath = formatExclude(p, true);
 			return relativePath !== "." && !partialMatcher(relativePath) || ignore(relativePath);
 		},
+		fs: options.fs ? {
+			readdir: options.fs.readdir || nativeFs.readdir,
+			readdirSync: options.fs.readdirSync || nativeFs.readdirSync,
+			realpath: options.fs.realpath || nativeFs.realpath,
+			realpathSync: options.fs.realpathSync || nativeFs.realpathSync,
+			stat: options.fs.stat || nativeFs.stat,
+			statSync: options.fs.statSync || nativeFs.statSync
+		} : void 0,
 		pathSeparator: "/",
 		relativePaths: true,
-		resolveSymlinks: true
+		resolveSymlinks: true,
+		signal: options.signal
 	};
 	if (options.deep !== void 0) fdirOptions.maxDepth = Math.round(options.deep - props.depthOffset);
 	if (options.absolute) {
@@ -213,27 +292,26 @@ function crawl(options, cwd, sync) {
 	props.root = props.root.replace(BACKSLASHES, "");
 	const root = props.root;
 	if (options.debug) log("internal properties:", props);
-	const api = new fdir(fdirOptions).crawl(root);
-	if (cwd === root || options.absolute) return sync ? api.sync() : api.withPromise();
-	return sync ? formatPaths(api.sync(), cwd, root) : api.withPromise().then((paths) => formatPaths(paths, cwd, root));
+	const relative = cwd !== root && !options.absolute && buildRelative(cwd, props.root);
+	return [new fdir(fdirOptions).crawl(root), relative];
 }
 async function glob(patternsOrOptions, options) {
 	if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
-	const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
-		...options,
-		patterns: patternsOrOptions
-	} : patternsOrOptions;
-	const cwd = opts.cwd ? path.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
-	return crawl(opts, cwd, false);
+	const isModern = isReadonlyArray(patternsOrOptions) || typeof patternsOrOptions === "string";
+	const opts = isModern ? options : patternsOrOptions;
+	const patterns = isModern ? patternsOrOptions : patternsOrOptions.patterns;
+	const [crawler, relative] = getCrawler(patterns, opts);
+	if (!relative) return crawler.withPromise();
+	return formatPaths(await crawler.withPromise(), relative);
 }
 function globSync(patternsOrOptions, options) {
 	if (patternsOrOptions && (options === null || options === void 0 ? void 0 : options.patterns)) throw new Error("Cannot pass patterns as both an argument and an option");
-	const opts = Array.isArray(patternsOrOptions) || typeof patternsOrOptions === "string" ? {
-		...options,
-		patterns: patternsOrOptions
-	} : patternsOrOptions;
-	const cwd = opts.cwd ? path.resolve(opts.cwd).replace(BACKSLASHES, "/") : process.cwd().replace(BACKSLASHES, "/");
-	return crawl(opts, cwd, true);
+	const isModern = isReadonlyArray(patternsOrOptions) || typeof patternsOrOptions === "string";
+	const opts = isModern ? options : patternsOrOptions;
+	const patterns = isModern ? patternsOrOptions : patternsOrOptions.patterns;
+	const [crawler, relative] = getCrawler(patterns, opts);
+	if (!relative) return crawler.sync();
+	return formatPaths(crawler.sync(), relative);
 }
 
 //#endregion
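
Worth noting about the refactor in this file: `crawl` previously remapped every result with `processPath`/`getRelativePath`, whereas `getCrawler` now builds one formatter up front and only post-processes paths when the derived root differs from the cwd and `absolute` is off. A rough equivalence sketch of what `buildRelative`'s fast path does, with made-up paths:

    import { posix } from "path";

    // assume cwd "/repo" and a derived crawl root "/repo/src"
    const cwd = "/repo";
    const root = "/repo/src";
    const prefix = root.slice(cwd.length + 1);                 // "src"
    const relative = (p) => `${prefix}/${p}`;                  // fast path: root sits under cwd

    console.log(relative("lib/index.js"));                     // "src/lib/index.js"
    console.log(posix.relative(cwd, `${root}/lib/index.js`));  // same result via the general fallback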
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/async.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/async.js
deleted file mode 100644
index efc6649cb04e4..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/async.js
+++ /dev/null
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.callback = exports.promise = void 0;
-const walker_1 = require("./walker");
-function promise(root, options) {
-    return new Promise((resolve, reject) => {
-        callback(root, options, (err, output) => {
-            if (err)
-                return reject(err);
-            resolve(output);
-        });
-    });
-}
-exports.promise = promise;
-function callback(root, options, callback) {
-    let walker = new walker_1.Walker(root, options, callback);
-    walker.start();
-}
-exports.callback = callback;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js
deleted file mode 100644
index 685cb270b73e5..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/counter.js
+++ /dev/null
@@ -1,27 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Counter = void 0;
-class Counter {
-    _files = 0;
-    _directories = 0;
-    set files(num) {
-        this._files = num;
-    }
-    get files() {
-        return this._files;
-    }
-    set directories(num) {
-        this._directories = num;
-    }
-    get directories() {
-        return this._directories;
-    }
-    /**
-     * @deprecated use `directories` instead
-     */
-    /* c8 ignore next 3 */
-    get dirs() {
-        return this._directories;
-    }
-}
-exports.Counter = Counter;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js
deleted file mode 100644
index 1e02308dfa6f2..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/get-array.js
+++ /dev/null
@@ -1,13 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-const getArray = (paths) => {
-    return paths;
-};
-const getArrayGroup = () => {
-    return [""].slice(0, 0);
-};
-function build(options) {
-    return options.group ? getArrayGroup : getArray;
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js
deleted file mode 100644
index 4ccaa1a481156..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/group-files.js
+++ /dev/null
@@ -1,11 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-const groupFiles = (groups, directory, files) => {
-    groups.push({ directory, files, dir: directory });
-};
-const empty = () => { };
-function build(options) {
-    return options.group ? groupFiles : empty;
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js
deleted file mode 100644
index ed59ca2da7898..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/invoke-callback.js
+++ /dev/null
@@ -1,57 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-const onlyCountsSync = (state) => {
-    return state.counts;
-};
-const groupsSync = (state) => {
-    return state.groups;
-};
-const defaultSync = (state) => {
-    return state.paths;
-};
-const limitFilesSync = (state) => {
-    return state.paths.slice(0, state.options.maxFiles);
-};
-const onlyCountsAsync = (state, error, callback) => {
-    report(error, callback, state.counts, state.options.suppressErrors);
-    return null;
-};
-const defaultAsync = (state, error, callback) => {
-    report(error, callback, state.paths, state.options.suppressErrors);
-    return null;
-};
-const limitFilesAsync = (state, error, callback) => {
-    report(error, callback, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
-    return null;
-};
-const groupsAsync = (state, error, callback) => {
-    report(error, callback, state.groups, state.options.suppressErrors);
-    return null;
-};
-function report(error, callback, output, suppressErrors) {
-    if (error && !suppressErrors)
-        callback(error, output);
-    else
-        callback(null, output);
-}
-function build(options, isSynchronous) {
-    const { onlyCounts, group, maxFiles } = options;
-    if (onlyCounts)
-        return isSynchronous
-            ? onlyCountsSync
-            : onlyCountsAsync;
-    else if (group)
-        return isSynchronous
-            ? groupsSync
-            : groupsAsync;
-    else if (maxFiles)
-        return isSynchronous
-            ? limitFilesSync
-            : limitFilesAsync;
-    else
-        return isSynchronous
-            ? defaultSync
-            : defaultAsync;
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js
deleted file mode 100644
index e84faf617734e..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/join-path.js
+++ /dev/null
@@ -1,36 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = exports.joinDirectoryPath = exports.joinPathWithBasePath = void 0;
-const path_1 = require("path");
-const utils_1 = require("../../utils");
-function joinPathWithBasePath(filename, directoryPath) {
-    return directoryPath + filename;
-}
-exports.joinPathWithBasePath = joinPathWithBasePath;
-function joinPathWithRelativePath(root, options) {
-    return function (filename, directoryPath) {
-        const sameRoot = directoryPath.startsWith(root);
-        if (sameRoot)
-            return directoryPath.replace(root, "") + filename;
-        else
-            return ((0, utils_1.convertSlashes)((0, path_1.relative)(root, directoryPath), options.pathSeparator) +
-                options.pathSeparator +
-                filename);
-    };
-}
-function joinPath(filename) {
-    return filename;
-}
-function joinDirectoryPath(filename, directoryPath, separator) {
-    return directoryPath + filename + separator;
-}
-exports.joinDirectoryPath = joinDirectoryPath;
-function build(root, options) {
-    const { relativePaths, includeBasePath } = options;
-    return relativePaths && root
-        ? joinPathWithRelativePath(root, options)
-        : includeBasePath
-            ? joinPathWithBasePath
-            : joinPath;
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js
deleted file mode 100644
index 6858cb6253201..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-directory.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-function pushDirectoryWithRelativePath(root) {
-    return function (directoryPath, paths) {
-        paths.push(directoryPath.substring(root.length) || ".");
-    };
-}
-function pushDirectoryFilterWithRelativePath(root) {
-    return function (directoryPath, paths, filters) {
-        const relativePath = directoryPath.substring(root.length) || ".";
-        if (filters.every((filter) => filter(relativePath, true))) {
-            paths.push(relativePath);
-        }
-    };
-}
-const pushDirectory = (directoryPath, paths) => {
-    paths.push(directoryPath || ".");
-};
-const pushDirectoryFilter = (directoryPath, paths, filters) => {
-    const path = directoryPath || ".";
-    if (filters.every((filter) => filter(path, true))) {
-        paths.push(path);
-    }
-};
-const empty = () => { };
-function build(root, options) {
-    const { includeDirs, filters, relativePaths } = options;
-    if (!includeDirs)
-        return empty;
-    if (relativePaths)
-        return filters && filters.length
-            ? pushDirectoryFilterWithRelativePath(root)
-            : pushDirectoryWithRelativePath(root);
-    return filters && filters.length ? pushDirectoryFilter : pushDirectory;
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js
deleted file mode 100644
index 88843952946ad..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/push-file.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
-    if (filters.every((filter) => filter(filename, false)))
-        counts.files++;
-};
-const pushFileFilter = (filename, paths, _counts, filters) => {
-    if (filters.every((filter) => filter(filename, false)))
-        paths.push(filename);
-};
-const pushFileCount = (_filename, _paths, counts, _filters) => {
-    counts.files++;
-};
-const pushFile = (filename, paths) => {
-    paths.push(filename);
-};
-const empty = () => { };
-function build(options) {
-    const { excludeFiles, filters, onlyCounts } = options;
-    if (excludeFiles)
-        return empty;
-    if (filters && filters.length) {
-        return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
-    }
-    else if (onlyCounts) {
-        return pushFileCount;
-    }
-    else {
-        return pushFile;
-    }
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js
deleted file mode 100644
index dbf0720cd41f8..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/resolve-symlink.js
+++ /dev/null
@@ -1,67 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-const fs_1 = __importDefault(require("fs"));
-const path_1 = require("path");
-const resolveSymlinksAsync = function (path, state, callback) {
-    const { queue, options: { suppressErrors }, } = state;
-    queue.enqueue();
-    fs_1.default.realpath(path, (error, resolvedPath) => {
-        if (error)
-            return queue.dequeue(suppressErrors ? null : error, state);
-        fs_1.default.stat(resolvedPath, (error, stat) => {
-            if (error)
-                return queue.dequeue(suppressErrors ? null : error, state);
-            if (stat.isDirectory() && isRecursive(path, resolvedPath, state))
-                return queue.dequeue(null, state);
-            callback(stat, resolvedPath);
-            queue.dequeue(null, state);
-        });
-    });
-};
-const resolveSymlinks = function (path, state, callback) {
-    const { queue, options: { suppressErrors }, } = state;
-    queue.enqueue();
-    try {
-        const resolvedPath = fs_1.default.realpathSync(path);
-        const stat = fs_1.default.statSync(resolvedPath);
-        if (stat.isDirectory() && isRecursive(path, resolvedPath, state))
-            return;
-        callback(stat, resolvedPath);
-    }
-    catch (e) {
-        if (!suppressErrors)
-            throw e;
-    }
-};
-function build(options, isSynchronous) {
-    if (!options.resolveSymlinks || options.excludeSymlinks)
-        return null;
-    return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
-}
-exports.build = build;
-function isRecursive(path, resolved, state) {
-    if (state.options.useRealPaths)
-        return isRecursiveUsingRealPaths(resolved, state);
-    let parent = (0, path_1.dirname)(path);
-    let depth = 1;
-    while (parent !== state.root && depth < 2) {
-        const resolvedPath = state.symlinks.get(parent);
-        const isSameRoot = !!resolvedPath &&
-            (resolvedPath === resolved ||
-                resolvedPath.startsWith(resolved) ||
-                resolved.startsWith(resolvedPath));
-        if (isSameRoot)
-            depth++;
-        else
-            parent = (0, path_1.dirname)(parent);
-    }
-    state.symlinks.set(path, resolved);
-    return depth > 1;
-}
-function isRecursiveUsingRealPaths(resolved, state) {
-    return state.visited.includes(resolved + state.options.pathSeparator);
-}
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js
deleted file mode 100644
index 424302b6f9e14..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/functions/walk-directory.js
+++ /dev/null
@@ -1,40 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.build = void 0;
-const fs_1 = __importDefault(require("fs"));
-const readdirOpts = { withFileTypes: true };
-const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback) => {
-    state.queue.enqueue();
-    if (currentDepth < 0)
-        return state.queue.dequeue(null, state);
-    state.visited.push(crawlPath);
-    state.counts.directories++;
-    // Perf: Node >= 10 introduced withFileTypes that helps us
-    // skip an extra fs.stat call.
-    fs_1.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
-        callback(entries, directoryPath, currentDepth);
-        state.queue.dequeue(state.options.suppressErrors ? null : error, state);
-    });
-};
-const walkSync = (state, crawlPath, directoryPath, currentDepth, callback) => {
-    if (currentDepth < 0)
-        return;
-    state.visited.push(crawlPath);
-    state.counts.directories++;
-    let entries = [];
-    try {
-        entries = fs_1.default.readdirSync(crawlPath || ".", readdirOpts);
-    }
-    catch (e) {
-        if (!state.options.suppressErrors)
-            throw e;
-    }
-    callback(entries, directoryPath, currentDepth);
-};
-function build(isSynchronous) {
-    return isSynchronous ? walkSync : walkAsync;
-}
-exports.build = build;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js
deleted file mode 100644
index 4708d422350af..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/queue.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Queue = void 0;
-/**
- * This is a custom stateless queue to track concurrent async fs calls.
- * It increments a counter whenever a call is queued and decrements it
- * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
- */
-class Queue {
-    onQueueEmpty;
-    count = 0;
-    constructor(onQueueEmpty) {
-        this.onQueueEmpty = onQueueEmpty;
-    }
-    enqueue() {
-        this.count++;
-        return this.count;
-    }
-    dequeue(error, output) {
-        if (this.onQueueEmpty && (--this.count <= 0 || error)) {
-            this.onQueueEmpty(error, output);
-            if (error) {
-                output.controller.abort();
-                this.onQueueEmpty = undefined;
-            }
-        }
-    }
-}
-exports.Queue = Queue;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js
deleted file mode 100644
index 073bc88d212be..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/sync.js
+++ /dev/null
@@ -1,9 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.sync = void 0;
-const walker_1 = require("./walker");
-function sync(root, options) {
-    const walker = new walker_1.Walker(root, options);
-    return walker.start();
-}
-exports.sync = sync;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js b/node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js
deleted file mode 100644
index 19e913785956f..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/api/walker.js
+++ /dev/null
@@ -1,129 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Walker = void 0;
-const path_1 = require("path");
-const utils_1 = require("../utils");
-const joinPath = __importStar(require("./functions/join-path"));
-const pushDirectory = __importStar(require("./functions/push-directory"));
-const pushFile = __importStar(require("./functions/push-file"));
-const getArray = __importStar(require("./functions/get-array"));
-const groupFiles = __importStar(require("./functions/group-files"));
-const resolveSymlink = __importStar(require("./functions/resolve-symlink"));
-const invokeCallback = __importStar(require("./functions/invoke-callback"));
-const walkDirectory = __importStar(require("./functions/walk-directory"));
-const queue_1 = require("./queue");
-const counter_1 = require("./counter");
-class Walker {
-    root;
-    isSynchronous;
-    state;
-    joinPath;
-    pushDirectory;
-    pushFile;
-    getArray;
-    groupFiles;
-    resolveSymlink;
-    walkDirectory;
-    callbackInvoker;
-    constructor(root, options, callback) {
-        this.isSynchronous = !callback;
-        this.callbackInvoker = invokeCallback.build(options, this.isSynchronous);
-        this.root = (0, utils_1.normalizePath)(root, options);
-        this.state = {
-            root: (0, utils_1.isRootDirectory)(this.root) ? this.root : this.root.slice(0, -1),
-            // Perf: we explicitly tell the compiler to optimize for String arrays
-            paths: [""].slice(0, 0),
-            groups: [],
-            counts: new counter_1.Counter(),
-            options,
-            queue: new queue_1.Queue((error, state) => this.callbackInvoker(state, error, callback)),
-            symlinks: new Map(),
-            visited: [""].slice(0, 0),
-            controller: new AbortController(),
-        };
-        /*
-         * Perf: We conditionally change functions according to options. This gives a slight
-         * performance boost. Since these functions are so small, they are automatically inlined
-         * by the javascript engine so there's no function call overhead (in most cases).
-         */
-        this.joinPath = joinPath.build(this.root, options);
-        this.pushDirectory = pushDirectory.build(this.root, options);
-        this.pushFile = pushFile.build(options);
-        this.getArray = getArray.build(options);
-        this.groupFiles = groupFiles.build(options);
-        this.resolveSymlink = resolveSymlink.build(options, this.isSynchronous);
-        this.walkDirectory = walkDirectory.build(this.isSynchronous);
-    }
-    start() {
-        this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
-        this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
-        return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
-    }
-    walk = (entries, directoryPath, depth) => {
-        const { paths, options: { filters, resolveSymlinks, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator, }, controller, } = this.state;
-        if (controller.signal.aborted ||
-            (signal && signal.aborted) ||
-            (maxFiles && paths.length > maxFiles))
-            return;
-        const files = this.getArray(this.state.paths);
-        for (let i = 0; i < entries.length; ++i) {
-            const entry = entries[i];
-            if (entry.isFile() ||
-                (entry.isSymbolicLink() && !resolveSymlinks && !excludeSymlinks)) {
-                const filename = this.joinPath(entry.name, directoryPath);
-                this.pushFile(filename, files, this.state.counts, filters);
-            }
-            else if (entry.isDirectory()) {
-                let path = joinPath.joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
-                if (exclude && exclude(entry.name, path))
-                    continue;
-                this.pushDirectory(path, paths, filters);
-                this.walkDirectory(this.state, path, path, depth - 1, this.walk);
-            }
-            else if (this.resolveSymlink && entry.isSymbolicLink()) {
-                let path = joinPath.joinPathWithBasePath(entry.name, directoryPath);
-                this.resolveSymlink(path, this.state, (stat, resolvedPath) => {
-                    if (stat.isDirectory()) {
-                        resolvedPath = (0, utils_1.normalizePath)(resolvedPath, this.state.options);
-                        if (exclude &&
-                            exclude(entry.name, useRealPaths ? resolvedPath : path + pathSeparator))
-                            return;
-                        this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path + pathSeparator, depth - 1, this.walk);
-                    }
-                    else {
-                        resolvedPath = useRealPaths ? resolvedPath : path;
-                        const filename = (0, path_1.basename)(resolvedPath);
-                        const directoryPath = (0, utils_1.normalizePath)((0, path_1.dirname)(resolvedPath), this.state.options);
-                        resolvedPath = this.joinPath(filename, directoryPath);
-                        this.pushFile(resolvedPath, files, this.state.counts, filters);
-                    }
-                });
-            }
-        }
-        this.groupFiles(this.state.groups, directoryPath, files);
-    };
-}
-exports.Walker = Walker;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js b/node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js
deleted file mode 100644
index 0538e6fabfb49..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/builder/api-builder.js
+++ /dev/null
@@ -1,23 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.APIBuilder = void 0;
-const async_1 = require("../api/async");
-const sync_1 = require("../api/sync");
-class APIBuilder {
-    root;
-    options;
-    constructor(root, options) {
-        this.root = root;
-        this.options = options;
-    }
-    withPromise() {
-        return (0, async_1.promise)(this.root, this.options);
-    }
-    withCallback(cb) {
-        (0, async_1.callback)(this.root, this.options, cb);
-    }
-    sync() {
-        return (0, sync_1.sync)(this.root, this.options);
-    }
-}
-exports.APIBuilder = APIBuilder;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js b/node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js
deleted file mode 100644
index 7f99aece6a348..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/builder/index.js
+++ /dev/null
@@ -1,136 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Builder = void 0;
-const path_1 = require("path");
-const api_builder_1 = require("./api-builder");
-var pm = null;
-/* c8 ignore next 6 */
-try {
-    require.resolve("picomatch");
-    pm = require("picomatch");
-}
-catch (_e) {
-    // do nothing
-}
-class Builder {
-    globCache = {};
-    options = {
-        maxDepth: Infinity,
-        suppressErrors: true,
-        pathSeparator: path_1.sep,
-        filters: [],
-    };
-    globFunction;
-    constructor(options) {
-        this.options = { ...this.options, ...options };
-        this.globFunction = this.options.globFunction;
-    }
-    group() {
-        this.options.group = true;
-        return this;
-    }
-    withPathSeparator(separator) {
-        this.options.pathSeparator = separator;
-        return this;
-    }
-    withBasePath() {
-        this.options.includeBasePath = true;
-        return this;
-    }
-    withRelativePaths() {
-        this.options.relativePaths = true;
-        return this;
-    }
-    withDirs() {
-        this.options.includeDirs = true;
-        return this;
-    }
-    withMaxDepth(depth) {
-        this.options.maxDepth = depth;
-        return this;
-    }
-    withMaxFiles(limit) {
-        this.options.maxFiles = limit;
-        return this;
-    }
-    withFullPaths() {
-        this.options.resolvePaths = true;
-        this.options.includeBasePath = true;
-        return this;
-    }
-    withErrors() {
-        this.options.suppressErrors = false;
-        return this;
-    }
-    withSymlinks({ resolvePaths = true } = {}) {
-        this.options.resolveSymlinks = true;
-        this.options.useRealPaths = resolvePaths;
-        return this.withFullPaths();
-    }
-    withAbortSignal(signal) {
-        this.options.signal = signal;
-        return this;
-    }
-    normalize() {
-        this.options.normalizePath = true;
-        return this;
-    }
-    filter(predicate) {
-        this.options.filters.push(predicate);
-        return this;
-    }
-    onlyDirs() {
-        this.options.excludeFiles = true;
-        this.options.includeDirs = true;
-        return this;
-    }
-    exclude(predicate) {
-        this.options.exclude = predicate;
-        return this;
-    }
-    onlyCounts() {
-        this.options.onlyCounts = true;
-        return this;
-    }
-    crawl(root) {
-        return new api_builder_1.APIBuilder(root || ".", this.options);
-    }
-    withGlobFunction(fn) {
-        // cast this since we don't have the new type params yet
-        this.globFunction = fn;
-        return this;
-    }
-    /**
-     * @deprecated Pass options using the constructor instead:
-     * ```ts
-     * new fdir(options).crawl("/path/to/root");
-     * ```
-     * This method will be removed in v7.0
-     */
-    /* c8 ignore next 4 */
-    crawlWithOptions(root, options) {
-        this.options = { ...this.options, ...options };
-        return new api_builder_1.APIBuilder(root || ".", this.options);
-    }
-    glob(...patterns) {
-        if (this.globFunction) {
-            return this.globWithOptions(patterns);
-        }
-        return this.globWithOptions(patterns, ...[{ dot: true }]);
-    }
-    globWithOptions(patterns, ...options) {
-        const globFn = (this.globFunction || pm);
-        /* c8 ignore next 5 */
-        if (!globFn) {
-            throw new Error("Please specify a glob function to use glob matching.");
-        }
-        var isMatch = this.globCache[patterns.join("\0")];
-        if (!isMatch) {
-            isMatch = globFn(patterns, ...options);
-            this.globCache[patterns.join("\0")] = isMatch;
-        }
-        this.options.filters.push((path) => isMatch(path));
-        return this;
-    }
-}
-exports.Builder = Builder;
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs b/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs
index 83e724896ff82..4868ffba35d99 100644
--- a/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs
+++ b/node_modules/tinyglobby/node_modules/fdir/dist/index.cjs
@@ -56,7 +56,7 @@ function joinPathWithBasePath(filename, directoryPath) {
 function joinPathWithRelativePath(root, options) {
 	return function(filename, directoryPath) {
 		const sameRoot = directoryPath.startsWith(root);
-		if (sameRoot) return directoryPath.replace(root, "") + filename;
+		if (sameRoot) return directoryPath.slice(root.length) + filename;
 		else return convertSlashes((0, path.relative)(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename;
 	};
 }
@@ -151,11 +151,11 @@ function build$3(options) {
 //#endregion
 //#region src/api/functions/resolve-symlink.ts
 const resolveSymlinksAsync = function(path$1, state, callback$1) {
-	const { queue, options: { suppressErrors } } = state;
+	const { queue, fs: fs$1, options: { suppressErrors } } = state;
 	queue.enqueue();
-	fs.default.realpath(path$1, (error, resolvedPath) => {
+	fs$1.realpath(path$1, (error, resolvedPath) => {
 		if (error) return queue.dequeue(suppressErrors ? null : error, state);
-		fs.default.stat(resolvedPath, (error$1, stat) => {
+		fs$1.stat(resolvedPath, (error$1, stat) => {
 			if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
 			if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return queue.dequeue(null, state);
 			callback$1(stat, resolvedPath);
@@ -164,11 +164,11 @@ const resolveSymlinksAsync = function(path$1, state, callback$1) {
 	});
 };
 const resolveSymlinks = function(path$1, state, callback$1) {
-	const { queue, options: { suppressErrors } } = state;
+	const { queue, fs: fs$1, options: { suppressErrors } } = state;
 	queue.enqueue();
 	try {
-		const resolvedPath = fs.default.realpathSync(path$1);
-		const stat = fs.default.statSync(resolvedPath);
+		const resolvedPath = fs$1.realpathSync(path$1);
+		const stat = fs$1.statSync(resolvedPath);
 		if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return;
 		callback$1(stat, resolvedPath);
 	} catch (e) {
@@ -243,21 +243,23 @@ function build$1(options, isSynchronous) {
 const readdirOpts = { withFileTypes: true };
 const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
 	state.queue.enqueue();
-	if (currentDepth <= 0) return state.queue.dequeue(null, state);
+	if (currentDepth < 0) return state.queue.dequeue(null, state);
+	const { fs: fs$1 } = state;
 	state.visited.push(crawlPath);
 	state.counts.directories++;
-	fs.default.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
+	fs$1.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
 		callback$1(entries, directoryPath, currentDepth);
 		state.queue.dequeue(state.options.suppressErrors ? null : error, state);
 	});
 };
 const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
-	if (currentDepth <= 0) return;
+	const { fs: fs$1 } = state;
+	if (currentDepth < 0) return;
 	state.visited.push(crawlPath);
 	state.counts.directories++;
 	let entries = [];
 	try {
-		entries = fs.default.readdirSync(crawlPath || ".", readdirOpts);
+		entries = fs$1.readdirSync(crawlPath || ".", readdirOpts);
 	} catch (e) {
 		if (!state.options.suppressErrors) throw e;
 	}
@@ -320,6 +322,19 @@ var Counter = class {
 	}
 };
 
+//#endregion
+//#region src/api/aborter.ts
+/**
+* AbortController is not supported on Node 14 so we use this until we can drop
+* support for Node 14.
+*/
+var Aborter = class {
+	aborted = false;
+	abort() {
+		this.aborted = true;
+	}
+};
+
 //#endregion
 //#region src/api/walker.ts
 var Walker = class {
@@ -347,7 +362,8 @@ var Walker = class {
 			queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
 			symlinks: /* @__PURE__ */ new Map(),
 			visited: [""].slice(0, 0),
-			controller: new AbortController()
+			controller: new Aborter(),
+			fs: options.fs || fs
 		};
 		this.joinPath = build$7(this.root, options);
 		this.pushDirectory = build$6(this.root, options);
@@ -364,7 +380,7 @@ var Walker = class {
 	}
 	walk = (entries, directoryPath, depth) => {
 		const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
-		if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+		if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
 		const files = this.getArray(this.state.paths);
 		for (let i = 0; i < entries.length; ++i) {
 			const entry = entries[i];
@@ -439,12 +455,12 @@ var APIBuilder = class {
 
 //#endregion
 //#region src/builder/index.ts
-var pm = null;
+let pm = null;
 /* c8 ignore next 6 */
 try {
 	require.resolve("picomatch");
 	pm = require("picomatch");
-} catch (_e) {}
+} catch {}
 var Builder = class {
 	globCache = {};
 	options = {
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts b/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
index 8eb36bc363449..f448ef5d9b563 100644
--- a/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
+++ b/node_modules/tinyglobby/node_modules/fdir/dist/index.d.cts
@@ -1,6 +1,17 @@
 /// 
+import * as nativeFs from "fs";
 import picomatch from "picomatch";
 
+//#region src/api/aborter.d.ts
+/**
+ * AbortController is not supported on Node 14 so we use this until we can drop
+ * support for Node 14.
+ */
+declare class Aborter {
+  aborted: boolean;
+  abort(): void;
+}
+//#endregion
 //#region src/api/queue.d.ts
 type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
 /**
@@ -37,6 +48,14 @@ type GroupOutput = Group[];
 type OnlyCountsOutput = Counts;
 type PathsOutput = string[];
 type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type FSLike = {
+  readdir: typeof nativeFs.readdir;
+  readdirSync: typeof nativeFs.readdirSync;
+  realpath: typeof nativeFs.realpath;
+  realpathSync: typeof nativeFs.realpathSync;
+  stat: typeof nativeFs.stat;
+  statSync: typeof nativeFs.statSync;
+};
 type WalkerState = {
   root: string;
   paths: string[];
@@ -44,7 +63,8 @@ type WalkerState = {
   counts: Counts;
   options: Options;
   queue: Queue;
-  controller: AbortController;
+  controller: Aborter;
+  fs: FSLike;
   symlinks: Map;
   visited: string[];
 };
@@ -72,6 +92,7 @@ type Options = {
   pathSeparator: PathSeparator;
   signal?: AbortSignal;
   globFunction?: TGlobFunction;
+  fs?: FSLike;
 };
 type GlobMatcher = (test: string) => boolean;
 type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
@@ -131,4 +152,4 @@ declare class Builder
+import * as nativeFs from "fs";
 import picomatch from "picomatch";
 
+//#region src/api/aborter.d.ts
+/**
+ * AbortController is not supported on Node 14 so we use this until we can drop
+ * support for Node 14.
+ */
+declare class Aborter {
+  aborted: boolean;
+  abort(): void;
+}
+//#endregion
 //#region src/api/queue.d.ts
 type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
 /**
@@ -37,6 +48,14 @@ type GroupOutput = Group[];
 type OnlyCountsOutput = Counts;
 type PathsOutput = string[];
 type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type FSLike = {
+  readdir: typeof nativeFs.readdir;
+  readdirSync: typeof nativeFs.readdirSync;
+  realpath: typeof nativeFs.realpath;
+  realpathSync: typeof nativeFs.realpathSync;
+  stat: typeof nativeFs.stat;
+  statSync: typeof nativeFs.statSync;
+};
 type WalkerState = {
   root: string;
   paths: string[];
@@ -44,7 +63,8 @@ type WalkerState = {
   counts: Counts;
   options: Options;
   queue: Queue;
-  controller: AbortController;
+  controller: Aborter;
+  fs: FSLike;
   symlinks: Map;
   visited: string[];
 };
@@ -72,6 +92,7 @@ type Options = {
   pathSeparator: PathSeparator;
   signal?: AbortSignal;
   globFunction?: TGlobFunction;
+  fs?: FSLike;
 };
 type GlobMatcher = (test: string) => boolean;
 type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
@@ -131,4 +152,4 @@ declare class Builder {
 		if (error) return queue.dequeue(suppressErrors ? null : error, state);
@@ -146,7 +146,7 @@ const resolveSymlinksAsync = function(path, state, callback$1) {
 	});
 };
 const resolveSymlinks = function(path, state, callback$1) {
-	const { queue, options: { suppressErrors } } = state;
+	const { queue, fs, options: { suppressErrors } } = state;
 	queue.enqueue();
 	try {
 		const resolvedPath = fs.realpathSync(path);
@@ -225,7 +225,8 @@ function build$1(options, isSynchronous) {
 const readdirOpts = { withFileTypes: true };
 const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
 	state.queue.enqueue();
-	if (currentDepth <= 0) return state.queue.dequeue(null, state);
+	if (currentDepth < 0) return state.queue.dequeue(null, state);
+	const { fs } = state;
 	state.visited.push(crawlPath);
 	state.counts.directories++;
 	fs.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
@@ -234,7 +235,8 @@ const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) =>
 	});
 };
 const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
-	if (currentDepth <= 0) return;
+	const { fs } = state;
+	if (currentDepth < 0) return;
 	state.visited.push(crawlPath);
 	state.counts.directories++;
 	let entries = [];
@@ -302,6 +304,19 @@ var Counter = class {
 	}
 };
 
+//#endregion
+//#region src/api/aborter.ts
+/**
+* AbortController is not supported on Node 14 so we use this until we can drop
+* support for Node 14.
+*/
+var Aborter = class {
+	aborted = false;
+	abort() {
+		this.aborted = true;
+	}
+};
+
 //#endregion
 //#region src/api/walker.ts
 var Walker = class {
@@ -329,7 +344,8 @@ var Walker = class {
 			queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
 			symlinks: /* @__PURE__ */ new Map(),
 			visited: [""].slice(0, 0),
-			controller: new AbortController()
+			controller: new Aborter(),
+			fs: options.fs || nativeFs
 		};
 		this.joinPath = build$7(this.root, options);
 		this.pushDirectory = build$6(this.root, options);
@@ -346,7 +362,7 @@ var Walker = class {
 	}
 	walk = (entries, directoryPath, depth) => {
 		const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
-		if (controller.signal.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+		if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
 		const files = this.getArray(this.state.paths);
 		for (let i = 0; i < entries.length; ++i) {
 			const entry = entries[i];
@@ -421,12 +437,12 @@ var APIBuilder = class {
 
 //#endregion
 //#region src/builder/index.ts
-var pm = null;
+let pm = null;
 /* c8 ignore next 6 */
 try {
 	__require.resolve("picomatch");
 	pm = __require("picomatch");
-} catch (_e) {}
+} catch {}
 var Builder = class {
 	globCache = {};
 	options = {
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/types.js b/node_modules/tinyglobby/node_modules/fdir/dist/types.js
deleted file mode 100644
index c8ad2e549bdc6..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/types.js
+++ /dev/null
@@ -1,2 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/tinyglobby/node_modules/fdir/dist/utils.js b/node_modules/tinyglobby/node_modules/fdir/dist/utils.js
deleted file mode 100644
index 539b2a0d414fe..0000000000000
--- a/node_modules/tinyglobby/node_modules/fdir/dist/utils.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizePath = exports.isRootDirectory = exports.convertSlashes = exports.cleanPath = void 0;
-const path_1 = require("path");
-function cleanPath(path) {
-    let normalized = (0, path_1.normalize)(path);
-    // we have to remove the last path separator
-    // to account for / root path
-    if (normalized.length > 1 && normalized[normalized.length - 1] === path_1.sep)
-        normalized = normalized.substring(0, normalized.length - 1);
-    return normalized;
-}
-exports.cleanPath = cleanPath;
-const SLASHES_REGEX = /[\\/]/g;
-function convertSlashes(path, separator) {
-    return path.replace(SLASHES_REGEX, separator);
-}
-exports.convertSlashes = convertSlashes;
-const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i;
-function isRootDirectory(path) {
-    return path === "/" || WINDOWS_ROOT_DIR_REGEX.test(path);
-}
-exports.isRootDirectory = isRootDirectory;
-function normalizePath(path, options) {
-    const { resolvePaths, normalizePath, pathSeparator } = options;
-    const pathNeedsCleaning = (process.platform === "win32" && path.includes("/")) ||
-        path.startsWith(".");
-    if (resolvePaths)
-        path = (0, path_1.resolve)(path);
-    if (normalizePath || pathNeedsCleaning)
-        path = cleanPath(path);
-    if (path === ".")
-        return "";
-    const needsSeperator = path[path.length - 1] !== pathSeparator;
-    return convertSlashes(needsSeperator ? path + pathSeparator : path, pathSeparator);
-}
-exports.normalizePath = normalizePath;
diff --git a/node_modules/tinyglobby/node_modules/fdir/package.json b/node_modules/tinyglobby/node_modules/fdir/package.json
index f76638120f3df..e229dff815080 100644
--- a/node_modules/tinyglobby/node_modules/fdir/package.json
+++ b/node_modules/tinyglobby/node_modules/fdir/package.json
@@ -1,12 +1,13 @@
 {
   "name": "fdir",
-  "version": "6.4.6",
+  "version": "6.5.0",
   "description": "The fastest directory crawler & globbing alternative to glob, fast-glob, & tiny-glob. Crawls 1m files in < 1s",
-  "main": "dist/index.js",
-  "types": "dist/index.d.ts",
+  "main": "./dist/index.cjs",
+  "types": "./dist/index.d.cts",
+  "type": "module",
   "scripts": {
     "prepublishOnly": "npm run test && npm run build",
-    "build": "tsc",
+    "build": "tsdown",
     "format": "prettier --write src __tests__ benchmarks",
     "test": "vitest run __tests__/",
     "test:coverage": "vitest run --coverage __tests__/",
@@ -16,6 +17,9 @@
     "bench:fdir": "ts-node benchmarks/fdir-benchmark.ts",
     "release": "./scripts/release.sh"
   },
+  "engines": {
+    "node": ">=12.0.0"
+  },
   "repository": {
     "type": "git",
     "url": "git+https://github.com/thecodrr/fdir.git"
@@ -47,7 +51,7 @@
     "@types/glob": "^8.1.0",
     "@types/mock-fs": "^4.13.4",
     "@types/node": "^20.9.4",
-    "@types/picomatch": "^3.0.0",
+    "@types/picomatch": "^4.0.0",
     "@types/tap": "^15.0.11",
     "@vitest/coverage-v8": "^0.34.6",
     "all-files-in-tree": "^1.1.2",
@@ -75,6 +79,7 @@
     "systeminformation": "^5.21.17",
     "tiny-glob": "^0.2.9",
     "ts-node": "^10.9.1",
+    "tsdown": "^0.12.5",
     "typescript": "^5.3.2",
     "vitest": "^0.34.6",
     "walk-sync": "^3.0.0"
@@ -86,5 +91,13 @@
     "picomatch": {
       "optional": true
     }
+  },
+  "module": "./dist/index.mjs",
+  "exports": {
+    ".": {
+      "import": "./dist/index.mjs",
+      "require": "./dist/index.cjs"
+    },
+    "./package.json": "./package.json"
   }
 }
diff --git a/node_modules/tinyglobby/package.json b/node_modules/tinyglobby/package.json
index afbf8a638d1d4..d0247c25ae3a1 100644
--- a/node_modules/tinyglobby/package.json
+++ b/node_modules/tinyglobby/package.json
@@ -1,13 +1,17 @@
 {
   "name": "tinyglobby",
-  "version": "0.2.14",
+  "version": "0.2.15",
   "description": "A fast and minimal alternative to globby and fast-glob",
-  "main": "dist/index.js",
-  "module": "dist/index.mjs",
-  "types": "dist/index.d.ts",
+  "type": "module",
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.mjs",
+  "types": "./dist/index.d.cts",
   "exports": {
-    "import": "./dist/index.mjs",
-    "require": "./dist/index.js"
+    ".": {
+      "import": "./dist/index.mjs",
+      "require": "./dist/index.cjs"
+    },
+    "./package.json": "./package.json"
   },
   "sideEffects": false,
   "files": [
@@ -28,38 +32,42 @@
   "bugs": {
     "url": "https://github.com/SuperchupuDev/tinyglobby/issues"
   },
-  "homepage": "https://github.com/SuperchupuDev/tinyglobby#readme",
+  "homepage": "https://superchupu.dev/tinyglobby",
   "funding": {
     "url": "https://github.com/sponsors/SuperchupuDev"
   },
   "dependencies": {
-    "fdir": "^6.4.4",
-    "picomatch": "^4.0.2"
+    "fdir": "^6.5.0",
+    "picomatch": "^4.0.3"
   },
   "devDependencies": {
-    "@biomejs/biome": "^1.9.4",
-    "@types/node": "^22.15.21",
-    "@types/picomatch": "^4.0.0",
-    "fs-fixture": "^2.7.1",
-    "tsdown": "^0.12.3",
-    "typescript": "^5.8.3"
+    "@biomejs/biome": "^2.2.3",
+    "@types/node": "^24.3.1",
+    "@types/picomatch": "^4.0.2",
+    "fast-glob": "^3.3.3",
+    "fs-fixture": "^2.8.1",
+    "glob": "^11.0.3",
+    "tinybench": "^5.0.1",
+    "tsdown": "^0.14.2",
+    "typescript": "^5.9.2"
   },
   "engines": {
     "node": ">=12.0.0"
   },
   "publishConfig": {
-    "access": "public",
     "provenance": true
   },
   "scripts": {
+    "bench": "node benchmark/bench.ts",
+    "bench:setup": "node benchmark/setup.ts",
     "build": "tsdown",
     "check": "biome check",
+    "check:fix": "biome check --write --unsafe",
     "format": "biome format --write",
     "lint": "biome lint",
-    "lint:fix": "biome lint --fix --unsafe",
-    "test": "node --experimental-transform-types --test",
-    "test:coverage": "node --experimental-transform-types --test --experimental-test-coverage",
-    "test:only": "node --experimental-transform-types --test --test-only",
+    "test": "node --test \"test/**/*.ts\"",
+    "test:coverage": "node --test --experimental-test-coverage \"test/**/*.ts\"",
+    "test:only": "node --test --test-only \"test/**/*.ts\"",
     "typecheck": "tsc --noEmit"
   }
 }
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index 3a77ab432ae4a..ffd1464d5d4ca 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -15815,12 +15815,14 @@
       "license": "MIT"
     },
     "node_modules/tinyglobby": {
-      "version": "0.2.14",
+      "version": "0.2.15",
+      "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
+      "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
-        "fdir": "^6.4.4",
-        "picomatch": "^4.0.2"
+        "fdir": "^6.5.0",
+        "picomatch": "^4.0.3"
       },
       "engines": {
         "node": ">=12.0.0"
@@ -15830,9 +15832,14 @@
       }
     },
     "node_modules/tinyglobby/node_modules/fdir": {
-      "version": "6.4.6",
+      "version": "6.5.0",
+      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
+      "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
       "inBundle": true,
       "license": "MIT",
+      "engines": {
+        "node": ">=12.0.0"
+      },
       "peerDependencies": {
         "picomatch": "^3 || ^4"
       },
@@ -15844,6 +15851,8 @@
     },
     "node_modules/tinyglobby/node_modules/picomatch": {
       "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
+      "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
       "inBundle": true,
       "license": "MIT",
       "peer": true,

From 5516583de7982f4b8d5142510429b809654d8f75 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:03:19 -0700
Subject: [PATCH 36/63] deps: socks@2.8.7

---
 DEPENDENCIES.md                               |    2 -
 node_modules/.gitignore                       |    2 -
 node_modules/ip-address/dist/address-error.js |    4 +-
 node_modules/ip-address/dist/common.js        |   26 +-
 node_modules/ip-address/dist/ip-address.js    |    6 +-
 node_modules/ip-address/dist/ipv4.js          |   53 +-
 node_modules/ip-address/dist/ipv6.js          |  135 +-
 node_modules/ip-address/dist/v6/constants.js  |    4 +-
 node_modules/ip-address/dist/v6/helpers.js    |   15 +-
 .../ip-address/dist/v6/regular-expressions.js |   23 +-
 node_modules/ip-address/package.json          |   47 +-
 node_modules/jsbn/LICENSE                     |   40 -
 node_modules/jsbn/example.html                |   11 -
 node_modules/jsbn/example.js                  |    5 -
 node_modules/jsbn/index.js                    | 1361 -----------------
 node_modules/jsbn/package.json                |   21 -
 node_modules/jsbn/test/es6-import.js          |    3 -
 node_modules/socks/package.json               |    4 +-
 node_modules/sprintf-js/CONTRIBUTORS.md       |   26 -
 node_modules/sprintf-js/LICENSE               |   24 -
 node_modules/sprintf-js/dist/.gitattributes   |    4 -
 .../sprintf-js/dist/angular-sprintf.min.js    |    3 -
 node_modules/sprintf-js/dist/sprintf.min.js   |    3 -
 node_modules/sprintf-js/package.json          |   35 -
 .../sprintf-js/src/angular-sprintf.js         |   24 -
 node_modules/sprintf-js/src/sprintf.js        |  231 ---
 package-lock.json                             |   24 +-
 27 files changed, 171 insertions(+), 1965 deletions(-)
 delete mode 100644 node_modules/jsbn/LICENSE
 delete mode 100644 node_modules/jsbn/example.html
 delete mode 100644 node_modules/jsbn/example.js
 delete mode 100644 node_modules/jsbn/index.js
 delete mode 100644 node_modules/jsbn/package.json
 delete mode 100644 node_modules/jsbn/test/es6-import.js
 delete mode 100644 node_modules/sprintf-js/CONTRIBUTORS.md
 delete mode 100644 node_modules/sprintf-js/LICENSE
 delete mode 100644 node_modules/sprintf-js/dist/.gitattributes
 delete mode 100644 node_modules/sprintf-js/dist/angular-sprintf.min.js
 delete mode 100644 node_modules/sprintf-js/dist/sprintf.min.js
 delete mode 100644 node_modules/sprintf-js/package.json
 delete mode 100644 node_modules/sprintf-js/src/angular-sprintf.js
 delete mode 100644 node_modules/sprintf-js/src/sprintf.js

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index c8c0852ff8fb7..e7b7e57f50615 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -302,8 +302,6 @@ graph LR;
   init-package-json-->semver;
   init-package-json-->validate-npm-package-license;
   init-package-json-->validate-npm-package-name;
-  ip-address-->jsbn;
-  ip-address-->sprintf-js;
   is-cidr-->cidr-regex;
   isaacs-brace-expansion-->isaacs-balanced-match["@isaacs/balanced-match"];
   isaacs-cliui-->string-width-cjs;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 3729ec7a958fa..8883d013963f4 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -102,7 +102,6 @@
 !/is-cidr
 !/is-fullwidth-code-point
 !/jackspeak
-!/jsbn
 !/json-parse-even-better-errors
 !/json-stringify-nice
 !/jsonparse
@@ -197,7 +196,6 @@
 !/spdx-exceptions
 !/spdx-expression-parse
 !/spdx-license-ids
-!/sprintf-js
 !/ssri
 !/string-width-cjs
 !/string-width
diff --git a/node_modules/ip-address/dist/address-error.js b/node_modules/ip-address/dist/address-error.js
index 4fcade3ba2486..c178ae48200ac 100644
--- a/node_modules/ip-address/dist/address-error.js
+++ b/node_modules/ip-address/dist/address-error.js
@@ -5,9 +5,7 @@ class AddressError extends Error {
     constructor(message, parseMessage) {
         super(message);
         this.name = 'AddressError';
-        if (parseMessage !== null) {
-            this.parseMessage = parseMessage;
-        }
+        this.parseMessage = parseMessage;
     }
 }
 exports.AddressError = AddressError;
diff --git a/node_modules/ip-address/dist/common.js b/node_modules/ip-address/dist/common.js
index 4d10c9a4e8203..273a01e28e317 100644
--- a/node_modules/ip-address/dist/common.js
+++ b/node_modules/ip-address/dist/common.js
@@ -1,6 +1,10 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.isCorrect = exports.isInSubnet = void 0;
+exports.isInSubnet = isInSubnet;
+exports.isCorrect = isCorrect;
+exports.numberToPaddedHex = numberToPaddedHex;
+exports.stringToPaddedHex = stringToPaddedHex;
+exports.testBit = testBit;
 function isInSubnet(address) {
     if (this.subnetMask < address.subnetMask) {
         return false;
@@ -10,7 +14,6 @@ function isInSubnet(address) {
     }
     return false;
 }
-exports.isInSubnet = isInSubnet;
 function isCorrect(defaultBits) {
     return function () {
         if (this.addressMinusSuffix !== this.correctForm()) {
@@ -22,5 +25,22 @@ function isCorrect(defaultBits) {
         return this.parsedSubnet === String(this.subnetMask);
     };
 }
-exports.isCorrect = isCorrect;
+function numberToPaddedHex(number) {
+    return number.toString(16).padStart(2, '0');
+}
+function stringToPaddedHex(numberString) {
+    return numberToPaddedHex(parseInt(numberString, 10));
+}
+/**
+ * @param binaryValue Binary representation of a value (e.g. `10`)
+ * @param position Byte position, where 0 is the least significant bit
+ */
+function testBit(binaryValue, position) {
+    const { length } = binaryValue;
+    if (position > length) {
+        return false;
+    }
+    const positionInString = length - position;
+    return binaryValue.substring(positionInString, positionInString + 1) === '1';
+}
 //# sourceMappingURL=common.js.map
\ No newline at end of file
diff --git a/node_modules/ip-address/dist/ip-address.js b/node_modules/ip-address/dist/ip-address.js
index 553c005a63cb6..84f348709fe54 100644
--- a/node_modules/ip-address/dist/ip-address.js
+++ b/node_modules/ip-address/dist/ip-address.js
@@ -24,11 +24,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.v6 = exports.AddressError = exports.Address6 = exports.Address4 = void 0;
-const ipv4_1 = require("./ipv4");
+var ipv4_1 = require("./ipv4");
 Object.defineProperty(exports, "Address4", { enumerable: true, get: function () { return ipv4_1.Address4; } });
-const ipv6_1 = require("./ipv6");
+var ipv6_1 = require("./ipv6");
 Object.defineProperty(exports, "Address6", { enumerable: true, get: function () { return ipv6_1.Address6; } });
-const address_error_1 = require("./address-error");
+var address_error_1 = require("./address-error");
 Object.defineProperty(exports, "AddressError", { enumerable: true, get: function () { return address_error_1.AddressError; } });
 const helpers = __importStar(require("./v6/helpers"));
 exports.v6 = { helpers };
diff --git a/node_modules/ip-address/dist/ipv4.js b/node_modules/ip-address/dist/ipv4.js
index 22a81b5047f05..f1b60064c5fd5 100644
--- a/node_modules/ip-address/dist/ipv4.js
+++ b/node_modules/ip-address/dist/ipv4.js
@@ -28,8 +28,6 @@ exports.Address4 = void 0;
 const common = __importStar(require("./common"));
 const constants = __importStar(require("./v4/constants"));
 const address_error_1 = require("./address-error");
-const jsbn_1 = require("jsbn");
-const sprintf_js_1 = require("sprintf-js");
 /**
  * Represents an IPv4 address
  * @class Address4
@@ -150,7 +148,7 @@ class Address4 {
      * @returns {String}
      */
     toHex() {
-        return this.parsedAddress.map((part) => (0, sprintf_js_1.sprintf)('%02x', parseInt(part, 10))).join(':');
+        return this.parsedAddress.map((part) => common.stringToPaddedHex(part)).join(':');
     }
     /**
      * Converts an IPv4 address object to an array of bytes
@@ -171,28 +169,27 @@ class Address4 {
         const output = [];
         let i;
         for (i = 0; i < constants.GROUPS; i += 2) {
-            const hex = (0, sprintf_js_1.sprintf)('%02x%02x', parseInt(this.parsedAddress[i], 10), parseInt(this.parsedAddress[i + 1], 10));
-            output.push((0, sprintf_js_1.sprintf)('%x', parseInt(hex, 16)));
+            output.push(`${common.stringToPaddedHex(this.parsedAddress[i])}${common.stringToPaddedHex(this.parsedAddress[i + 1])}`);
         }
         return output.join(':');
     }
     /**
-     * Returns the address as a BigInteger
+     * Returns the address as a `bigint`
      * @memberof Address4
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
-    bigInteger() {
-        return new jsbn_1.BigInteger(this.parsedAddress.map((n) => (0, sprintf_js_1.sprintf)('%02x', parseInt(n, 10))).join(''), 16);
+    bigInt() {
+        return BigInt(`0x${this.parsedAddress.map((n) => common.stringToPaddedHex(n)).join('')}`);
     }
     /**
      * Helper function getting start address.
      * @memberof Address4
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
     _startAddress() {
-        return new jsbn_1.BigInteger(this.mask() + '0'.repeat(constants.BITS - this.subnetMask), 2);
+        return BigInt(`0b${this.mask() + '0'.repeat(constants.BITS - this.subnetMask)}`);
     }
     /**
      * The first address in the range given by this address' subnet.
@@ -202,7 +199,7 @@ class Address4 {
      * @returns {Address4}
      */
     startAddress() {
-        return Address4.fromBigInteger(this._startAddress());
+        return Address4.fromBigInt(this._startAddress());
     }
     /**
      * The first host address in the range given by this address's subnet ie
@@ -212,17 +209,17 @@ class Address4 {
      * @returns {Address4}
      */
     startAddressExclusive() {
-        const adjust = new jsbn_1.BigInteger('1');
-        return Address4.fromBigInteger(this._startAddress().add(adjust));
+        const adjust = BigInt('1');
+        return Address4.fromBigInt(this._startAddress() + adjust);
     }
     /**
      * Helper function getting end address.
      * @memberof Address4
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
     _endAddress() {
-        return new jsbn_1.BigInteger(this.mask() + '1'.repeat(constants.BITS - this.subnetMask), 2);
+        return BigInt(`0b${this.mask() + '1'.repeat(constants.BITS - this.subnetMask)}`);
     }
     /**
      * The last address in the range given by this address' subnet
@@ -232,7 +229,7 @@ class Address4 {
      * @returns {Address4}
      */
     endAddress() {
-        return Address4.fromBigInteger(this._endAddress());
+        return Address4.fromBigInt(this._endAddress());
     }
     /**
      * The last host address in the range given by this address's subnet ie
@@ -242,18 +239,18 @@ class Address4 {
      * @returns {Address4}
      */
     endAddressExclusive() {
-        const adjust = new jsbn_1.BigInteger('1');
-        return Address4.fromBigInteger(this._endAddress().subtract(adjust));
+        const adjust = BigInt('1');
+        return Address4.fromBigInt(this._endAddress() - adjust);
     }
     /**
-     * Converts a BigInteger to a v4 address object
+     * Converts a BigInt to a v4 address object
      * @memberof Address4
      * @static
-     * @param {BigInteger} bigInteger - a BigInteger to convert
+     * @param {bigint} bigInt - a BigInt to convert
      * @returns {Address4}
      */
-    static fromBigInteger(bigInteger) {
-        return Address4.fromInteger(parseInt(bigInteger.toString(), 10));
+    static fromBigInt(bigInt) {
+        return Address4.fromHex(bigInt.toString(16));
     }
     /**
      * Returns the first n bits of the address, defaulting to the
@@ -293,7 +290,7 @@ class Address4 {
         if (options.omitSuffix) {
             return reversed;
         }
-        return (0, sprintf_js_1.sprintf)('%s.in-addr.arpa.', reversed);
+        return `${reversed}.in-addr.arpa.`;
     }
     /**
      * Returns true if the given address is a multicast address
@@ -311,7 +308,7 @@ class Address4 {
      * @returns {string}
      */
     binaryZeroPad() {
-        return this.bigInteger().toString(2).padStart(constants.BITS, '0');
+        return this.bigInt().toString(2).padStart(constants.BITS, '0');
     }
     /**
      * Groups an IPv4 address for inclusion at the end of an IPv6 address
@@ -319,7 +316,11 @@ class Address4 {
      */
     groupForV6() {
         const segments = this.parsedAddress;
-        return this.address.replace(constants.RE_ADDRESS, (0, sprintf_js_1.sprintf)('%s.%s', segments.slice(0, 2).join('.'), segments.slice(2, 4).join('.')));
+        return this.address.replace(constants.RE_ADDRESS, `${segments
+            .slice(0, 2)
+            .join('.')}.${segments
+            .slice(2, 4)
+            .join('.')}`);
     }
 }
 exports.Address4 = Address4;
diff --git a/node_modules/ip-address/dist/ipv6.js b/node_modules/ip-address/dist/ipv6.js
index c88ab84b9ad77..5f88ab63a56eb 100644
--- a/node_modules/ip-address/dist/ipv6.js
+++ b/node_modules/ip-address/dist/ipv6.js
@@ -33,8 +33,7 @@ const helpers = __importStar(require("./v6/helpers"));
 const ipv4_1 = require("./ipv4");
 const regular_expressions_1 = require("./v6/regular-expressions");
 const address_error_1 = require("./address-error");
-const jsbn_1 = require("jsbn");
-const sprintf_js_1 = require("sprintf-js");
+const common_1 = require("./common");
 function assert(condition) {
     if (!condition) {
         throw new Error('Assertion failed.');
@@ -70,7 +69,7 @@ function compact(address, slice) {
     return s1.concat(['compact']).concat(s2);
 }
 function paddedHex(octet) {
-    return (0, sprintf_js_1.sprintf)('%04x', parseInt(octet, 16));
+    return parseInt(octet, 16).toString(16).padStart(4, '0');
 }
 function unsignByte(b) {
     // eslint-disable-next-line no-bitwise
@@ -148,18 +147,18 @@ class Address6 {
         }
     }
     /**
-     * Convert a BigInteger to a v6 address object
+     * Convert a BigInt to a v6 address object
      * @memberof Address6
      * @static
-     * @param {BigInteger} bigInteger - a BigInteger to convert
+     * @param {bigint} bigInt - a BigInt to convert
      * @returns {Address6}
      * @example
-     * var bigInteger = new BigInteger('1000000000000');
-     * var address = Address6.fromBigInteger(bigInteger);
+     * var bigInt = BigInt('1000000000000');
+     * var address = Address6.fromBigInt(bigInt);
      * address.correctForm(); // '::e8:d4a5:1000'
      */
-    static fromBigInteger(bigInteger) {
-        const hex = bigInteger.toString(16).padStart(32, '0');
+    static fromBigInt(bigInt) {
+        const hex = bigInt.toString(16).padStart(32, '0');
         const groups = [];
         let i;
         for (i = 0; i < constants6.GROUPS; i++) {
@@ -279,7 +278,7 @@ class Address6 {
      * @returns {String} the Microsoft UNC transcription of the address
      */
     microsoftTranscription() {
-        return (0, sprintf_js_1.sprintf)('%s.ipv6-literal.net', this.correctForm().replace(/:/g, '-'));
+        return `${this.correctForm().replace(/:/g, '-')}.ipv6-literal.net`;
     }
     /**
      * Return the first n bits of the address, defaulting to the subnet mask
@@ -295,7 +294,7 @@ class Address6 {
      * Return the number of possible subnets of a given size in the address
      * @memberof Address6
      * @instance
-     * @param {number} [size=128] - the subnet size
+     * @param {number} [subnetSize=128] - the subnet size
      * @returns {String}
      */
     // TODO: probably useful to have a numeric version of this too
@@ -306,16 +305,16 @@ class Address6 {
         if (subnetPowers < 0) {
             return '0';
         }
-        return addCommas(new jsbn_1.BigInteger('2', 10).pow(subnetPowers).toString(10));
+        return addCommas((BigInt('2') ** BigInt(subnetPowers)).toString(10));
     }
     /**
      * Helper function getting start address.
      * @memberof Address6
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
     _startAddress() {
-        return new jsbn_1.BigInteger(this.mask() + '0'.repeat(constants6.BITS - this.subnetMask), 2);
+        return BigInt(`0b${this.mask() + '0'.repeat(constants6.BITS - this.subnetMask)}`);
     }
     /**
      * The first address in the range given by this address' subnet
@@ -325,7 +324,7 @@ class Address6 {
      * @returns {Address6}
      */
     startAddress() {
-        return Address6.fromBigInteger(this._startAddress());
+        return Address6.fromBigInt(this._startAddress());
     }
     /**
      * The first host address in the range given by this address's subnet ie
@@ -335,17 +334,17 @@ class Address6 {
      * @returns {Address6}
      */
     startAddressExclusive() {
-        const adjust = new jsbn_1.BigInteger('1');
-        return Address6.fromBigInteger(this._startAddress().add(adjust));
+        const adjust = BigInt('1');
+        return Address6.fromBigInt(this._startAddress() + adjust);
     }
     /**
      * Helper function getting end address.
      * @memberof Address6
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
     _endAddress() {
-        return new jsbn_1.BigInteger(this.mask() + '1'.repeat(constants6.BITS - this.subnetMask), 2);
+        return BigInt(`0b${this.mask() + '1'.repeat(constants6.BITS - this.subnetMask)}`);
     }
     /**
      * The last address in the range given by this address' subnet
@@ -355,7 +354,7 @@ class Address6 {
      * @returns {Address6}
      */
     endAddress() {
-        return Address6.fromBigInteger(this._endAddress());
+        return Address6.fromBigInt(this._endAddress());
     }
     /**
      * The last host address in the range given by this address's subnet ie
@@ -365,8 +364,8 @@ class Address6 {
      * @returns {Address6}
      */
     endAddressExclusive() {
-        const adjust = new jsbn_1.BigInteger('1');
-        return Address6.fromBigInteger(this._endAddress().subtract(adjust));
+        const adjust = BigInt('1');
+        return Address6.fromBigInt(this._endAddress() - adjust);
     }
     /**
      * Return the scope of the address
@@ -375,7 +374,7 @@ class Address6 {
      * @returns {String}
      */
     getScope() {
-        let scope = constants6.SCOPES[this.getBits(12, 16).intValue()];
+        let scope = constants6.SCOPES[parseInt(this.getBits(12, 16).toString(10), 10)];
         if (this.getType() === 'Global unicast' && scope !== 'Link local') {
             scope = 'Global';
         }
@@ -396,13 +395,13 @@ class Address6 {
         return 'Global unicast';
     }
     /**
-     * Return the bits in the given range as a BigInteger
+     * Return the bits in the given range as a BigInt
      * @memberof Address6
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
     getBits(start, end) {
-        return new jsbn_1.BigInteger(this.getBitsBase2(start, end), 2);
+        return BigInt(`0b${this.getBitsBase2(start, end)}`);
     }
     /**
      * Return the bits in the given range as a base-2 string
@@ -460,7 +459,7 @@ class Address6 {
             if (options.omitSuffix) {
                 return reversed;
             }
-            return (0, sprintf_js_1.sprintf)('%s.ip6.arpa.', reversed);
+            return `${reversed}.ip6.arpa.`;
         }
         if (options.omitSuffix) {
             return '';
@@ -509,7 +508,7 @@ class Address6 {
         }
         let correct = groups.join(':');
         correct = correct.replace(/^compact$/, '::');
-        correct = correct.replace(/^compact|compact$/, ':');
+        correct = correct.replace(/(^compact)|(compact$)/, ':');
         correct = correct.replace(/compact/, '');
         return correct;
     }
@@ -525,7 +524,7 @@ class Address6 {
      * //  0000000000000000000000000000000000000000000000000001000000010001'
      */
     binaryZeroPad() {
-        return this.bigInteger().toString(2).padStart(constants6.BITS, '0');
+        return this.bigInt().toString(2).padStart(constants6.BITS, '0');
     }
     // TODO: Improve the semantics of this helper function
     parse4in6(address) {
@@ -551,11 +550,11 @@ class Address6 {
         address = this.parse4in6(address);
         const badCharacters = address.match(constants6.RE_BAD_CHARACTERS);
         if (badCharacters) {
-            throw new address_error_1.AddressError((0, sprintf_js_1.sprintf)('Bad character%s detected in address: %s', badCharacters.length > 1 ? 's' : '', badCharacters.join('')), address.replace(constants6.RE_BAD_CHARACTERS, '$1'));
+            throw new address_error_1.AddressError(`Bad character${badCharacters.length > 1 ? 's' : ''} detected in address: ${badCharacters.join('')}`, address.replace(constants6.RE_BAD_CHARACTERS, '$1'));
         }
         const badAddress = address.match(constants6.RE_BAD_ADDRESS);
         if (badAddress) {
-            throw new address_error_1.AddressError((0, sprintf_js_1.sprintf)('Address failed regex: %s', badAddress.join('')), address.replace(constants6.RE_BAD_ADDRESS, '$1'));
+            throw new address_error_1.AddressError(`Address failed regex: ${badAddress.join('')}`, address.replace(constants6.RE_BAD_ADDRESS, '$1'));
         }
         let groups = [];
         const halves = address.split('::');
@@ -588,7 +587,7 @@ class Address6 {
         else {
             throw new address_error_1.AddressError('Too many :: groups found');
         }
-        groups = groups.map((group) => (0, sprintf_js_1.sprintf)('%x', parseInt(group, 16)));
+        groups = groups.map((group) => parseInt(group, 16).toString(16));
         if (groups.length !== this.groups) {
             throw new address_error_1.AddressError('Incorrect number of groups found');
         }
@@ -610,16 +609,16 @@ class Address6 {
      * @returns {String}
      */
     decimal() {
-        return this.parsedAddress.map((n) => (0, sprintf_js_1.sprintf)('%05d', parseInt(n, 16))).join(':');
+        return this.parsedAddress.map((n) => parseInt(n, 16).toString(10).padStart(5, '0')).join(':');
     }
     /**
-     * Return the address as a BigInteger
+     * Return the address as a BigInt
      * @memberof Address6
      * @instance
-     * @returns {BigInteger}
+     * @returns {bigint}
      */
-    bigInteger() {
-        return new jsbn_1.BigInteger(this.parsedAddress.map(paddedHex).join(''), 16);
+    bigInt() {
+        return BigInt(`0x${this.parsedAddress.map(paddedHex).join('')}`);
     }
     /**
      * Return the last two groups of this address as an IPv4 address string
@@ -632,7 +631,7 @@ class Address6 {
      */
     to4() {
         const binary = this.binaryZeroPad().split('');
-        return ipv4_1.Address4.fromHex(new jsbn_1.BigInteger(binary.slice(96, 128).join(''), 2).toString(16));
+        return ipv4_1.Address4.fromHex(BigInt(`0b${binary.slice(96, 128).join('')}`).toString(16));
     }
     /**
      * Return the v4-in-v6 form of the address
@@ -679,18 +678,21 @@ class Address6 {
           public IPv4 address of the NAT with all bits inverted.
         */
         const prefix = this.getBitsBase16(0, 32);
-        const udpPort = this.getBits(80, 96).xor(new jsbn_1.BigInteger('ffff', 16)).toString();
+        const bitsForUdpPort = this.getBits(80, 96);
+        // eslint-disable-next-line no-bitwise
+        const udpPort = (bitsForUdpPort ^ BigInt('0xffff')).toString();
         const server4 = ipv4_1.Address4.fromHex(this.getBitsBase16(32, 64));
-        const client4 = ipv4_1.Address4.fromHex(this.getBits(96, 128).xor(new jsbn_1.BigInteger('ffffffff', 16)).toString(16));
-        const flags = this.getBits(64, 80);
+        const bitsForClient4 = this.getBits(96, 128);
+        // eslint-disable-next-line no-bitwise
+        const client4 = ipv4_1.Address4.fromHex((bitsForClient4 ^ BigInt('0xffffffff')).toString(16));
         const flagsBase2 = this.getBitsBase2(64, 80);
-        const coneNat = flags.testBit(15);
-        const reserved = flags.testBit(14);
-        const groupIndividual = flags.testBit(8);
-        const universalLocal = flags.testBit(9);
-        const nonce = new jsbn_1.BigInteger(flagsBase2.slice(2, 6) + flagsBase2.slice(8, 16), 2).toString(10);
+        const coneNat = (0, common_1.testBit)(flagsBase2, 15);
+        const reserved = (0, common_1.testBit)(flagsBase2, 14);
+        const groupIndividual = (0, common_1.testBit)(flagsBase2, 8);
+        const universalLocal = (0, common_1.testBit)(flagsBase2, 9);
+        const nonce = BigInt(`0b${flagsBase2.slice(2, 6) + flagsBase2.slice(8, 16)}`).toString(10);
         return {
-            prefix: (0, sprintf_js_1.sprintf)('%s:%s', prefix.slice(0, 4), prefix.slice(4, 8)),
+            prefix: `${prefix.slice(0, 4)}:${prefix.slice(4, 8)}`,
             server4: server4.address,
             client4: client4.address,
             flags: flagsBase2,
@@ -718,7 +720,7 @@ class Address6 {
         const prefix = this.getBitsBase16(0, 16);
         const gateway = ipv4_1.Address4.fromHex(this.getBitsBase16(16, 48));
         return {
-            prefix: (0, sprintf_js_1.sprintf)('%s', prefix.slice(0, 4)),
+            prefix: prefix.slice(0, 4),
             gateway: gateway.address,
         };
     }
@@ -748,12 +750,14 @@ class Address6 {
      * @returns {Array}
      */
     toByteArray() {
-        const byteArray = this.bigInteger().toByteArray();
-        // work around issue where `toByteArray` returns a leading 0 element
-        if (byteArray.length === 17 && byteArray[0] === 0) {
-            return byteArray.slice(1);
+        const valueWithoutPadding = this.bigInt().toString(16);
+        const leadingPad = '0'.repeat(valueWithoutPadding.length % 2);
+        const value = `${leadingPad}${valueWithoutPadding}`;
+        const bytes = [];
+        for (let i = 0, length = value.length; i < length; i += 2) {
+            bytes.push(parseInt(value.substring(i, i + 2), 16));
         }
-        return byteArray;
+        return bytes;
     }
     /**
      * Return an unsigned byte array
@@ -780,14 +784,14 @@ class Address6 {
      * @returns {Address6}
      */
     static fromUnsignedByteArray(bytes) {
-        const BYTE_MAX = new jsbn_1.BigInteger('256', 10);
-        let result = new jsbn_1.BigInteger('0', 10);
-        let multiplier = new jsbn_1.BigInteger('1', 10);
+        const BYTE_MAX = BigInt('256');
+        let result = BigInt('0');
+        let multiplier = BigInt('1');
         for (let i = bytes.length - 1; i >= 0; i--) {
-            result = result.add(multiplier.multiply(new jsbn_1.BigInteger(bytes[i].toString(10), 10)));
-            multiplier = multiplier.multiply(BYTE_MAX);
+            result += multiplier * BigInt(bytes[i].toString(10));
+            multiplier *= BYTE_MAX;
         }
-        return Address6.fromBigInteger(result);
+        return Address6.fromBigInt(result);
     }
     /**
      * Returns true if the address is in the canonical form, false otherwise
@@ -867,9 +871,9 @@ class Address6 {
             optionalPort = '';
         }
         else {
-            optionalPort = (0, sprintf_js_1.sprintf)(':%s', optionalPort);
+            optionalPort = `:${optionalPort}`;
         }
-        return (0, sprintf_js_1.sprintf)('http://[%s]%s/', this.correctForm(), optionalPort);
+        return `http://[${this.correctForm()}]${optionalPort}/`;
     }
     /**
      * @returns {String} a link suitable for conveying the address via a URL hash
@@ -891,10 +895,11 @@ class Address6 {
         if (options.v4) {
             formFunction = this.to4in6;
         }
+        const form = formFunction.call(this);
         if (options.className) {
-            return (0, sprintf_js_1.sprintf)('%2$s', options.prefix, formFunction.call(this), options.className);
+            return `${form}`;
         }
-        return (0, sprintf_js_1.sprintf)('%2$s', options.prefix, formFunction.call(this));
+        return `${form}`;
     }
     /**
      * Groups an address
@@ -918,9 +923,9 @@ class Address6 {
         }
         const classes = ['hover-group'];
         for (let i = this.elisionBegin; i < this.elisionBegin + this.elidedGroups; i++) {
-            classes.push((0, sprintf_js_1.sprintf)('group-%d', i));
+            classes.push(`group-${i}`);
         }
-        output.push((0, sprintf_js_1.sprintf)('', classes.join(' ')));
+        output.push(``);
         if (right.length) {
             output.push(...helpers.simpleGroup(right, this.elisionEnd));
         }
diff --git a/node_modules/ip-address/dist/v6/constants.js b/node_modules/ip-address/dist/v6/constants.js
index e316bb0d0c2cd..0abc423e0a91a 100644
--- a/node_modules/ip-address/dist/v6/constants.js
+++ b/node_modules/ip-address/dist/v6/constants.js
@@ -71,6 +71,6 @@ exports.RE_SUBNET_STRING = /\/\d{1,3}(?=%|$)/;
  * @static
  */
 exports.RE_ZONE_STRING = /%.*$/;
-exports.RE_URL = new RegExp(/^\[{0,1}([0-9a-f:]+)\]{0,1}/);
-exports.RE_URL_WITH_PORT = new RegExp(/\[([0-9a-f:]+)\]:([0-9]{1,5})/);
+exports.RE_URL = /^\[{0,1}([0-9a-f:]+)\]{0,1}/;
+exports.RE_URL_WITH_PORT = /\[([0-9a-f:]+)\]:([0-9]{1,5})/;
 //# sourceMappingURL=constants.js.map
\ No newline at end of file
diff --git a/node_modules/ip-address/dist/v6/helpers.js b/node_modules/ip-address/dist/v6/helpers.js
index 918aaa58c85d7..fafca0c2712dd 100644
--- a/node_modules/ip-address/dist/v6/helpers.js
+++ b/node_modules/ip-address/dist/v6/helpers.js
@@ -1,25 +1,24 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.simpleGroup = exports.spanLeadingZeroes = exports.spanAll = exports.spanAllZeroes = void 0;
-const sprintf_js_1 = require("sprintf-js");
+exports.spanAllZeroes = spanAllZeroes;
+exports.spanAll = spanAll;
+exports.spanLeadingZeroes = spanLeadingZeroes;
+exports.simpleGroup = simpleGroup;
 /**
 * @returns {String} the string with all zeroes contained in a <span>
  */
 function spanAllZeroes(s) {
     return s.replace(/(0+)/g, '<span class="zero">$1</span>');
 }
-exports.spanAllZeroes = spanAllZeroes;
 /**
 * @returns {String} the string with each character contained in a <span>
  */
 function spanAll(s, offset = 0) {
     const letters = s.split('');
     return letters
-        .map((n, i) => (0, sprintf_js_1.sprintf)('%s', n, i + offset, spanAllZeroes(n)) // XXX Use #base-2 .value-0 instead?
-    )
+        .map((n, i) => `${spanAllZeroes(n)}`)
         .join('');
 }
-exports.spanAll = spanAll;
 function spanLeadingZeroesSimple(group) {
     return group.replace(/^(0+)/, '<span class="zero">$1</span>');
 }
@@ -30,7 +29,6 @@ function spanLeadingZeroes(address) {
     const groups = address.split(':');
     return groups.map((g) => spanLeadingZeroesSimple(g)).join(':');
 }
-exports.spanLeadingZeroes = spanLeadingZeroes;
 /**
  * Groups an address
  * @returns {String} a grouped address
@@ -41,8 +39,7 @@ function simpleGroup(addressString, offset = 0) {
         if (/group-v4/.test(g)) {
             return g;
         }
-        return (0, sprintf_js_1.sprintf)('%s', i + offset, spanLeadingZeroesSimple(g));
+        return `${spanLeadingZeroesSimple(g)}`;
     });
 }
-exports.simpleGroup = simpleGroup;
 //# sourceMappingURL=helpers.js.map
\ No newline at end of file
diff --git a/node_modules/ip-address/dist/v6/regular-expressions.js b/node_modules/ip-address/dist/v6/regular-expressions.js
index 616550a864509..a2c51459307fd 100644
--- a/node_modules/ip-address/dist/v6/regular-expressions.js
+++ b/node_modules/ip-address/dist/v6/regular-expressions.js
@@ -23,20 +23,21 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.possibleElisions = exports.simpleRegularExpression = exports.ADDRESS_BOUNDARY = exports.padGroup = exports.groupPossibilities = void 0;
+exports.ADDRESS_BOUNDARY = void 0;
+exports.groupPossibilities = groupPossibilities;
+exports.padGroup = padGroup;
+exports.simpleRegularExpression = simpleRegularExpression;
+exports.possibleElisions = possibleElisions;
 const v6 = __importStar(require("./constants"));
-const sprintf_js_1 = require("sprintf-js");
 function groupPossibilities(possibilities) {
-    return (0, sprintf_js_1.sprintf)('(%s)', possibilities.join('|'));
+    return `(${possibilities.join('|')})`;
 }
-exports.groupPossibilities = groupPossibilities;
 function padGroup(group) {
     if (group.length < 4) {
-        return (0, sprintf_js_1.sprintf)('0{0,%d}%s', 4 - group.length, group);
+        return `0{0,${4 - group.length}}${group}`;
     }
     return group;
 }
-exports.padGroup = padGroup;
 exports.ADDRESS_BOUNDARY = '[^A-Fa-f0-9:]';
 function simpleRegularExpression(groups) {
     const zeroIndexes = [];
@@ -61,7 +62,6 @@ function simpleRegularExpression(groups) {
     possibilities.push(groups.map(padGroup).join(':'));
     return groupPossibilities(possibilities);
 }
-exports.simpleRegularExpression = simpleRegularExpression;
 function possibleElisions(elidedGroups, moreLeft, moreRight) {
     const left = moreLeft ? '' : ':';
     const right = moreRight ? '' : ':';
@@ -79,18 +79,17 @@ function possibleElisions(elidedGroups, moreLeft, moreRight) {
         possibilities.push(':');
     }
     // 4. elision from the left side
-    possibilities.push((0, sprintf_js_1.sprintf)('%s(:0{1,4}){1,%d}', left, elidedGroups - 1));
+    possibilities.push(`${left}(:0{1,4}){1,${elidedGroups - 1}}`);
     // 5. elision from the right side
-    possibilities.push((0, sprintf_js_1.sprintf)('(0{1,4}:){1,%d}%s', elidedGroups - 1, right));
+    possibilities.push(`(0{1,4}:){1,${elidedGroups - 1}}${right}`);
     // 6. no elision
-    possibilities.push((0, sprintf_js_1.sprintf)('(0{1,4}:){%d}0{1,4}', elidedGroups - 1));
+    possibilities.push(`(0{1,4}:){${elidedGroups - 1}}0{1,4}`);
     // 7. elision (including sloppy elision) from the middle
     for (let groups = 1; groups < elidedGroups - 1; groups++) {
         for (let position = 1; position < elidedGroups - groups; position++) {
-            possibilities.push((0, sprintf_js_1.sprintf)('(0{1,4}:){%d}:(0{1,4}:){%d}0{1,4}', position, elidedGroups - position - groups - 1));
+            possibilities.push(`(0{1,4}:){${position}}:(0{1,4}:){${elidedGroups - position - groups - 1}}0{1,4}`);
         }
     }
     return groupPossibilities(possibilities);
 }
-exports.possibleElisions = possibleElisions;
 //# sourceMappingURL=regular-expressions.js.map
\ No newline at end of file
diff --git a/node_modules/ip-address/package.json b/node_modules/ip-address/package.json
index 0543fc41a1306..87795e06433cb 100644
--- a/node_modules/ip-address/package.json
+++ b/node_modules/ip-address/package.json
@@ -7,7 +7,7 @@
     "browser",
     "validation"
   ],
-  "version": "9.0.5",
+  "version": "10.0.1",
   "author": "Beau Gunderson  (https://beaugunderson.com/)",
   "license": "MIT",
   "main": "dist/ip-address.js",
@@ -51,37 +51,28 @@
     "type": "git",
     "url": "git://github.com/beaugunderson/ip-address.git"
   },
-  "dependencies": {
-    "jsbn": "1.1.0",
-    "sprintf-js": "^1.1.3"
-  },
   "devDependencies": {
-    "@types/chai": "^4.2.18",
-    "@types/jsbn": "^1.2.31",
-    "@types/mocha": "^10.0.1",
-    "@types/sprintf-js": "^1.1.2",
-    "@typescript-eslint/eslint-plugin": "^6.7.2",
-    "@typescript-eslint/parser": "^6.7.2",
-    "browserify": "^17.0.0",
-    "chai": "^4.3.4",
-    "codecov": "^3.8.2",
-    "documentation": "^14.0.2",
+    "@types/chai": "^5.0.0",
+    "@types/mocha": "^10.0.8",
+    "@typescript-eslint/eslint-plugin": "^8.8.0",
+    "@typescript-eslint/parser": "^8.8.0",
+    "chai": "^5.1.1",
+    "documentation": "^14.0.3",
     "eslint": "^8.50.0",
+    "eslint_d": "^14.0.4",
     "eslint-config-airbnb": "^19.0.4",
-    "eslint-config-prettier": "^9.0.0",
+    "eslint-config-prettier": "^9.1.0",
     "eslint-plugin-filenames": "^1.3.2",
-    "eslint-plugin-import": "^2.23.4",
-    "eslint-plugin-jsx-a11y": "^6.4.1",
-    "eslint-plugin-prettier": "^5.0.0",
-    "eslint-plugin-react": "^7.24.0",
-    "eslint-plugin-react-hooks": "^4.2.0",
+    "eslint-plugin-import": "^2.30.0",
+    "eslint-plugin-jsx-a11y": "^6.10.0",
+    "eslint-plugin-prettier": "^5.2.1",
     "eslint-plugin-sort-imports-es6-autofix": "^0.6.0",
-    "mocha": "^10.2.0",
-    "nyc": "^15.1.0",
-    "prettier": "^3.0.3",
-    "release-it": "^16.2.0",
-    "source-map-support": "^0.5.19",
-    "ts-node": "^10.0.0",
-    "typescript": "^5.2.2"
+    "mocha": "^10.7.3",
+    "nyc": "^17.1.0",
+    "prettier": "^3.3.3",
+    "release-it": "^17.6.0",
+    "source-map-support": "^0.5.21",
+    "tsx": "^4.19.1",
+    "typescript": "<5.6.0"
   }
 }
diff --git a/node_modules/jsbn/LICENSE b/node_modules/jsbn/LICENSE
deleted file mode 100644
index 24502a9cf7483..0000000000000
--- a/node_modules/jsbn/LICENSE
+++ /dev/null
@@ -1,40 +0,0 @@
-Licensing
----------
-
-This software is covered under the following copyright:
-
-/*
- * Copyright (c) 2003-2005  Tom Wu
- * All Rights Reserved.
- *
- * Permission is hereby granted, free of charge, to any person obtaining
- * a copy of this software and associated documentation files (the
- * "Software"), to deal in the Software without restriction, including
- * without limitation the rights to use, copy, modify, merge, publish,
- * distribute, sublicense, and/or sell copies of the Software, and to
- * permit persons to whom the Software is furnished to do so, subject to
- * the following conditions:
- *
- * The above copyright notice and this permission notice shall be
- * included in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED "AS-IS" AND WITHOUT WARRANTY OF ANY KIND, 
- * EXPRESS, IMPLIED OR OTHERWISE, INCLUDING WITHOUT LIMITATION, ANY 
- * WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.  
- *
- * IN NO EVENT SHALL TOM WU BE LIABLE FOR ANY SPECIAL, INCIDENTAL,
- * INDIRECT OR CONSEQUENTIAL DAMAGES OF ANY KIND, OR ANY DAMAGES WHATSOEVER
- * RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER OR NOT ADVISED OF
- * THE POSSIBILITY OF DAMAGE, AND ON ANY THEORY OF LIABILITY, ARISING OUT
- * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
- *
- * In addition, the following condition applies:
- *
- * All redistributions must retain an intact copy of this copyright notice
- * and disclaimer.
- */
-
-Address all questions regarding this license to:
-
-  Tom Wu
-  tjw@cs.Stanford.EDU
diff --git a/node_modules/jsbn/example.html b/node_modules/jsbn/example.html
deleted file mode 100644
index 1c0489b137635..0000000000000
--- a/node_modules/jsbn/example.html
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-    
-        
-        
-    
-    
-      
-      
-    
-
diff --git a/node_modules/jsbn/example.js b/node_modules/jsbn/example.js
deleted file mode 100644
index 85979909d7b1d..0000000000000
--- a/node_modules/jsbn/example.js
+++ /dev/null
@@ -1,5 +0,0 @@
-(function () {
-  var BigInteger = jsbn.BigInteger;
-  var a = new BigInteger('91823918239182398123');
-  console.log(a.bitLength());
-}());
diff --git a/node_modules/jsbn/index.js b/node_modules/jsbn/index.js
deleted file mode 100644
index e9eb697b07a89..0000000000000
--- a/node_modules/jsbn/index.js
+++ /dev/null
@@ -1,1361 +0,0 @@
-(function(){
-
-    // Copyright (c) 2005  Tom Wu
-    // All Rights Reserved.
-    // See "LICENSE" for details.
-
-    // Basic JavaScript BN library - subset useful for RSA encryption.
-
-    // Bits per digit
-    var dbits;
-
-    // JavaScript engine analysis
-    var canary = 0xdeadbeefcafe;
-    var j_lm = ((canary&0xffffff)==0xefcafe);
-
-    // (public) Constructor
-    function BigInteger(a,b,c) {
-      if(a != null)
-        if("number" == typeof a) this.fromNumber(a,b,c);
-        else if(b == null && "string" != typeof a) this.fromString(a,256);
-        else this.fromString(a,b);
-    }
-
-    // return new, unset BigInteger
-    function nbi() { return new BigInteger(null); }
-
-    // am: Compute w_j += (x*this_i), propagate carries,
-    // c is initial carry, returns final carry.
-    // c < 3*dvalue, x < 2*dvalue, this_i < dvalue
-    // We need to select the fastest one that works in this environment.
-
-    // am1: use a single mult and divide to get the high bits,
-    // max digit bits should be 26 because
-    // max internal value = 2*dvalue^2-2*dvalue (< 2^53)
-    function am1(i,x,w,j,c,n) {
-      while(--n >= 0) {
-        var v = x*this[i++]+w[j]+c;
-        c = Math.floor(v/0x4000000);
-        w[j++] = v&0x3ffffff;
-      }
-      return c;
-    }
-    // am2 avoids a big mult-and-extract completely.
-    // Max digit bits should be <= 30 because we do bitwise ops
-    // on values up to 2*hdvalue^2-hdvalue-1 (< 2^31)
-    function am2(i,x,w,j,c,n) {
-      var xl = x&0x7fff, xh = x>>15;
-      while(--n >= 0) {
-        var l = this[i]&0x7fff;
-        var h = this[i++]>>15;
-        var m = xh*l+h*xl;
-        l = xl*l+((m&0x7fff)<<15)+w[j]+(c&0x3fffffff);
-        c = (l>>>30)+(m>>>15)+xh*h+(c>>>30);
-        w[j++] = l&0x3fffffff;
-      }
-      return c;
-    }
-    // Alternately, set max digit bits to 28 since some
-    // browsers slow down when dealing with 32-bit numbers.
-    function am3(i,x,w,j,c,n) {
-      var xl = x&0x3fff, xh = x>>14;
-      while(--n >= 0) {
-        var l = this[i]&0x3fff;
-        var h = this[i++]>>14;
-        var m = xh*l+h*xl;
-        l = xl*l+((m&0x3fff)<<14)+w[j]+c;
-        c = (l>>28)+(m>>14)+xh*h;
-        w[j++] = l&0xfffffff;
-      }
-      return c;
-    }
-    var inBrowser = typeof navigator !== "undefined";
-    if(inBrowser && j_lm && (navigator.appName == "Microsoft Internet Explorer")) {
-      BigInteger.prototype.am = am2;
-      dbits = 30;
-    }
-    else if(inBrowser && j_lm && (navigator.appName != "Netscape")) {
-      BigInteger.prototype.am = am1;
-      dbits = 26;
-    }
-    else { // Mozilla/Netscape seems to prefer am3
-      BigInteger.prototype.am = am3;
-      dbits = 28;
-    }
-
-    BigInteger.prototype.DB = dbits;
-    BigInteger.prototype.DM = ((1<= 0; --i) r[i] = this[i];
-      r.t = this.t;
-      r.s = this.s;
-    }
-
-    // (protected) set from integer value x, -DV <= x < DV
-    function bnpFromInt(x) {
-      this.t = 1;
-      this.s = (x<0)?-1:0;
-      if(x > 0) this[0] = x;
-      else if(x < -1) this[0] = x+this.DV;
-      else this.t = 0;
-    }
-
-    // return bigint initialized to value
-    function nbv(i) { var r = nbi(); r.fromInt(i); return r; }
-
-    // (protected) set from string and radix
-    function bnpFromString(s,b) {
-      var k;
-      if(b == 16) k = 4;
-      else if(b == 8) k = 3;
-      else if(b == 256) k = 8; // byte array
-      else if(b == 2) k = 1;
-      else if(b == 32) k = 5;
-      else if(b == 4) k = 2;
-      else { this.fromRadix(s,b); return; }
-      this.t = 0;
-      this.s = 0;
-      var i = s.length, mi = false, sh = 0;
-      while(--i >= 0) {
-        var x = (k==8)?s[i]&0xff:intAt(s,i);
-        if(x < 0) {
-          if(s.charAt(i) == "-") mi = true;
-          continue;
-        }
-        mi = false;
-        if(sh == 0)
-          this[this.t++] = x;
-        else if(sh+k > this.DB) {
-          this[this.t-1] |= (x&((1<<(this.DB-sh))-1))<>(this.DB-sh));
-        }
-        else
-          this[this.t-1] |= x<= this.DB) sh -= this.DB;
-      }
-      if(k == 8 && (s[0]&0x80) != 0) {
-        this.s = -1;
-        if(sh > 0) this[this.t-1] |= ((1<<(this.DB-sh))-1)< 0 && this[this.t-1] == c) --this.t;
-    }
-
-    // (public) return string representation in given radix
-    function bnToString(b) {
-      if(this.s < 0) return "-"+this.negate().toString(b);
-      var k;
-      if(b == 16) k = 4;
-      else if(b == 8) k = 3;
-      else if(b == 2) k = 1;
-      else if(b == 32) k = 5;
-      else if(b == 4) k = 2;
-      else return this.toRadix(b);
-      var km = (1< 0) {
-        if(p < this.DB && (d = this[i]>>p) > 0) { m = true; r = int2char(d); }
-        while(i >= 0) {
-          if(p < k) {
-            d = (this[i]&((1<>(p+=this.DB-k);
-          }
-          else {
-            d = (this[i]>>(p-=k))&km;
-            if(p <= 0) { p += this.DB; --i; }
-          }
-          if(d > 0) m = true;
-          if(m) r += int2char(d);
-        }
-      }
-      return m?r:"0";
-    }
-
-    // (public) -this
-    function bnNegate() { var r = nbi(); BigInteger.ZERO.subTo(this,r); return r; }
-
-    // (public) |this|
-    function bnAbs() { return (this.s<0)?this.negate():this; }
-
-    // (public) return + if this > a, - if this < a, 0 if equal
-    function bnCompareTo(a) {
-      var r = this.s-a.s;
-      if(r != 0) return r;
-      var i = this.t;
-      r = i-a.t;
-      if(r != 0) return (this.s<0)?-r:r;
-      while(--i >= 0) if((r=this[i]-a[i]) != 0) return r;
-      return 0;
-    }
-
-    // returns bit length of the integer x
-    function nbits(x) {
-      var r = 1, t;
-      if((t=x>>>16) != 0) { x = t; r += 16; }
-      if((t=x>>8) != 0) { x = t; r += 8; }
-      if((t=x>>4) != 0) { x = t; r += 4; }
-      if((t=x>>2) != 0) { x = t; r += 2; }
-      if((t=x>>1) != 0) { x = t; r += 1; }
-      return r;
-    }
-
-    // (public) return the number of bits in "this"
-    function bnBitLength() {
-      if(this.t <= 0) return 0;
-      return this.DB*(this.t-1)+nbits(this[this.t-1]^(this.s&this.DM));
-    }
-
-    // (protected) r = this << n*DB
-    function bnpDLShiftTo(n,r) {
-      var i;
-      for(i = this.t-1; i >= 0; --i) r[i+n] = this[i];
-      for(i = n-1; i >= 0; --i) r[i] = 0;
-      r.t = this.t+n;
-      r.s = this.s;
-    }
-
-    // (protected) r = this >> n*DB
-    function bnpDRShiftTo(n,r) {
-      for(var i = n; i < this.t; ++i) r[i-n] = this[i];
-      r.t = Math.max(this.t-n,0);
-      r.s = this.s;
-    }
-
-    // (protected) r = this << n
-    function bnpLShiftTo(n,r) {
-      var bs = n%this.DB;
-      var cbs = this.DB-bs;
-      var bm = (1<= 0; --i) {
-        r[i+ds+1] = (this[i]>>cbs)|c;
-        c = (this[i]&bm)<= 0; --i) r[i] = 0;
-      r[ds] = c;
-      r.t = this.t+ds+1;
-      r.s = this.s;
-      r.clamp();
-    }
-
-    // (protected) r = this >> n
-    function bnpRShiftTo(n,r) {
-      r.s = this.s;
-      var ds = Math.floor(n/this.DB);
-      if(ds >= this.t) { r.t = 0; return; }
-      var bs = n%this.DB;
-      var cbs = this.DB-bs;
-      var bm = (1<>bs;
-      for(var i = ds+1; i < this.t; ++i) {
-        r[i-ds-1] |= (this[i]&bm)<>bs;
-      }
-      if(bs > 0) r[this.t-ds-1] |= (this.s&bm)<>= this.DB;
-      }
-      if(a.t < this.t) {
-        c -= a.s;
-        while(i < this.t) {
-          c += this[i];
-          r[i++] = c&this.DM;
-          c >>= this.DB;
-        }
-        c += this.s;
-      }
-      else {
-        c += this.s;
-        while(i < a.t) {
-          c -= a[i];
-          r[i++] = c&this.DM;
-          c >>= this.DB;
-        }
-        c -= a.s;
-      }
-      r.s = (c<0)?-1:0;
-      if(c < -1) r[i++] = this.DV+c;
-      else if(c > 0) r[i++] = c;
-      r.t = i;
-      r.clamp();
-    }
-
-    // (protected) r = this * a, r != this,a (HAC 14.12)
-    // "this" should be the larger one if appropriate.
-    function bnpMultiplyTo(a,r) {
-      var x = this.abs(), y = a.abs();
-      var i = x.t;
-      r.t = i+y.t;
-      while(--i >= 0) r[i] = 0;
-      for(i = 0; i < y.t; ++i) r[i+x.t] = x.am(0,y[i],r,i,0,x.t);
-      r.s = 0;
-      r.clamp();
-      if(this.s != a.s) BigInteger.ZERO.subTo(r,r);
-    }
-
-    // (protected) r = this^2, r != this (HAC 14.16)
-    function bnpSquareTo(r) {
-      var x = this.abs();
-      var i = r.t = 2*x.t;
-      while(--i >= 0) r[i] = 0;
-      for(i = 0; i < x.t-1; ++i) {
-        var c = x.am(i,x[i],r,2*i,0,1);
-        if((r[i+x.t]+=x.am(i+1,2*x[i],r,2*i+1,c,x.t-i-1)) >= x.DV) {
-          r[i+x.t] -= x.DV;
-          r[i+x.t+1] = 1;
-        }
-      }
-      if(r.t > 0) r[r.t-1] += x.am(i,x[i],r,2*i,0,1);
-      r.s = 0;
-      r.clamp();
-    }
-
-    // (protected) divide this by m, quotient and remainder to q, r (HAC 14.20)
-    // r != q, this != m.  q or r may be null.
-    function bnpDivRemTo(m,q,r) {
-      var pm = m.abs();
-      if(pm.t <= 0) return;
-      var pt = this.abs();
-      if(pt.t < pm.t) {
-        if(q != null) q.fromInt(0);
-        if(r != null) this.copyTo(r);
-        return;
-      }
-      if(r == null) r = nbi();
-      var y = nbi(), ts = this.s, ms = m.s;
-      var nsh = this.DB-nbits(pm[pm.t-1]);   // normalize modulus
-      if(nsh > 0) { pm.lShiftTo(nsh,y); pt.lShiftTo(nsh,r); }
-      else { pm.copyTo(y); pt.copyTo(r); }
-      var ys = y.t;
-      var y0 = y[ys-1];
-      if(y0 == 0) return;
-      var yt = y0*(1<<this.F1)+((ys>1)?y[ys-2]>>this.F2:0);
-      var d1 = this.FV/yt, d2 = (1<<this.F1)/yt, e = 1<<this.F2;
-      var i = r.t, j = i-ys, t = (q==null)?nbi():q;
-      y.dlShiftTo(j,t);
-      if(r.compareTo(t) >= 0) {
-        r[r.t++] = 1;
-        r.subTo(t,r);
-      }
-      BigInteger.ONE.dlShiftTo(ys,t);
-      t.subTo(y,y);  // "negative" y so we can replace sub with am later
-      while(y.t < ys) y[y.t++] = 0;
-      while(--j >= 0) {
-        // Estimate quotient digit
-        var qd = (r[--i]==y0)?this.DM:Math.floor(r[i]*d1+(r[i-1]+e)*d2);
-        if((r[i]+=y.am(0,qd,r,j,0,ys)) < qd) {   // Try it out
-          y.dlShiftTo(j,t);
-          r.subTo(t,r);
-          while(r[i] < --qd) r.subTo(t,r);
-        }
-      }
-      if(q != null) {
-        r.drShiftTo(ys,q);
-        if(ts != ms) BigInteger.ZERO.subTo(q,q);
-      }
-      r.t = ys;
-      r.clamp();
-      if(nsh > 0) r.rShiftTo(nsh,r); // Denormalize remainder
-      if(ts < 0) BigInteger.ZERO.subTo(r,r);
-    }
-
-    // (public) this mod a
-    function bnMod(a) {
-      var r = nbi();
-      this.abs().divRemTo(a,null,r);
-      if(this.s < 0 && r.compareTo(BigInteger.ZERO) > 0) a.subTo(r,r);
-      return r;
-    }
-
-    // Modular reduction using "classic" algorithm
-    function Classic(m) { this.m = m; }
-    function cConvert(x) {
-      if(x.s < 0 || x.compareTo(this.m) >= 0) return x.mod(this.m);
-      else return x;
-    }
-    function cRevert(x) { return x; }
-    function cReduce(x) { x.divRemTo(this.m,null,x); }
-    function cMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-    function cSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
-    Classic.prototype.convert = cConvert;
-    Classic.prototype.revert = cRevert;
-    Classic.prototype.reduce = cReduce;
-    Classic.prototype.mulTo = cMulTo;
-    Classic.prototype.sqrTo = cSqrTo;
-
-    // (protected) return "-1/this % 2^DB"; useful for Mont. reduction
-    // justification:
-    //         xy == 1 (mod m)
-    //         xy =  1+km
-    //   xy(2-xy) = (1+km)(1-km)
-    // x[y(2-xy)] = 1-k^2m^2
-    // x[y(2-xy)] == 1 (mod m^2)
-    // if y is 1/x mod m, then y(2-xy) is 1/x mod m^2
-    // should reduce x and y(2-xy) by m^2 at each step to keep size bounded.
-    // JS multiply "overflows" differently from C/C++, so care is needed here.
-    function bnpInvDigit() {
-      if(this.t < 1) return 0;
-      var x = this[0];
-      if((x&1) == 0) return 0;
-      var y = x&3;       // y == 1/x mod 2^2
-      y = (y*(2-(x&0xf)*y))&0xf; // y == 1/x mod 2^4
-      y = (y*(2-(x&0xff)*y))&0xff;   // y == 1/x mod 2^8
-      y = (y*(2-(((x&0xffff)*y)&0xffff)))&0xffff;    // y == 1/x mod 2^16
-      // last step - calculate inverse mod DV directly;
-      // assumes 16 < DB <= 32 and assumes ability to handle 48-bit ints
-      y = (y*(2-x*y%this.DV))%this.DV;       // y == 1/x mod 2^dbits
-      // we really want the negative inverse, and -DV < y < DV
-      return (y>0)?this.DV-y:-y;
-    }
-
-    // Montgomery reduction
-    function Montgomery(m) {
-      this.m = m;
-      this.mp = m.invDigit();
-      this.mpl = this.mp&0x7fff;
-      this.mph = this.mp>>15;
-      this.um = (1<<(m.DB-15))-1;
-      this.mt2 = 2*m.t;
-    }
-
-    // xR mod m
-    function montConvert(x) {
-      var r = nbi();
-      x.abs().dlShiftTo(this.m.t,r);
-      r.divRemTo(this.m,null,r);
-      if(x.s < 0 && r.compareTo(BigInteger.ZERO) > 0) this.m.subTo(r,r);
-      return r;
-    }
-
-    // x/R mod m
-    function montRevert(x) {
-      var r = nbi();
-      x.copyTo(r);
-      this.reduce(r);
-      return r;
-    }
-
-    // x = x/R mod m (HAC 14.32)
-    function montReduce(x) {
-      while(x.t <= this.mt2) // pad x so am has enough room later
-        x[x.t++] = 0;
-      for(var i = 0; i < this.m.t; ++i) {
-        // faster way of calculating u0 = x[i]*mp mod DV
-        var j = x[i]&0x7fff;
-        var u0 = (j*this.mpl+(((j*this.mph+(x[i]>>15)*this.mpl)&this.um)<<15))&x.DM;
-        // use am to combine the multiply-shift-add into one call
-        j = i+this.m.t;
-        x[j] += this.m.am(0,u0,x,i,0,this.m.t);
-        // propagate carry
-        while(x[j] >= x.DV) { x[j] -= x.DV; x[++j]++; }
-      }
-      x.clamp();
-      x.drShiftTo(this.m.t,x);
-      if(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
-    }
-
-    // r = "x^2/R mod m"; x != r
-    function montSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
-    // r = "xy/R mod m"; x,y != r
-    function montMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-
-    Montgomery.prototype.convert = montConvert;
-    Montgomery.prototype.revert = montRevert;
-    Montgomery.prototype.reduce = montReduce;
-    Montgomery.prototype.mulTo = montMulTo;
-    Montgomery.prototype.sqrTo = montSqrTo;
-
-    // (protected) true iff this is even
-    function bnpIsEven() { return ((this.t>0)?(this[0]&1):this.s) == 0; }
-
-    // (protected) this^e, e < 2^32, doing sqr and mul with "r" (HAC 14.79)
-    function bnpExp(e,z) {
-      if(e > 0xffffffff || e < 1) return BigInteger.ONE;
-      var r = nbi(), r2 = nbi(), g = z.convert(this), i = nbits(e)-1;
-      g.copyTo(r);
-      while(--i >= 0) {
-        z.sqrTo(r,r2);
-        if((e&(1<<i)) > 0) z.mulTo(r2,g,r);
-        else { var t = r; r = r2; r2 = t; }
-      }
-      return z.revert(r);
-    }
-
-    // (public) this^e % m, 0 <= e < 2^32
-    function bnModPowInt(e,m) {
-      var z;
-      if(e < 256 || m.isEven()) z = new Classic(m); else z = new Montgomery(m);
-      return this.exp(e,z);
-    }
-
-    // protected
-    BigInteger.prototype.copyTo = bnpCopyTo;
-    BigInteger.prototype.fromInt = bnpFromInt;
-    BigInteger.prototype.fromString = bnpFromString;
-    BigInteger.prototype.clamp = bnpClamp;
-    BigInteger.prototype.dlShiftTo = bnpDLShiftTo;
-    BigInteger.prototype.drShiftTo = bnpDRShiftTo;
-    BigInteger.prototype.lShiftTo = bnpLShiftTo;
-    BigInteger.prototype.rShiftTo = bnpRShiftTo;
-    BigInteger.prototype.subTo = bnpSubTo;
-    BigInteger.prototype.multiplyTo = bnpMultiplyTo;
-    BigInteger.prototype.squareTo = bnpSquareTo;
-    BigInteger.prototype.divRemTo = bnpDivRemTo;
-    BigInteger.prototype.invDigit = bnpInvDigit;
-    BigInteger.prototype.isEven = bnpIsEven;
-    BigInteger.prototype.exp = bnpExp;
-
-    // public
-    BigInteger.prototype.toString = bnToString;
-    BigInteger.prototype.negate = bnNegate;
-    BigInteger.prototype.abs = bnAbs;
-    BigInteger.prototype.compareTo = bnCompareTo;
-    BigInteger.prototype.bitLength = bnBitLength;
-    BigInteger.prototype.mod = bnMod;
-    BigInteger.prototype.modPowInt = bnModPowInt;
-
-    // "constants"
-    BigInteger.ZERO = nbv(0);
-    BigInteger.ONE = nbv(1);
-
-    // Copyright (c) 2005-2009  Tom Wu
-    // All Rights Reserved.
-    // See "LICENSE" for details.
-
-    // Extended JavaScript BN functions, required for RSA private ops.
-
-    // Version 1.1: new BigInteger("0", 10) returns "proper" zero
-    // Version 1.2: square() API, isProbablePrime fix
-
-    // (public)
-    function bnClone() { var r = nbi(); this.copyTo(r); return r; }
-
-    // (public) return value as integer
-    function bnIntValue() {
-      if(this.s < 0) {
-        if(this.t == 1) return this[0]-this.DV;
-        else if(this.t == 0) return -1;
-      }
-      else if(this.t == 1) return this[0];
-      else if(this.t == 0) return 0;
-      // assumes 16 < DB < 32
-      return ((this[1]&((1<<(32-this.DB))-1))<>24; }
-
-    // (public) return value as short (assumes DB>=16)
-    function bnShortValue() { return (this.t==0)?this.s:(this[0]<<16)>>16; }
-
-    // (protected) return x s.t. r^x < DV
-    function bnpChunkSize(r) { return Math.floor(Math.LN2*this.DB/Math.log(r)); }
-
-    // (public) 0 if this == 0, 1 if this > 0
-    function bnSigNum() {
-      if(this.s < 0) return -1;
-      else if(this.t <= 0 || (this.t == 1 && this[0] <= 0)) return 0;
-      else return 1;
-    }
-
-    // (protected) convert to radix string
-    function bnpToRadix(b) {
-      if(b == null) b = 10;
-      if(this.signum() == 0 || b < 2 || b > 36) return "0";
-      var cs = this.chunkSize(b);
-      var a = Math.pow(b,cs);
-      var d = nbv(a), y = nbi(), z = nbi(), r = "";
-      this.divRemTo(d,y,z);
-      while(y.signum() > 0) {
-        r = (a+z.intValue()).toString(b).substr(1) + r;
-        y.divRemTo(d,y,z);
-      }
-      return z.intValue().toString(b) + r;
-    }
-
-    // (protected) convert from radix string
-    function bnpFromRadix(s,b) {
-      this.fromInt(0);
-      if(b == null) b = 10;
-      var cs = this.chunkSize(b);
-      var d = Math.pow(b,cs), mi = false, j = 0, w = 0;
-      for(var i = 0; i < s.length; ++i) {
-        var x = intAt(s,i);
-        if(x < 0) {
-          if(s.charAt(i) == "-" && this.signum() == 0) mi = true;
-          continue;
-        }
-        w = b*w+x;
-        if(++j >= cs) {
-          this.dMultiply(d);
-          this.dAddOffset(w,0);
-          j = 0;
-          w = 0;
-        }
-      }
-      if(j > 0) {
-        this.dMultiply(Math.pow(b,j));
-        this.dAddOffset(w,0);
-      }
-      if(mi) BigInteger.ZERO.subTo(this,this);
-    }
-
-    // (protected) alternate constructor
-    function bnpFromNumber(a,b,c) {
-      if("number" == typeof b) {
-        // new BigInteger(int,int,RNG)
-        if(a < 2) this.fromInt(1);
-        else {
-          this.fromNumber(a,c);
-          if(!this.testBit(a-1))    // force MSB set
-            this.bitwiseTo(BigInteger.ONE.shiftLeft(a-1),op_or,this);
-          if(this.isEven()) this.dAddOffset(1,0); // force odd
-          while(!this.isProbablePrime(b)) {
-            this.dAddOffset(2,0);
-            if(this.bitLength() > a) this.subTo(BigInteger.ONE.shiftLeft(a-1),this);
-          }
-        }
-      }
-      else {
-        // new BigInteger(int,RNG)
-        var x = new Array(), t = a&7;
-        x.length = (a>>3)+1;
-        b.nextBytes(x);
-        if(t > 0) x[0] &= ((1< 0) {
-        if(p < this.DB && (d = this[i]>>p) != (this.s&this.DM)>>p)
-          r[k++] = d|(this.s<<(this.DB-p));
-        while(i >= 0) {
-          if(p < 8) {
-            d = (this[i]&((1<>(p+=this.DB-8);
-          }
-          else {
-            d = (this[i]>>(p-=8))&0xff;
-            if(p <= 0) { p += this.DB; --i; }
-          }
-          if((d&0x80) != 0) d |= -256;
-          if(k == 0 && (this.s&0x80) != (d&0x80)) ++k;
-          if(k > 0 || d != this.s) r[k++] = d;
-        }
-      }
-      return r;
-    }
-
-    function bnEquals(a) { return(this.compareTo(a)==0); }
-    function bnMin(a) { return(this.compareTo(a)<0)?this:a; }
-    function bnMax(a) { return(this.compareTo(a)>0)?this:a; }
-
-    // (protected) r = this op a (bitwise)
-    function bnpBitwiseTo(a,op,r) {
-      var i, f, m = Math.min(a.t,this.t);
-      for(i = 0; i < m; ++i) r[i] = op(this[i],a[i]);
-      if(a.t < this.t) {
-        f = a.s&this.DM;
-        for(i = m; i < this.t; ++i) r[i] = op(this[i],f);
-        r.t = this.t;
-      }
-      else {
-        f = this.s&this.DM;
-        for(i = m; i < a.t; ++i) r[i] = op(f,a[i]);
-        r.t = a.t;
-      }
-      r.s = op(this.s,a.s);
-      r.clamp();
-    }
-
-    // (public) this & a
-    function op_and(x,y) { return x&y; }
-    function bnAnd(a) { var r = nbi(); this.bitwiseTo(a,op_and,r); return r; }
-
-    // (public) this | a
-    function op_or(x,y) { return x|y; }
-    function bnOr(a) { var r = nbi(); this.bitwiseTo(a,op_or,r); return r; }
-
-    // (public) this ^ a
-    function op_xor(x,y) { return x^y; }
-    function bnXor(a) { var r = nbi(); this.bitwiseTo(a,op_xor,r); return r; }
-
-    // (public) this & ~a
-    function op_andnot(x,y) { return x&~y; }
-    function bnAndNot(a) { var r = nbi(); this.bitwiseTo(a,op_andnot,r); return r; }
-
-    // (public) ~this
-    function bnNot() {
-      var r = nbi();
-      for(var i = 0; i < this.t; ++i) r[i] = this.DM&~this[i];
-      r.t = this.t;
-      r.s = ~this.s;
-      return r;
-    }
-
-    // (public) this << n
-    function bnShiftLeft(n) {
-      var r = nbi();
-      if(n < 0) this.rShiftTo(-n,r); else this.lShiftTo(n,r);
-      return r;
-    }
-
-    // (public) this >> n
-    function bnShiftRight(n) {
-      var r = nbi();
-      if(n < 0) this.lShiftTo(-n,r); else this.rShiftTo(n,r);
-      return r;
-    }
-
-    // return index of lowest 1-bit in x, x < 2^31
-    function lbit(x) {
-      if(x == 0) return -1;
-      var r = 0;
-      if((x&0xffff) == 0) { x >>= 16; r += 16; }
-      if((x&0xff) == 0) { x >>= 8; r += 8; }
-      if((x&0xf) == 0) { x >>= 4; r += 4; }
-      if((x&3) == 0) { x >>= 2; r += 2; }
-      if((x&1) == 0) ++r;
-      return r;
-    }
-
-    // (public) returns index of lowest 1-bit (or -1 if none)
-    function bnGetLowestSetBit() {
-      for(var i = 0; i < this.t; ++i)
-        if(this[i] != 0) return i*this.DB+lbit(this[i]);
-      if(this.s < 0) return this.t*this.DB;
-      return -1;
-    }
-
-    // return number of 1 bits in x
-    function cbit(x) {
-      var r = 0;
-      while(x != 0) { x &= x-1; ++r; }
-      return r;
-    }
-
-    // (public) return number of set bits
-    function bnBitCount() {
-      var r = 0, x = this.s&this.DM;
-      for(var i = 0; i < this.t; ++i) r += cbit(this[i]^x);
-      return r;
-    }
-
-    // (public) true iff nth bit is set
-    function bnTestBit(n) {
-      var j = Math.floor(n/this.DB);
-      if(j >= this.t) return(this.s!=0);
-      return((this[j]&(1<<(n%this.DB)))!=0);
-    }
-
-    // (protected) this op (1<>= this.DB;
-      }
-      if(a.t < this.t) {
-        c += a.s;
-        while(i < this.t) {
-          c += this[i];
-          r[i++] = c&this.DM;
-          c >>= this.DB;
-        }
-        c += this.s;
-      }
-      else {
-        c += this.s;
-        while(i < a.t) {
-          c += a[i];
-          r[i++] = c&this.DM;
-          c >>= this.DB;
-        }
-        c += a.s;
-      }
-      r.s = (c<0)?-1:0;
-      if(c > 0) r[i++] = c;
-      else if(c < -1) r[i++] = this.DV+c;
-      r.t = i;
-      r.clamp();
-    }
-
-    // (public) this + a
-    function bnAdd(a) { var r = nbi(); this.addTo(a,r); return r; }
-
-    // (public) this - a
-    function bnSubtract(a) { var r = nbi(); this.subTo(a,r); return r; }
-
-    // (public) this * a
-    function bnMultiply(a) { var r = nbi(); this.multiplyTo(a,r); return r; }
-
-    // (public) this^2
-    function bnSquare() { var r = nbi(); this.squareTo(r); return r; }
-
-    // (public) this / a
-    function bnDivide(a) { var r = nbi(); this.divRemTo(a,r,null); return r; }
-
-    // (public) this % a
-    function bnRemainder(a) { var r = nbi(); this.divRemTo(a,null,r); return r; }
-
-    // (public) [this/a,this%a]
-    function bnDivideAndRemainder(a) {
-      var q = nbi(), r = nbi();
-      this.divRemTo(a,q,r);
-      return new Array(q,r);
-    }
-
-    // (protected) this *= n, this >= 0, 1 < n < DV
-    function bnpDMultiply(n) {
-      this[this.t] = this.am(0,n-1,this,0,0,this.t);
-      ++this.t;
-      this.clamp();
-    }
-
-    // (protected) this += n << w words, this >= 0
-    function bnpDAddOffset(n,w) {
-      if(n == 0) return;
-      while(this.t <= w) this[this.t++] = 0;
-      this[w] += n;
-      while(this[w] >= this.DV) {
-        this[w] -= this.DV;
-        if(++w >= this.t) this[this.t++] = 0;
-        ++this[w];
-      }
-    }
-
-    // A "null" reducer
-    function NullExp() {}
-    function nNop(x) { return x; }
-    function nMulTo(x,y,r) { x.multiplyTo(y,r); }
-    function nSqrTo(x,r) { x.squareTo(r); }
-
-    NullExp.prototype.convert = nNop;
-    NullExp.prototype.revert = nNop;
-    NullExp.prototype.mulTo = nMulTo;
-    NullExp.prototype.sqrTo = nSqrTo;
-
-    // (public) this^e
-    function bnPow(e) { return this.exp(e,new NullExp()); }
-
-    // (protected) r = lower n words of "this * a", a.t <= n
-    // "this" should be the larger one if appropriate.
-    function bnpMultiplyLowerTo(a,n,r) {
-      var i = Math.min(this.t+a.t,n);
-      r.s = 0; // assumes a,this >= 0
-      r.t = i;
-      while(i > 0) r[--i] = 0;
-      var j;
-      for(j = r.t-this.t; i < j; ++i) r[i+this.t] = this.am(0,a[i],r,i,0,this.t);
-      for(j = Math.min(a.t,n); i < j; ++i) this.am(0,a[i],r,i,0,n-i);
-      r.clamp();
-    }
-
-    // (protected) r = "this * a" without lower n words, n > 0
-    // "this" should be the larger one if appropriate.
-    function bnpMultiplyUpperTo(a,n,r) {
-      --n;
-      var i = r.t = this.t+a.t-n;
-      r.s = 0; // assumes a,this >= 0
-      while(--i >= 0) r[i] = 0;
-      for(i = Math.max(n-this.t,0); i < a.t; ++i)
-        r[this.t+i-n] = this.am(n-i,a[i],r,0,0,this.t+i-n);
-      r.clamp();
-      r.drShiftTo(1,r);
-    }
-
-    // Barrett modular reduction
-    function Barrett(m) {
-      // setup Barrett
-      this.r2 = nbi();
-      this.q3 = nbi();
-      BigInteger.ONE.dlShiftTo(2*m.t,this.r2);
-      this.mu = this.r2.divide(m);
-      this.m = m;
-    }
-
-    function barrettConvert(x) {
-      if(x.s < 0 || x.t > 2*this.m.t) return x.mod(this.m);
-      else if(x.compareTo(this.m) < 0) return x;
-      else { var r = nbi(); x.copyTo(r); this.reduce(r); return r; }
-    }
-
-    function barrettRevert(x) { return x; }
-
-    // x = x mod m (HAC 14.42)
-    function barrettReduce(x) {
-      x.drShiftTo(this.m.t-1,this.r2);
-      if(x.t > this.m.t+1) { x.t = this.m.t+1; x.clamp(); }
-      this.mu.multiplyUpperTo(this.r2,this.m.t+1,this.q3);
-      this.m.multiplyLowerTo(this.q3,this.m.t+1,this.r2);
-      while(x.compareTo(this.r2) < 0) x.dAddOffset(1,this.m.t+1);
-      x.subTo(this.r2,x);
-      while(x.compareTo(this.m) >= 0) x.subTo(this.m,x);
-    }
-
-    // r = x^2 mod m; x != r
-    function barrettSqrTo(x,r) { x.squareTo(r); this.reduce(r); }
-
-    // r = x*y mod m; x,y != r
-    function barrettMulTo(x,y,r) { x.multiplyTo(y,r); this.reduce(r); }
-
-    Barrett.prototype.convert = barrettConvert;
-    Barrett.prototype.revert = barrettRevert;
-    Barrett.prototype.reduce = barrettReduce;
-    Barrett.prototype.mulTo = barrettMulTo;
-    Barrett.prototype.sqrTo = barrettSqrTo;
-
-    // (public) this^e % m (HAC 14.85)
-    function bnModPow(e,m) {
-      var i = e.bitLength(), k, r = nbv(1), z;
-      if(i <= 0) return r;
-      else if(i < 18) k = 1;
-      else if(i < 48) k = 3;
-      else if(i < 144) k = 4;
-      else if(i < 768) k = 5;
-      else k = 6;
-      if(i < 8)
-        z = new Classic(m);
-      else if(m.isEven())
-        z = new Barrett(m);
-      else
-        z = new Montgomery(m);
-
-      // precomputation
-      var g = new Array(), n = 3, k1 = k-1, km = (1< 1) {
-        var g2 = nbi();
-        z.sqrTo(g[1],g2);
-        while(n <= km) {
-          g[n] = nbi();
-          z.mulTo(g2,g[n-2],g[n]);
-          n += 2;
-        }
-      }
-
-      var j = e.t-1, w, is1 = true, r2 = nbi(), t;
-      i = nbits(e[j])-1;
-      while(j >= 0) {
-        if(i >= k1) w = (e[j]>>(i-k1))&km;
-        else {
-          w = (e[j]&((1<<(i+1))-1))<<(k1-i);
-          if(j > 0) w |= e[j-1]>>(this.DB+i-k1);
-        }
-
-        n = k;
-        while((w&1) == 0) { w >>= 1; --n; }
-        if((i -= n) < 0) { i += this.DB; --j; }
-        if(is1) {    // ret == 1, don't bother squaring or multiplying it
-          g[w].copyTo(r);
-          is1 = false;
-        }
-        else {
-          while(n > 1) { z.sqrTo(r,r2); z.sqrTo(r2,r); n -= 2; }
-          if(n > 0) z.sqrTo(r,r2); else { t = r; r = r2; r2 = t; }
-          z.mulTo(r2,g[w],r);
-        }
-
-        while(j >= 0 && (e[j]&(1< 0) {
-        x.rShiftTo(g,x);
-        y.rShiftTo(g,y);
-      }
-      while(x.signum() > 0) {
-        if((i = x.getLowestSetBit()) > 0) x.rShiftTo(i,x);
-        if((i = y.getLowestSetBit()) > 0) y.rShiftTo(i,y);
-        if(x.compareTo(y) >= 0) {
-          x.subTo(y,x);
-          x.rShiftTo(1,x);
-        }
-        else {
-          y.subTo(x,y);
-          y.rShiftTo(1,y);
-        }
-      }
-      if(g > 0) y.lShiftTo(g,y);
-      return y;
-    }
-
-    // (protected) this % n, n < 2^26
-    function bnpModInt(n) {
-      if(n <= 0) return 0;
-      var d = this.DV%n, r = (this.s<0)?n-1:0;
-      if(this.t > 0)
-        if(d == 0) r = this[0]%n;
-        else for(var i = this.t-1; i >= 0; --i) r = (d*r+this[i])%n;
-      return r;
-    }
-
-    // (public) 1/this % m (HAC 14.61)
-    function bnModInverse(m) {
-      var ac = m.isEven();
-      if((this.isEven() && ac) || m.signum() == 0) return BigInteger.ZERO;
-      var u = m.clone(), v = this.clone();
-      var a = nbv(1), b = nbv(0), c = nbv(0), d = nbv(1);
-      while(u.signum() != 0) {
-        while(u.isEven()) {
-          u.rShiftTo(1,u);
-          if(ac) {
-            if(!a.isEven() || !b.isEven()) { a.addTo(this,a); b.subTo(m,b); }
-            a.rShiftTo(1,a);
-          }
-          else if(!b.isEven()) b.subTo(m,b);
-          b.rShiftTo(1,b);
-        }
-        while(v.isEven()) {
-          v.rShiftTo(1,v);
-          if(ac) {
-            if(!c.isEven() || !d.isEven()) { c.addTo(this,c); d.subTo(m,d); }
-            c.rShiftTo(1,c);
-          }
-          else if(!d.isEven()) d.subTo(m,d);
-          d.rShiftTo(1,d);
-        }
-        if(u.compareTo(v) >= 0) {
-          u.subTo(v,u);
-          if(ac) a.subTo(c,a);
-          b.subTo(d,b);
-        }
-        else {
-          v.subTo(u,v);
-          if(ac) c.subTo(a,c);
-          d.subTo(b,d);
-        }
-      }
-      if(v.compareTo(BigInteger.ONE) != 0) return BigInteger.ZERO;
-      if(d.compareTo(m) >= 0) return d.subtract(m);
-      if(d.signum() < 0) d.addTo(m,d); else return d;
-      if(d.signum() < 0) return d.add(m); else return d;
-    }
-
-    var lowprimes = [2,3,5,7,11,13,17,19,23,29,31,37,41,43,47,53,59,61,67,71,73,79,83,89,97,101,103,107,109,113,127,131,137,139,149,151,157,163,167,173,179,181,191,193,197,199,211,223,227,229,233,239,241,251,257,263,269,271,277,281,283,293,307,311,313,317,331,337,347,349,353,359,367,373,379,383,389,397,401,409,419,421,431,433,439,443,449,457,461,463,467,479,487,491,499,503,509,521,523,541,547,557,563,569,571,577,587,593,599,601,607,613,617,619,631,641,643,647,653,659,661,673,677,683,691,701,709,719,727,733,739,743,751,757,761,769,773,787,797,809,811,821,823,827,829,839,853,857,859,863,877,881,883,887,907,911,919,929,937,941,947,953,967,971,977,983,991,997];
-    var lplim = (1<<26)/lowprimes[lowprimes.length-1];
-
-    // (public) test primality with certainty >= 1-.5^t
-    function bnIsProbablePrime(t) {
-      var i, x = this.abs();
-      if(x.t == 1 && x[0] <= lowprimes[lowprimes.length-1]) {
-        for(i = 0; i < lowprimes.length; ++i)
-          if(x[0] == lowprimes[i]) return true;
-        return false;
-      }
-      if(x.isEven()) return false;
-      i = 1;
-      while(i < lowprimes.length) {
-        var m = lowprimes[i], j = i+1;
-        while(j < lowprimes.length && m < lplim) m *= lowprimes[j++];
-        m = x.modInt(m);
-        while(i < j) if(m%lowprimes[i++] == 0) return false;
-      }
-      return x.millerRabin(t);
-    }
-
-    // (protected) true if probably prime (HAC 4.24, Miller-Rabin)
-    function bnpMillerRabin(t) {
-      var n1 = this.subtract(BigInteger.ONE);
-      var k = n1.getLowestSetBit();
-      if(k <= 0) return false;
-      var r = n1.shiftRight(k);
-      t = (t+1)>>1;
-      if(t > lowprimes.length) t = lowprimes.length;
-      var a = nbi();
-      for(var i = 0; i < t; ++i) {
-        //Pick bases at random, instead of starting at 2
-        a.fromInt(lowprimes[Math.floor(Math.random()*lowprimes.length)]);
-        var y = a.modPow(r,this);
-        if(y.compareTo(BigInteger.ONE) != 0 && y.compareTo(n1) != 0) {
-          var j = 1;
-          while(j++ < k && y.compareTo(n1) != 0) {
-            y = y.modPowInt(2,this);
-            if(y.compareTo(BigInteger.ONE) == 0) return false;
-          }
-          if(y.compareTo(n1) != 0) return false;
-        }
-      }
-      return true;
-    }
-
-    // protected
-    BigInteger.prototype.chunkSize = bnpChunkSize;
-    BigInteger.prototype.toRadix = bnpToRadix;
-    BigInteger.prototype.fromRadix = bnpFromRadix;
-    BigInteger.prototype.fromNumber = bnpFromNumber;
-    BigInteger.prototype.bitwiseTo = bnpBitwiseTo;
-    BigInteger.prototype.changeBit = bnpChangeBit;
-    BigInteger.prototype.addTo = bnpAddTo;
-    BigInteger.prototype.dMultiply = bnpDMultiply;
-    BigInteger.prototype.dAddOffset = bnpDAddOffset;
-    BigInteger.prototype.multiplyLowerTo = bnpMultiplyLowerTo;
-    BigInteger.prototype.multiplyUpperTo = bnpMultiplyUpperTo;
-    BigInteger.prototype.modInt = bnpModInt;
-    BigInteger.prototype.millerRabin = bnpMillerRabin;
-
-    // public
-    BigInteger.prototype.clone = bnClone;
-    BigInteger.prototype.intValue = bnIntValue;
-    BigInteger.prototype.byteValue = bnByteValue;
-    BigInteger.prototype.shortValue = bnShortValue;
-    BigInteger.prototype.signum = bnSigNum;
-    BigInteger.prototype.toByteArray = bnToByteArray;
-    BigInteger.prototype.equals = bnEquals;
-    BigInteger.prototype.min = bnMin;
-    BigInteger.prototype.max = bnMax;
-    BigInteger.prototype.and = bnAnd;
-    BigInteger.prototype.or = bnOr;
-    BigInteger.prototype.xor = bnXor;
-    BigInteger.prototype.andNot = bnAndNot;
-    BigInteger.prototype.not = bnNot;
-    BigInteger.prototype.shiftLeft = bnShiftLeft;
-    BigInteger.prototype.shiftRight = bnShiftRight;
-    BigInteger.prototype.getLowestSetBit = bnGetLowestSetBit;
-    BigInteger.prototype.bitCount = bnBitCount;
-    BigInteger.prototype.testBit = bnTestBit;
-    BigInteger.prototype.setBit = bnSetBit;
-    BigInteger.prototype.clearBit = bnClearBit;
-    BigInteger.prototype.flipBit = bnFlipBit;
-    BigInteger.prototype.add = bnAdd;
-    BigInteger.prototype.subtract = bnSubtract;
-    BigInteger.prototype.multiply = bnMultiply;
-    BigInteger.prototype.divide = bnDivide;
-    BigInteger.prototype.remainder = bnRemainder;
-    BigInteger.prototype.divideAndRemainder = bnDivideAndRemainder;
-    BigInteger.prototype.modPow = bnModPow;
-    BigInteger.prototype.modInverse = bnModInverse;
-    BigInteger.prototype.pow = bnPow;
-    BigInteger.prototype.gcd = bnGCD;
-    BigInteger.prototype.isProbablePrime = bnIsProbablePrime;
-
-    // JSBN-specific extension
-    BigInteger.prototype.square = bnSquare;
-
-    // Expose the Barrett function
-    BigInteger.prototype.Barrett = Barrett
-
-    // BigInteger interfaces not implemented in jsbn:
-
-    // BigInteger(int signum, byte[] magnitude)
-    // double doubleValue()
-    // float floatValue()
-    // int hashCode()
-    // long longValue()
-    // static BigInteger valueOf(long val)
-
-    // Random number generator - requires a PRNG backend, e.g. prng4.js
-
-    // For best results, put code like
-    // <body onClick='rng_seed_time();' onKeyPress='rng_seed_time();'>
-    // in your main HTML document.
-
-    var rng_state;
-    var rng_pool;
-    var rng_pptr;
-
-    // Mix in a 32-bit integer into the pool
-    function rng_seed_int(x) {
-      rng_pool[rng_pptr++] ^= x & 255;
-      rng_pool[rng_pptr++] ^= (x >> 8) & 255;
-      rng_pool[rng_pptr++] ^= (x >> 16) & 255;
-      rng_pool[rng_pptr++] ^= (x >> 24) & 255;
-      if(rng_pptr >= rng_psize) rng_pptr -= rng_psize;
-    }
-
-    // Mix in the current time (w/milliseconds) into the pool
-    function rng_seed_time() {
-      rng_seed_int(new Date().getTime());
-    }
-
-    // Initialize the pool with junk if needed.
-    if(rng_pool == null) {
-      rng_pool = new Array();
-      rng_pptr = 0;
-      var t;
-      if(typeof window !== "undefined" && window.crypto) {
-        if (window.crypto.getRandomValues) {
-          // Use webcrypto if available
-          var ua = new Uint8Array(32);
-          window.crypto.getRandomValues(ua);
-          for(t = 0; t < 32; ++t)
-            rng_pool[rng_pptr++] = ua[t];
-        }
-        else if(navigator.appName == "Netscape" && navigator.appVersion < "5") {
-          // Extract entropy (256 bits) from NS4 RNG if available
-          var z = window.crypto.random(32);
-          for(t = 0; t < z.length; ++t)
-            rng_pool[rng_pptr++] = z.charCodeAt(t) & 255;
-        }
-      }
-      while(rng_pptr < rng_psize) {  // extract some randomness from Math.random()
-        t = Math.floor(65536 * Math.random());
-        rng_pool[rng_pptr++] = t >>> 8;
-        rng_pool[rng_pptr++] = t & 255;
-      }
-      rng_pptr = 0;
-      rng_seed_time();
-      //rng_seed_int(window.screenX);
-      //rng_seed_int(window.screenY);
-    }
-
-    function rng_get_byte() {
-      if(rng_state == null) {
-        rng_seed_time();
-        rng_state = prng_newstate();
-        rng_state.init(rng_pool);
-        for(rng_pptr = 0; rng_pptr < rng_pool.length; ++rng_pptr)
-          rng_pool[rng_pptr] = 0;
-        rng_pptr = 0;
-        //rng_pool = null;
-      }
-      // TODO: allow reseeding after first request
-      return rng_state.next();
-    }
-
-    function rng_get_bytes(ba) {
-      var i;
-      for(i = 0; i < ba.length; ++i) ba[i] = rng_get_byte();
-    }
-
-    function SecureRandom() {}
-
-    SecureRandom.prototype.nextBytes = rng_get_bytes;
-
-    // prng4.js - uses Arcfour as a PRNG
-
-    function Arcfour() {
-      this.i = 0;
-      this.j = 0;
-      this.S = new Array();
-    }
-
-    // Initialize arcfour context from key, an array of ints, each from [0..255]
-    function ARC4init(key) {
-      var i, j, t;
-      for(i = 0; i < 256; ++i)
-        this.S[i] = i;
-      j = 0;
-      for(i = 0; i < 256; ++i) {
-        j = (j + this.S[i] + key[i % key.length]) & 255;
-        t = this.S[i];
-        this.S[i] = this.S[j];
-        this.S[j] = t;
-      }
-      this.i = 0;
-      this.j = 0;
-    }
-
-    function ARC4next() {
-      var t;
-      this.i = (this.i + 1) & 255;
-      this.j = (this.j + this.S[this.i]) & 255;
-      t = this.S[this.i];
-      this.S[this.i] = this.S[this.j];
-      this.S[this.j] = t;
-      return this.S[(t + this.S[this.i]) & 255];
-    }
-
-    Arcfour.prototype.init = ARC4init;
-    Arcfour.prototype.next = ARC4next;
-
-    // Plug in your RNG constructor here
-    function prng_newstate() {
-      return new Arcfour();
-    }
-
-    // Pool size must be a multiple of 4 and greater than 32.
-    // An array of bytes the size of the pool will be passed to init()
-    var rng_psize = 256;
-
-    if (typeof exports !== 'undefined') {
-        exports = module.exports = {
-            default: BigInteger,
-            BigInteger: BigInteger,
-            SecureRandom: SecureRandom,
-        };
-    } else {
-        this.jsbn = {
-          BigInteger: BigInteger,
-          SecureRandom: SecureRandom
-        };
-    }
-
-}).call(this);
diff --git a/node_modules/jsbn/package.json b/node_modules/jsbn/package.json
deleted file mode 100644
index 97b137c2e2db9..0000000000000
--- a/node_modules/jsbn/package.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
-  "name": "jsbn",
-  "version": "1.1.0",
-  "description": "The jsbn library is a fast, portable implementation of large-number math in pure JavaScript, enabling public-key crypto and other applications on desktop and mobile browsers.",
-  "main": "index.js",
-  "scripts": {
-    "test": "mocha test.js"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/andyperlitch/jsbn.git"
-  },
-  "keywords": [
-    "biginteger",
-    "bignumber",
-    "big",
-    "integer"
-  ],
-  "author": "Tom Wu",
-  "license": "MIT"
-}
diff --git a/node_modules/jsbn/test/es6-import.js b/node_modules/jsbn/test/es6-import.js
deleted file mode 100644
index 668cbdfdc5bef..0000000000000
--- a/node_modules/jsbn/test/es6-import.js
+++ /dev/null
@@ -1,3 +0,0 @@
-import {BigInteger} from '../';
-
-console.log(typeof BigInteger)
diff --git a/node_modules/socks/package.json b/node_modules/socks/package.json
index be8ee73ccbcf6..a7a2a20190ad3 100644
--- a/node_modules/socks/package.json
+++ b/node_modules/socks/package.json
@@ -1,7 +1,7 @@
 {
   "name": "socks",
   "private": false,
-  "version": "2.8.6",
+  "version": "2.8.7",
   "description": "Fully featured SOCKS proxy client supporting SOCKSv4, SOCKSv4a, and SOCKSv5. Includes Bind and Associate functionality.",
   "main": "build/index.js",
   "typings": "typings/index.d.ts",
@@ -44,7 +44,7 @@
     "typescript": "^5.3.3"
   },
   "dependencies": {
-    "ip-address": "^9.0.5",
+    "ip-address": "^10.0.1",
     "smart-buffer": "^4.2.0"
   },
   "scripts": {
diff --git a/node_modules/sprintf-js/CONTRIBUTORS.md b/node_modules/sprintf-js/CONTRIBUTORS.md
deleted file mode 100644
index a16608e936a72..0000000000000
--- a/node_modules/sprintf-js/CONTRIBUTORS.md
+++ /dev/null
@@ -1,26 +0,0 @@
-Alexander Rose [@arose](https://github.com/arose)
-Alexandru Mărășteanu [@alexei](https://github.com/alexei)
-Andras [@andrasq](https://github.com/andrasq)
-Benoit Giannangeli [@giann](https://github.com/giann)
-Branden Visser [@mrvisser](https://github.com/mrvisser)
-David Baird
-daurnimator [@daurnimator](https://github.com/daurnimator)
-Doug Beck [@beck](https://github.com/beck)
-Dzmitry Litskalau [@litmit](https://github.com/litmit)
-Fred Ludlow [@fredludlow](https://github.com/fredludlow)
-Hans Pufal
-Henry [@alograg](https://github.com/alograg)
-Johnny Shields [@johnnyshields](https://github.com/johnnyshields)
-Kamal Abdali
-Matt Simerson [@msimerson](https://github.com/msimerson)
-Maxime Robert [@marob](https://github.com/marob)
-MeriemKhelifi [@MeriemKhelifi](https://github.com/MeriemKhelifi)
-Michael Schramm [@wodka](https://github.com/wodka)
-Nazar Mokrynskyi [@nazar-pc](https://github.com/nazar-pc)
-Oliver Salzburg [@oliversalzburg](https://github.com/oliversalzburg)
-Pablo [@ppollono](https://github.com/ppollono)
-Rabehaja Stevens [@RABEHAJA-STEVENS](https://github.com/RABEHAJA-STEVENS)
-Raphael Pigulla [@pigulla](https://github.com/pigulla)
-rebeccapeltz [@rebeccapeltz](https://github.com/rebeccapeltz)
-Stefan Tingström [@stingstrom](https://github.com/stingstrom)
-Tim Gates [@timgates42](https://github.com/timgates42)
diff --git a/node_modules/sprintf-js/LICENSE b/node_modules/sprintf-js/LICENSE
deleted file mode 100644
index 83f832a2ee282..0000000000000
--- a/node_modules/sprintf-js/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Copyright (c) 2007-present, Alexandru Mărășteanu 
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-* Redistributions of source code must retain the above copyright
-  notice, this list of conditions and the following disclaimer.
-* Redistributions in binary form must reproduce the above copyright
-  notice, this list of conditions and the following disclaimer in the
-  documentation and/or other materials provided with the distribution.
-* Neither the name of this software nor the names of its contributors may be
-  used to endorse or promote products derived from this software without
-  specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
-ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR
-ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
-ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
-SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/node_modules/sprintf-js/dist/.gitattributes b/node_modules/sprintf-js/dist/.gitattributes
deleted file mode 100644
index d35bca01c1201..0000000000000
--- a/node_modules/sprintf-js/dist/.gitattributes
+++ /dev/null
@@ -1,4 +0,0 @@
-#ignore all generated files from diff
-#also skip line ending check
-*.js -diff -text
-*.map -diff -text
diff --git a/node_modules/sprintf-js/dist/angular-sprintf.min.js b/node_modules/sprintf-js/dist/angular-sprintf.min.js
deleted file mode 100644
index 5dff8c54337db..0000000000000
--- a/node_modules/sprintf-js/dist/angular-sprintf.min.js
+++ /dev/null
@@ -1,3 +0,0 @@
-/*! sprintf-js v1.1.3 | Copyright (c) 2007-present, Alexandru Mărășteanu  | BSD-3-Clause */
-!function(){"use strict";angular.module("sprintf",[]).filter("sprintf",function(){return function(){return sprintf.apply(null,arguments)}}).filter("fmt",["$filter",function(t){return t("sprintf")}]).filter("vsprintf",function(){return function(t,n){return vsprintf(t,n)}}).filter("vfmt",["$filter",function(t){return t("vsprintf")}])}();
-//# sourceMappingURL=angular-sprintf.min.js.map
diff --git a/node_modules/sprintf-js/dist/sprintf.min.js b/node_modules/sprintf-js/dist/sprintf.min.js
deleted file mode 100644
index ed09637ea3905..0000000000000
--- a/node_modules/sprintf-js/dist/sprintf.min.js
+++ /dev/null
@@ -1,3 +0,0 @@
-/*! sprintf-js v1.1.3 | Copyright (c) 2007-present, Alexandru Mărășteanu  | BSD-3-Clause */
-!function(){"use strict";var g={not_string:/[^s]/,not_bool:/[^t]/,not_type:/[^T]/,not_primitive:/[^v]/,number:/[diefg]/,numeric_arg:/[bcdiefguxX]/,json:/[j]/,not_json:/[^j]/,text:/^[^\x25]+/,modulo:/^\x25{2}/,placeholder:/^\x25(?:([1-9]\d*)\$|\(([^)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-gijostTuvxX])/,key:/^([a-z_][a-z_\d]*)/i,key_access:/^\.([a-z_][a-z_\d]*)/i,index_access:/^\[(\d+)\]/,sign:/^[+-]/};function y(e){return function(e,t){var r,n,i,s,a,o,p,c,l,u=1,f=e.length,d="";for(n=0;n>>0).toString(8);break;case"s":r=String(r),r=s.precision?r.substring(0,s.precision):r;break;case"t":r=String(!!r),r=s.precision?r.substring(0,s.precision):r;break;case"T":r=Object.prototype.toString.call(r).slice(8,-1).toLowerCase(),r=s.precision?r.substring(0,s.precision):r;break;case"u":r=parseInt(r,10)>>>0;break;case"v":r=r.valueOf(),r=s.precision?r.substring(0,s.precision):r;break;case"x":r=(parseInt(r,10)>>>0).toString(16);break;case"X":r=(parseInt(r,10)>>>0).toString(16).toUpperCase()}g.json.test(s.type)?d+=r:(!g.number.test(s.type)||c&&!s.sign?l="":(l=c?"+":"-",r=r.toString().replace(g.sign,"")),o=s.pad_char?"0"===s.pad_char?"0":s.pad_char.charAt(1):" ",p=s.width-(l+r).length,a=s.width&&0",
-  "main": "src/sprintf.js",
-  "scripts": {
-    "test": "mocha test/*.js",
-    "pretest": "npm run lint",
-    "lint": "eslint .",
-    "lint:fix": "eslint --fix ."
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/alexei/sprintf.js.git"
-  },
-  "license": "BSD-3-Clause",
-  "readmeFilename": "README.md",
-  "devDependencies": {
-    "benchmark": "^2.1.4",
-    "eslint": "^5.10.0",
-    "gulp": "^3.9.1",
-    "gulp-benchmark": "^1.1.1",
-    "gulp-eslint": "^5.0.0",
-    "gulp-header": "^2.0.5",
-    "gulp-mocha": "^6.0.0",
-    "gulp-rename": "^1.4.0",
-    "gulp-sourcemaps": "^2.6.4",
-    "gulp-uglify": "^3.0.1",
-    "mocha": "^5.2.0"
-  },
-  "overrides": {
-    "graceful-fs": "^4.2.11"
-  }
-}
diff --git a/node_modules/sprintf-js/src/angular-sprintf.js b/node_modules/sprintf-js/src/angular-sprintf.js
deleted file mode 100644
index dbfdd65ab2508..0000000000000
--- a/node_modules/sprintf-js/src/angular-sprintf.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/* global angular, sprintf, vsprintf */
-
-!function() {
-    'use strict'
-
-    angular.
-        module('sprintf', []).
-        filter('sprintf', function() {
-            return function() {
-                return sprintf.apply(null, arguments)
-            }
-        }).
-        filter('fmt', ['$filter', function($filter) {
-            return $filter('sprintf')
-        }]).
-        filter('vsprintf', function() {
-            return function(format, argv) {
-                return vsprintf(format, argv)
-            }
-        }).
-        filter('vfmt', ['$filter', function($filter) {
-            return $filter('vsprintf')
-        }])
-}(); // eslint-disable-line
diff --git a/node_modules/sprintf-js/src/sprintf.js b/node_modules/sprintf-js/src/sprintf.js
deleted file mode 100644
index 65d6324645ef1..0000000000000
--- a/node_modules/sprintf-js/src/sprintf.js
+++ /dev/null
@@ -1,231 +0,0 @@
-/* global window, exports, define */
-
-!function() {
-    'use strict'
-
-    var re = {
-        not_string: /[^s]/,
-        not_bool: /[^t]/,
-        not_type: /[^T]/,
-        not_primitive: /[^v]/,
-        number: /[diefg]/,
-        numeric_arg: /[bcdiefguxX]/,
-        json: /[j]/,
-        not_json: /[^j]/,
-        text: /^[^\x25]+/,
-        modulo: /^\x25{2}/,
-        placeholder: /^\x25(?:([1-9]\d*)\$|\(([^)]+)\))?(\+)?(0|'[^$])?(-)?(\d+)?(?:\.(\d+))?([b-gijostTuvxX])/,
-        key: /^([a-z_][a-z_\d]*)/i,
-        key_access: /^\.([a-z_][a-z_\d]*)/i,
-        index_access: /^\[(\d+)\]/,
-        sign: /^[+-]/
-    }
-
-    function sprintf(key) {
-        // `arguments` is not an array, but should be fine for this call
-        return sprintf_format(sprintf_parse(key), arguments)
-    }
-
-    function vsprintf(fmt, argv) {
-        return sprintf.apply(null, [fmt].concat(argv || []))
-    }
-
-    function sprintf_format(parse_tree, argv) {
-        var cursor = 1, tree_length = parse_tree.length, arg, output = '', i, k, ph, pad, pad_character, pad_length, is_positive, sign
-        for (i = 0; i < tree_length; i++) {
-            if (typeof parse_tree[i] === 'string') {
-                output += parse_tree[i]
-            }
-            else if (typeof parse_tree[i] === 'object') {
-                ph = parse_tree[i] // convenience purposes only
-                if (ph.keys) { // keyword argument
-                    arg = argv[cursor]
-                    for (k = 0; k < ph.keys.length; k++) {
-                        if (arg == undefined) {
-                            throw new Error(sprintf('[sprintf] Cannot access property "%s" of undefined value "%s"', ph.keys[k], ph.keys[k-1]))
-                        }
-                        arg = arg[ph.keys[k]]
-                    }
-                }
-                else if (ph.param_no) { // positional argument (explicit)
-                    arg = argv[ph.param_no]
-                }
-                else { // positional argument (implicit)
-                    arg = argv[cursor++]
-                }
-
-                if (re.not_type.test(ph.type) && re.not_primitive.test(ph.type) && arg instanceof Function) {
-                    arg = arg()
-                }
-
-                if (re.numeric_arg.test(ph.type) && (typeof arg !== 'number' && isNaN(arg))) {
-                    throw new TypeError(sprintf('[sprintf] expecting number but found %T', arg))
-                }
-
-                if (re.number.test(ph.type)) {
-                    is_positive = arg >= 0
-                }
-
-                switch (ph.type) {
-                    case 'b':
-                        arg = parseInt(arg, 10).toString(2)
-                        break
-                    case 'c':
-                        arg = String.fromCharCode(parseInt(arg, 10))
-                        break
-                    case 'd':
-                    case 'i':
-                        arg = parseInt(arg, 10)
-                        break
-                    case 'j':
-                        arg = JSON.stringify(arg, null, ph.width ? parseInt(ph.width) : 0)
-                        break
-                    case 'e':
-                        arg = ph.precision ? parseFloat(arg).toExponential(ph.precision) : parseFloat(arg).toExponential()
-                        break
-                    case 'f':
-                        arg = ph.precision ? parseFloat(arg).toFixed(ph.precision) : parseFloat(arg)
-                        break
-                    case 'g':
-                        arg = ph.precision ? String(Number(arg.toPrecision(ph.precision))) : parseFloat(arg)
-                        break
-                    case 'o':
-                        arg = (parseInt(arg, 10) >>> 0).toString(8)
-                        break
-                    case 's':
-                        arg = String(arg)
-                        arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
-                        break
-                    case 't':
-                        arg = String(!!arg)
-                        arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
-                        break
-                    case 'T':
-                        arg = Object.prototype.toString.call(arg).slice(8, -1).toLowerCase()
-                        arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
-                        break
-                    case 'u':
-                        arg = parseInt(arg, 10) >>> 0
-                        break
-                    case 'v':
-                        arg = arg.valueOf()
-                        arg = (ph.precision ? arg.substring(0, ph.precision) : arg)
-                        break
-                    case 'x':
-                        arg = (parseInt(arg, 10) >>> 0).toString(16)
-                        break
-                    case 'X':
-                        arg = (parseInt(arg, 10) >>> 0).toString(16).toUpperCase()
-                        break
-                }
-                if (re.json.test(ph.type)) {
-                    output += arg
-                }
-                else {
-                    if (re.number.test(ph.type) && (!is_positive || ph.sign)) {
-                        sign = is_positive ? '+' : '-'
-                        arg = arg.toString().replace(re.sign, '')
-                    }
-                    else {
-                        sign = ''
-                    }
-                    pad_character = ph.pad_char ? ph.pad_char === '0' ? '0' : ph.pad_char.charAt(1) : ' '
-                    pad_length = ph.width - (sign + arg).length
-                    pad = ph.width ? (pad_length > 0 ? pad_character.repeat(pad_length) : '') : ''
-                    output += ph.align ? sign + arg + pad : (pad_character === '0' ? sign + pad + arg : pad + sign + arg)
-                }
-            }
-        }
-        return output
-    }
-
-    var sprintf_cache = Object.create(null)
-
-    function sprintf_parse(fmt) {
-        if (sprintf_cache[fmt]) {
-            return sprintf_cache[fmt]
-        }
-
-        var _fmt = fmt, match, parse_tree = [], arg_names = 0
-        while (_fmt) {
-            if ((match = re.text.exec(_fmt)) !== null) {
-                parse_tree.push(match[0])
-            }
-            else if ((match = re.modulo.exec(_fmt)) !== null) {
-                parse_tree.push('%')
-            }
-            else if ((match = re.placeholder.exec(_fmt)) !== null) {
-                if (match[2]) {
-                    arg_names |= 1
-                    var field_list = [], replacement_field = match[2], field_match = []
-                    if ((field_match = re.key.exec(replacement_field)) !== null) {
-                        field_list.push(field_match[1])
-                        while ((replacement_field = replacement_field.substring(field_match[0].length)) !== '') {
-                            if ((field_match = re.key_access.exec(replacement_field)) !== null) {
-                                field_list.push(field_match[1])
-                            }
-                            else if ((field_match = re.index_access.exec(replacement_field)) !== null) {
-                                field_list.push(field_match[1])
-                            }
-                            else {
-                                throw new SyntaxError('[sprintf] failed to parse named argument key')
-                            }
-                        }
-                    }
-                    else {
-                        throw new SyntaxError('[sprintf] failed to parse named argument key')
-                    }
-                    match[2] = field_list
-                }
-                else {
-                    arg_names |= 2
-                }
-                if (arg_names === 3) {
-                    throw new Error('[sprintf] mixing positional and named placeholders is not (yet) supported')
-                }
-
-                parse_tree.push(
-                    {
-                        placeholder: match[0],
-                        param_no:    match[1],
-                        keys:        match[2],
-                        sign:        match[3],
-                        pad_char:    match[4],
-                        align:       match[5],
-                        width:       match[6],
-                        precision:   match[7],
-                        type:        match[8]
-                    }
-                )
-            }
-            else {
-                throw new SyntaxError('[sprintf] unexpected placeholder')
-            }
-            _fmt = _fmt.substring(match[0].length)
-        }
-        return sprintf_cache[fmt] = parse_tree
-    }
-
-    /**
-     * export to either browser or node.js
-     */
-    /* eslint-disable quote-props */
-    if (typeof exports !== 'undefined') {
-        exports['sprintf'] = sprintf
-        exports['vsprintf'] = vsprintf
-    }
-    if (typeof window !== 'undefined') {
-        window['sprintf'] = sprintf
-        window['vsprintf'] = vsprintf
-
-        if (typeof define === 'function' && define['amd']) {
-            define(function() {
-                return {
-                    'sprintf': sprintf,
-                    'vsprintf': vsprintf
-                }
-            })
-        }
-    }
-    /* eslint-enable quote-props */
-}(); // eslint-disable-line
diff --git a/package-lock.json b/package-lock.json
index ffd1464d5d4ca..f4764fbb195be 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -8489,13 +8489,11 @@
       }
     },
     "node_modules/ip-address": {
-      "version": "9.0.5",
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz",
+      "integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==",
       "inBundle": true,
       "license": "MIT",
-      "dependencies": {
-        "jsbn": "1.1.0",
-        "sprintf-js": "^1.1.3"
-      },
       "engines": {
         "node": ">= 12"
       }
@@ -9231,11 +9229,6 @@
         "js-yaml": "bin/js-yaml.js"
       }
     },
-    "node_modules/jsbn": {
-      "version": "1.1.0",
-      "inBundle": true,
-      "license": "MIT"
-    },
     "node_modules/jsep": {
       "version": "1.4.0",
       "dev": true,
@@ -12959,11 +12952,13 @@
       }
     },
     "node_modules/socks": {
-      "version": "2.8.6",
+      "version": "2.8.7",
+      "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz",
+      "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
-        "ip-address": "^9.0.5",
+        "ip-address": "^10.0.1",
         "smart-buffer": "^4.2.0"
       },
       "engines": {
@@ -13174,11 +13169,6 @@
         "node": ">= 10.x"
       }
     },
-    "node_modules/sprintf-js": {
-      "version": "1.1.3",
-      "inBundle": true,
-      "license": "BSD-3-Clause"
-    },
     "node_modules/ssri": {
       "version": "12.0.0",
       "inBundle": true,

From 5f6664b7a8f622cfdd356d776e97dc8bae7e0ada Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:04:58 -0700
Subject: [PATCH 37/63] deps: spdx-license-ids@3.0.22

---
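(editor's note, not applied by git am: text placed here, between the "---"
separator and the diffstat, is ignored when the patch is applied)

spdx-license-ids is the identifier list that spdx-expression-parse (already a
direct npm dependency) checks license expressions against, so this bump is what
lets package.json "license" fields using the newly added IDs parse cleanly. A
minimal illustrative sketch of that relationship, assuming the published
spdx-expression-parse@4 API and its documented AST shape:

    // sketch only, not part of this patch
    const parse = require('spdx-expression-parse')

    // 'FSL-1.1-MIT' is one of the identifiers added in 3.0.22; against the old
    // list this call throws, against the new list it returns roughly:
    // { left: { license: 'MIT' }, conjunction: 'or', right: { license: 'FSL-1.1-MIT' } }
    console.log(parse('MIT OR FSL-1.1-MIT'))
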
 node_modules/spdx-license-ids/index.json   | 19 +++++++++++++++++++
 node_modules/spdx-license-ids/package.json |  2 +-
 package-lock.json                          |  4 +++-
 3 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/node_modules/spdx-license-ids/index.json b/node_modules/spdx-license-ids/index.json
index c1ae5520b18ad..b09dc98435c9e 100644
--- a/node_modules/spdx-license-ids/index.json
+++ b/node_modules/spdx-license-ids/index.json
@@ -44,12 +44,15 @@
 	"Artistic-1.0-Perl",
 	"Artistic-1.0-cl8",
 	"Artistic-2.0",
+	"Artistic-dist",
+	"Aspell-RU",
 	"BSD-1-Clause",
 	"BSD-2-Clause",
 	"BSD-2-Clause-Darwin",
 	"BSD-2-Clause-Patent",
 	"BSD-2-Clause-Views",
 	"BSD-2-Clause-first-lines",
+	"BSD-2-Clause-pkgconf-disclaimer",
 	"BSD-3-Clause",
 	"BSD-3-Clause-Attribution",
 	"BSD-3-Clause-Clear",
@@ -190,6 +193,7 @@
 	"Cornell-Lossless-JPEG",
 	"Cronyx",
 	"Crossword",
+	"CryptoSwift",
 	"CrystalStacker",
 	"Cube",
 	"D-FSL-1.0",
@@ -200,6 +204,7 @@
 	"DRL-1.0",
 	"DRL-1.1",
 	"DSDP",
+	"DocBook-DTD",
 	"DocBook-Schema",
 	"DocBook-Stylesheet",
 	"DocBook-XML",
@@ -225,7 +230,10 @@
 	"FSFAP-no-warranty-disclaimer",
 	"FSFUL",
 	"FSFULLR",
+	"FSFULLRSD",
 	"FSFULLRWD",
+	"FSL-1.1-ALv2",
+	"FSL-1.1-MIT",
 	"FTL",
 	"Fair",
 	"Ferguson-Twofish",
@@ -261,11 +269,13 @@
 	"GPL-2.0-or-later",
 	"GPL-3.0-only",
 	"GPL-3.0-or-later",
+	"Game-Programming-Gems",
 	"Giftware",
 	"Glide",
 	"Glulxe",
 	"Graphics-Gems",
 	"Gutmann",
+	"HDF5",
 	"HIDAPI",
 	"HP-1986",
 	"HP-1989",
@@ -411,6 +421,7 @@
 	"NPL-1.1",
 	"NPOSL-3.0",
 	"NRL",
+	"NTIA-PD",
 	"NTP",
 	"NTP-0",
 	"Naumen",
@@ -513,11 +524,13 @@
 	"SMLNJ",
 	"SMPPL",
 	"SNIA",
+	"SOFA",
 	"SPL-1.0",
 	"SSH-OpenSSH",
 	"SSH-short",
 	"SSLeay-standalone",
 	"SSPL-1.0",
+	"SUL-1.0",
 	"SWL",
 	"Saxpath",
 	"SchemeReport",
@@ -563,6 +576,8 @@
 	"Unicode-TOU",
 	"UnixCrypt",
 	"Unlicense",
+	"Unlicense-libtelnet",
+	"Unlicense-libwhirlpool",
 	"VOSTROM",
 	"VSL-1.0",
 	"Vim",
@@ -616,6 +631,8 @@
 	"gtkbook",
 	"hdparm",
 	"iMatix",
+	"jove",
+	"libpng-1.6.35",
 	"libpng-2.0",
 	"libselinux-1.0",
 	"libtiff",
@@ -623,10 +640,12 @@
 	"lsof",
 	"magaz",
 	"mailprio",
+	"man2html",
 	"metamail",
 	"mpi-permissive",
 	"mpich2",
 	"mplus",
+	"ngrep",
 	"pkgconf",
 	"pnmstitch",
 	"psfrag",
diff --git a/node_modules/spdx-license-ids/package.json b/node_modules/spdx-license-ids/package.json
index 9b02c26760459..201e888cecfaa 100644
--- a/node_modules/spdx-license-ids/package.json
+++ b/node_modules/spdx-license-ids/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "spdx-license-ids",
-	"version": "3.0.21",
+	"version": "3.0.22",
 	"description": "A list of SPDX license identifiers",
 	"repository": "jslicense/spdx-license-ids",
 	"author": "Shinnosuke Watanabe (https://github.com/shinnn)",
diff --git a/package-lock.json b/package-lock.json
index f4764fbb195be..a6b73d26bfa89 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -13146,7 +13146,9 @@
       }
     },
     "node_modules/spdx-license-ids": {
-      "version": "3.0.21",
+      "version": "3.0.22",
+      "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz",
+      "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==",
       "inBundle": true,
       "license": "CC0-1.0"
     },

From 46035dbf4d87dad76051410c6b1b2536a874d9ed Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:08:28 -0700
Subject: [PATCH 38/63] deps: debug@4.4.3

---
 node_modules/debug/package.json | 2 +-
 package-lock.json               | 4 +++-
 2 files changed, 4 insertions(+), 2 deletions(-)

diff --git a/node_modules/debug/package.json b/node_modules/debug/package.json
index afc2f8b615b22..ee8abb523dbe0 100644
--- a/node_modules/debug/package.json
+++ b/node_modules/debug/package.json
@@ -1,6 +1,6 @@
 {
   "name": "debug",
-  "version": "4.4.1",
+  "version": "4.4.3",
   "repository": {
     "type": "git",
     "url": "git://github.com/debug-js/debug.git"
diff --git a/package-lock.json b/package-lock.json
index a6b73d26bfa89..763c7962b14a9 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -6360,7 +6360,9 @@
       }
     },
     "node_modules/debug": {
-      "version": "4.4.1",
+      "version": "4.4.3",
+      "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
+      "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {

From c5157c978fc235dea3a70235b6d08902473058f4 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:16:51 -0700
Subject: [PATCH 39/63] deps: chalk@5.6.2

---
 node_modules/chalk/package.json                          | 2 +-
 node_modules/chalk/source/vendor/supports-color/index.js | 8 ++++++++
 package-lock.json                                        | 6 ++++--
 package.json                                             | 2 +-
 4 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/node_modules/chalk/package.json b/node_modules/chalk/package.json
index 23b4ce33dc667..c9e0dc52ba744 100644
--- a/node_modules/chalk/package.json
+++ b/node_modules/chalk/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "chalk",
-	"version": "5.4.1",
+	"version": "5.6.2",
 	"description": "Terminal string styling done right",
 	"license": "MIT",
 	"repository": "chalk/chalk",
diff --git a/node_modules/chalk/source/vendor/supports-color/index.js b/node_modules/chalk/source/vendor/supports-color/index.js
index 1388372674d49..265d7f8581953 100644
--- a/node_modules/chalk/source/vendor/supports-color/index.js
+++ b/node_modules/chalk/source/vendor/supports-color/index.js
@@ -135,6 +135,14 @@ function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
 		return 3;
 	}
 
+	if (env.TERM === 'xterm-ghostty') {
+		return 3;
+	}
+
+	if (env.TERM === 'wezterm') {
+		return 3;
+	}
+
 	if ('TERM_PROGRAM' in env) {
 		const version = Number.parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
 
diff --git a/package-lock.json b/package-lock.json
index 763c7962b14a9..a172d542d3c0a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -97,7 +97,7 @@
         "abbrev": "^3.0.1",
         "archy": "~1.0.0",
         "cacache": "^20.0.1",
-        "chalk": "^5.4.1",
+        "chalk": "^5.6.2",
         "ci-info": "^4.3.0",
         "cli-columns": "^4.0.0",
         "fastest-levenshtein": "^1.0.16",
@@ -5580,7 +5580,9 @@
       }
     },
     "node_modules/chalk": {
-      "version": "5.4.1",
+      "version": "5.6.2",
+      "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz",
+      "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
diff --git a/package.json b/package.json
index 865f53536461e..af99cf2b786cd 100644
--- a/package.json
+++ b/package.json
@@ -64,7 +64,7 @@
     "abbrev": "^3.0.1",
     "archy": "~1.0.0",
     "cacache": "^20.0.1",
-    "chalk": "^5.4.1",
+    "chalk": "^5.6.2",
     "ci-info": "^4.3.0",
     "cli-columns": "^4.0.0",
     "fastest-levenshtein": "^1.0.16",

From 09a7494b59a89faa1f550864ce9f68b0c86179f1 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:18:55 -0700
Subject: [PATCH 40/63] deps: supports-color@10.2.2

---
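(editor's note, not applied by git am: text between "---" and the diffstat is
ignored when the patch is applied)

Besides the version bump, this update teaches supports-color to treat
TERM=xterm-ghostty and TERM=wezterm as truecolor terminals (level 3), the same
addition made to chalk's vendored copy in the previous patch. A rough
consumer-side sketch, assuming the supports-color@10 ESM API (a default export
with stdout/stderr descriptors):

    // sketch only, not part of this patch
    import supportsColor from 'supports-color'

    // With TERM=xterm-ghostty or TERM=wezterm on a TTY, stdout now reports
    // { level: 3, hasBasic: true, has256: true, has16m: true }
    if (supportsColor.stdout && supportsColor.stdout.has16m) {
      // safe to emit 24-bit ANSI color escape sequences
    }
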
 node_modules/supports-color/index.js     | 8 ++++++++
 node_modules/supports-color/package.json | 2 +-
 package-lock.json                        | 6 ++++--
 package.json                             | 2 +-
 4 files changed, 14 insertions(+), 4 deletions(-)

diff --git a/node_modules/supports-color/index.js b/node_modules/supports-color/index.js
index b22d50edbdc52..906a6f9b83224 100644
--- a/node_modules/supports-color/index.js
+++ b/node_modules/supports-color/index.js
@@ -147,6 +147,14 @@ function _supportsColor(haveStream, {streamIsTTY, sniffFlags = true} = {}) {
 		return 3;
 	}
 
+	if (env.TERM === 'xterm-ghostty') {
+		return 3;
+	}
+
+	if (env.TERM === 'wezterm') {
+		return 3;
+	}
+
 	if ('TERM_PROGRAM' in env) {
 		const version = Number.parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
 
diff --git a/node_modules/supports-color/package.json b/node_modules/supports-color/package.json
index 8f71b410982b4..8915597ab45a0 100644
--- a/node_modules/supports-color/package.json
+++ b/node_modules/supports-color/package.json
@@ -1,6 +1,6 @@
 {
 	"name": "supports-color",
-	"version": "10.0.0",
+	"version": "10.2.2",
 	"description": "Detect whether a terminal supports color",
 	"license": "MIT",
 	"repository": "chalk/supports-color",
diff --git a/package-lock.json b/package-lock.json
index a172d542d3c0a..b2bb4e88fb4e6 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -143,7 +143,7 @@
         "semver": "^7.7.2",
         "spdx-expression-parse": "^4.0.0",
         "ssri": "^12.0.0",
-        "supports-color": "^10.0.0",
+        "supports-color": "^10.2.2",
         "tar": "^6.2.1",
         "text-table": "~0.2.0",
         "tiny-relative-date": "^1.3.0",
@@ -13380,7 +13380,9 @@
       }
     },
     "node_modules/supports-color": {
-      "version": "10.0.0",
+      "version": "10.2.2",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz",
+      "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
diff --git a/package.json b/package.json
index af99cf2b786cd..f94232a060051 100644
--- a/package.json
+++ b/package.json
@@ -110,7 +110,7 @@
     "semver": "^7.7.2",
     "spdx-expression-parse": "^4.0.0",
     "ssri": "^12.0.0",
-    "supports-color": "^10.0.0",
+    "supports-color": "^10.2.2",
     "tar": "^6.2.1",
     "text-table": "~0.2.0",
     "tiny-relative-date": "^1.3.0",

From 3b43bf79d36a04ee65f562528c7ac54ebafaf79b Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:20:44 -0700
Subject: [PATCH 41/63] chore: dev dependency updates

---
 package-lock.json | 223 ++++++++++++++++++++++++++++++++--------------
 1 file changed, 157 insertions(+), 66 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index b2bb4e88fb4e6..1fc738ea31915 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1743,7 +1743,9 @@
       }
     },
     "docs/node_modules/vfile-message": {
-      "version": "4.0.2",
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1848,18 +1850,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/@ampproject/remapping": {
-      "version": "2.3.0",
-      "dev": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@jridgewell/gen-mapping": "^0.3.5",
-        "@jridgewell/trace-mapping": "^0.3.24"
-      },
-      "engines": {
-        "node": ">=6.0.0"
-      }
-    },
     "node_modules/@asamuzakjp/css-color": {
       "version": "3.2.0",
       "dev": true,
@@ -1872,6 +1862,13 @@
         "lru-cache": "^10.4.3"
       }
     },
+    "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": {
+      "version": "10.4.3",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
+      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/@babel/code-frame": {
       "version": "7.27.1",
       "dev": true,
@@ -1886,7 +1883,9 @@
       }
     },
     "node_modules/@babel/compat-data": {
-      "version": "7.28.0",
+      "version": "7.28.4",
+      "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz",
+      "integrity": "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1894,21 +1893,23 @@
       }
     },
     "node_modules/@babel/core": {
-      "version": "7.28.0",
+      "version": "7.28.4",
+      "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz",
+      "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
       "dependencies": {
-        "@ampproject/remapping": "^2.2.0",
         "@babel/code-frame": "^7.27.1",
-        "@babel/generator": "^7.28.0",
+        "@babel/generator": "^7.28.3",
         "@babel/helper-compilation-targets": "^7.27.2",
-        "@babel/helper-module-transforms": "^7.27.3",
-        "@babel/helpers": "^7.27.6",
-        "@babel/parser": "^7.28.0",
+        "@babel/helper-module-transforms": "^7.28.3",
+        "@babel/helpers": "^7.28.4",
+        "@babel/parser": "^7.28.4",
         "@babel/template": "^7.27.2",
-        "@babel/traverse": "^7.28.0",
-        "@babel/types": "^7.28.0",
+        "@babel/traverse": "^7.28.4",
+        "@babel/types": "^7.28.4",
+        "@jridgewell/remapping": "^2.3.5",
         "convert-source-map": "^2.0.0",
         "debug": "^4.1.0",
         "gensync": "^1.0.0-beta.2",
@@ -1937,12 +1938,14 @@
       }
     },
     "node_modules/@babel/generator": {
-      "version": "7.28.0",
+      "version": "7.28.3",
+      "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz",
+      "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@babel/parser": "^7.28.0",
-        "@babel/types": "^7.28.0",
+        "@babel/parser": "^7.28.3",
+        "@babel/types": "^7.28.2",
         "@jridgewell/gen-mapping": "^0.3.12",
         "@jridgewell/trace-mapping": "^0.3.28",
         "jsesc": "^3.0.2"
@@ -2008,13 +2011,15 @@
       }
     },
     "node_modules/@babel/helper-module-transforms": {
-      "version": "7.27.3",
+      "version": "7.28.3",
+      "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz",
+      "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@babel/helper-module-imports": "^7.27.1",
         "@babel/helper-validator-identifier": "^7.27.1",
-        "@babel/traverse": "^7.27.3"
+        "@babel/traverse": "^7.28.3"
       },
       "engines": {
         "node": ">=6.9.0"
@@ -2048,23 +2053,27 @@
       }
     },
     "node_modules/@babel/helpers": {
-      "version": "7.27.6",
+      "version": "7.28.4",
+      "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
+      "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@babel/template": "^7.27.2",
-        "@babel/types": "^7.27.6"
+        "@babel/types": "^7.28.4"
       },
       "engines": {
         "node": ">=6.9.0"
       }
     },
     "node_modules/@babel/parser": {
-      "version": "7.28.0",
+      "version": "7.28.4",
+      "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz",
+      "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@babel/types": "^7.28.0"
+        "@babel/types": "^7.28.4"
       },
       "bin": {
         "parser": "bin/babel-parser.js"
@@ -2087,16 +2096,18 @@
       }
     },
     "node_modules/@babel/traverse": {
-      "version": "7.28.0",
+      "version": "7.28.4",
+      "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz",
+      "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@babel/code-frame": "^7.27.1",
-        "@babel/generator": "^7.28.0",
+        "@babel/generator": "^7.28.3",
         "@babel/helper-globals": "^7.28.0",
-        "@babel/parser": "^7.28.0",
+        "@babel/parser": "^7.28.4",
         "@babel/template": "^7.27.2",
-        "@babel/types": "^7.28.0",
+        "@babel/types": "^7.28.4",
         "debug": "^4.3.1"
       },
       "engines": {
@@ -2104,7 +2115,9 @@
       }
     },
     "node_modules/@babel/types": {
-      "version": "7.28.1",
+      "version": "7.28.4",
+      "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz",
+      "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2357,7 +2370,9 @@
       }
     },
     "node_modules/@csstools/color-helpers": {
-      "version": "5.0.2",
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz",
+      "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==",
       "dev": true,
       "funding": [
         {
@@ -2397,7 +2412,9 @@
       }
     },
     "node_modules/@csstools/css-color-parser": {
-      "version": "3.0.10",
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz",
+      "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==",
       "dev": true,
       "funding": [
         {
@@ -2411,7 +2428,7 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "@csstools/color-helpers": "^5.0.2",
+        "@csstools/color-helpers": "^5.1.0",
         "@csstools/css-calc": "^2.1.4"
       },
       "engines": {
@@ -2464,7 +2481,9 @@
       }
     },
     "node_modules/@eslint-community/eslint-utils": {
-      "version": "4.7.0",
+      "version": "4.9.0",
+      "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz",
+      "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2869,7 +2888,9 @@
       }
     },
     "node_modules/@jridgewell/gen-mapping": {
-      "version": "0.3.12",
+      "version": "0.3.13",
+      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
+      "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2877,6 +2898,17 @@
         "@jridgewell/trace-mapping": "^0.3.24"
       }
     },
+    "node_modules/@jridgewell/remapping": {
+      "version": "2.3.5",
+      "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz",
+      "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@jridgewell/gen-mapping": "^0.3.5",
+        "@jridgewell/trace-mapping": "^0.3.24"
+      }
+    },
     "node_modules/@jridgewell/resolve-uri": {
       "version": "3.1.2",
       "dev": true,
@@ -2886,12 +2918,16 @@
       }
     },
     "node_modules/@jridgewell/sourcemap-codec": {
-      "version": "1.5.4",
+      "version": "1.5.5",
+      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
+      "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@jridgewell/trace-mapping": {
-      "version": "0.3.29",
+      "version": "0.3.31",
+      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
+      "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4817,12 +4853,14 @@
       "license": "MIT"
     },
     "node_modules/@types/node": {
-      "version": "24.1.0",
+      "version": "24.5.2",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.2.tgz",
+      "integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
       "dependencies": {
-        "undici-types": "~7.8.0"
+        "undici-types": "~7.12.0"
       }
     },
     "node_modules/@types/normalize-package-data": {
@@ -4864,7 +4902,9 @@
       "license": "ISC"
     },
     "node_modules/@xmldom/xmldom": {
-      "version": "0.8.10",
+      "version": "0.8.11",
+      "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.11.tgz",
+      "integrity": "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5284,9 +5324,19 @@
       }
     },
     "node_modules/b4a": {
-      "version": "1.6.7",
+      "version": "1.7.1",
+      "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.7.1.tgz",
+      "integrity": "sha512-ZovbrBV0g6JxK5cGUF1Suby1vLfKjv4RWi8IxoaO/Mon8BDD9I21RxjHFtgQ+kskJqLAVyQZly3uMBui+vhc8Q==",
       "dev": true,
-      "license": "Apache-2.0"
+      "license": "Apache-2.0",
+      "peerDependencies": {
+        "react-native-b4a": "*"
+      },
+      "peerDependenciesMeta": {
+        "react-native-b4a": {
+          "optional": true
+        }
+      }
     },
     "node_modules/bail": {
       "version": "2.0.2",
@@ -5303,11 +5353,23 @@
       "license": "MIT"
     },
     "node_modules/bare-events": {
-      "version": "2.6.0",
+      "version": "2.7.0",
+      "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.7.0.tgz",
+      "integrity": "sha512-b3N5eTW1g7vXkw+0CXh/HazGTcO5KYuu/RCNaJbDMPI6LHDi+7qe8EmxKUVe1sUbY2KZOVZFyj62x0OEz9qyAA==",
       "dev": true,
       "license": "Apache-2.0",
       "optional": true
     },
+    "node_modules/baseline-browser-mapping": {
+      "version": "2.8.6",
+      "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.6.tgz",
+      "integrity": "sha512-wrH5NNqren/QMtKUEEJf7z86YjfqW/2uw3IL3/xpqZUC95SSVIFXYQeeGjL6FT/X68IROu6RMehZQS5foy2BXw==",
+      "dev": true,
+      "license": "Apache-2.0",
+      "bin": {
+        "baseline-browser-mapping": "dist/cli.js"
+      }
+    },
     "node_modules/basic-auth-parser": {
       "version": "0.0.2-1",
       "dev": true
@@ -5383,7 +5445,9 @@
       }
     },
     "node_modules/browserslist": {
-      "version": "4.25.1",
+      "version": "4.26.2",
+      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.2.tgz",
+      "integrity": "sha512-ECFzp6uFOSB+dcZ5BK/IBaGWssbSYBHvuMeMt3MMFyhI0Z8SqGgEkBLARgpRH3hutIgPVsALcMwbDrJqPxQ65A==",
       "dev": true,
       "funding": [
         {
@@ -5402,9 +5466,10 @@
       "license": "MIT",
       "peer": true,
       "dependencies": {
-        "caniuse-lite": "^1.0.30001726",
-        "electron-to-chromium": "^1.5.173",
-        "node-releases": "^2.0.19",
+        "baseline-browser-mapping": "^2.8.3",
+        "caniuse-lite": "^1.0.30001741",
+        "electron-to-chromium": "^1.5.218",
+        "node-releases": "^2.0.21",
         "update-browserslist-db": "^1.1.3"
       },
       "bin": {
@@ -5552,7 +5617,9 @@
       }
     },
     "node_modules/caniuse-lite": {
-      "version": "1.0.30001727",
+      "version": "1.0.30001743",
+      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001743.tgz",
+      "integrity": "sha512-e6Ojr7RV14Un7dz6ASD0aZDmQPT/A+eZU+nuTNfjqmRrmkmQlnTNWH0SKmqagx9PeW87UVqapSurtAXifmtdmw==",
       "dev": true,
       "funding": [
         {
@@ -6428,7 +6495,9 @@
       }
     },
     "node_modules/dedent": {
-      "version": "1.6.0",
+      "version": "1.7.0",
+      "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.0.tgz",
+      "integrity": "sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==",
       "dev": true,
       "license": "MIT",
       "peerDependencies": {
@@ -6638,7 +6707,9 @@
       "license": "MIT"
     },
     "node_modules/electron-to-chromium": {
-      "version": "1.5.189",
+      "version": "1.5.222",
+      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.222.tgz",
+      "integrity": "sha512-gA7psSwSwQRE60CEoLz6JBCQPIxNeuzB2nL8vE03GK/OHxlvykbLyeiumQy1iH5C2f3YbRAZpGCMT12a/9ih9w==",
       "dev": true,
       "license": "ISC"
     },
@@ -6681,7 +6752,9 @@
       "license": "MIT"
     },
     "node_modules/error-ex": {
-      "version": "1.3.2",
+      "version": "1.3.4",
+      "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz",
+      "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7405,7 +7478,9 @@
       "license": "MIT"
     },
     "node_modules/fast-uri": {
-      "version": "3.0.6",
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz",
+      "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==",
       "dev": true,
       "funding": [
         {
@@ -8416,7 +8491,9 @@
       }
     },
     "node_modules/import-meta-resolve": {
-      "version": "4.1.0",
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.2.0.tgz",
+      "integrity": "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9182,7 +9259,9 @@
       }
     },
     "node_modules/istanbul-reports": {
-      "version": "3.1.7",
+      "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz",
+      "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -9210,7 +9289,9 @@
       }
     },
     "node_modules/jiti": {
-      "version": "2.4.2",
+      "version": "2.5.1",
+      "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.5.1.tgz",
+      "integrity": "sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -10975,7 +11056,9 @@
       }
     },
     "node_modules/node-releases": {
-      "version": "2.0.19",
+      "version": "2.0.21",
+      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz",
+      "integrity": "sha512-5b0pgg78U3hwXkCM8Z9b2FJdPZlr9Psr9V2gQPESdGHqbntyFJKFW4r5TeWGFzafGY3hzs1JC62VEQMbl1JFkw==",
       "dev": true,
       "license": "MIT"
     },
@@ -11143,7 +11226,9 @@
       }
     },
     "node_modules/nwsapi": {
-      "version": "2.2.20",
+      "version": "2.2.22",
+      "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.22.tgz",
+      "integrity": "sha512-ujSMe1OWVn55euT1ihwCI1ZcAaAU3nxUiDwfDQldc51ZXaB9m2AyOn6/jh1BLe2t/G8xd6uKG1UBF2aZJeg2SQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -16071,7 +16156,9 @@
       }
     },
     "node_modules/typescript": {
-      "version": "5.8.3",
+      "version": "5.9.2",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz",
+      "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -16121,7 +16208,9 @@
       }
     },
     "node_modules/undici-types": {
-      "version": "7.8.0",
+      "version": "7.12.0",
+      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz",
+      "integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -16786,7 +16875,9 @@
       "license": "ISC"
     },
     "node_modules/yaml": {
-      "version": "2.8.0",
+      "version": "2.8.1",
+      "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz",
+      "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==",
       "dev": true,
       "license": "ISC",
       "bin": {

From 6e4d673138ee4026081e72bea1f6cdfc14516a98 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:36:39 -0700
Subject: [PATCH 42/63] deps: isexe@3.1.1

---
 node_modules/chownr/LICENSE                   |  2 +-
 .../node_modules => }/isexe/dist/cjs/index.js |  0
 .../isexe/dist/cjs/options.js                 |  0
 .../isexe/dist/cjs/package.json               |  0
 .../node_modules => }/isexe/dist/cjs/posix.js |  0
 .../node_modules => }/isexe/dist/cjs/win32.js |  0
 .../node_modules => }/isexe/dist/mjs/index.js |  0
 .../isexe/dist/mjs/options.js                 |  0
 .../isexe/dist/mjs/package.json               |  0
 .../node_modules => }/isexe/dist/mjs/posix.js |  0
 .../node_modules => }/isexe/dist/mjs/win32.js |  0
 .../node_modules => }/isexe/package.json      |  0
 node_modules/which/node_modules/isexe/LICENSE | 15 ------
 package-lock.json                             | 46 +++++++++++--------
 14 files changed, 29 insertions(+), 34 deletions(-)
 rename node_modules/{which/node_modules => }/isexe/dist/cjs/index.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/cjs/options.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/cjs/package.json (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/cjs/posix.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/cjs/win32.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/mjs/index.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/mjs/options.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/mjs/package.json (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/mjs/posix.js (100%)
 rename node_modules/{which/node_modules => }/isexe/dist/mjs/win32.js (100%)
 rename node_modules/{which/node_modules => }/isexe/package.json (100%)
 delete mode 100644 node_modules/which/node_modules/isexe/LICENSE

diff --git a/node_modules/chownr/LICENSE b/node_modules/chownr/LICENSE
index 19129e315fe59..c925dbe826b67 100644
--- a/node_modules/chownr/LICENSE
+++ b/node_modules/chownr/LICENSE
@@ -1,6 +1,6 @@
 The ISC License
 
-Copyright (c) Isaac Z. Schlueter and Contributors
+Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
 
 Permission to use, copy, modify, and/or distribute this software for any
 purpose with or without fee is hereby granted, provided that the above
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/index.js b/node_modules/isexe/dist/cjs/index.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/cjs/index.js
rename to node_modules/isexe/dist/cjs/index.js
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/options.js b/node_modules/isexe/dist/cjs/options.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/cjs/options.js
rename to node_modules/isexe/dist/cjs/options.js
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/package.json b/node_modules/isexe/dist/cjs/package.json
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/cjs/package.json
rename to node_modules/isexe/dist/cjs/package.json
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/posix.js b/node_modules/isexe/dist/cjs/posix.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/cjs/posix.js
rename to node_modules/isexe/dist/cjs/posix.js
diff --git a/node_modules/which/node_modules/isexe/dist/cjs/win32.js b/node_modules/isexe/dist/cjs/win32.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/cjs/win32.js
rename to node_modules/isexe/dist/cjs/win32.js
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/index.js b/node_modules/isexe/dist/mjs/index.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/mjs/index.js
rename to node_modules/isexe/dist/mjs/index.js
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/options.js b/node_modules/isexe/dist/mjs/options.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/mjs/options.js
rename to node_modules/isexe/dist/mjs/options.js
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/package.json b/node_modules/isexe/dist/mjs/package.json
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/mjs/package.json
rename to node_modules/isexe/dist/mjs/package.json
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/posix.js b/node_modules/isexe/dist/mjs/posix.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/mjs/posix.js
rename to node_modules/isexe/dist/mjs/posix.js
diff --git a/node_modules/which/node_modules/isexe/dist/mjs/win32.js b/node_modules/isexe/dist/mjs/win32.js
similarity index 100%
rename from node_modules/which/node_modules/isexe/dist/mjs/win32.js
rename to node_modules/isexe/dist/mjs/win32.js
diff --git a/node_modules/which/node_modules/isexe/package.json b/node_modules/isexe/package.json
similarity index 100%
rename from node_modules/which/node_modules/isexe/package.json
rename to node_modules/isexe/package.json
diff --git a/node_modules/which/node_modules/isexe/LICENSE b/node_modules/which/node_modules/isexe/LICENSE
deleted file mode 100644
index c925dbe826b67..0000000000000
--- a/node_modules/which/node_modules/isexe/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/package-lock.json b/package-lock.json
index 1fc738ea31915..5f1552405084e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -3875,14 +3875,6 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/isexe": {
-      "version": "3.1.1",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": ">=16"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/jackspeak": {
       "version": "3.4.3",
       "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
@@ -6251,6 +6243,13 @@
         "node": ">= 8"
       }
     },
+    "node_modules/cross-spawn/node_modules/isexe": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+      "inBundle": true,
+      "license": "ISC"
+    },
     "node_modules/cross-spawn/node_modules/which": {
       "version": "2.0.2",
       "inBundle": true,
@@ -9074,9 +9073,14 @@
       "license": "MIT"
     },
     "node_modules/isexe": {
-      "version": "2.0.0",
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
+      "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
       "inBundle": true,
-      "license": "ISC"
+      "license": "ISC",
+      "engines": {
+        "node": ">=16"
+      }
     },
     "node_modules/istanbul-lib-coverage": {
       "version": "3.2.2",
@@ -13158,6 +13162,13 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/spawn-wrap/node_modules/isexe": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/spawn-wrap/node_modules/minimatch": {
       "version": "3.1.2",
       "dev": true,
@@ -14883,6 +14894,13 @@
         "node": ">=8"
       }
     },
+    "node_modules/tap/node_modules/isexe": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
+      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/tap/node_modules/jackspeak": {
       "version": "1.4.2",
       "dev": true,
@@ -16674,14 +16692,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/which/node_modules/isexe": {
-      "version": "3.1.1",
-      "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": ">=16"
-      }
-    },
     "node_modules/word-wrap": {
       "version": "1.2.5",
       "dev": true,

From 099238ac13ba535c99ff51bde348fcd9f6b86542 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 11:37:56 -0700
Subject: [PATCH 43/63] deps: fdir@6.5.0

---
 node_modules/.gitignore                       |    6 +-
 .../binary-extensions/binary-extensions.json  |  264 -
 node_modules/binary-extensions/index.js       |    3 -
 node_modules/binary-extensions/license        |   10 -
 node_modules/binary-extensions/package.json   |   45 -
 .../cross-spawn/node_modules/isexe/LICENSE    |   15 +
 .../cross-spawn/node_modules/isexe/index.js   |   57 +
 .../cross-spawn/node_modules/isexe/mode.js    |   41 +
 .../node_modules/isexe/package.json           |   31 +
 .../node_modules/isexe/test/basic.js          |  221 +
 .../cross-spawn/node_modules/isexe/windows.js |   42 +
 package-lock.json                             | 4676 +++++++++--------
 12 files changed, 3034 insertions(+), 2377 deletions(-)
 delete mode 100644 node_modules/binary-extensions/binary-extensions.json
 delete mode 100644 node_modules/binary-extensions/index.js
 delete mode 100644 node_modules/binary-extensions/license
 delete mode 100644 node_modules/binary-extensions/package.json
 create mode 100644 node_modules/cross-spawn/node_modules/isexe/LICENSE
 create mode 100644 node_modules/cross-spawn/node_modules/isexe/index.js
 create mode 100644 node_modules/cross-spawn/node_modules/isexe/mode.js
 create mode 100644 node_modules/cross-spawn/node_modules/isexe/package.json
 create mode 100644 node_modules/cross-spawn/node_modules/isexe/test/basic.js
 create mode 100644 node_modules/cross-spawn/node_modules/isexe/windows.js

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 8883d013963f4..f146e9040bbae 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -57,7 +57,6 @@
 !/archy
 !/balanced-match
 !/bin-links
-!/binary-extensions
 !/brace-expansion
 !/cacache
 !/chalk
@@ -101,6 +100,7 @@
 !/ip-regex
 !/is-cidr
 !/is-fullwidth-code-point
+!/isexe
 !/jackspeak
 !/json-parse-even-better-errors
 !/json-stringify-nice
@@ -215,7 +215,6 @@
 /tar/node_modules/minizlib/node_modules/*
 !/tar/node_modules/minizlib/node_modules/minipass
 !/tar/node_modules/mkdirp
-!/tar/node_modules/yallist
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
@@ -235,9 +234,6 @@
 !/validate-npm-package-name
 !/walk-up-path
 !/which
-!/which/node_modules/
-/which/node_modules/*
-!/which/node_modules/isexe
 !/wrap-ansi-cjs
 !/wrap-ansi-cjs/node_modules/
 /wrap-ansi-cjs/node_modules/*
diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json
deleted file mode 100644
index 9a57d80cd08fb..0000000000000
--- a/node_modules/binary-extensions/binary-extensions.json
+++ /dev/null
@@ -1,264 +0,0 @@
-[
-	"3dm",
-	"3ds",
-	"3g2",
-	"3gp",
-	"7z",
-	"a",
-	"aac",
-	"adp",
-	"afdesign",
-	"afphoto",
-	"afpub",
-	"ai",
-	"aif",
-	"aiff",
-	"alz",
-	"ape",
-	"apk",
-	"appimage",
-	"ar",
-	"arj",
-	"asf",
-	"au",
-	"avi",
-	"bak",
-	"baml",
-	"bh",
-	"bin",
-	"bk",
-	"bmp",
-	"btif",
-	"bz2",
-	"bzip2",
-	"cab",
-	"caf",
-	"cgm",
-	"class",
-	"cmx",
-	"cpio",
-	"cr2",
-	"cr3",
-	"cur",
-	"dat",
-	"dcm",
-	"deb",
-	"dex",
-	"djvu",
-	"dll",
-	"dmg",
-	"dng",
-	"doc",
-	"docm",
-	"docx",
-	"dot",
-	"dotm",
-	"dra",
-	"DS_Store",
-	"dsk",
-	"dts",
-	"dtshd",
-	"dvb",
-	"dwg",
-	"dxf",
-	"ecelp4800",
-	"ecelp7470",
-	"ecelp9600",
-	"egg",
-	"eol",
-	"eot",
-	"epub",
-	"exe",
-	"f4v",
-	"fbs",
-	"fh",
-	"fla",
-	"flac",
-	"flatpak",
-	"fli",
-	"flv",
-	"fpx",
-	"fst",
-	"fvt",
-	"g3",
-	"gh",
-	"gif",
-	"graffle",
-	"gz",
-	"gzip",
-	"h261",
-	"h263",
-	"h264",
-	"icns",
-	"ico",
-	"ief",
-	"img",
-	"ipa",
-	"iso",
-	"jar",
-	"jpeg",
-	"jpg",
-	"jpgv",
-	"jpm",
-	"jxr",
-	"key",
-	"ktx",
-	"lha",
-	"lib",
-	"lvp",
-	"lz",
-	"lzh",
-	"lzma",
-	"lzo",
-	"m3u",
-	"m4a",
-	"m4v",
-	"mar",
-	"mdi",
-	"mht",
-	"mid",
-	"midi",
-	"mj2",
-	"mka",
-	"mkv",
-	"mmr",
-	"mng",
-	"mobi",
-	"mov",
-	"movie",
-	"mp3",
-	"mp4",
-	"mp4a",
-	"mpeg",
-	"mpg",
-	"mpga",
-	"mxu",
-	"nef",
-	"npx",
-	"numbers",
-	"nupkg",
-	"o",
-	"odp",
-	"ods",
-	"odt",
-	"oga",
-	"ogg",
-	"ogv",
-	"otf",
-	"ott",
-	"pages",
-	"pbm",
-	"pcx",
-	"pdb",
-	"pdf",
-	"pea",
-	"pgm",
-	"pic",
-	"png",
-	"pnm",
-	"pot",
-	"potm",
-	"potx",
-	"ppa",
-	"ppam",
-	"ppm",
-	"pps",
-	"ppsm",
-	"ppsx",
-	"ppt",
-	"pptm",
-	"pptx",
-	"psd",
-	"pya",
-	"pyc",
-	"pyo",
-	"pyv",
-	"qt",
-	"rar",
-	"ras",
-	"raw",
-	"resources",
-	"rgb",
-	"rip",
-	"rlc",
-	"rmf",
-	"rmvb",
-	"rpm",
-	"rtf",
-	"rz",
-	"s3m",
-	"s7z",
-	"scpt",
-	"sgi",
-	"shar",
-	"snap",
-	"sil",
-	"sketch",
-	"slk",
-	"smv",
-	"snk",
-	"so",
-	"stl",
-	"suo",
-	"sub",
-	"swf",
-	"tar",
-	"tbz",
-	"tbz2",
-	"tga",
-	"tgz",
-	"thmx",
-	"tif",
-	"tiff",
-	"tlz",
-	"ttc",
-	"ttf",
-	"txz",
-	"udf",
-	"uvh",
-	"uvi",
-	"uvm",
-	"uvp",
-	"uvs",
-	"uvu",
-	"viv",
-	"vob",
-	"war",
-	"wav",
-	"wax",
-	"wbmp",
-	"wdp",
-	"weba",
-	"webm",
-	"webp",
-	"whl",
-	"wim",
-	"wm",
-	"wma",
-	"wmv",
-	"wmx",
-	"woff",
-	"woff2",
-	"wrm",
-	"wvx",
-	"xbm",
-	"xif",
-	"xla",
-	"xlam",
-	"xls",
-	"xlsb",
-	"xlsm",
-	"xlsx",
-	"xlt",
-	"xltm",
-	"xltx",
-	"xm",
-	"xmind",
-	"xpi",
-	"xpm",
-	"xwd",
-	"xz",
-	"z",
-	"zip",
-	"zipx"
-]
diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js
deleted file mode 100644
index 6c99c7eb54f17..0000000000000
--- a/node_modules/binary-extensions/index.js
+++ /dev/null
@@ -1,3 +0,0 @@
-import binaryExtensions from './binary-extensions.json' with {type: 'json'};
-
-export default binaryExtensions;
diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license
deleted file mode 100644
index 5493a1a6e3f9a..0000000000000
--- a/node_modules/binary-extensions/license
+++ /dev/null
@@ -1,10 +0,0 @@
-MIT License
-
-Copyright (c) Sindre Sorhus  (https://sindresorhus.com)
-Copyright (c) Paul Miller (https://paulmillr.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json
deleted file mode 100644
index abe49c2e9a34a..0000000000000
--- a/node_modules/binary-extensions/package.json
+++ /dev/null
@@ -1,45 +0,0 @@
-{
-	"name": "binary-extensions",
-	"version": "3.1.0",
-	"description": "List of binary file extensions",
-	"license": "MIT",
-	"repository": "sindresorhus/binary-extensions",
-	"funding": "https://github.com/sponsors/sindresorhus",
-	"author": {
-		"name": "Sindre Sorhus",
-		"email": "sindresorhus@gmail.com",
-		"url": "https://sindresorhus.com"
-	},
-	"type": "module",
-	"exports": {
-		"types": "./index.d.ts",
-		"default": "./index.js"
-	},
-	"sideEffects": false,
-	"engines": {
-		"node": ">=18.20"
-	},
-	"scripts": {
-		"//test": "xo && ava && tsd",
-		"test": "ava && tsd"
-	},
-	"files": [
-		"index.js",
-		"index.d.ts",
-		"binary-extensions.json"
-	],
-	"keywords": [
-		"binary",
-		"extensions",
-		"extension",
-		"file",
-		"json",
-		"list",
-		"array"
-	],
-	"devDependencies": {
-		"ava": "^6.1.2",
-		"tsd": "^0.31.0",
-		"xo": "^0.58.0"
-	}
-}
diff --git a/node_modules/cross-spawn/node_modules/isexe/LICENSE b/node_modules/cross-spawn/node_modules/isexe/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/cross-spawn/node_modules/isexe/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/cross-spawn/node_modules/isexe/index.js b/node_modules/cross-spawn/node_modules/isexe/index.js
new file mode 100644
index 0000000000000..553fb32b119bd
--- /dev/null
+++ b/node_modules/cross-spawn/node_modules/isexe/index.js
@@ -0,0 +1,57 @@
+var fs = require('fs')
+var core
+if (process.platform === 'win32' || global.TESTING_WINDOWS) {
+  core = require('./windows.js')
+} else {
+  core = require('./mode.js')
+}
+
+module.exports = isexe
+isexe.sync = sync
+
+function isexe (path, options, cb) {
+  if (typeof options === 'function') {
+    cb = options
+    options = {}
+  }
+
+  if (!cb) {
+    if (typeof Promise !== 'function') {
+      throw new TypeError('callback not provided')
+    }
+
+    return new Promise(function (resolve, reject) {
+      isexe(path, options || {}, function (er, is) {
+        if (er) {
+          reject(er)
+        } else {
+          resolve(is)
+        }
+      })
+    })
+  }
+
+  core(path, options || {}, function (er, is) {
+    // ignore EACCES because that just means we aren't allowed to run it
+    if (er) {
+      if (er.code === 'EACCES' || options && options.ignoreErrors) {
+        er = null
+        is = false
+      }
+    }
+    cb(er, is)
+  })
+}
+
+function sync (path, options) {
+  // my kingdom for a filtered catch
+  try {
+    return core.sync(path, options || {})
+  } catch (er) {
+    if (options && options.ignoreErrors || er.code === 'EACCES') {
+      return false
+    } else {
+      throw er
+    }
+  }
+}
diff --git a/node_modules/cross-spawn/node_modules/isexe/mode.js b/node_modules/cross-spawn/node_modules/isexe/mode.js
new file mode 100644
index 0000000000000..1995ea4a06aec
--- /dev/null
+++ b/node_modules/cross-spawn/node_modules/isexe/mode.js
@@ -0,0 +1,41 @@
+module.exports = isexe
+isexe.sync = sync
+
+var fs = require('fs')
+
+function isexe (path, options, cb) {
+  fs.stat(path, function (er, stat) {
+    cb(er, er ? false : checkStat(stat, options))
+  })
+}
+
+function sync (path, options) {
+  return checkStat(fs.statSync(path), options)
+}
+
+function checkStat (stat, options) {
+  return stat.isFile() && checkMode(stat, options)
+}
+
+function checkMode (stat, options) {
+  var mod = stat.mode
+  var uid = stat.uid
+  var gid = stat.gid
+
+  var myUid = options.uid !== undefined ?
+    options.uid : process.getuid && process.getuid()
+  var myGid = options.gid !== undefined ?
+    options.gid : process.getgid && process.getgid()
+
+  var u = parseInt('100', 8)
+  var g = parseInt('010', 8)
+  var o = parseInt('001', 8)
+  var ug = u | g
+
+  var ret = (mod & o) ||
+    (mod & g) && gid === myGid ||
+    (mod & u) && uid === myUid ||
+    (mod & ug) && myUid === 0
+
+  return ret
+}
diff --git a/node_modules/cross-spawn/node_modules/isexe/package.json b/node_modules/cross-spawn/node_modules/isexe/package.json
new file mode 100644
index 0000000000000..e452689442f20
--- /dev/null
+++ b/node_modules/cross-spawn/node_modules/isexe/package.json
@@ -0,0 +1,31 @@
+{
+  "name": "isexe",
+  "version": "2.0.0",
+  "description": "Minimal module to check if a file is executable.",
+  "main": "index.js",
+  "directories": {
+    "test": "test"
+  },
+  "devDependencies": {
+    "mkdirp": "^0.5.1",
+    "rimraf": "^2.5.0",
+    "tap": "^10.3.0"
+  },
+  "scripts": {
+    "test": "tap test/*.js --100",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --all; git push origin --tags"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/isexe.git"
+  },
+  "keywords": [],
+  "bugs": {
+    "url": "https://github.com/isaacs/isexe/issues"
+  },
+  "homepage": "https://github.com/isaacs/isexe#readme"
+}
diff --git a/node_modules/cross-spawn/node_modules/isexe/test/basic.js b/node_modules/cross-spawn/node_modules/isexe/test/basic.js
new file mode 100644
index 0000000000000..d926df64b9024
--- /dev/null
+++ b/node_modules/cross-spawn/node_modules/isexe/test/basic.js
@@ -0,0 +1,221 @@
+var t = require('tap')
+var fs = require('fs')
+var path = require('path')
+var fixture = path.resolve(__dirname, 'fixtures')
+var meow = fixture + '/meow.cat'
+var mine = fixture + '/mine.cat'
+var ours = fixture + '/ours.cat'
+var fail = fixture + '/fail.false'
+var noent = fixture + '/enoent.exe'
+var mkdirp = require('mkdirp')
+var rimraf = require('rimraf')
+
+var isWindows = process.platform === 'win32'
+var hasAccess = typeof fs.access === 'function'
+var winSkip = isWindows && 'windows'
+var accessSkip = !hasAccess && 'no fs.access function'
+var hasPromise = typeof Promise === 'function'
+var promiseSkip = !hasPromise && 'no global Promise'
+
+function reset () {
+  delete require.cache[require.resolve('../')]
+  return require('../')
+}
+
+t.test('setup fixtures', function (t) {
+  rimraf.sync(fixture)
+  mkdirp.sync(fixture)
+  fs.writeFileSync(meow, '#!/usr/bin/env cat\nmeow\n')
+  fs.chmodSync(meow, parseInt('0755', 8))
+  fs.writeFileSync(fail, '#!/usr/bin/env false\n')
+  fs.chmodSync(fail, parseInt('0644', 8))
+  fs.writeFileSync(mine, '#!/usr/bin/env cat\nmine\n')
+  fs.chmodSync(mine, parseInt('0744', 8))
+  fs.writeFileSync(ours, '#!/usr/bin/env cat\nours\n')
+  fs.chmodSync(ours, parseInt('0754', 8))
+  t.end()
+})
+
+t.test('promise', { skip: promiseSkip }, function (t) {
+  var isexe = reset()
+  t.test('meow async', function (t) {
+    isexe(meow).then(function (is) {
+      t.ok(is)
+      t.end()
+    })
+  })
+  t.test('fail async', function (t) {
+    isexe(fail).then(function (is) {
+      t.notOk(is)
+      t.end()
+    })
+  })
+  t.test('noent async', function (t) {
+    isexe(noent).catch(function (er) {
+      t.ok(er)
+      t.end()
+    })
+  })
+  t.test('noent ignore async', function (t) {
+    isexe(noent, { ignoreErrors: true }).then(function (is) {
+      t.notOk(is)
+      t.end()
+    })
+  })
+  t.end()
+})
+
+t.test('no promise', function (t) {
+  global.Promise = null
+  var isexe = reset()
+  t.throws('try to meow a promise', function () {
+    isexe(meow)
+  })
+  t.end()
+})
+
+t.test('access', { skip: accessSkip || winSkip }, function (t) {
+  runTest(t)
+})
+
+t.test('mode', { skip: winSkip }, function (t) {
+  delete fs.access
+  delete fs.accessSync
+  var isexe = reset()
+  t.ok(isexe.sync(ours, { uid: 0, gid: 0 }))
+  t.ok(isexe.sync(mine, { uid: 0, gid: 0 }))
+  runTest(t)
+})
+
+t.test('windows', function (t) {
+  global.TESTING_WINDOWS = true
+  var pathExt = '.EXE;.CAT;.CMD;.COM'
+  t.test('pathExt option', function (t) {
+    runTest(t, { pathExt: '.EXE;.CAT;.CMD;.COM' })
+  })
+  t.test('pathExt env', function (t) {
+    process.env.PATHEXT = pathExt
+    runTest(t)
+  })
+  t.test('no pathExt', function (t) {
+    // with a pathExt of '', any filename is fine.
+    // so the "fail" one would still pass.
+    runTest(t, { pathExt: '', skipFail: true })
+  })
+  t.test('pathext with empty entry', function (t) {
+    // with a pathExt of '', any filename is fine.
+    // so the "fail" one would still pass.
+    runTest(t, { pathExt: ';' + pathExt, skipFail: true })
+  })
+  t.end()
+})
+
+t.test('cleanup', function (t) {
+  rimraf.sync(fixture)
+  t.end()
+})
+
+function runTest (t, options) {
+  var isexe = reset()
+
+  var optionsIgnore = Object.create(options || {})
+  optionsIgnore.ignoreErrors = true
+
+  if (!options || !options.skipFail) {
+    t.notOk(isexe.sync(fail, options))
+  }
+  t.notOk(isexe.sync(noent, optionsIgnore))
+  if (!options) {
+    t.ok(isexe.sync(meow))
+  } else {
+    t.ok(isexe.sync(meow, options))
+  }
+
+  t.ok(isexe.sync(mine, options))
+  t.ok(isexe.sync(ours, options))
+  t.throws(function () {
+    isexe.sync(noent, options)
+  })
+
+  t.test('meow async', function (t) {
+    if (!options) {
+      isexe(meow, function (er, is) {
+        if (er) {
+          throw er
+        }
+        t.ok(is)
+        t.end()
+      })
+    } else {
+      isexe(meow, options, function (er, is) {
+        if (er) {
+          throw er
+        }
+        t.ok(is)
+        t.end()
+      })
+    }
+  })
+
+  t.test('mine async', function (t) {
+    isexe(mine, options, function (er, is) {
+      if (er) {
+        throw er
+      }
+      t.ok(is)
+      t.end()
+    })
+  })
+
+  t.test('ours async', function (t) {
+    isexe(ours, options, function (er, is) {
+      if (er) {
+        throw er
+      }
+      t.ok(is)
+      t.end()
+    })
+  })
+
+  if (!options || !options.skipFail) {
+    t.test('fail async', function (t) {
+      isexe(fail, options, function (er, is) {
+        if (er) {
+          throw er
+        }
+        t.notOk(is)
+        t.end()
+      })
+    })
+  }
+
+  t.test('noent async', function (t) {
+    isexe(noent, options, function (er, is) {
+      t.ok(er)
+      t.notOk(is)
+      t.end()
+    })
+  })
+
+  t.test('noent ignore async', function (t) {
+    isexe(noent, optionsIgnore, function (er, is) {
+      if (er) {
+        throw er
+      }
+      t.notOk(is)
+      t.end()
+    })
+  })
+
+  t.test('directory is not executable', function (t) {
+    isexe(__dirname, options, function (er, is) {
+      if (er) {
+        throw er
+      }
+      t.notOk(is)
+      t.end()
+    })
+  })
+
+  t.end()
+}
diff --git a/node_modules/cross-spawn/node_modules/isexe/windows.js b/node_modules/cross-spawn/node_modules/isexe/windows.js
new file mode 100644
index 0000000000000..34996734d8ef3
--- /dev/null
+++ b/node_modules/cross-spawn/node_modules/isexe/windows.js
@@ -0,0 +1,42 @@
+module.exports = isexe
+isexe.sync = sync
+
+var fs = require('fs')
+
+function checkPathExt (path, options) {
+  var pathext = options.pathExt !== undefined ?
+    options.pathExt : process.env.PATHEXT
+
+  if (!pathext) {
+    return true
+  }
+
+  pathext = pathext.split(';')
+  if (pathext.indexOf('') !== -1) {
+    return true
+  }
+  for (var i = 0; i < pathext.length; i++) {
+    var p = pathext[i].toLowerCase()
+    if (p && path.substr(-p.length).toLowerCase() === p) {
+      return true
+    }
+  }
+  return false
+}
+
+function checkStat (stat, path, options) {
+  if (!stat.isSymbolicLink() && !stat.isFile()) {
+    return false
+  }
+  return checkPathExt(path, options)
+}
+
+function isexe (path, options, cb) {
+  fs.stat(path, function (er, stat) {
+    cb(er, er ? false : checkStat(stat, path, options))
+  })
+}
+
+function sync (path, options) {
+  return checkStat(fs.statSync(path), path, options)
+}
diff --git a/package-lock.json b/package-lock.json
index 5f1552405084e..84e98ec4ffc07 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -206,1577 +206,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "docs/node_modules/@types/hast": {
-      "version": "2.3.10",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
-    "docs/node_modules/@types/hast/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
-    "docs/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/escape-string-regexp": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=12"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
-    "docs/node_modules/github-slugger": {
-      "version": "1.5.0",
-      "dev": true,
-      "license": "ISC"
-    },
-    "docs/node_modules/hast-util-to-html": {
-      "version": "8.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/unist": "^2.0.0",
-        "ccount": "^2.0.0",
-        "comma-separated-tokens": "^2.0.0",
-        "hast-util-raw": "^7.0.0",
-        "hast-util-whitespace": "^2.0.0",
-        "html-void-elements": "^2.0.0",
-        "property-information": "^6.0.0",
-        "space-separated-tokens": "^2.0.0",
-        "stringify-entities": "^4.0.0",
-        "zwitch": "^2.0.4"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/hast-util-to-html/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/hast-util-whitespace": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/html-void-elements": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "funding": {
-        "type": "github",
-        "url": "https://github.com/sponsors/wooorm"
-      }
-    },
-    "docs/node_modules/jsdom": {
-      "version": "24.1.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "cssstyle": "^4.0.1",
-        "data-urls": "^5.0.0",
-        "decimal.js": "^10.4.3",
-        "form-data": "^4.0.0",
-        "html-encoding-sniffer": "^4.0.0",
-        "http-proxy-agent": "^7.0.2",
-        "https-proxy-agent": "^7.0.5",
-        "is-potential-custom-element-name": "^1.0.1",
-        "nwsapi": "^2.2.12",
-        "parse5": "^7.1.2",
-        "rrweb-cssom": "^0.7.1",
-        "saxes": "^6.0.0",
-        "symbol-tree": "^3.2.4",
-        "tough-cookie": "^4.1.4",
-        "w3c-xmlserializer": "^5.0.0",
-        "webidl-conversions": "^7.0.0",
-        "whatwg-encoding": "^3.1.1",
-        "whatwg-mimetype": "^4.0.0",
-        "whatwg-url": "^14.0.0",
-        "ws": "^8.18.0",
-        "xml-name-validator": "^5.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      },
-      "peerDependencies": {
-        "canvas": "^2.11.2"
-      },
-      "peerDependenciesMeta": {
-        "canvas": {
-          "optional": true
-        }
-      }
-    },
-    "docs/node_modules/mdast-util-definitions": {
-      "version": "5.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "unist-util-visit": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-definitions/node_modules/@types/mdast": {
-      "version": "3.0.15",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
-    "docs/node_modules/mdast-util-definitions/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/mdast-util-definitions/node_modules/unist-util-is": {
-      "version": "5.2.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-definitions/node_modules/unist-util-visit": {
-      "version": "4.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-find-and-replace": {
-      "version": "3.0.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "escape-string-regexp": "^5.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-from-markdown": {
-      "version": "2.0.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "@types/unist": "^3.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "mdast-util-to-string": "^4.0.0",
-        "micromark": "^4.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-decode-string": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-gfm": {
-      "version": "3.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "mdast-util-from-markdown": "^2.0.0",
-        "mdast-util-gfm-autolink-literal": "^2.0.0",
-        "mdast-util-gfm-footnote": "^2.0.0",
-        "mdast-util-gfm-strikethrough": "^2.0.0",
-        "mdast-util-gfm-table": "^2.0.0",
-        "mdast-util-gfm-task-list-item": "^2.0.0",
-        "mdast-util-to-markdown": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-gfm-autolink-literal": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "ccount": "^2.0.0",
-        "devlop": "^1.0.0",
-        "mdast-util-find-and-replace": "^3.0.0",
-        "micromark-util-character": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-gfm-footnote": {
-      "version": "2.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "devlop": "^1.1.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "mdast-util-to-markdown": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-gfm-strikethrough": {
-      "version": "2.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "mdast-util-to-markdown": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-gfm-table": {
-      "version": "2.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "devlop": "^1.0.0",
-        "markdown-table": "^3.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "mdast-util-to-markdown": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-gfm-task-list-item": {
-      "version": "2.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "devlop": "^1.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "mdast-util-to-markdown": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-phrasing": {
-      "version": "4.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast": {
-      "version": "12.3.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/mdast": "^3.0.0",
-        "mdast-util-definitions": "^5.0.0",
-        "micromark-util-sanitize-uri": "^1.1.0",
-        "trim-lines": "^3.0.0",
-        "unist-util-generated": "^2.0.0",
-        "unist-util-position": "^4.0.0",
-        "unist-util-visit": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/@types/mdast": {
-      "version": "3.0.15",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-character": {
-      "version": "1.2.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-encode": {
-      "version": "1.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-sanitize-uri": {
-      "version": "1.2.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-encode": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-symbol": {
-      "version": "1.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/micromark-util-types": {
-      "version": "1.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/unist-util-is": {
-      "version": "5.2.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
-      "version": "4.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-to-markdown": {
-      "version": "2.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "@types/unist": "^3.0.0",
-        "longest-streak": "^3.0.0",
-        "mdast-util-phrasing": "^4.0.0",
-        "mdast-util-to-string": "^4.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-decode-string": "^2.0.0",
-        "unist-util-visit": "^5.0.0",
-        "zwitch": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark": {
-      "version": "4.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "@types/debug": "^4.0.0",
-        "debug": "^4.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-core-commonmark": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-combine-extensions": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
-        "micromark-util-subtokenize": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-core-commonmark": {
-      "version": "2.0.3",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-factory-destination": "^2.0.0",
-        "micromark-factory-label": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-factory-title": "^2.0.0",
-        "micromark-factory-whitespace": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-html-tag-name": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-subtokenize": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm": {
-      "version": "3.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "micromark-extension-gfm-autolink-literal": "^2.0.0",
-        "micromark-extension-gfm-footnote": "^2.0.0",
-        "micromark-extension-gfm-strikethrough": "^2.0.0",
-        "micromark-extension-gfm-table": "^2.0.0",
-        "micromark-extension-gfm-tagfilter": "^2.0.0",
-        "micromark-extension-gfm-task-list-item": "^2.0.0",
-        "micromark-util-combine-extensions": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm-autolink-literal": {
-      "version": "2.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm-footnote": {
-      "version": "2.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-core-commonmark": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm-strikethrough": {
-      "version": "2.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm-table": {
-      "version": "2.1.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm-tagfilter": {
-      "version": "2.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-extension-gfm-task-list-item": {
-      "version": "2.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/micromark-factory-destination": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-factory-label": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-factory-space": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-factory-title": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-factory-whitespace": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-chunked": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-classify-character": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-combine-extensions": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-decode-numeric-character-reference": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-decode-string": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-encode": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/micromark-util-html-tag-name": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/micromark-util-normalize-identifier": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-resolve-all": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-sanitize-uri": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-subtokenize": {
-      "version": "2.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "docs/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "docs/node_modules/rehype-stringify": {
-      "version": "9.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/hast": "^2.0.0",
-        "hast-util-to-html": "^8.0.0",
-        "unified": "^10.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/rehype-stringify/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/rehype-stringify/node_modules/unified": {
-      "version": "10.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "bail": "^2.0.0",
-        "extend": "^3.0.0",
-        "is-buffer": "^2.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/rehype-stringify/node_modules/unist-util-stringify-position": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/rehype-stringify/node_modules/vfile": {
-      "version": "5.3.7",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "is-buffer": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0",
-        "vfile-message": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/rehype-stringify/node_modules/vfile-message": {
-      "version": "3.1.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-gfm": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-gfm": "^3.0.0",
-        "micromark-extension-gfm": "^3.0.0",
-        "remark-parse": "^11.0.0",
-        "remark-stringify": "^11.0.0",
-        "unified": "^11.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man": {
-      "version": "8.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "github-slugger": "^1.0.0",
-        "groff-escape": "^2.0.0",
-        "mdast-util-definitions": "^5.0.0",
-        "mdast-util-to-string": "^3.0.0",
-        "months": "^2.0.0",
-        "unified": "^10.0.0",
-        "unist-util-visit": "^4.0.0",
-        "zwitch": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/@types/mdast": {
-      "version": "3.0.15",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/remark-man/node_modules/mdast-util-to-string": {
-      "version": "3.2.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/unified": {
-      "version": "10.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "bail": "^2.0.0",
-        "extend": "^3.0.0",
-        "is-buffer": "^2.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/unist-util-is": {
-      "version": "5.2.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/unist-util-stringify-position": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/unist-util-visit": {
-      "version": "4.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/vfile": {
-      "version": "5.3.7",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "is-buffer": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0",
-        "vfile-message": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-man/node_modules/vfile-message": {
-      "version": "3.1.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-parse": {
-      "version": "11.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
-        "unified": "^11.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-rehype": {
-      "version": "10.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-hast": "^12.1.0",
-        "unified": "^10.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-rehype/node_modules/@types/mdast": {
-      "version": "3.0.15",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
-    "docs/node_modules/remark-rehype/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/remark-rehype/node_modules/unified": {
-      "version": "10.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "bail": "^2.0.0",
-        "extend": "^3.0.0",
-        "is-buffer": "^2.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-rehype/node_modules/unist-util-stringify-position": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-rehype/node_modules/vfile": {
-      "version": "5.3.7",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "is-buffer": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0",
-        "vfile-message": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-rehype/node_modules/vfile-message": {
-      "version": "3.1.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/remark-stringify": {
-      "version": "11.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-to-markdown": "^2.0.0",
-        "unified": "^11.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/tough-cookie": {
-      "version": "4.1.4",
-      "dev": true,
-      "license": "BSD-3-Clause",
-      "dependencies": {
-        "psl": "^1.1.33",
-        "punycode": "^2.1.1",
-        "universalify": "^0.2.0",
-        "url-parse": "^1.5.3"
-      },
-      "engines": {
-        "node": ">=6"
-      }
-    },
-    "docs/node_modules/tr46": {
-      "version": "5.1.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "punycode": "^2.3.1"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "docs/node_modules/unified": {
-      "version": "11.0.5",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/unist-util-position": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/unist-util-position/node_modules/@types/unist": {
-      "version": "2.0.11",
-      "dev": true,
-      "license": "MIT"
-    },
-    "docs/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/unist-util-visit": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "docs/node_modules/webidl-conversions": {
-      "version": "7.0.0",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "docs/node_modules/whatwg-url": {
-      "version": "14.2.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "tr46": "^5.1.0",
-        "webidl-conversions": "^7.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "mock-globals": {
       "name": "@npmcli/mock-globals",
       "version": "1.0.0",
@@ -1810,6 +239,8 @@
     },
     "node_modules/@actions/core": {
       "version": "1.11.1",
+      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
+      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1819,6 +250,8 @@
     },
     "node_modules/@actions/exec": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
+      "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1827,6 +260,8 @@
     },
     "node_modules/@actions/http-client": {
       "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz",
+      "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1836,6 +271,8 @@
     },
     "node_modules/@actions/http-client/node_modules/undici": {
       "version": "5.29.0",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
+      "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1847,11 +284,15 @@
     },
     "node_modules/@actions/io": {
       "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
+      "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@asamuzakjp/css-color": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.2.0.tgz",
+      "integrity": "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1871,6 +312,8 @@
     },
     "node_modules/@babel/code-frame": {
       "version": "7.27.1",
+      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
+      "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1898,7 +341,6 @@
       "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@babel/code-frame": "^7.27.1",
         "@babel/generator": "^7.28.3",
@@ -1926,11 +368,15 @@
     },
     "node_modules/@babel/core/node_modules/convert-source-map": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
+      "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@babel/core/node_modules/semver": {
       "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -1956,6 +402,8 @@
     },
     "node_modules/@babel/helper-compilation-targets": {
       "version": "7.27.2",
+      "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
+      "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1971,6 +419,8 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": {
       "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
+      "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -1979,6 +429,8 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/semver": {
       "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -1987,11 +439,15 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/yallist": {
       "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
+      "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@babel/helper-globals": {
       "version": "7.28.0",
+      "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
+      "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2000,6 +456,8 @@
     },
     "node_modules/@babel/helper-module-imports": {
       "version": "7.27.1",
+      "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
+      "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2030,6 +488,8 @@
     },
     "node_modules/@babel/helper-string-parser": {
       "version": "7.27.1",
+      "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
+      "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2038,6 +498,8 @@
     },
     "node_modules/@babel/helper-validator-identifier": {
       "version": "7.27.1",
+      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
+      "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2046,6 +508,8 @@
     },
     "node_modules/@babel/helper-validator-option": {
       "version": "7.27.1",
+      "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
+      "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2084,6 +548,8 @@
     },
     "node_modules/@babel/template": {
       "version": "7.27.2",
+      "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
+      "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2130,6 +596,8 @@
     },
     "node_modules/@colors/colors": {
       "version": "1.5.0",
+      "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
+      "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==",
       "dev": true,
       "license": "MIT",
       "optional": true,
@@ -2139,6 +607,8 @@
     },
     "node_modules/@commitlint/cli": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-19.8.1.tgz",
+      "integrity": "sha512-LXUdNIkspyxrlV6VDHWBmCZRtkEVRpBKxi2Gtw3J54cGWhLCTouVD/Q6ZSaSvd2YaDObWK8mDjrz3TIKtaQMAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2159,6 +629,8 @@
     },
     "node_modules/@commitlint/config-conventional": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-19.8.1.tgz",
+      "integrity": "sha512-/AZHJL6F6B/G959CsMAzrPKKZjeEiAVifRyEwXxcT6qtqbPwGw+iQxmNS+Bu+i09OCtdNRW6pNpBvgPrtMr9EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2171,6 +643,8 @@
     },
     "node_modules/@commitlint/config-validator": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-19.8.1.tgz",
+      "integrity": "sha512-0jvJ4u+eqGPBIzzSdqKNX1rvdbSU1lPNYlfQQRIFnBgLy26BtC0cFnr7c/AyuzExMxWsMOte6MkTi9I3SQ3iGQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2183,6 +657,8 @@
     },
     "node_modules/@commitlint/ensure": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-19.8.1.tgz",
+      "integrity": "sha512-mXDnlJdvDzSObafjYrOSvZBwkD01cqB4gbnnFuVyNpGUM5ijwU/r/6uqUmBXAAOKRfyEjpkGVZxaDsCVnHAgyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2199,6 +675,8 @@
     },
     "node_modules/@commitlint/execute-rule": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-19.8.1.tgz",
+      "integrity": "sha512-YfJyIqIKWI64Mgvn/sE7FXvVMQER/Cd+s3hZke6cI1xgNT/f6ZAz5heND0QtffH+KbcqAwXDEE1/5niYayYaQA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2207,6 +685,8 @@
     },
     "node_modules/@commitlint/format": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-19.8.1.tgz",
+      "integrity": "sha512-kSJj34Rp10ItP+Eh9oCItiuN/HwGQMXBnIRk69jdOwEW9llW9FlyqcWYbHPSGofmjsqeoxa38UaEA5tsbm2JWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2219,6 +699,8 @@
     },
     "node_modules/@commitlint/is-ignored": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-19.8.1.tgz",
+      "integrity": "sha512-AceOhEhekBUQ5dzrVhDDsbMaY5LqtN8s1mqSnT2Kz1ERvVZkNihrs3Sfk1Je/rxRNbXYFzKZSHaPsEJJDJV8dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2231,6 +713,8 @@
     },
     "node_modules/@commitlint/lint": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-19.8.1.tgz",
+      "integrity": "sha512-52PFbsl+1EvMuokZXLRlOsdcLHf10isTPlWwoY1FQIidTsTvjKXVXYb7AvtpWkDzRO2ZsqIgPK7bI98x8LRUEw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2245,6 +729,8 @@
     },
     "node_modules/@commitlint/load": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-19.8.1.tgz",
+      "integrity": "sha512-9V99EKG3u7z+FEoe4ikgq7YGRCSukAcvmKQuTtUyiYPnOd9a2/H9Ak1J9nJA1HChRQp9OA/sIKPugGS+FK/k1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2265,6 +751,8 @@
     },
     "node_modules/@commitlint/message": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-19.8.1.tgz",
+      "integrity": "sha512-+PMLQvjRXiU+Ae0Wc+p99EoGEutzSXFVwQfa3jRNUZLNW5odZAyseb92OSBTKCu+9gGZiJASt76Cj3dLTtcTdg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2273,6 +761,8 @@
     },
     "node_modules/@commitlint/parse": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-19.8.1.tgz",
+      "integrity": "sha512-mmAHYcMBmAgJDKWdkjIGq50X4yB0pSGpxyOODwYmoexxxiUCy5JJT99t1+PEMK7KtsCtzuWYIAXYAiKR+k+/Jw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2286,6 +776,8 @@
     },
     "node_modules/@commitlint/read": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/read/-/read-19.8.1.tgz",
+      "integrity": "sha512-03Jbjb1MqluaVXKHKRuGhcKWtSgh3Jizqy2lJCRbRrnWpcM06MYm8th59Xcns8EqBYvo0Xqb+2DoZFlga97uXQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2301,6 +793,8 @@
     },
     "node_modules/@commitlint/resolve-extends": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-19.8.1.tgz",
+      "integrity": "sha512-GM0mAhFk49I+T/5UCYns5ayGStkTt4XFFrjjf0L4S26xoMTSkdCf9ZRO8en1kuopC4isDFuEm7ZOm/WRVeElVg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2317,6 +811,8 @@
     },
     "node_modules/@commitlint/rules": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-19.8.1.tgz",
+      "integrity": "sha512-Hnlhd9DyvGiGwjfjfToMi1dsnw1EXKGJNLTcsuGORHz6SS9swRgkBsou33MQ2n51/boIDrbsg4tIBbRpEWK2kw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2331,6 +827,8 @@
     },
     "node_modules/@commitlint/to-lines": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-19.8.1.tgz",
+      "integrity": "sha512-98Mm5inzbWTKuZQr2aW4SReY6WUukdWXuZhrqf1QdKPZBCCsXuG87c+iP0bwtD6DBnmVVQjgp4whoHRVixyPBg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2339,6 +837,8 @@
     },
     "node_modules/@commitlint/top-level": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-19.8.1.tgz",
+      "integrity": "sha512-Ph8IN1IOHPSDhURCSXBz44+CIu+60duFwRsg6HqaISFHQHbmBtxVw4ZrFNIYUzEP7WwrNPxa2/5qJ//NK1FGcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2350,6 +850,8 @@
     },
     "node_modules/@commitlint/types": {
       "version": "19.8.1",
+      "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-19.8.1.tgz",
+      "integrity": "sha512-/yCrWGCoA1SVKOks25EGadP9Pnj0oAIHGpl2wH2M2Y46dPM2ueb8wyCVOD7O3WCTkaJ0IkKvzhl1JY7+uCT2Dw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2362,6 +864,8 @@
     },
     "node_modules/@conventional-commits/parser": {
       "version": "0.4.1",
+      "resolved": "https://registry.npmjs.org/@conventional-commits/parser/-/parser-0.4.1.tgz",
+      "integrity": "sha512-H2ZmUVt6q+KBccXfMBhbBF14NlANeqHTXL4qCL6QGbMzrc4HDXyzWuxPxPNbz71f/5UkR5DrycP5VO9u7crahg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2391,6 +895,8 @@
     },
     "node_modules/@csstools/css-calc": {
       "version": "2.1.4",
+      "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
+      "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
       "dev": true,
       "funding": [
         {
@@ -2441,6 +947,8 @@
     },
     "node_modules/@csstools/css-parser-algorithms": {
       "version": "3.0.5",
+      "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
+      "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
       "dev": true,
       "funding": [
         {
@@ -2453,7 +961,6 @@
         }
       ],
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=18"
       },
@@ -2463,6 +970,8 @@
     },
     "node_modules/@csstools/css-tokenizer": {
       "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
+      "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
       "dev": true,
       "funding": [
         {
@@ -2475,7 +984,6 @@
         }
       ],
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=18"
       }
@@ -2486,6 +994,7 @@
       "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "eslint-visitor-keys": "^3.4.3"
       },
@@ -2501,16 +1010,22 @@
     },
     "node_modules/@eslint-community/regexpp": {
       "version": "4.12.1",
+      "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
+      "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
       }
     },
     "node_modules/@eslint/eslintrc": {
       "version": "2.1.4",
+      "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
+      "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "ajv": "^6.12.4",
         "debug": "^4.3.2",
@@ -2531,8 +1046,11 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/ajv": {
       "version": "6.12.6",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "fast-deep-equal": "^3.1.1",
         "fast-json-stable-stringify": "^2.0.0",
@@ -2546,8 +1064,11 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -2555,13 +1076,19 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": {
       "version": "0.4.1",
+      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/@eslint/eslintrc/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -2571,14 +1098,19 @@
     },
     "node_modules/@eslint/js": {
       "version": "8.57.1",
+      "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz",
+      "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
       }
     },
     "node_modules/@fastify/busboy": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
+      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2587,6 +1119,8 @@
     },
     "node_modules/@google-automations/git-file-utils": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-2.0.0.tgz",
+      "integrity": "sha512-F6h8npq7rt60fr3W+cil/zXbIiF9Hj8JzaN3LNh7uBIJpsWnjL9ObV84qW/345boMheDdo/n+cItmvCfsn0lLA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -2600,6 +1134,8 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/minimatch": {
       "version": "5.1.6",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
+      "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2611,8 +1147,12 @@
     },
     "node_modules/@humanwhocodes/config-array": {
       "version": "0.13.0",
+      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz",
+      "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==",
+      "deprecated": "Use @eslint/config-array instead",
       "dev": true,
       "license": "Apache-2.0",
+      "peer": true,
       "dependencies": {
         "@humanwhocodes/object-schema": "^2.0.3",
         "debug": "^4.3.1",
@@ -2624,8 +1164,11 @@
     },
     "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -2633,8 +1176,11 @@
     },
     "node_modules/@humanwhocodes/config-array/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -2644,8 +1190,11 @@
     },
     "node_modules/@humanwhocodes/module-importer": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
+      "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
       "dev": true,
       "license": "Apache-2.0",
+      "peer": true,
       "engines": {
         "node": ">=12.22"
       },
@@ -2656,11 +1205,17 @@
     },
     "node_modules/@humanwhocodes/object-schema": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
+      "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
+      "deprecated": "Use @eslint/object-schema instead",
       "dev": true,
-      "license": "BSD-3-Clause"
+      "license": "BSD-3-Clause",
+      "peer": true
     },
     "node_modules/@iarna/toml": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-3.0.0.tgz",
+      "integrity": "sha512-td6ZUkz2oS3VeleBcN+m//Q6HlCFCPrnI0FZhrt/h4XqLEdOyYp2u21nd8MdsR+WJy5r9PTDaHTDDfhf4H4l6Q==",
       "dev": true,
       "license": "ISC"
     },
@@ -2685,8 +1240,6 @@
     },
     "node_modules/@isaacs/cliui": {
       "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz",
-      "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -2703,8 +1256,6 @@
     },
     "node_modules/@isaacs/cliui/node_modules/ansi-regex": {
       "version": "6.2.2",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
-      "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -2716,15 +1267,11 @@
     },
     "node_modules/@isaacs/cliui/node_modules/emoji-regex": {
       "version": "9.2.2",
-      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
-      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/@isaacs/cliui/node_modules/string-width": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
-      "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -2741,8 +1288,6 @@
     },
     "node_modules/@isaacs/cliui/node_modules/strip-ansi": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
-      "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -2773,6 +1318,8 @@
     },
     "node_modules/@istanbuljs/load-nyc-config": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
+      "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -2788,26 +1335,18 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": {
       "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "sprintf-js": "~1.0.2"
       }
     },
-    "node_modules/@istanbuljs/load-nyc-config/node_modules/esprima": {
-      "version": "4.0.1",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "bin": {
-        "esparse": "bin/esparse.js",
-        "esvalidate": "bin/esvalidate.js"
-      },
-      "engines": {
-        "node": ">=4"
-      }
-    },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2820,6 +1359,8 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": {
       "version": "3.14.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
+      "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2832,6 +1373,8 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2843,6 +1386,8 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": {
       "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2857,6 +1402,8 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2868,19 +1415,18 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/path-exists": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">=8"
       }
     },
-    "node_modules/@istanbuljs/load-nyc-config/node_modules/sprintf-js": {
-      "version": "1.0.3",
-      "dev": true,
-      "license": "BSD-3-Clause"
-    },
     "node_modules/@istanbuljs/schema": {
       "version": "0.1.3",
+      "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
+      "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2911,6 +1457,8 @@
     },
     "node_modules/@jridgewell/resolve-uri": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
+      "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2937,6 +1485,8 @@
     },
     "node_modules/@jsep-plugin/assignment": {
       "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/@jsep-plugin/assignment/-/assignment-1.3.0.tgz",
+      "integrity": "sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2948,6 +1498,8 @@
     },
     "node_modules/@jsep-plugin/regex": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/@jsep-plugin/regex/-/regex-1.0.4.tgz",
+      "integrity": "sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2959,8 +1511,11 @@
     },
     "node_modules/@nodelib/fs.scandir": {
       "version": "2.1.5",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
+      "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@nodelib/fs.stat": "2.0.5",
         "run-parallel": "^1.1.9"
@@ -2971,16 +1526,22 @@
     },
     "node_modules/@nodelib/fs.stat": {
       "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
+      "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 8"
       }
     },
     "node_modules/@nodelib/fs.walk": {
       "version": "1.2.8",
+      "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
+      "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@nodelib/fs.scandir": "2.1.5",
         "fastq": "^1.6.0"
@@ -2991,8 +1552,6 @@
     },
     "node_modules/@npmcli/agent": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-4.0.0.tgz",
-      "integrity": "sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3020,6 +1579,8 @@
     },
     "node_modules/@npmcli/eslint-config": {
       "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/eslint-config/-/eslint-config-5.1.0.tgz",
+      "integrity": "sha512-L4FAYndvARxkbTBNbsbDDkArIf8A8WmTFGVKdevJ3jd9nPzDKWiuC9TW0QtEnRsFHr5IX7G6qkRLK+drLIGoEA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3153,8 +1714,6 @@
     },
     "node_modules/@npmcli/promise-spawn": {
       "version": "8.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz",
-      "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -3204,6 +1763,8 @@
     },
     "node_modules/@npmcli/template-oss": {
       "version": "4.24.4",
+      "resolved": "https://registry.npmjs.org/@npmcli/template-oss/-/template-oss-4.24.4.tgz",
+      "integrity": "sha512-NF6SQC2wjBTft7RM9YaILf8dSum5cjQCDnsOlQYdarNQJSxKqaePKpOEYSsy6crjz3TfZ/jrAd0M4pLT/VGc/w==",
       "dev": true,
       "hasInstallScript": true,
       "license": "ISC",
@@ -3251,6 +1812,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/agent": {
       "version": "2.2.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz",
+      "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3266,6 +1829,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist": {
       "version": "7.5.4",
+      "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-7.5.4.tgz",
+      "integrity": "sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3314,6 +1879,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/git": {
       "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
+      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3333,6 +1900,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/map-workspaces": {
       "version": "3.0.6",
+      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz",
+      "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3345,8 +1914,20 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/name-from-folder": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz",
+      "integrity": "sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
+      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3362,8 +1943,23 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/promise-spawn": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
+      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "which": "^4.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/hosted-git-info": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3375,14 +1971,28 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/ini": {
       "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
+      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/json-parse-even-better-errors": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
+      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/npm-package-arg": {
       "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3395,104 +2005,99 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/validate-npm-package-name": {
-      "version": "5.0.1",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
-      "version": "3.1.1",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/validate-npm-package-name": {
+      "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
-      "dependencies": {
-        "semver": "^7.3.5"
-      },
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git": {
-      "version": "6.0.3",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/which": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
+      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "@npmcli/promise-spawn": "^8.0.0",
-        "ini": "^5.0.0",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^10.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^5.0.0"
+        "isexe": "^3.1.1"
       },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/@npmcli/promise-spawn": {
-      "version": "8.0.3",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "which": "^5.0.0"
+      "bin": {
+        "node-which": "bin/which.js"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-install-checks": {
-      "version": "7.1.2",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
+      "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
+      "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==",
       "dev": true,
-      "license": "BSD-2-Clause",
+      "license": "ISC",
       "dependencies": {
-        "semver": "^7.1.1"
+        "semver": "^7.3.5"
       },
       "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-normalize-package-bin": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
-      "version": "10.0.0",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
+      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
-        "semver": "^7.3.5"
+        "@npmcli/promise-spawn": "^8.0.0",
+        "ini": "^5.0.0",
+        "lru-cache": "^10.0.1",
+        "npm-pick-manifest": "^10.0.0",
+        "proc-log": "^5.0.0",
+        "promise-retry": "^2.0.1",
+        "semver": "^7.3.5",
+        "which": "^5.0.0"
       },
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/proc-log": {
-      "version": "5.0.0",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-install-checks": {
+      "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
+      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
       "dev": true,
-      "license": "ISC",
+      "license": "BSD-2-Clause",
+      "dependencies": {
+        "semver": "^7.1.1"
+      },
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/which": {
-      "version": "5.0.0",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
+      "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
+      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
+        "npm-install-checks": "^7.1.0",
+        "npm-normalize-package-bin": "^4.0.0",
+        "npm-package-arg": "^12.0.0",
+        "semver": "^7.3.5"
       },
       "engines": {
         "node": "^18.17.0 || >=20.5.0"
@@ -3500,6 +2105,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/installed-package-contents": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz",
+      "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3513,8 +2120,20 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/installed-package-contents/node_modules/npm-normalize-package-bin": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
+      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces": {
       "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-4.0.2.tgz",
+      "integrity": "sha512-mnuMuibEbkaBTYj9HQ3dMe6L0ylYW+s/gfz7tBDMFY/la0w9Kf44P9aLn4/+/t3aTR3YUHKoT6XQL9rlicIe3Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3527,16 +2146,10 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces/node_modules/@npmcli/name-from-folder": {
-      "version": "3.0.0",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator": {
       "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-7.1.1.tgz",
+      "integrity": "sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3550,8 +2163,20 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/name-from-folder": {
-      "version": "2.0.0",
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator/node_modules/json-parse-even-better-errors": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
+      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3560,6 +2185,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/node-gyp": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz",
+      "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3568,6 +2195,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json": {
       "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
+      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3583,35 +2212,10 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/json-parse-even-better-errors": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json/node_modules/proc-log": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/promise-spawn": {
-      "version": "7.0.2",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/query": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/query/-/query-3.1.0.tgz",
+      "integrity": "sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3623,6 +2227,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/redact": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-2.0.1.tgz",
+      "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3631,6 +2237,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script": {
       "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-8.1.0.tgz",
+      "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3647,6 +2255,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/git": {
       "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
+      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3666,6 +2276,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
+      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3681,8 +2293,23 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
+      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "which": "^4.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/hosted-git-info": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3694,14 +2321,54 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/ini": {
       "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
+      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/json-parse-even-better-errors": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
+      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/which": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
+      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^3.1.1"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/bundle": {
       "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz",
+      "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -3713,6 +2380,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/core": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz",
+      "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -3721,6 +2390,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/protobuf-specs": {
       "version": "0.3.3",
+      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz",
+      "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -3729,6 +2400,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/sign": {
       "version": "2.3.2",
+      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz",
+      "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -3743,8 +2416,20 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/@sigstore/sign/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/tuf": {
       "version": "2.3.4",
+      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.3.4.tgz",
+      "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -3757,6 +2442,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@sigstore/verify": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz",
+      "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -3770,6 +2457,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/@tufjs/models": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.1.tgz",
+      "integrity": "sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3782,6 +2471,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/abbrev": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
+      "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3790,6 +2481,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/bin-links": {
       "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-4.0.4.tgz",
+      "integrity": "sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3802,8 +2495,20 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/bin-links/node_modules/npm-normalize-package-bin": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
+      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/cacache": {
       "version": "18.0.4",
+      "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz",
+      "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3826,6 +2531,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/cmd-shim": {
       "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-6.0.3.tgz",
+      "integrity": "sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3855,6 +2562,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
       "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
+      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3866,6 +2575,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/ignore-walk": {
       "version": "6.0.5",
+      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.5.tgz",
+      "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3891,14 +2602,6 @@
         "@pkgjs/parseargs": "^0.11.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/lru-cache": {
       "version": "10.4.3",
       "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
@@ -3908,6 +2611,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/make-fetch-happen": {
       "version": "13.0.1",
+      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz",
+      "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3928,6 +2633,16 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/make-fetch-happen/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/minimatch": {
       "version": "9.0.5",
       "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
@@ -3946,6 +2661,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/minipass-fetch": {
       "version": "3.0.5",
+      "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz",
+      "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3999,6 +2716,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/node-gyp": {
       "version": "10.3.1",
+      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.3.1.tgz",
+      "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4020,8 +2739,36 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/node-gyp/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/@npmcli/template-oss/node_modules/node-gyp/node_modules/which": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
+      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^3.1.1"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/nopt": {
       "version": "7.2.1",
+      "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
+      "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4036,6 +2783,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/normalize-package-data": {
       "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz",
+      "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4049,6 +2798,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/normalize-package-data/node_modules/hosted-git-info": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4060,6 +2811,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-bundled": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz",
+      "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4069,8 +2822,20 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-bundled/node_modules/npm-normalize-package-bin": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
+      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-install-checks": {
       "version": "6.3.0",
+      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz",
+      "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4080,16 +2845,10 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/npm-normalize-package-bin": {
-      "version": "3.0.1",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
       "version": "12.0.2",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
+      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4102,16 +2861,10 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/npm-package-arg/node_modules/proc-log": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/npm-packlist": {
       "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz",
+      "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4123,6 +2876,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest": {
       "version": "9.1.0",
+      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz",
+      "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4137,6 +2892,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4146,8 +2903,20 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/npm-normalize-package-bin": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
+      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
       "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4160,8 +2929,20 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4170,6 +2951,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch": {
       "version": "17.1.0",
+      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz",
+      "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4188,6 +2971,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4199,6 +2984,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
       "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4211,8 +2998,20 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4221,6 +3020,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/p-map": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
+      "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4235,6 +3036,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote": {
       "version": "18.0.6",
+      "resolved": "https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz",
+      "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4265,6 +3068,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/git": {
       "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
+      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4284,6 +3089,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/package-json": {
       "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
+      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4299,8 +3106,23 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/promise-spawn": {
+      "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
+      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "which": "^4.0.0"
+      },
+      "engines": {
+        "node": "^16.14.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/hosted-git-info": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
+      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4312,14 +3134,28 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/ini": {
       "version": "4.1.3",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
+      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/json-parse-even-better-errors": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
+      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/npm-package-arg": {
       "version": "11.0.3",
+      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
+      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4332,16 +3168,46 @@
         "node": "^16.14.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/proc-log": {
+      "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
+      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/validate-npm-package-name": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
+      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/which": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
+      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "isexe": "^3.1.1"
+      },
+      "bin": {
+        "node-which": "bin/which.js"
+      },
+      "engines": {
+        "node": "^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz",
+      "integrity": "sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4353,6 +3219,16 @@
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
+    "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json/node_modules/json-parse-even-better-errors": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
+      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/@npmcli/template-oss/node_modules/path-scurry": {
       "version": "1.11.1",
       "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
@@ -4372,6 +3248,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/postcss-selector-parser": {
       "version": "6.1.2",
+      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz",
+      "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4382,16 +3260,10 @@
         "node": ">=4"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/proc-log": {
-      "version": "4.2.0",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/proggy": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/proggy/-/proggy-2.0.0.tgz",
+      "integrity": "sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4400,26 +3272,18 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/read-cmd-shim": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-4.0.0.tgz",
+      "integrity": "sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/read-package-json-fast": {
-      "version": "3.0.2",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "json-parse-even-better-errors": "^3.0.0",
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/sigstore": {
       "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz",
+      "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -4436,6 +3300,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/ssri": {
       "version": "10.0.6",
+      "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz",
+      "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4447,6 +3313,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/tuf-js": {
       "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.2.1.tgz",
+      "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4460,6 +3328,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/unique-filename": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz",
+      "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4471,6 +3341,8 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/unique-slug": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz",
+      "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4482,25 +3354,15 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/walk-up-path": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-3.0.1.tgz",
+      "integrity": "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==",
       "dev": true,
       "license": "ISC"
     },
-    "node_modules/@npmcli/template-oss/node_modules/which": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/@npmcli/template-oss/node_modules/write-file-atomic": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz",
+      "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4513,6 +3375,8 @@
     },
     "node_modules/@octokit/auth-token": {
       "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz",
+      "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4521,9 +3385,10 @@
     },
     "node_modules/@octokit/core": {
       "version": "4.2.4",
+      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz",
+      "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@octokit/auth-token": "^3.0.0",
         "@octokit/graphql": "^5.0.0",
@@ -4539,6 +3404,8 @@
     },
     "node_modules/@octokit/endpoint": {
       "version": "7.0.6",
+      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz",
+      "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4552,6 +3419,8 @@
     },
     "node_modules/@octokit/graphql": {
       "version": "5.0.6",
+      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz",
+      "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4565,11 +3434,15 @@
     },
     "node_modules/@octokit/openapi-types": {
       "version": "18.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz",
+      "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/plugin-paginate-rest": {
       "version": "6.1.2",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz",
+      "integrity": "sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4585,6 +3458,8 @@
     },
     "node_modules/@octokit/plugin-request-log": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz",
+      "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==",
       "dev": true,
       "license": "MIT",
       "peerDependencies": {
@@ -4593,6 +3468,8 @@
     },
     "node_modules/@octokit/plugin-rest-endpoint-methods": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.2.3.tgz",
+      "integrity": "sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4607,6 +3484,8 @@
     },
     "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": {
       "version": "10.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz",
+      "integrity": "sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4615,6 +3494,8 @@
     },
     "node_modules/@octokit/request": {
       "version": "6.2.8",
+      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz",
+      "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4631,6 +3512,8 @@
     },
     "node_modules/@octokit/request-error": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz",
+      "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4644,6 +3527,8 @@
     },
     "node_modules/@octokit/rest": {
       "version": "19.0.13",
+      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.13.tgz",
+      "integrity": "sha512-/EzVox5V9gYGdbAI+ovYj3nXQT1TtTHRT+0eZPcuC05UFSWO3mdO9UY1C0i2eLF9Un1ONJkAk+IEtYGAC+TahA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4658,11 +3543,15 @@
     },
     "node_modules/@octokit/tsconfig": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/@octokit/tsconfig/-/tsconfig-1.0.2.tgz",
+      "integrity": "sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/types": {
       "version": "9.3.2",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
+      "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4671,8 +3560,6 @@
     },
     "node_modules/@pkgjs/parseargs": {
       "version": "0.11.0",
-      "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz",
-      "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==",
       "inBundle": true,
       "license": "MIT",
       "optional": true,
@@ -4682,8 +3569,11 @@
     },
     "node_modules/@rtsao/scc": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz",
+      "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/@sigstore/bundle": {
       "version": "4.0.0",
@@ -4763,8 +3653,6 @@
     },
     "node_modules/@tufjs/models": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.0.0.tgz",
-      "integrity": "sha512-h5x5ga/hh82COe+GoD4+gKUeV4T3iaYOxqLt41GRKApinPI7DMidhCmNVTjKfhCWFJIGXaFJee07XczdT4jdZQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -4777,8 +3665,6 @@
     },
     "node_modules/@tufjs/models/node_modules/minimatch": {
       "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -4807,6 +3693,8 @@
     },
     "node_modules/@types/conventional-commits-parser": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/@types/conventional-commits-parser/-/conventional-commits-parser-5.0.1.tgz",
+      "integrity": "sha512-7uz5EHdzz2TqoMfV7ee61Egf5y6NkcO4FB/1iCCQnbeiI1F3xzv3vK5dBCXUCLQgGYS+mUeigK1iKQzvED+QnQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4815,19 +3703,36 @@
     },
     "node_modules/@types/debug": {
       "version": "4.1.12",
+      "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+      "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@types/ms": "*"
       }
     },
+    "node_modules/@types/hast": {
+      "version": "2.3.10",
+      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
+      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2"
+      }
+    },
     "node_modules/@types/json5": {
       "version": "0.0.29",
+      "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz",
+      "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/@types/mdast": {
       "version": "3.0.15",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
+      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4836,11 +3741,15 @@
     },
     "node_modules/@types/minimist": {
       "version": "1.2.5",
+      "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz",
+      "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/ms": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
+      "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
       "dev": true,
       "license": "MIT"
     },
@@ -4850,33 +3759,42 @@
       "integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "undici-types": "~7.12.0"
       }
     },
     "node_modules/@types/normalize-package-data": {
       "version": "2.4.4",
+      "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz",
+      "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/npm-package-arg": {
       "version": "6.1.4",
+      "resolved": "https://registry.npmjs.org/@types/npm-package-arg/-/npm-package-arg-6.1.4.tgz",
+      "integrity": "sha512-vDgdbMy2QXHnAruzlv68pUtXCjmqUk3WrBAsRboRovsOmxbfn/WiYCjmecyKjGztnMps5dWp4Uq2prp+Ilo17Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/parse5": {
       "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz",
+      "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/unist": {
       "version": "2.0.11",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
+      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/yargs": {
       "version": "16.0.9",
+      "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.9.tgz",
+      "integrity": "sha512-tHhzvkFXZQeTECenFoRljLBYPZJ7jAVxqqtEI0qTLOmuultnFp4I9yKE17vTuhf7BkhCu7I4XuemPgikDVuYqA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4885,13 +3803,18 @@
     },
     "node_modules/@types/yargs-parser": {
       "version": "21.0.3",
+      "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
+      "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@ungap/structured-clone": {
       "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz",
+      "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
       "dev": true,
-      "license": "ISC"
+      "license": "ISC",
+      "peer": true
     },
     "node_modules/@xmldom/xmldom": {
       "version": "0.8.11",
@@ -4913,6 +3836,8 @@
     },
     "node_modules/acorn": {
       "version": "8.15.0",
+      "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
+      "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4925,8 +3850,11 @@
     },
     "node_modules/acorn-jsx": {
       "version": "5.3.2",
+      "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
+      "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "peerDependencies": {
         "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
       }
@@ -4941,6 +3869,8 @@
     },
     "node_modules/aggregate-error": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
+      "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4953,9 +3883,10 @@
     },
     "node_modules/ajv": {
       "version": "8.17.1",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
+      "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "fast-deep-equal": "^3.1.3",
         "fast-uri": "^3.0.1",
@@ -4969,6 +3900,8 @@
     },
     "node_modules/ajv-formats": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz",
+      "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4985,6 +3918,8 @@
     },
     "node_modules/ajv-formats-draft2019": {
       "version": "1.6.1",
+      "resolved": "https://registry.npmjs.org/ajv-formats-draft2019/-/ajv-formats-draft2019-1.6.1.tgz",
+      "integrity": "sha512-JQPvavpkWDvIsBp2Z33UkYCtXCSpW4HD3tAZ+oL4iEFOk9obQZffx0yANwECt6vzr6ET+7HN5czRyqXbnq/u0Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5007,8 +3942,6 @@
     },
     "node_modules/ansi-styles": {
       "version": "6.2.3",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz",
-      "integrity": "sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5020,6 +3953,8 @@
     },
     "node_modules/anymatch": {
       "version": "3.1.3",
+      "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
+      "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5032,6 +3967,8 @@
     },
     "node_modules/append-transform": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz",
+      "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5052,11 +3989,15 @@
     },
     "node_modules/argparse": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
+      "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
       "dev": true,
       "license": "Python-2.0"
     },
     "node_modules/args": {
       "version": "5.0.3",
+      "resolved": "https://registry.npmjs.org/args/-/args-5.0.3.tgz",
+      "integrity": "sha512-h6k/zfFgusnv3i5TU08KQkVKuCPBtL/PWQbWkHUxvJrZ2nAyeaUupneemcrgn1xmqxPQsPIzwkUhOpoqPDRZuA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5071,6 +4012,8 @@
     },
     "node_modules/args/node_modules/ansi-styles": {
       "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5082,6 +4025,8 @@
     },
     "node_modules/args/node_modules/camelcase": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.0.0.tgz",
+      "integrity": "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5090,6 +4035,8 @@
     },
     "node_modules/args/node_modules/chalk": {
       "version": "2.4.2",
+      "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+      "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5103,6 +4050,8 @@
     },
     "node_modules/args/node_modules/color-convert": {
       "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+      "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5111,19 +4060,35 @@
     },
     "node_modules/args/node_modules/color-name": {
       "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
+      "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/args/node_modules/escape-string-regexp": {
       "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">=0.8.0"
       }
     },
+    "node_modules/args/node_modules/has-flag": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
+      "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=4"
+      }
+    },
     "node_modules/args/node_modules/mri": {
       "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/mri/-/mri-1.1.4.tgz",
+      "integrity": "sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5132,6 +4097,8 @@
     },
     "node_modules/args/node_modules/supports-color": {
       "version": "5.5.0",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
+      "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5143,8 +4110,11 @@
     },
     "node_modules/array-buffer-byte-length": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz",
+      "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "is-array-buffer": "^3.0.5"
@@ -5158,13 +4128,18 @@
     },
     "node_modules/array-ify": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz",
+      "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/array-includes": {
       "version": "3.1.9",
+      "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz",
+      "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.4",
@@ -5184,8 +4159,11 @@
     },
     "node_modules/array.prototype.findlastindex": {
       "version": "1.2.6",
+      "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz",
+      "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.4",
@@ -5204,8 +4182,11 @@
     },
     "node_modules/array.prototype.flat": {
       "version": "1.3.3",
+      "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz",
+      "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -5221,8 +4202,11 @@
     },
     "node_modules/array.prototype.flatmap": {
       "version": "1.3.3",
+      "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz",
+      "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -5238,8 +4222,11 @@
     },
     "node_modules/arraybuffer.prototype.slice": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz",
+      "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "array-buffer-byte-length": "^1.0.1",
         "call-bind": "^1.0.8",
@@ -5258,6 +4245,8 @@
     },
     "node_modules/arrify": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
+      "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5266,14 +4255,19 @@
     },
     "node_modules/async-function": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz",
+      "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
     },
     "node_modules/async-hook-domain": {
       "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.4.tgz",
+      "integrity": "sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5282,6 +4276,8 @@
     },
     "node_modules/async-retry": {
       "version": "1.3.3",
+      "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
+      "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5290,6 +4286,8 @@
     },
     "node_modules/async-retry/node_modules/retry": {
       "version": "0.13.1",
+      "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
+      "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5298,13 +4296,18 @@
     },
     "node_modules/asynckit": {
       "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/available-typed-arrays": {
       "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
+      "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "possible-typed-array-names": "^1.0.0"
       },
@@ -5332,6 +4335,8 @@
     },
     "node_modules/bail": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz",
+      "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5364,15 +4369,21 @@
     },
     "node_modules/basic-auth-parser": {
       "version": "0.0.2-1",
+      "resolved": "https://registry.npmjs.org/basic-auth-parser/-/basic-auth-parser-0.0.2-1.tgz",
+      "integrity": "sha512-GFj8iVxo9onSU6BnnQvVwqvxh60UcSHJEDnIk3z4B6iOjsKSmqe+ibW0Rsz7YO7IE1HG3D3tqCNIidP46SZVdQ==",
       "dev": true
     },
     "node_modules/before-after-hook": {
       "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
+      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/benchmark": {
       "version": "2.1.4",
+      "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz",
+      "integrity": "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5394,18 +4405,10 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/binary-extensions": {
-      "version": "3.1.0",
-      "license": "MIT",
-      "engines": {
-        "node": ">=18.20"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
     "node_modules/bind-obj-methods": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz",
+      "integrity": "sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5414,6 +4417,8 @@
     },
     "node_modules/boolbase": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
+      "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
       "dev": true,
       "license": "ISC"
     },
@@ -5427,6 +4432,8 @@
     },
     "node_modules/braces": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
+      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5456,7 +4463,6 @@
         }
       ],
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "baseline-browser-mapping": "^2.8.3",
         "caniuse-lite": "^1.0.30001741",
@@ -5473,6 +4479,8 @@
     },
     "node_modules/buffer-from": {
       "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
+      "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -5499,6 +4507,8 @@
     },
     "node_modules/caching-transform": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz",
+      "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5513,11 +4523,15 @@
     },
     "node_modules/caching-transform/node_modules/signal-exit": {
       "version": "3.0.7",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/caching-transform/node_modules/write-file-atomic": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz",
+      "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5529,8 +4543,11 @@
     },
     "node_modules/call-bind": {
       "version": "1.0.8",
+      "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz",
+      "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.0",
         "es-define-property": "^1.0.0",
@@ -5546,6 +4563,8 @@
     },
     "node_modules/call-bind-apply-helpers": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
+      "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5558,8 +4577,11 @@
     },
     "node_modules/call-bound": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
+      "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.2",
         "get-intrinsic": "^1.3.0"
@@ -5573,11 +4595,15 @@
     },
     "node_modules/caller": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/caller/-/caller-1.1.0.tgz",
+      "integrity": "sha512-n+21IZC3j06YpCWaxmUy5AnVqhmCIM2bQtqQyy00HJlmStRt6kwDX5F9Z97pqwAB+G/tgSz6q/kUBbNyQzIubw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/callsites": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
+      "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5586,6 +4612,8 @@
     },
     "node_modules/camelcase": {
       "version": "5.3.1",
+      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+      "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5594,6 +4622,8 @@
     },
     "node_modules/camelcase-keys": {
       "version": "6.2.2",
+      "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz",
+      "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5631,6 +4661,8 @@
     },
     "node_modules/ccount": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
+      "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5640,8 +4672,6 @@
     },
     "node_modules/chalk": {
       "version": "5.6.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz",
-      "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -5653,6 +4683,8 @@
     },
     "node_modules/character-entities": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
+      "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5662,6 +4694,8 @@
     },
     "node_modules/character-entities-html4": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz",
+      "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5671,6 +4705,8 @@
     },
     "node_modules/character-entities-legacy": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
+      "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5680,6 +4716,8 @@
     },
     "node_modules/chokidar": {
       "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
+      "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5703,6 +4741,8 @@
     },
     "node_modules/chokidar/node_modules/glob-parent": {
       "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
+      "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5747,6 +4787,8 @@
     },
     "node_modules/clean-stack": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
+      "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5767,6 +4809,8 @@
     },
     "node_modules/cli-table3": {
       "version": "0.6.5",
+      "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.5.tgz",
+      "integrity": "sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5781,6 +4825,8 @@
     },
     "node_modules/cliui": {
       "version": "8.0.1",
+      "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
+      "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5794,6 +4840,8 @@
     },
     "node_modules/cliui/node_modules/ansi-styles": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5808,6 +4856,8 @@
     },
     "node_modules/cliui/node_modules/wrap-ansi": {
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5831,6 +4881,8 @@
     },
     "node_modules/code-suggester": {
       "version": "4.3.4",
+      "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-4.3.4.tgz",
+      "integrity": "sha512-qOj12mccFX2NALK01WnrwJKCmIwp1TMuskueh2EVaR4bc3xw072yfX9Ojq7yFQL4AmXfTXHKNjSO8lvh0y5MuA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -5851,6 +4903,8 @@
     },
     "node_modules/code-suggester/node_modules/ansi-styles": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5865,6 +4919,8 @@
     },
     "node_modules/code-suggester/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5874,6 +4930,8 @@
     },
     "node_modules/code-suggester/node_modules/cliui": {
       "version": "7.0.4",
+      "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+      "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5884,6 +4942,8 @@
     },
     "node_modules/code-suggester/node_modules/diff": {
       "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
+      "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -5892,6 +4952,9 @@
     },
     "node_modules/code-suggester/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5911,6 +4974,8 @@
     },
     "node_modules/code-suggester/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5922,6 +4987,8 @@
     },
     "node_modules/code-suggester/node_modules/wrap-ansi": {
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5938,6 +5005,8 @@
     },
     "node_modules/code-suggester/node_modules/yargs": {
       "version": "16.2.0",
+      "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
+      "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5955,6 +5024,8 @@
     },
     "node_modules/code-suggester/node_modules/yargs-parser": {
       "version": "20.2.9",
+      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
+      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -5979,6 +5050,8 @@
     },
     "node_modules/color-support": {
       "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
+      "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -5987,6 +5060,8 @@
     },
     "node_modules/combined-stream": {
       "version": "1.0.8",
+      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5998,6 +5073,8 @@
     },
     "node_modules/comma-separated-tokens": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz",
+      "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -6007,6 +5084,8 @@
     },
     "node_modules/commander": {
       "version": "2.20.3",
+      "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
+      "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -6016,11 +5095,15 @@
     },
     "node_modules/commondir": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
+      "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/compare-func": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz",
+      "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6030,11 +5113,15 @@
     },
     "node_modules/concat-map": {
       "version": "0.0.1",
+      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
+      "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/conventional-changelog-angular": {
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-7.0.0.tgz",
+      "integrity": "sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6046,6 +5133,8 @@
     },
     "node_modules/conventional-changelog-conventionalcommits": {
       "version": "7.0.2",
+      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-7.0.2.tgz",
+      "integrity": "sha512-NKXYmMR/Hr1DevQegFB4MwfM5Vv0m4UIxKZTTYuD98lpTknaZlSRrDOG4X7wIXpGkfsYxZTghUN+Qq+T0YQI7w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6057,6 +5146,8 @@
     },
     "node_modules/conventional-changelog-writer": {
       "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-6.0.1.tgz",
+      "integrity": "sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6077,6 +5168,8 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/hosted-git-info": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz",
+      "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6088,6 +5181,8 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/lru-cache": {
       "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+      "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -6099,6 +5194,8 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/meow": {
       "version": "8.1.2",
+      "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz",
+      "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6123,6 +5220,8 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/normalize-package-data": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz",
+      "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -6137,6 +5236,8 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/type-fest": {
       "version": "0.18.1",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz",
+      "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -6148,6 +5249,8 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/yargs-parser": {
       "version": "20.2.9",
+      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
+      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -6156,6 +5259,8 @@
     },
     "node_modules/conventional-commits-filter": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-3.0.0.tgz",
+      "integrity": "sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6168,6 +5273,8 @@
     },
     "node_modules/conventional-commits-parser": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-5.0.0.tgz",
+      "integrity": "sha512-ZPMl0ZJbw74iS9LuX9YIAiW8pfM5p3yh2o/NbXHbkFuZzY5jvdi5jFycEOkmBW5H5I7nA+D6f3UcsCLP2vvSEA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6185,14 +5292,17 @@
     },
     "node_modules/convert-source-map": {
       "version": "1.9.0",
+      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
+      "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/cosmiconfig": {
       "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz",
+      "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "env-paths": "^2.2.1",
         "import-fresh": "^3.3.0",
@@ -6216,6 +5326,8 @@
     },
     "node_modules/cosmiconfig-typescript-loader": {
       "version": "6.1.0",
+      "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-6.1.0.tgz",
+      "integrity": "sha512-tJ1w35ZRUiM5FeTzT7DtYWAFFv37ZLqSRkGi2oeCK1gPhvaWjkAtfXvLmvE1pRfxxp9aQo6ba/Pvg1dKj05D4g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6266,6 +5378,8 @@
     },
     "node_modules/css-select": {
       "version": "5.2.2",
+      "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz",
+      "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -6281,6 +5395,8 @@
     },
     "node_modules/css-what": {
       "version": "6.2.2",
+      "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz",
+      "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -6302,6 +5418,8 @@
     },
     "node_modules/cssstyle": {
       "version": "4.6.0",
+      "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.6.0.tgz",
+      "integrity": "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6314,11 +5432,15 @@
     },
     "node_modules/cssstyle/node_modules/rrweb-cssom": {
       "version": "0.8.0",
+      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
+      "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/dargs": {
       "version": "8.1.0",
+      "resolved": "https://registry.npmjs.org/dargs/-/dargs-8.1.0.tgz",
+      "integrity": "sha512-wAV9QHOsNbwnWdNW2FYvE1P56wtgSbM+3SZcdGiWQILwVjACCXDCI3Ai8QlCjMDB8YK5zySiXZYBiwGmNY3lnw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6330,6 +5452,8 @@
     },
     "node_modules/data-urls": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz",
+      "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6342,6 +5466,8 @@
     },
     "node_modules/data-urls/node_modules/tr46": {
       "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
+      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6353,6 +5479,8 @@
     },
     "node_modules/data-urls/node_modules/webidl-conversions": {
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
+      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -6361,6 +5489,8 @@
     },
     "node_modules/data-urls/node_modules/whatwg-url": {
       "version": "14.2.0",
+      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
+      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6373,8 +5503,11 @@
     },
     "node_modules/data-view-buffer": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz",
+      "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -6389,8 +5522,11 @@
     },
     "node_modules/data-view-byte-length": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz",
+      "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -6405,8 +5541,11 @@
     },
     "node_modules/data-view-byte-offset": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz",
+      "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -6421,6 +5560,8 @@
     },
     "node_modules/dateformat": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz",
+      "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6429,8 +5570,6 @@
     },
     "node_modules/debug": {
       "version": "4.4.3",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz",
-      "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -6447,6 +5586,8 @@
     },
     "node_modules/decamelize": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
+      "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6455,6 +5596,8 @@
     },
     "node_modules/decamelize-keys": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz",
+      "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6470,6 +5613,8 @@
     },
     "node_modules/decamelize-keys/node_modules/map-obj": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz",
+      "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6478,11 +5623,15 @@
     },
     "node_modules/decimal.js": {
       "version": "10.6.0",
+      "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz",
+      "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/decode-named-character-reference": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz",
+      "integrity": "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6510,11 +5659,16 @@
     },
     "node_modules/deep-is": {
       "version": "0.1.4",
+      "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
+      "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/default-require-extensions": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz",
+      "integrity": "sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6529,8 +5683,11 @@
     },
     "node_modules/define-data-property": {
       "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
+      "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-define-property": "^1.0.0",
         "es-errors": "^1.3.0",
@@ -6545,8 +5702,11 @@
     },
     "node_modules/define-properties": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
+      "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "define-data-property": "^1.0.1",
         "has-property-descriptors": "^1.0.0",
@@ -6561,6 +5721,8 @@
     },
     "node_modules/delayed-stream": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6569,11 +5731,15 @@
     },
     "node_modules/deprecation": {
       "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
+      "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/dequal": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
+      "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6582,24 +5748,14 @@
     },
     "node_modules/detect-indent": {
       "version": "6.1.0",
+      "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz",
+      "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">=8"
       }
     },
-    "node_modules/devlop": {
-      "version": "1.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "dequal": "^2.0.0"
-      },
-      "funding": {
-        "type": "github",
-        "url": "https://github.com/sponsors/wooorm"
-      }
-    },
     "node_modules/diff": {
       "version": "7.0.0",
       "license": "BSD-3-Clause",
@@ -6609,13 +5765,18 @@
     },
     "node_modules/discontinuous-range": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz",
+      "integrity": "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/doctrine": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
+      "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
       "dev": true,
       "license": "Apache-2.0",
+      "peer": true,
       "dependencies": {
         "esutils": "^2.0.2"
       },
@@ -6625,6 +5786,8 @@
     },
     "node_modules/dom-serializer": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
+      "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6638,6 +5801,8 @@
     },
     "node_modules/domelementtype": {
       "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
+      "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
       "dev": true,
       "funding": [
         {
@@ -6649,6 +5814,8 @@
     },
     "node_modules/domhandler": {
       "version": "5.0.3",
+      "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
+      "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -6663,6 +5830,8 @@
     },
     "node_modules/domutils": {
       "version": "3.2.2",
+      "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
+      "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -6676,6 +5845,8 @@
     },
     "node_modules/dot-prop": {
       "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz",
+      "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6687,6 +5858,8 @@
     },
     "node_modules/dunder-proto": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
+      "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6700,8 +5873,6 @@
     },
     "node_modules/eastasianwidth": {
       "version": "0.2.0",
-      "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz",
-      "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==",
       "inBundle": true,
       "license": "MIT"
     },
@@ -6728,6 +5899,8 @@
     },
     "node_modules/entities": {
       "version": "4.5.0",
+      "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
+      "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -6762,8 +5935,11 @@
     },
     "node_modules/es-abstract": {
       "version": "1.24.0",
+      "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz",
+      "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "array-buffer-byte-length": "^1.0.2",
         "arraybuffer.prototype.slice": "^1.0.4",
@@ -6829,6 +6005,8 @@
     },
     "node_modules/es-define-property": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
+      "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6837,6 +6015,8 @@
     },
     "node_modules/es-errors": {
       "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
+      "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6845,6 +6025,8 @@
     },
     "node_modules/es-object-atoms": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
+      "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6856,6 +6038,8 @@
     },
     "node_modules/es-set-tostringtag": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
+      "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6870,8 +6054,11 @@
     },
     "node_modules/es-shim-unscopables": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz",
+      "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "hasown": "^2.0.2"
       },
@@ -6881,8 +6068,11 @@
     },
     "node_modules/es-to-primitive": {
       "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz",
+      "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "is-callable": "^1.2.7",
         "is-date-object": "^1.0.5",
@@ -6897,11 +6087,15 @@
     },
     "node_modules/es6-error": {
       "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz",
+      "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/escalade": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
+      "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6910,8 +6104,11 @@
     },
     "node_modules/escape-string-regexp": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
+      "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -6921,6 +6118,9 @@
     },
     "node_modules/eslint": {
       "version": "8.57.1",
+      "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz",
+      "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==",
+      "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6976,8 +6176,11 @@
     },
     "node_modules/eslint-import-resolver-node": {
       "version": "0.3.9",
+      "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz",
+      "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "debug": "^3.2.7",
         "is-core-module": "^2.13.0",
@@ -6986,16 +6189,22 @@
     },
     "node_modules/eslint-import-resolver-node/node_modules/debug": {
       "version": "3.2.7",
+      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "ms": "^2.1.1"
       }
     },
     "node_modules/eslint-module-utils": {
       "version": "2.12.1",
+      "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz",
+      "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "debug": "^3.2.7"
       },
@@ -7010,16 +6219,22 @@
     },
     "node_modules/eslint-module-utils/node_modules/debug": {
       "version": "3.2.7",
+      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "ms": "^2.1.1"
       }
     },
     "node_modules/eslint-plugin-es": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz",
+      "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "eslint-utils": "^2.0.0",
         "regexpp": "^3.0.0"
@@ -7036,8 +6251,11 @@
     },
     "node_modules/eslint-plugin-import": {
       "version": "2.32.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz",
+      "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@rtsao/scc": "^1.1.0",
         "array-includes": "^3.1.9",
@@ -7068,8 +6286,11 @@
     },
     "node_modules/eslint-plugin-import/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -7077,16 +6298,22 @@
     },
     "node_modules/eslint-plugin-import/node_modules/debug": {
       "version": "3.2.7",
+      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
+      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "ms": "^2.1.1"
       }
     },
     "node_modules/eslint-plugin-import/node_modules/doctrine": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
+      "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
       "dev": true,
       "license": "Apache-2.0",
+      "peer": true,
       "dependencies": {
         "esutils": "^2.0.2"
       },
@@ -7096,8 +6323,11 @@
     },
     "node_modules/eslint-plugin-import/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -7107,16 +6337,22 @@
     },
     "node_modules/eslint-plugin-import/node_modules/semver": {
       "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "bin": {
         "semver": "bin/semver.js"
       }
     },
     "node_modules/eslint-plugin-node": {
       "version": "11.1.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz",
+      "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "eslint-plugin-es": "^3.0.0",
         "eslint-utils": "^2.0.0",
@@ -7134,8 +6370,11 @@
     },
     "node_modules/eslint-plugin-node/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -7143,8 +6382,11 @@
     },
     "node_modules/eslint-plugin-node/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -7154,14 +6396,19 @@
     },
     "node_modules/eslint-plugin-node/node_modules/semver": {
       "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "bin": {
         "semver": "bin/semver.js"
       }
     },
     "node_modules/eslint-plugin-promise": {
       "version": "6.6.0",
+      "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz",
+      "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -7177,8 +6424,11 @@
     },
     "node_modules/eslint-scope": {
       "version": "7.2.2",
+      "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
+      "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
       "dev": true,
       "license": "BSD-2-Clause",
+      "peer": true,
       "dependencies": {
         "esrecurse": "^4.3.0",
         "estraverse": "^5.2.0"
@@ -7192,8 +6442,11 @@
     },
     "node_modules/eslint-utils": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz",
+      "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "eslint-visitor-keys": "^1.1.0"
       },
@@ -7206,16 +6459,22 @@
     },
     "node_modules/eslint-utils/node_modules/eslint-visitor-keys": {
       "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
+      "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
       "dev": true,
       "license": "Apache-2.0",
+      "peer": true,
       "engines": {
         "node": ">=4"
       }
     },
     "node_modules/eslint-visitor-keys": {
       "version": "3.4.3",
+      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
+      "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
       "dev": true,
       "license": "Apache-2.0",
+      "peer": true,
       "engines": {
         "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
       },
@@ -7225,8 +6484,11 @@
     },
     "node_modules/eslint/node_modules/ajv": {
       "version": "6.12.6",
+      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
+      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "fast-deep-equal": "^3.1.1",
         "fast-json-stable-stringify": "^2.0.0",
@@ -7240,8 +6502,11 @@
     },
     "node_modules/eslint/node_modules/ansi-styles": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "color-convert": "^2.0.1"
       },
@@ -7254,8 +6519,11 @@
     },
     "node_modules/eslint/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -7263,8 +6531,11 @@
     },
     "node_modules/eslint/node_modules/chalk": {
       "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "ansi-styles": "^4.1.0",
         "supports-color": "^7.1.0"
@@ -7278,8 +6549,11 @@
     },
     "node_modules/eslint/node_modules/find-up": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
+      "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "locate-path": "^6.0.0",
         "path-exists": "^4.0.0"
@@ -7291,23 +6565,21 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/eslint/node_modules/has-flag": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=8"
-      }
-    },
     "node_modules/eslint/node_modules/json-schema-traverse": {
       "version": "0.4.1",
+      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
+      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/eslint/node_modules/locate-path": {
       "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
+      "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "p-locate": "^5.0.0"
       },
@@ -7320,8 +6592,11 @@
     },
     "node_modules/eslint/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -7331,8 +6606,11 @@
     },
     "node_modules/eslint/node_modules/p-limit": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
+      "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "yocto-queue": "^0.1.0"
       },
@@ -7345,8 +6623,11 @@
     },
     "node_modules/eslint/node_modules/p-locate": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
+      "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "p-limit": "^3.0.2"
       },
@@ -7359,16 +6640,22 @@
     },
     "node_modules/eslint/node_modules/path-exists": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=8"
       }
     },
     "node_modules/eslint/node_modules/supports-color": {
       "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "has-flag": "^4.0.0"
       },
@@ -7378,8 +6665,11 @@
     },
     "node_modules/eslint/node_modules/yocto-queue": {
       "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
+      "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -7389,8 +6679,11 @@
     },
     "node_modules/espree": {
       "version": "9.6.1",
+      "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
+      "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
       "dev": true,
       "license": "BSD-2-Clause",
+      "peer": true,
       "dependencies": {
         "acorn": "^8.9.0",
         "acorn-jsx": "^5.3.2",
@@ -7403,10 +6696,27 @@
         "url": "https://opencollective.com/eslint"
       }
     },
+    "node_modules/esprima": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
+      "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
+      "dev": true,
+      "license": "BSD-2-Clause",
+      "bin": {
+        "esparse": "bin/esparse.js",
+        "esvalidate": "bin/esvalidate.js"
+      },
+      "engines": {
+        "node": ">=4"
+      }
+    },
     "node_modules/esquery": {
       "version": "1.6.0",
+      "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz",
+      "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
       "dev": true,
       "license": "BSD-3-Clause",
+      "peer": true,
       "dependencies": {
         "estraverse": "^5.1.0"
       },
@@ -7416,8 +6726,11 @@
     },
     "node_modules/esrecurse": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
+      "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
       "dev": true,
       "license": "BSD-2-Clause",
+      "peer": true,
       "dependencies": {
         "estraverse": "^5.2.0"
       },
@@ -7427,22 +6740,30 @@
     },
     "node_modules/estraverse": {
       "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
+      "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
       "dev": true,
       "license": "BSD-2-Clause",
+      "peer": true,
       "engines": {
         "node": ">=4.0"
       }
     },
     "node_modules/esutils": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
+      "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
       "dev": true,
       "license": "BSD-2-Clause",
+      "peer": true,
       "engines": {
         "node": ">=0.10.0"
       }
     },
     "node_modules/events-to-array": {
       "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz",
+      "integrity": "sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA==",
       "dev": true,
       "license": "ISC"
     },
@@ -7453,28 +6774,40 @@
     },
     "node_modules/extend": {
       "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
+      "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-deep-equal": {
       "version": "3.1.3",
+      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+      "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-fifo": {
       "version": "1.3.2",
+      "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz",
+      "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-json-stable-stringify": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
+      "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/fast-levenshtein": {
       "version": "2.0.6",
+      "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
+      "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/fast-uri": {
       "version": "3.1.0",
@@ -7503,14 +6836,19 @@
     },
     "node_modules/fastq": {
       "version": "1.19.1",
+      "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz",
+      "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "reusify": "^1.0.4"
       }
     },
     "node_modules/figures": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
+      "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7525,6 +6863,8 @@
     },
     "node_modules/figures/node_modules/escape-string-regexp": {
       "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
+      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7533,8 +6873,11 @@
     },
     "node_modules/file-entry-cache": {
       "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
+      "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "flat-cache": "^3.0.4"
       },
@@ -7544,6 +6887,8 @@
     },
     "node_modules/fill-range": {
       "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
+      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7555,6 +6900,8 @@
     },
     "node_modules/find-cache-dir": {
       "version": "3.3.2",
+      "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz",
+      "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7571,6 +6918,8 @@
     },
     "node_modules/find-up": {
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-7.0.0.tgz",
+      "integrity": "sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7587,13 +6936,18 @@
     },
     "node_modules/findit": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz",
+      "integrity": "sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/flat-cache": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
+      "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "flatted": "^3.2.9",
         "keyv": "^4.5.3",
@@ -7605,8 +6959,11 @@
     },
     "node_modules/flat-cache/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -7614,8 +6971,12 @@
     },
     "node_modules/flat-cache/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "fs.realpath": "^1.0.0",
         "inflight": "^1.0.4",
@@ -7633,8 +6994,11 @@
     },
     "node_modules/flat-cache/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -7644,8 +7008,12 @@
     },
     "node_modules/flat-cache/node_modules/rimraf": {
       "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "glob": "^7.1.3"
       },
@@ -7658,13 +7026,19 @@
     },
     "node_modules/flatted": {
       "version": "3.3.3",
+      "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz",
+      "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
       "dev": true,
-      "license": "ISC"
+      "license": "ISC",
+      "peer": true
     },
     "node_modules/for-each": {
       "version": "0.3.5",
+      "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz",
+      "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "is-callable": "^1.2.7"
       },
@@ -7692,6 +7066,8 @@
     },
     "node_modules/form-data": {
       "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
+      "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7707,6 +7083,8 @@
     },
     "node_modules/fromentries": {
       "version": "1.3.2",
+      "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz",
+      "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==",
       "dev": true,
       "funding": [
         {
@@ -7726,6 +7104,8 @@
     },
     "node_modules/front-matter": {
       "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/front-matter/-/front-matter-4.0.2.tgz",
+      "integrity": "sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7734,26 +7114,18 @@
     },
     "node_modules/front-matter/node_modules/argparse": {
       "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
+      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "sprintf-js": "~1.0.2"
       }
     },
-    "node_modules/front-matter/node_modules/esprima": {
-      "version": "4.0.1",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "bin": {
-        "esparse": "bin/esparse.js",
-        "esvalidate": "bin/esvalidate.js"
-      },
-      "engines": {
-        "node": ">=4"
-      }
-    },
     "node_modules/front-matter/node_modules/js-yaml": {
       "version": "3.14.1",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
+      "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7764,13 +7136,10 @@
         "js-yaml": "bin/js-yaml.js"
       }
     },
-    "node_modules/front-matter/node_modules/sprintf-js": {
-      "version": "1.0.3",
-      "dev": true,
-      "license": "BSD-3-Clause"
-    },
     "node_modules/fs-exists-cached": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz",
+      "integrity": "sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg==",
       "dev": true,
       "license": "ISC"
     },
@@ -7787,12 +7156,17 @@
     },
     "node_modules/fs.realpath": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
+      "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/fsevents": {
       "version": "2.3.3",
+      "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
+      "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
       "dev": true,
+      "hasInstallScript": true,
       "license": "MIT",
       "optional": true,
       "os": [
@@ -7804,6 +7178,8 @@
     },
     "node_modules/function-bind": {
       "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
+      "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7812,13 +7188,18 @@
     },
     "node_modules/function-loop": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz",
+      "integrity": "sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/function.prototype.name": {
       "version": "1.1.8",
+      "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz",
+      "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -7836,14 +7217,19 @@
     },
     "node_modules/functions-have-names": {
       "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz",
+      "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
       }
     },
     "node_modules/gensync": {
       "version": "1.0.0-beta.2",
+      "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
+      "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7852,6 +7238,8 @@
     },
     "node_modules/get-caller-file": {
       "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+      "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -7860,6 +7248,8 @@
     },
     "node_modules/get-intrinsic": {
       "version": "1.3.0",
+      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
+      "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7883,6 +7273,8 @@
     },
     "node_modules/get-package-type": {
       "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
+      "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7891,6 +7283,8 @@
     },
     "node_modules/get-proto": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
+      "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7903,8 +7297,11 @@
     },
     "node_modules/get-symbol-description": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz",
+      "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -7919,6 +7316,8 @@
     },
     "node_modules/git-raw-commits": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-4.0.0.tgz",
+      "integrity": "sha512-ICsMM1Wk8xSGMowkOmPrzo2Fgmfo4bMHLNX6ytHjajRJUqvHOw/TFapQ+QG75c3X/tTDDhOSRPGC52dDbNM8FQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7933,10 +7332,15 @@
         "node": ">=16"
       }
     },
+    "node_modules/github-slugger": {
+      "version": "1.5.0",
+      "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz",
+      "integrity": "sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/glob": {
       "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-11.0.3.tgz",
-      "integrity": "sha512-2Nim7dha1KVkaiF4q6Dj+ngPPMdfvLJEOpZk/jKiUAkqKebpGAWQXAq9z1xu9HKu5lWfqw/FASuccEjyznjPaA==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -7959,8 +7363,11 @@
     },
     "node_modules/glob-parent": {
       "version": "6.0.2",
+      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
+      "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
       "dev": true,
       "license": "ISC",
+      "peer": true,
       "dependencies": {
         "is-glob": "^4.0.3"
       },
@@ -7970,6 +7377,8 @@
     },
     "node_modules/global-directory": {
       "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz",
+      "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7984,6 +7393,8 @@
     },
     "node_modules/global-directory/node_modules/ini": {
       "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz",
+      "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -7992,8 +7403,11 @@
     },
     "node_modules/globals": {
       "version": "13.24.0",
+      "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
+      "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "type-fest": "^0.20.2"
       },
@@ -8006,8 +7420,11 @@
     },
     "node_modules/globalthis": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz",
+      "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "define-properties": "^1.2.1",
         "gopd": "^1.0.1"
@@ -8021,6 +7438,8 @@
     },
     "node_modules/gopd": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
+      "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8037,11 +7456,16 @@
     },
     "node_modules/graphemer": {
       "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
+      "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/groff-escape": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/groff-escape/-/groff-escape-2.0.1.tgz",
+      "integrity": "sha512-S0nG+mLFTu1buDKQsRlBtIxZU/dMvrdCURJg/zSLKpL333yi1Fs5bLUYk+v3pRYlc+qmHtukMAM2slB0AKFKAw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -8051,6 +7475,8 @@
     },
     "node_modules/handlebars": {
       "version": "4.7.8",
+      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz",
+      "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8071,6 +7497,8 @@
     },
     "node_modules/hard-rejection": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz",
+      "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8079,8 +7507,11 @@
     },
     "node_modules/has-bigints": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz",
+      "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -8089,17 +7520,22 @@
       }
     },
     "node_modules/has-flag": {
-      "version": "3.0.0",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
-        "node": ">=4"
+        "node": ">=8"
       }
     },
     "node_modules/has-property-descriptors": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
+      "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-define-property": "^1.0.0"
       },
@@ -8109,8 +7545,11 @@
     },
     "node_modules/has-proto": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz",
+      "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "dunder-proto": "^1.0.0"
       },
@@ -8123,6 +7562,8 @@
     },
     "node_modules/has-symbols": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
+      "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8134,6 +7575,8 @@
     },
     "node_modules/has-tostringtag": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
+      "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8148,6 +7591,8 @@
     },
     "node_modules/hasha": {
       "version": "5.2.2",
+      "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz",
+      "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8161,19 +7606,10 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/hasha/node_modules/is-stream": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=8"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
     "node_modules/hasha/node_modules/type-fest": {
       "version": "0.8.1",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
+      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -8182,6 +7618,8 @@
     },
     "node_modules/hasown": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
+      "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8193,6 +7631,8 @@
     },
     "node_modules/hast-util-from-parse5": {
       "version": "7.1.2",
+      "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz",
+      "integrity": "sha512-Nz7FfPBuljzsN3tCQ4kCBKqdNhQE2l0Tn+X1ubgKBPRoiDIu1mL08Cfw4k7q71+Duyaw7DXDN+VTAp4Vh3oCOw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8209,16 +7649,10 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-from-parse5/node_modules/@types/hast": {
-      "version": "2.3.10",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
     "node_modules/hast-util-parse-selector": {
       "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
+      "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8229,16 +7663,10 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-parse-selector/node_modules/@types/hast": {
-      "version": "2.3.10",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
     "node_modules/hast-util-raw": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.3.tgz",
+      "integrity": "sha512-RujVQfVsOrxzPOPSzZFiwofMArbQke6DJjnFfceiEbFh7S05CbPt0cYN+A5YeD3pso0JQk6O1aHBnx9+Pm2uqg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8259,42 +7687,17 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-raw/node_modules/@types/hast": {
-      "version": "2.3.10",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
-    "node_modules/hast-util-raw/node_modules/html-void-elements": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "funding": {
-        "type": "github",
-        "url": "https://github.com/sponsors/wooorm"
-      }
-    },
-    "node_modules/hast-util-raw/node_modules/parse5": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/hast-util-raw/node_modules/unist-util-position": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
+    "node_modules/hast-util-raw/node_modules/parse5": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz",
+      "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/hast-util-raw/node_modules/unist-util-visit": {
       "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
+      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8309,6 +7712,8 @@
     },
     "node_modules/hast-util-raw/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8320,8 +7725,34 @@
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/hast-util-to-html": {
+      "version": "8.0.4",
+      "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-8.0.4.tgz",
+      "integrity": "sha512-4tpQTUOr9BMjtYyNlt0P50mH7xj0Ks2xpo8M943Vykljf99HW6EzulIoJP1N3eKOSScEHzyzi9dm7/cn0RfGwA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/hast": "^2.0.0",
+        "@types/unist": "^2.0.0",
+        "ccount": "^2.0.0",
+        "comma-separated-tokens": "^2.0.0",
+        "hast-util-raw": "^7.0.0",
+        "hast-util-whitespace": "^2.0.0",
+        "html-void-elements": "^2.0.0",
+        "property-information": "^6.0.0",
+        "space-separated-tokens": "^2.0.0",
+        "stringify-entities": "^4.0.0",
+        "zwitch": "^2.0.4"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
     "node_modules/hast-util-to-parse5": {
       "version": "7.1.0",
+      "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.1.0.tgz",
+      "integrity": "sha512-YNRgAJkH2Jky5ySkIqFXTQiaqcAtJyVE+D5lkN6CdtOqrnkLfGYYrEcKuHOJZlp+MwjSwuD3fZuawI+sic/RBw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8337,16 +7768,21 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-to-parse5/node_modules/@types/hast": {
-      "version": "2.3.10",
+    "node_modules/hast-util-whitespace": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz",
+      "integrity": "sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==",
       "dev": true,
       "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
     "node_modules/hastscript": {
       "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
+      "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8361,16 +7797,10 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hastscript/node_modules/@types/hast": {
-      "version": "2.3.10",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2"
-      }
-    },
     "node_modules/he": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
+      "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -8390,6 +7820,8 @@
     },
     "node_modules/html-encoding-sniffer": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
+      "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8401,9 +7833,22 @@
     },
     "node_modules/html-escaper": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
+      "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
       "dev": true,
       "license": "MIT"
     },
+    "node_modules/html-void-elements": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz",
+      "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==",
+      "dev": true,
+      "license": "MIT",
+      "funding": {
+        "type": "github",
+        "url": "https://github.com/sponsors/wooorm"
+      }
+    },
     "node_modules/http-cache-semantics": {
       "version": "4.2.0",
       "inBundle": true,
@@ -8447,16 +7892,17 @@
     },
     "node_modules/ignore": {
       "version": "5.3.2",
+      "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
+      "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 4"
       }
     },
     "node_modules/ignore-walk": {
       "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-8.0.0.tgz",
-      "integrity": "sha512-FCeMZT4NiRQGh+YkeKMtWrOmBgWjHjMJ26WQWrRQyoyzqevdaGSakUaJW5xQYmjLlUVk2qUnCjYVBax9EKKg8A==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -8468,6 +7914,8 @@
     },
     "node_modules/import-fresh": {
       "version": "3.3.1",
+      "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
+      "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8483,6 +7931,8 @@
     },
     "node_modules/import-fresh/node_modules/resolve-from": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
+      "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8510,6 +7960,8 @@
     },
     "node_modules/indent-string": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz",
+      "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8518,6 +7970,9 @@
     },
     "node_modules/inflight": {
       "version": "1.0.6",
+      "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
+      "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
+      "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8527,6 +7982,8 @@
     },
     "node_modules/inherits": {
       "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
+      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -8557,8 +8014,11 @@
     },
     "node_modules/internal-slot": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz",
+      "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "hasown": "^2.0.2",
@@ -8570,8 +8030,6 @@
     },
     "node_modules/ip-address": {
       "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz",
-      "integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -8591,8 +8049,11 @@
     },
     "node_modules/is-array-buffer": {
       "version": "3.0.5",
+      "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz",
+      "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -8607,13 +8068,18 @@
     },
     "node_modules/is-arrayish": {
       "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
+      "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-async-function": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz",
+      "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "async-function": "^1.0.0",
         "call-bound": "^1.0.3",
@@ -8630,8 +8096,11 @@
     },
     "node_modules/is-bigint": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz",
+      "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "has-bigints": "^1.0.2"
       },
@@ -8644,6 +8113,8 @@
     },
     "node_modules/is-binary-path": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
+      "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8655,6 +8126,8 @@
     },
     "node_modules/is-binary-path/node_modules/binary-extensions": {
       "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
+      "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8666,8 +8139,11 @@
     },
     "node_modules/is-boolean-object": {
       "version": "1.2.2",
+      "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz",
+      "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-tostringtag": "^1.0.2"
@@ -8681,6 +8157,8 @@
     },
     "node_modules/is-buffer": {
       "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz",
+      "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==",
       "dev": true,
       "funding": [
         {
@@ -8703,8 +8181,11 @@
     },
     "node_modules/is-callable": {
       "version": "1.2.7",
+      "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
+      "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -8725,6 +8206,8 @@
     },
     "node_modules/is-core-module": {
       "version": "2.16.1",
+      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
+      "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8739,8 +8222,11 @@
     },
     "node_modules/is-data-view": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz",
+      "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "get-intrinsic": "^1.2.6",
@@ -8755,8 +8241,11 @@
     },
     "node_modules/is-date-object": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz",
+      "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "has-tostringtag": "^1.0.2"
@@ -8770,6 +8259,8 @@
     },
     "node_modules/is-extglob": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
+      "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8778,8 +8269,11 @@
     },
     "node_modules/is-finalizationregistry": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz",
+      "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3"
       },
@@ -8800,8 +8294,11 @@
     },
     "node_modules/is-generator-function": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
+      "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "get-proto": "^1.0.0",
@@ -8817,6 +8314,8 @@
     },
     "node_modules/is-glob": {
       "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
+      "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8828,13 +8327,18 @@
     },
     "node_modules/is-lambda": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz",
+      "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-map": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz",
+      "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -8844,8 +8348,11 @@
     },
     "node_modules/is-negative-zero": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz",
+      "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -8855,6 +8362,8 @@
     },
     "node_modules/is-number": {
       "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
+      "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8863,8 +8372,11 @@
     },
     "node_modules/is-number-object": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz",
+      "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-tostringtag": "^1.0.2"
@@ -8878,6 +8390,8 @@
     },
     "node_modules/is-obj": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
+      "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8886,14 +8400,19 @@
     },
     "node_modules/is-path-inside": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
+      "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=8"
       }
     },
     "node_modules/is-plain-obj": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz",
+      "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8905,6 +8424,8 @@
     },
     "node_modules/is-plain-object": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
+      "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8913,13 +8434,18 @@
     },
     "node_modules/is-potential-custom-element-name": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
+      "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-regex": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
+      "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "gopd": "^1.2.0",
@@ -8935,8 +8461,11 @@
     },
     "node_modules/is-set": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz",
+      "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -8946,8 +8475,11 @@
     },
     "node_modules/is-shared-array-buffer": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz",
+      "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3"
       },
@@ -8958,10 +8490,26 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/is-stream": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
+      "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=8"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/is-string": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz",
+      "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-tostringtag": "^1.0.2"
@@ -8975,8 +8523,11 @@
     },
     "node_modules/is-symbol": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz",
+      "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "has-symbols": "^1.1.0",
@@ -8991,6 +8542,8 @@
     },
     "node_modules/is-text-path": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-2.0.0.tgz",
+      "integrity": "sha512-+oDTluR6WEjdXEJMnC2z6A4FRwFoYuvShVVEGsS7ewc0UTi2QtAKMDJuL4BDEVt+5T7MjFo12RP8ghOM75oKJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9002,8 +8555,11 @@
     },
     "node_modules/is-typed-array": {
       "version": "1.1.15",
+      "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz",
+      "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "which-typed-array": "^1.1.16"
       },
@@ -9016,13 +8572,18 @@
     },
     "node_modules/is-typedarray": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
+      "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-weakmap": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz",
+      "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -9032,8 +8593,11 @@
     },
     "node_modules/is-weakref": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz",
+      "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3"
       },
@@ -9046,8 +8610,11 @@
     },
     "node_modules/is-weakset": {
       "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz",
+      "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "get-intrinsic": "^1.2.6"
@@ -9061,6 +8628,8 @@
     },
     "node_modules/is-windows": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz",
+      "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9069,13 +8638,14 @@
     },
     "node_modules/isarray": {
       "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
+      "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/isexe": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz",
-      "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==",
       "inBundle": true,
       "license": "ISC",
       "engines": {
@@ -9084,6 +8654,8 @@
     },
     "node_modules/istanbul-lib-coverage": {
       "version": "3.2.2",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
+      "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -9092,6 +8664,8 @@
     },
     "node_modules/istanbul-lib-hook": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz",
+      "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -9103,6 +8677,8 @@
     },
     "node_modules/istanbul-lib-instrument": {
       "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz",
+      "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -9117,6 +8693,8 @@
     },
     "node_modules/istanbul-lib-instrument/node_modules/semver": {
       "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -9125,6 +8703,8 @@
     },
     "node_modules/istanbul-lib-processinfo": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz",
+      "integrity": "sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9141,6 +8721,8 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9150,6 +8732,9 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9169,6 +8754,8 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9180,6 +8767,8 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/p-map": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
+      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9191,6 +8780,9 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/rimraf": {
       "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9205,6 +8797,8 @@
     },
     "node_modules/istanbul-lib-report": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz",
+      "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -9216,16 +8810,10 @@
         "node": ">=10"
       }
     },
-    "node_modules/istanbul-lib-report/node_modules/has-flag": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=8"
-      }
-    },
     "node_modules/istanbul-lib-report/node_modules/make-dir": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
+      "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9240,6 +8828,8 @@
     },
     "node_modules/istanbul-lib-report/node_modules/supports-color": {
       "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9251,6 +8841,8 @@
     },
     "node_modules/istanbul-lib-source-maps": {
       "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz",
+      "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -9278,8 +8870,6 @@
     },
     "node_modules/jackspeak": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.1.1.tgz",
-      "integrity": "sha512-zptv57P3GpL+O0I7VdMJNBZCu+BPHVQUk55Ft8/QCJjTVxrnJHuVuX/0Bl2A6/+2oyR/ZMEuFKwmzqqZ/U5nPQ==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -9304,11 +8894,15 @@
     },
     "node_modules/js-tokens": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
+      "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/js-yaml": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
+      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9318,17 +8912,98 @@
         "js-yaml": "bin/js-yaml.js"
       }
     },
+    "node_modules/jsdom": {
+      "version": "24.1.3",
+      "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.3.tgz",
+      "integrity": "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "cssstyle": "^4.0.1",
+        "data-urls": "^5.0.0",
+        "decimal.js": "^10.4.3",
+        "form-data": "^4.0.0",
+        "html-encoding-sniffer": "^4.0.0",
+        "http-proxy-agent": "^7.0.2",
+        "https-proxy-agent": "^7.0.5",
+        "is-potential-custom-element-name": "^1.0.1",
+        "nwsapi": "^2.2.12",
+        "parse5": "^7.1.2",
+        "rrweb-cssom": "^0.7.1",
+        "saxes": "^6.0.0",
+        "symbol-tree": "^3.2.4",
+        "tough-cookie": "^4.1.4",
+        "w3c-xmlserializer": "^5.0.0",
+        "webidl-conversions": "^7.0.0",
+        "whatwg-encoding": "^3.1.1",
+        "whatwg-mimetype": "^4.0.0",
+        "whatwg-url": "^14.0.0",
+        "ws": "^8.18.0",
+        "xml-name-validator": "^5.0.0"
+      },
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "canvas": "^2.11.2"
+      },
+      "peerDependenciesMeta": {
+        "canvas": {
+          "optional": true
+        }
+      }
+    },
+    "node_modules/jsdom/node_modules/tr46": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
+      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "punycode": "^2.3.1"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
+    "node_modules/jsdom/node_modules/webidl-conversions": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
+      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
+      "dev": true,
+      "license": "BSD-2-Clause",
+      "engines": {
+        "node": ">=12"
+      }
+    },
+    "node_modules/jsdom/node_modules/whatwg-url": {
+      "version": "14.2.0",
+      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
+      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "tr46": "^5.1.0",
+        "webidl-conversions": "^7.0.0"
+      },
+      "engines": {
+        "node": ">=18"
+      }
+    },
     "node_modules/jsep": {
       "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz",
+      "integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 10.16.0"
       }
     },
     "node_modules/jsesc": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
+      "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -9340,8 +9015,11 @@
     },
     "node_modules/json-buffer": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
+      "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/json-parse-even-better-errors": {
       "version": "4.0.0",
@@ -9353,13 +9031,18 @@
     },
     "node_modules/json-schema-traverse": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
+      "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/json-stable-stringify-without-jsonify": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
+      "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/json-stringify-nice": {
       "version": "1.1.4",
@@ -9370,11 +9053,15 @@
     },
     "node_modules/json-stringify-safe": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
+      "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/json5": {
       "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
+      "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -9394,6 +9081,8 @@
     },
     "node_modules/jsonpath-plus": {
       "version": "10.3.0",
+      "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-10.3.0.tgz",
+      "integrity": "sha512-8TNmfeTCk2Le33A3vRRwtuworG/L5RrgMvdjhKZxvyShO+mBu2fP50OWUjRLNtvw344DdDarFh9buFAZs5ujeA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9411,6 +9100,8 @@
     },
     "node_modules/JSONStream": {
       "version": "1.3.5",
+      "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
+      "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
       "dev": true,
       "license": "(MIT OR Apache-2.0)",
       "dependencies": {
@@ -9426,6 +9117,8 @@
     },
     "node_modules/just-deep-map-values": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/just-deep-map-values/-/just-deep-map-values-1.2.0.tgz",
+      "integrity": "sha512-4vpPBzHHis4UW/EbH5kHZn0gJvKP+EiMpbjD669ZSxdwx+EoAlQLMbLR08SEtydcq/MjDPPtwGiPo9R893iHVA==",
       "dev": true,
       "license": "MIT"
     },
@@ -9441,29 +9134,40 @@
     },
     "node_modules/just-extend": {
       "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz",
+      "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-omit": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/just-omit/-/just-omit-2.2.0.tgz",
+      "integrity": "sha512-Js7+HxDOGcB3RhI38Mird/RgyMf3t0DAJFda1QWqqlAKTa36NeSYIufJXxrZUbysFTRcTOFcoMCiFK5FwCoI7Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-safe-set": {
       "version": "4.2.1",
+      "resolved": "https://registry.npmjs.org/just-safe-set/-/just-safe-set-4.2.1.tgz",
+      "integrity": "sha512-La5CP41Ycv52+E4g7w1sRV8XXk7Sp8a/TwWQAYQKn6RsQz1FD4Z/rDRRmqV3wJznS1MDF3YxK7BCudX1J8FxLg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/keyv": {
       "version": "4.5.4",
+      "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
+      "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "json-buffer": "3.0.1"
       }
     },
     "node_modules/kind-of": {
       "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
+      "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9472,6 +9176,8 @@
     },
     "node_modules/kleur": {
       "version": "4.1.5",
+      "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
+      "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9480,6 +9186,8 @@
     },
     "node_modules/leven": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
+      "integrity": "sha512-nvVPLpIHUxCUoRLrFqTgSxXJ614d8AgQoWl7zPe/2VadE8+1dpU3LBhowRuBAcuwruWtOdD8oYC9jDNJjXDPyA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9488,8 +9196,11 @@
     },
     "node_modules/levn": {
       "version": "0.4.1",
+      "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
+      "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "prelude-ls": "^1.2.1",
         "type-check": "~0.4.0"
@@ -9540,6 +9251,8 @@
     },
     "node_modules/libtap": {
       "version": "1.4.1",
+      "resolved": "https://registry.npmjs.org/libtap/-/libtap-1.4.1.tgz",
+      "integrity": "sha512-S9v19shLTigoMn3c02V7LZ4t09zxmVP3r3RbEAwuHFYeKgF+ESFJxoQ0PMFKW4XdgQhcjVBEwDoopG6WROq/gw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9566,6 +9279,8 @@
     },
     "node_modules/libtap/node_modules/diff": {
       "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -9574,6 +9289,8 @@
     },
     "node_modules/libtap/node_modules/minipass": {
       "version": "3.3.6",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
+      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9585,16 +9302,22 @@
     },
     "node_modules/libtap/node_modules/signal-exit": {
       "version": "3.0.7",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/lines-and-columns": {
       "version": "1.2.4",
+      "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
+      "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/locate-path": {
       "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz",
+      "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9609,66 +9332,92 @@
     },
     "node_modules/lodash": {
       "version": "4.17.21",
+      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.camelcase": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+      "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.flattendeep": {
       "version": "4.4.0",
+      "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz",
+      "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.ismatch": {
       "version": "4.4.0",
+      "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz",
+      "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.isplainobject": {
       "version": "4.0.6",
+      "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
+      "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.kebabcase": {
       "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz",
+      "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.merge": {
       "version": "4.6.2",
+      "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
+      "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.mergewith": {
       "version": "4.6.2",
+      "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz",
+      "integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.snakecase": {
       "version": "4.1.1",
+      "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz",
+      "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.startcase": {
       "version": "4.4.0",
+      "resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
+      "integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.uniq": {
       "version": "4.5.0",
+      "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
+      "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.upperfirst": {
       "version": "4.3.1",
+      "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz",
+      "integrity": "sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/longest-streak": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
+      "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9686,6 +9435,8 @@
     },
     "node_modules/make-dir": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
+      "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9700,6 +9451,8 @@
     },
     "node_modules/make-dir/node_modules/semver": {
       "version": "6.3.1",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
+      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -9729,6 +9482,8 @@
     },
     "node_modules/map-obj": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
+      "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9740,6 +9495,8 @@
     },
     "node_modules/markdown-table": {
       "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
+      "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9749,14 +9506,65 @@
     },
     "node_modules/math-intrinsics": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
+      "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">= 0.4"
       }
     },
+    "node_modules/mdast-util-definitions": {
+      "version": "5.1.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz",
+      "integrity": "sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^3.0.0",
+        "@types/unist": "^2.0.0",
+        "unist-util-visit": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/mdast-util-definitions/node_modules/unist-util-visit": {
+      "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
+      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0",
+        "unist-util-visit-parents": "^5.1.1"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": {
+      "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
     "node_modules/mdast-util-find-and-replace": {
       "version": "2.2.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.2.2.tgz",
+      "integrity": "sha512-MTtdFRz/eMDHXzeK6W3dO7mXUlF82Gom4y0oOgvHhh/HXZAGvIQDUvQ0SuUx+j2tv44b8xTHOm8K/9OoRFnXKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9772,6 +9580,8 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9783,6 +9593,8 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9796,6 +9608,8 @@
     },
     "node_modules/mdast-util-from-markdown": {
       "version": "1.3.1",
+      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz",
+      "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9819,6 +9633,8 @@
     },
     "node_modules/mdast-util-gfm": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.2.tgz",
+      "integrity": "sha512-qvZ608nBppZ4icQlhQQIAdc6S3Ffj9RGmzwUKUWuEICFnd1LVkN3EktF7ZHAgfcEdvZB5owU9tQgt99e2TlLjg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9837,6 +9653,8 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal": {
       "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.3.tgz",
+      "integrity": "sha512-My8KJ57FYEy2W2LyNom4n3E7hKTuQk/0SES0u16tjA9Z3oFkF4RrC/hPAPgjlSpezsOvI8ObcXcElo92wn5IGA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9852,6 +9670,8 @@
     },
     "node_modules/mdast-util-gfm-footnote": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-1.0.2.tgz",
+      "integrity": "sha512-56D19KOGbE00uKVj3sgIykpwKL179QsVFwx/DCW0u/0+URsryacI4MAdNJl0dh+u2PSsD9FtxPFbHCzJ78qJFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9866,6 +9686,8 @@
     },
     "node_modules/mdast-util-gfm-strikethrough": {
       "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.3.tgz",
+      "integrity": "sha512-DAPhYzTYrRcXdMjUtUjKvW9z/FNAMTdU0ORyMcbmkwYNbKocDpdk+PX1L1dQgOID/+vVs1uBQ7ElrBQfZ0cuiQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9879,6 +9701,8 @@
     },
     "node_modules/mdast-util-gfm-table": {
       "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.7.tgz",
+      "integrity": "sha512-jjcpmNnQvrmN5Vx7y7lEc2iIOEytYv7rTvu+MeyAsSHTASGCCRA79Igg2uKssgOs1i1po8s3plW0sTu1wkkLGg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9894,6 +9718,8 @@
     },
     "node_modules/mdast-util-gfm-task-list-item": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.2.tgz",
+      "integrity": "sha512-PFTA1gzfp1B1UaiJVyhJZA1rm0+Tzn690frc/L8vNX1Jop4STZgOE6bxUhnzdVSB+vm2GU1tIsuQcA9bxTQpMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9907,10 +9733,64 @@
     },
     "node_modules/mdast-util-phrasing": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz",
+      "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^3.0.0",
+        "unist-util-is": "^5.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/mdast-util-to-hast": {
+      "version": "12.3.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz",
+      "integrity": "sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
+        "@types/hast": "^2.0.0",
         "@types/mdast": "^3.0.0",
+        "mdast-util-definitions": "^5.0.0",
+        "micromark-util-sanitize-uri": "^1.1.0",
+        "trim-lines": "^3.0.0",
+        "unist-util-generated": "^2.0.0",
+        "unist-util-position": "^4.0.0",
+        "unist-util-visit": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
+      "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
+      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0",
+        "unist-util-visit-parents": "^5.1.1"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
+      "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0",
         "unist-util-is": "^5.0.0"
       },
       "funding": {
@@ -9920,6 +9800,8 @@
     },
     "node_modules/mdast-util-to-markdown": {
       "version": "1.5.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz",
+      "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9939,6 +9821,8 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
       "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
+      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9953,6 +9837,8 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9966,6 +9852,8 @@
     },
     "node_modules/mdast-util-to-string": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz",
+      "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9978,6 +9866,8 @@
     },
     "node_modules/meow": {
       "version": "12.1.1",
+      "resolved": "https://registry.npmjs.org/meow/-/meow-12.1.1.tgz",
+      "integrity": "sha512-BhXM0Au22RwUneMPwSCnyhTOizdWoIEPU9sp0Aqa1PnDMR5Wv2FGXYDjuzJEIX+Eo2Rb8xuYe5jrnm5QowQFkw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9989,6 +9879,8 @@
     },
     "node_modules/micromark": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz",
+      "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==",
       "dev": true,
       "funding": [
         {
@@ -10023,6 +9915,8 @@
     },
     "node_modules/micromark-core-commonmark": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz",
+      "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==",
       "dev": true,
       "funding": [
         {
@@ -10056,6 +9950,8 @@
     },
     "node_modules/micromark-extension-gfm": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-2.0.3.tgz",
+      "integrity": "sha512-vb9OoHqrhCmbRidQv/2+Bc6pkP0FrtlhurxZofvOEy5o8RtuuvTq+RQ1Vw5ZDNrVraQZu3HixESqbG+0iKk/MQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10075,6 +9971,8 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal": {
       "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.5.tgz",
+      "integrity": "sha512-z3wJSLrDf8kRDOh2qBtoTRD53vJ+CWIyo7uyZuxf/JAbNJjiHsOpG1y5wxk8drtv3ETAHutCu6N3thkOOgueWg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10090,6 +9988,8 @@
     },
     "node_modules/micromark-extension-gfm-footnote": {
       "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.2.tgz",
+      "integrity": "sha512-Yxn7z7SxgyGWRNa4wzf8AhYYWNrwl5q1Z8ii+CSTTIqVkmGZF1CElX2JI8g5yGoM3GAman9/PVCUFUSJ0kB/8Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10109,6 +10009,8 @@
     },
     "node_modules/micromark-extension-gfm-strikethrough": {
       "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.7.tgz",
+      "integrity": "sha512-sX0FawVE1o3abGk3vRjOH50L5TTLr3b5XMqnP9YDRb34M0v5OoZhG+OHFz1OffZ9dlwgpTBKaT4XW/AsUVnSDw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10126,6 +10028,8 @@
     },
     "node_modules/micromark-extension-gfm-table": {
       "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.7.tgz",
+      "integrity": "sha512-3ZORTHtcSnMQEKtAOsBQ9/oHp9096pI/UvdPtN7ehKvrmZZ2+bbWhi0ln+I9drmwXMt5boocn6OlwQzNXeVeqw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10142,6 +10046,8 @@
     },
     "node_modules/micromark-extension-gfm-tagfilter": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.2.tgz",
+      "integrity": "sha512-5XWB9GbAUSHTn8VPU8/1DBXMuKYT5uOgEjJb8gN3mW0PNW5OPHpSdojoqf+iq1xo7vWzw/P8bAHY0n6ijpXF7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10154,6 +10060,8 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item": {
       "version": "1.0.5",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.5.tgz",
+      "integrity": "sha512-RMFXl2uQ0pNQy6Lun2YBYT9g9INXtWJULgbt01D/x8/6yJ2qpKyzdZD3pi6UIkzF++Da49xAelVKUeUMqd5eIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10170,6 +10078,8 @@
     },
     "node_modules/micromark-factory-destination": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz",
+      "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==",
       "dev": true,
       "funding": [
         {
@@ -10190,6 +10100,8 @@
     },
     "node_modules/micromark-factory-label": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz",
+      "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==",
       "dev": true,
       "funding": [
         {
@@ -10211,6 +10123,8 @@
     },
     "node_modules/micromark-factory-space": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz",
+      "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==",
       "dev": true,
       "funding": [
         {
@@ -10230,6 +10144,8 @@
     },
     "node_modules/micromark-factory-title": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz",
+      "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==",
       "dev": true,
       "funding": [
         {
@@ -10251,6 +10167,8 @@
     },
     "node_modules/micromark-factory-whitespace": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz",
+      "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==",
       "dev": true,
       "funding": [
         {
@@ -10272,6 +10190,8 @@
     },
     "node_modules/micromark-util-character": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz",
+      "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==",
       "dev": true,
       "funding": [
         {
@@ -10291,6 +10211,8 @@
     },
     "node_modules/micromark-util-chunked": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz",
+      "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==",
       "dev": true,
       "funding": [
         {
@@ -10309,6 +10231,8 @@
     },
     "node_modules/micromark-util-classify-character": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz",
+      "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==",
       "dev": true,
       "funding": [
         {
@@ -10329,6 +10253,8 @@
     },
     "node_modules/micromark-util-combine-extensions": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz",
+      "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==",
       "dev": true,
       "funding": [
         {
@@ -10348,6 +10274,8 @@
     },
     "node_modules/micromark-util-decode-numeric-character-reference": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz",
+      "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==",
       "dev": true,
       "funding": [
         {
@@ -10366,6 +10294,8 @@
     },
     "node_modules/micromark-util-decode-string": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz",
+      "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==",
       "dev": true,
       "funding": [
         {
@@ -10387,6 +10317,8 @@
     },
     "node_modules/micromark-util-encode": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz",
+      "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==",
       "dev": true,
       "funding": [
         {
@@ -10402,6 +10334,8 @@
     },
     "node_modules/micromark-util-html-tag-name": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz",
+      "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==",
       "dev": true,
       "funding": [
         {
@@ -10417,6 +10351,8 @@
     },
     "node_modules/micromark-util-normalize-identifier": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz",
+      "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==",
       "dev": true,
       "funding": [
         {
@@ -10435,6 +10371,8 @@
     },
     "node_modules/micromark-util-resolve-all": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz",
+      "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==",
       "dev": true,
       "funding": [
         {
@@ -10453,6 +10391,8 @@
     },
     "node_modules/micromark-util-sanitize-uri": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz",
+      "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==",
       "dev": true,
       "funding": [
         {
@@ -10473,6 +10413,8 @@
     },
     "node_modules/micromark-util-subtokenize": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz",
+      "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==",
       "dev": true,
       "funding": [
         {
@@ -10494,6 +10436,8 @@
     },
     "node_modules/micromark-util-symbol": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz",
+      "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==",
       "dev": true,
       "funding": [
         {
@@ -10509,6 +10453,8 @@
     },
     "node_modules/micromark-util-types": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz",
+      "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==",
       "dev": true,
       "funding": [
         {
@@ -10524,6 +10470,8 @@
     },
     "node_modules/mime-db": {
       "version": "1.52.0",
+      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
+      "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10532,6 +10480,8 @@
     },
     "node_modules/mime-types": {
       "version": "2.1.35",
+      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
+      "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10543,6 +10493,8 @@
     },
     "node_modules/min-indent": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz",
+      "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10551,6 +10503,8 @@
     },
     "node_modules/minify-registry-metadata": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/minify-registry-metadata/-/minify-registry-metadata-4.0.0.tgz",
+      "integrity": "sha512-dWVW3TmMejEOKNwQ09iPCyVf6+kgtG9E3806YZYY4URy5o1dSb1cAn8aUe5zOgvOyrVKLfIHt9fSsXGyhwVsgA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -10559,8 +10513,6 @@
     },
     "node_modules/minimatch": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.0.3.tgz",
-      "integrity": "sha512-IPZ167aShDZZUMdRk66cyQAW3qr0WzbHkPdMYa8bzZhlHhO3jALbKdxcaak7W9FfT2rZNpQuUu4Od7ILEpXSaw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -10575,6 +10527,8 @@
     },
     "node_modules/minimist": {
       "version": "1.2.8",
+      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
+      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -10583,6 +10537,8 @@
     },
     "node_modules/minimist-options": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz",
+      "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10596,6 +10552,8 @@
     },
     "node_modules/minimist-options/node_modules/is-plain-obj": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz",
+      "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10705,8 +10663,6 @@
     },
     "node_modules/minizlib": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.0.2.tgz",
-      "integrity": "sha512-oG62iEk+CYt5Xj2YqI5Xi9xWUeZhDI8jjQmC5oThVH5JGCTgIjr7ciJDzC7MBzYd//WvR1OTmP5Q38Q8ShQtVA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -10718,8 +10674,6 @@
     },
     "node_modules/mkdirp": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz",
-      "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==",
       "inBundle": true,
       "license": "MIT",
       "bin": {
@@ -10734,6 +10688,8 @@
     },
     "node_modules/modify-values": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz",
+      "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10742,6 +10698,8 @@
     },
     "node_modules/months": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/months/-/months-2.1.0.tgz",
+      "integrity": "sha512-2M9gdDB/uVt304/hJ3k2UIquJhOV5dRjp9BovHmZSINaRp7pdJuHXxOcuSjmJaKNomFyYyu0y3LBigdWiAUEmQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10750,11 +10708,15 @@
     },
     "node_modules/moo": {
       "version": "0.5.2",
+      "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.2.tgz",
+      "integrity": "sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q==",
       "dev": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/mri": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
+      "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10776,11 +10738,16 @@
     },
     "node_modules/natural-compare": {
       "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
+      "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/nearley": {
       "version": "2.20.1",
+      "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.20.1.tgz",
+      "integrity": "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10802,8 +10769,6 @@
     },
     "node_modules/negotiator": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz",
-      "integrity": "sha512-8Ofs/AUQh8MaEcrlq5xOX0CQ9ypTF5dl78mjlMNfOK08fzpgTHQRQPBxcPlEtIw0yRpws+Zo/3r+5WRby7u3Gg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -10812,11 +10777,15 @@
     },
     "node_modules/neo-async": {
       "version": "2.6.2",
+      "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
+      "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/nock": {
       "version": "13.5.6",
+      "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz",
+      "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10830,6 +10799,8 @@
     },
     "node_modules/node-fetch": {
       "version": "2.7.0",
+      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
+      "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10849,8 +10820,6 @@
     },
     "node_modules/node-gyp": {
       "version": "11.4.2",
-      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-11.4.2.tgz",
-      "integrity": "sha512-3gD+6zsrLQH7DyYOUIutaauuXrcyxeTPyQuZQCQoNPZMHMMS5m4y0xclNpvYzoK3VNzuyxT6eF4mkIL4WSZ1eQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -10874,8 +10843,6 @@
     },
     "node_modules/node-gyp/node_modules/@npmcli/agent": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-3.0.0.tgz",
-      "integrity": "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -10921,8 +10888,6 @@
     },
     "node_modules/node-gyp/node_modules/glob": {
       "version": "10.4.5",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
-      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -10942,8 +10907,6 @@
     },
     "node_modules/node-gyp/node_modules/jackspeak": {
       "version": "3.4.3",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
-      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -10983,9 +10946,7 @@
       }
     },
     "node_modules/node-gyp/node_modules/minimatch": {
-      "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
+      "version": "9.0.5",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -11000,8 +10961,6 @@
     },
     "node_modules/node-gyp/node_modules/path-scurry": {
       "version": "1.11.1",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
-      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -11041,6 +11000,8 @@
     },
     "node_modules/node-html-parser": {
       "version": "6.1.13",
+      "resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.13.tgz",
+      "integrity": "sha512-qIsTMOY4C/dAa5Q5vsobRpOOvPfC4pB61UVW2uSwZNUp0QU/jCekTal1vMmbO0DgdHeLUJpv/ARmDqErVxA3Sg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11050,6 +11011,8 @@
     },
     "node_modules/node-preload": {
       "version": "0.2.1",
+      "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz",
+      "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11095,6 +11058,8 @@
     },
     "node_modules/normalize-path": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
+      "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11122,8 +11087,6 @@
     },
     "node_modules/npm-install-checks": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
-      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -11220,6 +11183,8 @@
     },
     "node_modules/nth-check": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
+      "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -11238,6 +11203,8 @@
     },
     "node_modules/nyc": {
       "version": "15.1.0",
+      "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz",
+      "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11278,6 +11245,8 @@
     },
     "node_modules/nyc/node_modules/ansi-styles": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11292,6 +11261,8 @@
     },
     "node_modules/nyc/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11301,6 +11272,8 @@
     },
     "node_modules/nyc/node_modules/cliui": {
       "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
+      "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11311,6 +11284,8 @@
     },
     "node_modules/nyc/node_modules/find-up": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11323,6 +11298,8 @@
     },
     "node_modules/nyc/node_modules/foreground-child": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
+      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11335,6 +11312,9 @@
     },
     "node_modules/nyc/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11354,6 +11334,8 @@
     },
     "node_modules/nyc/node_modules/locate-path": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11365,6 +11347,8 @@
     },
     "node_modules/nyc/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11376,6 +11360,8 @@
     },
     "node_modules/nyc/node_modules/p-limit": {
       "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11390,6 +11376,8 @@
     },
     "node_modules/nyc/node_modules/p-locate": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11401,6 +11389,8 @@
     },
     "node_modules/nyc/node_modules/p-map": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
+      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11412,6 +11402,8 @@
     },
     "node_modules/nyc/node_modules/path-exists": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11420,6 +11412,9 @@
     },
     "node_modules/nyc/node_modules/rimraf": {
       "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11434,11 +11429,15 @@
     },
     "node_modules/nyc/node_modules/signal-exit": {
       "version": "3.0.7",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/nyc/node_modules/wrap-ansi": {
       "version": "6.2.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
+      "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11452,11 +11451,15 @@
     },
     "node_modules/nyc/node_modules/y18n": {
       "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
+      "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/nyc/node_modules/yargs": {
       "version": "15.4.1",
+      "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
+      "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11478,6 +11481,8 @@
     },
     "node_modules/nyc/node_modules/yargs-parser": {
       "version": "18.1.3",
+      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
+      "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11490,8 +11495,11 @@
     },
     "node_modules/object-inspect": {
       "version": "1.13.4",
+      "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
+      "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -11501,16 +11509,22 @@
     },
     "node_modules/object-keys": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
+      "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
     },
     "node_modules/object.assign": {
       "version": "4.1.7",
+      "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz",
+      "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -11528,8 +11542,11 @@
     },
     "node_modules/object.fromentries": {
       "version": "2.0.8",
+      "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz",
+      "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
@@ -11545,8 +11562,11 @@
     },
     "node_modules/object.groupby": {
       "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz",
+      "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
@@ -11558,8 +11578,11 @@
     },
     "node_modules/object.values": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz",
+      "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -11575,6 +11598,8 @@
     },
     "node_modules/once": {
       "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
+      "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11583,6 +11608,8 @@
     },
     "node_modules/opener": {
       "version": "1.5.2",
+      "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz",
+      "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==",
       "dev": true,
       "license": "(WTFPL OR MIT)",
       "bin": {
@@ -11591,8 +11618,11 @@
     },
     "node_modules/optionator": {
       "version": "0.9.4",
+      "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
+      "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "deep-is": "^0.1.3",
         "fast-levenshtein": "^2.0.6",
@@ -11607,8 +11637,11 @@
     },
     "node_modules/own-keys": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz",
+      "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "get-intrinsic": "^1.2.6",
         "object-keys": "^1.1.1",
@@ -11623,11 +11656,15 @@
     },
     "node_modules/own-or": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz",
+      "integrity": "sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/own-or-env": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.2.tgz",
+      "integrity": "sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11636,6 +11673,8 @@
     },
     "node_modules/p-limit": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz",
+      "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11650,6 +11689,8 @@
     },
     "node_modules/p-locate": {
       "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz",
+      "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11675,6 +11716,8 @@
     },
     "node_modules/p-try": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
+      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11683,6 +11726,8 @@
     },
     "node_modules/package-hash": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz",
+      "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11764,6 +11809,8 @@
     },
     "node_modules/parent-module": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
+      "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11788,16 +11835,22 @@
     },
     "node_modules/parse-diff": {
       "version": "0.11.1",
+      "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.1.tgz",
+      "integrity": "sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse-github-repo-url": {
       "version": "1.4.1",
+      "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz",
+      "integrity": "sha512-bSWyzBKqcSL4RrncTpGsEKoJ7H8a4L3++ifTAbTFeMHyq2wRV+42DGmQcHIrJIvdcacjIOxEuKH/w4tthF17gg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse-json": {
       "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
+      "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11815,11 +11868,15 @@
     },
     "node_modules/parse-json/node_modules/json-parse-even-better-errors": {
       "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
+      "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse5": {
       "version": "7.3.0",
+      "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
+      "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11831,6 +11888,8 @@
     },
     "node_modules/parse5/node_modules/entities": {
       "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
+      "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -11842,6 +11901,8 @@
     },
     "node_modules/path-exists": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz",
+      "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11850,6 +11911,8 @@
     },
     "node_modules/path-is-absolute": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
+      "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11866,13 +11929,13 @@
     },
     "node_modules/path-parse": {
       "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
+      "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/path-scurry": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.0.tgz",
-      "integrity": "sha512-ypGJsmGtdXUOeM5u93TyeIEfEhM6s+ljAhrk5vAvSx8uyY/02OvrZnA0YNGUrPXfpJMgI1ODd3nwz8Npx4O4cg==",
       "inBundle": true,
       "license": "BlueOak-1.0.0",
       "dependencies": {
@@ -11888,11 +11951,15 @@
     },
     "node_modules/picocolors": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
+      "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/picomatch": {
       "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11904,6 +11971,8 @@
     },
     "node_modules/pkg-dir": {
       "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
+      "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11915,6 +11984,8 @@
     },
     "node_modules/pkg-dir/node_modules/find-up": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11927,6 +11998,8 @@
     },
     "node_modules/pkg-dir/node_modules/locate-path": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11938,6 +12011,8 @@
     },
     "node_modules/pkg-dir/node_modules/p-limit": {
       "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11952,6 +12027,8 @@
     },
     "node_modules/pkg-dir/node_modules/p-locate": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11963,6 +12040,8 @@
     },
     "node_modules/pkg-dir/node_modules/path-exists": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11971,13 +12050,18 @@
     },
     "node_modules/platform": {
       "version": "1.3.6",
+      "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz",
+      "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/possible-typed-array-names": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz",
+      "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -11995,8 +12079,11 @@
     },
     "node_modules/prelude-ls": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
+      "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.8.0"
       }
@@ -12011,6 +12098,8 @@
     },
     "node_modules/process-on-spawn": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.1.0.tgz",
+      "integrity": "sha512-JOnOPQ/8TZgjs1JIH/m9ni7FfimjNa/PRx7y/Wb5qdItsnhO0jE4AT7fC0HjC28DUQWDr50dwSYZLdRMlqDq3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12043,6 +12132,8 @@
     },
     "node_modules/promise-inflight": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz",
+      "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==",
       "dev": true,
       "license": "ISC"
     },
@@ -12071,6 +12162,8 @@
     },
     "node_modules/propagate": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz",
+      "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12079,6 +12172,8 @@
     },
     "node_modules/property-information": {
       "version": "6.5.0",
+      "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz",
+      "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -12088,6 +12183,8 @@
     },
     "node_modules/proxy": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/proxy/-/proxy-2.2.0.tgz",
+      "integrity": "sha512-nYclNIWj9UpXbVJ3W5EXIYiGR88AKZoGt90kyh3zoOBY5QW+7bbtPvMFgKGD4VJmpS3UXQXtlGXSg3lRNLOFLg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12104,6 +12201,8 @@
     },
     "node_modules/psl": {
       "version": "1.15.0",
+      "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz",
+      "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12115,6 +12214,8 @@
     },
     "node_modules/punycode": {
       "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
+      "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12130,11 +12231,15 @@
     },
     "node_modules/querystringify": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
+      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
+      "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
       "dev": true,
       "funding": [
         {
@@ -12150,10 +12255,13 @@
           "url": "https://feross.org/support"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "peer": true
     },
     "node_modules/quick-lru": {
       "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz",
+      "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12162,11 +12270,15 @@
     },
     "node_modules/railroad-diagrams": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz",
+      "integrity": "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A==",
       "dev": true,
       "license": "CC0-1.0"
     },
     "node_modules/randexp": {
       "version": "0.4.6",
+      "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.4.6.tgz",
+      "integrity": "sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12195,8 +12307,44 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/read-package-json-fast": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz",
+      "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==",
+      "dev": true,
+      "license": "ISC",
+      "dependencies": {
+        "json-parse-even-better-errors": "^3.0.0",
+        "npm-normalize-package-bin": "^3.0.0"
+      },
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/read-package-json-fast/node_modules/json-parse-even-better-errors": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
+      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
+    "node_modules/read-package-json-fast/node_modules/npm-normalize-package-bin": {
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
+      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
+      "dev": true,
+      "license": "ISC",
+      "engines": {
+        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      }
+    },
     "node_modules/read-pkg": {
       "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz",
+      "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12211,6 +12359,8 @@
     },
     "node_modules/read-pkg-up": {
       "version": "7.0.1",
+      "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz",
+      "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12227,6 +12377,8 @@
     },
     "node_modules/read-pkg-up/node_modules/find-up": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12239,6 +12391,8 @@
     },
     "node_modules/read-pkg-up/node_modules/locate-path": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12250,6 +12404,8 @@
     },
     "node_modules/read-pkg-up/node_modules/p-limit": {
       "version": "2.3.0",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
+      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12264,6 +12420,8 @@
     },
     "node_modules/read-pkg-up/node_modules/p-locate": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12275,6 +12433,8 @@
     },
     "node_modules/read-pkg-up/node_modules/path-exists": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12283,6 +12443,8 @@
     },
     "node_modules/read-pkg-up/node_modules/type-fest": {
       "version": "0.8.1",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
+      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -12291,11 +12453,15 @@
     },
     "node_modules/read-pkg/node_modules/hosted-git-info": {
       "version": "2.8.9",
+      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
+      "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/read-pkg/node_modules/normalize-package-data": {
       "version": "2.5.0",
+      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
+      "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -12307,6 +12473,8 @@
     },
     "node_modules/read-pkg/node_modules/semver": {
       "version": "5.7.2",
+      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
+      "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -12315,6 +12483,8 @@
     },
     "node_modules/read-pkg/node_modules/type-fest": {
       "version": "0.6.0",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz",
+      "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -12323,6 +12493,8 @@
     },
     "node_modules/readdirp": {
       "version": "3.6.0",
+      "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
+      "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12334,6 +12506,8 @@
     },
     "node_modules/redent": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz",
+      "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12346,8 +12520,11 @@
     },
     "node_modules/reflect.getprototypeof": {
       "version": "1.0.10",
+      "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz",
+      "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -12367,8 +12544,11 @@
     },
     "node_modules/regexp.prototype.flags": {
       "version": "1.5.4",
+      "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz",
+      "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -12386,8 +12566,11 @@
     },
     "node_modules/regexpp": {
       "version": "3.2.0",
+      "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
+      "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=8"
       },
@@ -12395,8 +12578,26 @@
         "url": "https://github.com/sponsors/mysticatea"
       }
     },
+    "node_modules/rehype-stringify": {
+      "version": "9.0.4",
+      "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-9.0.4.tgz",
+      "integrity": "sha512-Uk5xu1YKdqobe5XpSskwPvo1XeHUUucWEQSl8hTrXt5selvca1e8K1EZ37E6YoZ4BT8BCqCdVfQW7OfHfthtVQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/hast": "^2.0.0",
+        "hast-util-to-html": "^8.0.0",
+        "unified": "^10.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
     "node_modules/release-please": {
       "version": "16.15.0",
+      "resolved": "https://registry.npmjs.org/release-please/-/release-please-16.15.0.tgz",
+      "integrity": "sha512-C55PsUOMzAbPSrdqF/KKAqhaYVRGlarNNWgW/DyAsg15U4g/TkxXVpEZqAV1o38CoEoKhssnKTGnb5/eT4/DUw==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -12441,6 +12642,8 @@
     },
     "node_modules/release-please/node_modules/ansi-styles": {
       "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12455,6 +12658,8 @@
     },
     "node_modules/release-please/node_modules/chalk": {
       "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12470,6 +12675,8 @@
     },
     "node_modules/release-please/node_modules/conventional-changelog-conventionalcommits": {
       "version": "6.1.0",
+      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-6.1.0.tgz",
+      "integrity": "sha512-3cS3GEtR78zTfMzk0AizXKKIdN4OvSh7ibNz6/DPbhWWQu7LqE/8+/GqSodV+sywUR2gpJAdP/1JFf4XtN7Zpw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -12479,16 +12686,10 @@
         "node": ">=14"
       }
     },
-    "node_modules/release-please/node_modules/has-flag": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=8"
-      }
-    },
     "node_modules/release-please/node_modules/supports-color": {
       "version": "7.2.0",
+      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12500,6 +12701,8 @@
     },
     "node_modules/release-please/node_modules/type-fest": {
       "version": "3.13.1",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz",
+      "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -12511,6 +12714,8 @@
     },
     "node_modules/release-please/node_modules/typescript": {
       "version": "4.9.5",
+      "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
+      "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -12523,6 +12728,8 @@
     },
     "node_modules/release-zalgo": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz",
+      "integrity": "sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -12534,6 +12741,8 @@
     },
     "node_modules/remark": {
       "version": "14.0.3",
+      "resolved": "https://registry.npmjs.org/remark/-/remark-14.0.3.tgz",
+      "integrity": "sha512-bfmJW1dmR2LvaMJuAnE88pZP9DktIFYXazkTfOIKZzi3Knk9lT0roItIA24ydOucI3bV/g/tXBA6hzqq3FV9Ew==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12549,6 +12758,8 @@
     },
     "node_modules/remark-gfm": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-3.0.1.tgz",
+      "integrity": "sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12564,6 +12775,8 @@
     },
     "node_modules/remark-github": {
       "version": "11.2.4",
+      "resolved": "https://registry.npmjs.org/remark-github/-/remark-github-11.2.4.tgz",
+      "integrity": "sha512-GJjWFpwqdrHHhPWqMbb8+lqFLiHQ9pCzUmXmRrhMFXGpYov5n2ljsZzuWgXlfzArfQYkiKIZczA2I8IHYMHqCA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12580,6 +12793,8 @@
     },
     "node_modules/remark-github/node_modules/unist-util-visit": {
       "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
+      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12594,6 +12809,62 @@
     },
     "node_modules/remark-github/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man": {
+      "version": "8.0.1",
+      "resolved": "https://registry.npmjs.org/remark-man/-/remark-man-8.0.1.tgz",
+      "integrity": "sha512-F/BbNaEF/QiZXoMiC43/qb8kAgGBKIS3yA+Br4CObgyoD+9Bioq1v+LmrLVbkwy9BErircQQ4J8yR2vFD34fBA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^3.0.0",
+        "@types/unist": "^2.0.0",
+        "github-slugger": "^1.0.0",
+        "groff-escape": "^2.0.0",
+        "mdast-util-definitions": "^5.0.0",
+        "mdast-util-to-string": "^3.0.0",
+        "months": "^2.0.0",
+        "unified": "^10.0.0",
+        "unist-util-visit": "^4.0.0",
+        "zwitch": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/unist-util-visit": {
+      "version": "4.1.2",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
+      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0",
+        "unist-util-visit-parents": "^5.1.1"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/unist-util-visit-parents": {
+      "version": "5.1.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
+      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12607,6 +12878,8 @@
     },
     "node_modules/remark-parse": {
       "version": "10.0.2",
+      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.2.tgz",
+      "integrity": "sha512-3ydxgHa/ZQzG8LvC7jTXccARYDcRld3VfcgIIFs7bI6vbRSxJJmzgLEIIoYKyrfhaY+ujuWaf/PJiMZXoiCXgw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12619,8 +12892,27 @@
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/remark-rehype": {
+      "version": "10.1.0",
+      "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz",
+      "integrity": "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/hast": "^2.0.0",
+        "@types/mdast": "^3.0.0",
+        "mdast-util-to-hast": "^12.1.0",
+        "unified": "^10.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
     "node_modules/remark-stringify": {
       "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-10.0.3.tgz",
+      "integrity": "sha512-koyOzCMYoUHudypbj4XpnAKFbkddRMYZHwghnxd7ue5210WzGw6kOBwauJTRUMq16jsovXx8dYNvSSWP89kZ3A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12635,6 +12927,8 @@
     },
     "node_modules/require-directory": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+      "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12643,6 +12937,8 @@
     },
     "node_modules/require-from-string": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
+      "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12651,6 +12947,8 @@
     },
     "node_modules/require-inject": {
       "version": "1.4.4",
+      "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.4.tgz",
+      "integrity": "sha512-5Y5ctRN84+I4iOZO61gm+48tgP/6Hcd3VZydkaEM3MCuOvnHRsTJYQBOc01faI/Z9at5nsCAJVHhlfPA6Pc0Og==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -12659,16 +12957,22 @@
     },
     "node_modules/require-main-filename": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
+      "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/requires-port": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
+      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/resolve": {
       "version": "1.22.10",
+      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
+      "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12688,6 +12992,8 @@
     },
     "node_modules/resolve-from": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
+      "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12696,6 +13002,8 @@
     },
     "node_modules/ret": {
       "version": "0.1.15",
+      "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz",
+      "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12712,8 +13020,11 @@
     },
     "node_modules/reusify": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
+      "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "iojs": ">=1.0.0",
         "node": ">=0.10.0"
@@ -12741,11 +13052,15 @@
     },
     "node_modules/rrweb-cssom": {
       "version": "0.7.1",
+      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.7.1.tgz",
+      "integrity": "sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/run-parallel": {
       "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
+      "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
       "dev": true,
       "funding": [
         {
@@ -12762,12 +13077,15 @@
         }
       ],
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "queue-microtask": "^1.2.2"
       }
     },
     "node_modules/sade": {
       "version": "1.8.1",
+      "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz",
+      "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12779,8 +13097,11 @@
     },
     "node_modules/safe-array-concat": {
       "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz",
+      "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.2",
@@ -12797,8 +13118,11 @@
     },
     "node_modules/safe-push-apply": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz",
+      "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "isarray": "^2.0.5"
@@ -12812,8 +13136,11 @@
     },
     "node_modules/safe-regex-test": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz",
+      "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -12834,6 +13161,8 @@
     },
     "node_modules/saxes": {
       "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz",
+      "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -12845,6 +13174,8 @@
     },
     "node_modules/schemes": {
       "version": "1.4.0",
+      "resolved": "https://registry.npmjs.org/schemes/-/schemes-1.4.0.tgz",
+      "integrity": "sha512-ImFy9FbCsQlVgnE3TCWmLPCFnVzx0lHL/l+umHplDqAKd0dzFpnS6lFZIpagBlYhKwzVmlV36ec0Y1XTu8JBAQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12864,13 +13195,18 @@
     },
     "node_modules/set-blocking": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
+      "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/set-function-length": {
       "version": "1.2.2",
+      "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
+      "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "define-data-property": "^1.1.4",
         "es-errors": "^1.3.0",
@@ -12885,8 +13221,11 @@
     },
     "node_modules/set-function-name": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz",
+      "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "define-data-property": "^1.1.4",
         "es-errors": "^1.3.0",
@@ -12899,8 +13238,11 @@
     },
     "node_modules/set-proto": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz",
+      "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "dunder-proto": "^1.0.1",
         "es-errors": "^1.3.0",
@@ -12931,8 +13273,11 @@
     },
     "node_modules/side-channel": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
+      "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "object-inspect": "^1.13.3",
@@ -12949,8 +13294,11 @@
     },
     "node_modules/side-channel-list": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
+      "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "object-inspect": "^1.13.3"
@@ -12964,8 +13312,11 @@
     },
     "node_modules/side-channel-map": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
+      "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -12981,8 +13332,11 @@
     },
     "node_modules/side-channel-weakmap": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
+      "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -13035,6 +13389,8 @@
     },
     "node_modules/smtp-address-parser": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/smtp-address-parser/-/smtp-address-parser-1.1.0.tgz",
+      "integrity": "sha512-Gz11jbNU0plrReU9Sj7fmshSBxxJ9ShdD2q4ktHIHo/rpTH6lFyQoYHYKINPJtPe8aHFnsbtW46Ls0tCCBsIZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13046,8 +13402,6 @@
     },
     "node_modules/socks": {
       "version": "2.8.7",
-      "resolved": "https://registry.npmjs.org/socks/-/socks-2.8.7.tgz",
-      "integrity": "sha512-HLpt+uLy/pxB+bum/9DzAgiKS8CX1EvbWxI4zlmgGCExImLdiad2iCwXT5Z4c9c3Eq8rP2318mPW2c+QbtjK8A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -13074,6 +13428,8 @@
     },
     "node_modules/source-map": {
       "version": "0.6.1",
+      "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
+      "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -13082,6 +13438,8 @@
     },
     "node_modules/source-map-support": {
       "version": "0.5.21",
+      "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
+      "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13091,6 +13449,8 @@
     },
     "node_modules/space-separated-tokens": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz",
+      "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -13100,6 +13460,8 @@
     },
     "node_modules/spawk": {
       "version": "1.8.2",
+      "resolved": "https://registry.npmjs.org/spawk/-/spawk-1.8.2.tgz",
+      "integrity": "sha512-3Dl+ekoMHRvXo+Xc3EUSnjySawnc9SpkaBuA3kU2wYiuSEAIYB4b5cGjvmq5olexBsO/fCLZUKHjSMQlzSU4Ww==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13108,6 +13470,8 @@
     },
     "node_modules/spawn-wrap": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz",
+      "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13124,6 +13488,8 @@
     },
     "node_modules/spawn-wrap/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13133,6 +13499,8 @@
     },
     "node_modules/spawn-wrap/node_modules/foreground-child": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
+      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13145,6 +13513,9 @@
     },
     "node_modules/spawn-wrap/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13171,6 +13542,8 @@
     },
     "node_modules/spawn-wrap/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13182,6 +13555,9 @@
     },
     "node_modules/spawn-wrap/node_modules/rimraf": {
       "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
+      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13196,11 +13572,15 @@
     },
     "node_modules/spawn-wrap/node_modules/signal-exit": {
       "version": "3.0.7",
+      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
+      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/spawn-wrap/node_modules/which": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13247,13 +13627,13 @@
     },
     "node_modules/spdx-license-ids": {
       "version": "3.0.22",
-      "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.22.tgz",
-      "integrity": "sha512-4PRT4nh1EImPbt2jASOKHX7PB7I+e4IWNLvkKFDxNhJlfjbYlleYQh285Z/3mPTHSAK/AvdMmw5BNNuYH8ShgQ==",
       "inBundle": true,
       "license": "CC0-1.0"
     },
     "node_modules/split": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
+      "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13265,12 +13645,21 @@
     },
     "node_modules/split2": {
       "version": "4.2.0",
+      "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
+      "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
       "dev": true,
       "license": "ISC",
       "engines": {
         "node": ">= 10.x"
       }
     },
+    "node_modules/sprintf-js": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
+      "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
+      "dev": true,
+      "license": "BSD-3-Clause"
+    },
     "node_modules/ssri": {
       "version": "12.0.0",
       "inBundle": true,
@@ -13284,6 +13673,8 @@
     },
     "node_modules/stack-utils": {
       "version": "2.0.6",
+      "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
+      "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13295,6 +13686,8 @@
     },
     "node_modules/stack-utils/node_modules/escape-string-regexp": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
+      "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13303,8 +13696,11 @@
     },
     "node_modules/stop-iteration-iterator": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz",
+      "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "internal-slot": "^1.1.0"
@@ -13315,6 +13711,8 @@
     },
     "node_modules/streamx": {
       "version": "2.22.1",
+      "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.1.tgz",
+      "integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13341,8 +13739,6 @@
     "node_modules/string-width-cjs": {
       "name": "string-width",
       "version": "4.2.3",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
-      "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -13356,8 +13752,11 @@
     },
     "node_modules/string.prototype.trim": {
       "version": "1.2.10",
+      "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz",
+      "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.2",
@@ -13376,8 +13775,11 @@
     },
     "node_modules/string.prototype.trimend": {
       "version": "1.0.9",
+      "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz",
+      "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.2",
@@ -13393,8 +13795,11 @@
     },
     "node_modules/string.prototype.trimstart": {
       "version": "1.0.8",
+      "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz",
+      "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
@@ -13409,6 +13814,8 @@
     },
     "node_modules/stringify-entities": {
       "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz",
+      "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13434,8 +13841,6 @@
     "node_modules/strip-ansi-cjs": {
       "name": "strip-ansi",
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
-      "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -13447,6 +13852,8 @@
     },
     "node_modules/strip-bom": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz",
+      "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13455,6 +13862,8 @@
     },
     "node_modules/strip-indent": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz",
+      "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13466,8 +13875,11 @@
     },
     "node_modules/strip-json-comments": {
       "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
+      "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=8"
       },
@@ -13477,8 +13889,6 @@
     },
     "node_modules/supports-color": {
       "version": "10.2.2",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-10.2.2.tgz",
-      "integrity": "sha512-SS+jx45GF1QjgEXQx4NJZV9ImqmO2NPz5FNsIHrsDjh2YsHnawpan7SNQ1o8NuhrbHZy9AZhIoCUiCeaW/C80g==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -13490,6 +13900,8 @@
     },
     "node_modules/supports-preserve-symlinks-flag": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
+      "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13501,11 +13913,15 @@
     },
     "node_modules/symbol-tree": {
       "version": "3.2.4",
+      "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
+      "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tap": {
       "version": "16.3.10",
+      "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.10.tgz",
+      "integrity": "sha512-q5Am+PpGHS6JSjk/Zn4bCRBihmZVM15v/MYXUy60wenw5HDe7pVrevLCEoMEz7tuw6jaPOJJqni1y8apN23IGw==",
       "bundleDependencies": [
         "ink",
         "treport",
@@ -13575,6 +13991,8 @@
     },
     "node_modules/tap-mocha-reporter": {
       "version": "5.0.4",
+      "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.4.tgz",
+      "integrity": "sha512-J+YMO8B7lq1O6Zxd/jeuG27vJ+Y4tLiRMKPSb7KR6FVh86k3Rq1TwYc2GKPyIjCbzzdMdReh3Vfz9L5cg1Z2Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13596,6 +14014,8 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13605,6 +14025,8 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/diff": {
       "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -13613,6 +14035,8 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/escape-string-regexp": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
+      "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13621,6 +14045,9 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13640,6 +14067,8 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13651,6 +14080,8 @@
     },
     "node_modules/tap-parser": {
       "version": "11.0.2",
+      "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-11.0.2.tgz",
+      "integrity": "sha512-6qGlC956rcORw+fg7Fv1iCRAY8/bU9UabUAhs3mXRH6eRmVZcNPLheSXCYaVaYeSwx5xa/1HXZb1537YSvwDZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13667,6 +14098,8 @@
     },
     "node_modules/tap-parser/node_modules/minipass": {
       "version": "3.3.6",
+      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
+      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13678,6 +14111,8 @@
     },
     "node_modules/tap-yaml": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.2.tgz",
+      "integrity": "sha512-GegASpuqBnRNdT1U+yuUPZ8rEU64pL35WPBpCISWwff4dErS2/438barz7WFJl4Nzh3Y05tfPidZnH+GaV1wMg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13686,6 +14121,8 @@
     },
     "node_modules/tap-yaml/node_modules/yaml": {
       "version": "1.10.2",
+      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
+      "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -13732,7 +14169,6 @@
       "dev": true,
       "inBundle": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@ampproject/remapping": "^2.2.0",
         "@babel/code-frame": "^7.23.5",
@@ -14189,7 +14625,6 @@
       "dev": true,
       "inBundle": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@types/prop-types": "*",
         "@types/scheduler": "*",
@@ -14318,7 +14753,6 @@
       ],
       "inBundle": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "caniuse-lite": "^1.0.30001565",
         "electron-to-chromium": "^1.4.601",
@@ -14460,6 +14894,8 @@
     },
     "node_modules/tap/node_modules/cliui": {
       "version": "7.0.4",
+      "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
+      "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14468,52 +14904,6 @@
         "wrap-ansi": "^7.0.0"
       }
     },
-    "node_modules/tap/node_modules/cliui/node_modules/ansi-styles": {
-      "version": "4.3.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "color-convert": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=8"
-      },
-      "funding": {
-        "url": "https://github.com/chalk/ansi-styles?sponsor=1"
-      }
-    },
-    "node_modules/tap/node_modules/cliui/node_modules/color-convert": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "color-name": "~1.1.4"
-      },
-      "engines": {
-        "node": ">=7.0.0"
-      }
-    },
-    "node_modules/tap/node_modules/cliui/node_modules/color-name": {
-      "version": "1.1.4",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/tap/node_modules/cliui/node_modules/wrap-ansi": {
-      "version": "7.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "ansi-styles": "^4.0.0",
-        "string-width": "^4.1.0",
-        "strip-ansi": "^6.0.0"
-      },
-      "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
-      }
-    },
     "node_modules/tap/node_modules/code-excerpt": {
       "version": "3.0.0",
       "dev": true,
@@ -14672,6 +15062,8 @@
     },
     "node_modules/tap/node_modules/foreground-child": {
       "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
+      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14903,6 +15295,8 @@
     },
     "node_modules/tap/node_modules/jackspeak": {
       "version": "1.4.2",
+      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz",
+      "integrity": "sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15188,7 +15582,6 @@
       "dev": true,
       "inBundle": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "loose-envify": "^1.1.0",
         "object-assign": "^4.1.1"
@@ -15584,6 +15977,8 @@
     },
     "node_modules/tap/node_modules/which": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
+      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15727,6 +16122,8 @@
     },
     "node_modules/tar-stream": {
       "version": "3.1.7",
+      "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz",
+      "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15767,8 +16164,6 @@
     },
     "node_modules/tar/node_modules/minizlib": {
       "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
-      "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15781,8 +16176,6 @@
     },
     "node_modules/tar/node_modules/minizlib/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
@@ -15794,8 +16187,6 @@
     },
     "node_modules/tar/node_modules/mkdirp": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
-      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
       "inBundle": true,
       "license": "MIT",
       "bin": {
@@ -15807,6 +16198,8 @@
     },
     "node_modules/tcompare": {
       "version": "5.0.7",
+      "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.7.tgz",
+      "integrity": "sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15818,6 +16211,8 @@
     },
     "node_modules/tcompare/node_modules/diff": {
       "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
+      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -15826,6 +16221,8 @@
     },
     "node_modules/test-exclude": {
       "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
+      "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15839,6 +16236,8 @@
     },
     "node_modules/test-exclude/node_modules/brace-expansion": {
       "version": "1.1.12",
+      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
+      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15848,6 +16247,9 @@
     },
     "node_modules/test-exclude/node_modules/glob": {
       "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
+      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15867,6 +16269,8 @@
     },
     "node_modules/test-exclude/node_modules/minimatch": {
       "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15878,6 +16282,8 @@
     },
     "node_modules/text-decoder": {
       "version": "1.2.3",
+      "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz",
+      "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -15886,6 +16292,8 @@
     },
     "node_modules/text-extensions": {
       "version": "2.4.0",
+      "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-2.4.0.tgz",
+      "integrity": "sha512-te/NtwBwfiNRLf9Ijqx3T0nlqZiQ2XrrtBvu+cLL8ZRrGkO0NHTug8MYFKyoSrv/sHTaSKfilUkizV6XhxMJ3g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15902,6 +16310,8 @@
     },
     "node_modules/through": {
       "version": "2.3.8",
+      "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
+      "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==",
       "dev": true,
       "license": "MIT"
     },
@@ -15912,13 +16322,13 @@
     },
     "node_modules/tinyexec": {
       "version": "1.0.1",
+      "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz",
+      "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tinyglobby": {
       "version": "0.2.15",
-      "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz",
-      "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -15956,7 +16366,6 @@
       "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
       "inBundle": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=12"
       },
@@ -15966,6 +16375,8 @@
     },
     "node_modules/to-regex-range": {
       "version": "5.0.1",
+      "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
+      "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15975,8 +16386,26 @@
         "node": ">=8.0"
       }
     },
+    "node_modules/tough-cookie": {
+      "version": "4.1.4",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz",
+      "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==",
+      "dev": true,
+      "license": "BSD-3-Clause",
+      "dependencies": {
+        "psl": "^1.1.33",
+        "punycode": "^2.1.1",
+        "universalify": "^0.2.0",
+        "url-parse": "^1.5.3"
+      },
+      "engines": {
+        "node": ">=6"
+      }
+    },
     "node_modules/tr46": {
       "version": "0.0.3",
+      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
+      "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
       "dev": true,
       "license": "MIT"
     },
@@ -15990,6 +16419,8 @@
     },
     "node_modules/trim-lines": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
+      "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -15999,6 +16430,8 @@
     },
     "node_modules/trim-newlines": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz",
+      "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16007,6 +16440,8 @@
     },
     "node_modules/trivial-deferred": {
       "version": "1.1.2",
+      "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.1.2.tgz",
+      "integrity": "sha512-vDPiDBC3hyP6O4JrJYMImW3nl3c03Tsj9fEXc7Qc/XKa1O7gf5ZtFfIR/E0dun9SnDHdwjna1Z2rSzYgqpxh/g==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -16015,6 +16450,8 @@
     },
     "node_modules/trough": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz",
+      "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16024,8 +16461,11 @@
     },
     "node_modules/tsconfig-paths": {
       "version": "3.15.0",
+      "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",
+      "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@types/json5": "^0.0.29",
         "json5": "^1.0.2",
@@ -16035,8 +16475,11 @@
     },
     "node_modules/tsconfig-paths/node_modules/json5": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
+      "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "minimist": "^1.2.0"
       },
@@ -16046,8 +16489,11 @@
     },
     "node_modules/tsconfig-paths/node_modules/strip-bom": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
+      "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=4"
       }
@@ -16067,6 +16513,8 @@
     },
     "node_modules/tunnel": {
       "version": "0.0.6",
+      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
+      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16075,8 +16523,11 @@
     },
     "node_modules/type-check": {
       "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
+      "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "prelude-ls": "^1.2.1"
       },
@@ -16086,8 +16537,11 @@
     },
     "node_modules/type-fest": {
       "version": "0.20.2",
+      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
+      "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
+      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -16097,8 +16551,11 @@
     },
     "node_modules/typed-array-buffer": {
       "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz",
+      "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -16110,8 +16567,11 @@
     },
     "node_modules/typed-array-byte-length": {
       "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz",
+      "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "for-each": "^0.3.3",
@@ -16128,8 +16588,11 @@
     },
     "node_modules/typed-array-byte-offset": {
       "version": "1.0.4",
+      "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz",
+      "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "available-typed-arrays": "^1.0.7",
         "call-bind": "^1.0.8",
@@ -16148,8 +16611,11 @@
     },
     "node_modules/typed-array-length": {
       "version": "1.0.7",
+      "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz",
+      "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "for-each": "^0.3.3",
@@ -16167,6 +16633,8 @@
     },
     "node_modules/typedarray-to-buffer": {
       "version": "3.1.5",
+      "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
+      "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16190,6 +16658,8 @@
     },
     "node_modules/uglify-js": {
       "version": "3.19.3",
+      "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz",
+      "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "optional": true,
@@ -16202,8 +16672,11 @@
     },
     "node_modules/unbox-primitive": {
       "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz",
+      "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-bigints": "^1.0.2",
@@ -16219,6 +16692,8 @@
     },
     "node_modules/undici": {
       "version": "6.21.3",
+      "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz",
+      "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16234,6 +16709,8 @@
     },
     "node_modules/unicode-length": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/unicode-length/-/unicode-length-2.1.0.tgz",
+      "integrity": "sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16242,6 +16719,8 @@
     },
     "node_modules/unicorn-magic": {
       "version": "0.1.0",
+      "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz",
+      "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16253,6 +16732,8 @@
     },
     "node_modules/unified": {
       "version": "10.1.2",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz",
+      "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16293,6 +16774,8 @@
     },
     "node_modules/unist-util-generated": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz",
+      "integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16302,6 +16785,22 @@
     },
     "node_modules/unist-util-is": {
       "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz",
+      "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/unist-util-position": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz",
+      "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16314,6 +16813,8 @@
     },
     "node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz",
+      "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16326,6 +16827,8 @@
     },
     "node_modules/unist-util-visit": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz",
+      "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16340,6 +16843,8 @@
     },
     "node_modules/unist-util-visit-parents": {
       "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz",
+      "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16353,6 +16858,8 @@
     },
     "node_modules/unist-util-visit-parents/node_modules/unist-util-is": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz",
+      "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16362,6 +16869,8 @@
     },
     "node_modules/unist-util-visit/node_modules/unist-util-is": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz",
+      "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16371,11 +16880,15 @@
     },
     "node_modules/universal-user-agent": {
       "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/universalify": {
       "version": "0.2.0",
+      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
+      "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16384,6 +16897,8 @@
     },
     "node_modules/update-browserslist-db": {
       "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
+      "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
       "dev": true,
       "funding": [
         {
@@ -16413,6 +16928,8 @@
     },
     "node_modules/uri-js": {
       "version": "4.4.1",
+      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
+      "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -16421,6 +16938,8 @@
     },
     "node_modules/url-parse": {
       "version": "1.5.10",
+      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
+      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16434,6 +16953,8 @@
     },
     "node_modules/uuid": {
       "version": "8.3.2",
+      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
+      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -16442,6 +16963,8 @@
     },
     "node_modules/uvu": {
       "version": "0.5.6",
+      "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz",
+      "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16459,6 +16982,8 @@
     },
     "node_modules/uvu/node_modules/diff": {
       "version": "5.2.0",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
+      "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -16493,6 +17018,8 @@
     },
     "node_modules/vfile": {
       "version": "5.3.7",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz",
+      "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16508,6 +17035,8 @@
     },
     "node_modules/vfile-location": {
       "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.1.0.tgz",
+      "integrity": "sha512-YF23YMyASIIJXpktBa4vIGLJ5Gs88UB/XePgqPmTa7cDA+JeO3yclbpheQYCHjVHBn/yePzrXuygIL+xbvRYHw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16521,6 +17050,8 @@
     },
     "node_modules/vfile-message": {
       "version": "3.1.4",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
+      "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16534,6 +17065,8 @@
     },
     "node_modules/w3c-xmlserializer": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz",
+      "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16552,6 +17085,8 @@
     },
     "node_modules/web-namespaces": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz",
+      "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16561,11 +17096,15 @@
     },
     "node_modules/webidl-conversions": {
       "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
+      "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
       "dev": true,
       "license": "BSD-2-Clause"
     },
     "node_modules/whatwg-encoding": {
       "version": "3.1.1",
+      "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
+      "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16577,6 +17116,8 @@
     },
     "node_modules/whatwg-mimetype": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
+      "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16585,6 +17126,8 @@
     },
     "node_modules/whatwg-url": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
+      "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16608,8 +17151,11 @@
     },
     "node_modules/which-boxed-primitive": {
       "version": "1.1.1",
+      "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz",
+      "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "is-bigint": "^1.1.0",
         "is-boolean-object": "^1.2.1",
@@ -16626,8 +17172,11 @@
     },
     "node_modules/which-builtin-type": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz",
+      "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "function.prototype.name": "^1.1.6",
@@ -16652,8 +17201,11 @@
     },
     "node_modules/which-collection": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz",
+      "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "is-map": "^2.0.3",
         "is-set": "^2.0.3",
@@ -16669,13 +17221,18 @@
     },
     "node_modules/which-module": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz",
+      "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/which-typed-array": {
       "version": "1.1.19",
+      "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz",
+      "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "available-typed-arrays": "^1.0.7",
         "call-bind": "^1.0.8",
@@ -16694,21 +17251,24 @@
     },
     "node_modules/word-wrap": {
       "version": "1.2.5",
+      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
+      "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=0.10.0"
       }
     },
     "node_modules/wordwrap": {
       "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
+      "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/wrap-ansi": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz",
-      "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16726,8 +17286,6 @@
     "node_modules/wrap-ansi-cjs": {
       "name": "wrap-ansi",
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16744,8 +17302,6 @@
     },
     "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16760,8 +17316,6 @@
     },
     "node_modules/wrap-ansi/node_modules/ansi-regex": {
       "version": "6.2.2",
-      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz",
-      "integrity": "sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -16773,15 +17327,11 @@
     },
     "node_modules/wrap-ansi/node_modules/emoji-regex": {
       "version": "9.2.2",
-      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz",
-      "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
       "inBundle": true,
       "license": "MIT"
     },
     "node_modules/wrap-ansi/node_modules/string-width": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz",
-      "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16798,8 +17348,6 @@
     },
     "node_modules/wrap-ansi/node_modules/strip-ansi": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz",
-      "integrity": "sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -16814,6 +17362,8 @@
     },
     "node_modules/wrappy": {
       "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
+      "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -16830,6 +17380,8 @@
     },
     "node_modules/ws": {
       "version": "8.18.3",
+      "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
+      "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16850,6 +17402,8 @@
     },
     "node_modules/xml-name-validator": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
+      "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -16858,11 +17412,15 @@
     },
     "node_modules/xmlchars": {
       "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz",
+      "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/xpath": {
       "version": "0.0.34",
+      "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.34.tgz",
+      "integrity": "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16871,6 +17429,8 @@
     },
     "node_modules/y18n": {
       "version": "5.0.8",
+      "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
+      "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -16879,8 +17439,6 @@
     },
     "node_modules/yallist": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
-      "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
       "inBundle": true,
       "license": "ISC"
     },
@@ -16899,6 +17457,8 @@
     },
     "node_modules/yargs": {
       "version": "17.7.2",
+      "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
+      "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16916,6 +17476,8 @@
     },
     "node_modules/yargs-parser": {
       "version": "21.1.1",
+      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
+      "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -16924,6 +17486,8 @@
     },
     "node_modules/yocto-queue": {
       "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz",
+      "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16935,6 +17499,8 @@
     },
     "node_modules/zwitch": {
       "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
+      "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -17080,6 +17646,16 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
+    "workspaces/libnpmdiff/node_modules/binary-extensions": {
+      "version": "3.1.0",
+      "license": "MIT",
+      "engines": {
+        "node": ">=18.20"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "workspaces/libnpmexec": {
       "version": "10.1.6",
       "license": "ISC",

From 48285e04fd0a89b34d0c214295d5e76f68413f91 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:08:12 -0700
Subject: [PATCH 44/63] deps: add fdir, isexe, and picomatch to node_modules

These got lost during the shuffle of the deps updates; they are production dependencies and need to be included here.
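
As a sanity check, they should still appear when listing only the production dependency tree — an illustrative command (not part of this change):

```
# hypothetical check: fdir and picomatch should show up with dev deps omitted
npm ls fdir picomatch --omit=dev
```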
---
 node_modules/.gitignore                 |    6 +-
 node_modules/fdir/LICENSE               |    7 +
 node_modules/fdir/dist/index.cjs        |  588 ++++++++++++
 node_modules/fdir/dist/index.d.cts      |  155 ++++
 node_modules/fdir/dist/index.d.mts      |  155 ++++
 node_modules/fdir/dist/index.mjs        |  570 ++++++++++++
 node_modules/fdir/package.json          |  103 +++
 node_modules/picomatch/LICENSE          |   21 +
 node_modules/picomatch/index.js         |   17 +
 node_modules/picomatch/lib/constants.js |  180 ++++
 node_modules/picomatch/lib/parse.js     | 1085 +++++++++++++++++++++++
 node_modules/picomatch/lib/picomatch.js |  341 +++++++
 node_modules/picomatch/lib/scan.js      |  391 ++++++++
 node_modules/picomatch/lib/utils.js     |   72 ++
 node_modules/picomatch/package.json     |   83 ++
 node_modules/picomatch/posix.js         |    3 +
 16 files changed, 3773 insertions(+), 4 deletions(-)
 create mode 100644 node_modules/fdir/LICENSE
 create mode 100644 node_modules/fdir/dist/index.cjs
 create mode 100644 node_modules/fdir/dist/index.d.cts
 create mode 100644 node_modules/fdir/dist/index.d.mts
 create mode 100644 node_modules/fdir/dist/index.mjs
 create mode 100644 node_modules/fdir/package.json
 create mode 100644 node_modules/picomatch/LICENSE
 create mode 100644 node_modules/picomatch/index.js
 create mode 100644 node_modules/picomatch/lib/constants.js
 create mode 100644 node_modules/picomatch/lib/parse.js
 create mode 100644 node_modules/picomatch/lib/picomatch.js
 create mode 100644 node_modules/picomatch/lib/scan.js
 create mode 100644 node_modules/picomatch/lib/utils.js
 create mode 100644 node_modules/picomatch/package.json
 create mode 100644 node_modules/picomatch/posix.js

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index f146e9040bbae..aa6e36717bc7c 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -83,6 +83,7 @@
 !/err-code
 !/exponential-backoff
 !/fastest-levenshtein
+!/fdir
 !/foreground-child
 !/fs-minipass
 !/glob
@@ -169,6 +170,7 @@
 !/parse-conflict-json
 !/path-key
 !/path-scurry
+!/picomatch
 !/postcss-selector-parser
 !/proc-log
 !/proggy
@@ -218,10 +220,6 @@
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
-!/tinyglobby/node_modules/
-/tinyglobby/node_modules/*
-!/tinyglobby/node_modules/fdir
-!/tinyglobby/node_modules/picomatch
 !/treeverse
 !/tuf-js
 !/unique-filename
diff --git a/node_modules/fdir/LICENSE b/node_modules/fdir/LICENSE
new file mode 100644
index 0000000000000..bb7fdee44cae6
--- /dev/null
+++ b/node_modules/fdir/LICENSE
@@ -0,0 +1,7 @@
+Copyright 2023 Abdullah Atta
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/fdir/dist/index.cjs b/node_modules/fdir/dist/index.cjs
new file mode 100644
index 0000000000000..4868ffba35d99
--- /dev/null
+++ b/node_modules/fdir/dist/index.cjs
@@ -0,0 +1,588 @@
+//#region rolldown:runtime
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __copyProps = (to, from, except, desc) => {
+	if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
+		key = keys[i];
+		if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
+			get: ((k) => from[k]).bind(null, key),
+			enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
+		});
+	}
+	return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
+	value: mod,
+	enumerable: true
+}) : target, mod));
+
+//#endregion
+const path = __toESM(require("path"));
+const fs = __toESM(require("fs"));
+
+//#region src/utils.ts
+function cleanPath(path$1) {
+	let normalized = (0, path.normalize)(path$1);
+	if (normalized.length > 1 && normalized[normalized.length - 1] === path.sep) normalized = normalized.substring(0, normalized.length - 1);
+	return normalized;
+}
+const SLASHES_REGEX = /[\\/]/g;
+function convertSlashes(path$1, separator) {
+	return path$1.replace(SLASHES_REGEX, separator);
+}
+const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i;
+function isRootDirectory(path$1) {
+	return path$1 === "/" || WINDOWS_ROOT_DIR_REGEX.test(path$1);
+}
+function normalizePath(path$1, options) {
+	const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options;
+	const pathNeedsCleaning = process.platform === "win32" && path$1.includes("/") || path$1.startsWith(".");
+	if (resolvePaths) path$1 = (0, path.resolve)(path$1);
+	if (normalizePath$1 || pathNeedsCleaning) path$1 = cleanPath(path$1);
+	if (path$1 === ".") return "";
+	const needsSeperator = path$1[path$1.length - 1] !== pathSeparator;
+	return convertSlashes(needsSeperator ? path$1 + pathSeparator : path$1, pathSeparator);
+}
+
+//#endregion
+//#region src/api/functions/join-path.ts
+function joinPathWithBasePath(filename, directoryPath) {
+	return directoryPath + filename;
+}
+function joinPathWithRelativePath(root, options) {
+	return function(filename, directoryPath) {
+		const sameRoot = directoryPath.startsWith(root);
+		if (sameRoot) return directoryPath.slice(root.length) + filename;
+		else return convertSlashes((0, path.relative)(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename;
+	};
+}
+function joinPath(filename) {
+	return filename;
+}
+function joinDirectoryPath(filename, directoryPath, separator) {
+	return directoryPath + filename + separator;
+}
+function build$7(root, options) {
+	const { relativePaths, includeBasePath } = options;
+	return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? joinPathWithBasePath : joinPath;
+}
+
+//#endregion
+//#region src/api/functions/push-directory.ts
+function pushDirectoryWithRelativePath(root) {
+	return function(directoryPath, paths) {
+		paths.push(directoryPath.substring(root.length) || ".");
+	};
+}
+function pushDirectoryFilterWithRelativePath(root) {
+	return function(directoryPath, paths, filters) {
+		const relativePath = directoryPath.substring(root.length) || ".";
+		if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath);
+	};
+}
+const pushDirectory = (directoryPath, paths) => {
+	paths.push(directoryPath || ".");
+};
+const pushDirectoryFilter = (directoryPath, paths, filters) => {
+	const path$1 = directoryPath || ".";
+	if (filters.every((filter) => filter(path$1, true))) paths.push(path$1);
+};
+const empty$2 = () => {};
+function build$6(root, options) {
+	const { includeDirs, filters, relativePaths } = options;
+	if (!includeDirs) return empty$2;
+	if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root);
+	return filters && filters.length ? pushDirectoryFilter : pushDirectory;
+}
+
+//#endregion
+//#region src/api/functions/push-file.ts
+const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
+	if (filters.every((filter) => filter(filename, false))) counts.files++;
+};
+const pushFileFilter = (filename, paths, _counts, filters) => {
+	if (filters.every((filter) => filter(filename, false))) paths.push(filename);
+};
+const pushFileCount = (_filename, _paths, counts, _filters) => {
+	counts.files++;
+};
+const pushFile = (filename, paths) => {
+	paths.push(filename);
+};
+const empty$1 = () => {};
+function build$5(options) {
+	const { excludeFiles, filters, onlyCounts } = options;
+	if (excludeFiles) return empty$1;
+	if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
+	else if (onlyCounts) return pushFileCount;
+	else return pushFile;
+}
+
+//#endregion
+//#region src/api/functions/get-array.ts
+const getArray = (paths) => {
+	return paths;
+};
+const getArrayGroup = () => {
+	return [""].slice(0, 0);
+};
+function build$4(options) {
+	return options.group ? getArrayGroup : getArray;
+}
+
+//#endregion
+//#region src/api/functions/group-files.ts
+const groupFiles = (groups, directory, files) => {
+	groups.push({
+		directory,
+		files,
+		dir: directory
+	});
+};
+const empty = () => {};
+function build$3(options) {
+	return options.group ? groupFiles : empty;
+}
+
+//#endregion
+//#region src/api/functions/resolve-symlink.ts
+const resolveSymlinksAsync = function(path$1, state, callback$1) {
+	const { queue, fs: fs$1, options: { suppressErrors } } = state;
+	queue.enqueue();
+	fs$1.realpath(path$1, (error, resolvedPath) => {
+		if (error) return queue.dequeue(suppressErrors ? null : error, state);
+		fs$1.stat(resolvedPath, (error$1, stat) => {
+			if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
+			if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return queue.dequeue(null, state);
+			callback$1(stat, resolvedPath);
+			queue.dequeue(null, state);
+		});
+	});
+};
+const resolveSymlinks = function(path$1, state, callback$1) {
+	const { queue, fs: fs$1, options: { suppressErrors } } = state;
+	queue.enqueue();
+	try {
+		const resolvedPath = fs$1.realpathSync(path$1);
+		const stat = fs$1.statSync(resolvedPath);
+		if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return;
+		callback$1(stat, resolvedPath);
+	} catch (e) {
+		if (!suppressErrors) throw e;
+	}
+};
+function build$2(options, isSynchronous) {
+	if (!options.resolveSymlinks || options.excludeSymlinks) return null;
+	return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
+}
+function isRecursive(path$1, resolved, state) {
+	if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state);
+	let parent = (0, path.dirname)(path$1);
+	let depth = 1;
+	while (parent !== state.root && depth < 2) {
+		const resolvedPath = state.symlinks.get(parent);
+		const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath));
+		if (isSameRoot) depth++;
+		else parent = (0, path.dirname)(parent);
+	}
+	state.symlinks.set(path$1, resolved);
+	return depth > 1;
+}
+function isRecursiveUsingRealPaths(resolved, state) {
+	return state.visited.includes(resolved + state.options.pathSeparator);
+}
+
+//#endregion
+//#region src/api/functions/invoke-callback.ts
+const onlyCountsSync = (state) => {
+	return state.counts;
+};
+const groupsSync = (state) => {
+	return state.groups;
+};
+const defaultSync = (state) => {
+	return state.paths;
+};
+const limitFilesSync = (state) => {
+	return state.paths.slice(0, state.options.maxFiles);
+};
+const onlyCountsAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.counts, state.options.suppressErrors);
+	return null;
+};
+const defaultAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.paths, state.options.suppressErrors);
+	return null;
+};
+const limitFilesAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
+	return null;
+};
+const groupsAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.groups, state.options.suppressErrors);
+	return null;
+};
+function report(error, callback$1, output, suppressErrors) {
+	if (error && !suppressErrors) callback$1(error, output);
+	else callback$1(null, output);
+}
+function build$1(options, isSynchronous) {
+	const { onlyCounts, group, maxFiles } = options;
+	if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync;
+	else if (group) return isSynchronous ? groupsSync : groupsAsync;
+	else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync;
+	else return isSynchronous ? defaultSync : defaultAsync;
+}
+
+//#endregion
+//#region src/api/functions/walk-directory.ts
+const readdirOpts = { withFileTypes: true };
+const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
+	state.queue.enqueue();
+	if (currentDepth < 0) return state.queue.dequeue(null, state);
+	const { fs: fs$1 } = state;
+	state.visited.push(crawlPath);
+	state.counts.directories++;
+	fs$1.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
+		callback$1(entries, directoryPath, currentDepth);
+		state.queue.dequeue(state.options.suppressErrors ? null : error, state);
+	});
+};
+const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
+	const { fs: fs$1 } = state;
+	if (currentDepth < 0) return;
+	state.visited.push(crawlPath);
+	state.counts.directories++;
+	let entries = [];
+	try {
+		entries = fs$1.readdirSync(crawlPath || ".", readdirOpts);
+	} catch (e) {
+		if (!state.options.suppressErrors) throw e;
+	}
+	callback$1(entries, directoryPath, currentDepth);
+};
+function build(isSynchronous) {
+	return isSynchronous ? walkSync : walkAsync;
+}
+
+//#endregion
+//#region src/api/queue.ts
+/**
+* This is a custom stateless queue to track concurrent async fs calls.
+* It increments a counter whenever a call is queued and decrements it
+* as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+*/
+var Queue = class {
+	count = 0;
+	constructor(onQueueEmpty) {
+		this.onQueueEmpty = onQueueEmpty;
+	}
+	enqueue() {
+		this.count++;
+		return this.count;
+	}
+	dequeue(error, output) {
+		if (this.onQueueEmpty && (--this.count <= 0 || error)) {
+			this.onQueueEmpty(error, output);
+			if (error) {
+				output.controller.abort();
+				this.onQueueEmpty = void 0;
+			}
+		}
+	}
+};
+
+//#endregion
+//#region src/api/counter.ts
+var Counter = class {
+	_files = 0;
+	_directories = 0;
+	set files(num) {
+		this._files = num;
+	}
+	get files() {
+		return this._files;
+	}
+	set directories(num) {
+		this._directories = num;
+	}
+	get directories() {
+		return this._directories;
+	}
+	/**
+	* @deprecated use `directories` instead
+	*/
+	/* c8 ignore next 3 */
+	get dirs() {
+		return this._directories;
+	}
+};
+
+//#endregion
+//#region src/api/aborter.ts
+/**
+* AbortController is not supported on Node 14 so we use this until we can drop
+* support for Node 14.
+*/
+var Aborter = class {
+	aborted = false;
+	abort() {
+		this.aborted = true;
+	}
+};
+
+//#endregion
+//#region src/api/walker.ts
+var Walker = class {
+	root;
+	isSynchronous;
+	state;
+	joinPath;
+	pushDirectory;
+	pushFile;
+	getArray;
+	groupFiles;
+	resolveSymlink;
+	walkDirectory;
+	callbackInvoker;
+	constructor(root, options, callback$1) {
+		this.isSynchronous = !callback$1;
+		this.callbackInvoker = build$1(options, this.isSynchronous);
+		this.root = normalizePath(root, options);
+		this.state = {
+			root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1),
+			paths: [""].slice(0, 0),
+			groups: [],
+			counts: new Counter(),
+			options,
+			queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
+			symlinks: /* @__PURE__ */ new Map(),
+			visited: [""].slice(0, 0),
+			controller: new Aborter(),
+			fs: options.fs || fs
+		};
+		this.joinPath = build$7(this.root, options);
+		this.pushDirectory = build$6(this.root, options);
+		this.pushFile = build$5(options);
+		this.getArray = build$4(options);
+		this.groupFiles = build$3(options);
+		this.resolveSymlink = build$2(options, this.isSynchronous);
+		this.walkDirectory = build(this.isSynchronous);
+	}
+	start() {
+		this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
+		this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
+		return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
+	}
+	walk = (entries, directoryPath, depth) => {
+		const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
+		if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+		const files = this.getArray(this.state.paths);
+		for (let i = 0; i < entries.length; ++i) {
+			const entry = entries[i];
+			if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) {
+				const filename = this.joinPath(entry.name, directoryPath);
+				this.pushFile(filename, files, this.state.counts, filters);
+			} else if (entry.isDirectory()) {
+				let path$1 = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
+				if (exclude && exclude(entry.name, path$1)) continue;
+				this.pushDirectory(path$1, paths, filters);
+				this.walkDirectory(this.state, path$1, path$1, depth - 1, this.walk);
+			} else if (this.resolveSymlink && entry.isSymbolicLink()) {
+				let path$1 = joinPathWithBasePath(entry.name, directoryPath);
+				this.resolveSymlink(path$1, this.state, (stat, resolvedPath) => {
+					if (stat.isDirectory()) {
+						resolvedPath = normalizePath(resolvedPath, this.state.options);
+						if (exclude && exclude(entry.name, useRealPaths ? resolvedPath : path$1 + pathSeparator)) return;
+						this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path$1 + pathSeparator, depth - 1, this.walk);
+					} else {
+						resolvedPath = useRealPaths ? resolvedPath : path$1;
+						const filename = (0, path.basename)(resolvedPath);
+						const directoryPath$1 = normalizePath((0, path.dirname)(resolvedPath), this.state.options);
+						resolvedPath = this.joinPath(filename, directoryPath$1);
+						this.pushFile(resolvedPath, files, this.state.counts, filters);
+					}
+				});
+			}
+		}
+		this.groupFiles(this.state.groups, directoryPath, files);
+	};
+};
+
+//#endregion
+//#region src/api/async.ts
+function promise(root, options) {
+	return new Promise((resolve$1, reject) => {
+		callback(root, options, (err, output) => {
+			if (err) return reject(err);
+			resolve$1(output);
+		});
+	});
+}
+function callback(root, options, callback$1) {
+	let walker = new Walker(root, options, callback$1);
+	walker.start();
+}
+
+//#endregion
+//#region src/api/sync.ts
+function sync(root, options) {
+	const walker = new Walker(root, options);
+	return walker.start();
+}
+
+//#endregion
+//#region src/builder/api-builder.ts
+var APIBuilder = class {
+	constructor(root, options) {
+		this.root = root;
+		this.options = options;
+	}
+	withPromise() {
+		return promise(this.root, this.options);
+	}
+	withCallback(cb) {
+		callback(this.root, this.options, cb);
+	}
+	sync() {
+		return sync(this.root, this.options);
+	}
+};
+
+//#endregion
+//#region src/builder/index.ts
+let pm = null;
+/* c8 ignore next 6 */
+try {
+	require.resolve("picomatch");
+	pm = require("picomatch");
+} catch {}
+var Builder = class {
+	globCache = {};
+	options = {
+		maxDepth: Infinity,
+		suppressErrors: true,
+		pathSeparator: path.sep,
+		filters: []
+	};
+	globFunction;
+	constructor(options) {
+		this.options = {
+			...this.options,
+			...options
+		};
+		this.globFunction = this.options.globFunction;
+	}
+	group() {
+		this.options.group = true;
+		return this;
+	}
+	withPathSeparator(separator) {
+		this.options.pathSeparator = separator;
+		return this;
+	}
+	withBasePath() {
+		this.options.includeBasePath = true;
+		return this;
+	}
+	withRelativePaths() {
+		this.options.relativePaths = true;
+		return this;
+	}
+	withDirs() {
+		this.options.includeDirs = true;
+		return this;
+	}
+	withMaxDepth(depth) {
+		this.options.maxDepth = depth;
+		return this;
+	}
+	withMaxFiles(limit) {
+		this.options.maxFiles = limit;
+		return this;
+	}
+	withFullPaths() {
+		this.options.resolvePaths = true;
+		this.options.includeBasePath = true;
+		return this;
+	}
+	withErrors() {
+		this.options.suppressErrors = false;
+		return this;
+	}
+	withSymlinks({ resolvePaths = true } = {}) {
+		this.options.resolveSymlinks = true;
+		this.options.useRealPaths = resolvePaths;
+		return this.withFullPaths();
+	}
+	withAbortSignal(signal) {
+		this.options.signal = signal;
+		return this;
+	}
+	normalize() {
+		this.options.normalizePath = true;
+		return this;
+	}
+	filter(predicate) {
+		this.options.filters.push(predicate);
+		return this;
+	}
+	onlyDirs() {
+		this.options.excludeFiles = true;
+		this.options.includeDirs = true;
+		return this;
+	}
+	exclude(predicate) {
+		this.options.exclude = predicate;
+		return this;
+	}
+	onlyCounts() {
+		this.options.onlyCounts = true;
+		return this;
+	}
+	crawl(root) {
+		return new APIBuilder(root || ".", this.options);
+	}
+	withGlobFunction(fn) {
+		this.globFunction = fn;
+		return this;
+	}
+	/**
+	* @deprecated Pass options using the constructor instead:
+	* ```ts
+	* new fdir(options).crawl("/path/to/root");
+	* ```
+	* This method will be removed in v7.0
+	*/
+	/* c8 ignore next 4 */
+	crawlWithOptions(root, options) {
+		this.options = {
+			...this.options,
+			...options
+		};
+		return new APIBuilder(root || ".", this.options);
+	}
+	glob(...patterns) {
+		if (this.globFunction) return this.globWithOptions(patterns);
+		return this.globWithOptions(patterns, ...[{ dot: true }]);
+	}
+	globWithOptions(patterns, ...options) {
+		const globFn = this.globFunction || pm;
+		/* c8 ignore next 5 */
+		if (!globFn) throw new Error("Please specify a glob function to use glob matching.");
+		var isMatch = this.globCache[patterns.join("\0")];
+		if (!isMatch) {
+			isMatch = globFn(patterns, ...options);
+			this.globCache[patterns.join("\0")] = isMatch;
+		}
+		this.options.filters.push((path$1) => isMatch(path$1));
+		return this;
+	}
+};
+
+//#endregion
+exports.fdir = Builder;
\ No newline at end of file
diff --git a/node_modules/fdir/dist/index.d.cts b/node_modules/fdir/dist/index.d.cts
new file mode 100644
index 0000000000000..f448ef5d9b563
--- /dev/null
+++ b/node_modules/fdir/dist/index.d.cts
@@ -0,0 +1,155 @@
+/// 
+import * as nativeFs from "fs";
+import picomatch from "picomatch";
+
+//#region src/api/aborter.d.ts
+/**
+ * AbortController is not supported on Node 14 so we use this until we can drop
+ * support for Node 14.
+ */
+declare class Aborter {
+  aborted: boolean;
+  abort(): void;
+}
+//#endregion
+//#region src/api/queue.d.ts
+type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
+/**
+ * This is a custom stateless queue to track concurrent async fs calls.
+ * It increments a counter whenever a call is queued and decrements it
+ * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+ */
+declare class Queue {
+  private onQueueEmpty?;
+  count: number;
+  constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined);
+  enqueue(): number;
+  dequeue(error: Error | null, output: WalkerState): void;
+}
+//#endregion
+//#region src/types.d.ts
+type Counts = {
+  files: number;
+  directories: number;
+  /**
+   * @deprecated use `directories` instead. Will be removed in v7.0.
+   */
+  dirs: number;
+};
+type Group = {
+  directory: string;
+  files: string[];
+  /**
+   * @deprecated use `directory` instead. Will be removed in v7.0.
+   */
+  dir: string;
+};
+type GroupOutput = Group[];
+type OnlyCountsOutput = Counts;
+type PathsOutput = string[];
+type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type FSLike = {
+  readdir: typeof nativeFs.readdir;
+  readdirSync: typeof nativeFs.readdirSync;
+  realpath: typeof nativeFs.realpath;
+  realpathSync: typeof nativeFs.realpathSync;
+  stat: typeof nativeFs.stat;
+  statSync: typeof nativeFs.statSync;
+};
+type WalkerState = {
+  root: string;
+  paths: string[];
+  groups: Group[];
+  counts: Counts;
+  options: Options;
+  queue: Queue;
+  controller: Aborter;
+  fs: FSLike;
+  symlinks: Map<string, string>;
+  visited: string[];
+};
+type ResultCallback<TOutput extends Output> = (error: Error | null, output: TOutput) => void;
+type FilterPredicate = (path: string, isDirectory: boolean) => boolean;
+type ExcludePredicate = (dirName: string, dirPath: string) => boolean;
+type PathSeparator = "/" | "\\";
+type Options<TGlobFunction = unknown> = {
+  includeBasePath?: boolean;
+  includeDirs?: boolean;
+  normalizePath?: boolean;
+  maxDepth: number;
+  maxFiles?: number;
+  resolvePaths?: boolean;
+  suppressErrors: boolean;
+  group?: boolean;
+  onlyCounts?: boolean;
+  filters: FilterPredicate[];
+  resolveSymlinks?: boolean;
+  useRealPaths?: boolean;
+  excludeFiles?: boolean;
+  excludeSymlinks?: boolean;
+  exclude?: ExcludePredicate;
+  relativePaths?: boolean;
+  pathSeparator: PathSeparator;
+  signal?: AbortSignal;
+  globFunction?: TGlobFunction;
+  fs?: FSLike;
+};
+type GlobMatcher = (test: string) => boolean;
+type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
+type GlobParams<T> = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? TParams : [];
+//#endregion
+//#region src/builder/api-builder.d.ts
+declare class APIBuilder<TReturnType extends Output> {
+  private readonly root;
+  private readonly options;
+  constructor(root: string, options: Options);
+  withPromise(): Promise<TReturnType>;
+  withCallback(cb: ResultCallback<TReturnType>): void;
+  sync(): TReturnType;
+}
+//#endregion
+//#region src/builder/index.d.ts
+declare class Builder<TReturnType extends Output = PathsOutput, TGlobFunction = typeof picomatch> {
+  private readonly globCache;
+  private options;
+  private globFunction?;
+  constructor(options?: Partial<Options<TGlobFunction>>);
+  group(): Builder<GroupOutput, TGlobFunction>;
+  withPathSeparator(separator: "/" | "\\"): this;
+  withBasePath(): this;
+  withRelativePaths(): this;
+  withDirs(): this;
+  withMaxDepth(depth: number): this;
+  withMaxFiles(limit: number): this;
+  withFullPaths(): this;
+  withErrors(): this;
+  withSymlinks({
+    resolvePaths
+  }?: {
+    resolvePaths?: boolean | undefined;
+  }): this;
+  withAbortSignal(signal: AbortSignal): this;
+  normalize(): this;
+  filter(predicate: FilterPredicate): this;
+  onlyDirs(): this;
+  exclude(predicate: ExcludePredicate): this;
+  onlyCounts(): Builder<OnlyCountsOutput, TGlobFunction>;
+  crawl(root?: string): APIBuilder<TReturnType>;
+  withGlobFunction<TFunc>(fn: TFunc): Builder<TReturnType, TFunc>;
+  /**
+   * @deprecated Pass options using the constructor instead:
+   * ```ts
+   * new fdir(options).crawl("/path/to/root");
+   * ```
+   * This method will be removed in v7.0
+   */
+  crawlWithOptions(root: string, options: Partial<Options<TGlobFunction>>): APIBuilder<TReturnType>;
+  glob(...patterns: string[]): Builder<TReturnType, TGlobFunction>;
+  globWithOptions(patterns: string[]): Builder<TReturnType, TGlobFunction>;
+  globWithOptions(patterns: string[], ...options: GlobParams<TGlobFunction>): Builder<TReturnType, TGlobFunction>;
+}
+//#endregion
+//#region src/index.d.ts
+type Fdir = typeof Builder;
+//#endregion
+export { Counts, ExcludePredicate, FSLike, Fdir, FilterPredicate, GlobFunction, GlobMatcher, GlobParams, Group, GroupOutput, OnlyCountsOutput, Options, Output, PathSeparator, PathsOutput, ResultCallback, WalkerState, Builder as fdir };
\ No newline at end of file
diff --git a/node_modules/fdir/dist/index.d.mts b/node_modules/fdir/dist/index.d.mts
new file mode 100644
index 0000000000000..f448ef5d9b563
--- /dev/null
+++ b/node_modules/fdir/dist/index.d.mts
@@ -0,0 +1,155 @@
+/// 
+import * as nativeFs from "fs";
+import picomatch from "picomatch";
+
+//#region src/api/aborter.d.ts
+/**
+ * AbortController is not supported on Node 14 so we use this until we can drop
+ * support for Node 14.
+ */
+declare class Aborter {
+  aborted: boolean;
+  abort(): void;
+}
+//#endregion
+//#region src/api/queue.d.ts
+type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
+/**
+ * This is a custom stateless queue to track concurrent async fs calls.
+ * It increments a counter whenever a call is queued and decrements it
+ * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+ */
+declare class Queue {
+  private onQueueEmpty?;
+  count: number;
+  constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined);
+  enqueue(): number;
+  dequeue(error: Error | null, output: WalkerState): void;
+}
+//#endregion
+//#region src/types.d.ts
+type Counts = {
+  files: number;
+  directories: number;
+  /**
+   * @deprecated use `directories` instead. Will be removed in v7.0.
+   */
+  dirs: number;
+};
+type Group = {
+  directory: string;
+  files: string[];
+  /**
+   * @deprecated use `directory` instead. Will be removed in v7.0.
+   */
+  dir: string;
+};
+type GroupOutput = Group[];
+type OnlyCountsOutput = Counts;
+type PathsOutput = string[];
+type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
+type FSLike = {
+  readdir: typeof nativeFs.readdir;
+  readdirSync: typeof nativeFs.readdirSync;
+  realpath: typeof nativeFs.realpath;
+  realpathSync: typeof nativeFs.realpathSync;
+  stat: typeof nativeFs.stat;
+  statSync: typeof nativeFs.statSync;
+};
+type WalkerState = {
+  root: string;
+  paths: string[];
+  groups: Group[];
+  counts: Counts;
+  options: Options;
+  queue: Queue;
+  controller: Aborter;
+  fs: FSLike;
+  symlinks: Map<string, string>;
+  visited: string[];
+};
+type ResultCallback<TOutput extends Output> = (error: Error | null, output: TOutput) => void;
+type FilterPredicate = (path: string, isDirectory: boolean) => boolean;
+type ExcludePredicate = (dirName: string, dirPath: string) => boolean;
+type PathSeparator = "/" | "\\";
+type Options<TGlobFunction = unknown> = {
+  includeBasePath?: boolean;
+  includeDirs?: boolean;
+  normalizePath?: boolean;
+  maxDepth: number;
+  maxFiles?: number;
+  resolvePaths?: boolean;
+  suppressErrors: boolean;
+  group?: boolean;
+  onlyCounts?: boolean;
+  filters: FilterPredicate[];
+  resolveSymlinks?: boolean;
+  useRealPaths?: boolean;
+  excludeFiles?: boolean;
+  excludeSymlinks?: boolean;
+  exclude?: ExcludePredicate;
+  relativePaths?: boolean;
+  pathSeparator: PathSeparator;
+  signal?: AbortSignal;
+  globFunction?: TGlobFunction;
+  fs?: FSLike;
+};
+type GlobMatcher = (test: string) => boolean;
+type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
+type GlobParams<T> = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? TParams : [];
+//#endregion
+//#region src/builder/api-builder.d.ts
+declare class APIBuilder<TReturnType extends Output> {
+  private readonly root;
+  private readonly options;
+  constructor(root: string, options: Options);
+  withPromise(): Promise<TReturnType>;
+  withCallback(cb: ResultCallback<TReturnType>): void;
+  sync(): TReturnType;
+}
+//#endregion
+//#region src/builder/index.d.ts
+declare class Builder<TReturnType extends Output = PathsOutput, TGlobFunction = typeof picomatch> {
+  private readonly globCache;
+  private options;
+  private globFunction?;
+  constructor(options?: Partial<Options<TGlobFunction>>);
+  group(): Builder<GroupOutput, TGlobFunction>;
+  withPathSeparator(separator: "/" | "\\"): this;
+  withBasePath(): this;
+  withRelativePaths(): this;
+  withDirs(): this;
+  withMaxDepth(depth: number): this;
+  withMaxFiles(limit: number): this;
+  withFullPaths(): this;
+  withErrors(): this;
+  withSymlinks({
+    resolvePaths
+  }?: {
+    resolvePaths?: boolean | undefined;
+  }): this;
+  withAbortSignal(signal: AbortSignal): this;
+  normalize(): this;
+  filter(predicate: FilterPredicate): this;
+  onlyDirs(): this;
+  exclude(predicate: ExcludePredicate): this;
+  onlyCounts(): Builder<OnlyCountsOutput, TGlobFunction>;
+  crawl(root?: string): APIBuilder<TReturnType>;
+  withGlobFunction<TFunc>(fn: TFunc): Builder<TReturnType, TFunc>;
+  /**
+   * @deprecated Pass options using the constructor instead:
+   * ```ts
+   * new fdir(options).crawl("/path/to/root");
+   * ```
+   * This method will be removed in v7.0
+   */
+  crawlWithOptions(root: string, options: Partial<Options<TGlobFunction>>): APIBuilder<TReturnType>;
+  glob(...patterns: string[]): Builder<TReturnType, TGlobFunction>;
+  globWithOptions(patterns: string[]): Builder<TReturnType, TGlobFunction>;
+  globWithOptions(patterns: string[], ...options: GlobParams<TGlobFunction>): Builder<TReturnType, TGlobFunction>;
+}
+//#endregion
+//#region src/index.d.ts
+type Fdir = typeof Builder;
+//#endregion
+export { Counts, ExcludePredicate, FSLike, Fdir, FilterPredicate, GlobFunction, GlobMatcher, GlobParams, Group, GroupOutput, OnlyCountsOutput, Options, Output, PathSeparator, PathsOutput, ResultCallback, WalkerState, Builder as fdir };
\ No newline at end of file
diff --git a/node_modules/fdir/dist/index.mjs b/node_modules/fdir/dist/index.mjs
new file mode 100644
index 0000000000000..5c37e092b507d
--- /dev/null
+++ b/node_modules/fdir/dist/index.mjs
@@ -0,0 +1,570 @@
+import { createRequire } from "module";
+import { basename, dirname, normalize, relative, resolve, sep } from "path";
+import * as nativeFs from "fs";
+
+//#region rolldown:runtime
+var __require = /* @__PURE__ */ createRequire(import.meta.url);
+
+//#endregion
+//#region src/utils.ts
+function cleanPath(path) {
+	let normalized = normalize(path);
+	if (normalized.length > 1 && normalized[normalized.length - 1] === sep) normalized = normalized.substring(0, normalized.length - 1);
+	return normalized;
+}
+const SLASHES_REGEX = /[\\/]/g;
+function convertSlashes(path, separator) {
+	return path.replace(SLASHES_REGEX, separator);
+}
+const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i;
+function isRootDirectory(path) {
+	return path === "/" || WINDOWS_ROOT_DIR_REGEX.test(path);
+}
+function normalizePath(path, options) {
+	const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options;
+	const pathNeedsCleaning = process.platform === "win32" && path.includes("/") || path.startsWith(".");
+	if (resolvePaths) path = resolve(path);
+	if (normalizePath$1 || pathNeedsCleaning) path = cleanPath(path);
+	if (path === ".") return "";
+	const needsSeperator = path[path.length - 1] !== pathSeparator;
+	return convertSlashes(needsSeperator ? path + pathSeparator : path, pathSeparator);
+}
+
+//#endregion
+//#region src/api/functions/join-path.ts
+function joinPathWithBasePath(filename, directoryPath) {
+	return directoryPath + filename;
+}
+function joinPathWithRelativePath(root, options) {
+	return function(filename, directoryPath) {
+		const sameRoot = directoryPath.startsWith(root);
+		if (sameRoot) return directoryPath.slice(root.length) + filename;
+		else return convertSlashes(relative(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename;
+	};
+}
+function joinPath(filename) {
+	return filename;
+}
+function joinDirectoryPath(filename, directoryPath, separator) {
+	return directoryPath + filename + separator;
+}
+function build$7(root, options) {
+	const { relativePaths, includeBasePath } = options;
+	return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? joinPathWithBasePath : joinPath;
+}
+
+//#endregion
+//#region src/api/functions/push-directory.ts
+function pushDirectoryWithRelativePath(root) {
+	return function(directoryPath, paths) {
+		paths.push(directoryPath.substring(root.length) || ".");
+	};
+}
+function pushDirectoryFilterWithRelativePath(root) {
+	return function(directoryPath, paths, filters) {
+		const relativePath = directoryPath.substring(root.length) || ".";
+		if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath);
+	};
+}
+const pushDirectory = (directoryPath, paths) => {
+	paths.push(directoryPath || ".");
+};
+const pushDirectoryFilter = (directoryPath, paths, filters) => {
+	const path = directoryPath || ".";
+	if (filters.every((filter) => filter(path, true))) paths.push(path);
+};
+const empty$2 = () => {};
+function build$6(root, options) {
+	const { includeDirs, filters, relativePaths } = options;
+	if (!includeDirs) return empty$2;
+	if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root);
+	return filters && filters.length ? pushDirectoryFilter : pushDirectory;
+}
+
+//#endregion
+//#region src/api/functions/push-file.ts
+const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
+	if (filters.every((filter) => filter(filename, false))) counts.files++;
+};
+const pushFileFilter = (filename, paths, _counts, filters) => {
+	if (filters.every((filter) => filter(filename, false))) paths.push(filename);
+};
+const pushFileCount = (_filename, _paths, counts, _filters) => {
+	counts.files++;
+};
+const pushFile = (filename, paths) => {
+	paths.push(filename);
+};
+const empty$1 = () => {};
+function build$5(options) {
+	const { excludeFiles, filters, onlyCounts } = options;
+	if (excludeFiles) return empty$1;
+	if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
+	else if (onlyCounts) return pushFileCount;
+	else return pushFile;
+}
+
+//#endregion
+//#region src/api/functions/get-array.ts
+const getArray = (paths) => {
+	return paths;
+};
+const getArrayGroup = () => {
+	return [""].slice(0, 0);
+};
+function build$4(options) {
+	return options.group ? getArrayGroup : getArray;
+}
+
+//#endregion
+//#region src/api/functions/group-files.ts
+const groupFiles = (groups, directory, files) => {
+	groups.push({
+		directory,
+		files,
+		dir: directory
+	});
+};
+const empty = () => {};
+function build$3(options) {
+	return options.group ? groupFiles : empty;
+}
+
+//#endregion
+//#region src/api/functions/resolve-symlink.ts
+const resolveSymlinksAsync = function(path, state, callback$1) {
+	const { queue, fs, options: { suppressErrors } } = state;
+	queue.enqueue();
+	fs.realpath(path, (error, resolvedPath) => {
+		if (error) return queue.dequeue(suppressErrors ? null : error, state);
+		fs.stat(resolvedPath, (error$1, stat) => {
+			if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
+			if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) return queue.dequeue(null, state);
+			callback$1(stat, resolvedPath);
+			queue.dequeue(null, state);
+		});
+	});
+};
+const resolveSymlinks = function(path, state, callback$1) {
+	const { queue, fs, options: { suppressErrors } } = state;
+	queue.enqueue();
+	try {
+		const resolvedPath = fs.realpathSync(path);
+		const stat = fs.statSync(resolvedPath);
+		if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) return;
+		callback$1(stat, resolvedPath);
+	} catch (e) {
+		if (!suppressErrors) throw e;
+	}
+};
+function build$2(options, isSynchronous) {
+	if (!options.resolveSymlinks || options.excludeSymlinks) return null;
+	return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
+}
+function isRecursive(path, resolved, state) {
+	if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state);
+	let parent = dirname(path);
+	let depth = 1;
+	while (parent !== state.root && depth < 2) {
+		const resolvedPath = state.symlinks.get(parent);
+		const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath));
+		if (isSameRoot) depth++;
+		else parent = dirname(parent);
+	}
+	state.symlinks.set(path, resolved);
+	return depth > 1;
+}
+function isRecursiveUsingRealPaths(resolved, state) {
+	return state.visited.includes(resolved + state.options.pathSeparator);
+}
+
+//#endregion
+//#region src/api/functions/invoke-callback.ts
+const onlyCountsSync = (state) => {
+	return state.counts;
+};
+const groupsSync = (state) => {
+	return state.groups;
+};
+const defaultSync = (state) => {
+	return state.paths;
+};
+const limitFilesSync = (state) => {
+	return state.paths.slice(0, state.options.maxFiles);
+};
+const onlyCountsAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.counts, state.options.suppressErrors);
+	return null;
+};
+const defaultAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.paths, state.options.suppressErrors);
+	return null;
+};
+const limitFilesAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
+	return null;
+};
+const groupsAsync = (state, error, callback$1) => {
+	report(error, callback$1, state.groups, state.options.suppressErrors);
+	return null;
+};
+function report(error, callback$1, output, suppressErrors) {
+	if (error && !suppressErrors) callback$1(error, output);
+	else callback$1(null, output);
+}
+function build$1(options, isSynchronous) {
+	const { onlyCounts, group, maxFiles } = options;
+	if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync;
+	else if (group) return isSynchronous ? groupsSync : groupsAsync;
+	else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync;
+	else return isSynchronous ? defaultSync : defaultAsync;
+}
+
+//#endregion
+//#region src/api/functions/walk-directory.ts
+const readdirOpts = { withFileTypes: true };
+const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
+	state.queue.enqueue();
+	if (currentDepth < 0) return state.queue.dequeue(null, state);
+	const { fs } = state;
+	state.visited.push(crawlPath);
+	state.counts.directories++;
+	fs.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
+		callback$1(entries, directoryPath, currentDepth);
+		state.queue.dequeue(state.options.suppressErrors ? null : error, state);
+	});
+};
+const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
+	const { fs } = state;
+	if (currentDepth < 0) return;
+	state.visited.push(crawlPath);
+	state.counts.directories++;
+	let entries = [];
+	try {
+		entries = fs.readdirSync(crawlPath || ".", readdirOpts);
+	} catch (e) {
+		if (!state.options.suppressErrors) throw e;
+	}
+	callback$1(entries, directoryPath, currentDepth);
+};
+function build(isSynchronous) {
+	return isSynchronous ? walkSync : walkAsync;
+}
+
+//#endregion
+//#region src/api/queue.ts
+/**
+* This is a custom stateless queue to track concurrent async fs calls.
+* It increments a counter whenever a call is queued and decrements it
+* as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
+*/
+var Queue = class {
+	count = 0;
+	constructor(onQueueEmpty) {
+		this.onQueueEmpty = onQueueEmpty;
+	}
+	enqueue() {
+		this.count++;
+		return this.count;
+	}
+	dequeue(error, output) {
+		if (this.onQueueEmpty && (--this.count <= 0 || error)) {
+			this.onQueueEmpty(error, output);
+			if (error) {
+				output.controller.abort();
+				this.onQueueEmpty = void 0;
+			}
+		}
+	}
+};
+
+//#endregion
+//#region src/api/counter.ts
+var Counter = class {
+	_files = 0;
+	_directories = 0;
+	set files(num) {
+		this._files = num;
+	}
+	get files() {
+		return this._files;
+	}
+	set directories(num) {
+		this._directories = num;
+	}
+	get directories() {
+		return this._directories;
+	}
+	/**
+	* @deprecated use `directories` instead
+	*/
+	/* c8 ignore next 3 */
+	get dirs() {
+		return this._directories;
+	}
+};
+
+//#endregion
+//#region src/api/aborter.ts
+/**
+* AbortController is not supported on Node 14 so we use this until we can drop
+* support for Node 14.
+*/
+var Aborter = class {
+	aborted = false;
+	abort() {
+		this.aborted = true;
+	}
+};
+
+//#endregion
+//#region src/api/walker.ts
+var Walker = class {
+	root;
+	isSynchronous;
+	state;
+	joinPath;
+	pushDirectory;
+	pushFile;
+	getArray;
+	groupFiles;
+	resolveSymlink;
+	walkDirectory;
+	callbackInvoker;
+	constructor(root, options, callback$1) {
+		this.isSynchronous = !callback$1;
+		this.callbackInvoker = build$1(options, this.isSynchronous);
+		this.root = normalizePath(root, options);
+		this.state = {
+			root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1),
+			paths: [""].slice(0, 0),
+			groups: [],
+			counts: new Counter(),
+			options,
+			queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
+			symlinks: /* @__PURE__ */ new Map(),
+			visited: [""].slice(0, 0),
+			controller: new Aborter(),
+			fs: options.fs || nativeFs
+		};
+		this.joinPath = build$7(this.root, options);
+		this.pushDirectory = build$6(this.root, options);
+		this.pushFile = build$5(options);
+		this.getArray = build$4(options);
+		this.groupFiles = build$3(options);
+		this.resolveSymlink = build$2(options, this.isSynchronous);
+		this.walkDirectory = build(this.isSynchronous);
+	}
+	start() {
+		this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
+		this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
+		return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
+	}
+	walk = (entries, directoryPath, depth) => {
+		const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
+		if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
+		const files = this.getArray(this.state.paths);
+		for (let i = 0; i < entries.length; ++i) {
+			const entry = entries[i];
+			if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) {
+				const filename = this.joinPath(entry.name, directoryPath);
+				this.pushFile(filename, files, this.state.counts, filters);
+			} else if (entry.isDirectory()) {
+				let path = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
+				if (exclude && exclude(entry.name, path)) continue;
+				this.pushDirectory(path, paths, filters);
+				this.walkDirectory(this.state, path, path, depth - 1, this.walk);
+			} else if (this.resolveSymlink && entry.isSymbolicLink()) {
+				let path = joinPathWithBasePath(entry.name, directoryPath);
+				this.resolveSymlink(path, this.state, (stat, resolvedPath) => {
+					if (stat.isDirectory()) {
+						resolvedPath = normalizePath(resolvedPath, this.state.options);
+						if (exclude && exclude(entry.name, useRealPaths ? resolvedPath : path + pathSeparator)) return;
+						this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path + pathSeparator, depth - 1, this.walk);
+					} else {
+						resolvedPath = useRealPaths ? resolvedPath : path;
+						const filename = basename(resolvedPath);
+						const directoryPath$1 = normalizePath(dirname(resolvedPath), this.state.options);
+						resolvedPath = this.joinPath(filename, directoryPath$1);
+						this.pushFile(resolvedPath, files, this.state.counts, filters);
+					}
+				});
+			}
+		}
+		this.groupFiles(this.state.groups, directoryPath, files);
+	};
+};
+
+//#endregion
+//#region src/api/async.ts
+function promise(root, options) {
+	return new Promise((resolve$1, reject) => {
+		callback(root, options, (err, output) => {
+			if (err) return reject(err);
+			resolve$1(output);
+		});
+	});
+}
+function callback(root, options, callback$1) {
+	let walker = new Walker(root, options, callback$1);
+	walker.start();
+}
+
+//#endregion
+//#region src/api/sync.ts
+function sync(root, options) {
+	const walker = new Walker(root, options);
+	return walker.start();
+}
+
+//#endregion
+//#region src/builder/api-builder.ts
+var APIBuilder = class {
+	constructor(root, options) {
+		this.root = root;
+		this.options = options;
+	}
+	withPromise() {
+		return promise(this.root, this.options);
+	}
+	withCallback(cb) {
+		callback(this.root, this.options, cb);
+	}
+	sync() {
+		return sync(this.root, this.options);
+	}
+};
+
+//#endregion
+//#region src/builder/index.ts
+let pm = null;
+/* c8 ignore next 6 */
+try {
+	__require.resolve("picomatch");
+	pm = __require("picomatch");
+} catch {}
+var Builder = class {
+	globCache = {};
+	options = {
+		maxDepth: Infinity,
+		suppressErrors: true,
+		pathSeparator: sep,
+		filters: []
+	};
+	globFunction;
+	constructor(options) {
+		this.options = {
+			...this.options,
+			...options
+		};
+		this.globFunction = this.options.globFunction;
+	}
+	group() {
+		this.options.group = true;
+		return this;
+	}
+	withPathSeparator(separator) {
+		this.options.pathSeparator = separator;
+		return this;
+	}
+	withBasePath() {
+		this.options.includeBasePath = true;
+		return this;
+	}
+	withRelativePaths() {
+		this.options.relativePaths = true;
+		return this;
+	}
+	withDirs() {
+		this.options.includeDirs = true;
+		return this;
+	}
+	withMaxDepth(depth) {
+		this.options.maxDepth = depth;
+		return this;
+	}
+	withMaxFiles(limit) {
+		this.options.maxFiles = limit;
+		return this;
+	}
+	withFullPaths() {
+		this.options.resolvePaths = true;
+		this.options.includeBasePath = true;
+		return this;
+	}
+	withErrors() {
+		this.options.suppressErrors = false;
+		return this;
+	}
+	withSymlinks({ resolvePaths = true } = {}) {
+		this.options.resolveSymlinks = true;
+		this.options.useRealPaths = resolvePaths;
+		return this.withFullPaths();
+	}
+	withAbortSignal(signal) {
+		this.options.signal = signal;
+		return this;
+	}
+	normalize() {
+		this.options.normalizePath = true;
+		return this;
+	}
+	filter(predicate) {
+		this.options.filters.push(predicate);
+		return this;
+	}
+	onlyDirs() {
+		this.options.excludeFiles = true;
+		this.options.includeDirs = true;
+		return this;
+	}
+	exclude(predicate) {
+		this.options.exclude = predicate;
+		return this;
+	}
+	onlyCounts() {
+		this.options.onlyCounts = true;
+		return this;
+	}
+	crawl(root) {
+		return new APIBuilder(root || ".", this.options);
+	}
+	withGlobFunction(fn) {
+		this.globFunction = fn;
+		return this;
+	}
+	/**
+	* @deprecated Pass options using the constructor instead:
+	* ```ts
+	* new fdir(options).crawl("/path/to/root");
+	* ```
+	* This method will be removed in v7.0
+	*/
+	/* c8 ignore next 4 */
+	crawlWithOptions(root, options) {
+		this.options = {
+			...this.options,
+			...options
+		};
+		return new APIBuilder(root || ".", this.options);
+	}
+	glob(...patterns) {
+		if (this.globFunction) return this.globWithOptions(patterns);
+		return this.globWithOptions(patterns, ...[{ dot: true }]);
+	}
+	globWithOptions(patterns, ...options) {
+		const globFn = this.globFunction || pm;
+		/* c8 ignore next 5 */
+		if (!globFn) throw new Error("Please specify a glob function to use glob matching.");
+		var isMatch = this.globCache[patterns.join("\0")];
+		if (!isMatch) {
+			isMatch = globFn(patterns, ...options);
+			this.globCache[patterns.join("\0")] = isMatch;
+		}
+		this.options.filters.push((path) => isMatch(path));
+		return this;
+	}
+};
+
+//#endregion
+export { Builder as fdir };
\ No newline at end of file
diff --git a/node_modules/fdir/package.json b/node_modules/fdir/package.json
new file mode 100644
index 0000000000000..e229dff815080
--- /dev/null
+++ b/node_modules/fdir/package.json
@@ -0,0 +1,103 @@
+{
+  "name": "fdir",
+  "version": "6.5.0",
+  "description": "The fastest directory crawler & globbing alternative to glob, fast-glob, & tiny-glob. Crawls 1m files in < 1s",
+  "main": "./dist/index.cjs",
+  "types": "./dist/index.d.cts",
+  "type": "module",
+  "scripts": {
+    "prepublishOnly": "npm run test && npm run build",
+    "build": "tsdown",
+    "format": "prettier --write src __tests__ benchmarks",
+    "test": "vitest run __tests__/",
+    "test:coverage": "vitest run --coverage __tests__/",
+    "test:watch": "vitest __tests__/",
+    "bench": "ts-node benchmarks/benchmark.js",
+    "bench:glob": "ts-node benchmarks/glob-benchmark.ts",
+    "bench:fdir": "ts-node benchmarks/fdir-benchmark.ts",
+    "release": "./scripts/release.sh"
+  },
+  "engines": {
+    "node": ">=12.0.0"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/thecodrr/fdir.git"
+  },
+  "keywords": [
+    "util",
+    "os",
+    "sys",
+    "fs",
+    "walk",
+    "crawler",
+    "directory",
+    "files",
+    "io",
+    "tiny-glob",
+    "glob",
+    "fast-glob",
+    "speed",
+    "javascript",
+    "nodejs"
+  ],
+  "author": "thecodrr ",
+  "license": "MIT",
+  "bugs": {
+    "url": "https://github.com/thecodrr/fdir/issues"
+  },
+  "homepage": "https://github.com/thecodrr/fdir#readme",
+  "devDependencies": {
+    "@types/glob": "^8.1.0",
+    "@types/mock-fs": "^4.13.4",
+    "@types/node": "^20.9.4",
+    "@types/picomatch": "^4.0.0",
+    "@types/tap": "^15.0.11",
+    "@vitest/coverage-v8": "^0.34.6",
+    "all-files-in-tree": "^1.1.2",
+    "benny": "^3.7.1",
+    "csv-to-markdown-table": "^1.3.1",
+    "expect": "^29.7.0",
+    "fast-glob": "^3.3.2",
+    "fdir1": "npm:fdir@1.2.0",
+    "fdir2": "npm:fdir@2.1.0",
+    "fdir3": "npm:fdir@3.4.2",
+    "fdir4": "npm:fdir@4.1.0",
+    "fdir5": "npm:fdir@5.0.0",
+    "fs-readdir-recursive": "^1.1.0",
+    "get-all-files": "^4.1.0",
+    "glob": "^10.3.10",
+    "klaw-sync": "^6.0.0",
+    "mock-fs": "^5.2.0",
+    "picomatch": "^4.0.2",
+    "prettier": "^3.5.3",
+    "recur-readdir": "0.0.1",
+    "recursive-files": "^1.0.2",
+    "recursive-fs": "^2.1.0",
+    "recursive-readdir": "^2.2.3",
+    "rrdir": "^12.1.0",
+    "systeminformation": "^5.21.17",
+    "tiny-glob": "^0.2.9",
+    "ts-node": "^10.9.1",
+    "tsdown": "^0.12.5",
+    "typescript": "^5.3.2",
+    "vitest": "^0.34.6",
+    "walk-sync": "^3.0.0"
+  },
+  "peerDependencies": {
+    "picomatch": "^3 || ^4"
+  },
+  "peerDependenciesMeta": {
+    "picomatch": {
+      "optional": true
+    }
+  },
+  "module": "./dist/index.mjs",
+  "exports": {
+    ".": {
+      "import": "./dist/index.mjs",
+      "require": "./dist/index.cjs"
+    },
+    "./package.json": "./package.json"
+  }
+}
diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE
new file mode 100644
index 0000000000000..3608dca25e30b
--- /dev/null
+++ b/node_modules/picomatch/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2017-present, Jon Schlinkert.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js
new file mode 100644
index 0000000000000..a753b1d9e843c
--- /dev/null
+++ b/node_modules/picomatch/index.js
@@ -0,0 +1,17 @@
+'use strict';
+
+const pico = require('./lib/picomatch');
+const utils = require('./lib/utils');
+
+function picomatch(glob, options, returnState = false) {
+  // default to os.platform()
+  if (options && (options.windows === null || options.windows === undefined)) {
+    // don't mutate the original options object
+    options = { ...options, windows: utils.isWindows() };
+  }
+
+  return pico(glob, options, returnState);
+}
+
+Object.assign(picomatch, pico);
+module.exports = picomatch;
diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js
new file mode 100644
index 0000000000000..3f7ef7e53adaf
--- /dev/null
+++ b/node_modules/picomatch/lib/constants.js
@@ -0,0 +1,180 @@
+'use strict';
+
+const WIN_SLASH = '\\\\/';
+const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
+
+/**
+ * Posix glob regex
+ */
+
+const DOT_LITERAL = '\\.';
+const PLUS_LITERAL = '\\+';
+const QMARK_LITERAL = '\\?';
+const SLASH_LITERAL = '\\/';
+const ONE_CHAR = '(?=.)';
+const QMARK = '[^/]';
+const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
+const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
+const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
+const NO_DOT = `(?!${DOT_LITERAL})`;
+const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
+const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
+const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
+const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
+const STAR = `${QMARK}*?`;
+const SEP = '/';
+
+const POSIX_CHARS = {
+  DOT_LITERAL,
+  PLUS_LITERAL,
+  QMARK_LITERAL,
+  SLASH_LITERAL,
+  ONE_CHAR,
+  QMARK,
+  END_ANCHOR,
+  DOTS_SLASH,
+  NO_DOT,
+  NO_DOTS,
+  NO_DOT_SLASH,
+  NO_DOTS_SLASH,
+  QMARK_NO_DOT,
+  STAR,
+  START_ANCHOR,
+  SEP
+};
+
+/**
+ * Windows glob regex
+ */
+
+const WINDOWS_CHARS = {
+  ...POSIX_CHARS,
+
+  SLASH_LITERAL: `[${WIN_SLASH}]`,
+  QMARK: WIN_NO_SLASH,
+  STAR: `${WIN_NO_SLASH}*?`,
+  DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
+  NO_DOT: `(?!${DOT_LITERAL})`,
+  NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
+  NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
+  NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
+  QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
+  START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
+  END_ANCHOR: `(?:[${WIN_SLASH}]|$)`,
+  SEP: '\\'
+};
+
+/**
+ * POSIX Bracket Regex
+ */
+
+const POSIX_REGEX_SOURCE = {
+  alnum: 'a-zA-Z0-9',
+  alpha: 'a-zA-Z',
+  ascii: '\\x00-\\x7F',
+  blank: ' \\t',
+  cntrl: '\\x00-\\x1F\\x7F',
+  digit: '0-9',
+  graph: '\\x21-\\x7E',
+  lower: 'a-z',
+  print: '\\x20-\\x7E ',
+  punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
+  space: ' \\t\\r\\n\\v\\f',
+  upper: 'A-Z',
+  word: 'A-Za-z0-9_',
+  xdigit: 'A-Fa-f0-9'
+};
+
+module.exports = {
+  MAX_LENGTH: 1024 * 64,
+  POSIX_REGEX_SOURCE,
+
+  // regular expressions
+  REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
+  REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
+  REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
+  REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
+  REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
+  REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
+
+  // Replace globs with equivalent patterns to reduce parsing time.
+  REPLACEMENTS: {
+    __proto__: null,
+    '***': '*',
+    '**/**': '**',
+    '**/**/**': '**'
+  },
+
+  // Digits
+  CHAR_0: 48, /* 0 */
+  CHAR_9: 57, /* 9 */
+
+  // Alphabet chars.
+  CHAR_UPPERCASE_A: 65, /* A */
+  CHAR_LOWERCASE_A: 97, /* a */
+  CHAR_UPPERCASE_Z: 90, /* Z */
+  CHAR_LOWERCASE_Z: 122, /* z */
+
+  CHAR_LEFT_PARENTHESES: 40, /* ( */
+  CHAR_RIGHT_PARENTHESES: 41, /* ) */
+
+  CHAR_ASTERISK: 42, /* * */
+
+  // Non-alphabetic chars.
+  CHAR_AMPERSAND: 38, /* & */
+  CHAR_AT: 64, /* @ */
+  CHAR_BACKWARD_SLASH: 92, /* \ */
+  CHAR_CARRIAGE_RETURN: 13, /* \r */
+  CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
+  CHAR_COLON: 58, /* : */
+  CHAR_COMMA: 44, /* , */
+  CHAR_DOT: 46, /* . */
+  CHAR_DOUBLE_QUOTE: 34, /* " */
+  CHAR_EQUAL: 61, /* = */
+  CHAR_EXCLAMATION_MARK: 33, /* ! */
+  CHAR_FORM_FEED: 12, /* \f */
+  CHAR_FORWARD_SLASH: 47, /* / */
+  CHAR_GRAVE_ACCENT: 96, /* ` */
+  CHAR_HASH: 35, /* # */
+  CHAR_HYPHEN_MINUS: 45, /* - */
+  CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
+  CHAR_LEFT_CURLY_BRACE: 123, /* { */
+  CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
+  CHAR_LINE_FEED: 10, /* \n */
+  CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
+  CHAR_PERCENT: 37, /* % */
+  CHAR_PLUS: 43, /* + */
+  CHAR_QUESTION_MARK: 63, /* ? */
+  CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
+  CHAR_RIGHT_CURLY_BRACE: 125, /* } */
+  CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
+  CHAR_SEMICOLON: 59, /* ; */
+  CHAR_SINGLE_QUOTE: 39, /* ' */
+  CHAR_SPACE: 32, /*   */
+  CHAR_TAB: 9, /* \t */
+  CHAR_UNDERSCORE: 95, /* _ */
+  CHAR_VERTICAL_LINE: 124, /* | */
+  CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
+
+  /**
+   * Create EXTGLOB_CHARS
+   */
+
+  extglobChars(chars) {
+    return {
+      '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
+      '?': { type: 'qmark', open: '(?:', close: ')?' },
+      '+': { type: 'plus', open: '(?:', close: ')+' },
+      '*': { type: 'star', open: '(?:', close: ')*' },
+      '@': { type: 'at', open: '(?:', close: ')' }
+    };
+  },
+
+  /**
+   * Create GLOB_CHARS
+   */
+
+  globChars(win32) {
+    return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
+  }
+};
diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js
new file mode 100644
index 0000000000000..8fd8ff499d182
--- /dev/null
+++ b/node_modules/picomatch/lib/parse.js
@@ -0,0 +1,1085 @@
+'use strict';
+
+const constants = require('./constants');
+const utils = require('./utils');
+
+/**
+ * Constants
+ */
+
+const {
+  MAX_LENGTH,
+  POSIX_REGEX_SOURCE,
+  REGEX_NON_SPECIAL_CHARS,
+  REGEX_SPECIAL_CHARS_BACKREF,
+  REPLACEMENTS
+} = constants;
+
+/**
+ * Helpers
+ */
+
+const expandRange = (args, options) => {
+  if (typeof options.expandRange === 'function') {
+    return options.expandRange(...args, options);
+  }
+
+  args.sort();
+  const value = `[${args.join('-')}]`;
+
+  try {
+    /* eslint-disable-next-line no-new */
+    new RegExp(value);
+  } catch (ex) {
+    return args.map(v => utils.escapeRegex(v)).join('..');
+  }
+
+  return value;
+};
+
+/**
+ * Create the message for a syntax error
+ */
+
+const syntaxError = (type, char) => {
+  return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
+};
+
+/**
+ * Parse the given input string.
+ * @param {String} input
+ * @param {Object} options
+ * @return {Object}
+ */
+
+const parse = (input, options) => {
+  if (typeof input !== 'string') {
+    throw new TypeError('Expected a string');
+  }
+
+  input = REPLACEMENTS[input] || input;
+
+  const opts = { ...options };
+  const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+
+  let len = input.length;
+  if (len > max) {
+    throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
+  }
+
+  const bos = { type: 'bos', value: '', output: opts.prepend || '' };
+  const tokens = [bos];
+
+  const capture = opts.capture ? '' : '?:';
+
+  // create constants based on platform, for windows or posix
+  const PLATFORM_CHARS = constants.globChars(opts.windows);
+  const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);
+
+  const {
+    DOT_LITERAL,
+    PLUS_LITERAL,
+    SLASH_LITERAL,
+    ONE_CHAR,
+    DOTS_SLASH,
+    NO_DOT,
+    NO_DOT_SLASH,
+    NO_DOTS_SLASH,
+    QMARK,
+    QMARK_NO_DOT,
+    STAR,
+    START_ANCHOR
+  } = PLATFORM_CHARS;
+
+  const globstar = opts => {
+    return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
+  };
+
+  const nodot = opts.dot ? '' : NO_DOT;
+  const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
+  let star = opts.bash === true ? globstar(opts) : STAR;
+
+  if (opts.capture) {
+    star = `(${star})`;
+  }
+
+  // minimatch options support
+  if (typeof opts.noext === 'boolean') {
+    opts.noextglob = opts.noext;
+  }
+
+  const state = {
+    input,
+    index: -1,
+    start: 0,
+    dot: opts.dot === true,
+    consumed: '',
+    output: '',
+    prefix: '',
+    backtrack: false,
+    negated: false,
+    brackets: 0,
+    braces: 0,
+    parens: 0,
+    quotes: 0,
+    globstar: false,
+    tokens
+  };
+
+  input = utils.removePrefix(input, state);
+  len = input.length;
+
+  const extglobs = [];
+  const braces = [];
+  const stack = [];
+  let prev = bos;
+  let value;
+
+  /**
+   * Tokenizing helpers
+   */
+
+  const eos = () => state.index === len - 1;
+  const peek = state.peek = (n = 1) => input[state.index + n];
+  const advance = state.advance = () => input[++state.index] || '';
+  const remaining = () => input.slice(state.index + 1);
+  const consume = (value = '', num = 0) => {
+    state.consumed += value;
+    state.index += num;
+  };
+
+  const append = token => {
+    state.output += token.output != null ? token.output : token.value;
+    consume(token.value);
+  };
+
+  const negate = () => {
+    let count = 1;
+
+    while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
+      advance();
+      state.start++;
+      count++;
+    }
+
+    if (count % 2 === 0) {
+      return false;
+    }
+
+    state.negated = true;
+    state.start++;
+    return true;
+  };
+
+  const increment = type => {
+    state[type]++;
+    stack.push(type);
+  };
+
+  const decrement = type => {
+    state[type]--;
+    stack.pop();
+  };
+
+  /**
+   * Push tokens onto the tokens array. This helper speeds up
+   * tokenizing by 1) helping us avoid backtracking as much as possible,
+   * and 2) helping us avoid creating extra tokens when consecutive
+   * characters are plain text. This improves performance and simplifies
+   * lookbehinds.
+   */
+
+  const push = tok => {
+    if (prev.type === 'globstar') {
+      const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
+      const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
+
+      if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
+        state.output = state.output.slice(0, -prev.output.length);
+        prev.type = 'star';
+        prev.value = '*';
+        prev.output = star;
+        state.output += prev.output;
+      }
+    }
+
+    if (extglobs.length && tok.type !== 'paren') {
+      extglobs[extglobs.length - 1].inner += tok.value;
+    }
+
+    if (tok.value || tok.output) append(tok);
+    if (prev && prev.type === 'text' && tok.type === 'text') {
+      prev.output = (prev.output || prev.value) + tok.value;
+      prev.value += tok.value;
+      return;
+    }
+
+    tok.prev = prev;
+    tokens.push(tok);
+    prev = tok;
+  };
+
+  const extglobOpen = (type, value) => {
+    const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
+
+    token.prev = prev;
+    token.parens = state.parens;
+    token.output = state.output;
+    const output = (opts.capture ? '(' : '') + token.open;
+
+    increment('parens');
+    push({ type, value, output: state.output ? '' : ONE_CHAR });
+    push({ type: 'paren', extglob: true, value: advance(), output });
+    extglobs.push(token);
+  };
+
+  const extglobClose = token => {
+    let output = token.close + (opts.capture ? ')' : '');
+    let rest;
+
+    if (token.type === 'negate') {
+      let extglobStar = star;
+
+      if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
+        extglobStar = globstar(opts);
+      }
+
+      if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
+        output = token.close = `)$))${extglobStar}`;
+      }
+
+      if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
+        // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis.
+        // In this case, we need to parse the string and use it in the output of the original pattern.
+        // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`.
+        //
+        // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`.
+        const expression = parse(rest, { ...options, fastpaths: false }).output;
+
+        output = token.close = `)${expression})${extglobStar})`;
+      }
+
+      if (token.prev.type === 'bos') {
+        state.negatedExtglob = true;
+      }
+    }
+
+    push({ type: 'paren', extglob: true, value, output });
+    decrement('parens');
+  };
+
+  /**
+   * Fast paths
+   */
+
+  if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
+    let backslashes = false;
+
+    let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
+      if (first === '\\') {
+        backslashes = true;
+        return m;
+      }
+
+      if (first === '?') {
+        if (esc) {
+          return esc + first + (rest ? QMARK.repeat(rest.length) : '');
+        }
+        if (index === 0) {
+          return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
+        }
+        return QMARK.repeat(chars.length);
+      }
+
+      if (first === '.') {
+        return DOT_LITERAL.repeat(chars.length);
+      }
+
+      if (first === '*') {
+        if (esc) {
+          return esc + first + (rest ? star : '');
+        }
+        return star;
+      }
+      return esc ? m : `\\${m}`;
+    });
+
+    if (backslashes === true) {
+      if (opts.unescape === true) {
+        output = output.replace(/\\/g, '');
+      } else {
+        output = output.replace(/\\+/g, m => {
+          return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
+        });
+      }
+    }
+
+    if (output === input && opts.contains === true) {
+      state.output = input;
+      return state;
+    }
+
+    state.output = utils.wrapOutput(output, state, options);
+    return state;
+  }
+
+  /**
+   * Tokenize input until we reach end-of-string
+   */
+
+  while (!eos()) {
+    value = advance();
+
+    if (value === '\u0000') {
+      continue;
+    }
+
+    /**
+     * Escaped characters
+     */
+
+    if (value === '\\') {
+      const next = peek();
+
+      if (next === '/' && opts.bash !== true) {
+        continue;
+      }
+
+      if (next === '.' || next === ';') {
+        continue;
+      }
+
+      if (!next) {
+        value += '\\';
+        push({ type: 'text', value });
+        continue;
+      }
+
+      // collapse slashes to reduce potential for exploits
+      const match = /^\\+/.exec(remaining());
+      let slashes = 0;
+
+      if (match && match[0].length > 2) {
+        slashes = match[0].length;
+        state.index += slashes;
+        if (slashes % 2 !== 0) {
+          value += '\\';
+        }
+      }
+
+      if (opts.unescape === true) {
+        value = advance();
+      } else {
+        value += advance();
+      }
+
+      if (state.brackets === 0) {
+        push({ type: 'text', value });
+        continue;
+      }
+    }
+
+    /**
+     * If we're inside a regex character class, continue
+     * until we reach the closing bracket.
+     */
+
+    if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
+      if (opts.posix !== false && value === ':') {
+        const inner = prev.value.slice(1);
+        if (inner.includes('[')) {
+          prev.posix = true;
+
+          if (inner.includes(':')) {
+            const idx = prev.value.lastIndexOf('[');
+            const pre = prev.value.slice(0, idx);
+            const rest = prev.value.slice(idx + 2);
+            const posix = POSIX_REGEX_SOURCE[rest];
+            if (posix) {
+              prev.value = pre + posix;
+              state.backtrack = true;
+              advance();
+
+              if (!bos.output && tokens.indexOf(prev) === 1) {
+                bos.output = ONE_CHAR;
+              }
+              continue;
+            }
+          }
+        }
+      }
+
+      if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
+        value = `\\${value}`;
+      }
+
+      if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
+        value = `\\${value}`;
+      }
+
+      if (opts.posix === true && value === '!' && prev.value === '[') {
+        value = '^';
+      }
+
+      prev.value += value;
+      append({ value });
+      continue;
+    }
+
+    /**
+     * If we're inside a quoted string, continue
+     * until we reach the closing double quote.
+     */
+
+    if (state.quotes === 1 && value !== '"') {
+      value = utils.escapeRegex(value);
+      prev.value += value;
+      append({ value });
+      continue;
+    }
+
+    /**
+     * Double quotes
+     */
+
+    if (value === '"') {
+      state.quotes = state.quotes === 1 ? 0 : 1;
+      if (opts.keepQuotes === true) {
+        push({ type: 'text', value });
+      }
+      continue;
+    }
+
+    /**
+     * Parentheses
+     */
+
+    if (value === '(') {
+      increment('parens');
+      push({ type: 'paren', value });
+      continue;
+    }
+
+    if (value === ')') {
+      if (state.parens === 0 && opts.strictBrackets === true) {
+        throw new SyntaxError(syntaxError('opening', '('));
+      }
+
+      const extglob = extglobs[extglobs.length - 1];
+      if (extglob && state.parens === extglob.parens + 1) {
+        extglobClose(extglobs.pop());
+        continue;
+      }
+
+      push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
+      decrement('parens');
+      continue;
+    }
+
+    /**
+     * Square brackets
+     */
+
+    if (value === '[') {
+      if (opts.nobracket === true || !remaining().includes(']')) {
+        if (opts.nobracket !== true && opts.strictBrackets === true) {
+          throw new SyntaxError(syntaxError('closing', ']'));
+        }
+
+        value = `\\${value}`;
+      } else {
+        increment('brackets');
+      }
+
+      push({ type: 'bracket', value });
+      continue;
+    }
+
+    if (value === ']') {
+      if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
+        push({ type: 'text', value, output: `\\${value}` });
+        continue;
+      }
+
+      if (state.brackets === 0) {
+        if (opts.strictBrackets === true) {
+          throw new SyntaxError(syntaxError('opening', '['));
+        }
+
+        push({ type: 'text', value, output: `\\${value}` });
+        continue;
+      }
+
+      decrement('brackets');
+
+      const prevValue = prev.value.slice(1);
+      if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
+        value = `/${value}`;
+      }
+
+      prev.value += value;
+      append({ value });
+
+      // when literal brackets are explicitly disabled
+      // assume we should match with a regex character class
+      if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {
+        continue;
+      }
+
+      const escaped = utils.escapeRegex(prev.value);
+      state.output = state.output.slice(0, -prev.value.length);
+
+      // when literal brackets are explicitly enabled
+      // assume we should escape the brackets to match literal characters
+      if (opts.literalBrackets === true) {
+        state.output += escaped;
+        prev.value = escaped;
+        continue;
+      }
+
+      // when the user specifies nothing, try to match both
+      prev.value = `(${capture}${escaped}|${prev.value})`;
+      state.output += prev.value;
+      continue;
+    }
+
+    /**
+     * Braces
+     */
+
+    if (value === '{' && opts.nobrace !== true) {
+      increment('braces');
+
+      const open = {
+        type: 'brace',
+        value,
+        output: '(',
+        outputIndex: state.output.length,
+        tokensIndex: state.tokens.length
+      };
+
+      braces.push(open);
+      push(open);
+      continue;
+    }
+
+    if (value === '}') {
+      const brace = braces[braces.length - 1];
+
+      if (opts.nobrace === true || !brace) {
+        push({ type: 'text', value, output: value });
+        continue;
+      }
+
+      let output = ')';
+
+      if (brace.dots === true) {
+        const arr = tokens.slice();
+        const range = [];
+
+        for (let i = arr.length - 1; i >= 0; i--) {
+          tokens.pop();
+          if (arr[i].type === 'brace') {
+            break;
+          }
+          if (arr[i].type !== 'dots') {
+            range.unshift(arr[i].value);
+          }
+        }
+
+        output = expandRange(range, opts);
+        state.backtrack = true;
+      }
+
+      if (brace.comma !== true && brace.dots !== true) {
+        const out = state.output.slice(0, brace.outputIndex);
+        const toks = state.tokens.slice(brace.tokensIndex);
+        brace.value = brace.output = '\\{';
+        value = output = '\\}';
+        state.output = out;
+        for (const t of toks) {
+          state.output += (t.output || t.value);
+        }
+      }
+
+      push({ type: 'brace', value, output });
+      decrement('braces');
+      braces.pop();
+      continue;
+    }
+
+    /**
+     * Pipes
+     */
+
+    if (value === '|') {
+      if (extglobs.length > 0) {
+        extglobs[extglobs.length - 1].conditions++;
+      }
+      push({ type: 'text', value });
+      continue;
+    }
+
+    /**
+     * Commas
+     */
+
+    if (value === ',') {
+      let output = value;
+
+      const brace = braces[braces.length - 1];
+      if (brace && stack[stack.length - 1] === 'braces') {
+        brace.comma = true;
+        output = '|';
+      }
+
+      push({ type: 'comma', value, output });
+      continue;
+    }
+
+    /**
+     * Slashes
+     */
+
+    if (value === '/') {
+      // if the beginning of the glob is "./", advance the start
+      // to the current index, and don't add the "./" characters
+      // to the state. This greatly simplifies lookbehinds when
+      // checking for BOS characters like "!" and "." (not "./")
+      if (prev.type === 'dot' && state.index === state.start + 1) {
+        state.start = state.index + 1;
+        state.consumed = '';
+        state.output = '';
+        tokens.pop();
+        prev = bos; // reset "prev" to the first token
+        continue;
+      }
+
+      push({ type: 'slash', value, output: SLASH_LITERAL });
+      continue;
+    }
+
+    /**
+     * Dots
+     */
+
+    if (value === '.') {
+      if (state.braces > 0 && prev.type === 'dot') {
+        if (prev.value === '.') prev.output = DOT_LITERAL;
+        const brace = braces[braces.length - 1];
+        prev.type = 'dots';
+        prev.output += value;
+        prev.value += value;
+        brace.dots = true;
+        continue;
+      }
+
+      if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
+        push({ type: 'text', value, output: DOT_LITERAL });
+        continue;
+      }
+
+      push({ type: 'dot', value, output: DOT_LITERAL });
+      continue;
+    }
+
+    /**
+     * Question marks
+     */
+
+    if (value === '?') {
+      const isGroup = prev && prev.value === '(';
+      if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+        extglobOpen('qmark', value);
+        continue;
+      }
+
+      if (prev && prev.type === 'paren') {
+        const next = peek();
+        let output = value;
+
+        if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
+          output = `\\${value}`;
+        }
+
+        push({ type: 'text', value, output });
+        continue;
+      }
+
+      if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
+        push({ type: 'qmark', value, output: QMARK_NO_DOT });
+        continue;
+      }
+
+      push({ type: 'qmark', value, output: QMARK });
+      continue;
+    }
+
+    /**
+     * Exclamation
+     */
+
+    if (value === '!') {
+      if (opts.noextglob !== true && peek() === '(') {
+        if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
+          extglobOpen('negate', value);
+          continue;
+        }
+      }
+
+      if (opts.nonegate !== true && state.index === 0) {
+        negate();
+        continue;
+      }
+    }
+
+    /**
+     * Plus
+     */
+
+    if (value === '+') {
+      if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+        extglobOpen('plus', value);
+        continue;
+      }
+
+      if ((prev && prev.value === '(') || opts.regex === false) {
+        push({ type: 'plus', value, output: PLUS_LITERAL });
+        continue;
+      }
+
+      if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
+        push({ type: 'plus', value });
+        continue;
+      }
+
+      push({ type: 'plus', value: PLUS_LITERAL });
+      continue;
+    }
+
+    /**
+     * Plain text
+     */
+
+    if (value === '@') {
+      if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
+        push({ type: 'at', extglob: true, value, output: '' });
+        continue;
+      }
+
+      push({ type: 'text', value });
+      continue;
+    }
+
+    /**
+     * Plain text
+     */
+
+    if (value !== '*') {
+      if (value === '$' || value === '^') {
+        value = `\\${value}`;
+      }
+
+      const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
+      if (match) {
+        value += match[0];
+        state.index += match[0].length;
+      }
+
+      push({ type: 'text', value });
+      continue;
+    }
+
+    /**
+     * Stars
+     */
+
+    if (prev && (prev.type === 'globstar' || prev.star === true)) {
+      prev.type = 'star';
+      prev.star = true;
+      prev.value += value;
+      prev.output = star;
+      state.backtrack = true;
+      state.globstar = true;
+      consume(value);
+      continue;
+    }
+
+    let rest = remaining();
+    if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
+      extglobOpen('star', value);
+      continue;
+    }
+
+    if (prev.type === 'star') {
+      if (opts.noglobstar === true) {
+        consume(value);
+        continue;
+      }
+
+      const prior = prev.prev;
+      const before = prior.prev;
+      const isStart = prior.type === 'slash' || prior.type === 'bos';
+      const afterStar = before && (before.type === 'star' || before.type === 'globstar');
+
+      if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
+        push({ type: 'star', value, output: '' });
+        continue;
+      }
+
+      const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
+      const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
+      if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
+        push({ type: 'star', value, output: '' });
+        continue;
+      }
+
+      // strip consecutive `/**/`
+      while (rest.slice(0, 3) === '/**') {
+        const after = input[state.index + 4];
+        if (after && after !== '/') {
+          break;
+        }
+        rest = rest.slice(3);
+        consume('/**', 3);
+      }
+
+      if (prior.type === 'bos' && eos()) {
+        prev.type = 'globstar';
+        prev.value += value;
+        prev.output = globstar(opts);
+        state.output = prev.output;
+        state.globstar = true;
+        consume(value);
+        continue;
+      }
+
+      if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
+        state.output = state.output.slice(0, -(prior.output + prev.output).length);
+        prior.output = `(?:${prior.output}`;
+
+        prev.type = 'globstar';
+        prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
+        prev.value += value;
+        state.globstar = true;
+        state.output += prior.output + prev.output;
+        consume(value);
+        continue;
+      }
+
+      if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
+        const end = rest[1] !== void 0 ? '|$' : '';
+
+        state.output = state.output.slice(0, -(prior.output + prev.output).length);
+        prior.output = `(?:${prior.output}`;
+
+        prev.type = 'globstar';
+        prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
+        prev.value += value;
+
+        state.output += prior.output + prev.output;
+        state.globstar = true;
+
+        consume(value + advance());
+
+        push({ type: 'slash', value: '/', output: '' });
+        continue;
+      }
+
+      if (prior.type === 'bos' && rest[0] === '/') {
+        prev.type = 'globstar';
+        prev.value += value;
+        prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
+        state.output = prev.output;
+        state.globstar = true;
+        consume(value + advance());
+        push({ type: 'slash', value: '/', output: '' });
+        continue;
+      }
+
+      // remove single star from output
+      state.output = state.output.slice(0, -prev.output.length);
+
+      // reset previous token to globstar
+      prev.type = 'globstar';
+      prev.output = globstar(opts);
+      prev.value += value;
+
+      // reset output with globstar
+      state.output += prev.output;
+      state.globstar = true;
+      consume(value);
+      continue;
+    }
+
+    const token = { type: 'star', value, output: star };
+
+    if (opts.bash === true) {
+      token.output = '.*?';
+      if (prev.type === 'bos' || prev.type === 'slash') {
+        token.output = nodot + token.output;
+      }
+      push(token);
+      continue;
+    }
+
+    if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
+      token.output = value;
+      push(token);
+      continue;
+    }
+
+    if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
+      if (prev.type === 'dot') {
+        state.output += NO_DOT_SLASH;
+        prev.output += NO_DOT_SLASH;
+
+      } else if (opts.dot === true) {
+        state.output += NO_DOTS_SLASH;
+        prev.output += NO_DOTS_SLASH;
+
+      } else {
+        state.output += nodot;
+        prev.output += nodot;
+      }
+
+      if (peek() !== '*') {
+        state.output += ONE_CHAR;
+        prev.output += ONE_CHAR;
+      }
+    }
+
+    push(token);
+  }
+
+  while (state.brackets > 0) {
+    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
+    state.output = utils.escapeLast(state.output, '[');
+    decrement('brackets');
+  }
+
+  while (state.parens > 0) {
+    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
+    state.output = utils.escapeLast(state.output, '(');
+    decrement('parens');
+  }
+
+  while (state.braces > 0) {
+    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
+    state.output = utils.escapeLast(state.output, '{');
+    decrement('braces');
+  }
+
+  if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
+    push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
+  }
+
+  // rebuild the output if we had to backtrack at any point
+  if (state.backtrack === true) {
+    state.output = '';
+
+    for (const token of state.tokens) {
+      state.output += token.output != null ? token.output : token.value;
+
+      if (token.suffix) {
+        state.output += token.suffix;
+      }
+    }
+  }
+
+  return state;
+};
+
+/**
+ * Fast paths for creating regular expressions for common glob patterns.
+ * This can significantly speed up processing and has very little downside
+ * impact when none of the fast paths match.
+ */
+
+parse.fastpaths = (input, options) => {
+  const opts = { ...options };
+  const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+  const len = input.length;
+  if (len > max) {
+    throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
+  }
+
+  input = REPLACEMENTS[input] || input;
+
+  // create constants based on platform, for windows or posix
+  const {
+    DOT_LITERAL,
+    SLASH_LITERAL,
+    ONE_CHAR,
+    DOTS_SLASH,
+    NO_DOT,
+    NO_DOTS,
+    NO_DOTS_SLASH,
+    STAR,
+    START_ANCHOR
+  } = constants.globChars(opts.windows);
+
+  const nodot = opts.dot ? NO_DOTS : NO_DOT;
+  const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
+  const capture = opts.capture ? '' : '?:';
+  const state = { negated: false, prefix: '' };
+  let star = opts.bash === true ? '.*?' : STAR;
+
+  if (opts.capture) {
+    star = `(${star})`;
+  }
+
+  const globstar = opts => {
+    if (opts.noglobstar === true) return star;
+    return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
+  };
+
+  const create = str => {
+    switch (str) {
+      case '*':
+        return `${nodot}${ONE_CHAR}${star}`;
+
+      case '.*':
+        return `${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+      case '*.*':
+        return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+      case '*/*':
+        return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
+
+      case '**':
+        return nodot + globstar(opts);
+
+      case '**/*':
+        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
+
+      case '**/*.*':
+        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+      case '**/.*':
+        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
+
+      default: {
+        const match = /^(.*?)\.(\w+)$/.exec(str);
+        if (!match) return;
+
+        const source = create(match[1]);
+        if (!source) return;
+
+        return source + DOT_LITERAL + match[2];
+      }
+    }
+  };
+
+  const output = utils.removePrefix(input, state);
+  let source = create(output);
+
+  if (source && opts.strictSlashes !== true) {
+    source += `${SLASH_LITERAL}?`;
+  }
+
+  return source;
+};
+
+module.exports = parse;
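
The state object returned by `parse()` is not a regex on its own; as the code further down in this patch shows, `picomatch.makeRe()` runs `parse()` (or `parse.fastpaths()` for simple patterns) and hands the result to `picomatch.compileRe()`. A minimal sketch of that flow, illustrative only, with paths assuming the `lib` directory:

const parse = require('./parse');
const picomatch = require('./picomatch');

const state = parse('*.js', { windows: false });
const re = picomatch.compileRe(state); //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/

console.log(re.test('index.js'));     // true
console.log(re.test('.eslintrc.js')); // false (leading dot excluded unless opts.dot is set)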
diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js
new file mode 100644
index 0000000000000..d0ebd9f163cf2
--- /dev/null
+++ b/node_modules/picomatch/lib/picomatch.js
@@ -0,0 +1,341 @@
+'use strict';
+
+const scan = require('./scan');
+const parse = require('./parse');
+const utils = require('./utils');
+const constants = require('./constants');
+const isObject = val => val && typeof val === 'object' && !Array.isArray(val);
+
+/**
+ * Creates a matcher function from one or more glob patterns. The
+ * returned function takes a string to match as its first argument,
+ * and returns true if the string is a match. The returned matcher
+ * function also takes a boolean as the second argument that, when true,
+ * returns an object with additional information.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch(glob[, options]);
+ *
+ * const isMatch = picomatch('*.!(*a)');
+ * console.log(isMatch('a.a')); //=> false
+ * console.log(isMatch('a.b')); //=> true
+ * ```
+ * @name picomatch
+ * @param {String|Array} `globs` One or more glob patterns.
+ * @param {Object=} `options`
+ * @return {Function=} Returns a matcher function.
+ * @api public
+ */
+
+const picomatch = (glob, options, returnState = false) => {
+  if (Array.isArray(glob)) {
+    const fns = glob.map(input => picomatch(input, options, returnState));
+    const arrayMatcher = str => {
+      for (const isMatch of fns) {
+        const state = isMatch(str);
+        if (state) return state;
+      }
+      return false;
+    };
+    return arrayMatcher;
+  }
+
+  const isState = isObject(glob) && glob.tokens && glob.input;
+
+  if (glob === '' || (typeof glob !== 'string' && !isState)) {
+    throw new TypeError('Expected pattern to be a non-empty string');
+  }
+
+  const opts = options || {};
+  const posix = opts.windows;
+  const regex = isState
+    ? picomatch.compileRe(glob, options)
+    : picomatch.makeRe(glob, options, false, true);
+
+  const state = regex.state;
+  delete regex.state;
+
+  let isIgnored = () => false;
+  if (opts.ignore) {
+    const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
+    isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
+  }
+
+  const matcher = (input, returnObject = false) => {
+    const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
+    const result = { glob, state, regex, posix, input, output, match, isMatch };
+
+    if (typeof opts.onResult === 'function') {
+      opts.onResult(result);
+    }
+
+    if (isMatch === false) {
+      result.isMatch = false;
+      return returnObject ? result : false;
+    }
+
+    if (isIgnored(input)) {
+      if (typeof opts.onIgnore === 'function') {
+        opts.onIgnore(result);
+      }
+      result.isMatch = false;
+      return returnObject ? result : false;
+    }
+
+    if (typeof opts.onMatch === 'function') {
+      opts.onMatch(result);
+    }
+    return returnObject ? result : true;
+  };
+
+  if (returnState) {
+    matcher.state = state;
+  }
+
+  return matcher;
+};
+
+/**
+ * Test `input` with the given `regex`. This is used by the main
+ * `picomatch()` function to test the input string.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.test(input, regex[, options]);
+ *
+ * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
+ * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
+ * ```
+ * @param {String} `input` String to test.
+ * @param {RegExp} `regex`
+ * @return {Object} Returns an object with matching info.
+ * @api public
+ */
+
+picomatch.test = (input, regex, options, { glob, posix } = {}) => {
+  if (typeof input !== 'string') {
+    throw new TypeError('Expected input to be a string');
+  }
+
+  if (input === '') {
+    return { isMatch: false, output: '' };
+  }
+
+  const opts = options || {};
+  const format = opts.format || (posix ? utils.toPosixSlashes : null);
+  let match = input === glob;
+  let output = (match && format) ? format(input) : input;
+
+  if (match === false) {
+    output = format ? format(input) : input;
+    match = output === glob;
+  }
+
+  if (match === false || opts.capture === true) {
+    if (opts.matchBase === true || opts.basename === true) {
+      match = picomatch.matchBase(input, regex, options, posix);
+    } else {
+      match = regex.exec(output);
+    }
+  }
+
+  return { isMatch: Boolean(match), match, output };
+};
+
+/**
+ * Match the basename of a filepath.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.matchBase(input, glob[, options]);
+ * console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
+ * ```
+ * @param {String} `input` String to test.
+ * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
+ * @return {Boolean}
+ * @api public
+ */
+
+picomatch.matchBase = (input, glob, options) => {
+  const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
+  return regex.test(utils.basename(input));
+};
+
+/**
+ * Returns true if **any** of the given glob `patterns` match the specified `string`.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.isMatch(string, patterns[, options]);
+ *
+ * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
+ * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
+ * ```
+ * @param {String|Array} str The string to test.
+ * @param {String|Array} patterns One or more glob patterns to use for matching.
+ * @param {Object} [options] See available [options](#options).
+ * @return {Boolean} Returns true if any patterns match `str`
+ * @api public
+ */
+
+picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
+
+/**
+ * Parse a glob pattern to create the source string for a regular
+ * expression.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * const result = picomatch.parse(pattern[, options]);
+ * ```
+ * @param {String} `pattern`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties and output to be used as a regex source string.
+ * @api public
+ */
+
+picomatch.parse = (pattern, options) => {
+  if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));
+  return parse(pattern, { ...options, fastpaths: false });
+};
+
+/**
+ * Scan a glob pattern to separate the pattern into segments.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.scan(input[, options]);
+ *
+ * const result = picomatch.scan('!./foo/*.js');
+ * console.log(result);
+ * { prefix: '!./',
+ *   input: '!./foo/*.js',
+ *   start: 3,
+ *   base: 'foo',
+ *   glob: '*.js',
+ *   isBrace: false,
+ *   isBracket: false,
+ *   isGlob: true,
+ *   isExtglob: false,
+ *   isGlobstar: false,
+ *   negated: true }
+ * ```
+ * @param {String} `input` Glob pattern to scan.
+ * @param {Object} `options`
+ * @return {Object} Returns an object with useful properties describing the pattern.
+ * @api public
+ */
+
+picomatch.scan = (input, options) => scan(input, options);
+
+/**
+ * Compile a regular expression from the `state` object returned by the
+ * [parse()](#parse) method.
+ *
+ * @param {Object} `state`
+ * @param {Object} `options`
+ * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
+ * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
+ * @return {RegExp}
+ * @api public
+ */
+
+picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => {
+  if (returnOutput === true) {
+    return state.output;
+  }
+
+  const opts = options || {};
+  const prepend = opts.contains ? '' : '^';
+  const append = opts.contains ? '' : '$';
+
+  let source = `${prepend}(?:${state.output})${append}`;
+  if (state && state.negated === true) {
+    source = `^(?!${source}).*$`;
+  }
+
+  const regex = picomatch.toRegex(source, options);
+  if (returnState === true) {
+    regex.state = state;
+  }
+
+  return regex;
+};
+
+/**
+ * Create a regular expression from a parsed glob pattern.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * const state = picomatch.parse('*.js');
+ * // picomatch.compileRe(state[, options]);
+ *
+ * console.log(picomatch.compileRe(state));
+ * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
+ * ```
+ * @param {String} `state` The object returned from the `.parse` method.
+ * @param {Object} `options`
+ * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
+ * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
+ * @return {RegExp} Returns a regex created from the given pattern.
+ * @api public
+ */
+
+picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
+  if (!input || typeof input !== 'string') {
+    throw new TypeError('Expected a non-empty string');
+  }
+
+  let parsed = { negated: false, fastpaths: true };
+
+  if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
+    parsed.output = parse.fastpaths(input, options);
+  }
+
+  if (!parsed.output) {
+    parsed = parse(input, options);
+  }
+
+  return picomatch.compileRe(parsed, options, returnOutput, returnState);
+};
+
+/**
+ * Create a regular expression from the given regex source string.
+ *
+ * ```js
+ * const picomatch = require('picomatch');
+ * // picomatch.toRegex(source[, options]);
+ *
+ * const { output } = picomatch.parse('*.js');
+ * console.log(picomatch.toRegex(output));
+ * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
+ * ```
+ * @param {String} `source` Regular expression source string.
+ * @param {Object} `options`
+ * @return {RegExp}
+ * @api public
+ */
+
+picomatch.toRegex = (source, options) => {
+  try {
+    const opts = options || {};
+    return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
+  } catch (err) {
+    if (options && options.debug === true) throw err;
+    return /$^/;
+  }
+};
+
+/**
+ * Picomatch constants.
+ * @return {Object}
+ */
+
+picomatch.constants = constants;
+
+/**
+ * Expose "picomatch"
+ */
+
+module.exports = picomatch;
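
A minimal usage sketch of the public API defined above, mirroring the JSDoc examples (illustrative only; assumes the package is installed as `picomatch`):

const picomatch = require('picomatch');

const isMatch = picomatch('*.!(*a)');
console.log(isMatch('a.a')); //=> false
console.log(isMatch('a.b')); //=> true

console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
console.log(picomatch.makeRe('*.js').test('index.js')); //=> true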
diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js
new file mode 100644
index 0000000000000..e59cd7a1357b1
--- /dev/null
+++ b/node_modules/picomatch/lib/scan.js
@@ -0,0 +1,391 @@
+'use strict';
+
+const utils = require('./utils');
+const {
+  CHAR_ASTERISK,             /* * */
+  CHAR_AT,                   /* @ */
+  CHAR_BACKWARD_SLASH,       /* \ */
+  CHAR_COMMA,                /* , */
+  CHAR_DOT,                  /* . */
+  CHAR_EXCLAMATION_MARK,     /* ! */
+  CHAR_FORWARD_SLASH,        /* / */
+  CHAR_LEFT_CURLY_BRACE,     /* { */
+  CHAR_LEFT_PARENTHESES,     /* ( */
+  CHAR_LEFT_SQUARE_BRACKET,  /* [ */
+  CHAR_PLUS,                 /* + */
+  CHAR_QUESTION_MARK,        /* ? */
+  CHAR_RIGHT_CURLY_BRACE,    /* } */
+  CHAR_RIGHT_PARENTHESES,    /* ) */
+  CHAR_RIGHT_SQUARE_BRACKET  /* ] */
+} = require('./constants');
+
+const isPathSeparator = code => {
+  return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
+};
+
+const depth = token => {
+  if (token.isPrefix !== true) {
+    token.depth = token.isGlobstar ? Infinity : 1;
+  }
+};
+
+/**
+ * Quickly scans a glob pattern and returns an object with a handful of
+ * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
+ * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
+ * with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
+ *
+ * ```js
+ * const pm = require('picomatch');
+ * console.log(pm.scan('foo/bar/*.js'));
+ * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {Object} Returns an object with tokens and regex source string.
+ * @api public
+ */
+
+const scan = (input, options) => {
+  const opts = options || {};
+
+  const length = input.length - 1;
+  const scanToEnd = opts.parts === true || opts.scanToEnd === true;
+  const slashes = [];
+  const tokens = [];
+  const parts = [];
+
+  let str = input;
+  let index = -1;
+  let start = 0;
+  let lastIndex = 0;
+  let isBrace = false;
+  let isBracket = false;
+  let isGlob = false;
+  let isExtglob = false;
+  let isGlobstar = false;
+  let braceEscaped = false;
+  let backslashes = false;
+  let negated = false;
+  let negatedExtglob = false;
+  let finished = false;
+  let braces = 0;
+  let prev;
+  let code;
+  let token = { value: '', depth: 0, isGlob: false };
+
+  const eos = () => index >= length;
+  const peek = () => str.charCodeAt(index + 1);
+  const advance = () => {
+    prev = code;
+    return str.charCodeAt(++index);
+  };
+
+  while (index < length) {
+    code = advance();
+    let next;
+
+    if (code === CHAR_BACKWARD_SLASH) {
+      backslashes = token.backslashes = true;
+      code = advance();
+
+      if (code === CHAR_LEFT_CURLY_BRACE) {
+        braceEscaped = true;
+      }
+      continue;
+    }
+
+    if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
+      braces++;
+
+      while (eos() !== true && (code = advance())) {
+        if (code === CHAR_BACKWARD_SLASH) {
+          backslashes = token.backslashes = true;
+          advance();
+          continue;
+        }
+
+        if (code === CHAR_LEFT_CURLY_BRACE) {
+          braces++;
+          continue;
+        }
+
+        if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
+          isBrace = token.isBrace = true;
+          isGlob = token.isGlob = true;
+          finished = true;
+
+          if (scanToEnd === true) {
+            continue;
+          }
+
+          break;
+        }
+
+        if (braceEscaped !== true && code === CHAR_COMMA) {
+          isBrace = token.isBrace = true;
+          isGlob = token.isGlob = true;
+          finished = true;
+
+          if (scanToEnd === true) {
+            continue;
+          }
+
+          break;
+        }
+
+        if (code === CHAR_RIGHT_CURLY_BRACE) {
+          braces--;
+
+          if (braces === 0) {
+            braceEscaped = false;
+            isBrace = token.isBrace = true;
+            finished = true;
+            break;
+          }
+        }
+      }
+
+      if (scanToEnd === true) {
+        continue;
+      }
+
+      break;
+    }
+
+    if (code === CHAR_FORWARD_SLASH) {
+      slashes.push(index);
+      tokens.push(token);
+      token = { value: '', depth: 0, isGlob: false };
+
+      if (finished === true) continue;
+      if (prev === CHAR_DOT && index === (start + 1)) {
+        start += 2;
+        continue;
+      }
+
+      lastIndex = index + 1;
+      continue;
+    }
+
+    if (opts.noext !== true) {
+      const isExtglobChar = code === CHAR_PLUS
+        || code === CHAR_AT
+        || code === CHAR_ASTERISK
+        || code === CHAR_QUESTION_MARK
+        || code === CHAR_EXCLAMATION_MARK;
+
+      if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
+        isGlob = token.isGlob = true;
+        isExtglob = token.isExtglob = true;
+        finished = true;
+        if (code === CHAR_EXCLAMATION_MARK && index === start) {
+          negatedExtglob = true;
+        }
+
+        if (scanToEnd === true) {
+          while (eos() !== true && (code = advance())) {
+            if (code === CHAR_BACKWARD_SLASH) {
+              backslashes = token.backslashes = true;
+              code = advance();
+              continue;
+            }
+
+            if (code === CHAR_RIGHT_PARENTHESES) {
+              isGlob = token.isGlob = true;
+              finished = true;
+              break;
+            }
+          }
+          continue;
+        }
+        break;
+      }
+    }
+
+    if (code === CHAR_ASTERISK) {
+      if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
+      isGlob = token.isGlob = true;
+      finished = true;
+
+      if (scanToEnd === true) {
+        continue;
+      }
+      break;
+    }
+
+    if (code === CHAR_QUESTION_MARK) {
+      isGlob = token.isGlob = true;
+      finished = true;
+
+      if (scanToEnd === true) {
+        continue;
+      }
+      break;
+    }
+
+    if (code === CHAR_LEFT_SQUARE_BRACKET) {
+      while (eos() !== true && (next = advance())) {
+        if (next === CHAR_BACKWARD_SLASH) {
+          backslashes = token.backslashes = true;
+          advance();
+          continue;
+        }
+
+        if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+          isBracket = token.isBracket = true;
+          isGlob = token.isGlob = true;
+          finished = true;
+          break;
+        }
+      }
+
+      if (scanToEnd === true) {
+        continue;
+      }
+
+      break;
+    }
+
+    if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
+      negated = token.negated = true;
+      start++;
+      continue;
+    }
+
+    if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
+      isGlob = token.isGlob = true;
+
+      if (scanToEnd === true) {
+        while (eos() !== true && (code = advance())) {
+          if (code === CHAR_LEFT_PARENTHESES) {
+            backslashes = token.backslashes = true;
+            code = advance();
+            continue;
+          }
+
+          if (code === CHAR_RIGHT_PARENTHESES) {
+            finished = true;
+            break;
+          }
+        }
+        continue;
+      }
+      break;
+    }
+
+    if (isGlob === true) {
+      finished = true;
+
+      if (scanToEnd === true) {
+        continue;
+      }
+
+      break;
+    }
+  }
+
+  if (opts.noext === true) {
+    isExtglob = false;
+    isGlob = false;
+  }
+
+  let base = str;
+  let prefix = '';
+  let glob = '';
+
+  if (start > 0) {
+    prefix = str.slice(0, start);
+    str = str.slice(start);
+    lastIndex -= start;
+  }
+
+  if (base && isGlob === true && lastIndex > 0) {
+    base = str.slice(0, lastIndex);
+    glob = str.slice(lastIndex);
+  } else if (isGlob === true) {
+    base = '';
+    glob = str;
+  } else {
+    base = str;
+  }
+
+  if (base && base !== '' && base !== '/' && base !== str) {
+    if (isPathSeparator(base.charCodeAt(base.length - 1))) {
+      base = base.slice(0, -1);
+    }
+  }
+
+  if (opts.unescape === true) {
+    if (glob) glob = utils.removeBackslashes(glob);
+
+    if (base && backslashes === true) {
+      base = utils.removeBackslashes(base);
+    }
+  }
+
+  const state = {
+    prefix,
+    input,
+    start,
+    base,
+    glob,
+    isBrace,
+    isBracket,
+    isGlob,
+    isExtglob,
+    isGlobstar,
+    negated,
+    negatedExtglob
+  };
+
+  if (opts.tokens === true) {
+    state.maxDepth = 0;
+    if (!isPathSeparator(code)) {
+      tokens.push(token);
+    }
+    state.tokens = tokens;
+  }
+
+  if (opts.parts === true || opts.tokens === true) {
+    let prevIndex;
+
+    for (let idx = 0; idx < slashes.length; idx++) {
+      const n = prevIndex ? prevIndex + 1 : start;
+      const i = slashes[idx];
+      const value = input.slice(n, i);
+      if (opts.tokens) {
+        if (idx === 0 && start !== 0) {
+          tokens[idx].isPrefix = true;
+          tokens[idx].value = prefix;
+        } else {
+          tokens[idx].value = value;
+        }
+        depth(tokens[idx]);
+        state.maxDepth += tokens[idx].depth;
+      }
+      if (idx !== 0 || value !== '') {
+        parts.push(value);
+      }
+      prevIndex = i;
+    }
+
+    if (prevIndex && prevIndex + 1 < input.length) {
+      const value = input.slice(prevIndex + 1);
+      parts.push(value);
+
+      if (opts.tokens) {
+        tokens[tokens.length - 1].value = value;
+        depth(tokens[tokens.length - 1]);
+        state.maxDepth += tokens[tokens.length - 1].depth;
+      }
+    }
+
+    state.slashes = slashes;
+    state.parts = parts;
+  }
+
+  return state;
+};
+
+module.exports = scan;
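
For reference, `scan()` is the lightweight analysis pass behind `picomatch.scan()`: it splits a pattern into its literal base and glob portion without building a regex. A minimal sketch, illustrative only, mirroring the JSDoc example above plus the `negatedExtglob` flag this version adds to the returned state:

const scan = require('./scan');

console.log(scan('!./foo/*.js'));
// { prefix: '!./', input: '!./foo/*.js', start: 3, base: 'foo', glob: '*.js',
//   isBrace: false, isBracket: false, isGlob: true, isExtglob: false,
//   isGlobstar: false, negated: true, negatedExtglob: false }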
diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js
new file mode 100644
index 0000000000000..9c97cae222ca8
--- /dev/null
+++ b/node_modules/picomatch/lib/utils.js
@@ -0,0 +1,72 @@
+/*global navigator*/
+'use strict';
+
+const {
+  REGEX_BACKSLASH,
+  REGEX_REMOVE_BACKSLASH,
+  REGEX_SPECIAL_CHARS,
+  REGEX_SPECIAL_CHARS_GLOBAL
+} = require('./constants');
+
+exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
+exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
+exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
+exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
+
+exports.isWindows = () => {
+  if (typeof navigator !== 'undefined' && navigator.platform) {
+    const platform = navigator.platform.toLowerCase();
+    return platform === 'win32' || platform === 'windows';
+  }
+
+  if (typeof process !== 'undefined' && process.platform) {
+    return process.platform === 'win32';
+  }
+
+  return false;
+};
+
+exports.removeBackslashes = str => {
+  return str.replace(REGEX_REMOVE_BACKSLASH, match => {
+    return match === '\\' ? '' : match;
+  });
+};
+
+exports.escapeLast = (input, char, lastIdx) => {
+  const idx = input.lastIndexOf(char, lastIdx);
+  if (idx === -1) return input;
+  if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
+  return `${input.slice(0, idx)}\\${input.slice(idx)}`;
+};
+
+exports.removePrefix = (input, state = {}) => {
+  let output = input;
+  if (output.startsWith('./')) {
+    output = output.slice(2);
+    state.prefix = './';
+  }
+  return output;
+};
+
+exports.wrapOutput = (input, state = {}, options = {}) => {
+  const prepend = options.contains ? '' : '^';
+  const append = options.contains ? '' : '$';
+
+  let output = `${prepend}(?:${input})${append}`;
+  if (state.negated === true) {
+    output = `(?:^(?!${output}).*$)`;
+  }
+  return output;
+};
+
+exports.basename = (path, { windows } = {}) => {
+  const segs = path.split(windows ? /[\\/]/ : '/');
+  const last = segs[segs.length - 1];
+
+  if (last === '') {
+    return segs[segs.length - 2];
+  }
+
+  return last;
+};
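
The helpers above are small and self-contained; for instance (illustrative only, run from the `lib` directory):

const utils = require('./utils');

console.log(utils.basename('foo/bar.js'));             //=> 'bar.js'
console.log(utils.toPosixSlashes('foo\\bar\\baz.js')); //=> 'foo/bar/baz.js'
console.log(utils.removePrefix('./foo/*.js'));         //=> 'foo/*.js'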
diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json
new file mode 100644
index 0000000000000..372e27e05f412
--- /dev/null
+++ b/node_modules/picomatch/package.json
@@ -0,0 +1,83 @@
+{
+  "name": "picomatch",
+  "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.",
+  "version": "4.0.3",
+  "homepage": "https://github.com/micromatch/picomatch",
+  "author": "Jon Schlinkert (https://github.com/jonschlinkert)",
+  "funding": "https://github.com/sponsors/jonschlinkert",
+  "repository": "micromatch/picomatch",
+  "bugs": {
+    "url": "https://github.com/micromatch/picomatch/issues"
+  },
+  "license": "MIT",
+  "files": [
+    "index.js",
+    "posix.js",
+    "lib"
+  ],
+  "sideEffects": false,
+  "main": "index.js",
+  "engines": {
+    "node": ">=12"
+  },
+  "scripts": {
+    "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .",
+    "mocha": "mocha --reporter dot",
+    "test": "npm run lint && npm run mocha",
+    "test:ci": "npm run test:cover",
+    "test:cover": "nyc npm run mocha"
+  },
+  "devDependencies": {
+    "eslint": "^8.57.0",
+    "fill-range": "^7.0.1",
+    "gulp-format-md": "^2.0.0",
+    "mocha": "^10.4.0",
+    "nyc": "^15.1.0",
+    "time-require": "github:jonschlinkert/time-require"
+  },
+  "keywords": [
+    "glob",
+    "match",
+    "picomatch"
+  ],
+  "nyc": {
+    "reporter": [
+      "html",
+      "lcov",
+      "text-summary"
+    ]
+  },
+  "verb": {
+    "toc": {
+      "render": true,
+      "method": "preWrite",
+      "maxdepth": 3
+    },
+    "layout": "empty",
+    "tasks": [
+      "readme"
+    ],
+    "plugins": [
+      "gulp-format-md"
+    ],
+    "lint": {
+      "reflinks": true
+    },
+    "related": {
+      "list": [
+        "braces",
+        "micromatch"
+      ]
+    },
+    "reflinks": [
+      "braces",
+      "expand-brackets",
+      "extglob",
+      "fill-range",
+      "micromatch",
+      "minimatch",
+      "nanomatch",
+      "picomatch"
+    ]
+  }
+}
diff --git a/node_modules/picomatch/posix.js b/node_modules/picomatch/posix.js
new file mode 100644
index 0000000000000..d2f2bc59d0ac7
--- /dev/null
+++ b/node_modules/picomatch/posix.js
@@ -0,0 +1,3 @@
+'use strict';
+
+module.exports = require('./lib/picomatch');

From 402a0ab1b4e5d1a8414dd063d0cbde0c0bc5a192 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:08:41 -0700
Subject: [PATCH 45/63] chore: @npmcli/template-oss@4.25.1

---
 docs/package.json                       |    4 +-
 mock-globals/package.json               |    4 +-
 mock-registry/package.json              |    4 +-
 node_modules/.gitignore                 |    6 +-
 node_modules/fdir/LICENSE               |    7 -
 node_modules/fdir/dist/index.cjs        |  588 ----
 node_modules/fdir/dist/index.d.cts      |  155 -
 node_modules/fdir/dist/index.d.mts      |  155 -
 node_modules/fdir/dist/index.mjs        |  570 ----
 node_modules/fdir/package.json          |  103 -
 node_modules/picomatch/LICENSE          |   21 -
 node_modules/picomatch/index.js         |   17 -
 node_modules/picomatch/lib/constants.js |  180 -
 node_modules/picomatch/lib/parse.js     | 1085 ------
 node_modules/picomatch/lib/picomatch.js |  341 --
 node_modules/picomatch/lib/scan.js      |  391 ---
 node_modules/picomatch/lib/utils.js     |   72 -
 node_modules/picomatch/package.json     |   83 -
 node_modules/picomatch/posix.js         |    3 -
 package-lock.json                       | 4142 +++++------------------
 package.json                            |    4 +-
 smoke-tests/package.json                |    4 +-
 workspaces/arborist/package.json        |    4 +-
 workspaces/config/package.json          |    4 +-
 workspaces/libnpmaccess/package.json    |    4 +-
 workspaces/libnpmdiff/package.json      |    4 +-
 workspaces/libnpmexec/package.json      |    4 +-
 workspaces/libnpmfund/package.json      |    4 +-
 workspaces/libnpmorg/package.json       |    4 +-
 workspaces/libnpmpack/package.json      |    4 +-
 workspaces/libnpmpublish/package.json   |    4 +-
 workspaces/libnpmsearch/package.json    |    4 +-
 workspaces/libnpmteam/package.json      |    4 +-
 workspaces/libnpmversion/package.json   |    4 +-
 34 files changed, 903 insertions(+), 7084 deletions(-)
 delete mode 100644 node_modules/fdir/LICENSE
 delete mode 100644 node_modules/fdir/dist/index.cjs
 delete mode 100644 node_modules/fdir/dist/index.d.cts
 delete mode 100644 node_modules/fdir/dist/index.d.mts
 delete mode 100644 node_modules/fdir/dist/index.mjs
 delete mode 100644 node_modules/fdir/package.json
 delete mode 100644 node_modules/picomatch/LICENSE
 delete mode 100644 node_modules/picomatch/index.js
 delete mode 100644 node_modules/picomatch/lib/constants.js
 delete mode 100644 node_modules/picomatch/lib/parse.js
 delete mode 100644 node_modules/picomatch/lib/picomatch.js
 delete mode 100644 node_modules/picomatch/lib/scan.js
 delete mode 100644 node_modules/picomatch/lib/utils.js
 delete mode 100644 node_modules/picomatch/package.json
 delete mode 100644 node_modules/picomatch/posix.js

diff --git a/docs/package.json b/docs/package.json
index d1d1884e4ba65..1946a8b6e9664 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -23,7 +23,7 @@
   "devDependencies": {
     "@isaacs/string-locale-compare": "^1.1.0",
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "front-matter": "^4.0.2",
     "ignore-walk": "^8.0.0",
     "jsdom": "^24.0.0",
@@ -56,7 +56,7 @@
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
     "ciVersions": "latest",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../scripts/template-oss/index.js",
     "workspaceRepo": {
       "add": {
diff --git a/mock-globals/package.json b/mock-globals/package.json
index bea0730d44dd0..98d849aba496e 100644
--- a/mock-globals/package.json
+++ b/mock-globals/package.json
@@ -35,7 +35,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../scripts/template-oss/index.js"
   },
   "tap": {
@@ -50,7 +50,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "tap": "^16.3.8"
   }
 }
diff --git a/mock-registry/package.json b/mock-registry/package.json
index 4db2bda9ee0dd..94d3baeb27c49 100644
--- a/mock-registry/package.json
+++ b/mock-registry/package.json
@@ -35,7 +35,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../scripts/template-oss/index.js"
   },
   "tap": {
@@ -48,7 +48,7 @@
   "devDependencies": {
     "@npmcli/arborist": "^9.1.2",
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "json-stringify-safe": "^5.0.1",
     "nock": "^13.3.3",
     "npm-package-arg": "^13.0.0",
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index aa6e36717bc7c..f146e9040bbae 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -83,7 +83,6 @@
 !/err-code
 !/exponential-backoff
 !/fastest-levenshtein
-!/fdir
 !/foreground-child
 !/fs-minipass
 !/glob
@@ -170,7 +169,6 @@
 !/parse-conflict-json
 !/path-key
 !/path-scurry
-!/picomatch
 !/postcss-selector-parser
 !/proc-log
 !/proggy
@@ -220,6 +218,10 @@
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
+!/tinyglobby/node_modules/
+/tinyglobby/node_modules/*
+!/tinyglobby/node_modules/fdir
+!/tinyglobby/node_modules/picomatch
 !/treeverse
 !/tuf-js
 !/unique-filename
diff --git a/node_modules/fdir/LICENSE b/node_modules/fdir/LICENSE
deleted file mode 100644
index bb7fdee44cae6..0000000000000
--- a/node_modules/fdir/LICENSE
+++ /dev/null
@@ -1,7 +0,0 @@
-Copyright 2023 Abdullah Atta
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/fdir/dist/index.cjs b/node_modules/fdir/dist/index.cjs
deleted file mode 100644
index 4868ffba35d99..0000000000000
--- a/node_modules/fdir/dist/index.cjs
+++ /dev/null
@@ -1,588 +0,0 @@
-//#region rolldown:runtime
-var __create = Object.create;
-var __defProp = Object.defineProperty;
-var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
-var __getOwnPropNames = Object.getOwnPropertyNames;
-var __getProtoOf = Object.getPrototypeOf;
-var __hasOwnProp = Object.prototype.hasOwnProperty;
-var __copyProps = (to, from, except, desc) => {
-	if (from && typeof from === "object" || typeof from === "function") for (var keys = __getOwnPropNames(from), i = 0, n = keys.length, key; i < n; i++) {
-		key = keys[i];
-		if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, {
-			get: ((k) => from[k]).bind(null, key),
-			enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
-		});
-	}
-	return to;
-};
-var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", {
-	value: mod,
-	enumerable: true
-}) : target, mod));
-
-//#endregion
-const path = __toESM(require("path"));
-const fs = __toESM(require("fs"));
-
-//#region src/utils.ts
-function cleanPath(path$1) {
-	let normalized = (0, path.normalize)(path$1);
-	if (normalized.length > 1 && normalized[normalized.length - 1] === path.sep) normalized = normalized.substring(0, normalized.length - 1);
-	return normalized;
-}
-const SLASHES_REGEX = /[\\/]/g;
-function convertSlashes(path$1, separator) {
-	return path$1.replace(SLASHES_REGEX, separator);
-}
-const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i;
-function isRootDirectory(path$1) {
-	return path$1 === "/" || WINDOWS_ROOT_DIR_REGEX.test(path$1);
-}
-function normalizePath(path$1, options) {
-	const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options;
-	const pathNeedsCleaning = process.platform === "win32" && path$1.includes("/") || path$1.startsWith(".");
-	if (resolvePaths) path$1 = (0, path.resolve)(path$1);
-	if (normalizePath$1 || pathNeedsCleaning) path$1 = cleanPath(path$1);
-	if (path$1 === ".") return "";
-	const needsSeperator = path$1[path$1.length - 1] !== pathSeparator;
-	return convertSlashes(needsSeperator ? path$1 + pathSeparator : path$1, pathSeparator);
-}
-
-//#endregion
-//#region src/api/functions/join-path.ts
-function joinPathWithBasePath(filename, directoryPath) {
-	return directoryPath + filename;
-}
-function joinPathWithRelativePath(root, options) {
-	return function(filename, directoryPath) {
-		const sameRoot = directoryPath.startsWith(root);
-		if (sameRoot) return directoryPath.slice(root.length) + filename;
-		else return convertSlashes((0, path.relative)(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename;
-	};
-}
-function joinPath(filename) {
-	return filename;
-}
-function joinDirectoryPath(filename, directoryPath, separator) {
-	return directoryPath + filename + separator;
-}
-function build$7(root, options) {
-	const { relativePaths, includeBasePath } = options;
-	return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? joinPathWithBasePath : joinPath;
-}
-
-//#endregion
-//#region src/api/functions/push-directory.ts
-function pushDirectoryWithRelativePath(root) {
-	return function(directoryPath, paths) {
-		paths.push(directoryPath.substring(root.length) || ".");
-	};
-}
-function pushDirectoryFilterWithRelativePath(root) {
-	return function(directoryPath, paths, filters) {
-		const relativePath = directoryPath.substring(root.length) || ".";
-		if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath);
-	};
-}
-const pushDirectory = (directoryPath, paths) => {
-	paths.push(directoryPath || ".");
-};
-const pushDirectoryFilter = (directoryPath, paths, filters) => {
-	const path$1 = directoryPath || ".";
-	if (filters.every((filter) => filter(path$1, true))) paths.push(path$1);
-};
-const empty$2 = () => {};
-function build$6(root, options) {
-	const { includeDirs, filters, relativePaths } = options;
-	if (!includeDirs) return empty$2;
-	if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root);
-	return filters && filters.length ? pushDirectoryFilter : pushDirectory;
-}
-
-//#endregion
-//#region src/api/functions/push-file.ts
-const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
-	if (filters.every((filter) => filter(filename, false))) counts.files++;
-};
-const pushFileFilter = (filename, paths, _counts, filters) => {
-	if (filters.every((filter) => filter(filename, false))) paths.push(filename);
-};
-const pushFileCount = (_filename, _paths, counts, _filters) => {
-	counts.files++;
-};
-const pushFile = (filename, paths) => {
-	paths.push(filename);
-};
-const empty$1 = () => {};
-function build$5(options) {
-	const { excludeFiles, filters, onlyCounts } = options;
-	if (excludeFiles) return empty$1;
-	if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
-	else if (onlyCounts) return pushFileCount;
-	else return pushFile;
-}
-
-//#endregion
-//#region src/api/functions/get-array.ts
-const getArray = (paths) => {
-	return paths;
-};
-const getArrayGroup = () => {
-	return [""].slice(0, 0);
-};
-function build$4(options) {
-	return options.group ? getArrayGroup : getArray;
-}
-
-//#endregion
-//#region src/api/functions/group-files.ts
-const groupFiles = (groups, directory, files) => {
-	groups.push({
-		directory,
-		files,
-		dir: directory
-	});
-};
-const empty = () => {};
-function build$3(options) {
-	return options.group ? groupFiles : empty;
-}
-
-//#endregion
-//#region src/api/functions/resolve-symlink.ts
-const resolveSymlinksAsync = function(path$1, state, callback$1) {
-	const { queue, fs: fs$1, options: { suppressErrors } } = state;
-	queue.enqueue();
-	fs$1.realpath(path$1, (error, resolvedPath) => {
-		if (error) return queue.dequeue(suppressErrors ? null : error, state);
-		fs$1.stat(resolvedPath, (error$1, stat) => {
-			if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
-			if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return queue.dequeue(null, state);
-			callback$1(stat, resolvedPath);
-			queue.dequeue(null, state);
-		});
-	});
-};
-const resolveSymlinks = function(path$1, state, callback$1) {
-	const { queue, fs: fs$1, options: { suppressErrors } } = state;
-	queue.enqueue();
-	try {
-		const resolvedPath = fs$1.realpathSync(path$1);
-		const stat = fs$1.statSync(resolvedPath);
-		if (stat.isDirectory() && isRecursive(path$1, resolvedPath, state)) return;
-		callback$1(stat, resolvedPath);
-	} catch (e) {
-		if (!suppressErrors) throw e;
-	}
-};
-function build$2(options, isSynchronous) {
-	if (!options.resolveSymlinks || options.excludeSymlinks) return null;
-	return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
-}
-function isRecursive(path$1, resolved, state) {
-	if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state);
-	let parent = (0, path.dirname)(path$1);
-	let depth = 1;
-	while (parent !== state.root && depth < 2) {
-		const resolvedPath = state.symlinks.get(parent);
-		const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath));
-		if (isSameRoot) depth++;
-		else parent = (0, path.dirname)(parent);
-	}
-	state.symlinks.set(path$1, resolved);
-	return depth > 1;
-}
-function isRecursiveUsingRealPaths(resolved, state) {
-	return state.visited.includes(resolved + state.options.pathSeparator);
-}
-
-//#endregion
-//#region src/api/functions/invoke-callback.ts
-const onlyCountsSync = (state) => {
-	return state.counts;
-};
-const groupsSync = (state) => {
-	return state.groups;
-};
-const defaultSync = (state) => {
-	return state.paths;
-};
-const limitFilesSync = (state) => {
-	return state.paths.slice(0, state.options.maxFiles);
-};
-const onlyCountsAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.counts, state.options.suppressErrors);
-	return null;
-};
-const defaultAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.paths, state.options.suppressErrors);
-	return null;
-};
-const limitFilesAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
-	return null;
-};
-const groupsAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.groups, state.options.suppressErrors);
-	return null;
-};
-function report(error, callback$1, output, suppressErrors) {
-	if (error && !suppressErrors) callback$1(error, output);
-	else callback$1(null, output);
-}
-function build$1(options, isSynchronous) {
-	const { onlyCounts, group, maxFiles } = options;
-	if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync;
-	else if (group) return isSynchronous ? groupsSync : groupsAsync;
-	else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync;
-	else return isSynchronous ? defaultSync : defaultAsync;
-}
-
-//#endregion
-//#region src/api/functions/walk-directory.ts
-const readdirOpts = { withFileTypes: true };
-const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
-	state.queue.enqueue();
-	if (currentDepth < 0) return state.queue.dequeue(null, state);
-	const { fs: fs$1 } = state;
-	state.visited.push(crawlPath);
-	state.counts.directories++;
-	fs$1.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
-		callback$1(entries, directoryPath, currentDepth);
-		state.queue.dequeue(state.options.suppressErrors ? null : error, state);
-	});
-};
-const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
-	const { fs: fs$1 } = state;
-	if (currentDepth < 0) return;
-	state.visited.push(crawlPath);
-	state.counts.directories++;
-	let entries = [];
-	try {
-		entries = fs$1.readdirSync(crawlPath || ".", readdirOpts);
-	} catch (e) {
-		if (!state.options.suppressErrors) throw e;
-	}
-	callback$1(entries, directoryPath, currentDepth);
-};
-function build(isSynchronous) {
-	return isSynchronous ? walkSync : walkAsync;
-}
-
-//#endregion
-//#region src/api/queue.ts
-/**
-* This is a custom stateless queue to track concurrent async fs calls.
-* It increments a counter whenever a call is queued and decrements it
-* as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
-*/
-var Queue = class {
-	count = 0;
-	constructor(onQueueEmpty) {
-		this.onQueueEmpty = onQueueEmpty;
-	}
-	enqueue() {
-		this.count++;
-		return this.count;
-	}
-	dequeue(error, output) {
-		if (this.onQueueEmpty && (--this.count <= 0 || error)) {
-			this.onQueueEmpty(error, output);
-			if (error) {
-				output.controller.abort();
-				this.onQueueEmpty = void 0;
-			}
-		}
-	}
-};
-
-//#endregion
-//#region src/api/counter.ts
-var Counter = class {
-	_files = 0;
-	_directories = 0;
-	set files(num) {
-		this._files = num;
-	}
-	get files() {
-		return this._files;
-	}
-	set directories(num) {
-		this._directories = num;
-	}
-	get directories() {
-		return this._directories;
-	}
-	/**
-	* @deprecated use `directories` instead
-	*/
-	/* c8 ignore next 3 */
-	get dirs() {
-		return this._directories;
-	}
-};
-
-//#endregion
-//#region src/api/aborter.ts
-/**
-* AbortController is not supported on Node 14 so we use this until we can drop
-* support for Node 14.
-*/
-var Aborter = class {
-	aborted = false;
-	abort() {
-		this.aborted = true;
-	}
-};
-
-//#endregion
-//#region src/api/walker.ts
-var Walker = class {
-	root;
-	isSynchronous;
-	state;
-	joinPath;
-	pushDirectory;
-	pushFile;
-	getArray;
-	groupFiles;
-	resolveSymlink;
-	walkDirectory;
-	callbackInvoker;
-	constructor(root, options, callback$1) {
-		this.isSynchronous = !callback$1;
-		this.callbackInvoker = build$1(options, this.isSynchronous);
-		this.root = normalizePath(root, options);
-		this.state = {
-			root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1),
-			paths: [""].slice(0, 0),
-			groups: [],
-			counts: new Counter(),
-			options,
-			queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
-			symlinks: /* @__PURE__ */ new Map(),
-			visited: [""].slice(0, 0),
-			controller: new Aborter(),
-			fs: options.fs || fs
-		};
-		this.joinPath = build$7(this.root, options);
-		this.pushDirectory = build$6(this.root, options);
-		this.pushFile = build$5(options);
-		this.getArray = build$4(options);
-		this.groupFiles = build$3(options);
-		this.resolveSymlink = build$2(options, this.isSynchronous);
-		this.walkDirectory = build(this.isSynchronous);
-	}
-	start() {
-		this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
-		this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
-		return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
-	}
-	walk = (entries, directoryPath, depth) => {
-		const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
-		if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
-		const files = this.getArray(this.state.paths);
-		for (let i = 0; i < entries.length; ++i) {
-			const entry = entries[i];
-			if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) {
-				const filename = this.joinPath(entry.name, directoryPath);
-				this.pushFile(filename, files, this.state.counts, filters);
-			} else if (entry.isDirectory()) {
-				let path$1 = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
-				if (exclude && exclude(entry.name, path$1)) continue;
-				this.pushDirectory(path$1, paths, filters);
-				this.walkDirectory(this.state, path$1, path$1, depth - 1, this.walk);
-			} else if (this.resolveSymlink && entry.isSymbolicLink()) {
-				let path$1 = joinPathWithBasePath(entry.name, directoryPath);
-				this.resolveSymlink(path$1, this.state, (stat, resolvedPath) => {
-					if (stat.isDirectory()) {
-						resolvedPath = normalizePath(resolvedPath, this.state.options);
-						if (exclude && exclude(entry.name, useRealPaths ? resolvedPath : path$1 + pathSeparator)) return;
-						this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path$1 + pathSeparator, depth - 1, this.walk);
-					} else {
-						resolvedPath = useRealPaths ? resolvedPath : path$1;
-						const filename = (0, path.basename)(resolvedPath);
-						const directoryPath$1 = normalizePath((0, path.dirname)(resolvedPath), this.state.options);
-						resolvedPath = this.joinPath(filename, directoryPath$1);
-						this.pushFile(resolvedPath, files, this.state.counts, filters);
-					}
-				});
-			}
-		}
-		this.groupFiles(this.state.groups, directoryPath, files);
-	};
-};
-
-//#endregion
-//#region src/api/async.ts
-function promise(root, options) {
-	return new Promise((resolve$1, reject) => {
-		callback(root, options, (err, output) => {
-			if (err) return reject(err);
-			resolve$1(output);
-		});
-	});
-}
-function callback(root, options, callback$1) {
-	let walker = new Walker(root, options, callback$1);
-	walker.start();
-}
-
-//#endregion
-//#region src/api/sync.ts
-function sync(root, options) {
-	const walker = new Walker(root, options);
-	return walker.start();
-}
-
-//#endregion
-//#region src/builder/api-builder.ts
-var APIBuilder = class {
-	constructor(root, options) {
-		this.root = root;
-		this.options = options;
-	}
-	withPromise() {
-		return promise(this.root, this.options);
-	}
-	withCallback(cb) {
-		callback(this.root, this.options, cb);
-	}
-	sync() {
-		return sync(this.root, this.options);
-	}
-};
-
-//#endregion
-//#region src/builder/index.ts
-let pm = null;
-/* c8 ignore next 6 */
-try {
-	require.resolve("picomatch");
-	pm = require("picomatch");
-} catch {}
-var Builder = class {
-	globCache = {};
-	options = {
-		maxDepth: Infinity,
-		suppressErrors: true,
-		pathSeparator: path.sep,
-		filters: []
-	};
-	globFunction;
-	constructor(options) {
-		this.options = {
-			...this.options,
-			...options
-		};
-		this.globFunction = this.options.globFunction;
-	}
-	group() {
-		this.options.group = true;
-		return this;
-	}
-	withPathSeparator(separator) {
-		this.options.pathSeparator = separator;
-		return this;
-	}
-	withBasePath() {
-		this.options.includeBasePath = true;
-		return this;
-	}
-	withRelativePaths() {
-		this.options.relativePaths = true;
-		return this;
-	}
-	withDirs() {
-		this.options.includeDirs = true;
-		return this;
-	}
-	withMaxDepth(depth) {
-		this.options.maxDepth = depth;
-		return this;
-	}
-	withMaxFiles(limit) {
-		this.options.maxFiles = limit;
-		return this;
-	}
-	withFullPaths() {
-		this.options.resolvePaths = true;
-		this.options.includeBasePath = true;
-		return this;
-	}
-	withErrors() {
-		this.options.suppressErrors = false;
-		return this;
-	}
-	withSymlinks({ resolvePaths = true } = {}) {
-		this.options.resolveSymlinks = true;
-		this.options.useRealPaths = resolvePaths;
-		return this.withFullPaths();
-	}
-	withAbortSignal(signal) {
-		this.options.signal = signal;
-		return this;
-	}
-	normalize() {
-		this.options.normalizePath = true;
-		return this;
-	}
-	filter(predicate) {
-		this.options.filters.push(predicate);
-		return this;
-	}
-	onlyDirs() {
-		this.options.excludeFiles = true;
-		this.options.includeDirs = true;
-		return this;
-	}
-	exclude(predicate) {
-		this.options.exclude = predicate;
-		return this;
-	}
-	onlyCounts() {
-		this.options.onlyCounts = true;
-		return this;
-	}
-	crawl(root) {
-		return new APIBuilder(root || ".", this.options);
-	}
-	withGlobFunction(fn) {
-		this.globFunction = fn;
-		return this;
-	}
-	/**
-	* @deprecated Pass options using the constructor instead:
-	* ```ts
-	* new fdir(options).crawl("/path/to/root");
-	* ```
-	* This method will be removed in v7.0
-	*/
-	/* c8 ignore next 4 */
-	crawlWithOptions(root, options) {
-		this.options = {
-			...this.options,
-			...options
-		};
-		return new APIBuilder(root || ".", this.options);
-	}
-	glob(...patterns) {
-		if (this.globFunction) return this.globWithOptions(patterns);
-		return this.globWithOptions(patterns, ...[{ dot: true }]);
-	}
-	globWithOptions(patterns, ...options) {
-		const globFn = this.globFunction || pm;
-		/* c8 ignore next 5 */
-		if (!globFn) throw new Error("Please specify a glob function to use glob matching.");
-		var isMatch = this.globCache[patterns.join("\0")];
-		if (!isMatch) {
-			isMatch = globFn(patterns, ...options);
-			this.globCache[patterns.join("\0")] = isMatch;
-		}
-		this.options.filters.push((path$1) => isMatch(path$1));
-		return this;
-	}
-};
-
-//#endregion
-exports.fdir = Builder;
\ No newline at end of file
diff --git a/node_modules/fdir/dist/index.d.cts b/node_modules/fdir/dist/index.d.cts
deleted file mode 100644
index f448ef5d9b563..0000000000000
--- a/node_modules/fdir/dist/index.d.cts
+++ /dev/null
@@ -1,155 +0,0 @@
-/// 
-import * as nativeFs from "fs";
-import picomatch from "picomatch";
-
-//#region src/api/aborter.d.ts
-/**
- * AbortController is not supported on Node 14 so we use this until we can drop
- * support for Node 14.
- */
-declare class Aborter {
-  aborted: boolean;
-  abort(): void;
-}
-//#endregion
-//#region src/api/queue.d.ts
-type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
-/**
- * This is a custom stateless queue to track concurrent async fs calls.
- * It increments a counter whenever a call is queued and decrements it
- * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
- */
-declare class Queue {
-  private onQueueEmpty?;
-  count: number;
-  constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined);
-  enqueue(): number;
-  dequeue(error: Error | null, output: WalkerState): void;
-}
-//#endregion
-//#region src/types.d.ts
-type Counts = {
-  files: number;
-  directories: number;
-  /**
-   * @deprecated use `directories` instead. Will be removed in v7.0.
-   */
-  dirs: number;
-};
-type Group = {
-  directory: string;
-  files: string[];
-  /**
-   * @deprecated use `directory` instead. Will be removed in v7.0.
-   */
-  dir: string;
-};
-type GroupOutput = Group[];
-type OnlyCountsOutput = Counts;
-type PathsOutput = string[];
-type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
-type FSLike = {
-  readdir: typeof nativeFs.readdir;
-  readdirSync: typeof nativeFs.readdirSync;
-  realpath: typeof nativeFs.realpath;
-  realpathSync: typeof nativeFs.realpathSync;
-  stat: typeof nativeFs.stat;
-  statSync: typeof nativeFs.statSync;
-};
-type WalkerState = {
-  root: string;
-  paths: string[];
-  groups: Group[];
-  counts: Counts;
-  options: Options;
-  queue: Queue;
-  controller: Aborter;
-  fs: FSLike;
-  symlinks: Map;
-  visited: string[];
-};
-type ResultCallback = (error: Error | null, output: TOutput) => void;
-type FilterPredicate = (path: string, isDirectory: boolean) => boolean;
-type ExcludePredicate = (dirName: string, dirPath: string) => boolean;
-type PathSeparator = "/" | "\\";
-type Options = {
-  includeBasePath?: boolean;
-  includeDirs?: boolean;
-  normalizePath?: boolean;
-  maxDepth: number;
-  maxFiles?: number;
-  resolvePaths?: boolean;
-  suppressErrors: boolean;
-  group?: boolean;
-  onlyCounts?: boolean;
-  filters: FilterPredicate[];
-  resolveSymlinks?: boolean;
-  useRealPaths?: boolean;
-  excludeFiles?: boolean;
-  excludeSymlinks?: boolean;
-  exclude?: ExcludePredicate;
-  relativePaths?: boolean;
-  pathSeparator: PathSeparator;
-  signal?: AbortSignal;
-  globFunction?: TGlobFunction;
-  fs?: FSLike;
-};
-type GlobMatcher = (test: string) => boolean;
-type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
-type GlobParams = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? TParams : [];
-//#endregion
-//#region src/builder/api-builder.d.ts
-declare class APIBuilder {
-  private readonly root;
-  private readonly options;
-  constructor(root: string, options: Options);
-  withPromise(): Promise;
-  withCallback(cb: ResultCallback): void;
-  sync(): TReturnType;
-}
-//#endregion
-//#region src/builder/index.d.ts
-declare class Builder {
-  private readonly globCache;
-  private options;
-  private globFunction?;
-  constructor(options?: Partial>);
-  group(): Builder;
-  withPathSeparator(separator: "/" | "\\"): this;
-  withBasePath(): this;
-  withRelativePaths(): this;
-  withDirs(): this;
-  withMaxDepth(depth: number): this;
-  withMaxFiles(limit: number): this;
-  withFullPaths(): this;
-  withErrors(): this;
-  withSymlinks({
-    resolvePaths
-  }?: {
-    resolvePaths?: boolean | undefined;
-  }): this;
-  withAbortSignal(signal: AbortSignal): this;
-  normalize(): this;
-  filter(predicate: FilterPredicate): this;
-  onlyDirs(): this;
-  exclude(predicate: ExcludePredicate): this;
-  onlyCounts(): Builder;
-  crawl(root?: string): APIBuilder;
-  withGlobFunction(fn: TFunc): Builder;
-  /**
-   * @deprecated Pass options using the constructor instead:
-   * ```ts
-   * new fdir(options).crawl("/path/to/root");
-   * ```
-   * This method will be removed in v7.0
-   */
-  crawlWithOptions(root: string, options: Partial>): APIBuilder;
-  glob(...patterns: string[]): Builder;
-  globWithOptions(patterns: string[]): Builder;
-  globWithOptions(patterns: string[], ...options: GlobParams): Builder;
-}
-//#endregion
-//#region src/index.d.ts
-type Fdir = typeof Builder;
-//#endregion
-export { Counts, ExcludePredicate, FSLike, Fdir, FilterPredicate, GlobFunction, GlobMatcher, GlobParams, Group, GroupOutput, OnlyCountsOutput, Options, Output, PathSeparator, PathsOutput, ResultCallback, WalkerState, Builder as fdir };
\ No newline at end of file
diff --git a/node_modules/fdir/dist/index.d.mts b/node_modules/fdir/dist/index.d.mts
deleted file mode 100644
index f448ef5d9b563..0000000000000
--- a/node_modules/fdir/dist/index.d.mts
+++ /dev/null
@@ -1,155 +0,0 @@
-/// 
-import * as nativeFs from "fs";
-import picomatch from "picomatch";
-
-//#region src/api/aborter.d.ts
-/**
- * AbortController is not supported on Node 14 so we use this until we can drop
- * support for Node 14.
- */
-declare class Aborter {
-  aborted: boolean;
-  abort(): void;
-}
-//#endregion
-//#region src/api/queue.d.ts
-type OnQueueEmptyCallback = (error: Error | null, output: WalkerState) => void;
-/**
- * This is a custom stateless queue to track concurrent async fs calls.
- * It increments a counter whenever a call is queued and decrements it
- * as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
- */
-declare class Queue {
-  private onQueueEmpty?;
-  count: number;
-  constructor(onQueueEmpty?: OnQueueEmptyCallback | undefined);
-  enqueue(): number;
-  dequeue(error: Error | null, output: WalkerState): void;
-}
-//#endregion
-//#region src/types.d.ts
-type Counts = {
-  files: number;
-  directories: number;
-  /**
-   * @deprecated use `directories` instead. Will be removed in v7.0.
-   */
-  dirs: number;
-};
-type Group = {
-  directory: string;
-  files: string[];
-  /**
-   * @deprecated use `directory` instead. Will be removed in v7.0.
-   */
-  dir: string;
-};
-type GroupOutput = Group[];
-type OnlyCountsOutput = Counts;
-type PathsOutput = string[];
-type Output = OnlyCountsOutput | PathsOutput | GroupOutput;
-type FSLike = {
-  readdir: typeof nativeFs.readdir;
-  readdirSync: typeof nativeFs.readdirSync;
-  realpath: typeof nativeFs.realpath;
-  realpathSync: typeof nativeFs.realpathSync;
-  stat: typeof nativeFs.stat;
-  statSync: typeof nativeFs.statSync;
-};
-type WalkerState = {
-  root: string;
-  paths: string[];
-  groups: Group[];
-  counts: Counts;
-  options: Options;
-  queue: Queue;
-  controller: Aborter;
-  fs: FSLike;
-  symlinks: Map;
-  visited: string[];
-};
-type ResultCallback = (error: Error | null, output: TOutput) => void;
-type FilterPredicate = (path: string, isDirectory: boolean) => boolean;
-type ExcludePredicate = (dirName: string, dirPath: string) => boolean;
-type PathSeparator = "/" | "\\";
-type Options = {
-  includeBasePath?: boolean;
-  includeDirs?: boolean;
-  normalizePath?: boolean;
-  maxDepth: number;
-  maxFiles?: number;
-  resolvePaths?: boolean;
-  suppressErrors: boolean;
-  group?: boolean;
-  onlyCounts?: boolean;
-  filters: FilterPredicate[];
-  resolveSymlinks?: boolean;
-  useRealPaths?: boolean;
-  excludeFiles?: boolean;
-  excludeSymlinks?: boolean;
-  exclude?: ExcludePredicate;
-  relativePaths?: boolean;
-  pathSeparator: PathSeparator;
-  signal?: AbortSignal;
-  globFunction?: TGlobFunction;
-  fs?: FSLike;
-};
-type GlobMatcher = (test: string) => boolean;
-type GlobFunction = (glob: string | string[], ...params: unknown[]) => GlobMatcher;
-type GlobParams = T extends ((globs: string | string[], ...params: infer TParams extends unknown[]) => GlobMatcher) ? TParams : [];
-//#endregion
-//#region src/builder/api-builder.d.ts
-declare class APIBuilder {
-  private readonly root;
-  private readonly options;
-  constructor(root: string, options: Options);
-  withPromise(): Promise;
-  withCallback(cb: ResultCallback): void;
-  sync(): TReturnType;
-}
-//#endregion
-//#region src/builder/index.d.ts
-declare class Builder {
-  private readonly globCache;
-  private options;
-  private globFunction?;
-  constructor(options?: Partial>);
-  group(): Builder;
-  withPathSeparator(separator: "/" | "\\"): this;
-  withBasePath(): this;
-  withRelativePaths(): this;
-  withDirs(): this;
-  withMaxDepth(depth: number): this;
-  withMaxFiles(limit: number): this;
-  withFullPaths(): this;
-  withErrors(): this;
-  withSymlinks({
-    resolvePaths
-  }?: {
-    resolvePaths?: boolean | undefined;
-  }): this;
-  withAbortSignal(signal: AbortSignal): this;
-  normalize(): this;
-  filter(predicate: FilterPredicate): this;
-  onlyDirs(): this;
-  exclude(predicate: ExcludePredicate): this;
-  onlyCounts(): Builder;
-  crawl(root?: string): APIBuilder;
-  withGlobFunction(fn: TFunc): Builder;
-  /**
-   * @deprecated Pass options using the constructor instead:
-   * ```ts
-   * new fdir(options).crawl("/path/to/root");
-   * ```
-   * This method will be removed in v7.0
-   */
-  crawlWithOptions(root: string, options: Partial>): APIBuilder;
-  glob(...patterns: string[]): Builder;
-  globWithOptions(patterns: string[]): Builder;
-  globWithOptions(patterns: string[], ...options: GlobParams): Builder;
-}
-//#endregion
-//#region src/index.d.ts
-type Fdir = typeof Builder;
-//#endregion
-export { Counts, ExcludePredicate, FSLike, Fdir, FilterPredicate, GlobFunction, GlobMatcher, GlobParams, Group, GroupOutput, OnlyCountsOutput, Options, Output, PathSeparator, PathsOutput, ResultCallback, WalkerState, Builder as fdir };
\ No newline at end of file
diff --git a/node_modules/fdir/dist/index.mjs b/node_modules/fdir/dist/index.mjs
deleted file mode 100644
index 5c37e092b507d..0000000000000
--- a/node_modules/fdir/dist/index.mjs
+++ /dev/null
@@ -1,570 +0,0 @@
-import { createRequire } from "module";
-import { basename, dirname, normalize, relative, resolve, sep } from "path";
-import * as nativeFs from "fs";
-
-//#region rolldown:runtime
-var __require = /* @__PURE__ */ createRequire(import.meta.url);
-
-//#endregion
-//#region src/utils.ts
-function cleanPath(path) {
-	let normalized = normalize(path);
-	if (normalized.length > 1 && normalized[normalized.length - 1] === sep) normalized = normalized.substring(0, normalized.length - 1);
-	return normalized;
-}
-const SLASHES_REGEX = /[\\/]/g;
-function convertSlashes(path, separator) {
-	return path.replace(SLASHES_REGEX, separator);
-}
-const WINDOWS_ROOT_DIR_REGEX = /^[a-z]:[\\/]$/i;
-function isRootDirectory(path) {
-	return path === "/" || WINDOWS_ROOT_DIR_REGEX.test(path);
-}
-function normalizePath(path, options) {
-	const { resolvePaths, normalizePath: normalizePath$1, pathSeparator } = options;
-	const pathNeedsCleaning = process.platform === "win32" && path.includes("/") || path.startsWith(".");
-	if (resolvePaths) path = resolve(path);
-	if (normalizePath$1 || pathNeedsCleaning) path = cleanPath(path);
-	if (path === ".") return "";
-	const needsSeperator = path[path.length - 1] !== pathSeparator;
-	return convertSlashes(needsSeperator ? path + pathSeparator : path, pathSeparator);
-}
-
-//#endregion
-//#region src/api/functions/join-path.ts
-function joinPathWithBasePath(filename, directoryPath) {
-	return directoryPath + filename;
-}
-function joinPathWithRelativePath(root, options) {
-	return function(filename, directoryPath) {
-		const sameRoot = directoryPath.startsWith(root);
-		if (sameRoot) return directoryPath.slice(root.length) + filename;
-		else return convertSlashes(relative(root, directoryPath), options.pathSeparator) + options.pathSeparator + filename;
-	};
-}
-function joinPath(filename) {
-	return filename;
-}
-function joinDirectoryPath(filename, directoryPath, separator) {
-	return directoryPath + filename + separator;
-}
-function build$7(root, options) {
-	const { relativePaths, includeBasePath } = options;
-	return relativePaths && root ? joinPathWithRelativePath(root, options) : includeBasePath ? joinPathWithBasePath : joinPath;
-}
-
-//#endregion
-//#region src/api/functions/push-directory.ts
-function pushDirectoryWithRelativePath(root) {
-	return function(directoryPath, paths) {
-		paths.push(directoryPath.substring(root.length) || ".");
-	};
-}
-function pushDirectoryFilterWithRelativePath(root) {
-	return function(directoryPath, paths, filters) {
-		const relativePath = directoryPath.substring(root.length) || ".";
-		if (filters.every((filter) => filter(relativePath, true))) paths.push(relativePath);
-	};
-}
-const pushDirectory = (directoryPath, paths) => {
-	paths.push(directoryPath || ".");
-};
-const pushDirectoryFilter = (directoryPath, paths, filters) => {
-	const path = directoryPath || ".";
-	if (filters.every((filter) => filter(path, true))) paths.push(path);
-};
-const empty$2 = () => {};
-function build$6(root, options) {
-	const { includeDirs, filters, relativePaths } = options;
-	if (!includeDirs) return empty$2;
-	if (relativePaths) return filters && filters.length ? pushDirectoryFilterWithRelativePath(root) : pushDirectoryWithRelativePath(root);
-	return filters && filters.length ? pushDirectoryFilter : pushDirectory;
-}
-
-//#endregion
-//#region src/api/functions/push-file.ts
-const pushFileFilterAndCount = (filename, _paths, counts, filters) => {
-	if (filters.every((filter) => filter(filename, false))) counts.files++;
-};
-const pushFileFilter = (filename, paths, _counts, filters) => {
-	if (filters.every((filter) => filter(filename, false))) paths.push(filename);
-};
-const pushFileCount = (_filename, _paths, counts, _filters) => {
-	counts.files++;
-};
-const pushFile = (filename, paths) => {
-	paths.push(filename);
-};
-const empty$1 = () => {};
-function build$5(options) {
-	const { excludeFiles, filters, onlyCounts } = options;
-	if (excludeFiles) return empty$1;
-	if (filters && filters.length) return onlyCounts ? pushFileFilterAndCount : pushFileFilter;
-	else if (onlyCounts) return pushFileCount;
-	else return pushFile;
-}
-
-//#endregion
-//#region src/api/functions/get-array.ts
-const getArray = (paths) => {
-	return paths;
-};
-const getArrayGroup = () => {
-	return [""].slice(0, 0);
-};
-function build$4(options) {
-	return options.group ? getArrayGroup : getArray;
-}
-
-//#endregion
-//#region src/api/functions/group-files.ts
-const groupFiles = (groups, directory, files) => {
-	groups.push({
-		directory,
-		files,
-		dir: directory
-	});
-};
-const empty = () => {};
-function build$3(options) {
-	return options.group ? groupFiles : empty;
-}
-
-//#endregion
-//#region src/api/functions/resolve-symlink.ts
-const resolveSymlinksAsync = function(path, state, callback$1) {
-	const { queue, fs, options: { suppressErrors } } = state;
-	queue.enqueue();
-	fs.realpath(path, (error, resolvedPath) => {
-		if (error) return queue.dequeue(suppressErrors ? null : error, state);
-		fs.stat(resolvedPath, (error$1, stat) => {
-			if (error$1) return queue.dequeue(suppressErrors ? null : error$1, state);
-			if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) return queue.dequeue(null, state);
-			callback$1(stat, resolvedPath);
-			queue.dequeue(null, state);
-		});
-	});
-};
-const resolveSymlinks = function(path, state, callback$1) {
-	const { queue, fs, options: { suppressErrors } } = state;
-	queue.enqueue();
-	try {
-		const resolvedPath = fs.realpathSync(path);
-		const stat = fs.statSync(resolvedPath);
-		if (stat.isDirectory() && isRecursive(path, resolvedPath, state)) return;
-		callback$1(stat, resolvedPath);
-	} catch (e) {
-		if (!suppressErrors) throw e;
-	}
-};
-function build$2(options, isSynchronous) {
-	if (!options.resolveSymlinks || options.excludeSymlinks) return null;
-	return isSynchronous ? resolveSymlinks : resolveSymlinksAsync;
-}
-function isRecursive(path, resolved, state) {
-	if (state.options.useRealPaths) return isRecursiveUsingRealPaths(resolved, state);
-	let parent = dirname(path);
-	let depth = 1;
-	while (parent !== state.root && depth < 2) {
-		const resolvedPath = state.symlinks.get(parent);
-		const isSameRoot = !!resolvedPath && (resolvedPath === resolved || resolvedPath.startsWith(resolved) || resolved.startsWith(resolvedPath));
-		if (isSameRoot) depth++;
-		else parent = dirname(parent);
-	}
-	state.symlinks.set(path, resolved);
-	return depth > 1;
-}
-function isRecursiveUsingRealPaths(resolved, state) {
-	return state.visited.includes(resolved + state.options.pathSeparator);
-}
-
-//#endregion
-//#region src/api/functions/invoke-callback.ts
-const onlyCountsSync = (state) => {
-	return state.counts;
-};
-const groupsSync = (state) => {
-	return state.groups;
-};
-const defaultSync = (state) => {
-	return state.paths;
-};
-const limitFilesSync = (state) => {
-	return state.paths.slice(0, state.options.maxFiles);
-};
-const onlyCountsAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.counts, state.options.suppressErrors);
-	return null;
-};
-const defaultAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.paths, state.options.suppressErrors);
-	return null;
-};
-const limitFilesAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.paths.slice(0, state.options.maxFiles), state.options.suppressErrors);
-	return null;
-};
-const groupsAsync = (state, error, callback$1) => {
-	report(error, callback$1, state.groups, state.options.suppressErrors);
-	return null;
-};
-function report(error, callback$1, output, suppressErrors) {
-	if (error && !suppressErrors) callback$1(error, output);
-	else callback$1(null, output);
-}
-function build$1(options, isSynchronous) {
-	const { onlyCounts, group, maxFiles } = options;
-	if (onlyCounts) return isSynchronous ? onlyCountsSync : onlyCountsAsync;
-	else if (group) return isSynchronous ? groupsSync : groupsAsync;
-	else if (maxFiles) return isSynchronous ? limitFilesSync : limitFilesAsync;
-	else return isSynchronous ? defaultSync : defaultAsync;
-}
-
-//#endregion
-//#region src/api/functions/walk-directory.ts
-const readdirOpts = { withFileTypes: true };
-const walkAsync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
-	state.queue.enqueue();
-	if (currentDepth < 0) return state.queue.dequeue(null, state);
-	const { fs } = state;
-	state.visited.push(crawlPath);
-	state.counts.directories++;
-	fs.readdir(crawlPath || ".", readdirOpts, (error, entries = []) => {
-		callback$1(entries, directoryPath, currentDepth);
-		state.queue.dequeue(state.options.suppressErrors ? null : error, state);
-	});
-};
-const walkSync = (state, crawlPath, directoryPath, currentDepth, callback$1) => {
-	const { fs } = state;
-	if (currentDepth < 0) return;
-	state.visited.push(crawlPath);
-	state.counts.directories++;
-	let entries = [];
-	try {
-		entries = fs.readdirSync(crawlPath || ".", readdirOpts);
-	} catch (e) {
-		if (!state.options.suppressErrors) throw e;
-	}
-	callback$1(entries, directoryPath, currentDepth);
-};
-function build(isSynchronous) {
-	return isSynchronous ? walkSync : walkAsync;
-}
-
-//#endregion
-//#region src/api/queue.ts
-/**
-* This is a custom stateless queue to track concurrent async fs calls.
-* It increments a counter whenever a call is queued and decrements it
-* as soon as it completes. When the counter hits 0, it calls onQueueEmpty.
-*/
-var Queue = class {
-	count = 0;
-	constructor(onQueueEmpty) {
-		this.onQueueEmpty = onQueueEmpty;
-	}
-	enqueue() {
-		this.count++;
-		return this.count;
-	}
-	dequeue(error, output) {
-		if (this.onQueueEmpty && (--this.count <= 0 || error)) {
-			this.onQueueEmpty(error, output);
-			if (error) {
-				output.controller.abort();
-				this.onQueueEmpty = void 0;
-			}
-		}
-	}
-};
-
-//#endregion
-//#region src/api/counter.ts
-var Counter = class {
-	_files = 0;
-	_directories = 0;
-	set files(num) {
-		this._files = num;
-	}
-	get files() {
-		return this._files;
-	}
-	set directories(num) {
-		this._directories = num;
-	}
-	get directories() {
-		return this._directories;
-	}
-	/**
-	* @deprecated use `directories` instead
-	*/
-	/* c8 ignore next 3 */
-	get dirs() {
-		return this._directories;
-	}
-};
-
-//#endregion
-//#region src/api/aborter.ts
-/**
-* AbortController is not supported on Node 14 so we use this until we can drop
-* support for Node 14.
-*/
-var Aborter = class {
-	aborted = false;
-	abort() {
-		this.aborted = true;
-	}
-};
-
-//#endregion
-//#region src/api/walker.ts
-var Walker = class {
-	root;
-	isSynchronous;
-	state;
-	joinPath;
-	pushDirectory;
-	pushFile;
-	getArray;
-	groupFiles;
-	resolveSymlink;
-	walkDirectory;
-	callbackInvoker;
-	constructor(root, options, callback$1) {
-		this.isSynchronous = !callback$1;
-		this.callbackInvoker = build$1(options, this.isSynchronous);
-		this.root = normalizePath(root, options);
-		this.state = {
-			root: isRootDirectory(this.root) ? this.root : this.root.slice(0, -1),
-			paths: [""].slice(0, 0),
-			groups: [],
-			counts: new Counter(),
-			options,
-			queue: new Queue((error, state) => this.callbackInvoker(state, error, callback$1)),
-			symlinks: /* @__PURE__ */ new Map(),
-			visited: [""].slice(0, 0),
-			controller: new Aborter(),
-			fs: options.fs || nativeFs
-		};
-		this.joinPath = build$7(this.root, options);
-		this.pushDirectory = build$6(this.root, options);
-		this.pushFile = build$5(options);
-		this.getArray = build$4(options);
-		this.groupFiles = build$3(options);
-		this.resolveSymlink = build$2(options, this.isSynchronous);
-		this.walkDirectory = build(this.isSynchronous);
-	}
-	start() {
-		this.pushDirectory(this.root, this.state.paths, this.state.options.filters);
-		this.walkDirectory(this.state, this.root, this.root, this.state.options.maxDepth, this.walk);
-		return this.isSynchronous ? this.callbackInvoker(this.state, null) : null;
-	}
-	walk = (entries, directoryPath, depth) => {
-		const { paths, options: { filters, resolveSymlinks: resolveSymlinks$1, excludeSymlinks, exclude, maxFiles, signal, useRealPaths, pathSeparator }, controller } = this.state;
-		if (controller.aborted || signal && signal.aborted || maxFiles && paths.length > maxFiles) return;
-		const files = this.getArray(this.state.paths);
-		for (let i = 0; i < entries.length; ++i) {
-			const entry = entries[i];
-			if (entry.isFile() || entry.isSymbolicLink() && !resolveSymlinks$1 && !excludeSymlinks) {
-				const filename = this.joinPath(entry.name, directoryPath);
-				this.pushFile(filename, files, this.state.counts, filters);
-			} else if (entry.isDirectory()) {
-				let path = joinDirectoryPath(entry.name, directoryPath, this.state.options.pathSeparator);
-				if (exclude && exclude(entry.name, path)) continue;
-				this.pushDirectory(path, paths, filters);
-				this.walkDirectory(this.state, path, path, depth - 1, this.walk);
-			} else if (this.resolveSymlink && entry.isSymbolicLink()) {
-				let path = joinPathWithBasePath(entry.name, directoryPath);
-				this.resolveSymlink(path, this.state, (stat, resolvedPath) => {
-					if (stat.isDirectory()) {
-						resolvedPath = normalizePath(resolvedPath, this.state.options);
-						if (exclude && exclude(entry.name, useRealPaths ? resolvedPath : path + pathSeparator)) return;
-						this.walkDirectory(this.state, resolvedPath, useRealPaths ? resolvedPath : path + pathSeparator, depth - 1, this.walk);
-					} else {
-						resolvedPath = useRealPaths ? resolvedPath : path;
-						const filename = basename(resolvedPath);
-						const directoryPath$1 = normalizePath(dirname(resolvedPath), this.state.options);
-						resolvedPath = this.joinPath(filename, directoryPath$1);
-						this.pushFile(resolvedPath, files, this.state.counts, filters);
-					}
-				});
-			}
-		}
-		this.groupFiles(this.state.groups, directoryPath, files);
-	};
-};
-
-//#endregion
-//#region src/api/async.ts
-function promise(root, options) {
-	return new Promise((resolve$1, reject) => {
-		callback(root, options, (err, output) => {
-			if (err) return reject(err);
-			resolve$1(output);
-		});
-	});
-}
-function callback(root, options, callback$1) {
-	let walker = new Walker(root, options, callback$1);
-	walker.start();
-}
-
-//#endregion
-//#region src/api/sync.ts
-function sync(root, options) {
-	const walker = new Walker(root, options);
-	return walker.start();
-}
-
-//#endregion
-//#region src/builder/api-builder.ts
-var APIBuilder = class {
-	constructor(root, options) {
-		this.root = root;
-		this.options = options;
-	}
-	withPromise() {
-		return promise(this.root, this.options);
-	}
-	withCallback(cb) {
-		callback(this.root, this.options, cb);
-	}
-	sync() {
-		return sync(this.root, this.options);
-	}
-};
-
-//#endregion
-//#region src/builder/index.ts
-let pm = null;
-/* c8 ignore next 6 */
-try {
-	__require.resolve("picomatch");
-	pm = __require("picomatch");
-} catch {}
-var Builder = class {
-	globCache = {};
-	options = {
-		maxDepth: Infinity,
-		suppressErrors: true,
-		pathSeparator: sep,
-		filters: []
-	};
-	globFunction;
-	constructor(options) {
-		this.options = {
-			...this.options,
-			...options
-		};
-		this.globFunction = this.options.globFunction;
-	}
-	group() {
-		this.options.group = true;
-		return this;
-	}
-	withPathSeparator(separator) {
-		this.options.pathSeparator = separator;
-		return this;
-	}
-	withBasePath() {
-		this.options.includeBasePath = true;
-		return this;
-	}
-	withRelativePaths() {
-		this.options.relativePaths = true;
-		return this;
-	}
-	withDirs() {
-		this.options.includeDirs = true;
-		return this;
-	}
-	withMaxDepth(depth) {
-		this.options.maxDepth = depth;
-		return this;
-	}
-	withMaxFiles(limit) {
-		this.options.maxFiles = limit;
-		return this;
-	}
-	withFullPaths() {
-		this.options.resolvePaths = true;
-		this.options.includeBasePath = true;
-		return this;
-	}
-	withErrors() {
-		this.options.suppressErrors = false;
-		return this;
-	}
-	withSymlinks({ resolvePaths = true } = {}) {
-		this.options.resolveSymlinks = true;
-		this.options.useRealPaths = resolvePaths;
-		return this.withFullPaths();
-	}
-	withAbortSignal(signal) {
-		this.options.signal = signal;
-		return this;
-	}
-	normalize() {
-		this.options.normalizePath = true;
-		return this;
-	}
-	filter(predicate) {
-		this.options.filters.push(predicate);
-		return this;
-	}
-	onlyDirs() {
-		this.options.excludeFiles = true;
-		this.options.includeDirs = true;
-		return this;
-	}
-	exclude(predicate) {
-		this.options.exclude = predicate;
-		return this;
-	}
-	onlyCounts() {
-		this.options.onlyCounts = true;
-		return this;
-	}
-	crawl(root) {
-		return new APIBuilder(root || ".", this.options);
-	}
-	withGlobFunction(fn) {
-		this.globFunction = fn;
-		return this;
-	}
-	/**
-	* @deprecated Pass options using the constructor instead:
-	* ```ts
-	* new fdir(options).crawl("/path/to/root");
-	* ```
-	* This method will be removed in v7.0
-	*/
-	/* c8 ignore next 4 */
-	crawlWithOptions(root, options) {
-		this.options = {
-			...this.options,
-			...options
-		};
-		return new APIBuilder(root || ".", this.options);
-	}
-	glob(...patterns) {
-		if (this.globFunction) return this.globWithOptions(patterns);
-		return this.globWithOptions(patterns, ...[{ dot: true }]);
-	}
-	globWithOptions(patterns, ...options) {
-		const globFn = this.globFunction || pm;
-		/* c8 ignore next 5 */
-		if (!globFn) throw new Error("Please specify a glob function to use glob matching.");
-		var isMatch = this.globCache[patterns.join("\0")];
-		if (!isMatch) {
-			isMatch = globFn(patterns, ...options);
-			this.globCache[patterns.join("\0")] = isMatch;
-		}
-		this.options.filters.push((path) => isMatch(path));
-		return this;
-	}
-};
-
-//#endregion
-export { Builder as fdir };
\ No newline at end of file
diff --git a/node_modules/fdir/package.json b/node_modules/fdir/package.json
deleted file mode 100644
index e229dff815080..0000000000000
--- a/node_modules/fdir/package.json
+++ /dev/null
@@ -1,103 +0,0 @@
-{
-  "name": "fdir",
-  "version": "6.5.0",
-  "description": "The fastest directory crawler & globbing alternative to glob, fast-glob, & tiny-glob. Crawls 1m files in < 1s",
-  "main": "./dist/index.cjs",
-  "types": "./dist/index.d.cts",
-  "type": "module",
-  "scripts": {
-    "prepublishOnly": "npm run test && npm run build",
-    "build": "tsdown",
-    "format": "prettier --write src __tests__ benchmarks",
-    "test": "vitest run __tests__/",
-    "test:coverage": "vitest run --coverage __tests__/",
-    "test:watch": "vitest __tests__/",
-    "bench": "ts-node benchmarks/benchmark.js",
-    "bench:glob": "ts-node benchmarks/glob-benchmark.ts",
-    "bench:fdir": "ts-node benchmarks/fdir-benchmark.ts",
-    "release": "./scripts/release.sh"
-  },
-  "engines": {
-    "node": ">=12.0.0"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/thecodrr/fdir.git"
-  },
-  "keywords": [
-    "util",
-    "os",
-    "sys",
-    "fs",
-    "walk",
-    "crawler",
-    "directory",
-    "files",
-    "io",
-    "tiny-glob",
-    "glob",
-    "fast-glob",
-    "speed",
-    "javascript",
-    "nodejs"
-  ],
-  "author": "thecodrr ",
-  "license": "MIT",
-  "bugs": {
-    "url": "https://github.com/thecodrr/fdir/issues"
-  },
-  "homepage": "https://github.com/thecodrr/fdir#readme",
-  "devDependencies": {
-    "@types/glob": "^8.1.0",
-    "@types/mock-fs": "^4.13.4",
-    "@types/node": "^20.9.4",
-    "@types/picomatch": "^4.0.0",
-    "@types/tap": "^15.0.11",
-    "@vitest/coverage-v8": "^0.34.6",
-    "all-files-in-tree": "^1.1.2",
-    "benny": "^3.7.1",
-    "csv-to-markdown-table": "^1.3.1",
-    "expect": "^29.7.0",
-    "fast-glob": "^3.3.2",
-    "fdir1": "npm:fdir@1.2.0",
-    "fdir2": "npm:fdir@2.1.0",
-    "fdir3": "npm:fdir@3.4.2",
-    "fdir4": "npm:fdir@4.1.0",
-    "fdir5": "npm:fdir@5.0.0",
-    "fs-readdir-recursive": "^1.1.0",
-    "get-all-files": "^4.1.0",
-    "glob": "^10.3.10",
-    "klaw-sync": "^6.0.0",
-    "mock-fs": "^5.2.0",
-    "picomatch": "^4.0.2",
-    "prettier": "^3.5.3",
-    "recur-readdir": "0.0.1",
-    "recursive-files": "^1.0.2",
-    "recursive-fs": "^2.1.0",
-    "recursive-readdir": "^2.2.3",
-    "rrdir": "^12.1.0",
-    "systeminformation": "^5.21.17",
-    "tiny-glob": "^0.2.9",
-    "ts-node": "^10.9.1",
-    "tsdown": "^0.12.5",
-    "typescript": "^5.3.2",
-    "vitest": "^0.34.6",
-    "walk-sync": "^3.0.0"
-  },
-  "peerDependencies": {
-    "picomatch": "^3 || ^4"
-  },
-  "peerDependenciesMeta": {
-    "picomatch": {
-      "optional": true
-    }
-  },
-  "module": "./dist/index.mjs",
-  "exports": {
-    ".": {
-      "import": "./dist/index.mjs",
-      "require": "./dist/index.cjs"
-    },
-    "./package.json": "./package.json"
-  }
-}
diff --git a/node_modules/picomatch/LICENSE b/node_modules/picomatch/LICENSE
deleted file mode 100644
index 3608dca25e30b..0000000000000
--- a/node_modules/picomatch/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-The MIT License (MIT)
-
-Copyright (c) 2017-present, Jon Schlinkert.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/picomatch/index.js b/node_modules/picomatch/index.js
deleted file mode 100644
index a753b1d9e843c..0000000000000
--- a/node_modules/picomatch/index.js
+++ /dev/null
@@ -1,17 +0,0 @@
-'use strict';
-
-const pico = require('./lib/picomatch');
-const utils = require('./lib/utils');
-
-function picomatch(glob, options, returnState = false) {
-  // default to os.platform()
-  if (options && (options.windows === null || options.windows === undefined)) {
-    // don't mutate the original options object
-    options = { ...options, windows: utils.isWindows() };
-  }
-
-  return pico(glob, options, returnState);
-}
-
-Object.assign(picomatch, pico);
-module.exports = picomatch;
diff --git a/node_modules/picomatch/lib/constants.js b/node_modules/picomatch/lib/constants.js
deleted file mode 100644
index 3f7ef7e53adaf..0000000000000
--- a/node_modules/picomatch/lib/constants.js
+++ /dev/null
@@ -1,180 +0,0 @@
-'use strict';
-
-const WIN_SLASH = '\\\\/';
-const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
-
-/**
- * Posix glob regex
- */
-
-const DOT_LITERAL = '\\.';
-const PLUS_LITERAL = '\\+';
-const QMARK_LITERAL = '\\?';
-const SLASH_LITERAL = '\\/';
-const ONE_CHAR = '(?=.)';
-const QMARK = '[^/]';
-const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
-const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
-const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
-const NO_DOT = `(?!${DOT_LITERAL})`;
-const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
-const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
-const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
-const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
-const STAR = `${QMARK}*?`;
-const SEP = '/';
-
-const POSIX_CHARS = {
-  DOT_LITERAL,
-  PLUS_LITERAL,
-  QMARK_LITERAL,
-  SLASH_LITERAL,
-  ONE_CHAR,
-  QMARK,
-  END_ANCHOR,
-  DOTS_SLASH,
-  NO_DOT,
-  NO_DOTS,
-  NO_DOT_SLASH,
-  NO_DOTS_SLASH,
-  QMARK_NO_DOT,
-  STAR,
-  START_ANCHOR,
-  SEP
-};
-
-/**
- * Windows glob regex
- */
-
-const WINDOWS_CHARS = {
-  ...POSIX_CHARS,
-
-  SLASH_LITERAL: `[${WIN_SLASH}]`,
-  QMARK: WIN_NO_SLASH,
-  STAR: `${WIN_NO_SLASH}*?`,
-  DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
-  NO_DOT: `(?!${DOT_LITERAL})`,
-  NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
-  NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
-  NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
-  QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
-  START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
-  END_ANCHOR: `(?:[${WIN_SLASH}]|$)`,
-  SEP: '\\'
-};
-
-/**
- * POSIX Bracket Regex
- */
-
-const POSIX_REGEX_SOURCE = {
-  alnum: 'a-zA-Z0-9',
-  alpha: 'a-zA-Z',
-  ascii: '\\x00-\\x7F',
-  blank: ' \\t',
-  cntrl: '\\x00-\\x1F\\x7F',
-  digit: '0-9',
-  graph: '\\x21-\\x7E',
-  lower: 'a-z',
-  print: '\\x20-\\x7E ',
-  punct: '\\-!"#$%&\'()\\*+,./:;<=>?@[\\]^_`{|}~',
-  space: ' \\t\\r\\n\\v\\f',
-  upper: 'A-Z',
-  word: 'A-Za-z0-9_',
-  xdigit: 'A-Fa-f0-9'
-};
-
-module.exports = {
-  MAX_LENGTH: 1024 * 64,
-  POSIX_REGEX_SOURCE,
-
-  // regular expressions
-  REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
-  REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
-  REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
-  REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
-  REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
-  REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
-
-  // Replace globs with equivalent patterns to reduce parsing time.
-  REPLACEMENTS: {
-    __proto__: null,
-    '***': '*',
-    '**/**': '**',
-    '**/**/**': '**'
-  },
-
-  // Digits
-  CHAR_0: 48, /* 0 */
-  CHAR_9: 57, /* 9 */
-
-  // Alphabet chars.
-  CHAR_UPPERCASE_A: 65, /* A */
-  CHAR_LOWERCASE_A: 97, /* a */
-  CHAR_UPPERCASE_Z: 90, /* Z */
-  CHAR_LOWERCASE_Z: 122, /* z */
-
-  CHAR_LEFT_PARENTHESES: 40, /* ( */
-  CHAR_RIGHT_PARENTHESES: 41, /* ) */
-
-  CHAR_ASTERISK: 42, /* * */
-
-  // Non-alphabetic chars.
-  CHAR_AMPERSAND: 38, /* & */
-  CHAR_AT: 64, /* @ */
-  CHAR_BACKWARD_SLASH: 92, /* \ */
-  CHAR_CARRIAGE_RETURN: 13, /* \r */
-  CHAR_CIRCUMFLEX_ACCENT: 94, /* ^ */
-  CHAR_COLON: 58, /* : */
-  CHAR_COMMA: 44, /* , */
-  CHAR_DOT: 46, /* . */
-  CHAR_DOUBLE_QUOTE: 34, /* " */
-  CHAR_EQUAL: 61, /* = */
-  CHAR_EXCLAMATION_MARK: 33, /* ! */
-  CHAR_FORM_FEED: 12, /* \f */
-  CHAR_FORWARD_SLASH: 47, /* / */
-  CHAR_GRAVE_ACCENT: 96, /* ` */
-  CHAR_HASH: 35, /* # */
-  CHAR_HYPHEN_MINUS: 45, /* - */
-  CHAR_LEFT_ANGLE_BRACKET: 60, /* < */
-  CHAR_LEFT_CURLY_BRACE: 123, /* { */
-  CHAR_LEFT_SQUARE_BRACKET: 91, /* [ */
-  CHAR_LINE_FEED: 10, /* \n */
-  CHAR_NO_BREAK_SPACE: 160, /* \u00A0 */
-  CHAR_PERCENT: 37, /* % */
-  CHAR_PLUS: 43, /* + */
-  CHAR_QUESTION_MARK: 63, /* ? */
-  CHAR_RIGHT_ANGLE_BRACKET: 62, /* > */
-  CHAR_RIGHT_CURLY_BRACE: 125, /* } */
-  CHAR_RIGHT_SQUARE_BRACKET: 93, /* ] */
-  CHAR_SEMICOLON: 59, /* ; */
-  CHAR_SINGLE_QUOTE: 39, /* ' */
-  CHAR_SPACE: 32, /*   */
-  CHAR_TAB: 9, /* \t */
-  CHAR_UNDERSCORE: 95, /* _ */
-  CHAR_VERTICAL_LINE: 124, /* | */
-  CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279, /* \uFEFF */
-
-  /**
-   * Create EXTGLOB_CHARS
-   */
-
-  extglobChars(chars) {
-    return {
-      '!': { type: 'negate', open: '(?:(?!(?:', close: `))${chars.STAR})` },
-      '?': { type: 'qmark', open: '(?:', close: ')?' },
-      '+': { type: 'plus', open: '(?:', close: ')+' },
-      '*': { type: 'star', open: '(?:', close: ')*' },
-      '@': { type: 'at', open: '(?:', close: ')' }
-    };
-  },
-
-  /**
-   * Create GLOB_CHARS
-   */
-
-  globChars(win32) {
-    return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
-  }
-};
diff --git a/node_modules/picomatch/lib/parse.js b/node_modules/picomatch/lib/parse.js
deleted file mode 100644
index 8fd8ff499d182..0000000000000
--- a/node_modules/picomatch/lib/parse.js
+++ /dev/null
@@ -1,1085 +0,0 @@
-'use strict';
-
-const constants = require('./constants');
-const utils = require('./utils');
-
-/**
- * Constants
- */
-
-const {
-  MAX_LENGTH,
-  POSIX_REGEX_SOURCE,
-  REGEX_NON_SPECIAL_CHARS,
-  REGEX_SPECIAL_CHARS_BACKREF,
-  REPLACEMENTS
-} = constants;
-
-/**
- * Helpers
- */
-
-const expandRange = (args, options) => {
-  if (typeof options.expandRange === 'function') {
-    return options.expandRange(...args, options);
-  }
-
-  args.sort();
-  const value = `[${args.join('-')}]`;
-
-  try {
-    /* eslint-disable-next-line no-new */
-    new RegExp(value);
-  } catch (ex) {
-    return args.map(v => utils.escapeRegex(v)).join('..');
-  }
-
-  return value;
-};
-
-/**
- * Create the message for a syntax error
- */
-
-const syntaxError = (type, char) => {
-  return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
-};
-
-/**
- * Parse the given input string.
- * @param {String} input
- * @param {Object} options
- * @return {Object}
- */
-
-const parse = (input, options) => {
-  if (typeof input !== 'string') {
-    throw new TypeError('Expected a string');
-  }
-
-  input = REPLACEMENTS[input] || input;
-
-  const opts = { ...options };
-  const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
-
-  let len = input.length;
-  if (len > max) {
-    throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
-  }
-
-  const bos = { type: 'bos', value: '', output: opts.prepend || '' };
-  const tokens = [bos];
-
-  const capture = opts.capture ? '' : '?:';
-
-  // create constants based on platform, for windows or posix
-  const PLATFORM_CHARS = constants.globChars(opts.windows);
-  const EXTGLOB_CHARS = constants.extglobChars(PLATFORM_CHARS);
-
-  const {
-    DOT_LITERAL,
-    PLUS_LITERAL,
-    SLASH_LITERAL,
-    ONE_CHAR,
-    DOTS_SLASH,
-    NO_DOT,
-    NO_DOT_SLASH,
-    NO_DOTS_SLASH,
-    QMARK,
-    QMARK_NO_DOT,
-    STAR,
-    START_ANCHOR
-  } = PLATFORM_CHARS;
-
-  const globstar = opts => {
-    return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
-  };
-
-  const nodot = opts.dot ? '' : NO_DOT;
-  const qmarkNoDot = opts.dot ? QMARK : QMARK_NO_DOT;
-  let star = opts.bash === true ? globstar(opts) : STAR;
-
-  if (opts.capture) {
-    star = `(${star})`;
-  }
-
-  // minimatch options support
-  if (typeof opts.noext === 'boolean') {
-    opts.noextglob = opts.noext;
-  }
-
-  const state = {
-    input,
-    index: -1,
-    start: 0,
-    dot: opts.dot === true,
-    consumed: '',
-    output: '',
-    prefix: '',
-    backtrack: false,
-    negated: false,
-    brackets: 0,
-    braces: 0,
-    parens: 0,
-    quotes: 0,
-    globstar: false,
-    tokens
-  };
-
-  input = utils.removePrefix(input, state);
-  len = input.length;
-
-  const extglobs = [];
-  const braces = [];
-  const stack = [];
-  let prev = bos;
-  let value;
-
-  /**
-   * Tokenizing helpers
-   */
-
-  const eos = () => state.index === len - 1;
-  const peek = state.peek = (n = 1) => input[state.index + n];
-  const advance = state.advance = () => input[++state.index] || '';
-  const remaining = () => input.slice(state.index + 1);
-  const consume = (value = '', num = 0) => {
-    state.consumed += value;
-    state.index += num;
-  };
-
-  const append = token => {
-    state.output += token.output != null ? token.output : token.value;
-    consume(token.value);
-  };
-
-  const negate = () => {
-    let count = 1;
-
-    while (peek() === '!' && (peek(2) !== '(' || peek(3) === '?')) {
-      advance();
-      state.start++;
-      count++;
-    }
-
-    if (count % 2 === 0) {
-      return false;
-    }
-
-    state.negated = true;
-    state.start++;
-    return true;
-  };
-
-  const increment = type => {
-    state[type]++;
-    stack.push(type);
-  };
-
-  const decrement = type => {
-    state[type]--;
-    stack.pop();
-  };
-
-  /**
-   * Push tokens onto the tokens array. This helper speeds up
-   * tokenizing by 1) helping us avoid backtracking as much as possible,
-   * and 2) helping us avoid creating extra tokens when consecutive
-   * characters are plain text. This improves performance and simplifies
-   * lookbehinds.
-   */
-
-  const push = tok => {
-    if (prev.type === 'globstar') {
-      const isBrace = state.braces > 0 && (tok.type === 'comma' || tok.type === 'brace');
-      const isExtglob = tok.extglob === true || (extglobs.length && (tok.type === 'pipe' || tok.type === 'paren'));
-
-      if (tok.type !== 'slash' && tok.type !== 'paren' && !isBrace && !isExtglob) {
-        state.output = state.output.slice(0, -prev.output.length);
-        prev.type = 'star';
-        prev.value = '*';
-        prev.output = star;
-        state.output += prev.output;
-      }
-    }
-
-    if (extglobs.length && tok.type !== 'paren') {
-      extglobs[extglobs.length - 1].inner += tok.value;
-    }
-
-    if (tok.value || tok.output) append(tok);
-    if (prev && prev.type === 'text' && tok.type === 'text') {
-      prev.output = (prev.output || prev.value) + tok.value;
-      prev.value += tok.value;
-      return;
-    }
-
-    tok.prev = prev;
-    tokens.push(tok);
-    prev = tok;
-  };
-
-  const extglobOpen = (type, value) => {
-    const token = { ...EXTGLOB_CHARS[value], conditions: 1, inner: '' };
-
-    token.prev = prev;
-    token.parens = state.parens;
-    token.output = state.output;
-    const output = (opts.capture ? '(' : '') + token.open;
-
-    increment('parens');
-    push({ type, value, output: state.output ? '' : ONE_CHAR });
-    push({ type: 'paren', extglob: true, value: advance(), output });
-    extglobs.push(token);
-  };
-
-  const extglobClose = token => {
-    let output = token.close + (opts.capture ? ')' : '');
-    let rest;
-
-    if (token.type === 'negate') {
-      let extglobStar = star;
-
-      if (token.inner && token.inner.length > 1 && token.inner.includes('/')) {
-        extglobStar = globstar(opts);
-      }
-
-      if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
-        output = token.close = `)$))${extglobStar}`;
-      }
-
-      if (token.inner.includes('*') && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
-        // Any non-magical string (`.ts`) or even nested expression (`.{ts,tsx}`) can follow after the closing parenthesis.
-        // In this case, we need to parse the string and use it in the output of the original pattern.
-        // Suitable patterns: `/!(*.d).ts`, `/!(*.d).{ts,tsx}`, `**/!(*-dbg).@(js)`.
-        //
-        // Disabling the `fastpaths` option due to a problem with parsing strings as `.ts` in the pattern like `**/!(*.d).ts`.
-        const expression = parse(rest, { ...options, fastpaths: false }).output;
-
-        output = token.close = `)${expression})${extglobStar})`;
-      }
-
-      if (token.prev.type === 'bos') {
-        state.negatedExtglob = true;
-      }
-    }
-
-    push({ type: 'paren', extglob: true, value, output });
-    decrement('parens');
-  };
-
-  /**
-   * Fast paths
-   */
-
-  if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
-    let backslashes = false;
-
-    let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
-      if (first === '\\') {
-        backslashes = true;
-        return m;
-      }
-
-      if (first === '?') {
-        if (esc) {
-          return esc + first + (rest ? QMARK.repeat(rest.length) : '');
-        }
-        if (index === 0) {
-          return qmarkNoDot + (rest ? QMARK.repeat(rest.length) : '');
-        }
-        return QMARK.repeat(chars.length);
-      }
-
-      if (first === '.') {
-        return DOT_LITERAL.repeat(chars.length);
-      }
-
-      if (first === '*') {
-        if (esc) {
-          return esc + first + (rest ? star : '');
-        }
-        return star;
-      }
-      return esc ? m : `\\${m}`;
-    });
-
-    if (backslashes === true) {
-      if (opts.unescape === true) {
-        output = output.replace(/\\/g, '');
-      } else {
-        output = output.replace(/\\+/g, m => {
-          return m.length % 2 === 0 ? '\\\\' : (m ? '\\' : '');
-        });
-      }
-    }
-
-    if (output === input && opts.contains === true) {
-      state.output = input;
-      return state;
-    }
-
-    state.output = utils.wrapOutput(output, state, options);
-    return state;
-  }
-
-  /**
-   * Tokenize input until we reach end-of-string
-   */
-
-  while (!eos()) {
-    value = advance();
-
-    if (value === '\u0000') {
-      continue;
-    }
-
-    /**
-     * Escaped characters
-     */
-
-    if (value === '\\') {
-      const next = peek();
-
-      if (next === '/' && opts.bash !== true) {
-        continue;
-      }
-
-      if (next === '.' || next === ';') {
-        continue;
-      }
-
-      if (!next) {
-        value += '\\';
-        push({ type: 'text', value });
-        continue;
-      }
-
-      // collapse slashes to reduce potential for exploits
-      const match = /^\\+/.exec(remaining());
-      let slashes = 0;
-
-      if (match && match[0].length > 2) {
-        slashes = match[0].length;
-        state.index += slashes;
-        if (slashes % 2 !== 0) {
-          value += '\\';
-        }
-      }
-
-      if (opts.unescape === true) {
-        value = advance();
-      } else {
-        value += advance();
-      }
-
-      if (state.brackets === 0) {
-        push({ type: 'text', value });
-        continue;
-      }
-    }
-
-    /**
-     * If we're inside a regex character class, continue
-     * until we reach the closing bracket.
-     */
-
-    if (state.brackets > 0 && (value !== ']' || prev.value === '[' || prev.value === '[^')) {
-      if (opts.posix !== false && value === ':') {
-        const inner = prev.value.slice(1);
-        if (inner.includes('[')) {
-          prev.posix = true;
-
-          if (inner.includes(':')) {
-            const idx = prev.value.lastIndexOf('[');
-            const pre = prev.value.slice(0, idx);
-            const rest = prev.value.slice(idx + 2);
-            const posix = POSIX_REGEX_SOURCE[rest];
-            if (posix) {
-              prev.value = pre + posix;
-              state.backtrack = true;
-              advance();
-
-              if (!bos.output && tokens.indexOf(prev) === 1) {
-                bos.output = ONE_CHAR;
-              }
-              continue;
-            }
-          }
-        }
-      }
-
-      if ((value === '[' && peek() !== ':') || (value === '-' && peek() === ']')) {
-        value = `\\${value}`;
-      }
-
-      if (value === ']' && (prev.value === '[' || prev.value === '[^')) {
-        value = `\\${value}`;
-      }
-
-      if (opts.posix === true && value === '!' && prev.value === '[') {
-        value = '^';
-      }
-
-      prev.value += value;
-      append({ value });
-      continue;
-    }
-
-    /**
-     * If we're inside a quoted string, continue
-     * until we reach the closing double quote.
-     */
-
-    if (state.quotes === 1 && value !== '"') {
-      value = utils.escapeRegex(value);
-      prev.value += value;
-      append({ value });
-      continue;
-    }
-
-    /**
-     * Double quotes
-     */
-
-    if (value === '"') {
-      state.quotes = state.quotes === 1 ? 0 : 1;
-      if (opts.keepQuotes === true) {
-        push({ type: 'text', value });
-      }
-      continue;
-    }
-
-    /**
-     * Parentheses
-     */
-
-    if (value === '(') {
-      increment('parens');
-      push({ type: 'paren', value });
-      continue;
-    }
-
-    if (value === ')') {
-      if (state.parens === 0 && opts.strictBrackets === true) {
-        throw new SyntaxError(syntaxError('opening', '('));
-      }
-
-      const extglob = extglobs[extglobs.length - 1];
-      if (extglob && state.parens === extglob.parens + 1) {
-        extglobClose(extglobs.pop());
-        continue;
-      }
-
-      push({ type: 'paren', value, output: state.parens ? ')' : '\\)' });
-      decrement('parens');
-      continue;
-    }
-
-    /**
-     * Square brackets
-     */
-
-    if (value === '[') {
-      if (opts.nobracket === true || !remaining().includes(']')) {
-        if (opts.nobracket !== true && opts.strictBrackets === true) {
-          throw new SyntaxError(syntaxError('closing', ']'));
-        }
-
-        value = `\\${value}`;
-      } else {
-        increment('brackets');
-      }
-
-      push({ type: 'bracket', value });
-      continue;
-    }
-
-    if (value === ']') {
-      if (opts.nobracket === true || (prev && prev.type === 'bracket' && prev.value.length === 1)) {
-        push({ type: 'text', value, output: `\\${value}` });
-        continue;
-      }
-
-      if (state.brackets === 0) {
-        if (opts.strictBrackets === true) {
-          throw new SyntaxError(syntaxError('opening', '['));
-        }
-
-        push({ type: 'text', value, output: `\\${value}` });
-        continue;
-      }
-
-      decrement('brackets');
-
-      const prevValue = prev.value.slice(1);
-      if (prev.posix !== true && prevValue[0] === '^' && !prevValue.includes('/')) {
-        value = `/${value}`;
-      }
-
-      prev.value += value;
-      append({ value });
-
-      // when literal brackets are explicitly disabled
-      // assume we should match with a regex character class
-      if (opts.literalBrackets === false || utils.hasRegexChars(prevValue)) {
-        continue;
-      }
-
-      const escaped = utils.escapeRegex(prev.value);
-      state.output = state.output.slice(0, -prev.value.length);
-
-      // when literal brackets are explicitly enabled
-      // assume we should escape the brackets to match literal characters
-      if (opts.literalBrackets === true) {
-        state.output += escaped;
-        prev.value = escaped;
-        continue;
-      }
-
-      // when the user specifies nothing, try to match both
-      prev.value = `(${capture}${escaped}|${prev.value})`;
-      state.output += prev.value;
-      continue;
-    }
-
-    /**
-     * Braces
-     */
-
-    if (value === '{' && opts.nobrace !== true) {
-      increment('braces');
-
-      const open = {
-        type: 'brace',
-        value,
-        output: '(',
-        outputIndex: state.output.length,
-        tokensIndex: state.tokens.length
-      };
-
-      braces.push(open);
-      push(open);
-      continue;
-    }
-
-    if (value === '}') {
-      const brace = braces[braces.length - 1];
-
-      if (opts.nobrace === true || !brace) {
-        push({ type: 'text', value, output: value });
-        continue;
-      }
-
-      let output = ')';
-
-      if (brace.dots === true) {
-        const arr = tokens.slice();
-        const range = [];
-
-        for (let i = arr.length - 1; i >= 0; i--) {
-          tokens.pop();
-          if (arr[i].type === 'brace') {
-            break;
-          }
-          if (arr[i].type !== 'dots') {
-            range.unshift(arr[i].value);
-          }
-        }
-
-        output = expandRange(range, opts);
-        state.backtrack = true;
-      }
-
-      if (brace.comma !== true && brace.dots !== true) {
-        const out = state.output.slice(0, brace.outputIndex);
-        const toks = state.tokens.slice(brace.tokensIndex);
-        brace.value = brace.output = '\\{';
-        value = output = '\\}';
-        state.output = out;
-        for (const t of toks) {
-          state.output += (t.output || t.value);
-        }
-      }
-
-      push({ type: 'brace', value, output });
-      decrement('braces');
-      braces.pop();
-      continue;
-    }
-
-    /**
-     * Pipes
-     */
-
-    if (value === '|') {
-      if (extglobs.length > 0) {
-        extglobs[extglobs.length - 1].conditions++;
-      }
-      push({ type: 'text', value });
-      continue;
-    }
-
-    /**
-     * Commas
-     */
-
-    if (value === ',') {
-      let output = value;
-
-      const brace = braces[braces.length - 1];
-      if (brace && stack[stack.length - 1] === 'braces') {
-        brace.comma = true;
-        output = '|';
-      }
-
-      push({ type: 'comma', value, output });
-      continue;
-    }
-
-    /**
-     * Slashes
-     */
-
-    if (value === '/') {
-      // if the beginning of the glob is "./", advance the start
-      // to the current index, and don't add the "./" characters
-      // to the state. This greatly simplifies lookbehinds when
-      // checking for BOS characters like "!" and "." (not "./")
-      if (prev.type === 'dot' && state.index === state.start + 1) {
-        state.start = state.index + 1;
-        state.consumed = '';
-        state.output = '';
-        tokens.pop();
-        prev = bos; // reset "prev" to the first token
-        continue;
-      }
-
-      push({ type: 'slash', value, output: SLASH_LITERAL });
-      continue;
-    }
-
-    /**
-     * Dots
-     */
-
-    if (value === '.') {
-      if (state.braces > 0 && prev.type === 'dot') {
-        if (prev.value === '.') prev.output = DOT_LITERAL;
-        const brace = braces[braces.length - 1];
-        prev.type = 'dots';
-        prev.output += value;
-        prev.value += value;
-        brace.dots = true;
-        continue;
-      }
-
-      if ((state.braces + state.parens) === 0 && prev.type !== 'bos' && prev.type !== 'slash') {
-        push({ type: 'text', value, output: DOT_LITERAL });
-        continue;
-      }
-
-      push({ type: 'dot', value, output: DOT_LITERAL });
-      continue;
-    }
-
-    /**
-     * Question marks
-     */
-
-    if (value === '?') {
-      const isGroup = prev && prev.value === '(';
-      if (!isGroup && opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
-        extglobOpen('qmark', value);
-        continue;
-      }
-
-      if (prev && prev.type === 'paren') {
-        const next = peek();
-        let output = value;
-
-        if ((prev.value === '(' && !/[!=<:]/.test(next)) || (next === '<' && !/<([!=]|\w+>)/.test(remaining()))) {
-          output = `\\${value}`;
-        }
-
-        push({ type: 'text', value, output });
-        continue;
-      }
-
-      if (opts.dot !== true && (prev.type === 'slash' || prev.type === 'bos')) {
-        push({ type: 'qmark', value, output: QMARK_NO_DOT });
-        continue;
-      }
-
-      push({ type: 'qmark', value, output: QMARK });
-      continue;
-    }
-
-    /**
-     * Exclamation
-     */
-
-    if (value === '!') {
-      if (opts.noextglob !== true && peek() === '(') {
-        if (peek(2) !== '?' || !/[!=<:]/.test(peek(3))) {
-          extglobOpen('negate', value);
-          continue;
-        }
-      }
-
-      if (opts.nonegate !== true && state.index === 0) {
-        negate();
-        continue;
-      }
-    }
-
-    /**
-     * Plus
-     */
-
-    if (value === '+') {
-      if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
-        extglobOpen('plus', value);
-        continue;
-      }
-
-      if ((prev && prev.value === '(') || opts.regex === false) {
-        push({ type: 'plus', value, output: PLUS_LITERAL });
-        continue;
-      }
-
-      if ((prev && (prev.type === 'bracket' || prev.type === 'paren' || prev.type === 'brace')) || state.parens > 0) {
-        push({ type: 'plus', value });
-        continue;
-      }
-
-      push({ type: 'plus', value: PLUS_LITERAL });
-      continue;
-    }
-
-    /**
-     * Plain text
-     */
-
-    if (value === '@') {
-      if (opts.noextglob !== true && peek() === '(' && peek(2) !== '?') {
-        push({ type: 'at', extglob: true, value, output: '' });
-        continue;
-      }
-
-      push({ type: 'text', value });
-      continue;
-    }
-
-    /**
-     * Plain text
-     */
-
-    if (value !== '*') {
-      if (value === '$' || value === '^') {
-        value = `\\${value}`;
-      }
-
-      const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
-      if (match) {
-        value += match[0];
-        state.index += match[0].length;
-      }
-
-      push({ type: 'text', value });
-      continue;
-    }
-
-    /**
-     * Stars
-     */
-
-    if (prev && (prev.type === 'globstar' || prev.star === true)) {
-      prev.type = 'star';
-      prev.star = true;
-      prev.value += value;
-      prev.output = star;
-      state.backtrack = true;
-      state.globstar = true;
-      consume(value);
-      continue;
-    }
-
-    let rest = remaining();
-    if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
-      extglobOpen('star', value);
-      continue;
-    }
-
-    if (prev.type === 'star') {
-      if (opts.noglobstar === true) {
-        consume(value);
-        continue;
-      }
-
-      const prior = prev.prev;
-      const before = prior.prev;
-      const isStart = prior.type === 'slash' || prior.type === 'bos';
-      const afterStar = before && (before.type === 'star' || before.type === 'globstar');
-
-      if (opts.bash === true && (!isStart || (rest[0] && rest[0] !== '/'))) {
-        push({ type: 'star', value, output: '' });
-        continue;
-      }
-
-      const isBrace = state.braces > 0 && (prior.type === 'comma' || prior.type === 'brace');
-      const isExtglob = extglobs.length && (prior.type === 'pipe' || prior.type === 'paren');
-      if (!isStart && prior.type !== 'paren' && !isBrace && !isExtglob) {
-        push({ type: 'star', value, output: '' });
-        continue;
-      }
-
-      // strip consecutive `/**/`
-      while (rest.slice(0, 3) === '/**') {
-        const after = input[state.index + 4];
-        if (after && after !== '/') {
-          break;
-        }
-        rest = rest.slice(3);
-        consume('/**', 3);
-      }
-
-      if (prior.type === 'bos' && eos()) {
-        prev.type = 'globstar';
-        prev.value += value;
-        prev.output = globstar(opts);
-        state.output = prev.output;
-        state.globstar = true;
-        consume(value);
-        continue;
-      }
-
-      if (prior.type === 'slash' && prior.prev.type !== 'bos' && !afterStar && eos()) {
-        state.output = state.output.slice(0, -(prior.output + prev.output).length);
-        prior.output = `(?:${prior.output}`;
-
-        prev.type = 'globstar';
-        prev.output = globstar(opts) + (opts.strictSlashes ? ')' : '|$)');
-        prev.value += value;
-        state.globstar = true;
-        state.output += prior.output + prev.output;
-        consume(value);
-        continue;
-      }
-
-      if (prior.type === 'slash' && prior.prev.type !== 'bos' && rest[0] === '/') {
-        const end = rest[1] !== void 0 ? '|$' : '';
-
-        state.output = state.output.slice(0, -(prior.output + prev.output).length);
-        prior.output = `(?:${prior.output}`;
-
-        prev.type = 'globstar';
-        prev.output = `${globstar(opts)}${SLASH_LITERAL}|${SLASH_LITERAL}${end})`;
-        prev.value += value;
-
-        state.output += prior.output + prev.output;
-        state.globstar = true;
-
-        consume(value + advance());
-
-        push({ type: 'slash', value: '/', output: '' });
-        continue;
-      }
-
-      if (prior.type === 'bos' && rest[0] === '/') {
-        prev.type = 'globstar';
-        prev.value += value;
-        prev.output = `(?:^|${SLASH_LITERAL}|${globstar(opts)}${SLASH_LITERAL})`;
-        state.output = prev.output;
-        state.globstar = true;
-        consume(value + advance());
-        push({ type: 'slash', value: '/', output: '' });
-        continue;
-      }
-
-      // remove single star from output
-      state.output = state.output.slice(0, -prev.output.length);
-
-      // reset previous token to globstar
-      prev.type = 'globstar';
-      prev.output = globstar(opts);
-      prev.value += value;
-
-      // reset output with globstar
-      state.output += prev.output;
-      state.globstar = true;
-      consume(value);
-      continue;
-    }
-
-    const token = { type: 'star', value, output: star };
-
-    if (opts.bash === true) {
-      token.output = '.*?';
-      if (prev.type === 'bos' || prev.type === 'slash') {
-        token.output = nodot + token.output;
-      }
-      push(token);
-      continue;
-    }
-
-    if (prev && (prev.type === 'bracket' || prev.type === 'paren') && opts.regex === true) {
-      token.output = value;
-      push(token);
-      continue;
-    }
-
-    if (state.index === state.start || prev.type === 'slash' || prev.type === 'dot') {
-      if (prev.type === 'dot') {
-        state.output += NO_DOT_SLASH;
-        prev.output += NO_DOT_SLASH;
-
-      } else if (opts.dot === true) {
-        state.output += NO_DOTS_SLASH;
-        prev.output += NO_DOTS_SLASH;
-
-      } else {
-        state.output += nodot;
-        prev.output += nodot;
-      }
-
-      if (peek() !== '*') {
-        state.output += ONE_CHAR;
-        prev.output += ONE_CHAR;
-      }
-    }
-
-    push(token);
-  }
-
-  while (state.brackets > 0) {
-    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ']'));
-    state.output = utils.escapeLast(state.output, '[');
-    decrement('brackets');
-  }
-
-  while (state.parens > 0) {
-    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', ')'));
-    state.output = utils.escapeLast(state.output, '(');
-    decrement('parens');
-  }
-
-  while (state.braces > 0) {
-    if (opts.strictBrackets === true) throw new SyntaxError(syntaxError('closing', '}'));
-    state.output = utils.escapeLast(state.output, '{');
-    decrement('braces');
-  }
-
-  if (opts.strictSlashes !== true && (prev.type === 'star' || prev.type === 'bracket')) {
-    push({ type: 'maybe_slash', value: '', output: `${SLASH_LITERAL}?` });
-  }
-
-  // rebuild the output if we had to backtrack at any point
-  if (state.backtrack === true) {
-    state.output = '';
-
-    for (const token of state.tokens) {
-      state.output += token.output != null ? token.output : token.value;
-
-      if (token.suffix) {
-        state.output += token.suffix;
-      }
-    }
-  }
-
-  return state;
-};
-
-/**
- * Fast paths for creating regular expressions for common glob patterns.
- * This can significantly speed up processing and has very little downside
- * impact when none of the fast paths match.
- */
-
-parse.fastpaths = (input, options) => {
-  const opts = { ...options };
-  const max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
-  const len = input.length;
-  if (len > max) {
-    throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
-  }
-
-  input = REPLACEMENTS[input] || input;
-
-  // create constants based on platform, for windows or posix
-  const {
-    DOT_LITERAL,
-    SLASH_LITERAL,
-    ONE_CHAR,
-    DOTS_SLASH,
-    NO_DOT,
-    NO_DOTS,
-    NO_DOTS_SLASH,
-    STAR,
-    START_ANCHOR
-  } = constants.globChars(opts.windows);
-
-  const nodot = opts.dot ? NO_DOTS : NO_DOT;
-  const slashDot = opts.dot ? NO_DOTS_SLASH : NO_DOT;
-  const capture = opts.capture ? '' : '?:';
-  const state = { negated: false, prefix: '' };
-  let star = opts.bash === true ? '.*?' : STAR;
-
-  if (opts.capture) {
-    star = `(${star})`;
-  }
-
-  const globstar = opts => {
-    if (opts.noglobstar === true) return star;
-    return `(${capture}(?:(?!${START_ANCHOR}${opts.dot ? DOTS_SLASH : DOT_LITERAL}).)*?)`;
-  };
-
-  const create = str => {
-    switch (str) {
-      case '*':
-        return `${nodot}${ONE_CHAR}${star}`;
-
-      case '.*':
-        return `${DOT_LITERAL}${ONE_CHAR}${star}`;
-
-      case '*.*':
-        return `${nodot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
-
-      case '*/*':
-        return `${nodot}${star}${SLASH_LITERAL}${ONE_CHAR}${slashDot}${star}`;
-
-      case '**':
-        return nodot + globstar(opts);
-
-      case '**/*':
-        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${ONE_CHAR}${star}`;
-
-      case '**/*.*':
-        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${slashDot}${star}${DOT_LITERAL}${ONE_CHAR}${star}`;
-
-      case '**/.*':
-        return `(?:${nodot}${globstar(opts)}${SLASH_LITERAL})?${DOT_LITERAL}${ONE_CHAR}${star}`;
-
-      default: {
-        const match = /^(.*?)\.(\w+)$/.exec(str);
-        if (!match) return;
-
-        const source = create(match[1]);
-        if (!source) return;
-
-        return source + DOT_LITERAL + match[2];
-      }
-    }
-  };
-
-  const output = utils.removePrefix(input, state);
-  let source = create(output);
-
-  if (source && opts.strictSlashes !== true) {
-    source += `${SLASH_LITERAL}?`;
-  }
-
-  return source;
-};
-
-module.exports = parse;
diff --git a/node_modules/picomatch/lib/picomatch.js b/node_modules/picomatch/lib/picomatch.js
deleted file mode 100644
index d0ebd9f163cf2..0000000000000
--- a/node_modules/picomatch/lib/picomatch.js
+++ /dev/null
@@ -1,341 +0,0 @@
-'use strict';
-
-const scan = require('./scan');
-const parse = require('./parse');
-const utils = require('./utils');
-const constants = require('./constants');
-const isObject = val => val && typeof val === 'object' && !Array.isArray(val);
-
-/**
- * Creates a matcher function from one or more glob patterns. The
- * returned function takes a string to match as its first argument,
- * and returns true if the string is a match. The returned matcher
- * function also takes a boolean as the second argument that, when true,
- * returns an object with additional information.
- *
- * ```js
- * const picomatch = require('picomatch');
- * // picomatch(glob[, options]);
- *
- * const isMatch = picomatch('*.!(*a)');
- * console.log(isMatch('a.a')); //=> false
- * console.log(isMatch('a.b')); //=> true
- * ```
- * @name picomatch
- * @param {String|Array} `globs` One or more glob patterns.
- * @param {Object=} `options`
- * @return {Function=} Returns a matcher function.
- * @api public
- */
-
-const picomatch = (glob, options, returnState = false) => {
-  if (Array.isArray(glob)) {
-    const fns = glob.map(input => picomatch(input, options, returnState));
-    const arrayMatcher = str => {
-      for (const isMatch of fns) {
-        const state = isMatch(str);
-        if (state) return state;
-      }
-      return false;
-    };
-    return arrayMatcher;
-  }
-
-  const isState = isObject(glob) && glob.tokens && glob.input;
-
-  if (glob === '' || (typeof glob !== 'string' && !isState)) {
-    throw new TypeError('Expected pattern to be a non-empty string');
-  }
-
-  const opts = options || {};
-  const posix = opts.windows;
-  const regex = isState
-    ? picomatch.compileRe(glob, options)
-    : picomatch.makeRe(glob, options, false, true);
-
-  const state = regex.state;
-  delete regex.state;
-
-  let isIgnored = () => false;
-  if (opts.ignore) {
-    const ignoreOpts = { ...options, ignore: null, onMatch: null, onResult: null };
-    isIgnored = picomatch(opts.ignore, ignoreOpts, returnState);
-  }
-
-  const matcher = (input, returnObject = false) => {
-    const { isMatch, match, output } = picomatch.test(input, regex, options, { glob, posix });
-    const result = { glob, state, regex, posix, input, output, match, isMatch };
-
-    if (typeof opts.onResult === 'function') {
-      opts.onResult(result);
-    }
-
-    if (isMatch === false) {
-      result.isMatch = false;
-      return returnObject ? result : false;
-    }
-
-    if (isIgnored(input)) {
-      if (typeof opts.onIgnore === 'function') {
-        opts.onIgnore(result);
-      }
-      result.isMatch = false;
-      return returnObject ? result : false;
-    }
-
-    if (typeof opts.onMatch === 'function') {
-      opts.onMatch(result);
-    }
-    return returnObject ? result : true;
-  };
-
-  if (returnState) {
-    matcher.state = state;
-  }
-
-  return matcher;
-};
-
-/**
- * Test `input` with the given `regex`. This is used by the main
- * `picomatch()` function to test the input string.
- *
- * ```js
- * const picomatch = require('picomatch');
- * // picomatch.test(input, regex[, options]);
- *
- * console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
- * // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
- * ```
- * @param {String} `input` String to test.
- * @param {RegExp} `regex`
- * @return {Object} Returns an object with matching info.
- * @api public
- */
-
-picomatch.test = (input, regex, options, { glob, posix } = {}) => {
-  if (typeof input !== 'string') {
-    throw new TypeError('Expected input to be a string');
-  }
-
-  if (input === '') {
-    return { isMatch: false, output: '' };
-  }
-
-  const opts = options || {};
-  const format = opts.format || (posix ? utils.toPosixSlashes : null);
-  let match = input === glob;
-  let output = (match && format) ? format(input) : input;
-
-  if (match === false) {
-    output = format ? format(input) : input;
-    match = output === glob;
-  }
-
-  if (match === false || opts.capture === true) {
-    if (opts.matchBase === true || opts.basename === true) {
-      match = picomatch.matchBase(input, regex, options, posix);
-    } else {
-      match = regex.exec(output);
-    }
-  }
-
-  return { isMatch: Boolean(match), match, output };
-};
-
-/**
- * Match the basename of a filepath.
- *
- * ```js
- * const picomatch = require('picomatch');
- * // picomatch.matchBase(input, glob[, options]);
- * console.log(picomatch.matchBase('foo/bar.js', '*.js'); // true
- * ```
- * @param {String} `input` String to test.
- * @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
- * @return {Boolean}
- * @api public
- */
-
-picomatch.matchBase = (input, glob, options) => {
-  const regex = glob instanceof RegExp ? glob : picomatch.makeRe(glob, options);
-  return regex.test(utils.basename(input));
-};
-
-/**
- * Returns true if **any** of the given glob `patterns` match the specified `string`.
- *
- * ```js
- * const picomatch = require('picomatch');
- * // picomatch.isMatch(string, patterns[, options]);
- *
- * console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
- * console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
- * ```
- * @param {String|Array} str The string to test.
- * @param {String|Array} patterns One or more glob patterns to use for matching.
- * @param {Object} [options] See available [options](#options).
- * @return {Boolean} Returns true if any patterns match `str`
- * @api public
- */
-
-picomatch.isMatch = (str, patterns, options) => picomatch(patterns, options)(str);
-
-/**
- * Parse a glob pattern to create the source string for a regular
- * expression.
- *
- * ```js
- * const picomatch = require('picomatch');
- * const result = picomatch.parse(pattern[, options]);
- * ```
- * @param {String} `pattern`
- * @param {Object} `options`
- * @return {Object} Returns an object with useful properties and output to be used as a regex source string.
- * @api public
- */
-
-picomatch.parse = (pattern, options) => {
-  if (Array.isArray(pattern)) return pattern.map(p => picomatch.parse(p, options));
-  return parse(pattern, { ...options, fastpaths: false });
-};
-
-/**
- * Scan a glob pattern to separate the pattern into segments.
- *
- * ```js
- * const picomatch = require('picomatch');
- * // picomatch.scan(input[, options]);
- *
- * const result = picomatch.scan('!./foo/*.js');
- * console.log(result);
- * { prefix: '!./',
- *   input: '!./foo/*.js',
- *   start: 3,
- *   base: 'foo',
- *   glob: '*.js',
- *   isBrace: false,
- *   isBracket: false,
- *   isGlob: true,
- *   isExtglob: false,
- *   isGlobstar: false,
- *   negated: true }
- * ```
- * @param {String} `input` Glob pattern to scan.
- * @param {Object} `options`
- * @return {Object} Returns an object with
- * @api public
- */
-
-picomatch.scan = (input, options) => scan(input, options);
-
-/**
- * Compile a regular expression from the `state` object returned by the
- * [parse()](#parse) method.
- *
- * @param {Object} `state`
- * @param {Object} `options`
- * @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
- * @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
- * @return {RegExp}
- * @api public
- */
-
-picomatch.compileRe = (state, options, returnOutput = false, returnState = false) => {
-  if (returnOutput === true) {
-    return state.output;
-  }
-
-  const opts = options || {};
-  const prepend = opts.contains ? '' : '^';
-  const append = opts.contains ? '' : '$';
-
-  let source = `${prepend}(?:${state.output})${append}`;
-  if (state && state.negated === true) {
-    source = `^(?!${source}).*$`;
-  }
-
-  const regex = picomatch.toRegex(source, options);
-  if (returnState === true) {
-    regex.state = state;
-  }
-
-  return regex;
-};
-
-/**
- * Create a regular expression from a parsed glob pattern.
- *
- * ```js
- * const picomatch = require('picomatch');
- * const state = picomatch.parse('*.js');
- * // picomatch.compileRe(state[, options]);
- *
- * console.log(picomatch.compileRe(state));
- * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
- * ```
- * @param {String} `state` The object returned from the `.parse` method.
- * @param {Object} `options`
- * @param {Boolean} `returnOutput` Implementors may use this argument to return the compiled output, instead of a regular expression. This is not exposed on the options to prevent end-users from mutating the result.
- * @param {Boolean} `returnState` Implementors may use this argument to return the state from the parsed glob with the returned regular expression.
- * @return {RegExp} Returns a regex created from the given pattern.
- * @api public
- */
-
-picomatch.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
-  if (!input || typeof input !== 'string') {
-    throw new TypeError('Expected a non-empty string');
-  }
-
-  let parsed = { negated: false, fastpaths: true };
-
-  if (options.fastpaths !== false && (input[0] === '.' || input[0] === '*')) {
-    parsed.output = parse.fastpaths(input, options);
-  }
-
-  if (!parsed.output) {
-    parsed = parse(input, options);
-  }
-
-  return picomatch.compileRe(parsed, options, returnOutput, returnState);
-};
-
-/**
- * Create a regular expression from the given regex source string.
- *
- * ```js
- * const picomatch = require('picomatch');
- * // picomatch.toRegex(source[, options]);
- *
- * const { output } = picomatch.parse('*.js');
- * console.log(picomatch.toRegex(output));
- * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
- * ```
- * @param {String} `source` Regular expression source string.
- * @param {Object} `options`
- * @return {RegExp}
- * @api public
- */
-
-picomatch.toRegex = (source, options) => {
-  try {
-    const opts = options || {};
-    return new RegExp(source, opts.flags || (opts.nocase ? 'i' : ''));
-  } catch (err) {
-    if (options && options.debug === true) throw err;
-    return /$^/;
-  }
-};
-
-/**
- * Picomatch constants.
- * @return {Object}
- */
-
-picomatch.constants = constants;
-
-/**
- * Expose "picomatch"
- */
-
-module.exports = picomatch;
diff --git a/node_modules/picomatch/lib/scan.js b/node_modules/picomatch/lib/scan.js
deleted file mode 100644
index e59cd7a1357b1..0000000000000
--- a/node_modules/picomatch/lib/scan.js
+++ /dev/null
@@ -1,391 +0,0 @@
-'use strict';
-
-const utils = require('./utils');
-const {
-  CHAR_ASTERISK,             /* * */
-  CHAR_AT,                   /* @ */
-  CHAR_BACKWARD_SLASH,       /* \ */
-  CHAR_COMMA,                /* , */
-  CHAR_DOT,                  /* . */
-  CHAR_EXCLAMATION_MARK,     /* ! */
-  CHAR_FORWARD_SLASH,        /* / */
-  CHAR_LEFT_CURLY_BRACE,     /* { */
-  CHAR_LEFT_PARENTHESES,     /* ( */
-  CHAR_LEFT_SQUARE_BRACKET,  /* [ */
-  CHAR_PLUS,                 /* + */
-  CHAR_QUESTION_MARK,        /* ? */
-  CHAR_RIGHT_CURLY_BRACE,    /* } */
-  CHAR_RIGHT_PARENTHESES,    /* ) */
-  CHAR_RIGHT_SQUARE_BRACKET  /* ] */
-} = require('./constants');
-
-const isPathSeparator = code => {
-  return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
-};
-
-const depth = token => {
-  if (token.isPrefix !== true) {
-    token.depth = token.isGlobstar ? Infinity : 1;
-  }
-};
-
-/**
- * Quickly scans a glob pattern and returns an object with a handful of
- * useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
- * `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
- * with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
- *
- * ```js
- * const pm = require('picomatch');
- * console.log(pm.scan('foo/bar/*.js'));
- * { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
- * ```
- * @param {String} `str`
- * @param {Object} `options`
- * @return {Object} Returns an object with tokens and regex source string.
- * @api public
- */
-
-const scan = (input, options) => {
-  const opts = options || {};
-
-  const length = input.length - 1;
-  const scanToEnd = opts.parts === true || opts.scanToEnd === true;
-  const slashes = [];
-  const tokens = [];
-  const parts = [];
-
-  let str = input;
-  let index = -1;
-  let start = 0;
-  let lastIndex = 0;
-  let isBrace = false;
-  let isBracket = false;
-  let isGlob = false;
-  let isExtglob = false;
-  let isGlobstar = false;
-  let braceEscaped = false;
-  let backslashes = false;
-  let negated = false;
-  let negatedExtglob = false;
-  let finished = false;
-  let braces = 0;
-  let prev;
-  let code;
-  let token = { value: '', depth: 0, isGlob: false };
-
-  const eos = () => index >= length;
-  const peek = () => str.charCodeAt(index + 1);
-  const advance = () => {
-    prev = code;
-    return str.charCodeAt(++index);
-  };
-
-  while (index < length) {
-    code = advance();
-    let next;
-
-    if (code === CHAR_BACKWARD_SLASH) {
-      backslashes = token.backslashes = true;
-      code = advance();
-
-      if (code === CHAR_LEFT_CURLY_BRACE) {
-        braceEscaped = true;
-      }
-      continue;
-    }
-
-    if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
-      braces++;
-
-      while (eos() !== true && (code = advance())) {
-        if (code === CHAR_BACKWARD_SLASH) {
-          backslashes = token.backslashes = true;
-          advance();
-          continue;
-        }
-
-        if (code === CHAR_LEFT_CURLY_BRACE) {
-          braces++;
-          continue;
-        }
-
-        if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
-          isBrace = token.isBrace = true;
-          isGlob = token.isGlob = true;
-          finished = true;
-
-          if (scanToEnd === true) {
-            continue;
-          }
-
-          break;
-        }
-
-        if (braceEscaped !== true && code === CHAR_COMMA) {
-          isBrace = token.isBrace = true;
-          isGlob = token.isGlob = true;
-          finished = true;
-
-          if (scanToEnd === true) {
-            continue;
-          }
-
-          break;
-        }
-
-        if (code === CHAR_RIGHT_CURLY_BRACE) {
-          braces--;
-
-          if (braces === 0) {
-            braceEscaped = false;
-            isBrace = token.isBrace = true;
-            finished = true;
-            break;
-          }
-        }
-      }
-
-      if (scanToEnd === true) {
-        continue;
-      }
-
-      break;
-    }
-
-    if (code === CHAR_FORWARD_SLASH) {
-      slashes.push(index);
-      tokens.push(token);
-      token = { value: '', depth: 0, isGlob: false };
-
-      if (finished === true) continue;
-      if (prev === CHAR_DOT && index === (start + 1)) {
-        start += 2;
-        continue;
-      }
-
-      lastIndex = index + 1;
-      continue;
-    }
-
-    if (opts.noext !== true) {
-      const isExtglobChar = code === CHAR_PLUS
-        || code === CHAR_AT
-        || code === CHAR_ASTERISK
-        || code === CHAR_QUESTION_MARK
-        || code === CHAR_EXCLAMATION_MARK;
-
-      if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
-        isGlob = token.isGlob = true;
-        isExtglob = token.isExtglob = true;
-        finished = true;
-        if (code === CHAR_EXCLAMATION_MARK && index === start) {
-          negatedExtglob = true;
-        }
-
-        if (scanToEnd === true) {
-          while (eos() !== true && (code = advance())) {
-            if (code === CHAR_BACKWARD_SLASH) {
-              backslashes = token.backslashes = true;
-              code = advance();
-              continue;
-            }
-
-            if (code === CHAR_RIGHT_PARENTHESES) {
-              isGlob = token.isGlob = true;
-              finished = true;
-              break;
-            }
-          }
-          continue;
-        }
-        break;
-      }
-    }
-
-    if (code === CHAR_ASTERISK) {
-      if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
-      isGlob = token.isGlob = true;
-      finished = true;
-
-      if (scanToEnd === true) {
-        continue;
-      }
-      break;
-    }
-
-    if (code === CHAR_QUESTION_MARK) {
-      isGlob = token.isGlob = true;
-      finished = true;
-
-      if (scanToEnd === true) {
-        continue;
-      }
-      break;
-    }
-
-    if (code === CHAR_LEFT_SQUARE_BRACKET) {
-      while (eos() !== true && (next = advance())) {
-        if (next === CHAR_BACKWARD_SLASH) {
-          backslashes = token.backslashes = true;
-          advance();
-          continue;
-        }
-
-        if (next === CHAR_RIGHT_SQUARE_BRACKET) {
-          isBracket = token.isBracket = true;
-          isGlob = token.isGlob = true;
-          finished = true;
-          break;
-        }
-      }
-
-      if (scanToEnd === true) {
-        continue;
-      }
-
-      break;
-    }
-
-    if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
-      negated = token.negated = true;
-      start++;
-      continue;
-    }
-
-    if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
-      isGlob = token.isGlob = true;
-
-      if (scanToEnd === true) {
-        while (eos() !== true && (code = advance())) {
-          if (code === CHAR_LEFT_PARENTHESES) {
-            backslashes = token.backslashes = true;
-            code = advance();
-            continue;
-          }
-
-          if (code === CHAR_RIGHT_PARENTHESES) {
-            finished = true;
-            break;
-          }
-        }
-        continue;
-      }
-      break;
-    }
-
-    if (isGlob === true) {
-      finished = true;
-
-      if (scanToEnd === true) {
-        continue;
-      }
-
-      break;
-    }
-  }
-
-  if (opts.noext === true) {
-    isExtglob = false;
-    isGlob = false;
-  }
-
-  let base = str;
-  let prefix = '';
-  let glob = '';
-
-  if (start > 0) {
-    prefix = str.slice(0, start);
-    str = str.slice(start);
-    lastIndex -= start;
-  }
-
-  if (base && isGlob === true && lastIndex > 0) {
-    base = str.slice(0, lastIndex);
-    glob = str.slice(lastIndex);
-  } else if (isGlob === true) {
-    base = '';
-    glob = str;
-  } else {
-    base = str;
-  }
-
-  if (base && base !== '' && base !== '/' && base !== str) {
-    if (isPathSeparator(base.charCodeAt(base.length - 1))) {
-      base = base.slice(0, -1);
-    }
-  }
-
-  if (opts.unescape === true) {
-    if (glob) glob = utils.removeBackslashes(glob);
-
-    if (base && backslashes === true) {
-      base = utils.removeBackslashes(base);
-    }
-  }
-
-  const state = {
-    prefix,
-    input,
-    start,
-    base,
-    glob,
-    isBrace,
-    isBracket,
-    isGlob,
-    isExtglob,
-    isGlobstar,
-    negated,
-    negatedExtglob
-  };
-
-  if (opts.tokens === true) {
-    state.maxDepth = 0;
-    if (!isPathSeparator(code)) {
-      tokens.push(token);
-    }
-    state.tokens = tokens;
-  }
-
-  if (opts.parts === true || opts.tokens === true) {
-    let prevIndex;
-
-    for (let idx = 0; idx < slashes.length; idx++) {
-      const n = prevIndex ? prevIndex + 1 : start;
-      const i = slashes[idx];
-      const value = input.slice(n, i);
-      if (opts.tokens) {
-        if (idx === 0 && start !== 0) {
-          tokens[idx].isPrefix = true;
-          tokens[idx].value = prefix;
-        } else {
-          tokens[idx].value = value;
-        }
-        depth(tokens[idx]);
-        state.maxDepth += tokens[idx].depth;
-      }
-      if (idx !== 0 || value !== '') {
-        parts.push(value);
-      }
-      prevIndex = i;
-    }
-
-    if (prevIndex && prevIndex + 1 < input.length) {
-      const value = input.slice(prevIndex + 1);
-      parts.push(value);
-
-      if (opts.tokens) {
-        tokens[tokens.length - 1].value = value;
-        depth(tokens[tokens.length - 1]);
-        state.maxDepth += tokens[tokens.length - 1].depth;
-      }
-    }
-
-    state.slashes = slashes;
-    state.parts = parts;
-  }
-
-  return state;
-};
-
-module.exports = scan;
diff --git a/node_modules/picomatch/lib/utils.js b/node_modules/picomatch/lib/utils.js
deleted file mode 100644
index 9c97cae222ca8..0000000000000
--- a/node_modules/picomatch/lib/utils.js
+++ /dev/null
@@ -1,72 +0,0 @@
-/*global navigator*/
-'use strict';
-
-const {
-  REGEX_BACKSLASH,
-  REGEX_REMOVE_BACKSLASH,
-  REGEX_SPECIAL_CHARS,
-  REGEX_SPECIAL_CHARS_GLOBAL
-} = require('./constants');
-
-exports.isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
-exports.hasRegexChars = str => REGEX_SPECIAL_CHARS.test(str);
-exports.isRegexChar = str => str.length === 1 && exports.hasRegexChars(str);
-exports.escapeRegex = str => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, '\\$1');
-exports.toPosixSlashes = str => str.replace(REGEX_BACKSLASH, '/');
-
-exports.isWindows = () => {
-  if (typeof navigator !== 'undefined' && navigator.platform) {
-    const platform = navigator.platform.toLowerCase();
-    return platform === 'win32' || platform === 'windows';
-  }
-
-  if (typeof process !== 'undefined' && process.platform) {
-    return process.platform === 'win32';
-  }
-
-  return false;
-};
-
-exports.removeBackslashes = str => {
-  return str.replace(REGEX_REMOVE_BACKSLASH, match => {
-    return match === '\\' ? '' : match;
-  });
-};
-
-exports.escapeLast = (input, char, lastIdx) => {
-  const idx = input.lastIndexOf(char, lastIdx);
-  if (idx === -1) return input;
-  if (input[idx - 1] === '\\') return exports.escapeLast(input, char, idx - 1);
-  return `${input.slice(0, idx)}\\${input.slice(idx)}`;
-};
-
-exports.removePrefix = (input, state = {}) => {
-  let output = input;
-  if (output.startsWith('./')) {
-    output = output.slice(2);
-    state.prefix = './';
-  }
-  return output;
-};
-
-exports.wrapOutput = (input, state = {}, options = {}) => {
-  const prepend = options.contains ? '' : '^';
-  const append = options.contains ? '' : '$';
-
-  let output = `${prepend}(?:${input})${append}`;
-  if (state.negated === true) {
-    output = `(?:^(?!${output}).*$)`;
-  }
-  return output;
-};
-
-exports.basename = (path, { windows } = {}) => {
-  const segs = path.split(windows ? /[\\/]/ : '/');
-  const last = segs[segs.length - 1];
-
-  if (last === '') {
-    return segs[segs.length - 2];
-  }
-
-  return last;
-};
diff --git a/node_modules/picomatch/package.json b/node_modules/picomatch/package.json
deleted file mode 100644
index 372e27e05f412..0000000000000
--- a/node_modules/picomatch/package.json
+++ /dev/null
@@ -1,83 +0,0 @@
-{
-  "name": "picomatch",
-  "description": "Blazing fast and accurate glob matcher written in JavaScript, with no dependencies and full support for standard and extended Bash glob features, including braces, extglobs, POSIX brackets, and regular expressions.",
-  "version": "4.0.3",
-  "homepage": "https://github.com/micromatch/picomatch",
-  "author": "Jon Schlinkert (https://github.com/jonschlinkert)",
-  "funding": "https://github.com/sponsors/jonschlinkert",
-  "repository": "micromatch/picomatch",
-  "bugs": {
-    "url": "https://github.com/micromatch/picomatch/issues"
-  },
-  "license": "MIT",
-  "files": [
-    "index.js",
-    "posix.js",
-    "lib"
-  ],
-  "sideEffects": false,
-  "main": "index.js",
-  "engines": {
-    "node": ">=12"
-  },
-  "scripts": {
-    "lint": "eslint --cache --cache-location node_modules/.cache/.eslintcache --report-unused-disable-directives --ignore-path .gitignore .",
-    "mocha": "mocha --reporter dot",
-    "test": "npm run lint && npm run mocha",
-    "test:ci": "npm run test:cover",
-    "test:cover": "nyc npm run mocha"
-  },
-  "devDependencies": {
-    "eslint": "^8.57.0",
-    "fill-range": "^7.0.1",
-    "gulp-format-md": "^2.0.0",
-    "mocha": "^10.4.0",
-    "nyc": "^15.1.0",
-    "time-require": "github:jonschlinkert/time-require"
-  },
-  "keywords": [
-    "glob",
-    "match",
-    "picomatch"
-  ],
-  "nyc": {
-    "reporter": [
-      "html",
-      "lcov",
-      "text-summary"
-    ]
-  },
-  "verb": {
-    "toc": {
-      "render": true,
-      "method": "preWrite",
-      "maxdepth": 3
-    },
-    "layout": "empty",
-    "tasks": [
-      "readme"
-    ],
-    "plugins": [
-      "gulp-format-md"
-    ],
-    "lint": {
-      "reflinks": true
-    },
-    "related": {
-      "list": [
-        "braces",
-        "micromatch"
-      ]
-    },
-    "reflinks": [
-      "braces",
-      "expand-brackets",
-      "extglob",
-      "fill-range",
-      "micromatch",
-      "minimatch",
-      "nanomatch",
-      "picomatch"
-    ]
-  }
-}
diff --git a/node_modules/picomatch/posix.js b/node_modules/picomatch/posix.js
deleted file mode 100644
index d2f2bc59d0ac7..0000000000000
--- a/node_modules/picomatch/posix.js
+++ /dev/null
@@ -1,3 +0,0 @@
-'use strict';
-
-module.exports = require('./lib/picomatch');
diff --git a/package-lock.json b/package-lock.json
index 84e98ec4ffc07..97d0cc81e6fae 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -161,7 +161,7 @@
         "@npmcli/git": "^7.0.0",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "@tufjs/repo-mock": "^4.0.0",
         "ajv": "^8.12.0",
         "ajv-formats": "^2.1.1",
@@ -188,7 +188,7 @@
       "devDependencies": {
         "@isaacs/string-locale-compare": "^1.1.0",
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "front-matter": "^4.0.2",
         "ignore-walk": "^8.0.0",
         "jsdom": "^24.0.0",
@@ -212,7 +212,7 @@
       "license": "ISC",
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -226,7 +226,7 @@
       "devDependencies": {
         "@npmcli/arborist": "^9.1.2",
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "json-stringify-safe": "^5.0.1",
         "nock": "^13.3.3",
         "npm-package-arg": "^13.0.0",
@@ -239,8 +239,6 @@
     },
     "node_modules/@actions/core": {
       "version": "1.11.1",
-      "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz",
-      "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -250,8 +248,6 @@
     },
     "node_modules/@actions/exec": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz",
-      "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -260,8 +256,6 @@
     },
     "node_modules/@actions/http-client": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz",
-      "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -271,8 +265,6 @@
     },
     "node_modules/@actions/http-client/node_modules/undici": {
       "version": "5.29.0",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-5.29.0.tgz",
-      "integrity": "sha512-raqeBD6NQK4SkWhQzeYKd1KmIG6dllBOTt55Rmkt4HtI9mwdWtJljnrXjAFUBLTSN67HWrOIZ3EPF4kjUw80Bg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -284,15 +276,11 @@
     },
     "node_modules/@actions/io": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz",
-      "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@asamuzakjp/css-color": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-3.2.0.tgz",
-      "integrity": "sha512-K1A6z8tS3XsmCMM86xoWdn7Fkdn9m6RSVtocUrJYIwZnFVkng/PvkEoWtOWmP+Scc6saYWHWZYbndEEXxl24jw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -305,15 +293,11 @@
     },
     "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": {
       "version": "10.4.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
-      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@babel/code-frame": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz",
-      "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -327,8 +311,6 @@
     },
     "node_modules/@babel/compat-data": {
       "version": "7.28.4",
-      "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.4.tgz",
-      "integrity": "sha512-YsmSKC29MJwf0gF8Rjjrg5LQCmyh+j/nD8/eP7f+BeoQTKYqs9RoWbjGOdy0+1Ekr68RJZMUOPVQaQisnIo4Rw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -337,8 +319,6 @@
     },
     "node_modules/@babel/core": {
       "version": "7.28.4",
-      "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.28.4.tgz",
-      "integrity": "sha512-2BCOP7TN8M+gVDj7/ht3hsaO/B/n5oDbiAyyvnRlNOs+u1o+JWNYTQrmpuNp1/Wq2gcFrI01JAW+paEKDMx/CA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -368,15 +348,11 @@
     },
     "node_modules/@babel/core/node_modules/convert-source-map": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz",
-      "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@babel/core/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -385,8 +361,6 @@
     },
     "node_modules/@babel/generator": {
       "version": "7.28.3",
-      "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz",
-      "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -402,8 +376,6 @@
     },
     "node_modules/@babel/helper-compilation-targets": {
       "version": "7.27.2",
-      "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz",
-      "integrity": "sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -419,8 +391,6 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz",
-      "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -429,8 +399,6 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -439,15 +407,11 @@
     },
     "node_modules/@babel/helper-compilation-targets/node_modules/yallist": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
-      "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/@babel/helper-globals": {
       "version": "7.28.0",
-      "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz",
-      "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -456,8 +420,6 @@
     },
     "node_modules/@babel/helper-module-imports": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz",
-      "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -470,8 +432,6 @@
     },
     "node_modules/@babel/helper-module-transforms": {
       "version": "7.28.3",
-      "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz",
-      "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -488,8 +448,6 @@
     },
     "node_modules/@babel/helper-string-parser": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz",
-      "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -498,8 +456,6 @@
     },
     "node_modules/@babel/helper-validator-identifier": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.27.1.tgz",
-      "integrity": "sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -508,8 +464,6 @@
     },
     "node_modules/@babel/helper-validator-option": {
       "version": "7.27.1",
-      "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz",
-      "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -518,8 +472,6 @@
     },
     "node_modules/@babel/helpers": {
       "version": "7.28.4",
-      "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz",
-      "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -532,8 +484,6 @@
     },
     "node_modules/@babel/parser": {
       "version": "7.28.4",
-      "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz",
-      "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -548,8 +498,6 @@
     },
     "node_modules/@babel/template": {
       "version": "7.27.2",
-      "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz",
-      "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -563,8 +511,6 @@
     },
     "node_modules/@babel/traverse": {
       "version": "7.28.4",
-      "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz",
-      "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -582,8 +528,6 @@
     },
     "node_modules/@babel/types": {
       "version": "7.28.4",
-      "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz",
-      "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -596,8 +540,6 @@
     },
     "node_modules/@colors/colors": {
       "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/@colors/colors/-/colors-1.5.0.tgz",
-      "integrity": "sha512-ooWCrlZP11i8GImSjTHYHLkvFDP48nS4+204nGb1RiX/WXYHmJA2III9/e2DWVabCESdW7hBAEzHRqUn9OUVvQ==",
       "dev": true,
       "license": "MIT",
       "optional": true,
@@ -607,8 +549,6 @@
     },
     "node_modules/@commitlint/cli": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/cli/-/cli-19.8.1.tgz",
-      "integrity": "sha512-LXUdNIkspyxrlV6VDHWBmCZRtkEVRpBKxi2Gtw3J54cGWhLCTouVD/Q6ZSaSvd2YaDObWK8mDjrz3TIKtaQMAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -629,8 +569,6 @@
     },
     "node_modules/@commitlint/config-conventional": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/config-conventional/-/config-conventional-19.8.1.tgz",
-      "integrity": "sha512-/AZHJL6F6B/G959CsMAzrPKKZjeEiAVifRyEwXxcT6qtqbPwGw+iQxmNS+Bu+i09OCtdNRW6pNpBvgPrtMr9EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -643,8 +581,6 @@
     },
     "node_modules/@commitlint/config-validator": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/config-validator/-/config-validator-19.8.1.tgz",
-      "integrity": "sha512-0jvJ4u+eqGPBIzzSdqKNX1rvdbSU1lPNYlfQQRIFnBgLy26BtC0cFnr7c/AyuzExMxWsMOte6MkTi9I3SQ3iGQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -657,8 +593,6 @@
     },
     "node_modules/@commitlint/ensure": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/ensure/-/ensure-19.8.1.tgz",
-      "integrity": "sha512-mXDnlJdvDzSObafjYrOSvZBwkD01cqB4gbnnFuVyNpGUM5ijwU/r/6uqUmBXAAOKRfyEjpkGVZxaDsCVnHAgyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -675,8 +609,6 @@
     },
     "node_modules/@commitlint/execute-rule": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/execute-rule/-/execute-rule-19.8.1.tgz",
-      "integrity": "sha512-YfJyIqIKWI64Mgvn/sE7FXvVMQER/Cd+s3hZke6cI1xgNT/f6ZAz5heND0QtffH+KbcqAwXDEE1/5niYayYaQA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -685,8 +617,6 @@
     },
     "node_modules/@commitlint/format": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/format/-/format-19.8.1.tgz",
-      "integrity": "sha512-kSJj34Rp10ItP+Eh9oCItiuN/HwGQMXBnIRk69jdOwEW9llW9FlyqcWYbHPSGofmjsqeoxa38UaEA5tsbm2JWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -699,8 +629,6 @@
     },
     "node_modules/@commitlint/is-ignored": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/is-ignored/-/is-ignored-19.8.1.tgz",
-      "integrity": "sha512-AceOhEhekBUQ5dzrVhDDsbMaY5LqtN8s1mqSnT2Kz1ERvVZkNihrs3Sfk1Je/rxRNbXYFzKZSHaPsEJJDJV8dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -713,8 +641,6 @@
     },
     "node_modules/@commitlint/lint": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/lint/-/lint-19.8.1.tgz",
-      "integrity": "sha512-52PFbsl+1EvMuokZXLRlOsdcLHf10isTPlWwoY1FQIidTsTvjKXVXYb7AvtpWkDzRO2ZsqIgPK7bI98x8LRUEw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -729,8 +655,6 @@
     },
     "node_modules/@commitlint/load": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/load/-/load-19.8.1.tgz",
-      "integrity": "sha512-9V99EKG3u7z+FEoe4ikgq7YGRCSukAcvmKQuTtUyiYPnOd9a2/H9Ak1J9nJA1HChRQp9OA/sIKPugGS+FK/k1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -751,8 +675,6 @@
     },
     "node_modules/@commitlint/message": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/message/-/message-19.8.1.tgz",
-      "integrity": "sha512-+PMLQvjRXiU+Ae0Wc+p99EoGEutzSXFVwQfa3jRNUZLNW5odZAyseb92OSBTKCu+9gGZiJASt76Cj3dLTtcTdg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -761,8 +683,6 @@
     },
     "node_modules/@commitlint/parse": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/parse/-/parse-19.8.1.tgz",
-      "integrity": "sha512-mmAHYcMBmAgJDKWdkjIGq50X4yB0pSGpxyOODwYmoexxxiUCy5JJT99t1+PEMK7KtsCtzuWYIAXYAiKR+k+/Jw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -776,8 +696,6 @@
     },
     "node_modules/@commitlint/read": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/read/-/read-19.8.1.tgz",
-      "integrity": "sha512-03Jbjb1MqluaVXKHKRuGhcKWtSgh3Jizqy2lJCRbRrnWpcM06MYm8th59Xcns8EqBYvo0Xqb+2DoZFlga97uXQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -793,8 +711,6 @@
     },
     "node_modules/@commitlint/resolve-extends": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/resolve-extends/-/resolve-extends-19.8.1.tgz",
-      "integrity": "sha512-GM0mAhFk49I+T/5UCYns5ayGStkTt4XFFrjjf0L4S26xoMTSkdCf9ZRO8en1kuopC4isDFuEm7ZOm/WRVeElVg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -811,8 +727,6 @@
     },
     "node_modules/@commitlint/rules": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/rules/-/rules-19.8.1.tgz",
-      "integrity": "sha512-Hnlhd9DyvGiGwjfjfToMi1dsnw1EXKGJNLTcsuGORHz6SS9swRgkBsou33MQ2n51/boIDrbsg4tIBbRpEWK2kw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -827,8 +741,6 @@
     },
     "node_modules/@commitlint/to-lines": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/to-lines/-/to-lines-19.8.1.tgz",
-      "integrity": "sha512-98Mm5inzbWTKuZQr2aW4SReY6WUukdWXuZhrqf1QdKPZBCCsXuG87c+iP0bwtD6DBnmVVQjgp4whoHRVixyPBg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -837,8 +749,6 @@
     },
     "node_modules/@commitlint/top-level": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/top-level/-/top-level-19.8.1.tgz",
-      "integrity": "sha512-Ph8IN1IOHPSDhURCSXBz44+CIu+60duFwRsg6HqaISFHQHbmBtxVw4ZrFNIYUzEP7WwrNPxa2/5qJ//NK1FGcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -850,8 +760,6 @@
     },
     "node_modules/@commitlint/types": {
       "version": "19.8.1",
-      "resolved": "https://registry.npmjs.org/@commitlint/types/-/types-19.8.1.tgz",
-      "integrity": "sha512-/yCrWGCoA1SVKOks25EGadP9Pnj0oAIHGpl2wH2M2Y46dPM2ueb8wyCVOD7O3WCTkaJ0IkKvzhl1JY7+uCT2Dw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -875,8 +783,6 @@
     },
     "node_modules/@csstools/color-helpers": {
       "version": "5.1.0",
-      "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz",
-      "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==",
       "dev": true,
       "funding": [
         {
@@ -895,8 +801,6 @@
     },
     "node_modules/@csstools/css-calc": {
       "version": "2.1.4",
-      "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
-      "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
       "dev": true,
       "funding": [
         {
@@ -919,8 +823,6 @@
     },
     "node_modules/@csstools/css-color-parser": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz",
-      "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==",
       "dev": true,
       "funding": [
         {
@@ -947,8 +849,6 @@
     },
     "node_modules/@csstools/css-parser-algorithms": {
       "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
-      "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
       "dev": true,
       "funding": [
         {
@@ -970,8 +870,6 @@
     },
     "node_modules/@csstools/css-tokenizer": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
-      "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
       "dev": true,
       "funding": [
         {
@@ -990,8 +888,6 @@
     },
     "node_modules/@eslint-community/eslint-utils": {
       "version": "4.9.0",
-      "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz",
-      "integrity": "sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1010,8 +906,6 @@
     },
     "node_modules/@eslint-community/regexpp": {
       "version": "4.12.1",
-      "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
-      "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1021,8 +915,6 @@
     },
     "node_modules/@eslint/eslintrc": {
       "version": "2.1.4",
-      "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.1.4.tgz",
-      "integrity": "sha512-269Z39MS6wVJtsoUl10L60WdkhJVdPG24Q4eZTH3nnF6lpvSShEK3wQjDX9JRWAUPvPh7COouPpU9IrqaZFvtQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1046,8 +938,6 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/ajv": {
       "version": "6.12.6",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
-      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1064,8 +954,6 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1076,16 +964,12 @@
     },
     "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
-      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/@eslint/eslintrc/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -1098,8 +982,6 @@
     },
     "node_modules/@eslint/js": {
       "version": "8.57.1",
-      "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz",
-      "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1109,8 +991,6 @@
     },
     "node_modules/@fastify/busboy": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz",
-      "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1118,20 +998,194 @@
       }
     },
     "node_modules/@google-automations/git-file-utils": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-2.0.0.tgz",
-      "integrity": "sha512-F6h8npq7rt60fr3W+cil/zXbIiF9Hj8JzaN3LNh7uBIJpsWnjL9ObV84qW/345boMheDdo/n+cItmvCfsn0lLA==",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-3.0.0.tgz",
+      "integrity": "sha512-e+WLoKR0TchIhKsSDOnd/su171eXKAAdLpP2tS825UAloTgfYus53kW8uKoVj9MAsMjXGXsJ2s1ASgjq81xVdA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@octokit/rest": "^19.0.7",
-        "@octokit/types": "^9.0.0",
+        "@octokit/rest": "^20.1.1",
+        "@octokit/types": "^13.0.0",
         "minimatch": "^5.1.0"
       },
       "engines": {
         "node": ">= 18"
       }
     },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/auth-token": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
+      "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/core": {
+      "version": "5.2.2",
+      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz",
+      "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/auth-token": "^4.0.0",
+        "@octokit/graphql": "^7.1.0",
+        "@octokit/request": "^8.4.1",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.0.0",
+        "before-after-hook": "^2.2.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/endpoint": {
+      "version": "9.0.6",
+      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
+      "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.1.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/graphql": {
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
+      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/request": "^8.4.1",
+        "@octokit/types": "^13.0.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/openapi-types": {
+      "version": "24.2.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/plugin-paginate-rest": {
+      "version": "11.4.4-cjs.2",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz",
+      "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.7.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "5"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/plugin-request-log": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz",
+      "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "5"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/plugin-rest-endpoint-methods": {
+      "version": "13.3.2-cjs.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz",
+      "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.8.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "^5"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/request": {
+      "version": "8.4.1",
+      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz",
+      "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/endpoint": "^9.0.6",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.1.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/request-error": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz",
+      "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.1.0",
+        "deprecation": "^2.0.0",
+        "once": "^1.4.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/rest": {
+      "version": "20.1.2",
+      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.1.2.tgz",
+      "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/core": "^5.0.2",
+        "@octokit/plugin-paginate-rest": "11.4.4-cjs.2",
+        "@octokit/plugin-request-log": "^4.0.0",
+        "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/@octokit/types": {
+      "version": "13.10.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/openapi-types": "^24.2.0"
+      }
+    },
+    "node_modules/@google-automations/git-file-utils/node_modules/before-after-hook": {
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
+      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
+      "dev": true,
+      "license": "Apache-2.0"
+    },
     "node_modules/@google-automations/git-file-utils/node_modules/minimatch": {
       "version": "5.1.6",
       "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
@@ -1145,11 +1199,15 @@
         "node": ">=10"
       }
     },
+    "node_modules/@google-automations/git-file-utils/node_modules/universal-user-agent": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/@humanwhocodes/config-array": {
       "version": "0.13.0",
-      "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz",
-      "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==",
-      "deprecated": "Use @eslint/config-array instead",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -1164,8 +1222,6 @@
     },
     "node_modules/@humanwhocodes/config-array/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1176,8 +1232,6 @@
     },
     "node_modules/@humanwhocodes/config-array/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -1190,8 +1244,6 @@
     },
     "node_modules/@humanwhocodes/module-importer": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
-      "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -1205,9 +1257,6 @@
     },
     "node_modules/@humanwhocodes/object-schema": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/@humanwhocodes/object-schema/-/object-schema-2.0.3.tgz",
-      "integrity": "sha512-93zYdMES/c1D69yZiKDBj0V24vqNzB/koF26KPaagAfd3P/4gUlh3Dys5ogAK+Exi9QyzlD8x/08Zt7wIKcDcA==",
-      "deprecated": "Use @eslint/object-schema instead",
       "dev": true,
       "license": "BSD-3-Clause",
       "peer": true
@@ -1318,8 +1367,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz",
-      "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -1335,8 +1382,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/argparse": {
       "version": "1.0.10",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
-      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1345,8 +1390,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1359,8 +1402,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": {
       "version": "3.14.1",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
-      "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1373,8 +1414,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1386,8 +1425,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1402,8 +1439,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1415,8 +1450,6 @@
     },
     "node_modules/@istanbuljs/load-nyc-config/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1425,8 +1458,6 @@
     },
     "node_modules/@istanbuljs/schema": {
       "version": "0.1.3",
-      "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz",
-      "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1435,8 +1466,6 @@
     },
     "node_modules/@jridgewell/gen-mapping": {
       "version": "0.3.13",
-      "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
-      "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1446,8 +1475,6 @@
     },
     "node_modules/@jridgewell/remapping": {
       "version": "2.3.5",
-      "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz",
-      "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1457,8 +1484,6 @@
     },
     "node_modules/@jridgewell/resolve-uri": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz",
-      "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1467,15 +1492,11 @@
     },
     "node_modules/@jridgewell/sourcemap-codec": {
       "version": "1.5.5",
-      "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz",
-      "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@jridgewell/trace-mapping": {
       "version": "0.3.31",
-      "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz",
-      "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1511,8 +1532,6 @@
     },
     "node_modules/@nodelib/fs.scandir": {
       "version": "2.1.5",
-      "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz",
-      "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1526,8 +1545,6 @@
     },
     "node_modules/@nodelib/fs.stat": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz",
-      "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1537,8 +1554,6 @@
     },
     "node_modules/@nodelib/fs.walk": {
       "version": "1.2.8",
-      "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz",
-      "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -1579,8 +1594,6 @@
     },
     "node_modules/@npmcli/eslint-config": {
       "version": "5.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/eslint-config/-/eslint-config-5.1.0.tgz",
-      "integrity": "sha512-L4FAYndvARxkbTBNbsbDDkArIf8A8WmTFGVKdevJ3jd9nPzDKWiuC9TW0QtEnRsFHr5IX7G6qkRLK+drLIGoEA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -1762,9 +1775,9 @@
       "link": true
     },
     "node_modules/@npmcli/template-oss": {
-      "version": "4.24.4",
-      "resolved": "https://registry.npmjs.org/@npmcli/template-oss/-/template-oss-4.24.4.tgz",
-      "integrity": "sha512-NF6SQC2wjBTft7RM9YaILf8dSum5cjQCDnsOlQYdarNQJSxKqaePKpOEYSsy6crjz3TfZ/jrAd0M4pLT/VGc/w==",
+      "version": "4.25.1",
+      "resolved": "https://registry.npmjs.org/@npmcli/template-oss/-/template-oss-4.25.1.tgz",
+      "integrity": "sha512-odmdn3CQCUqmT5+Vjiz/UTAORc8xDVU591WLBMotGb35hfIB/zf6RbUB/sEbR1JEjIHQtjhMa6qojoo8f8LmnQ==",
       "dev": true,
       "hasInstallScript": true,
       "license": "ISC",
@@ -1776,28 +1789,27 @@
         "@commitlint/cli": "^19.0.3",
         "@commitlint/config-conventional": "^19.2.2",
         "@isaacs/string-locale-compare": "^1.1.0",
-        "@npmcli/arborist": "^7.2.1",
-        "@npmcli/git": "^6.0.0",
-        "@npmcli/map-workspaces": "^4.0.0",
-        "@npmcli/package-json": "^6.0.0",
-        "@octokit/rest": "^19.0.4",
+        "@npmcli/arborist": "^9.1.2",
+        "@npmcli/git": "^7.0.0",
+        "@npmcli/map-workspaces": "^5.0.0",
+        "@npmcli/package-json": "^7.0.0",
+        "@octokit/rest": "^22.0.0",
         "dedent": "^1.5.1",
-        "diff": "^7.0.0",
-        "glob": "^10.1.0",
+        "diff": "^8.0.2",
+        "glob": "^11.0.3",
         "handlebars": "^4.7.7",
-        "hosted-git-info": "^8.0.0",
+        "hosted-git-info": "^9.0.0",
         "ini": "^5.0.0",
         "json-parse-even-better-errors": "^4.0.0",
         "just-deep-map-values": "^1.1.1",
         "just-diff": "^6.0.0",
         "just-omit": "^2.2.0",
         "lodash": "^4.17.21",
-        "minimatch": "^9.0.2",
-        "npm-package-arg": "^12.0.0",
+        "minimatch": "^10.0.3",
+        "npm-package-arg": "^13.0.0",
         "proc-log": "^5.0.0",
-        "release-please": "16.15.0",
+        "release-please": "^17.1.1",
         "semver": "^7.3.5",
-        "undici": "^6.7.0",
         "yaml": "^2.1.1"
       },
       "bin": {
@@ -1810,1752 +1822,265 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/agent": {
-      "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-2.2.2.tgz",
-      "integrity": "sha512-OrcNPXdpSl9UX7qPVRWbmWMCSXrcDa2M9DvrbOTj7ao1S4PlqVFYv9/yLKMkrJKZ/V5A/kDBC690or307i26Og==",
+    "node_modules/@npmcli/template-oss/node_modules/diff": {
+      "version": "8.0.2",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.2.tgz",
+      "integrity": "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg==",
       "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "agent-base": "^7.1.0",
-        "http-proxy-agent": "^7.0.0",
-        "https-proxy-agent": "^7.0.1",
-        "lru-cache": "^10.0.1",
-        "socks-proxy-agent": "^8.0.3"
-      },
+      "license": "BSD-3-Clause",
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": ">=0.3.1"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist": {
-      "version": "7.5.4",
-      "resolved": "https://registry.npmjs.org/@npmcli/arborist/-/arborist-7.5.4.tgz",
-      "integrity": "sha512-nWtIc6QwwoUORCRNzKx4ypHqCk3drI+5aeYdMTQQiRCcn4lOOgfQh7WyZobGYTxXPSq1VwV53lkpN/BRlRk08g==",
+    "node_modules/@octokit/auth-token": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-6.0.0.tgz",
+      "integrity": "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==",
       "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/string-locale-compare": "^1.1.0",
-        "@npmcli/fs": "^3.1.1",
-        "@npmcli/installed-package-contents": "^2.1.0",
-        "@npmcli/map-workspaces": "^3.0.2",
-        "@npmcli/metavuln-calculator": "^7.1.1",
-        "@npmcli/name-from-folder": "^2.0.0",
-        "@npmcli/node-gyp": "^3.0.0",
-        "@npmcli/package-json": "^5.1.0",
-        "@npmcli/query": "^3.1.0",
-        "@npmcli/redact": "^2.0.0",
-        "@npmcli/run-script": "^8.1.0",
-        "bin-links": "^4.0.4",
-        "cacache": "^18.0.3",
-        "common-ancestor-path": "^1.0.1",
-        "hosted-git-info": "^7.0.2",
-        "json-parse-even-better-errors": "^3.0.2",
-        "json-stringify-nice": "^1.1.4",
-        "lru-cache": "^10.2.2",
-        "minimatch": "^9.0.4",
-        "nopt": "^7.2.1",
-        "npm-install-checks": "^6.2.0",
-        "npm-package-arg": "^11.0.2",
-        "npm-pick-manifest": "^9.0.1",
-        "npm-registry-fetch": "^17.0.1",
-        "pacote": "^18.0.6",
-        "parse-conflict-json": "^3.0.0",
-        "proc-log": "^4.2.0",
-        "proggy": "^2.0.0",
-        "promise-all-reject-late": "^1.0.0",
-        "promise-call-limit": "^3.0.1",
-        "read-package-json-fast": "^3.0.2",
-        "semver": "^7.3.7",
-        "ssri": "^10.0.6",
-        "treeverse": "^3.0.0",
-        "walk-up-path": "^3.0.1"
-      },
-      "bin": {
-        "arborist": "bin/index.js"
-      },
+      "license": "MIT",
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": ">= 20"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/git": {
-      "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
+    "node_modules/@octokit/core": {
+      "version": "7.0.4",
+      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.4.tgz",
+      "integrity": "sha512-jOT8V1Ba5BdC79sKrRWDdMT5l1R+XNHTPR6CPWzUP2EcfAcvIHZWF0eAbmRcpOOP5gVIwnqNg0C4nvh6Abc3OA==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT",
       "dependencies": {
-        "@npmcli/promise-spawn": "^7.0.0",
-        "ini": "^4.1.3",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^9.0.0",
-        "proc-log": "^4.0.0",
-        "promise-inflight": "^1.0.1",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^4.0.0"
+        "@octokit/auth-token": "^6.0.0",
+        "@octokit/graphql": "^9.0.1",
+        "@octokit/request": "^10.0.2",
+        "@octokit/request-error": "^7.0.0",
+        "@octokit/types": "^15.0.0",
+        "before-after-hook": "^4.0.0",
+        "universal-user-agent": "^7.0.0"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": ">= 20"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/map-workspaces": {
-      "version": "3.0.6",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-3.0.6.tgz",
-      "integrity": "sha512-tkYs0OYnzQm6iIRdfy+LcLBjcKuQCeE5YLb8KnrIlutJfheNaPvPpgoFEyEFgbjzl5PLZ3IA/BWAwRU0eHuQDA==",
+    "node_modules/@octokit/endpoint": {
+      "version": "11.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.0.tgz",
+      "integrity": "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT",
       "dependencies": {
-        "@npmcli/name-from-folder": "^2.0.0",
-        "glob": "^10.2.2",
-        "minimatch": "^9.0.0",
-        "read-package-json-fast": "^3.0.0"
+        "@octokit/types": "^14.0.0",
+        "universal-user-agent": "^7.0.2"
       },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": ">= 20"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/name-from-folder": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/name-from-folder/-/name-from-folder-2.0.0.tgz",
-      "integrity": "sha512-pwK+BfEBZJbKdNYpHHRTNBwBoqrN/iIMO0AiGvYsp3Hoaq0WbgGSWQR6SCldZovoDpY3yje5lkFUe6gsDgJ2vg==",
+    "node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types": {
+      "version": "25.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
+      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      "license": "MIT"
+    },
+    "node_modules/@octokit/endpoint/node_modules/@octokit/types": {
+      "version": "14.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
+      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/openapi-types": "^25.1.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/package-json": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
+    "node_modules/@octokit/graphql": {
+      "version": "9.0.1",
+      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-9.0.1.tgz",
+      "integrity": "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT",
       "dependencies": {
-        "@npmcli/git": "^5.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^7.0.0",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^6.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.5.3"
+        "@octokit/request": "^10.0.2",
+        "@octokit/types": "^14.0.0",
+        "universal-user-agent": "^7.0.0"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": ">= 20"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/@npmcli/promise-spawn": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
-      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
+    "node_modules/@octokit/graphql/node_modules/@octokit/openapi-types": {
+      "version": "25.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
+      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT"
+    },
+    "node_modules/@octokit/graphql/node_modules/@octokit/types": {
+      "version": "14.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
+      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
+      "dev": true,
+      "license": "MIT",
       "dependencies": {
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "@octokit/openapi-types": "^25.1.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
+    "node_modules/@octokit/openapi-types": {
+      "version": "26.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-26.0.0.tgz",
+      "integrity": "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT"
+    },
+    "node_modules/@octokit/plugin-paginate-rest": {
+      "version": "13.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-13.1.1.tgz",
+      "integrity": "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw==",
+      "dev": true,
+      "license": "MIT",
       "dependencies": {
-        "lru-cache": "^10.0.1"
+        "@octokit/types": "^14.1.0"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": ">= 20"
+      },
+      "peerDependencies": {
+        "@octokit/core": ">=6"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/ini": {
-      "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
+    "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": {
+      "version": "25.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
+      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+      "license": "MIT"
+    },
+    "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": {
+      "version": "14.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
+      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/openapi-types": "^25.1.0"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
+    "node_modules/@octokit/plugin-request-log": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-6.0.0.tgz",
+      "integrity": "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": ">= 20"
+      },
+      "peerDependencies": {
+        "@octokit/core": ">=6"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/npm-package-arg": {
-      "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
+    "node_modules/@octokit/plugin-rest-endpoint-methods": {
+      "version": "16.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-16.1.0.tgz",
+      "integrity": "sha512-nCsyiKoGRnhH5LkH8hJEZb9swpqOcsW+VXv1QoyUNQXJeVODG4+xM6UICEqyqe9XFr6LkL8BIiFCPev8zMDXPw==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT",
       "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
+        "@octokit/types": "^15.0.0"
       },
       "engines": {
-        "node": "^16.14.0 || >=18.0.0"
+        "node": ">= 20"
+      },
+      "peerDependencies": {
+        "@octokit/core": ">=6"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
+    "node_modules/@octokit/request": {
+      "version": "10.0.3",
+      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.3.tgz",
+      "integrity": "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA==",
       "dev": true,
-      "license": "ISC",
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/endpoint": "^11.0.0",
+        "@octokit/request-error": "^7.0.0",
+        "@octokit/types": "^14.0.0",
+        "fast-content-type-parse": "^3.0.0",
+        "universal-user-agent": "^7.0.2"
+      },
       "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
+        "node": ">= 20"
       }
     },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/validate-npm-package-name": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/arborist/node_modules/which": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
-      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/fs": {
-      "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-3.1.1.tgz",
-      "integrity": "sha512-q9CRWjpHCMIh5sVyefoD1cA7PkvILqCZsnSOEUUivORLjxCO/Irmue2DprETiNgEqktDBZaM1Bi+jrarx1XdCg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz",
-      "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^8.0.0",
-        "ini": "^5.0.0",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^10.0.0",
-        "proc-log": "^5.0.0",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^5.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-install-checks": {
-      "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz",
-      "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "dependencies": {
-        "semver": "^7.1.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/git/node_modules/npm-pick-manifest": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz",
-      "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-install-checks": "^7.1.0",
-        "npm-normalize-package-bin": "^4.0.0",
-        "npm-package-arg": "^12.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/installed-package-contents": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-2.1.0.tgz",
-      "integrity": "sha512-c8UuGLeZpm69BryRykLuKRyKFZYJsZSCT4aVY5ds4omyZqJ172ApzgfKJ5eV/r3HgLdUYgFVe54KSFVjKoe27w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-bundled": "^3.0.0",
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "bin": {
-        "installed-package-contents": "bin/index.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/installed-package-contents/node_modules/npm-normalize-package-bin": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
-      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/map-workspaces": {
-      "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/map-workspaces/-/map-workspaces-4.0.2.tgz",
-      "integrity": "sha512-mnuMuibEbkaBTYj9HQ3dMe6L0ylYW+s/gfz7tBDMFY/la0w9Kf44P9aLn4/+/t3aTR3YUHKoT6XQL9rlicIe3Q==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/name-from-folder": "^3.0.0",
-        "@npmcli/package-json": "^6.0.0",
-        "glob": "^10.2.2",
-        "minimatch": "^9.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator": {
-      "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/metavuln-calculator/-/metavuln-calculator-7.1.1.tgz",
-      "integrity": "sha512-Nkxf96V0lAx3HCpVda7Vw4P23RILgdi/5K1fmj2tZkWIYLpXAN8k2UVVOsW16TsS5F8Ws2I7Cm+PU1/rsVF47g==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "cacache": "^18.0.0",
-        "json-parse-even-better-errors": "^3.0.0",
-        "pacote": "^18.0.0",
-        "proc-log": "^4.1.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/metavuln-calculator/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/node-gyp": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-3.0.0.tgz",
-      "integrity": "sha512-gp8pRXC2oOxu0DUE1/M3bYtb1b3/DbJ5aM113+XJBgfXdussRAsX0YOrOhdd8WvnAR6auDBvJomGAkLKA5ydxA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/package-json": {
-      "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz",
-      "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/git": "^6.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^8.0.0",
-        "json-parse-even-better-errors": "^4.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.5.3",
-        "validate-npm-package-license": "^3.0.4"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/query": {
-      "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/query/-/query-3.1.0.tgz",
-      "integrity": "sha512-C/iR0tk7KSKGldibYIB9x8GtO/0Bd0I2mhOaDb8ucQL/bQVTmGoeREaFj64Z5+iCBRf3dQfed0CjJL7I8iTkiQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "postcss-selector-parser": "^6.0.10"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/redact": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-2.0.1.tgz",
-      "integrity": "sha512-YgsR5jCQZhVmTJvjduTOIHph0L73pK8xwMVaDY0PatySqVM9AZj93jpoXYSJqfHFxFkN9dmqTw6OiqExsS3LPw==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-8.1.0.tgz",
-      "integrity": "sha512-y7efHHwghQfk28G2z3tlZ67pLG0XdfYbcVG26r7YIXALRsrVQcTq4/tdenSmdOrEsNahIYA/eh8aEVROWGFUDg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/node-gyp": "^3.0.0",
-        "@npmcli/package-json": "^5.0.0",
-        "@npmcli/promise-spawn": "^7.0.0",
-        "node-gyp": "^10.0.0",
-        "proc-log": "^4.0.0",
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/git": {
-      "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^7.0.0",
-        "ini": "^4.1.3",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^9.0.0",
-        "proc-log": "^4.0.0",
-        "promise-inflight": "^1.0.1",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/package-json": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/git": "^5.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^7.0.0",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^6.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.5.3"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/@npmcli/promise-spawn": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
-      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/ini": {
-      "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@npmcli/run-script/node_modules/which": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
-      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/bundle": {
-      "version": "2.3.2",
-      "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-2.3.2.tgz",
-      "integrity": "sha512-wueKWDk70QixNLB363yHc2D2ItTgYiMTdPwK8D9dKQMR3ZQ0c35IxP5xnwQ8cNLoCgCRcHf14kE+CLIvNX1zmA==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.3.2"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/core": {
-      "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz",
-      "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/protobuf-specs": {
-      "version": "0.3.3",
-      "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz",
-      "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/sign": {
-      "version": "2.3.2",
-      "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz",
-      "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/bundle": "^2.3.2",
-        "@sigstore/core": "^1.0.0",
-        "@sigstore/protobuf-specs": "^0.3.2",
-        "make-fetch-happen": "^13.0.1",
-        "proc-log": "^4.2.0",
-        "promise-retry": "^2.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/sign/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/tuf": {
-      "version": "2.3.4",
-      "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-2.3.4.tgz",
-      "integrity": "sha512-44vtsveTPUpqhm9NCrbU8CWLe3Vck2HO1PNLw7RIajbB7xhtn5RBPm1VNSCMwqGYHhDsBJG8gDF0q4lgydsJvw==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/protobuf-specs": "^0.3.2",
-        "tuf-js": "^2.2.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@sigstore/verify": {
-      "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz",
-      "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/bundle": "^2.3.2",
-        "@sigstore/core": "^1.1.0",
-        "@sigstore/protobuf-specs": "^0.3.2"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/@tufjs/models": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-2.0.1.tgz",
-      "integrity": "sha512-92F7/SFyufn4DXsha9+QfKnN03JGqtMFMXgSHbZOo8JG59WkTni7UzAouNQDf7AuP9OAMxVOPQcqG3sB7w+kkg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/canonical-json": "2.0.0",
-        "minimatch": "^9.0.4"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/abbrev": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz",
-      "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/bin-links": {
-      "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/bin-links/-/bin-links-4.0.4.tgz",
-      "integrity": "sha512-cMtq4W5ZsEwcutJrVId+a/tjt8GSbS+h0oNkdl6+6rBuEv8Ot33Bevj5KPm40t309zuhVic8NjpuL42QCiJWWA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "cmd-shim": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0",
-        "read-cmd-shim": "^4.0.0",
-        "write-file-atomic": "^5.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/bin-links/node_modules/npm-normalize-package-bin": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
-      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/cacache": {
-      "version": "18.0.4",
-      "resolved": "https://registry.npmjs.org/cacache/-/cacache-18.0.4.tgz",
-      "integrity": "sha512-B+L5iIa9mgcjLbliir2th36yEwPftrzteHYujzsx3dFP/31GCHcIeS8f5MGd80odLOjaOvSpU3EEAmRQptkxLQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/fs": "^3.1.0",
-        "fs-minipass": "^3.0.0",
-        "glob": "^10.2.2",
-        "lru-cache": "^10.0.1",
-        "minipass": "^7.0.3",
-        "minipass-collect": "^2.0.1",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "p-map": "^4.0.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11",
-        "unique-filename": "^3.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/cmd-shim": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/cmd-shim/-/cmd-shim-6.0.3.tgz",
-      "integrity": "sha512-FMabTRlc5t5zjdenF6mS0MBeFZm0XqHqeOkcskKFb/LYCcRQ5fVgLOHVc4Lq9CqABd9zhjwPjMBCJvMCziSVtA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/glob": {
-      "version": "10.4.5",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz",
-      "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "foreground-child": "^3.1.0",
-        "jackspeak": "^3.1.2",
-        "minimatch": "^9.0.4",
-        "minipass": "^7.1.2",
-        "package-json-from-dist": "^1.0.0",
-        "path-scurry": "^1.11.1"
-      },
-      "bin": {
-        "glob": "dist/esm/bin.mjs"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/hosted-git-info": {
-      "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz",
-      "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/ignore-walk": {
-      "version": "6.0.5",
-      "resolved": "https://registry.npmjs.org/ignore-walk/-/ignore-walk-6.0.5.tgz",
-      "integrity": "sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "minimatch": "^9.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/jackspeak": {
-      "version": "3.4.3",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz",
-      "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==",
-      "dev": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "@isaacs/cliui": "^8.0.2"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      },
-      "optionalDependencies": {
-        "@pkgjs/parseargs": "^0.11.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/lru-cache": {
-      "version": "10.4.3",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz",
-      "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==",
-      "dev": true,
-      "license": "ISC"
-    },
-    "node_modules/@npmcli/template-oss/node_modules/make-fetch-happen": {
-      "version": "13.0.1",
-      "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-13.0.1.tgz",
-      "integrity": "sha512-cKTUFc/rbKUd/9meOvgrpJ2WrNzymt6jfRDdwg5UCnVzv9dTpEj9JS5m3wtziXVCjluIXyL8pcaukYqezIzZQA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/agent": "^2.0.0",
-        "cacache": "^18.0.0",
-        "http-cache-semantics": "^4.1.1",
-        "is-lambda": "^1.0.1",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minipass-flush": "^1.0.5",
-        "minipass-pipeline": "^1.2.4",
-        "negotiator": "^0.6.3",
-        "proc-log": "^4.2.0",
-        "promise-retry": "^2.0.1",
-        "ssri": "^10.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/make-fetch-happen/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/minimatch": {
-      "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
-      "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "brace-expansion": "^2.0.1"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.17"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/minipass-fetch": {
-      "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-3.0.5.tgz",
-      "integrity": "sha512-2N8elDQAtSnFV0Dk7gt15KHsS0Fyz6CbYZ360h0WTYV1Ty46li3rAXVOQj1THMNLdmrD9Vt5pBPtWtVkpwGBqg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^7.0.3",
-        "minipass-sized": "^1.0.3",
-        "minizlib": "^2.1.2"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      },
-      "optionalDependencies": {
-        "encoding": "^0.1.13"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/minizlib": {
-      "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz",
-      "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^3.0.0",
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">= 8"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/minizlib/node_modules/minipass": {
-      "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/negotiator": {
-      "version": "0.6.4",
-      "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.4.tgz",
-      "integrity": "sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/node-gyp": {
-      "version": "10.3.1",
-      "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.3.1.tgz",
-      "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "env-paths": "^2.2.0",
-        "exponential-backoff": "^3.1.1",
-        "glob": "^10.3.10",
-        "graceful-fs": "^4.2.6",
-        "make-fetch-happen": "^13.0.0",
-        "nopt": "^7.0.0",
-        "proc-log": "^4.1.0",
-        "semver": "^7.3.5",
-        "tar": "^6.2.1",
-        "which": "^4.0.0"
-      },
-      "bin": {
-        "node-gyp": "bin/node-gyp.js"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/node-gyp/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/node-gyp/node_modules/which": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
-      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/nopt": {
-      "version": "7.2.1",
-      "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz",
-      "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "abbrev": "^2.0.0"
-      },
-      "bin": {
-        "nopt": "bin/nopt.js"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/normalize-package-data": {
-      "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-6.0.2.tgz",
-      "integrity": "sha512-V6gygoYb/5EmNI+MEGrWkC+e6+Rr7mTmfHrxDbLzxQogBkgzo76rkok0Am6thgSF7Mv2nLOajAJj5vDJZEFn7g==",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-license": "^3.0.4"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/normalize-package-data/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-bundled": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-3.0.1.tgz",
-      "integrity": "sha512-+AvaheE/ww1JEwRHOrn4WHNzOxGtVp+adrg2AeZS/7KuxGUYFuBta98wYpfHBbJp6Tg6j1NKSEVHNcfZzJHQwQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-bundled/node_modules/npm-normalize-package-bin": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
-      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-install-checks": {
-      "version": "6.3.0",
-      "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-6.3.0.tgz",
-      "integrity": "sha512-W29RiK/xtpCGqn6f3ixfRYGk+zRyr+Ew9F2E20BfXxT5/euLdA/Nm7fO7OeTGuAmTs30cpgInyJ0cYe708YTZw==",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "dependencies": {
-        "semver": "^7.1.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-package-arg": {
-      "version": "12.0.2",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz",
-      "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^8.0.0",
-        "proc-log": "^5.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^6.0.0"
-      },
-      "engines": {
-        "node": "^18.17.0 || >=20.5.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-packlist": {
-      "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/npm-packlist/-/npm-packlist-8.0.2.tgz",
-      "integrity": "sha512-shYrPFIS/JLP4oQmAwDyk5HcyysKW8/JLTEA32S0Z5TzvpaeeX2yMFfoK1fjEBnCBvVyIB/Jj/GBFdm0wsgzbA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "ignore-walk": "^6.0.4"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest": {
-      "version": "9.1.0",
-      "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-9.1.0.tgz",
-      "integrity": "sha512-nkc+3pIIhqHVQr085X9d2JzPzLyjzQS96zbruppqC9aZRm/x8xx6xhI98gHtsfELP2bE+loHq8ZaHFHhe+NauA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "npm-install-checks": "^6.0.0",
-        "npm-normalize-package-bin": "^3.0.0",
-        "npm-package-arg": "^11.0.0",
-        "semver": "^7.3.5"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/npm-normalize-package-bin": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
-      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/npm-package-arg": {
-      "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-pick-manifest/node_modules/validate-npm-package-name": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch": {
-      "version": "17.1.0",
-      "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-17.1.0.tgz",
-      "integrity": "sha512-5+bKQRH0J1xG1uZ1zMNvxW0VEyoNWgJpY9UDuluPFLKDfJ9u2JmmjmTJV1srBGQOROfdBMiVvnH2Zvpbm+xkVA==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/redact": "^2.0.0",
-        "jsonparse": "^1.3.1",
-        "make-fetch-happen": "^13.0.0",
-        "minipass": "^7.0.2",
-        "minipass-fetch": "^3.0.0",
-        "minizlib": "^2.1.2",
-        "npm-package-arg": "^11.0.0",
-        "proc-log": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/npm-package-arg": {
-      "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/npm-registry-fetch/node_modules/validate-npm-package-name": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/p-map": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-4.0.0.tgz",
-      "integrity": "sha512-/bjOqmgETBYB5BoEeGVea8dmvHb2m9GLy1E9W43yeyfP6QQCZGFNa+XRceJEuDB6zqr+gKpIAmlLebMpykw/MQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "aggregate-error": "^3.0.0"
-      },
-      "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote": {
-      "version": "18.0.6",
-      "resolved": "https://registry.npmjs.org/pacote/-/pacote-18.0.6.tgz",
-      "integrity": "sha512-+eK3G27SMwsB8kLIuj4h1FUhHtwiEUo21Tw8wNjmvdlpOEr613edv+8FUsTj/4F/VN5ywGE19X18N7CC2EJk6A==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/git": "^5.0.0",
-        "@npmcli/installed-package-contents": "^2.0.1",
-        "@npmcli/package-json": "^5.1.0",
-        "@npmcli/promise-spawn": "^7.0.0",
-        "@npmcli/run-script": "^8.0.0",
-        "cacache": "^18.0.0",
-        "fs-minipass": "^3.0.0",
-        "minipass": "^7.0.2",
-        "npm-package-arg": "^11.0.0",
-        "npm-packlist": "^8.0.0",
-        "npm-pick-manifest": "^9.0.0",
-        "npm-registry-fetch": "^17.0.0",
-        "proc-log": "^4.0.0",
-        "promise-retry": "^2.0.1",
-        "sigstore": "^2.2.0",
-        "ssri": "^10.0.0",
-        "tar": "^6.1.11"
-      },
-      "bin": {
-        "pacote": "bin/index.js"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/git": {
-      "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-5.0.8.tgz",
-      "integrity": "sha512-liASfw5cqhjNW9UFd+ruwwdEf/lbOAQjLL2XY2dFW/bkJheXDYZgOyul/4gVvEV4BWkTXjYGmDqMw9uegdbJNQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/promise-spawn": "^7.0.0",
-        "ini": "^4.1.3",
-        "lru-cache": "^10.0.1",
-        "npm-pick-manifest": "^9.0.0",
-        "proc-log": "^4.0.0",
-        "promise-inflight": "^1.0.1",
-        "promise-retry": "^2.0.1",
-        "semver": "^7.3.5",
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/package-json": {
-      "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-5.2.1.tgz",
-      "integrity": "sha512-f7zYC6kQautXHvNbLEWgD/uGu1+xCn9izgqBfgItWSx22U0ZDekxN08A1vM8cTxj/cRVe0Q94Ode+tdoYmIOOQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "@npmcli/git": "^5.0.0",
-        "glob": "^10.2.2",
-        "hosted-git-info": "^7.0.0",
-        "json-parse-even-better-errors": "^3.0.0",
-        "normalize-package-data": "^6.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.5.3"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/@npmcli/promise-spawn": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-7.0.2.tgz",
-      "integrity": "sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "which": "^4.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/hosted-git-info": {
-      "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-7.0.2.tgz",
-      "integrity": "sha512-puUZAUKT5m8Zzvs72XWy3HtvVbTWljRE66cP60bxJzAqf2DgICo7lYTY2IHUmLnNpjYvw5bvmoHvPc0QO2a62w==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "lru-cache": "^10.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/ini": {
-      "version": "4.1.3",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz",
-      "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/npm-package-arg": {
-      "version": "11.0.3",
-      "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-11.0.3.tgz",
-      "integrity": "sha512-sHGJy8sOC1YraBywpzQlIKBE4pBbGbiF95U6Auspzyem956E0+FtDtsx1ZxlOJkQCZ1AFXAY/yuvtFYrOxF+Bw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "hosted-git-info": "^7.0.0",
-        "proc-log": "^4.0.0",
-        "semver": "^7.3.5",
-        "validate-npm-package-name": "^5.0.0"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/proc-log": {
-      "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz",
-      "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/validate-npm-package-name": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/validate-npm-package-name/-/validate-npm-package-name-5.0.1.tgz",
-      "integrity": "sha512-OljLrQ9SQdOUqTaQxqL5dEfZWrXExyyWsozYlAWFawPVNuD83igl7uJD2RTkNMbniIYgt8l81eCJGIdQF7avLQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/pacote/node_modules/which": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/which/-/which-4.0.0.tgz",
-      "integrity": "sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "isexe": "^3.1.1"
-      },
-      "bin": {
-        "node-which": "bin/which.js"
-      },
-      "engines": {
-        "node": "^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/parse-conflict-json/-/parse-conflict-json-3.0.1.tgz",
-      "integrity": "sha512-01TvEktc68vwbJOtWZluyWeVGWjP+bZwXtPDMQVbBKzbJ/vZBif0L69KH1+cHv1SZ6e0FKLvjyHe8mqsIqYOmw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "json-parse-even-better-errors": "^3.0.0",
-        "just-diff": "^6.0.0",
-        "just-diff-apply": "^5.2.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/parse-conflict-json/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/path-scurry": {
-      "version": "1.11.1",
-      "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz",
-      "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==",
-      "dev": true,
-      "license": "BlueOak-1.0.0",
-      "dependencies": {
-        "lru-cache": "^10.2.0",
-        "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0"
-      },
-      "engines": {
-        "node": ">=16 || 14 >=14.18"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/postcss-selector-parser": {
-      "version": "6.1.2",
-      "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz",
-      "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "cssesc": "^3.0.0",
-        "util-deprecate": "^1.0.2"
-      },
-      "engines": {
-        "node": ">=4"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/proggy": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/proggy/-/proggy-2.0.0.tgz",
-      "integrity": "sha512-69agxLtnI8xBs9gUGqEnK26UfiexpHy+KUpBQWabiytQjnn5wFY8rklAi7GRfABIuPNnQ/ik48+LGLkYYJcy4A==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/read-cmd-shim": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/read-cmd-shim/-/read-cmd-shim-4.0.0.tgz",
-      "integrity": "sha512-yILWifhaSEEytfXI76kB9xEEiG1AiozaCJZ83A87ytjRiN+jVibXjedjCRNjoZviinhG+4UkalO3mWTd8u5O0Q==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/sigstore": {
-      "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-2.3.1.tgz",
-      "integrity": "sha512-8G+/XDU8wNsJOQS5ysDVO0Etg9/2uA5gR9l4ZwijjlwxBcrU6RPfwi2+jJmbP+Ap1Hlp/nVAaEO4Fj22/SL2gQ==",
-      "dev": true,
-      "license": "Apache-2.0",
-      "dependencies": {
-        "@sigstore/bundle": "^2.3.2",
-        "@sigstore/core": "^1.0.0",
-        "@sigstore/protobuf-specs": "^0.3.2",
-        "@sigstore/sign": "^2.3.2",
-        "@sigstore/tuf": "^2.3.4",
-        "@sigstore/verify": "^1.2.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/ssri": {
-      "version": "10.0.6",
-      "resolved": "https://registry.npmjs.org/ssri/-/ssri-10.0.6.tgz",
-      "integrity": "sha512-MGrFH9Z4NP9Iyhqn16sDtBpRRNJ0Y2hNa6D65h736fVSaPCHr4DM4sWUNvVaSuC+0OBGhwsrydQwmgfg5LncqQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "minipass": "^7.0.3"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/tuf-js": {
-      "version": "2.2.1",
-      "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-2.2.1.tgz",
-      "integrity": "sha512-GwIJau9XaA8nLVbUXsN3IlFi7WmQ48gBUrl3FTkkL/XLu/POhBzfmX9hd33FNMX1qAsfl6ozO1iMmW9NC8YniA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@tufjs/models": "2.0.1",
-        "debug": "^4.3.4",
-        "make-fetch-happen": "^13.0.1"
-      },
-      "engines": {
-        "node": "^16.14.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/unique-filename": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-3.0.0.tgz",
-      "integrity": "sha512-afXhuC55wkAmZ0P18QsVE6kp8JaxrEokN2HGIoIVv2ijHQd419H0+6EigAFcIzXeMIkcIkNBpB3L/DXB3cTS/g==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "unique-slug": "^4.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/unique-slug": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-4.0.0.tgz",
-      "integrity": "sha512-WrcA6AyEfqDX5bWige/4NQfPZMtASNVxdmWR76WESYQVAACSgWcR6e9i0mofqqBxYFtL4oAxPIptY73/0YE1DQ==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "imurmurhash": "^0.1.4"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@npmcli/template-oss/node_modules/walk-up-path": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/walk-up-path/-/walk-up-path-3.0.1.tgz",
-      "integrity": "sha512-9YlCL/ynK3CTlrSRrDxZvUauLzAswPCrsaCgilqFevUYpeEW0/3ScEjaa3kbW/T0ghhkEr7mv+fpjqn1Y1YuTA==",
-      "dev": true,
-      "license": "ISC"
-    },
-    "node_modules/@npmcli/template-oss/node_modules/write-file-atomic": {
-      "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-5.0.1.tgz",
-      "integrity": "sha512-+QU2zd6OTD8XWIJCbffaiQeH9U73qIqafo1x6V1snCWYGJf6cVE0cDR4D8xRzcEnfI21IFrUPzPGtcPf8AC+Rw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "imurmurhash": "^0.1.4",
-        "signal-exit": "^4.0.1"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/@octokit/auth-token": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-3.0.4.tgz",
-      "integrity": "sha512-TWFX7cZF2LXoCvdmJWY7XVPi74aSY0+FfBZNSXEXFkMpjcqsQwDSYVv5FhRFaI0V1ECnwbz4j59T/G+rXNWaIQ==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 14"
-      }
-    },
-    "node_modules/@octokit/core": {
-      "version": "4.2.4",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-4.2.4.tgz",
-      "integrity": "sha512-rYKilwgzQ7/imScn3M9/pFfUf4I1AZEH3KhyJmtPdE2zfaXAn2mFfUy4FbKewzc2We5y/LlKLj36fWJLKC2SIQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/auth-token": "^3.0.0",
-        "@octokit/graphql": "^5.0.0",
-        "@octokit/request": "^6.0.0",
-        "@octokit/request-error": "^3.0.0",
-        "@octokit/types": "^9.0.0",
-        "before-after-hook": "^2.2.0",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 14"
-      }
-    },
-    "node_modules/@octokit/endpoint": {
-      "version": "7.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-7.0.6.tgz",
-      "integrity": "sha512-5L4fseVRUsDFGR00tMWD/Trdeeihn999rTMGRMC1G/Ldi1uWlWJzI98H4Iak5DB/RVvQuyMYKqSK/R6mbSOQyg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/types": "^9.0.0",
-        "is-plain-object": "^5.0.0",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 14"
-      }
-    },
-    "node_modules/@octokit/graphql": {
-      "version": "5.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-5.0.6.tgz",
-      "integrity": "sha512-Fxyxdy/JH0MnIB5h+UQ3yCoh1FG4kWXfFKkpWqjZHw/p+Kc8Y44Hu/kCgNBT6nU1shNumEchmW/sUO1JuQnPcw==",
+    "node_modules/@octokit/request-error": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.0.0.tgz",
+      "integrity": "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@octokit/request": "^6.0.0",
-        "@octokit/types": "^9.0.0",
-        "universal-user-agent": "^6.0.0"
+        "@octokit/types": "^14.0.0"
       },
       "engines": {
-        "node": ">= 14"
+        "node": ">= 20"
       }
     },
-    "node_modules/@octokit/openapi-types": {
-      "version": "18.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-18.1.1.tgz",
-      "integrity": "sha512-VRaeH8nCDtF5aXWnjPuEMIYf1itK/s3JYyJcWFJT8X9pSNnBtriDf7wlEWsGuhPLl4QIH4xM8fqTXDwJ3Mu6sw==",
+    "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
+      "version": "25.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
+      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/@octokit/plugin-paginate-rest": {
-      "version": "6.1.2",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-6.1.2.tgz",
-      "integrity": "sha512-qhrmtQeHU/IivxucOV1bbI/xZyC/iOBhclokv7Sut5vnejAIAEXVcGQeRpQlU39E0WwK9lNvJHphHri/DB6lbQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/tsconfig": "^1.0.2",
-        "@octokit/types": "^9.2.3"
-      },
-      "engines": {
-        "node": ">= 14"
-      },
-      "peerDependencies": {
-        "@octokit/core": ">=4"
-      }
-    },
-    "node_modules/@octokit/plugin-request-log": {
-      "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-1.0.4.tgz",
-      "integrity": "sha512-mLUsMkgP7K/cnFEw07kWqXGF5LKrOkD+lhCrKvPHXWDywAwuDUeDwWBpc69XK3pNX0uKiVt8g5z96PJ6z9xCFA==",
-      "dev": true,
-      "license": "MIT",
-      "peerDependencies": {
-        "@octokit/core": ">=3"
-      }
-    },
-    "node_modules/@octokit/plugin-rest-endpoint-methods": {
-      "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-7.2.3.tgz",
-      "integrity": "sha512-I5Gml6kTAkzVlN7KCtjOM+Ruwe/rQppp0QU372K1GP7kNOYEKe8Xn5BW4sE62JAHdwpq95OQK/qGNyKQMUzVgA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/types": "^10.0.0"
-      },
-      "engines": {
-        "node": ">= 14"
-      },
-      "peerDependencies": {
-        "@octokit/core": ">=3"
-      }
-    },
-    "node_modules/@octokit/plugin-rest-endpoint-methods/node_modules/@octokit/types": {
-      "version": "10.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-10.0.0.tgz",
-      "integrity": "sha512-Vm8IddVmhCgU1fxC1eyinpwqzXPEYu0NrYzD3YZjlGjyftdLBTeqNblRC0jmJmgxbJIsQlyogVeGnrNaaMVzIg==",
+    "node_modules/@octokit/request-error/node_modules/@octokit/types": {
+      "version": "14.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
+      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@octokit/openapi-types": "^18.0.0"
+        "@octokit/openapi-types": "^25.1.0"
       }
     },
-    "node_modules/@octokit/request": {
-      "version": "6.2.8",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-6.2.8.tgz",
-      "integrity": "sha512-ow4+pkVQ+6XVVsekSYBzJC0VTVvh/FCTUUgTsboGq+DTeWdyIFV8WSCdo0RIxk6wSkBTHqIK1mYuY7nOBXOchw==",
+    "node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
+      "version": "25.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
+      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@octokit/endpoint": "^7.0.0",
-        "@octokit/request-error": "^3.0.0",
-        "@octokit/types": "^9.0.0",
-        "is-plain-object": "^5.0.0",
-        "node-fetch": "^2.6.7",
-        "universal-user-agent": "^6.0.0"
-      },
-      "engines": {
-        "node": ">= 14"
-      }
+      "license": "MIT"
     },
-    "node_modules/@octokit/request-error": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-3.0.3.tgz",
-      "integrity": "sha512-crqw3V5Iy2uOU5Np+8M/YexTlT8zxCfI+qu+LxUB7SZpje4Qmx3mub5DfEKSO8Ylyk0aogi6TYdf6kxzh2BguQ==",
+    "node_modules/@octokit/request/node_modules/@octokit/types": {
+      "version": "14.1.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
+      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@octokit/types": "^9.0.0",
-        "deprecation": "^2.0.0",
-        "once": "^1.4.0"
-      },
-      "engines": {
-        "node": ">= 14"
+        "@octokit/openapi-types": "^25.1.0"
       }
     },
     "node_modules/@octokit/rest": {
-      "version": "19.0.13",
-      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-19.0.13.tgz",
-      "integrity": "sha512-/EzVox5V9gYGdbAI+ovYj3nXQT1TtTHRT+0eZPcuC05UFSWO3mdO9UY1C0i2eLF9Un1ONJkAk+IEtYGAC+TahA==",
+      "version": "22.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-22.0.0.tgz",
+      "integrity": "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@octokit/core": "^4.2.1",
-        "@octokit/plugin-paginate-rest": "^6.1.2",
-        "@octokit/plugin-request-log": "^1.0.4",
-        "@octokit/plugin-rest-endpoint-methods": "^7.1.2"
+        "@octokit/core": "^7.0.2",
+        "@octokit/plugin-paginate-rest": "^13.0.1",
+        "@octokit/plugin-request-log": "^6.0.0",
+        "@octokit/plugin-rest-endpoint-methods": "^16.0.0"
       },
       "engines": {
-        "node": ">= 14"
-      }
-    },
-    "node_modules/@octokit/tsconfig": {
-      "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/@octokit/tsconfig/-/tsconfig-1.0.2.tgz",
-      "integrity": "sha512-I0vDR0rdtP8p2lGMzvsJzbhdOWy405HcGovrspJ8RRibHnyRgggUSNO5AIox5LmqiwmatHKYsvj6VGFHkqS7lA==",
-      "dev": true,
-      "license": "MIT"
+        "node": ">= 20"
+      }
     },
     "node_modules/@octokit/types": {
-      "version": "9.3.2",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-9.3.2.tgz",
-      "integrity": "sha512-D4iHGTdAnEEVsB8fl95m1hiz7D5YiRdQ9b/OEb3BYRVwbLsGHcRVPz+u+BgRLNk0Q0/4iZCBqDN96j2XNxfXrA==",
+      "version": "15.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-15.0.0.tgz",
+      "integrity": "sha512-8o6yDfmoGJUIeR9OfYU0/TUJTnMPG2r68+1yEdUeG2Fdqpj8Qetg0ziKIgcBm0RW/j29H41WP37CYCEhp6GoHQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@octokit/openapi-types": "^18.0.0"
+        "@octokit/openapi-types": "^26.0.0"
       }
     },
     "node_modules/@pkgjs/parseargs": {
@@ -3569,8 +2094,6 @@
     },
     "node_modules/@rtsao/scc": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz",
-      "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==",
       "dev": true,
       "license": "MIT",
       "peer": true
@@ -3679,8 +2202,6 @@
     },
     "node_modules/@tufjs/repo-mock": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@tufjs/repo-mock/-/repo-mock-4.0.0.tgz",
-      "integrity": "sha512-Z/w5mFJC26ZbrGYduDkWzGCxui9rSXkJqWROSOhaLk8s+PcVAv/W03nOBqpcfbgMVLYVYtMYaopoGSuC1mbNsQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3693,8 +2214,6 @@
     },
     "node_modules/@types/conventional-commits-parser": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/@types/conventional-commits-parser/-/conventional-commits-parser-5.0.1.tgz",
-      "integrity": "sha512-7uz5EHdzz2TqoMfV7ee61Egf5y6NkcO4FB/1iCCQnbeiI1F3xzv3vK5dBCXUCLQgGYS+mUeigK1iKQzvED+QnQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3703,8 +2222,6 @@
     },
     "node_modules/@types/debug": {
       "version": "4.1.12",
-      "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
-      "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3713,8 +2230,6 @@
     },
     "node_modules/@types/hast": {
       "version": "2.3.10",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-2.3.10.tgz",
-      "integrity": "sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3723,16 +2238,12 @@
     },
     "node_modules/@types/json5": {
       "version": "0.0.29",
-      "resolved": "https://registry.npmjs.org/@types/json5/-/json5-0.0.29.tgz",
-      "integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/@types/mdast": {
       "version": "3.0.15",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz",
-      "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3748,15 +2259,11 @@
     },
     "node_modules/@types/ms": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
-      "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/node": {
       "version": "24.5.2",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.5.2.tgz",
-      "integrity": "sha512-FYxk1I7wPv3K2XBaoyH2cTnocQEu8AOZ60hPbsyukMPLv5/5qr7V1i8PLHdl6Zf87I+xZXFvPCXYjiTFq+YSDQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3779,15 +2286,11 @@
     },
     "node_modules/@types/parse5": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/@types/parse5/-/parse5-6.0.3.tgz",
-      "integrity": "sha512-SuT16Q1K51EAVPz1K29DJ/sXjhSQ0zjvsypYJ6tlwVsRV9jwW5Adq2ch8Dq8kDBCkYnELS7N7VNCSB5nC56t/g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/unist": {
       "version": "2.0.11",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.11.tgz",
-      "integrity": "sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==",
       "dev": true,
       "license": "MIT"
     },
@@ -3810,8 +2313,6 @@
     },
     "node_modules/@ungap/structured-clone": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/@ungap/structured-clone/-/structured-clone-1.3.0.tgz",
-      "integrity": "sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==",
       "dev": true,
       "license": "ISC",
       "peer": true
@@ -3836,8 +2337,6 @@
     },
     "node_modules/acorn": {
       "version": "8.15.0",
-      "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
-      "integrity": "sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -3850,8 +2349,6 @@
     },
     "node_modules/acorn-jsx": {
       "version": "5.3.2",
-      "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz",
-      "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -3869,8 +2366,6 @@
     },
     "node_modules/aggregate-error": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/aggregate-error/-/aggregate-error-3.1.0.tgz",
-      "integrity": "sha512-4I7Td01quW/RpocfNayFdFVk1qSuoh0E7JrbRJ16nH01HhKFQ88INq9Sd+nd72zqRySlr9BmDA8xlEJ6vJMrYA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3883,8 +2378,6 @@
     },
     "node_modules/ajv": {
       "version": "8.17.1",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
-      "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3900,8 +2393,6 @@
     },
     "node_modules/ajv-formats": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-2.1.1.tgz",
-      "integrity": "sha512-Wx0Kx52hxE7C18hkMEggYlEifqWZtYaRgouJor+WMdPnQyEK13vgEWyVNup7SoeeoLMsr4kf5h6dOW11I15MUA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3918,8 +2409,6 @@
     },
     "node_modules/ajv-formats-draft2019": {
       "version": "1.6.1",
-      "resolved": "https://registry.npmjs.org/ajv-formats-draft2019/-/ajv-formats-draft2019-1.6.1.tgz",
-      "integrity": "sha512-JQPvavpkWDvIsBp2Z33UkYCtXCSpW4HD3tAZ+oL4iEFOk9obQZffx0yANwECt6vzr6ET+7HN5czRyqXbnq/u0Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3953,8 +2442,6 @@
     },
     "node_modules/anymatch": {
       "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz",
-      "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3965,10 +2452,21 @@
         "node": ">= 8"
       }
     },
+    "node_modules/anymatch/node_modules/picomatch": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=8.6"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/jonschlinkert"
+      }
+    },
     "node_modules/append-transform": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/append-transform/-/append-transform-2.0.0.tgz",
-      "integrity": "sha512-7yeyCEurROLQJFv5Xj4lEGTy0borxepjFv1g22oAdqFu//SrAlDl1O1Nxx15SH1RoliUml6p8dwJW9jvZughhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3989,15 +2487,11 @@
     },
     "node_modules/argparse": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
-      "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==",
       "dev": true,
       "license": "Python-2.0"
     },
     "node_modules/args": {
       "version": "5.0.3",
-      "resolved": "https://registry.npmjs.org/args/-/args-5.0.3.tgz",
-      "integrity": "sha512-h6k/zfFgusnv3i5TU08KQkVKuCPBtL/PWQbWkHUxvJrZ2nAyeaUupneemcrgn1xmqxPQsPIzwkUhOpoqPDRZuA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4012,8 +2506,6 @@
     },
     "node_modules/args/node_modules/ansi-styles": {
       "version": "3.2.1",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
-      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4025,8 +2517,6 @@
     },
     "node_modules/args/node_modules/camelcase": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.0.0.tgz",
-      "integrity": "sha512-faqwZqnWxbxn+F1d399ygeamQNy3lPp/H9H6rNrqYh4FSVCtcY+3cub1MxA8o9mDd55mM8Aghuu/kuyYA6VTsA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4035,8 +2525,6 @@
     },
     "node_modules/args/node_modules/chalk": {
       "version": "2.4.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
-      "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4050,8 +2538,6 @@
     },
     "node_modules/args/node_modules/color-convert": {
       "version": "1.9.3",
-      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
-      "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4060,15 +2546,11 @@
     },
     "node_modules/args/node_modules/color-name": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
-      "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/args/node_modules/escape-string-regexp": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
-      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4077,8 +2559,6 @@
     },
     "node_modules/args/node_modules/has-flag": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
-      "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4087,8 +2567,6 @@
     },
     "node_modules/args/node_modules/mri": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/mri/-/mri-1.1.4.tgz",
-      "integrity": "sha512-6y7IjGPm8AzlvoUrwAaw1tLnUBudaS3752vcd8JtrpGGQn+rXIe63LFVHm/YMwtqAuh+LJPCFdlLYPWM1nYn6w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4097,8 +2575,6 @@
     },
     "node_modules/args/node_modules/supports-color": {
       "version": "5.5.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
-      "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4110,8 +2586,6 @@
     },
     "node_modules/array-buffer-byte-length": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/array-buffer-byte-length/-/array-buffer-byte-length-1.0.2.tgz",
-      "integrity": "sha512-LHE+8BuR7RYGDKvnrmcuSq3tDcKv9OFEXQt/HpbZhY7V6h0zlUXutnAD82GiFx9rdieCMjkvtcsPqBwgUl1Iiw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4128,15 +2602,11 @@
     },
     "node_modules/array-ify": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/array-ify/-/array-ify-1.0.0.tgz",
-      "integrity": "sha512-c5AMf34bKdvPhQ7tBGhqkgKNUzMr4WUs+WDtC2ZUGOUncbxKMTvqxYctiseW3+L4bA8ec+GcZ6/A/FW4m8ukng==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/array-includes": {
       "version": "3.1.9",
-      "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.1.9.tgz",
-      "integrity": "sha512-FmeCCAenzH0KH381SPT5FZmiA/TmpndpcaShhfgEN9eCVjnFBqq3l1xrI42y8+PPLI6hypzou4GXw00WHmPBLQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4159,8 +2629,6 @@
     },
     "node_modules/array.prototype.findlastindex": {
       "version": "1.2.6",
-      "resolved": "https://registry.npmjs.org/array.prototype.findlastindex/-/array.prototype.findlastindex-1.2.6.tgz",
-      "integrity": "sha512-F/TKATkzseUExPlfvmwQKGITM3DGTK+vkAsCZoDc5daVygbJBnjEUCbgkAvVFsgfXfX4YIqZ/27G3k3tdXrTxQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4182,8 +2650,6 @@
     },
     "node_modules/array.prototype.flat": {
       "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/array.prototype.flat/-/array.prototype.flat-1.3.3.tgz",
-      "integrity": "sha512-rwG/ja1neyLqCuGZ5YYrznA62D4mZXg0i1cIskIUKSiqF3Cje9/wXAls9B9s1Wa2fomMsIv8czB8jZcPmxCXFg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4202,8 +2668,6 @@
     },
     "node_modules/array.prototype.flatmap": {
       "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.3.3.tgz",
-      "integrity": "sha512-Y7Wt51eKJSyi80hFrJCePGGNo5ktJCslFuboqJsbf57CCPcm5zztluPlc4/aD8sWsKvlwatezpV4U1efk8kpjg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4222,8 +2686,6 @@
     },
     "node_modules/arraybuffer.prototype.slice": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/arraybuffer.prototype.slice/-/arraybuffer.prototype.slice-1.0.4.tgz",
-      "integrity": "sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4255,8 +2717,6 @@
     },
     "node_modules/async-function": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/async-function/-/async-function-1.0.0.tgz",
-      "integrity": "sha512-hsU18Ae8CDTR6Kgu9DYf0EbCr/a5iGL0rytQDobUcdpYOKokk8LEjVphnXkDkgpi0wYVsqrXuP0bZxJaTqdgoA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4266,8 +2726,6 @@
     },
     "node_modules/async-hook-domain": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/async-hook-domain/-/async-hook-domain-2.0.4.tgz",
-      "integrity": "sha512-14LjCmlK1PK8eDtTezR6WX8TMaYNIzBIsd2D1sGoGjgx0BuNMMoSdk7i/drlbtamy0AWv9yv2tkB+ASdmeqFIw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4296,15 +2754,11 @@
     },
     "node_modules/asynckit": {
       "version": "0.4.0",
-      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
-      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/available-typed-arrays": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
-      "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4320,8 +2774,6 @@
     },
     "node_modules/b4a": {
       "version": "1.7.1",
-      "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.7.1.tgz",
-      "integrity": "sha512-ZovbrBV0g6JxK5cGUF1Suby1vLfKjv4RWi8IxoaO/Mon8BDD9I21RxjHFtgQ+kskJqLAVyQZly3uMBui+vhc8Q==",
       "dev": true,
       "license": "Apache-2.0",
       "peerDependencies": {
@@ -4335,8 +2787,6 @@
     },
     "node_modules/bail": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/bail/-/bail-2.0.2.tgz",
-      "integrity": "sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -4351,16 +2801,12 @@
     },
     "node_modules/bare-events": {
       "version": "2.7.0",
-      "resolved": "https://registry.npmjs.org/bare-events/-/bare-events-2.7.0.tgz",
-      "integrity": "sha512-b3N5eTW1g7vXkw+0CXh/HazGTcO5KYuu/RCNaJbDMPI6LHDi+7qe8EmxKUVe1sUbY2KZOVZFyj62x0OEz9qyAA==",
       "dev": true,
       "license": "Apache-2.0",
       "optional": true
     },
     "node_modules/baseline-browser-mapping": {
       "version": "2.8.6",
-      "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.8.6.tgz",
-      "integrity": "sha512-wrH5NNqren/QMtKUEEJf7z86YjfqW/2uw3IL3/xpqZUC95SSVIFXYQeeGjL6FT/X68IROu6RMehZQS5foy2BXw==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -4369,21 +2815,17 @@
     },
     "node_modules/basic-auth-parser": {
       "version": "0.0.2-1",
-      "resolved": "https://registry.npmjs.org/basic-auth-parser/-/basic-auth-parser-0.0.2-1.tgz",
-      "integrity": "sha512-GFj8iVxo9onSU6BnnQvVwqvxh60UcSHJEDnIk3z4B6iOjsKSmqe+ibW0Rsz7YO7IE1HG3D3tqCNIidP46SZVdQ==",
       "dev": true
     },
     "node_modules/before-after-hook": {
-      "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-4.0.0.tgz",
+      "integrity": "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/benchmark": {
       "version": "2.1.4",
-      "resolved": "https://registry.npmjs.org/benchmark/-/benchmark-2.1.4.tgz",
-      "integrity": "sha512-l9MlfN4M1K/H2fbhfMy3B7vJd6AGKJVQn2h6Sg/Yx+KckoUA7ewS5Vv6TjSq18ooE1kS9hhAlQRH3AkXIh/aOQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4407,8 +2849,6 @@
     },
     "node_modules/bind-obj-methods": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/bind-obj-methods/-/bind-obj-methods-3.0.0.tgz",
-      "integrity": "sha512-nLEaaz3/sEzNSyPWRsN9HNsqwk1AUyECtGj+XwGdIi3xABnEqecvXtIJ0wehQXuuER5uZ/5fTs2usONgYjG+iw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -4432,8 +2872,6 @@
     },
     "node_modules/braces": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
-      "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4445,8 +2883,6 @@
     },
     "node_modules/browserslist": {
       "version": "4.26.2",
-      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.26.2.tgz",
-      "integrity": "sha512-ECFzp6uFOSB+dcZ5BK/IBaGWssbSYBHvuMeMt3MMFyhI0Z8SqGgEkBLARgpRH3hutIgPVsALcMwbDrJqPxQ65A==",
       "dev": true,
       "funding": [
         {
@@ -4479,8 +2915,6 @@
     },
     "node_modules/buffer-from": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
-      "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -4507,8 +2941,6 @@
     },
     "node_modules/caching-transform": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/caching-transform/-/caching-transform-4.0.0.tgz",
-      "integrity": "sha512-kpqOvwXnjjN44D89K5ccQC+RUrsy7jB/XLlRrx0D7/2HNcTPqzsb6XgYoErwko6QsV184CA2YgS1fxDiiDZMWA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4523,15 +2955,11 @@
     },
     "node_modules/caching-transform/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/caching-transform/node_modules/write-file-atomic": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-3.0.3.tgz",
-      "integrity": "sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4543,8 +2971,6 @@
     },
     "node_modules/call-bind": {
       "version": "1.0.8",
-      "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz",
-      "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4563,8 +2989,6 @@
     },
     "node_modules/call-bind-apply-helpers": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
-      "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4577,8 +3001,6 @@
     },
     "node_modules/call-bound": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
-      "integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -4595,15 +3017,11 @@
     },
     "node_modules/caller": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/caller/-/caller-1.1.0.tgz",
-      "integrity": "sha512-n+21IZC3j06YpCWaxmUy5AnVqhmCIM2bQtqQyy00HJlmStRt6kwDX5F9Z97pqwAB+G/tgSz6q/kUBbNyQzIubw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/callsites": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
-      "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4612,8 +3030,6 @@
     },
     "node_modules/camelcase": {
       "version": "5.3.1",
-      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
-      "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4640,8 +3056,6 @@
     },
     "node_modules/caniuse-lite": {
       "version": "1.0.30001743",
-      "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001743.tgz",
-      "integrity": "sha512-e6Ojr7RV14Un7dz6ASD0aZDmQPT/A+eZU+nuTNfjqmRrmkmQlnTNWH0SKmqagx9PeW87UVqapSurtAXifmtdmw==",
       "dev": true,
       "funding": [
         {
@@ -4661,8 +3075,6 @@
     },
     "node_modules/ccount": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz",
-      "integrity": "sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -4683,8 +3095,6 @@
     },
     "node_modules/character-entities": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz",
-      "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -4694,8 +3104,6 @@
     },
     "node_modules/character-entities-html4": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz",
-      "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -4705,8 +3113,6 @@
     },
     "node_modules/character-entities-legacy": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
-      "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -4716,8 +3122,6 @@
     },
     "node_modules/chokidar": {
       "version": "3.6.0",
-      "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
-      "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4741,8 +3145,6 @@
     },
     "node_modules/chokidar/node_modules/glob-parent": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
-      "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4787,8 +3189,6 @@
     },
     "node_modules/clean-stack": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/clean-stack/-/clean-stack-2.2.0.tgz",
-      "integrity": "sha512-4diC9HaTE+KRAMWhDhrGOECgWZxoevMc5TlkObMqNSsVU62PYzXZ/SMTjzyGAFF1YusgxGcSWTEXBhp0CPwQ1A==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4809,8 +3209,6 @@
     },
     "node_modules/cli-table3": {
       "version": "0.6.5",
-      "resolved": "https://registry.npmjs.org/cli-table3/-/cli-table3-0.6.5.tgz",
-      "integrity": "sha512-+W/5efTR7y5HRD7gACw9yQjqMVvEMLBHmboM/kPWam+H+Hmyrgjh6YncVKK122YZkXrLudzTuAukUw9FnMf7IQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4825,8 +3223,6 @@
     },
     "node_modules/cliui": {
       "version": "8.0.1",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz",
-      "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -4840,8 +3236,6 @@
     },
     "node_modules/cliui/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4856,8 +3250,6 @@
     },
     "node_modules/cliui/node_modules/wrap-ansi": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4880,13 +3272,13 @@
       }
     },
     "node_modules/code-suggester": {
-      "version": "4.3.4",
-      "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-4.3.4.tgz",
-      "integrity": "sha512-qOj12mccFX2NALK01WnrwJKCmIwp1TMuskueh2EVaR4bc3xw072yfX9Ojq7yFQL4AmXfTXHKNjSO8lvh0y5MuA==",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-5.0.0.tgz",
+      "integrity": "sha512-/xyGfSM/hMYxl12kqoYoOwUm0D1uuVT2nWcMiTq2Fn5MLi+BlWkHq5AUvtniDJwVSdI3jgbK4AOzGws+v/dFPQ==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
-        "@octokit/rest": "^19.0.5",
+        "@octokit/rest": "^20.1.1",
         "@types/yargs": "^16.0.0",
         "async-retry": "^1.3.1",
         "diff": "^5.0.0",
@@ -4898,7 +3290,174 @@
         "code-suggester": "build/src/bin/code-suggester.js"
       },
       "engines": {
-        "node": ">=14.0.0"
+        "node": ">=18.0.0"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/auth-token": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
+      "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/core": {
+      "version": "5.2.2",
+      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz",
+      "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/auth-token": "^4.0.0",
+        "@octokit/graphql": "^7.1.0",
+        "@octokit/request": "^8.4.1",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.0.0",
+        "before-after-hook": "^2.2.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/endpoint": {
+      "version": "9.0.6",
+      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
+      "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.1.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/graphql": {
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
+      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/request": "^8.4.1",
+        "@octokit/types": "^13.0.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/openapi-types": {
+      "version": "24.2.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/code-suggester/node_modules/@octokit/plugin-paginate-rest": {
+      "version": "11.4.4-cjs.2",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz",
+      "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.7.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "5"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/plugin-request-log": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz",
+      "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "5"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/plugin-rest-endpoint-methods": {
+      "version": "13.3.2-cjs.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz",
+      "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.8.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "^5"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/request": {
+      "version": "8.4.1",
+      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz",
+      "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/endpoint": "^9.0.6",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.1.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/request-error": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz",
+      "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.1.0",
+        "deprecation": "^2.0.0",
+        "once": "^1.4.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/rest": {
+      "version": "20.1.2",
+      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.1.2.tgz",
+      "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/core": "^5.0.2",
+        "@octokit/plugin-paginate-rest": "11.4.4-cjs.2",
+        "@octokit/plugin-request-log": "^4.0.0",
+        "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/code-suggester/node_modules/@octokit/types": {
+      "version": "13.10.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/openapi-types": "^24.2.0"
       }
     },
     "node_modules/code-suggester/node_modules/ansi-styles": {
@@ -4917,6 +3476,13 @@
         "url": "https://github.com/chalk/ansi-styles?sponsor=1"
       }
     },
+    "node_modules/code-suggester/node_modules/before-after-hook": {
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
+      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
+      "dev": true,
+      "license": "Apache-2.0"
+    },
     "node_modules/code-suggester/node_modules/brace-expansion": {
       "version": "1.1.12",
       "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
@@ -4985,6 +3551,13 @@
         "node": "*"
       }
     },
+    "node_modules/code-suggester/node_modules/universal-user-agent": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/code-suggester/node_modules/wrap-ansi": {
       "version": "7.0.0",
       "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
@@ -5050,8 +3623,6 @@
     },
     "node_modules/color-support": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz",
-      "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -5060,8 +3631,6 @@
     },
     "node_modules/combined-stream": {
       "version": "1.0.8",
-      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
-      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5073,8 +3642,6 @@
     },
     "node_modules/comma-separated-tokens": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz",
-      "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5084,8 +3651,6 @@
     },
     "node_modules/commander": {
       "version": "2.20.3",
-      "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
-      "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -5095,15 +3660,11 @@
     },
     "node_modules/commondir": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/commondir/-/commondir-1.0.1.tgz",
-      "integrity": "sha512-W9pAhw0ja1Edb5GVdIF1mjZw/ASI0AlShXM83UUGe2DVr5TdAPEA1OA8m/g8zWp9x6On7gqufY+FatDbC3MDQg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/compare-func": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/compare-func/-/compare-func-2.0.0.tgz",
-      "integrity": "sha512-zHig5N+tPWARooBnb0Zx1MFcdfpyJrfTJ3Y5L+IFvUm8rM74hHz66z0gw0x4tijh5CorKkKUCnW82R2vmpeCRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5113,15 +3674,11 @@
     },
     "node_modules/concat-map": {
       "version": "0.0.1",
-      "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
-      "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/conventional-changelog-angular": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-angular/-/conventional-changelog-angular-7.0.0.tgz",
-      "integrity": "sha512-ROjNchA9LgfNMTTFSIWPzebCwOGFdgkEq45EnvvrmSLvCtAw0HSmrCs7/ty+wAeYUZyNay0YMUNYFTRL72PkBQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5133,8 +3690,6 @@
     },
     "node_modules/conventional-changelog-conventionalcommits": {
       "version": "7.0.2",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-7.0.2.tgz",
-      "integrity": "sha512-NKXYmMR/Hr1DevQegFB4MwfM5Vv0m4UIxKZTTYuD98lpTknaZlSRrDOG4X7wIXpGkfsYxZTghUN+Qq+T0YQI7w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -5273,8 +3828,6 @@
     },
     "node_modules/conventional-commits-parser": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/conventional-commits-parser/-/conventional-commits-parser-5.0.0.tgz",
-      "integrity": "sha512-ZPMl0ZJbw74iS9LuX9YIAiW8pfM5p3yh2o/NbXHbkFuZzY5jvdi5jFycEOkmBW5H5I7nA+D6f3UcsCLP2vvSEA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5292,15 +3845,11 @@
     },
     "node_modules/convert-source-map": {
       "version": "1.9.0",
-      "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.9.0.tgz",
-      "integrity": "sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/cosmiconfig": {
       "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/cosmiconfig/-/cosmiconfig-9.0.0.tgz",
-      "integrity": "sha512-itvL5h8RETACmOTFc4UfIyB2RfEHi71Ax6E/PivVxq9NseKbOWpeyHEOIbmAw1rs8Ak0VursQNww7lf7YtUwzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5326,8 +3875,6 @@
     },
     "node_modules/cosmiconfig-typescript-loader": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/cosmiconfig-typescript-loader/-/cosmiconfig-typescript-loader-6.1.0.tgz",
-      "integrity": "sha512-tJ1w35ZRUiM5FeTzT7DtYWAFFv37ZLqSRkGi2oeCK1gPhvaWjkAtfXvLmvE1pRfxxp9aQo6ba/Pvg1dKj05D4g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5357,8 +3904,6 @@
     },
     "node_modules/cross-spawn/node_modules/isexe": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
-      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
       "inBundle": true,
       "license": "ISC"
     },
@@ -5418,8 +3963,6 @@
     },
     "node_modules/cssstyle": {
       "version": "4.6.0",
-      "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.6.0.tgz",
-      "integrity": "sha512-2z+rWdzbbSZv6/rhtvzvqeZQHrBaqgogqt85sqFNbabZOuFbCVFb8kPeEtZjiKkbrm395irpNKiYeFeLiQnFPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5432,15 +3975,11 @@
     },
     "node_modules/cssstyle/node_modules/rrweb-cssom": {
       "version": "0.8.0",
-      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
-      "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/dargs": {
       "version": "8.1.0",
-      "resolved": "https://registry.npmjs.org/dargs/-/dargs-8.1.0.tgz",
-      "integrity": "sha512-wAV9QHOsNbwnWdNW2FYvE1P56wtgSbM+3SZcdGiWQILwVjACCXDCI3Ai8QlCjMDB8YK5zySiXZYBiwGmNY3lnw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5452,8 +3991,6 @@
     },
     "node_modules/data-urls": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz",
-      "integrity": "sha512-ZYP5VBHshaDAiVZxjbRVcFJpc+4xGgT0bK3vzy1HLN8jTO975HEbuYzZJcHoQEY5K1a0z8YayJkyVETa08eNTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5466,8 +4003,6 @@
     },
     "node_modules/data-urls/node_modules/tr46": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
-      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5479,8 +4014,6 @@
     },
     "node_modules/data-urls/node_modules/webidl-conversions": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
-      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -5489,8 +4022,6 @@
     },
     "node_modules/data-urls/node_modules/whatwg-url": {
       "version": "14.2.0",
-      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
-      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5503,8 +4034,6 @@
     },
     "node_modules/data-view-buffer": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/data-view-buffer/-/data-view-buffer-1.0.2.tgz",
-      "integrity": "sha512-EmKO5V3OLXh1rtK2wgXRansaK1/mtVdTUEiEI0W8RkvgT05kfxaH29PliLnpLP73yYO6142Q72QNa8Wx/A5CqQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5522,8 +4051,6 @@
     },
     "node_modules/data-view-byte-length": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/data-view-byte-length/-/data-view-byte-length-1.0.2.tgz",
-      "integrity": "sha512-tuhGbE6CfTM9+5ANGf+oQb72Ky/0+s3xKUpHvShfiz2RxMFgFPjsXuRLBVMtvMs15awe45SRb83D6wH4ew6wlQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5541,8 +4068,6 @@
     },
     "node_modules/data-view-byte-offset": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/data-view-byte-offset/-/data-view-byte-offset-1.0.1.tgz",
-      "integrity": "sha512-BS8PfmtDGnrgYdOonGZQdLZslWIeCGFP9tpan0hi1Co2Zr2NKADsvGYA8XxuG/4UWgJ6Cjtv+YJnB6MM69QGlQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5586,8 +4111,6 @@
     },
     "node_modules/decamelize": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
-      "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5623,15 +4146,11 @@
     },
     "node_modules/decimal.js": {
       "version": "10.6.0",
-      "resolved": "https://registry.npmjs.org/decimal.js/-/decimal.js-10.6.0.tgz",
-      "integrity": "sha512-YpgQiITW3JXGntzdUmyUR1V812Hn8T1YVXhCu+wO3OpS4eU9l4YdD3qjyiKdV6mvV29zapkMeD390UVEf2lkUg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/decode-named-character-reference": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.2.0.tgz",
-      "integrity": "sha512-c6fcElNV6ShtZXmsgNgFFV5tVX2PaV4g+MOAkb8eXHvn6sryJBrZa9r0zV6+dtTyoCKxtDy5tyQ5ZwQuidtd+Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5644,8 +4163,6 @@
     },
     "node_modules/dedent": {
       "version": "1.7.0",
-      "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.7.0.tgz",
-      "integrity": "sha512-HGFtf8yhuhGhqO07SV79tRp+br4MnbdjeVxotpn1QBl30pcLLCQjX5b2295ll0fv8RKDKsmWYrl05usHM9CewQ==",
       "dev": true,
       "license": "MIT",
       "peerDependencies": {
@@ -5659,16 +4176,12 @@
     },
     "node_modules/deep-is": {
       "version": "0.1.4",
-      "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz",
-      "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/default-require-extensions": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/default-require-extensions/-/default-require-extensions-3.0.1.tgz",
-      "integrity": "sha512-eXTJmRbm2TIt9MgWTsOH1wEuhew6XGZcMeGKCtLedIg/NCsg1iBePXkceTdK4Fii7pzmN9tGsZhKzZ4h7O/fxw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5683,8 +4196,6 @@
     },
     "node_modules/define-data-property": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz",
-      "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5702,8 +4213,6 @@
     },
     "node_modules/define-properties": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.2.1.tgz",
-      "integrity": "sha512-8QmQKqEASLd5nx0U1B1okLElbUuuttJ/AnYmRXbbbGDWh6uS208EjD4Xqq/I9wK7u0v6O08XhTWnt5XtEbR6Dg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -5721,8 +4230,6 @@
     },
     "node_modules/delayed-stream": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
-      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5738,8 +4245,6 @@
     },
     "node_modules/dequal": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz",
-      "integrity": "sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5765,15 +4270,11 @@
     },
     "node_modules/discontinuous-range": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/discontinuous-range/-/discontinuous-range-1.0.0.tgz",
-      "integrity": "sha512-c68LpLbO+7kP/b1Hr1qs8/BJ09F5khZGTxqxZuhzxpmwJKOgRFHJWIb9/KmqnqHhLdO55aOxFH/EGBvUQbL/RQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/doctrine": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz",
-      "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -5845,8 +4346,6 @@
     },
     "node_modules/dot-prop": {
       "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/dot-prop/-/dot-prop-5.3.0.tgz",
-      "integrity": "sha512-QM8q3zDe58hqUqjraQOmzZ1LIH9SWQJTlEKCH4kJ2oQvLZk7RbQXvtDM2XEq3fwkV9CCvvH4LA0AV+ogFsBM2Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5858,8 +4357,6 @@
     },
     "node_modules/dunder-proto": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
-      "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5878,8 +4375,6 @@
     },
     "node_modules/electron-to-chromium": {
       "version": "1.5.222",
-      "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.222.tgz",
-      "integrity": "sha512-gA7psSwSwQRE60CEoLz6JBCQPIxNeuzB2nL8vE03GK/OHxlvykbLyeiumQy1iH5C2f3YbRAZpGCMT12a/9ih9w==",
       "dev": true,
       "license": "ISC"
     },
@@ -5925,8 +4420,6 @@
     },
     "node_modules/error-ex": {
       "version": "1.3.4",
-      "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.4.tgz",
-      "integrity": "sha512-sqQamAnR14VgCr1A618A3sGrygcpK+HEbenA/HiEAkkUwcZIIB/tgWqHFxWgOyDh4nB4JCRimh79dR5Ywc9MDQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5935,8 +4428,6 @@
     },
     "node_modules/es-abstract": {
       "version": "1.24.0",
-      "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz",
-      "integrity": "sha512-WSzPgsdLtTcQwm4CROfS5ju2Wa1QQcVeT37jFjYzdFz1r9ahadC8B8/a4qxJxM+09F18iumCdRmlr96ZYkQvEg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6005,8 +4496,6 @@
     },
     "node_modules/es-define-property": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
-      "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6015,8 +4504,6 @@
     },
     "node_modules/es-errors": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
-      "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6025,8 +4512,6 @@
     },
     "node_modules/es-object-atoms": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
-      "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6038,8 +4523,6 @@
     },
     "node_modules/es-set-tostringtag": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
-      "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6054,8 +4537,6 @@
     },
     "node_modules/es-shim-unscopables": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/es-shim-unscopables/-/es-shim-unscopables-1.1.0.tgz",
-      "integrity": "sha512-d9T8ucsEhh8Bi1woXCf+TIKDIROLG5WCkxg8geBCbvk22kzwC5G2OnXVMO6FUsvQlgUUXQ2itephWDLqDzbeCw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6068,8 +4549,6 @@
     },
     "node_modules/es-to-primitive": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.3.0.tgz",
-      "integrity": "sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6087,15 +4566,11 @@
     },
     "node_modules/es6-error": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/es6-error/-/es6-error-4.1.1.tgz",
-      "integrity": "sha512-Um/+FxMr9CISWh0bi5Zv0iOD+4cFh5qLeks1qhAopKVAJw3drgKbKySikp7wGhDL0HPeaja0P5ULZrxLkniUVg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/escalade": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
-      "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6104,8 +4579,6 @@
     },
     "node_modules/escape-string-regexp": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
-      "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6118,9 +4591,6 @@
     },
     "node_modules/eslint": {
       "version": "8.57.1",
-      "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz",
-      "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==",
-      "deprecated": "This version is no longer supported. Please see https://eslint.org/version-support for other options.",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6176,8 +4646,6 @@
     },
     "node_modules/eslint-import-resolver-node": {
       "version": "0.3.9",
-      "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.9.tgz",
-      "integrity": "sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6189,8 +4657,6 @@
     },
     "node_modules/eslint-import-resolver-node/node_modules/debug": {
       "version": "3.2.7",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
-      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6200,8 +4666,6 @@
     },
     "node_modules/eslint-module-utils": {
       "version": "2.12.1",
-      "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.1.tgz",
-      "integrity": "sha512-L8jSWTze7K2mTg0vos/RuLRS5soomksDPoJLXIslC7c8Wmut3bx7CPpJijDcBZtxQ5lrbUdM+s0OlNbz0DCDNw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6219,8 +4683,6 @@
     },
     "node_modules/eslint-module-utils/node_modules/debug": {
       "version": "3.2.7",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
-      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6230,8 +4692,6 @@
     },
     "node_modules/eslint-plugin-es": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-es/-/eslint-plugin-es-3.0.1.tgz",
-      "integrity": "sha512-GUmAsJaN4Fc7Gbtl8uOBlayo2DqhwWvEzykMHSCZHU3XdJ+NSzzZcVhXh3VxX5icqQ+oQdIEawXX8xkR3mIFmQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6251,8 +4711,6 @@
     },
     "node_modules/eslint-plugin-import": {
       "version": "2.32.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.32.0.tgz",
-      "integrity": "sha512-whOE1HFo/qJDyX4SnXzP4N6zOWn79WhnCUY/iDR0mPfQZO8wcYE4JClzI2oZrhBnnMUCBCHZhO6VQyoBU95mZA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6286,8 +4744,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6298,8 +4754,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/debug": {
       "version": "3.2.7",
-      "resolved": "https://registry.npmjs.org/debug/-/debug-3.2.7.tgz",
-      "integrity": "sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6309,8 +4763,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/doctrine": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-2.1.0.tgz",
-      "integrity": "sha512-35mSku4ZXK0vfCuHEDAwt55dg2jNajHZ1odvF+8SSr82EsZY4QmXfuWso8oEd8zRhVObSN18aM0CjSdoBX7zIw==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -6323,8 +4775,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6337,8 +4787,6 @@
     },
     "node_modules/eslint-plugin-import/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6348,8 +4796,6 @@
     },
     "node_modules/eslint-plugin-node": {
       "version": "11.1.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-node/-/eslint-plugin-node-11.1.0.tgz",
-      "integrity": "sha512-oUwtPJ1W0SKD0Tr+wqu92c5xuCeQqB3hSCHasn/ZgjFdA9iDGNkNf2Zi9ztY7X+hNuMib23LNGRm6+uN+KLE3g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6370,8 +4816,6 @@
     },
     "node_modules/eslint-plugin-node/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6382,8 +4826,6 @@
     },
     "node_modules/eslint-plugin-node/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6396,8 +4838,6 @@
     },
     "node_modules/eslint-plugin-node/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6407,8 +4847,6 @@
     },
     "node_modules/eslint-plugin-promise": {
       "version": "6.6.0",
-      "resolved": "https://registry.npmjs.org/eslint-plugin-promise/-/eslint-plugin-promise-6.6.0.tgz",
-      "integrity": "sha512-57Zzfw8G6+Gq7axm2Pdo3gW/Rx3h9Yywgn61uE/3elTCOePEHVrn2i5CdfBwA1BLK0Q0WqctICIUSqXZW/VprQ==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6424,8 +4862,6 @@
     },
     "node_modules/eslint-scope": {
       "version": "7.2.2",
-      "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-7.2.2.tgz",
-      "integrity": "sha512-dOt21O7lTMhDM+X9mB4GX+DZrZtCUJPL/wlcTqxyrx5IvO0IYtILdtrQGQp+8n5S0gwSVmOf9NQrjMOgfQZlIg==",
       "dev": true,
       "license": "BSD-2-Clause",
       "peer": true,
@@ -6442,8 +4878,6 @@
     },
     "node_modules/eslint-utils": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-2.1.0.tgz",
-      "integrity": "sha512-w94dQYoauyvlDc43XnGB8lU3Zt713vNChgt4EWwhXAP2XkBvndfxF0AgIqKOOasjPIPzj9JqgwkwbCYD0/V3Zg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6459,8 +4893,6 @@
     },
     "node_modules/eslint-utils/node_modules/eslint-visitor-keys": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz",
-      "integrity": "sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -6470,8 +4902,6 @@
     },
     "node_modules/eslint-visitor-keys": {
       "version": "3.4.3",
-      "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz",
-      "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -6484,8 +4914,6 @@
     },
     "node_modules/eslint/node_modules/ajv": {
       "version": "6.12.6",
-      "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
-      "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6502,8 +4930,6 @@
     },
     "node_modules/eslint/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6519,8 +4945,6 @@
     },
     "node_modules/eslint/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6531,8 +4955,6 @@
     },
     "node_modules/eslint/node_modules/chalk": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
-      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6549,8 +4971,6 @@
     },
     "node_modules/eslint/node_modules/find-up": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz",
-      "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6567,16 +4987,12 @@
     },
     "node_modules/eslint/node_modules/json-schema-traverse": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz",
-      "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/eslint/node_modules/locate-path": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz",
-      "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6592,8 +5008,6 @@
     },
     "node_modules/eslint/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6606,8 +5020,6 @@
     },
     "node_modules/eslint/node_modules/p-limit": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz",
-      "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6623,8 +5035,6 @@
     },
     "node_modules/eslint/node_modules/p-locate": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz",
-      "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6640,8 +5050,6 @@
     },
     "node_modules/eslint/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6651,8 +5059,6 @@
     },
     "node_modules/eslint/node_modules/supports-color": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
-      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6665,8 +5071,6 @@
     },
     "node_modules/eslint/node_modules/yocto-queue": {
       "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",
-      "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6679,8 +5083,6 @@
     },
     "node_modules/espree": {
       "version": "9.6.1",
-      "resolved": "https://registry.npmjs.org/espree/-/espree-9.6.1.tgz",
-      "integrity": "sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "peer": true,
@@ -6698,8 +5100,6 @@
     },
     "node_modules/esprima": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz",
-      "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==",
       "dev": true,
       "license": "BSD-2-Clause",
       "bin": {
@@ -6712,8 +5112,6 @@
     },
     "node_modules/esquery": {
       "version": "1.6.0",
-      "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz",
-      "integrity": "sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg==",
       "dev": true,
       "license": "BSD-3-Clause",
       "peer": true,
@@ -6726,8 +5124,6 @@
     },
     "node_modules/esrecurse": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
-      "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
       "dev": true,
       "license": "BSD-2-Clause",
       "peer": true,
@@ -6740,8 +5136,6 @@
     },
     "node_modules/estraverse": {
       "version": "5.3.0",
-      "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz",
-      "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "peer": true,
@@ -6751,8 +5145,6 @@
     },
     "node_modules/esutils": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz",
-      "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "peer": true,
@@ -6762,8 +5154,6 @@
     },
     "node_modules/events-to-array": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/events-to-array/-/events-to-array-1.1.2.tgz",
-      "integrity": "sha512-inRWzRY7nG+aXZxBzEqYKB3HPgwflZRopAjDCHv0whhRx+MTUr1ei0ICZUypdyE0HRm4L2d5VEcIqLD6yl+BFA==",
       "dev": true,
       "license": "ISC"
     },
@@ -6774,45 +5164,50 @@
     },
     "node_modules/extend": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
-      "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==",
       "dev": true,
       "license": "MIT"
     },
+    "node_modules/fast-content-type-parse": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-3.0.0.tgz",
+      "integrity": "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/fastify"
+        },
+        {
+          "type": "opencollective",
+          "url": "https://opencollective.com/fastify"
+        }
+      ],
+      "license": "MIT"
+    },
     "node_modules/fast-deep-equal": {
       "version": "3.1.3",
-      "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
-      "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-fifo": {
       "version": "1.3.2",
-      "resolved": "https://registry.npmjs.org/fast-fifo/-/fast-fifo-1.3.2.tgz",
-      "integrity": "sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/fast-json-stable-stringify": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz",
-      "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/fast-levenshtein": {
       "version": "2.0.6",
-      "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz",
-      "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/fast-uri": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz",
-      "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==",
       "dev": true,
       "funding": [
         {
@@ -6836,8 +5231,6 @@
     },
     "node_modules/fastq": {
       "version": "1.19.1",
-      "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz",
-      "integrity": "sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6873,8 +5266,6 @@
     },
     "node_modules/file-entry-cache": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-6.0.1.tgz",
-      "integrity": "sha512-7Gps/XWymbLk2QLYK4NzpMOrYjMhdIxXuIvy2QBsLE6ljuodKvdkWs/cpyJJ3CVIVpH0Oi1Hvg1ovbMzLdFBBg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6887,8 +5278,6 @@
     },
     "node_modules/fill-range": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz",
-      "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6900,8 +5289,6 @@
     },
     "node_modules/find-cache-dir": {
       "version": "3.3.2",
-      "resolved": "https://registry.npmjs.org/find-cache-dir/-/find-cache-dir-3.3.2.tgz",
-      "integrity": "sha512-wXZV5emFEjrridIgED11OoUKLxiYjAcqot/NJdAkOhlJ+vGzwhOAfcG5OX1jP+S0PcjEn8bdMJv+g2jwQ3Onig==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6918,8 +5305,6 @@
     },
     "node_modules/find-up": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-7.0.0.tgz",
-      "integrity": "sha512-YyZM99iHrqLKjmt4LJDj58KI+fYyufRLBSYcqycxf//KpBk9FoewoGX0450m9nB44qrZnovzC2oeP5hUibxc/g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6936,15 +5321,11 @@
     },
     "node_modules/findit": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/findit/-/findit-2.0.0.tgz",
-      "integrity": "sha512-ENZS237/Hr8bjczn5eKuBohLgaD0JyUd0arxretR1f9RO46vZHA1b2y0VorgGV3WaOT3c+78P8h7v4JGJ1i/rg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/flat-cache": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-3.2.0.tgz",
-      "integrity": "sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6959,8 +5340,6 @@
     },
     "node_modules/flat-cache/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -6971,9 +5350,6 @@
     },
     "node_modules/flat-cache/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -6994,8 +5370,6 @@
     },
     "node_modules/flat-cache/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -7008,9 +5382,6 @@
     },
     "node_modules/flat-cache/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -7026,16 +5397,12 @@
     },
     "node_modules/flatted": {
       "version": "3.3.3",
-      "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz",
-      "integrity": "sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg==",
       "dev": true,
       "license": "ISC",
       "peer": true
     },
     "node_modules/for-each": {
       "version": "0.3.5",
-      "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.5.tgz",
-      "integrity": "sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7066,8 +5433,6 @@
     },
     "node_modules/form-data": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
-      "integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7083,8 +5448,6 @@
     },
     "node_modules/fromentries": {
       "version": "1.3.2",
-      "resolved": "https://registry.npmjs.org/fromentries/-/fromentries-1.3.2.tgz",
-      "integrity": "sha512-cHEpEQHUg0f8XdtZCc2ZAhrHzKzT0MrFUTcvx+hfxYu7rGMDc5SKoXFh+n4YigxsHXRzc6OrCshdR1bWH6HHyg==",
       "dev": true,
       "funding": [
         {
@@ -7104,8 +5467,6 @@
     },
     "node_modules/front-matter": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/front-matter/-/front-matter-4.0.2.tgz",
-      "integrity": "sha512-I8ZuJ/qG92NWX8i5x1Y8qyj3vizhXS31OxjKDu3LKP+7/qBgfIKValiZIEwoVoJKUHlhWtYrktkxV1XsX+pPlg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7114,8 +5475,6 @@
     },
     "node_modules/front-matter/node_modules/argparse": {
       "version": "1.0.10",
-      "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
-      "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7124,8 +5483,6 @@
     },
     "node_modules/front-matter/node_modules/js-yaml": {
       "version": "3.14.1",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
-      "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7138,8 +5495,6 @@
     },
     "node_modules/fs-exists-cached": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/fs-exists-cached/-/fs-exists-cached-1.0.0.tgz",
-      "integrity": "sha512-kSxoARUDn4F2RPXX48UXnaFKwVU7Ivd/6qpzZL29MCDmr9sTvybv4gFCp+qaI4fM9m0z9fgz/yJvi56GAz+BZg==",
       "dev": true,
       "license": "ISC"
     },
@@ -7156,30 +5511,11 @@
     },
     "node_modules/fs.realpath": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz",
-      "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==",
       "dev": true,
       "license": "ISC"
     },
-    "node_modules/fsevents": {
-      "version": "2.3.3",
-      "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz",
-      "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==",
-      "dev": true,
-      "hasInstallScript": true,
-      "license": "MIT",
-      "optional": true,
-      "os": [
-        "darwin"
-      ],
-      "engines": {
-        "node": "^8.16.0 || ^10.6.0 || >=11.0.0"
-      }
-    },
     "node_modules/function-bind": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
-      "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7188,15 +5524,11 @@
     },
     "node_modules/function-loop": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/function-loop/-/function-loop-2.0.1.tgz",
-      "integrity": "sha512-ktIR+O6i/4h+j/ZhZJNdzeI4i9lEPeEK6UPR2EVyTVBqOwcU3Za9xYKLH64ZR9HmcROyRrOkizNyjjtWJzDDkQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/function.prototype.name": {
       "version": "1.1.8",
-      "resolved": "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.8.tgz",
-      "integrity": "sha512-e5iwyodOHhbMr/yNrc7fDYG4qlbIvI5gajyzPnb5TCwyhjApznQh1BMFou9b30SevY43gCJKXycoCBjMbsuW0Q==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7217,8 +5549,6 @@
     },
     "node_modules/functions-have-names": {
       "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/functions-have-names/-/functions-have-names-1.2.3.tgz",
-      "integrity": "sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7228,8 +5558,6 @@
     },
     "node_modules/gensync": {
       "version": "1.0.0-beta.2",
-      "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
-      "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7238,8 +5566,6 @@
     },
     "node_modules/get-caller-file": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
-      "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -7248,8 +5574,6 @@
     },
     "node_modules/get-intrinsic": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
-      "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7273,8 +5597,6 @@
     },
     "node_modules/get-package-type": {
       "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz",
-      "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7283,8 +5605,6 @@
     },
     "node_modules/get-proto": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
-      "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7297,8 +5617,6 @@
     },
     "node_modules/get-symbol-description": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/get-symbol-description/-/get-symbol-description-1.1.0.tgz",
-      "integrity": "sha512-w9UMqWwJxHNOvoNzSJ2oPF5wvYcvP7jUvYzhp67yEhTi17ZDBBC1z9pTdGuzjD+EFIqLSYRweZjqfiPzQ06Ebg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7316,8 +5634,6 @@
     },
     "node_modules/git-raw-commits": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/git-raw-commits/-/git-raw-commits-4.0.0.tgz",
-      "integrity": "sha512-ICsMM1Wk8xSGMowkOmPrzo2Fgmfo4bMHLNX6ytHjajRJUqvHOw/TFapQ+QG75c3X/tTDDhOSRPGC52dDbNM8FQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7334,8 +5650,6 @@
     },
     "node_modules/github-slugger": {
       "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-1.5.0.tgz",
-      "integrity": "sha512-wIh+gKBI9Nshz2o46B0B3f5k/W+WI9ZAv6y5Dn5WJ5SK1t0TnDimB4WE5rmTD05ZAIn8HALCZVmCsvj0w0v0lw==",
       "dev": true,
       "license": "ISC"
     },
@@ -7363,8 +5677,6 @@
     },
     "node_modules/glob-parent": {
       "version": "6.0.2",
-      "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz",
-      "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==",
       "dev": true,
       "license": "ISC",
       "peer": true,
@@ -7377,8 +5689,6 @@
     },
     "node_modules/global-directory": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/global-directory/-/global-directory-4.0.1.tgz",
-      "integrity": "sha512-wHTUcDUoZ1H5/0iVqEudYW4/kAlN5cZ3j/bXn0Dpbizl9iaUVeWSHqiOjsgk6OW2bkLclbBjzewBz6weQ1zA2Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7393,8 +5703,6 @@
     },
     "node_modules/global-directory/node_modules/ini": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.1.tgz",
-      "integrity": "sha512-QQnnxNyfvmHFIsj7gkPcYymR8Jdw/o7mp5ZFihxn6h8Ci6fh3Dx4E1gPjpQEpIuPo9XVNY/ZUwh4BPMjGyL01g==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -7403,8 +5711,6 @@
     },
     "node_modules/globals": {
       "version": "13.24.0",
-      "resolved": "https://registry.npmjs.org/globals/-/globals-13.24.0.tgz",
-      "integrity": "sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7420,8 +5726,6 @@
     },
     "node_modules/globalthis": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/globalthis/-/globalthis-1.0.4.tgz",
-      "integrity": "sha512-DpLKbNU4WylpxJykQujfCcwYWiV/Jhm50Goo0wrVILAv5jOr9d+H+UR3PhSCD2rCCEIg0uc+G+muBTwD54JhDQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7438,8 +5742,6 @@
     },
     "node_modules/gopd": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
-      "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7456,16 +5758,12 @@
     },
     "node_modules/graphemer": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz",
-      "integrity": "sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/groff-escape": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/groff-escape/-/groff-escape-2.0.1.tgz",
-      "integrity": "sha512-S0nG+mLFTu1buDKQsRlBtIxZU/dMvrdCURJg/zSLKpL333yi1Fs5bLUYk+v3pRYlc+qmHtukMAM2slB0AKFKAw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7507,8 +5805,6 @@
     },
     "node_modules/has-bigints": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/has-bigints/-/has-bigints-1.1.0.tgz",
-      "integrity": "sha512-R3pbpkcIqv2Pm3dUwgjclDRVmWpTJW2DcMzcIhEXEx1oh/CEMObMm3KLmRJOdvhM7o4uQBnwr8pzRK2sJWIqfg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7521,8 +5817,6 @@
     },
     "node_modules/has-flag": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
-      "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7531,8 +5825,6 @@
     },
     "node_modules/has-property-descriptors": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz",
-      "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7545,8 +5837,6 @@
     },
     "node_modules/has-proto": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.2.0.tgz",
-      "integrity": "sha512-KIL7eQPfHQRC8+XluaIw7BHUwwqL19bQn4hzNgdr+1wXoU0KKj6rufu47lhY7KbJR2C6T6+PfyN0Ea7wkSS+qQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7562,8 +5852,6 @@
     },
     "node_modules/has-symbols": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
-      "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7575,8 +5863,6 @@
     },
     "node_modules/has-tostringtag": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
-      "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7591,8 +5877,6 @@
     },
     "node_modules/hasha": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/hasha/-/hasha-5.2.2.tgz",
-      "integrity": "sha512-Hrp5vIK/xr5SkeN2onO32H0MgNZ0f17HRNH39WfL0SYUNOTZ5Lz1TJ8Pajo/87dYGEFlLMm7mIc/k/s6Bvz9HQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7608,8 +5892,6 @@
     },
     "node_modules/hasha/node_modules/type-fest": {
       "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
-      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -7618,8 +5900,6 @@
     },
     "node_modules/hasown": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
-      "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7631,8 +5911,6 @@
     },
     "node_modules/hast-util-from-parse5": {
       "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-7.1.2.tgz",
-      "integrity": "sha512-Nz7FfPBuljzsN3tCQ4kCBKqdNhQE2l0Tn+X1ubgKBPRoiDIu1mL08Cfw4k7q71+Duyaw7DXDN+VTAp4Vh3oCOw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7651,8 +5929,6 @@
     },
     "node_modules/hast-util-parse-selector": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
-      "integrity": "sha512-jdlwBjEexy1oGz0aJ2f4GKMaVKkA9jwjr4MjAAI22E5fM/TXVZHuS5OpONtdeIkRKqAaryQ2E9xNQxijoThSZA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7665,8 +5941,6 @@
     },
     "node_modules/hast-util-raw": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/hast-util-raw/-/hast-util-raw-7.2.3.tgz",
-      "integrity": "sha512-RujVQfVsOrxzPOPSzZFiwofMArbQke6DJjnFfceiEbFh7S05CbPt0cYN+A5YeD3pso0JQk6O1aHBnx9+Pm2uqg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7689,15 +5963,11 @@
     },
     "node_modules/hast-util-raw/node_modules/parse5": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/parse5/-/parse5-6.0.1.tgz",
-      "integrity": "sha512-Ofn/CTFzRGTTxwpNEs9PP93gXShHcTq255nzRYSKe8AkVpZY7e1fpmTfOyoIvjP5HG7Z2ZM7VS9PPhQGW2pOpw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/hast-util-raw/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7712,8 +5982,6 @@
     },
     "node_modules/hast-util-raw/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7727,8 +5995,6 @@
     },
     "node_modules/hast-util-to-html": {
       "version": "8.0.4",
-      "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-8.0.4.tgz",
-      "integrity": "sha512-4tpQTUOr9BMjtYyNlt0P50mH7xj0Ks2xpo8M943Vykljf99HW6EzulIoJP1N3eKOSScEHzyzi9dm7/cn0RfGwA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7751,8 +6017,6 @@
     },
     "node_modules/hast-util-to-parse5": {
       "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/hast-util-to-parse5/-/hast-util-to-parse5-7.1.0.tgz",
-      "integrity": "sha512-YNRgAJkH2Jky5ySkIqFXTQiaqcAtJyVE+D5lkN6CdtOqrnkLfGYYrEcKuHOJZlp+MwjSwuD3fZuawI+sic/RBw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7770,8 +6034,6 @@
     },
     "node_modules/hast-util-whitespace": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-2.0.1.tgz",
-      "integrity": "sha512-nAxA0v8+vXSBDt3AnRUNjyRIQ0rD+ntpbAp4LnPkumc5M9yUbSMa4XDU9Q6etY4f1Wp4bNgvc1yjiZtsTTrSng==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7781,8 +6043,6 @@
     },
     "node_modules/hastscript": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/hastscript/-/hastscript-7.2.0.tgz",
-      "integrity": "sha512-TtYPq24IldU8iKoJQqvZOuhi5CyCQRAbvDOX0x1eW6rsHSxa/1i2CCiptNTotGHJ3VoHRGmqiv6/D3q113ikkw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7820,8 +6080,6 @@
     },
     "node_modules/html-encoding-sniffer": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/html-encoding-sniffer/-/html-encoding-sniffer-4.0.0.tgz",
-      "integrity": "sha512-Y22oTqIU4uuPgEemfz7NDJz6OeKf12Lsu+QC+s3BVpda64lTiMYCyGwg5ki4vFxkMwQdeZDl2adZoqUgdFuTgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7833,15 +6091,11 @@
     },
     "node_modules/html-escaper": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz",
-      "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/html-void-elements": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-2.0.1.tgz",
-      "integrity": "sha512-0quDb7s97CfemeJAnW9wC0hw78MtW7NU3hqtCD75g2vFlDLt36llsYD7uB7SUzojLMP24N5IatXf7ylGXiGG9A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7892,8 +6146,6 @@
     },
     "node_modules/ignore": {
       "version": "5.3.2",
-      "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
-      "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -7914,8 +6166,6 @@
     },
     "node_modules/import-fresh": {
       "version": "3.3.1",
-      "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz",
-      "integrity": "sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7931,8 +6181,6 @@
     },
     "node_modules/import-fresh/node_modules/resolve-from": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz",
-      "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7941,8 +6189,6 @@
     },
     "node_modules/import-meta-resolve": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/import-meta-resolve/-/import-meta-resolve-4.2.0.tgz",
-      "integrity": "sha512-Iqv2fzaTQN28s/FwZAoFq0ZSs/7hMAHJVX+w8PZl3cY19Pxk6jFFalxQoIfW2826i/fDLXv8IiEZRIT0lDuWcg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7960,8 +6206,6 @@
     },
     "node_modules/indent-string": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/indent-string/-/indent-string-4.0.0.tgz",
-      "integrity": "sha512-EdDDZu4A2OyIK7Lr/2zG+w5jmbuk1DVBnEwREQvBzspBJkCEbRa8GxU1lghYcaGJCnRWibjDXlq779X1/y5xwg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7970,9 +6214,6 @@
     },
     "node_modules/inflight": {
       "version": "1.0.6",
-      "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz",
-      "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==",
-      "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -7982,8 +6223,6 @@
     },
     "node_modules/inherits": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz",
-      "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -8014,8 +6253,6 @@
     },
     "node_modules/internal-slot": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/internal-slot/-/internal-slot-1.1.0.tgz",
-      "integrity": "sha512-4gd7VpWNQNB4UKKCFFVcp1AVv+FMOgs9NKzjHKusc8jTMhd5eL1NqQqOpE0KzMds804/yHlglp3uxgluOqAPLw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8049,8 +6286,6 @@
     },
     "node_modules/is-array-buffer": {
       "version": "3.0.5",
-      "resolved": "https://registry.npmjs.org/is-array-buffer/-/is-array-buffer-3.0.5.tgz",
-      "integrity": "sha512-DDfANUiiG2wC1qawP66qlTugJeL5HyzMpfr8lLK+jMQirGzNod0B12cFB/9q838Ru27sBwfw78/rdoU7RERz6A==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8068,15 +6303,11 @@
     },
     "node_modules/is-arrayish": {
       "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz",
-      "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-async-function": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/is-async-function/-/is-async-function-2.1.1.tgz",
-      "integrity": "sha512-9dgM/cZBnNvjzaMYHVoxxfPj2QXt22Ev7SuuPrs+xav0ukGB0S6d4ydZdEiM48kLx5kDV+QBPrpVnFyefL8kkQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8096,8 +6327,6 @@
     },
     "node_modules/is-bigint": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.1.0.tgz",
-      "integrity": "sha512-n4ZT37wG78iz03xPRKJrHTdZbe3IicyucEtdRsV5yglwc3GyUfbAfpSeD0FJ41NbUNSt5wbhqfp1fS+BgnvDFQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8113,8 +6342,6 @@
     },
     "node_modules/is-binary-path": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz",
-      "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8126,8 +6353,6 @@
     },
     "node_modules/is-binary-path/node_modules/binary-extensions": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz",
-      "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8139,8 +6364,6 @@
     },
     "node_modules/is-boolean-object": {
       "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.2.2.tgz",
-      "integrity": "sha512-wa56o2/ElJMYqjCjGkXri7it5FbebW5usLw/nPmCMs5DeZ7eziSYZhSmPRn0txqeW4LnAmQQU7FgqLpsEFKM4A==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8157,8 +6380,6 @@
     },
     "node_modules/is-buffer": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz",
-      "integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==",
       "dev": true,
       "funding": [
         {
@@ -8181,8 +6402,6 @@
     },
     "node_modules/is-callable": {
       "version": "1.2.7",
-      "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
-      "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8206,8 +6425,6 @@
     },
     "node_modules/is-core-module": {
       "version": "2.16.1",
-      "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz",
-      "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8222,8 +6439,6 @@
     },
     "node_modules/is-data-view": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/is-data-view/-/is-data-view-1.0.2.tgz",
-      "integrity": "sha512-RKtWF8pGmS87i2D6gqQu/l7EYRlVdfzemCJN/P3UOs//x1QE7mfhvzHIApBTRf7axvT6DMGwSwBXYCT0nfB9xw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8241,8 +6456,6 @@
     },
     "node_modules/is-date-object": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.1.0.tgz",
-      "integrity": "sha512-PwwhEakHVKTdRNVOw+/Gyh0+MzlCl4R6qKvkhuvLtPMggI1WAHt9sOwZxQLSGpUaDnrdyDsomoRgNnCfKNSXXg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8259,8 +6472,6 @@
     },
     "node_modules/is-extglob": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz",
-      "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8269,8 +6480,6 @@
     },
     "node_modules/is-finalizationregistry": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-finalizationregistry/-/is-finalizationregistry-1.1.1.tgz",
-      "integrity": "sha512-1pC6N8qWJbWoPtEjgcL2xyhQOP491EQjeUo3qTKcmV8YSDDJrOepfG8pcC7h/QgnQHYSv0mJ3Z/ZWxmatVrysg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8294,8 +6503,6 @@
     },
     "node_modules/is-generator-function": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
-      "integrity": "sha512-nPUB5km40q9e8UfN/Zc24eLlzdSf9OfKByBw9CIdw4H1giPMeA0OIJvbchsCu4npfI2QcMVBsGEBHKZ7wLTWmQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8314,8 +6521,6 @@
     },
     "node_modules/is-glob": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz",
-      "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8325,17 +6530,8 @@
         "node": ">=0.10.0"
       }
     },
-    "node_modules/is-lambda": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/is-lambda/-/is-lambda-1.0.1.tgz",
-      "integrity": "sha512-z7CMFGNrENq5iFB9Bqo64Xk6Y9sg+epq1myIcdHaGnbMTYOxvzsEtdYqQUylB7LxfkvgrrjP32T6Ywciio9UIQ==",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/is-map": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.3.tgz",
-      "integrity": "sha512-1Qed0/Hr2m+YqxnM09CjA2d/i6YZNfF6R2oRAOj36eUdS6qIV/huPJNSEpKbupewFs+ZsJlxsjjPbc0/afW6Lw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8348,8 +6544,6 @@
     },
     "node_modules/is-negative-zero": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.3.tgz",
-      "integrity": "sha512-5KoIu2Ngpyek75jXodFvnafB6DJgr3u8uuK0LEZJjrU19DrMD3EVERaR8sjz8CCGgpZvxPl9SuE1GMVPFHx1mw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8362,8 +6556,6 @@
     },
     "node_modules/is-number": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz",
-      "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8372,8 +6564,6 @@
     },
     "node_modules/is-number-object": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.1.1.tgz",
-      "integrity": "sha512-lZhclumE1G6VYD8VHe35wFaIif+CTy5SJIi5+3y4psDgWu4wPDoBhF8NxUOinEc7pHgiTsT6MaBb92rKhhD+Xw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8390,8 +6580,6 @@
     },
     "node_modules/is-obj": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/is-obj/-/is-obj-2.0.0.tgz",
-      "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8400,8 +6588,6 @@
     },
     "node_modules/is-path-inside": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
-      "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8411,8 +6597,6 @@
     },
     "node_modules/is-plain-obj": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-4.1.0.tgz",
-      "integrity": "sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8422,27 +6606,13 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/is-plain-object": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/is-plain-object/-/is-plain-object-5.0.0.tgz",
-      "integrity": "sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=0.10.0"
-      }
-    },
     "node_modules/is-potential-custom-element-name": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/is-potential-custom-element-name/-/is-potential-custom-element-name-1.0.1.tgz",
-      "integrity": "sha512-bCYeRA2rVibKZd+s2625gGnGF/t7DSqDs4dP7CrLA1m7jKWz6pps0LpYLJN8Q64HtmPKJ1hrN3nzPNKFEKOUiQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-regex": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.2.1.tgz",
-      "integrity": "sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8461,8 +6631,6 @@
     },
     "node_modules/is-set": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.3.tgz",
-      "integrity": "sha512-iPAjerrse27/ygGLxw+EBR9agv9Y6uLeYVJMu+QNCoouJ1/1ri0mGrcWpfCqFZuzzx3WjtwxG098X+n4OuRkPg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8475,8 +6643,6 @@
     },
     "node_modules/is-shared-array-buffer": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.4.tgz",
-      "integrity": "sha512-ISWac8drv4ZGfwKl5slpHG9OwPNty4jOWPRIhBpxOoD+hqITiwuipOQ2bNthAzwA3B4fIjO4Nln74N0S9byq8A==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8492,8 +6658,6 @@
     },
     "node_modules/is-stream": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
-      "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8505,8 +6669,6 @@
     },
     "node_modules/is-string": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.1.1.tgz",
-      "integrity": "sha512-BtEeSsoaQjlSPBemMQIrY1MY0uM6vnS1g5fmufYOtnxLGUZM2178PKbhsk7Ffv58IX+ZtcvoGwccYsh0PglkAA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8523,8 +6685,6 @@
     },
     "node_modules/is-symbol": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.1.1.tgz",
-      "integrity": "sha512-9gGx6GTtCQM73BgmHQXfDmLtfjjTUDSyoxTCbp5WtoixAhfgsDirWIcVQ/IHpvI5Vgd5i/J5F7B9cN/WlVbC/w==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8542,8 +6702,6 @@
     },
     "node_modules/is-text-path": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/is-text-path/-/is-text-path-2.0.0.tgz",
-      "integrity": "sha512-+oDTluR6WEjdXEJMnC2z6A4FRwFoYuvShVVEGsS7ewc0UTi2QtAKMDJuL4BDEVt+5T7MjFo12RP8ghOM75oKJw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8555,8 +6713,6 @@
     },
     "node_modules/is-typed-array": {
       "version": "1.1.15",
-      "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz",
-      "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8572,15 +6728,11 @@
     },
     "node_modules/is-typedarray": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
-      "integrity": "sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/is-weakmap": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.2.tgz",
-      "integrity": "sha512-K5pXYOm9wqY1RgjpL3YTkF39tni1XajUIkawTLUo9EZEVUFga5gSQJF8nNS7ZwJQ02y+1YCNYcMh+HIf1ZqE+w==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8593,8 +6745,6 @@
     },
     "node_modules/is-weakref": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/is-weakref/-/is-weakref-1.1.1.tgz",
-      "integrity": "sha512-6i9mGWSlqzNMEqpCp93KwRS1uUOodk2OJ6b+sq7ZPDSy2WuI5NFIxp/254TytR8ftefexkWn5xNiHUNpPOfSew==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8610,8 +6760,6 @@
     },
     "node_modules/is-weakset": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.4.tgz",
-      "integrity": "sha512-mfcwb6IzQyOKTs84CQMrOwW4gQcaTOAWJ0zzJCl2WSPDrWk/OzDaImWFH3djXhb24g4eudZfLRozAvPGw4d9hQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -8628,8 +6776,6 @@
     },
     "node_modules/is-windows": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/is-windows/-/is-windows-1.0.2.tgz",
-      "integrity": "sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8638,8 +6784,6 @@
     },
     "node_modules/isarray": {
       "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
-      "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
       "dev": true,
       "license": "MIT",
       "peer": true
@@ -8654,8 +6798,6 @@
     },
     "node_modules/istanbul-lib-coverage": {
       "version": "3.2.2",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz",
-      "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -8664,8 +6806,6 @@
     },
     "node_modules/istanbul-lib-hook": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-hook/-/istanbul-lib-hook-3.0.0.tgz",
-      "integrity": "sha512-Pt/uge1Q9s+5VAZ+pCo16TYMWPBIl+oaNIjgLQxcX0itS6ueeaA+pEfThZpH8WxhFgCiEb8sAJY6MdUKgiIWaQ==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -8677,8 +6817,6 @@
     },
     "node_modules/istanbul-lib-instrument": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz",
-      "integrity": "sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -8693,8 +6831,6 @@
     },
     "node_modules/istanbul-lib-instrument/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -8703,8 +6839,6 @@
     },
     "node_modules/istanbul-lib-processinfo": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-processinfo/-/istanbul-lib-processinfo-2.0.3.tgz",
-      "integrity": "sha512-NkwHbo3E00oybX6NGJi6ar0B29vxyvNwoC7eJ4G4Yq28UfY758Hgn/heV8VRFhevPED4LXfFz0DQ8z/0kw9zMg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8721,8 +6855,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8732,9 +6864,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8754,8 +6883,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8767,8 +6894,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/p-map": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
-      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8780,9 +6905,6 @@
     },
     "node_modules/istanbul-lib-processinfo/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -8797,8 +6919,6 @@
     },
     "node_modules/istanbul-lib-report": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz",
-      "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -8812,8 +6932,6 @@
     },
     "node_modules/istanbul-lib-report/node_modules/make-dir": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz",
-      "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8828,8 +6946,6 @@
     },
     "node_modules/istanbul-lib-report/node_modules/supports-color": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
-      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8841,8 +6957,6 @@
     },
     "node_modules/istanbul-lib-source-maps": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz",
-      "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -8856,8 +6970,6 @@
     },
     "node_modules/istanbul-reports": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz",
-      "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -8884,8 +6996,6 @@
     },
     "node_modules/jiti": {
       "version": "2.5.1",
-      "resolved": "https://registry.npmjs.org/jiti/-/jiti-2.5.1.tgz",
-      "integrity": "sha512-twQoecYPiVA5K/h6SxtORw/Bs3ar+mLUtoPSc7iMXzQzK8d7eJ/R09wmTwAjiamETn1cXYPGfNnu7DMoHgu12w==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -8894,15 +7004,11 @@
     },
     "node_modules/js-tokens": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz",
-      "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/js-yaml": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
-      "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8914,8 +7020,6 @@
     },
     "node_modules/jsdom": {
       "version": "24.1.3",
-      "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.3.tgz",
-      "integrity": "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8955,8 +7059,6 @@
     },
     "node_modules/jsdom/node_modules/tr46": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/tr46/-/tr46-5.1.1.tgz",
-      "integrity": "sha512-hdF5ZgjTqgAntKkklYw0R03MG2x/bSzTtkxmIRw/sTNV8YXsCJ1tfLAX23lhxhHJlEf3CRCOCGGWw3vI3GaSPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8968,8 +7070,6 @@
     },
     "node_modules/jsdom/node_modules/webidl-conversions": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-7.0.0.tgz",
-      "integrity": "sha512-VwddBukDzu71offAQR975unBIGqfKZpM+8ZX6ySk8nYhVoo5CYaZyzt3YBvYtRtO+aoGlqxPg/B87NGVZ/fu6g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -8978,8 +7078,6 @@
     },
     "node_modules/jsdom/node_modules/whatwg-url": {
       "version": "14.2.0",
-      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-14.2.0.tgz",
-      "integrity": "sha512-De72GdQZzNTUBBChsXueQUnPKDkg/5A5zp7pFDuQAj5UFoENpiACU0wlCvzpAGnTkj++ihpKwKyYewn/XNUbKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9002,8 +7100,6 @@
     },
     "node_modules/jsesc": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz",
-      "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -9015,8 +7111,6 @@
     },
     "node_modules/json-buffer": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz",
-      "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==",
       "dev": true,
       "license": "MIT",
       "peer": true
@@ -9031,15 +7125,11 @@
     },
     "node_modules/json-schema-traverse": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-1.0.0.tgz",
-      "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/json-stable-stringify-without-jsonify": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz",
-      "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==",
       "dev": true,
       "license": "MIT",
       "peer": true
@@ -9053,15 +7143,11 @@
     },
     "node_modules/json-stringify-safe": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz",
-      "integrity": "sha512-ZClg6AaYvamvYEE82d3Iyd3vSSIjQ+odgjaTzRuO3s7toCdFKczob2i0zCh7JE8kWn17yvAWhUVxvqGwUalsRA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/json5": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz",
-      "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -9100,8 +7186,6 @@
     },
     "node_modules/JSONStream": {
       "version": "1.3.5",
-      "resolved": "https://registry.npmjs.org/JSONStream/-/JSONStream-1.3.5.tgz",
-      "integrity": "sha512-E+iruNOY8VV9s4JEbe1aNEm6MiszPRr/UfcHMz0TQh1BXSxHK+ASV1R6W4HpjBhSeS+54PIsAMCBmwD06LLsqQ==",
       "dev": true,
       "license": "(MIT OR Apache-2.0)",
       "dependencies": {
@@ -9117,8 +7201,6 @@
     },
     "node_modules/just-deep-map-values": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/just-deep-map-values/-/just-deep-map-values-1.2.0.tgz",
-      "integrity": "sha512-4vpPBzHHis4UW/EbH5kHZn0gJvKP+EiMpbjD669ZSxdwx+EoAlQLMbLR08SEtydcq/MjDPPtwGiPo9R893iHVA==",
       "dev": true,
       "license": "MIT"
     },
@@ -9134,29 +7216,21 @@
     },
     "node_modules/just-extend": {
       "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/just-extend/-/just-extend-6.2.0.tgz",
-      "integrity": "sha512-cYofQu2Xpom82S6qD778jBDpwvvy39s1l/hrYij2u9AMdQcGRpaBu6kY4mVhuno5kJVi1DAz4aiphA2WI1/OAw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-omit": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/just-omit/-/just-omit-2.2.0.tgz",
-      "integrity": "sha512-Js7+HxDOGcB3RhI38Mird/RgyMf3t0DAJFda1QWqqlAKTa36NeSYIufJXxrZUbysFTRcTOFcoMCiFK5FwCoI7Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/just-safe-set": {
       "version": "4.2.1",
-      "resolved": "https://registry.npmjs.org/just-safe-set/-/just-safe-set-4.2.1.tgz",
-      "integrity": "sha512-La5CP41Ycv52+E4g7w1sRV8XXk7Sp8a/TwWQAYQKn6RsQz1FD4Z/rDRRmqV3wJznS1MDF3YxK7BCudX1J8FxLg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/keyv": {
       "version": "4.5.4",
-      "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
-      "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -9176,8 +7250,6 @@
     },
     "node_modules/kleur": {
       "version": "4.1.5",
-      "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz",
-      "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9186,8 +7258,6 @@
     },
     "node_modules/leven": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/leven/-/leven-2.1.0.tgz",
-      "integrity": "sha512-nvVPLpIHUxCUoRLrFqTgSxXJ614d8AgQoWl7zPe/2VadE8+1dpU3LBhowRuBAcuwruWtOdD8oYC9jDNJjXDPyA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9196,8 +7266,6 @@
     },
     "node_modules/levn": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz",
-      "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -9251,8 +7319,6 @@
     },
     "node_modules/libtap": {
       "version": "1.4.1",
-      "resolved": "https://registry.npmjs.org/libtap/-/libtap-1.4.1.tgz",
-      "integrity": "sha512-S9v19shLTigoMn3c02V7LZ4t09zxmVP3r3RbEAwuHFYeKgF+ESFJxoQ0PMFKW4XdgQhcjVBEwDoopG6WROq/gw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9279,8 +7345,6 @@
     },
     "node_modules/libtap/node_modules/diff": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
-      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -9289,8 +7353,6 @@
     },
     "node_modules/libtap/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -9302,22 +7364,16 @@
     },
     "node_modules/libtap/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/lines-and-columns": {
       "version": "1.2.4",
-      "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz",
-      "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/locate-path": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-7.2.0.tgz",
-      "integrity": "sha512-gvVijfZvn7R+2qyPX8mAuKcFGDf6Nc61GdvGafQsHL0sBIxfKzA+usWn4GFC/bk+QdwPUD4kWFJLhElipq+0VA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9332,22 +7388,16 @@
     },
     "node_modules/lodash": {
       "version": "4.17.21",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
-      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.camelcase": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
-      "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.flattendeep": {
       "version": "4.4.0",
-      "resolved": "https://registry.npmjs.org/lodash.flattendeep/-/lodash.flattendeep-4.4.0.tgz",
-      "integrity": "sha512-uHaJFihxmJcEX3kT4I23ABqKKalJ/zDrDg0lsFtc1h+3uw49SIJ5beyhx5ExVRti3AvKoOJngIj7xz3oylPdWQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -9360,64 +7410,46 @@
     },
     "node_modules/lodash.isplainobject": {
       "version": "4.0.6",
-      "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz",
-      "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.kebabcase": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz",
-      "integrity": "sha512-N8XRTIMMqqDgSy4VLKPnJ/+hpGZN+PHQiJnSenYqPaVV/NCqEogTnAdZLQiGKhxX+JCs8waWq2t1XHWKOmlY8g==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.merge": {
       "version": "4.6.2",
-      "resolved": "https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz",
-      "integrity": "sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.mergewith": {
       "version": "4.6.2",
-      "resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.2.tgz",
-      "integrity": "sha512-GK3g5RPZWTRSeLSpgP8Xhra+pnjBC56q9FZYe1d5RN3TJ35dbkGy3YqBSMbyCrlbi+CM9Z3Jk5yTL7RCsqboyQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.snakecase": {
       "version": "4.1.1",
-      "resolved": "https://registry.npmjs.org/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz",
-      "integrity": "sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.startcase": {
       "version": "4.4.0",
-      "resolved": "https://registry.npmjs.org/lodash.startcase/-/lodash.startcase-4.4.0.tgz",
-      "integrity": "sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.uniq": {
       "version": "4.5.0",
-      "resolved": "https://registry.npmjs.org/lodash.uniq/-/lodash.uniq-4.5.0.tgz",
-      "integrity": "sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/lodash.upperfirst": {
       "version": "4.3.1",
-      "resolved": "https://registry.npmjs.org/lodash.upperfirst/-/lodash.upperfirst-4.3.1.tgz",
-      "integrity": "sha512-sReKOYJIJf74dhJONhU4e0/shzi1trVbSWDOhKYE5XV2O+H7Sb2Dihwuc7xWxVl+DgFPyTqIN3zMfT9cq5iWDg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/longest-streak": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/longest-streak/-/longest-streak-3.1.0.tgz",
-      "integrity": "sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9435,8 +7467,6 @@
     },
     "node_modules/make-dir": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz",
-      "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9451,8 +7481,6 @@
     },
     "node_modules/make-dir/node_modules/semver": {
       "version": "6.3.1",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz",
-      "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -9495,8 +7523,6 @@
     },
     "node_modules/markdown-table": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
-      "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -9506,8 +7532,6 @@
     },
     "node_modules/math-intrinsics": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
-      "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9516,8 +7540,6 @@
     },
     "node_modules/mdast-util-definitions": {
       "version": "5.1.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-5.1.2.tgz",
-      "integrity": "sha512-8SVPMuHqlPME/z3gqVwWY4zVXn8lqKv/pAhC57FuJ40ImXyBpmO5ukh98zB2v7Blql2FiHjHv9LVztSIqjY+MA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9532,8 +7554,6 @@
     },
     "node_modules/mdast-util-definitions/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9548,8 +7568,6 @@
     },
     "node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9563,8 +7581,6 @@
     },
     "node_modules/mdast-util-find-and-replace": {
       "version": "2.2.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-2.2.2.tgz",
-      "integrity": "sha512-MTtdFRz/eMDHXzeK6W3dO7mXUlF82Gom4y0oOgvHhh/HXZAGvIQDUvQ0SuUx+j2tv44b8xTHOm8K/9OoRFnXKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9580,8 +7596,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
-      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9593,8 +7607,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9608,8 +7620,6 @@
     },
     "node_modules/mdast-util-from-markdown": {
       "version": "1.3.1",
-      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz",
-      "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9633,8 +7643,6 @@
     },
     "node_modules/mdast-util-gfm": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-2.0.2.tgz",
-      "integrity": "sha512-qvZ608nBppZ4icQlhQQIAdc6S3Ffj9RGmzwUKUWuEICFnd1LVkN3EktF7ZHAgfcEdvZB5owU9tQgt99e2TlLjg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9653,8 +7661,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-1.0.3.tgz",
-      "integrity": "sha512-My8KJ57FYEy2W2LyNom4n3E7hKTuQk/0SES0u16tjA9Z3oFkF4RrC/hPAPgjlSpezsOvI8ObcXcElo92wn5IGA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9670,8 +7676,6 @@
     },
     "node_modules/mdast-util-gfm-footnote": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-1.0.2.tgz",
-      "integrity": "sha512-56D19KOGbE00uKVj3sgIykpwKL179QsVFwx/DCW0u/0+URsryacI4MAdNJl0dh+u2PSsD9FtxPFbHCzJ78qJFQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9686,8 +7690,6 @@
     },
     "node_modules/mdast-util-gfm-strikethrough": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-1.0.3.tgz",
-      "integrity": "sha512-DAPhYzTYrRcXdMjUtUjKvW9z/FNAMTdU0ORyMcbmkwYNbKocDpdk+PX1L1dQgOID/+vVs1uBQ7ElrBQfZ0cuiQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9701,8 +7703,6 @@
     },
     "node_modules/mdast-util-gfm-table": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-1.0.7.tgz",
-      "integrity": "sha512-jjcpmNnQvrmN5Vx7y7lEc2iIOEytYv7rTvu+MeyAsSHTASGCCRA79Igg2uKssgOs1i1po8s3plW0sTu1wkkLGg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9718,8 +7718,6 @@
     },
     "node_modules/mdast-util-gfm-task-list-item": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-1.0.2.tgz",
-      "integrity": "sha512-PFTA1gzfp1B1UaiJVyhJZA1rm0+Tzn690frc/L8vNX1Jop4STZgOE6bxUhnzdVSB+vm2GU1tIsuQcA9bxTQpMQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9733,8 +7731,6 @@
     },
     "node_modules/mdast-util-phrasing": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-3.0.1.tgz",
-      "integrity": "sha512-WmI1gTXUBJo4/ZmSk79Wcb2HcjPJBzM1nlI/OUWA8yk2X9ik3ffNbBGsU+09BFmXaL1IBb9fiuvq6/KMiNycSg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9748,8 +7744,6 @@
     },
     "node_modules/mdast-util-to-hast": {
       "version": "12.3.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-12.3.0.tgz",
-      "integrity": "sha512-pits93r8PhnIoU4Vy9bjW39M2jJ6/tdHyja9rrot9uujkN7UTU9SDnE6WNJz/IGyQk3XHX6yNNtrBH6cQzm8Hw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9769,8 +7763,6 @@
     },
     "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9785,8 +7777,6 @@
     },
     "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9800,8 +7790,6 @@
     },
     "node_modules/mdast-util-to-markdown": {
       "version": "1.5.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-1.5.0.tgz",
-      "integrity": "sha512-bbv7TPv/WC49thZPg3jXuqzuvI45IL2EVAr/KxF0BSdHsU0ceFHOmwQn6evxAh1GaoK/6GQ1wp4R4oW2+LFL/A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9821,8 +7809,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9837,8 +7823,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9852,8 +7836,6 @@
     },
     "node_modules/mdast-util-to-string": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz",
-      "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9866,8 +7848,6 @@
     },
     "node_modules/meow": {
       "version": "12.1.1",
-      "resolved": "https://registry.npmjs.org/meow/-/meow-12.1.1.tgz",
-      "integrity": "sha512-BhXM0Au22RwUneMPwSCnyhTOizdWoIEPU9sp0Aqa1PnDMR5Wv2FGXYDjuzJEIX+Eo2Rb8xuYe5jrnm5QowQFkw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -9879,8 +7859,6 @@
     },
     "node_modules/micromark": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz",
-      "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==",
       "dev": true,
       "funding": [
         {
@@ -9915,8 +7893,6 @@
     },
     "node_modules/micromark-core-commonmark": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz",
-      "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==",
       "dev": true,
       "funding": [
         {
@@ -9950,8 +7926,6 @@
     },
     "node_modules/micromark-extension-gfm": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-2.0.3.tgz",
-      "integrity": "sha512-vb9OoHqrhCmbRidQv/2+Bc6pkP0FrtlhurxZofvOEy5o8RtuuvTq+RQ1Vw5ZDNrVraQZu3HixESqbG+0iKk/MQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9971,8 +7945,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-1.0.5.tgz",
-      "integrity": "sha512-z3wJSLrDf8kRDOh2qBtoTRD53vJ+CWIyo7uyZuxf/JAbNJjiHsOpG1y5wxk8drtv3ETAHutCu6N3thkOOgueWg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9988,8 +7960,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-1.1.2.tgz",
-      "integrity": "sha512-Yxn7z7SxgyGWRNa4wzf8AhYYWNrwl5q1Z8ii+CSTTIqVkmGZF1CElX2JI8g5yGoM3GAman9/PVCUFUSJ0kB/8Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10009,8 +7979,6 @@
     },
     "node_modules/micromark-extension-gfm-strikethrough": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-1.0.7.tgz",
-      "integrity": "sha512-sX0FawVE1o3abGk3vRjOH50L5TTLr3b5XMqnP9YDRb34M0v5OoZhG+OHFz1OffZ9dlwgpTBKaT4XW/AsUVnSDw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10028,8 +7996,6 @@
     },
     "node_modules/micromark-extension-gfm-table": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-1.0.7.tgz",
-      "integrity": "sha512-3ZORTHtcSnMQEKtAOsBQ9/oHp9096pI/UvdPtN7ehKvrmZZ2+bbWhi0ln+I9drmwXMt5boocn6OlwQzNXeVeqw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10046,8 +8012,6 @@
     },
     "node_modules/micromark-extension-gfm-tagfilter": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-1.0.2.tgz",
-      "integrity": "sha512-5XWB9GbAUSHTn8VPU8/1DBXMuKYT5uOgEjJb8gN3mW0PNW5OPHpSdojoqf+iq1xo7vWzw/P8bAHY0n6ijpXF7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10060,8 +8024,6 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-1.0.5.tgz",
-      "integrity": "sha512-RMFXl2uQ0pNQy6Lun2YBYT9g9INXtWJULgbt01D/x8/6yJ2qpKyzdZD3pi6UIkzF++Da49xAelVKUeUMqd5eIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10078,8 +8040,6 @@
     },
     "node_modules/micromark-factory-destination": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz",
-      "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==",
       "dev": true,
       "funding": [
         {
@@ -10100,8 +8060,6 @@
     },
     "node_modules/micromark-factory-label": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz",
-      "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==",
       "dev": true,
       "funding": [
         {
@@ -10123,8 +8081,6 @@
     },
     "node_modules/micromark-factory-space": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz",
-      "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==",
       "dev": true,
       "funding": [
         {
@@ -10144,8 +8100,6 @@
     },
     "node_modules/micromark-factory-title": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz",
-      "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==",
       "dev": true,
       "funding": [
         {
@@ -10167,8 +8121,6 @@
     },
     "node_modules/micromark-factory-whitespace": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz",
-      "integrity": "sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==",
       "dev": true,
       "funding": [
         {
@@ -10190,8 +8142,6 @@
     },
     "node_modules/micromark-util-character": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz",
-      "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==",
       "dev": true,
       "funding": [
         {
@@ -10211,8 +8161,6 @@
     },
     "node_modules/micromark-util-chunked": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz",
-      "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==",
       "dev": true,
       "funding": [
         {
@@ -10231,8 +8179,6 @@
     },
     "node_modules/micromark-util-classify-character": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz",
-      "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==",
       "dev": true,
       "funding": [
         {
@@ -10253,8 +8199,6 @@
     },
     "node_modules/micromark-util-combine-extensions": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz",
-      "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==",
       "dev": true,
       "funding": [
         {
@@ -10274,8 +8218,6 @@
     },
     "node_modules/micromark-util-decode-numeric-character-reference": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz",
-      "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==",
       "dev": true,
       "funding": [
         {
@@ -10294,8 +8236,6 @@
     },
     "node_modules/micromark-util-decode-string": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz",
-      "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==",
       "dev": true,
       "funding": [
         {
@@ -10317,8 +8257,6 @@
     },
     "node_modules/micromark-util-encode": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz",
-      "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==",
       "dev": true,
       "funding": [
         {
@@ -10334,8 +8272,6 @@
     },
     "node_modules/micromark-util-html-tag-name": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz",
-      "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==",
       "dev": true,
       "funding": [
         {
@@ -10351,8 +8287,6 @@
     },
     "node_modules/micromark-util-normalize-identifier": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz",
-      "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==",
       "dev": true,
       "funding": [
         {
@@ -10371,8 +8305,6 @@
     },
     "node_modules/micromark-util-resolve-all": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz",
-      "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==",
       "dev": true,
       "funding": [
         {
@@ -10391,8 +8323,6 @@
     },
     "node_modules/micromark-util-sanitize-uri": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz",
-      "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==",
       "dev": true,
       "funding": [
         {
@@ -10413,8 +8343,6 @@
     },
     "node_modules/micromark-util-subtokenize": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz",
-      "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==",
       "dev": true,
       "funding": [
         {
@@ -10436,8 +8364,6 @@
     },
     "node_modules/micromark-util-symbol": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz",
-      "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==",
       "dev": true,
       "funding": [
         {
@@ -10453,8 +8379,6 @@
     },
     "node_modules/micromark-util-types": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz",
-      "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==",
       "dev": true,
       "funding": [
         {
@@ -10470,8 +8394,6 @@
     },
     "node_modules/mime-db": {
       "version": "1.52.0",
-      "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
-      "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10480,8 +8402,6 @@
     },
     "node_modules/mime-types": {
       "version": "2.1.35",
-      "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
-      "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10503,8 +8423,6 @@
     },
     "node_modules/minify-registry-metadata": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/minify-registry-metadata/-/minify-registry-metadata-4.0.0.tgz",
-      "integrity": "sha512-dWVW3TmMejEOKNwQ09iPCyVf6+kgtG9E3806YZYY4URy5o1dSb1cAn8aUe5zOgvOyrVKLfIHt9fSsXGyhwVsgA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -10527,8 +8445,6 @@
     },
     "node_modules/minimist": {
       "version": "1.2.8",
-      "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
-      "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -10698,8 +8614,6 @@
     },
     "node_modules/months": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/months/-/months-2.1.0.tgz",
-      "integrity": "sha512-2M9gdDB/uVt304/hJ3k2UIquJhOV5dRjp9BovHmZSINaRp7pdJuHXxOcuSjmJaKNomFyYyu0y3LBigdWiAUEmQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10708,15 +8622,11 @@
     },
     "node_modules/moo": {
       "version": "0.5.2",
-      "resolved": "https://registry.npmjs.org/moo/-/moo-0.5.2.tgz",
-      "integrity": "sha512-iSAJLHYKnX41mKcJKjqvnAN9sf0LMDTXDEvFv+ffuRR9a1MIuXLjMNL6EsnDHSkKLTWNqQQ5uo61P4EbU4NU+Q==",
       "dev": true,
       "license": "BSD-3-Clause"
     },
     "node_modules/mri": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz",
-      "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10738,16 +8648,12 @@
     },
     "node_modules/natural-compare": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz",
-      "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==",
       "dev": true,
       "license": "MIT",
       "peer": true
     },
     "node_modules/nearley": {
       "version": "2.20.1",
-      "resolved": "https://registry.npmjs.org/nearley/-/nearley-2.20.1.tgz",
-      "integrity": "sha512-+Mc8UaAebFzgV+KpI5n7DasuuQCHA89dmwm7JXw3TV43ukfNQ9DnBH3Mdb2g/I4Fdxc26pwimBWvjIw0UAILSQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10784,8 +8690,6 @@
     },
     "node_modules/nock": {
       "version": "13.5.6",
-      "resolved": "https://registry.npmjs.org/nock/-/nock-13.5.6.tgz",
-      "integrity": "sha512-o2zOYiCpzRqSzPj0Zt/dQ/DqZeYoaQ7TUonc/xUPjCGl9WeHpNbxgVvOquXYAaJzI0M9BXV3HTzG0p8IUAbBTQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10797,27 +8701,6 @@
         "node": ">= 10.13"
       }
     },
-    "node_modules/node-fetch": {
-      "version": "2.7.0",
-      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz",
-      "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "whatwg-url": "^5.0.0"
-      },
-      "engines": {
-        "node": "4.x || >=6.0.0"
-      },
-      "peerDependencies": {
-        "encoding": "^0.1.0"
-      },
-      "peerDependenciesMeta": {
-        "encoding": {
-          "optional": true
-        }
-      }
-    },
     "node_modules/node-gyp": {
       "version": "11.4.2",
       "inBundle": true,
@@ -11011,8 +8894,6 @@
     },
     "node_modules/node-preload": {
       "version": "0.2.1",
-      "resolved": "https://registry.npmjs.org/node-preload/-/node-preload-0.2.1.tgz",
-      "integrity": "sha512-RM5oyBy45cLEoHqCeh+MNuFAxO0vTFBLskvQbOKnEE7YTTSN4tbN8QWDIPQ6L+WvKsB/qLEGpYe2ZZ9d4W9OIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11024,8 +8905,6 @@
     },
     "node_modules/node-releases": {
       "version": "2.0.21",
-      "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.21.tgz",
-      "integrity": "sha512-5b0pgg78U3hwXkCM8Z9b2FJdPZlr9Psr9V2gQPESdGHqbntyFJKFW4r5TeWGFzafGY3hzs1JC62VEQMbl1JFkw==",
       "dev": true,
       "license": "MIT"
     },
@@ -11058,8 +8937,6 @@
     },
     "node_modules/normalize-path": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz",
-      "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11196,15 +9073,11 @@
     },
     "node_modules/nwsapi": {
       "version": "2.2.22",
-      "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.22.tgz",
-      "integrity": "sha512-ujSMe1OWVn55euT1ihwCI1ZcAaAU3nxUiDwfDQldc51ZXaB9m2AyOn6/jh1BLe2t/G8xd6uKG1UBF2aZJeg2SQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/nyc": {
       "version": "15.1.0",
-      "resolved": "https://registry.npmjs.org/nyc/-/nyc-15.1.0.tgz",
-      "integrity": "sha512-jMW04n9SxKdKi1ZMGhvUTHBN0EICCRkHemEoE5jm6mTYcqcdas0ATzgUgejlQUHMvpnOZqGB5Xxsv9KxJW1j8A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11245,8 +9118,6 @@
     },
     "node_modules/nyc/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11261,8 +9132,6 @@
     },
     "node_modules/nyc/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11272,8 +9141,6 @@
     },
     "node_modules/nyc/node_modules/cliui": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
-      "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11284,8 +9151,6 @@
     },
     "node_modules/nyc/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11298,8 +9163,6 @@
     },
     "node_modules/nyc/node_modules/foreground-child": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
-      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11312,9 +9175,6 @@
     },
     "node_modules/nyc/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11334,8 +9194,6 @@
     },
     "node_modules/nyc/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11347,8 +9205,6 @@
     },
     "node_modules/nyc/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11360,8 +9216,6 @@
     },
     "node_modules/nyc/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11376,8 +9230,6 @@
     },
     "node_modules/nyc/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11389,8 +9241,6 @@
     },
     "node_modules/nyc/node_modules/p-map": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/p-map/-/p-map-3.0.0.tgz",
-      "integrity": "sha512-d3qXVTF/s+W+CdJ5A29wywV2n8CQQYahlgz2bFiA+4eVNJbHJodPZ+/gXwPGh0bOqA+j8S+6+ckmvLGPk1QpxQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11402,8 +9252,6 @@
     },
     "node_modules/nyc/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11412,9 +9260,6 @@
     },
     "node_modules/nyc/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11429,15 +9274,11 @@
     },
     "node_modules/nyc/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/nyc/node_modules/wrap-ansi": {
       "version": "6.2.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
-      "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11451,15 +9292,11 @@
     },
     "node_modules/nyc/node_modules/y18n": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.3.tgz",
-      "integrity": "sha512-JKhqTOwSrqNA1NY5lSztJ1GrBiUodLMmIZuLiDaMRJ+itFd+ABVE8XBjOvIWL+rSqNDC74LCSFmlb/U4UZ4hJQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/nyc/node_modules/yargs": {
       "version": "15.4.1",
-      "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
-      "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11481,8 +9318,6 @@
     },
     "node_modules/nyc/node_modules/yargs-parser": {
       "version": "18.1.3",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
-      "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11495,8 +9330,6 @@
     },
     "node_modules/object-inspect": {
       "version": "1.13.4",
-      "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
-      "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11509,8 +9342,6 @@
     },
     "node_modules/object-keys": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
-      "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11520,8 +9351,6 @@
     },
     "node_modules/object.assign": {
       "version": "4.1.7",
-      "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.7.tgz",
-      "integrity": "sha512-nK28WOo+QIjBkDduTINE4JkF/UJJKyf2EJxvJKfblDpyg0Q+pkOHNTL0Qwy6NP6FhE/EnzV73BxxqcJaXY9anw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11542,8 +9371,6 @@
     },
     "node_modules/object.fromentries": {
       "version": "2.0.8",
-      "resolved": "https://registry.npmjs.org/object.fromentries/-/object.fromentries-2.0.8.tgz",
-      "integrity": "sha512-k6E21FzySsSK5a21KRADBd/NGneRegFO5pLHfdQLpRDETUNJueLXs3WCzyQ3tFRDYgbq3KHGXfTbi2bs8WQ6rQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11562,8 +9389,6 @@
     },
     "node_modules/object.groupby": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/object.groupby/-/object.groupby-1.0.3.tgz",
-      "integrity": "sha512-+Lhy3TQTuzXI5hevh8sBGqbmurHbbIjAi0Z4S63nthVLmLxfbj4T54a4CfZrXIrt9iP4mVAPYMo/v99taj3wjQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11578,8 +9403,6 @@
     },
     "node_modules/object.values": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.2.1.tgz",
-      "integrity": "sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11598,8 +9421,6 @@
     },
     "node_modules/once": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz",
-      "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11608,8 +9429,6 @@
     },
     "node_modules/opener": {
       "version": "1.5.2",
-      "resolved": "https://registry.npmjs.org/opener/-/opener-1.5.2.tgz",
-      "integrity": "sha512-ur5UIdyw5Y7yEj9wLzhqXiy6GZ3Mwx0yGI+5sMn2r0N0v3cKJvUmFH5yPP+WXh9e0xfyzyJX95D8l088DNFj7A==",
       "dev": true,
       "license": "(WTFPL OR MIT)",
       "bin": {
@@ -11618,8 +9437,6 @@
     },
     "node_modules/optionator": {
       "version": "0.9.4",
-      "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz",
-      "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11637,8 +9454,6 @@
     },
     "node_modules/own-keys": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/own-keys/-/own-keys-1.0.1.tgz",
-      "integrity": "sha512-qFOyK5PjiWZd+QQIh+1jhdb9LpxTF0qs7Pm8o5QHYZ0M3vKqSqzsZaEB6oWlxZ+q2sJBMI/Ktgd2N5ZwQoRHfg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -11656,15 +9471,11 @@
     },
     "node_modules/own-or": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/own-or/-/own-or-1.0.0.tgz",
-      "integrity": "sha512-NfZr5+Tdf6MB8UI9GLvKRs4cXY8/yB0w3xtt84xFdWy8hkGjn+JFc60VhzS/hFRfbyxFcGYMTjnF4Me+RbbqrA==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/own-or-env": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/own-or-env/-/own-or-env-1.0.2.tgz",
-      "integrity": "sha512-NQ7v0fliWtK7Lkb+WdFqe6ky9XAzYmlkXthQrBbzlYbmFKoAYbDDcwmOm6q8kOuwSRXW8bdL5ORksploUJmWgw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11673,8 +9484,6 @@
     },
     "node_modules/p-limit": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-4.0.0.tgz",
-      "integrity": "sha512-5b0R4txpzjPWVw/cXXUResoD4hb6U/x9BH08L7nw+GN1sezDzPdxeRvpc9c433fZhBan/wusjbCsqwqm4EIBIQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11689,8 +9498,6 @@
     },
     "node_modules/p-locate": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-6.0.0.tgz",
-      "integrity": "sha512-wPrq66Llhl7/4AGC6I+cqxT07LhXvWL08LNXz1fENOw0Ap4sRZZ/gZpTTJ5jpurzzzfS2W/Ge9BY3LgLjCShcw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11716,8 +9523,6 @@
     },
     "node_modules/p-try": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
-      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11726,8 +9531,6 @@
     },
     "node_modules/package-hash": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/package-hash/-/package-hash-4.0.0.tgz",
-      "integrity": "sha512-whdkPIooSu/bASggZ96BWVvZTRMOFxnyUG5PnTSGKoJE2gd5mbVNmR2Nj20QFzxYYgAXpoqC+AiXzl+UMRh7zQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -11809,8 +9612,6 @@
     },
     "node_modules/parent-module": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz",
-      "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11849,8 +9650,6 @@
     },
     "node_modules/parse-json": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz",
-      "integrity": "sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11868,15 +9667,11 @@
     },
     "node_modules/parse-json/node_modules/json-parse-even-better-errors": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz",
-      "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse5": {
       "version": "7.3.0",
-      "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
-      "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11888,8 +9683,6 @@
     },
     "node_modules/parse5/node_modules/entities": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
-      "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -11901,8 +9694,6 @@
     },
     "node_modules/path-exists": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-5.0.0.tgz",
-      "integrity": "sha512-RjhtfwJOxzcFmNOi6ltcbcu4Iu+FL3zEj83dk4kAS+fVpTxXLO1b38RvJgT/0QwvV/L3aY9TAnyv0EOqW4GoMQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11911,8 +9702,6 @@
     },
     "node_modules/path-is-absolute": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz",
-      "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -11929,8 +9718,6 @@
     },
     "node_modules/path-parse": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz",
-      "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==",
       "dev": true,
       "license": "MIT"
     },
@@ -11951,28 +9738,11 @@
     },
     "node_modules/picocolors": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz",
-      "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==",
       "dev": true,
       "license": "ISC"
     },
-    "node_modules/picomatch": {
-      "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
-      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=8.6"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/jonschlinkert"
-      }
-    },
     "node_modules/pkg-dir": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz",
-      "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11984,8 +9754,6 @@
     },
     "node_modules/pkg-dir/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11998,8 +9766,6 @@
     },
     "node_modules/pkg-dir/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12011,8 +9777,6 @@
     },
     "node_modules/pkg-dir/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12027,8 +9791,6 @@
     },
     "node_modules/pkg-dir/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12040,8 +9802,6 @@
     },
     "node_modules/pkg-dir/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12050,15 +9810,11 @@
     },
     "node_modules/platform": {
       "version": "1.3.6",
-      "resolved": "https://registry.npmjs.org/platform/-/platform-1.3.6.tgz",
-      "integrity": "sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/possible-typed-array-names": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.1.0.tgz",
-      "integrity": "sha512-/+5VFTchJDoVj3bhoqi6UeymcD00DAwb1nJwamzPvHEszJ4FpF6SNNbUbOS8yI56qHzdV8eK0qEfOSiodkTdxg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -12079,8 +9835,6 @@
     },
     "node_modules/prelude-ls": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
-      "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -12098,8 +9852,6 @@
     },
     "node_modules/process-on-spawn": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/process-on-spawn/-/process-on-spawn-1.1.0.tgz",
-      "integrity": "sha512-JOnOPQ/8TZgjs1JIH/m9ni7FfimjNa/PRx7y/Wb5qdItsnhO0jE4AT7fC0HjC28DUQWDr50dwSYZLdRMlqDq3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12130,13 +9882,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/promise-inflight": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/promise-inflight/-/promise-inflight-1.0.1.tgz",
-      "integrity": "sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==",
-      "dev": true,
-      "license": "ISC"
-    },
     "node_modules/promise-retry": {
       "version": "2.0.1",
       "inBundle": true,
@@ -12162,8 +9907,6 @@
     },
     "node_modules/propagate": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz",
-      "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12172,8 +9915,6 @@
     },
     "node_modules/property-information": {
       "version": "6.5.0",
-      "resolved": "https://registry.npmjs.org/property-information/-/property-information-6.5.0.tgz",
-      "integrity": "sha512-PgTgs/BlvHxOu8QuEN7wi5A0OmXaBcHpmCSTehcs6Uuu9IkDIEo13Hy7n898RHfrQ49vKCoGeWZSaAK01nwVig==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -12183,8 +9924,6 @@
     },
     "node_modules/proxy": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/proxy/-/proxy-2.2.0.tgz",
-      "integrity": "sha512-nYclNIWj9UpXbVJ3W5EXIYiGR88AKZoGt90kyh3zoOBY5QW+7bbtPvMFgKGD4VJmpS3UXQXtlGXSg3lRNLOFLg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12201,8 +9940,6 @@
     },
     "node_modules/psl": {
       "version": "1.15.0",
-      "resolved": "https://registry.npmjs.org/psl/-/psl-1.15.0.tgz",
-      "integrity": "sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12214,8 +9951,6 @@
     },
     "node_modules/punycode": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz",
-      "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12231,15 +9966,11 @@
     },
     "node_modules/querystringify": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/querystringify/-/querystringify-2.2.0.tgz",
-      "integrity": "sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz",
-      "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==",
       "dev": true,
       "funding": [
         {
@@ -12270,15 +10001,11 @@
     },
     "node_modules/railroad-diagrams": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/railroad-diagrams/-/railroad-diagrams-1.0.0.tgz",
-      "integrity": "sha512-cz93DjNeLY0idrCNOH6PviZGRN9GJhsdm9hpn1YCS879fj4W+x5IFJhhkRZcwVgMmFF7R82UA/7Oh+R8lLZg6A==",
       "dev": true,
       "license": "CC0-1.0"
     },
     "node_modules/randexp": {
       "version": "0.4.6",
-      "resolved": "https://registry.npmjs.org/randexp/-/randexp-0.4.6.tgz",
-      "integrity": "sha512-80WNmd9DA0tmZrw9qQa62GPPWfuXJknrmVmLcxvq4uZBdYqb1wYoKTmnlGUchvVWe0XiLupYkBoXVOxz3C8DYQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12307,40 +10034,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/read-package-json-fast": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/read-package-json-fast/-/read-package-json-fast-3.0.2.tgz",
-      "integrity": "sha512-0J+Msgym3vrLOUB3hzQCuZHII0xkNGCtz/HJH9xZshwv9DbDwkw1KaE3gx/e2J5rpEY5rtOy6cyhKOPrkP7FZw==",
-      "dev": true,
-      "license": "ISC",
-      "dependencies": {
-        "json-parse-even-better-errors": "^3.0.0",
-        "npm-normalize-package-bin": "^3.0.0"
-      },
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/read-package-json-fast/node_modules/json-parse-even-better-errors": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz",
-      "integrity": "sha512-fi0NG4bPjCHunUJffmLd0gxssIgkNmArMvis4iNah6Owg1MCJjWhEcDLmsK6iGkJq3tHwbDkTlce70/tmXN4cQ==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
-    "node_modules/read-package-json-fast/node_modules/npm-normalize-package-bin": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-3.0.1.tgz",
-      "integrity": "sha512-dMxCf+zZ+3zeQZXKxmyuCKlIDPGuv8EF940xbkC4kQVDTtqoh6rJFO+JTKSA6/Rwi0getWmtuy4Itup0AMcaDQ==",
-      "dev": true,
-      "license": "ISC",
-      "engines": {
-        "node": "^14.17.0 || ^16.13.0 || >=18.0.0"
-      }
-    },
     "node_modules/read-pkg": {
       "version": "5.2.0",
       "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz",
@@ -12493,8 +10186,6 @@
     },
     "node_modules/readdirp": {
       "version": "3.6.0",
-      "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz",
-      "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12504,6 +10195,19 @@
         "node": ">=8.10.0"
       }
     },
+    "node_modules/readdirp/node_modules/picomatch": {
+      "version": "2.3.1",
+      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
+      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=8.6"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/jonschlinkert"
+      }
+    },
     "node_modules/redent": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz",
@@ -12520,8 +10224,6 @@
     },
     "node_modules/reflect.getprototypeof": {
       "version": "1.0.10",
-      "resolved": "https://registry.npmjs.org/reflect.getprototypeof/-/reflect.getprototypeof-1.0.10.tgz",
-      "integrity": "sha512-00o4I+DVrefhv+nX0ulyi3biSHCPDe+yLv5o/p6d/UVlirijB8E16FtfwSAi4g3tcqrQ4lRAqQSoFEZJehYEcw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -12544,8 +10246,6 @@
     },
     "node_modules/regexp.prototype.flags": {
       "version": "1.5.4",
-      "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz",
-      "integrity": "sha512-dYqgNSZbDwkaJ2ceRd9ojCGjBq+mOm9LmtXnAnEGyHhN/5R7iDW2TRw3h+o/jCFxus3P2LfWIIiwowAjANm7IA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -12566,8 +10266,6 @@
     },
     "node_modules/regexpp": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-3.2.0.tgz",
-      "integrity": "sha512-pq2bWo9mVD43nbts2wGv17XLiNLya+GklZ8kaDLV2Z08gDCsGpnKn9BFMepvWuHCbyVvY7J5o5+BVvoQbmlJLg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -12580,8 +10278,6 @@
     },
     "node_modules/rehype-stringify": {
       "version": "9.0.4",
-      "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-9.0.4.tgz",
-      "integrity": "sha512-Uk5xu1YKdqobe5XpSskwPvo1XeHUUucWEQSl8hTrXt5selvca1e8K1EZ37E6YoZ4BT8BCqCdVfQW7OfHfthtVQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12595,23 +10291,23 @@
       }
     },
     "node_modules/release-please": {
-      "version": "16.15.0",
-      "resolved": "https://registry.npmjs.org/release-please/-/release-please-16.15.0.tgz",
-      "integrity": "sha512-C55PsUOMzAbPSrdqF/KKAqhaYVRGlarNNWgW/DyAsg15U4g/TkxXVpEZqAV1o38CoEoKhssnKTGnb5/eT4/DUw==",
+      "version": "17.1.2",
+      "resolved": "https://registry.npmjs.org/release-please/-/release-please-17.1.2.tgz",
+      "integrity": "sha512-5p+w8Ex4fcNUr4pLX+Dog5t8fXNLp4UK5tyr//bQ0Vn3g8mnzCErwpRStAimTZdxWNQrC0TeF2gG9gixerS7Hg==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
         "@conventional-commits/parser": "^0.4.1",
-        "@google-automations/git-file-utils": "^2.0.0",
+        "@google-automations/git-file-utils": "^3.0.0",
         "@iarna/toml": "^3.0.0",
-        "@octokit/graphql": "^5.0.0",
-        "@octokit/request": "^6.0.0",
-        "@octokit/request-error": "^3.0.0",
-        "@octokit/rest": "^19.0.0",
+        "@octokit/graphql": "^7.1.0",
+        "@octokit/request": "^8.3.1",
+        "@octokit/request-error": "^5.1.0",
+        "@octokit/rest": "^20.1.1",
         "@types/npm-package-arg": "^6.1.0",
         "@xmldom/xmldom": "^0.8.4",
         "chalk": "^4.0.0",
-        "code-suggester": "^4.2.0",
+        "code-suggester": "^5.0.0",
         "conventional-changelog-conventionalcommits": "^6.0.0",
         "conventional-changelog-writer": "^6.0.0",
         "conventional-commits-filter": "^3.0.0",
@@ -12640,6 +10336,173 @@
         "node": ">=18.0.0"
       }
     },
+    "node_modules/release-please/node_modules/@octokit/auth-token": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
+      "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/core": {
+      "version": "5.2.2",
+      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz",
+      "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/auth-token": "^4.0.0",
+        "@octokit/graphql": "^7.1.0",
+        "@octokit/request": "^8.4.1",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.0.0",
+        "before-after-hook": "^2.2.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/endpoint": {
+      "version": "9.0.6",
+      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
+      "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.1.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/graphql": {
+      "version": "7.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
+      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/request": "^8.4.1",
+        "@octokit/types": "^13.0.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/openapi-types": {
+      "version": "24.2.0",
+      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
+      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/release-please/node_modules/@octokit/plugin-paginate-rest": {
+      "version": "11.4.4-cjs.2",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz",
+      "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.7.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "5"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/plugin-request-log": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz",
+      "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "5"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/plugin-rest-endpoint-methods": {
+      "version": "13.3.2-cjs.1",
+      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz",
+      "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.8.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      },
+      "peerDependencies": {
+        "@octokit/core": "^5"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/request": {
+      "version": "8.4.1",
+      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz",
+      "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/endpoint": "^9.0.6",
+        "@octokit/request-error": "^5.1.1",
+        "@octokit/types": "^13.1.0",
+        "universal-user-agent": "^6.0.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/request-error": {
+      "version": "5.1.1",
+      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz",
+      "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/types": "^13.1.0",
+        "deprecation": "^2.0.0",
+        "once": "^1.4.0"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/rest": {
+      "version": "20.1.2",
+      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.1.2.tgz",
+      "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/core": "^5.0.2",
+        "@octokit/plugin-paginate-rest": "11.4.4-cjs.2",
+        "@octokit/plugin-request-log": "^4.0.0",
+        "@octokit/plugin-rest-endpoint-methods": "13.3.2-cjs.1"
+      },
+      "engines": {
+        "node": ">= 18"
+      }
+    },
+    "node_modules/release-please/node_modules/@octokit/types": {
+      "version": "13.10.0",
+      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
+      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@octokit/openapi-types": "^24.2.0"
+      }
+    },
     "node_modules/release-please/node_modules/ansi-styles": {
       "version": "4.3.0",
       "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
@@ -12656,6 +10519,13 @@
         "url": "https://github.com/chalk/ansi-styles?sponsor=1"
       }
     },
+    "node_modules/release-please/node_modules/before-after-hook": {
+      "version": "2.2.3",
+      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
+      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
+      "dev": true,
+      "license": "Apache-2.0"
+    },
     "node_modules/release-please/node_modules/chalk": {
       "version": "4.1.2",
       "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
@@ -12726,10 +10596,15 @@
         "node": ">=4.2.0"
       }
     },
+    "node_modules/release-please/node_modules/universal-user-agent": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
+      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+      "dev": true,
+      "license": "ISC"
+    },
     "node_modules/release-zalgo": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/release-zalgo/-/release-zalgo-1.0.0.tgz",
-      "integrity": "sha512-gUAyHVHPPC5wdqX/LG4LWtRYtgjxyX78oanFNTMMyFEfOqdC54s3eE82imuWKbOeqYht2CrNf64Qb8vgmmtZGA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -12741,8 +10616,6 @@
     },
     "node_modules/remark": {
       "version": "14.0.3",
-      "resolved": "https://registry.npmjs.org/remark/-/remark-14.0.3.tgz",
-      "integrity": "sha512-bfmJW1dmR2LvaMJuAnE88pZP9DktIFYXazkTfOIKZzi3Knk9lT0roItIA24ydOucI3bV/g/tXBA6hzqq3FV9Ew==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12758,8 +10631,6 @@
     },
     "node_modules/remark-gfm": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-3.0.1.tgz",
-      "integrity": "sha512-lEFDoi2PICJyNrACFOfDD3JlLkuSbOa5Wd8EPt06HUdptv8Gn0bxYTdbU/XXQ3swAPkEaGxxPN9cbnMHvVu1Ig==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12775,8 +10646,6 @@
     },
     "node_modules/remark-github": {
       "version": "11.2.4",
-      "resolved": "https://registry.npmjs.org/remark-github/-/remark-github-11.2.4.tgz",
-      "integrity": "sha512-GJjWFpwqdrHHhPWqMbb8+lqFLiHQ9pCzUmXmRrhMFXGpYov5n2ljsZzuWgXlfzArfQYkiKIZczA2I8IHYMHqCA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12793,8 +10662,6 @@
     },
     "node_modules/remark-github/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12809,8 +10676,6 @@
     },
     "node_modules/remark-github/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12824,8 +10689,6 @@
     },
     "node_modules/remark-man": {
       "version": "8.0.1",
-      "resolved": "https://registry.npmjs.org/remark-man/-/remark-man-8.0.1.tgz",
-      "integrity": "sha512-F/BbNaEF/QiZXoMiC43/qb8kAgGBKIS3yA+Br4CObgyoD+9Bioq1v+LmrLVbkwy9BErircQQ4J8yR2vFD34fBA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12847,8 +10710,6 @@
     },
     "node_modules/remark-man/node_modules/unist-util-visit": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-4.1.2.tgz",
-      "integrity": "sha512-MSd8OUGISqHdVvfY9TPhyK2VdUrPgxkUtWSuMHF6XAAFuL4LokseigBnZtPnJMu+FbynTkFNnFlyjxpVKujMRg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12863,8 +10724,6 @@
     },
     "node_modules/remark-man/node_modules/unist-util-visit-parents": {
       "version": "5.1.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-5.1.3.tgz",
-      "integrity": "sha512-x6+y8g7wWMyQhL1iZfhIPhDAs7Xwbn9nRosDXl7qoPTSCy0yNxnKc+hWokFifWQIDGi154rdUqKvbCa4+1kLhg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12878,8 +10737,6 @@
     },
     "node_modules/remark-parse": {
       "version": "10.0.2",
-      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-10.0.2.tgz",
-      "integrity": "sha512-3ydxgHa/ZQzG8LvC7jTXccARYDcRld3VfcgIIFs7bI6vbRSxJJmzgLEIIoYKyrfhaY+ujuWaf/PJiMZXoiCXgw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12894,8 +10751,6 @@
     },
     "node_modules/remark-rehype": {
       "version": "10.1.0",
-      "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-10.1.0.tgz",
-      "integrity": "sha512-EFmR5zppdBp0WQeDVZ/b66CWJipB2q2VLNFMabzDSGR66Z2fQii83G5gTBbgGEnEEA0QRussvrFHxk1HWGJskw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12911,8 +10766,6 @@
     },
     "node_modules/remark-stringify": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-10.0.3.tgz",
-      "integrity": "sha512-koyOzCMYoUHudypbj4XpnAKFbkddRMYZHwghnxd7ue5210WzGw6kOBwauJTRUMq16jsovXx8dYNvSSWP89kZ3A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12927,8 +10780,6 @@
     },
     "node_modules/require-directory": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
-      "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12937,8 +10788,6 @@
     },
     "node_modules/require-from-string": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/require-from-string/-/require-from-string-2.0.2.tgz",
-      "integrity": "sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -12947,8 +10796,6 @@
     },
     "node_modules/require-inject": {
       "version": "1.4.4",
-      "resolved": "https://registry.npmjs.org/require-inject/-/require-inject-1.4.4.tgz",
-      "integrity": "sha512-5Y5ctRN84+I4iOZO61gm+48tgP/6Hcd3VZydkaEM3MCuOvnHRsTJYQBOc01faI/Z9at5nsCAJVHhlfPA6Pc0Og==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -12957,22 +10804,16 @@
     },
     "node_modules/require-main-filename": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
-      "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/requires-port": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
-      "integrity": "sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/resolve": {
       "version": "1.22.10",
-      "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
-      "integrity": "sha512-NPRy+/ncIMeDlTAsuqwKIiferiawhefFJtkNSW0qZJEqMEb+qBt/77B/jGeeek+F0uOeN05CDa6HXbbIgtVX4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12992,8 +10833,6 @@
     },
     "node_modules/resolve-from": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz",
-      "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13002,8 +10841,6 @@
     },
     "node_modules/ret": {
       "version": "0.1.15",
-      "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz",
-      "integrity": "sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13020,8 +10857,6 @@
     },
     "node_modules/reusify": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz",
-      "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13032,8 +10867,6 @@
     },
     "node_modules/rimraf": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-6.0.1.tgz",
-      "integrity": "sha512-9dkvaxAsk/xNXSJzMgFqqMCuFgt2+KsOFek3TMLfo8NCPfWpBmqwyNn5Y+NX56QUYfCtsyhF3ayiboEoUmJk/A==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13052,15 +10885,11 @@
     },
     "node_modules/rrweb-cssom": {
       "version": "0.7.1",
-      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.7.1.tgz",
-      "integrity": "sha512-TrEMa7JGdVm0UThDJSx7ddw5nVm3UJS9o9CCIZ72B1vSyEZoziDqBYP3XIoi/12lKrJR8rE3jeFHMok2F/Mnsg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/run-parallel": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz",
-      "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==",
       "dev": true,
       "funding": [
         {
@@ -13084,8 +10913,6 @@
     },
     "node_modules/sade": {
       "version": "1.8.1",
-      "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz",
-      "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13097,8 +10924,6 @@
     },
     "node_modules/safe-array-concat": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/safe-array-concat/-/safe-array-concat-1.1.3.tgz",
-      "integrity": "sha512-AURm5f0jYEOydBj7VQlVvDrjeFgthDdEF5H1dP+6mNpoXOMo1quQqJ4wvJDyRZ9+pO3kGWoOdmV08cSv2aJV6Q==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13118,8 +10943,6 @@
     },
     "node_modules/safe-push-apply": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz",
-      "integrity": "sha512-iKE9w/Z7xCzUMIZqdBsp6pEQvwuEebH4vdpjcDWnyzaI6yl6O9FHvVpmGelvEHNsoY6wGblkxR6Zty/h00WiSA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13136,8 +10959,6 @@
     },
     "node_modules/safe-regex-test": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/safe-regex-test/-/safe-regex-test-1.1.0.tgz",
-      "integrity": "sha512-x/+Cz4YrimQxQccJf5mKEbIa1NzeCRNI5Ecl/ekmlYaampdNLPalVyIcCZNNH3MvmqBugV5TMYZXv0ljslUlaw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13161,8 +10982,6 @@
     },
     "node_modules/saxes": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/saxes/-/saxes-6.0.0.tgz",
-      "integrity": "sha512-xAg7SOnEhrm5zI3puOOKyy1OMcMlIJZYNJY7xLBwSze0UjhPLnWfj2GF2EpT0jmzaJKIWKHLsaSSajf35bcYnA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13174,8 +10993,6 @@
     },
     "node_modules/schemes": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/schemes/-/schemes-1.4.0.tgz",
-      "integrity": "sha512-ImFy9FbCsQlVgnE3TCWmLPCFnVzx0lHL/l+umHplDqAKd0dzFpnS6lFZIpagBlYhKwzVmlV36ec0Y1XTu8JBAQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13195,15 +11012,11 @@
     },
     "node_modules/set-blocking": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
-      "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/set-function-length": {
       "version": "1.2.2",
-      "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
-      "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13221,8 +11034,6 @@
     },
     "node_modules/set-function-name": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/set-function-name/-/set-function-name-2.0.2.tgz",
-      "integrity": "sha512-7PGFlmtwsEADb0WYyvCMa1t+yke6daIG4Wirafur5kcf+MhUnPms1UeR0CKQdTZD81yESwMHbtn+TR+dMviakQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13238,8 +11049,6 @@
     },
     "node_modules/set-proto": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/set-proto/-/set-proto-1.0.0.tgz",
-      "integrity": "sha512-RJRdvCo6IAnPdsvP/7m6bsQqNnn1FCBX5ZNtFL98MmFF/4xAIJTIg1YbHW5DC2W5SKZanrC6i4HsJqlajw/dZw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13273,8 +11082,6 @@
     },
     "node_modules/side-channel": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
-      "integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13294,8 +11101,6 @@
     },
     "node_modules/side-channel-list": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
-      "integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13312,8 +11117,6 @@
     },
     "node_modules/side-channel-map": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
-      "integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13332,8 +11135,6 @@
     },
     "node_modules/side-channel-weakmap": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
-      "integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13389,8 +11190,6 @@
     },
     "node_modules/smtp-address-parser": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/smtp-address-parser/-/smtp-address-parser-1.1.0.tgz",
-      "integrity": "sha512-Gz11jbNU0plrReU9Sj7fmshSBxxJ9ShdD2q4ktHIHo/rpTH6lFyQoYHYKINPJtPe8aHFnsbtW46Ls0tCCBsIZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13428,8 +11227,6 @@
     },
     "node_modules/source-map": {
       "version": "0.6.1",
-      "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
-      "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -13438,8 +11235,6 @@
     },
     "node_modules/source-map-support": {
       "version": "0.5.21",
-      "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.21.tgz",
-      "integrity": "sha512-uBHU3L3czsIyYXKX88fdrGovxdSCoTGDRZ6SYXtSRxLZUzHg5P/66Ht6uoUlHu9EZod+inXhKo3qQgwXUT/y1w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13449,8 +11244,6 @@
     },
     "node_modules/space-separated-tokens": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz",
-      "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -13460,8 +11253,6 @@
     },
     "node_modules/spawk": {
       "version": "1.8.2",
-      "resolved": "https://registry.npmjs.org/spawk/-/spawk-1.8.2.tgz",
-      "integrity": "sha512-3Dl+ekoMHRvXo+Xc3EUSnjySawnc9SpkaBuA3kU2wYiuSEAIYB4b5cGjvmq5olexBsO/fCLZUKHjSMQlzSU4Ww==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13470,8 +11261,6 @@
     },
     "node_modules/spawn-wrap": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/spawn-wrap/-/spawn-wrap-2.0.0.tgz",
-      "integrity": "sha512-EeajNjfN9zMnULLwhZZQU3GWBoFNkbngTUPfaawT4RkMiviTxcX0qfhVbGey39mfctfDHkWtuecgQ8NJcyQWHg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13488,8 +11277,6 @@
     },
     "node_modules/spawn-wrap/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13499,8 +11286,6 @@
     },
     "node_modules/spawn-wrap/node_modules/foreground-child": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
-      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13513,9 +11298,6 @@
     },
     "node_modules/spawn-wrap/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13535,15 +11317,11 @@
     },
     "node_modules/spawn-wrap/node_modules/isexe": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
-      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/spawn-wrap/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13555,9 +11333,6 @@
     },
     "node_modules/spawn-wrap/node_modules/rimraf": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
-      "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
-      "deprecated": "Rimraf versions prior to v4 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13572,15 +11347,11 @@
     },
     "node_modules/spawn-wrap/node_modules/signal-exit": {
       "version": "3.0.7",
-      "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz",
-      "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/spawn-wrap/node_modules/which": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
-      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -13645,8 +11416,6 @@
     },
     "node_modules/split2": {
       "version": "4.2.0",
-      "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz",
-      "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -13655,8 +11424,6 @@
     },
     "node_modules/sprintf-js": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
-      "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
       "dev": true,
       "license": "BSD-3-Clause"
     },
@@ -13673,8 +11440,6 @@
     },
     "node_modules/stack-utils": {
       "version": "2.0.6",
-      "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz",
-      "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13686,8 +11451,6 @@
     },
     "node_modules/stack-utils/node_modules/escape-string-regexp": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
-      "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13696,8 +11459,6 @@
     },
     "node_modules/stop-iteration-iterator": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/stop-iteration-iterator/-/stop-iteration-iterator-1.1.0.tgz",
-      "integrity": "sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13711,8 +11472,6 @@
     },
     "node_modules/streamx": {
       "version": "2.22.1",
-      "resolved": "https://registry.npmjs.org/streamx/-/streamx-2.22.1.tgz",
-      "integrity": "sha512-znKXEBxfatz2GBNK02kRnCXjV+AA4kjZIUxeWSr3UGirZMJfTE9uiwKHobnbgxWyL/JWro8tTq+vOqAK1/qbSA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13752,8 +11511,6 @@
     },
     "node_modules/string.prototype.trim": {
       "version": "1.2.10",
-      "resolved": "https://registry.npmjs.org/string.prototype.trim/-/string.prototype.trim-1.2.10.tgz",
-      "integrity": "sha512-Rs66F0P/1kedk5lyYyH9uBzuiI/kNRmwJAR9quK6VOtIpZ2G+hMZd+HQbbv25MgCA6gEffoMZYxlTod4WcdrKA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13775,8 +11532,6 @@
     },
     "node_modules/string.prototype.trimend": {
       "version": "1.0.9",
-      "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.9.tgz",
-      "integrity": "sha512-G7Ok5C6E/j4SGfyLCloXTrngQIQU3PWtXGst3yM7Bea9FRURf1S42ZHlZZtsNque2FN2PoUhfZXYLNWwEr4dLQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13795,8 +11550,6 @@
     },
     "node_modules/string.prototype.trimstart": {
       "version": "1.0.8",
-      "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.8.tgz",
-      "integrity": "sha512-UXSH262CSZY1tfu3G3Secr6uGLCFVPMhIqHjlgCUtCCcgihYc/xKs9djMTMUOb2j1mVSeU8EU6NWc/iQKU6Gfg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13814,8 +11567,6 @@
     },
     "node_modules/stringify-entities": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz",
-      "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13852,8 +11603,6 @@
     },
     "node_modules/strip-bom": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz",
-      "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13875,8 +11624,6 @@
     },
     "node_modules/strip-json-comments": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz",
-      "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -13900,8 +11647,6 @@
     },
     "node_modules/supports-preserve-symlinks-flag": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz",
-      "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -13913,15 +11658,11 @@
     },
     "node_modules/symbol-tree": {
       "version": "3.2.4",
-      "resolved": "https://registry.npmjs.org/symbol-tree/-/symbol-tree-3.2.4.tgz",
-      "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tap": {
       "version": "16.3.10",
-      "resolved": "https://registry.npmjs.org/tap/-/tap-16.3.10.tgz",
-      "integrity": "sha512-q5Am+PpGHS6JSjk/Zn4bCRBihmZVM15v/MYXUy60wenw5HDe7pVrevLCEoMEz7tuw6jaPOJJqni1y8apN23IGw==",
       "bundleDependencies": [
         "ink",
         "treport",
@@ -13991,8 +11732,6 @@
     },
     "node_modules/tap-mocha-reporter": {
       "version": "5.0.4",
-      "resolved": "https://registry.npmjs.org/tap-mocha-reporter/-/tap-mocha-reporter-5.0.4.tgz",
-      "integrity": "sha512-J+YMO8B7lq1O6Zxd/jeuG27vJ+Y4tLiRMKPSb7KR6FVh86k3Rq1TwYc2GKPyIjCbzzdMdReh3Vfz9L5cg1Z2Bw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14014,8 +11753,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14025,8 +11762,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/diff": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
-      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -14035,8 +11770,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/escape-string-regexp": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz",
-      "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14045,9 +11778,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14067,8 +11797,6 @@
     },
     "node_modules/tap-mocha-reporter/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14080,8 +11808,6 @@
     },
     "node_modules/tap-parser": {
       "version": "11.0.2",
-      "resolved": "https://registry.npmjs.org/tap-parser/-/tap-parser-11.0.2.tgz",
-      "integrity": "sha512-6qGlC956rcORw+fg7Fv1iCRAY8/bU9UabUAhs3mXRH6eRmVZcNPLheSXCYaVaYeSwx5xa/1HXZb1537YSvwDZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14098,8 +11824,6 @@
     },
     "node_modules/tap-parser/node_modules/minipass": {
       "version": "3.3.6",
-      "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz",
-      "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14111,8 +11835,6 @@
     },
     "node_modules/tap-yaml": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/tap-yaml/-/tap-yaml-1.0.2.tgz",
-      "integrity": "sha512-GegASpuqBnRNdT1U+yuUPZ8rEU64pL35WPBpCISWwff4dErS2/438barz7WFJl4Nzh3Y05tfPidZnH+GaV1wMg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14121,8 +11843,6 @@
     },
     "node_modules/tap-yaml/node_modules/yaml": {
       "version": "1.10.2",
-      "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.2.tgz",
-      "integrity": "sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -14894,8 +12614,6 @@
     },
     "node_modules/tap/node_modules/cliui": {
       "version": "7.0.4",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
-      "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -14904,6 +12622,60 @@
         "wrap-ansi": "^7.0.0"
       }
     },
+    "node_modules/tap/node_modules/cliui/node_modules/ansi-styles": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "color-convert": "^2.0.1"
+      },
+      "engines": {
+        "node": ">=8"
+      },
+      "funding": {
+        "url": "https://github.com/chalk/ansi-styles?sponsor=1"
+      }
+    },
+    "node_modules/tap/node_modules/cliui/node_modules/color-convert": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+      "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "color-name": "~1.1.4"
+      },
+      "engines": {
+        "node": ">=7.0.0"
+      }
+    },
+    "node_modules/tap/node_modules/cliui/node_modules/color-name": {
+      "version": "1.1.4",
+      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+      "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/tap/node_modules/cliui/node_modules/wrap-ansi": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
+      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "ansi-styles": "^4.0.0",
+        "string-width": "^4.1.0",
+        "strip-ansi": "^6.0.0"
+      },
+      "engines": {
+        "node": ">=10"
+      },
+      "funding": {
+        "url": "https://github.com/chalk/wrap-ansi?sponsor=1"
+      }
+    },
     "node_modules/tap/node_modules/code-excerpt": {
       "version": "3.0.0",
       "dev": true,
@@ -15062,8 +12834,6 @@
     },
     "node_modules/tap/node_modules/foreground-child": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-2.0.0.tgz",
-      "integrity": "sha512-dCIq9FpEcyQyXKCkyzmlPTFNgrCzPudOe+mhvJU5zAtlBnGVy2yKxtfsxK2tQBThwq225jcvBjpw1Gr40uzZCA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15288,15 +13058,11 @@
     },
     "node_modules/tap/node_modules/isexe": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz",
-      "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/tap/node_modules/jackspeak": {
       "version": "1.4.2",
-      "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-1.4.2.tgz",
-      "integrity": "sha512-GHeGTmnuaHnvS+ZctRB01bfxARuu9wW83ENbuiweu07SFcVlZrJpcshSre/keGT7YGBhLHg/+rXCNSrsEHKU4Q==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -15431,8 +13197,6 @@
     },
     "node_modules/tap/node_modules/mkdirp": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz",
-      "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -15977,8 +13741,6 @@
     },
     "node_modules/tap/node_modules/which": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
-      "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -16122,8 +13884,6 @@
     },
     "node_modules/tar-stream": {
       "version": "3.1.7",
-      "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-3.1.7.tgz",
-      "integrity": "sha512-qJj60CXt7IU1Ffyc3NJMjh6EkuCFej46zUqJ4J7pqYlThyd9bO0XBTmcOIhSzZJVWfsLks0+nle/j538YAW9RQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16198,8 +13958,6 @@
     },
     "node_modules/tcompare": {
       "version": "5.0.7",
-      "resolved": "https://registry.npmjs.org/tcompare/-/tcompare-5.0.7.tgz",
-      "integrity": "sha512-d9iddt6YYGgyxJw5bjsN7UJUO1kGOtjSlNy/4PoGYAjQS5pAT/hzIoLf1bZCw+uUxRmZJh7Yy1aA7xKVRT9B4w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -16211,8 +13969,6 @@
     },
     "node_modules/tcompare/node_modules/diff": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
-      "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -16221,8 +13977,6 @@
     },
     "node_modules/test-exclude": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz",
-      "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -16236,8 +13990,6 @@
     },
     "node_modules/test-exclude/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16247,9 +13999,6 @@
     },
     "node_modules/test-exclude/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -16269,8 +14018,6 @@
     },
     "node_modules/test-exclude/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -16282,8 +14029,6 @@
     },
     "node_modules/text-decoder": {
       "version": "1.2.3",
-      "resolved": "https://registry.npmjs.org/text-decoder/-/text-decoder-1.2.3.tgz",
-      "integrity": "sha512-3/o9z3X0X0fTupwsYvR03pJ/DjWuqqrfwBgTQzdWDiQSm9KitAyz/9WqsT2JQW7KV2m+bC2ol/zqpW37NHxLaA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -16292,8 +14037,6 @@
     },
     "node_modules/text-extensions": {
       "version": "2.4.0",
-      "resolved": "https://registry.npmjs.org/text-extensions/-/text-extensions-2.4.0.tgz",
-      "integrity": "sha512-te/NtwBwfiNRLf9Ijqx3T0nlqZiQ2XrrtBvu+cLL8ZRrGkO0NHTug8MYFKyoSrv/sHTaSKfilUkizV6XhxMJ3g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16310,8 +14053,6 @@
     },
     "node_modules/through": {
       "version": "2.3.8",
-      "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
-      "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==",
       "dev": true,
       "license": "MIT"
     },
@@ -16322,8 +14063,6 @@
     },
     "node_modules/tinyexec": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/tinyexec/-/tinyexec-1.0.1.tgz",
-      "integrity": "sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==",
       "dev": true,
       "license": "MIT"
     },
@@ -16344,8 +14083,6 @@
     },
     "node_modules/tinyglobby/node_modules/fdir": {
       "version": "6.5.0",
-      "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz",
-      "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -16362,8 +14099,6 @@
     },
     "node_modules/tinyglobby/node_modules/picomatch": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
-      "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -16375,8 +14110,6 @@
     },
     "node_modules/to-regex-range": {
       "version": "5.0.1",
-      "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz",
-      "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16388,8 +14121,6 @@
     },
     "node_modules/tough-cookie": {
       "version": "4.1.4",
-      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.1.4.tgz",
-      "integrity": "sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
@@ -16402,13 +14133,6 @@
         "node": ">=6"
       }
     },
-    "node_modules/tr46": {
-      "version": "0.0.3",
-      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
-      "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/treeverse": {
       "version": "3.0.0",
       "inBundle": true,
@@ -16419,8 +14143,6 @@
     },
     "node_modules/trim-lines": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/trim-lines/-/trim-lines-3.0.1.tgz",
-      "integrity": "sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16440,8 +14162,6 @@
     },
     "node_modules/trivial-deferred": {
       "version": "1.1.2",
-      "resolved": "https://registry.npmjs.org/trivial-deferred/-/trivial-deferred-1.1.2.tgz",
-      "integrity": "sha512-vDPiDBC3hyP6O4JrJYMImW3nl3c03Tsj9fEXc7Qc/XKa1O7gf5ZtFfIR/E0dun9SnDHdwjna1Z2rSzYgqpxh/g==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -16450,8 +14170,6 @@
     },
     "node_modules/trough": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/trough/-/trough-2.2.0.tgz",
-      "integrity": "sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16461,8 +14179,6 @@
     },
     "node_modules/tsconfig-paths": {
       "version": "3.15.0",
-      "resolved": "https://registry.npmjs.org/tsconfig-paths/-/tsconfig-paths-3.15.0.tgz",
-      "integrity": "sha512-2Ac2RgzDe/cn48GvOe3M+o82pEFewD3UPbyoUHHdKasHwJKjds4fLXWf/Ux5kATBKN20oaFGu+jbElp1pos0mg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16475,8 +14191,6 @@
     },
     "node_modules/tsconfig-paths/node_modules/json5": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/json5/-/json5-1.0.2.tgz",
-      "integrity": "sha512-g1MWMLBiz8FKi1e4w0UyVL3w+iJceWAFBAaBnnGKOpNa5f8TLktkbre1+s6oICydWAm+HRUGTmI+//xv2hvXYA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16489,8 +14203,6 @@
     },
     "node_modules/tsconfig-paths/node_modules/strip-bom": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
-      "integrity": "sha512-vavAMRXOgBVNF6nyEEmL3DBK19iRpDcoIwW+swQ+CbGiu7lju6t+JklA1MHweoWtadgt4ISVUsXLyDq34ddcwA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16513,8 +14225,6 @@
     },
     "node_modules/tunnel": {
       "version": "0.0.6",
-      "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz",
-      "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16523,8 +14233,6 @@
     },
     "node_modules/type-check": {
       "version": "0.4.0",
-      "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz",
-      "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16537,8 +14245,6 @@
     },
     "node_modules/type-fest": {
       "version": "0.20.2",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
-      "integrity": "sha512-Ne+eE4r0/iWnpAxD852z3A+N0Bt5RN//NjJwRd2VFHEmrywxf5vsZlh4R6lixl6B+wz/8d+maTSAkN1FIkI3LQ==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "peer": true,
@@ -16551,8 +14257,6 @@
     },
     "node_modules/typed-array-buffer": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/typed-array-buffer/-/typed-array-buffer-1.0.3.tgz",
-      "integrity": "sha512-nAYYwfY3qnzX30IkA6AQZjVbtK6duGontcQm1WSG1MD94YLqK0515GNApXkoxKOWMusVssAHWLh9SeaoefYFGw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16567,8 +14271,6 @@
     },
     "node_modules/typed-array-byte-length": {
       "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/typed-array-byte-length/-/typed-array-byte-length-1.0.3.tgz",
-      "integrity": "sha512-BaXgOuIxz8n8pIq3e7Atg/7s+DpiYrxn4vdot3w9KbnBhcRQq6o3xemQdIfynqSeXeDrF32x+WvfzmOjPiY9lg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16588,8 +14290,6 @@
     },
     "node_modules/typed-array-byte-offset": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/typed-array-byte-offset/-/typed-array-byte-offset-1.0.4.tgz",
-      "integrity": "sha512-bTlAFB/FBYMcuX81gbL4OcpH5PmlFHqlCCpAl8AlEzMz5k53oNDvN8p1PNOWLEmI2x4orp3raOFB51tv9X+MFQ==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16611,8 +14311,6 @@
     },
     "node_modules/typed-array-length": {
       "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/typed-array-length/-/typed-array-length-1.0.7.tgz",
-      "integrity": "sha512-3KS2b+kL7fsuk/eJZ7EQdnEmQoaho/r6KUef7hxvltNA5DR8NAUM+8wJMbJyZ4G9/7i3v5zPBIMN5aybAh2/Jg==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16633,8 +14331,6 @@
     },
     "node_modules/typedarray-to-buffer": {
       "version": "3.1.5",
-      "resolved": "https://registry.npmjs.org/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz",
-      "integrity": "sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16643,8 +14339,6 @@
     },
     "node_modules/typescript": {
       "version": "5.9.2",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.2.tgz",
-      "integrity": "sha512-CWBzXQrc/qOkhidw1OzBTQuYRbfyxDXJMVJ1XNwUHGROVmuaeiEm3OslpZ1RV96d7SKKjZKrSJu3+t/xlw3R9A==",
       "dev": true,
       "license": "Apache-2.0",
       "peer": true,
@@ -16672,8 +14366,6 @@
     },
     "node_modules/unbox-primitive": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/unbox-primitive/-/unbox-primitive-1.1.0.tgz",
-      "integrity": "sha512-nWJ91DjeOkej/TA8pXQ3myruKpKEYgqvpw9lz4OPHj/NWFNluYrjbz9j01CJ8yKQd2g4jFoOkINCTW2I5LEEyw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -16690,27 +14382,13 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/undici": {
-      "version": "6.21.3",
-      "resolved": "https://registry.npmjs.org/undici/-/undici-6.21.3.tgz",
-      "integrity": "sha512-gBLkYIlEnSp8pFbT64yFgGE6UIB9tAkhukC23PmMDCe5Nd+cRqKxSjw5y54MK2AZMgZfJWMaNE4nYUHgi1XEOw==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=18.17"
-      }
-    },
     "node_modules/undici-types": {
       "version": "7.12.0",
-      "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.12.0.tgz",
-      "integrity": "sha512-goOacqME2GYyOZZfb5Lgtu+1IDmAlAEu5xnD3+xTzS10hT0vzpf0SPjkXwAw9Jm+4n/mQGDP3LO8CPbYROeBfQ==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/unicode-length": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/unicode-length/-/unicode-length-2.1.0.tgz",
-      "integrity": "sha512-4bV582zTV9Q02RXBxSUMiuN/KHo5w4aTojuKTNT96DIKps/SIawFp7cS5Mu25VuY1AioGXrmYyzKZUzh8OqoUw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16719,8 +14397,6 @@
     },
     "node_modules/unicorn-magic": {
       "version": "0.1.0",
-      "resolved": "https://registry.npmjs.org/unicorn-magic/-/unicorn-magic-0.1.0.tgz",
-      "integrity": "sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16732,8 +14408,6 @@
     },
     "node_modules/unified": {
       "version": "10.1.2",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-10.1.2.tgz",
-      "integrity": "sha512-pUSWAi/RAnVy1Pif2kAoeWNBa3JVrx0MId2LASj8G+7AiHWoKZNTomq6LG326T68U7/e263X6fTdcXIy7XnF7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16774,8 +14448,6 @@
     },
     "node_modules/unist-util-generated": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-generated/-/unist-util-generated-2.0.1.tgz",
-      "integrity": "sha512-qF72kLmPxAw0oN2fwpWIqbXAVyEqUzDHMsbtPvOudIlUzXYFIeQIuxXQCRCFh22B7cixvU0MG7m3MW8FTq/S+A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -16785,8 +14457,6 @@
     },
     "node_modules/unist-util-is": {
       "version": "5.2.1",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-5.2.1.tgz",
-      "integrity": "sha512-u9njyyfEh43npf1M+yGKDGVPbY/JWEemg5nH05ncKPfi+kBbKBJoTdsogMu33uhytuLlv9y0O7GH7fEdwLdLQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16799,8 +14469,6 @@
     },
     "node_modules/unist-util-position": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-4.0.4.tgz",
-      "integrity": "sha512-kUBE91efOWfIVBo8xzh/uZQ7p9ffYRtUbMRZBNFYwf0RK8koUMx6dGUfwylLOKmaT2cs4wSW96QoYUSXAyEtpg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16813,8 +14481,6 @@
     },
     "node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz",
-      "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16879,16 +14545,14 @@
       }
     },
     "node_modules/universal-user-agent": {
-      "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
-      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz",
+      "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/universalify": {
       "version": "0.2.0",
-      "resolved": "https://registry.npmjs.org/universalify/-/universalify-0.2.0.tgz",
-      "integrity": "sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -16897,8 +14561,6 @@
     },
     "node_modules/update-browserslist-db": {
       "version": "1.1.3",
-      "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.3.tgz",
-      "integrity": "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==",
       "dev": true,
       "funding": [
         {
@@ -16928,8 +14590,6 @@
     },
     "node_modules/uri-js": {
       "version": "4.4.1",
-      "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz",
-      "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -16938,8 +14598,6 @@
     },
     "node_modules/url-parse": {
       "version": "1.5.10",
-      "resolved": "https://registry.npmjs.org/url-parse/-/url-parse-1.5.10.tgz",
-      "integrity": "sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16953,8 +14611,6 @@
     },
     "node_modules/uuid": {
       "version": "8.3.2",
-      "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
-      "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -16963,8 +14619,6 @@
     },
     "node_modules/uvu": {
       "version": "0.5.6",
-      "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz",
-      "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16982,8 +14636,6 @@
     },
     "node_modules/uvu/node_modules/diff": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
-      "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -17018,8 +14670,6 @@
     },
     "node_modules/vfile": {
       "version": "5.3.7",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-5.3.7.tgz",
-      "integrity": "sha512-r7qlzkgErKjobAmyNIkkSpizsFPYiUPuJb5pNW1RB4JcYVZhs4lIbVqk8XPk033CV/1z8ss5pkax8SuhGpcG8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -17035,8 +14685,6 @@
     },
     "node_modules/vfile-location": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/vfile-location/-/vfile-location-4.1.0.tgz",
-      "integrity": "sha512-YF23YMyASIIJXpktBa4vIGLJ5Gs88UB/XePgqPmTa7cDA+JeO3yclbpheQYCHjVHBn/yePzrXuygIL+xbvRYHw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -17050,8 +14698,6 @@
     },
     "node_modules/vfile-message": {
       "version": "3.1.4",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-3.1.4.tgz",
-      "integrity": "sha512-fa0Z6P8HUrQN4BZaX05SIVXic+7kE3b05PWAtPuYP9QLHsLKYR7/AlLW3NtOrpXRLeawpDLMsVkmk5DG0NXgWw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -17065,8 +14711,6 @@
     },
     "node_modules/w3c-xmlserializer": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/w3c-xmlserializer/-/w3c-xmlserializer-5.0.0.tgz",
-      "integrity": "sha512-o8qghlI8NZHU1lLPrpi2+Uq7abh4GGPpYANlalzWxyWteJOCsr/P+oPBA49TOLu5FTZO4d3F9MnWJfiMo4BkmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -17085,8 +14729,6 @@
     },
     "node_modules/web-namespaces": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/web-namespaces/-/web-namespaces-2.0.1.tgz",
-      "integrity": "sha512-bKr1DkiNa2krS7qxNtdrtHAmzuYGFQLiQ13TsorsdT6ULTkPLKuu5+GsFpDlg6JFjUTwX2DyhMPG2be8uPrqsQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -17094,17 +14736,8 @@
         "url": "https://github.com/sponsors/wooorm"
       }
     },
-    "node_modules/webidl-conversions": {
-      "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
-      "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
-      "dev": true,
-      "license": "BSD-2-Clause"
-    },
     "node_modules/whatwg-encoding": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
-      "integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -17116,25 +14749,12 @@
     },
     "node_modules/whatwg-mimetype": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
-      "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">=18"
       }
     },
-    "node_modules/whatwg-url": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
-      "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "tr46": "~0.0.3",
-        "webidl-conversions": "^3.0.0"
-      }
-    },
     "node_modules/which": {
       "version": "5.0.0",
       "inBundle": true,
@@ -17151,8 +14771,6 @@
     },
     "node_modules/which-boxed-primitive": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.1.1.tgz",
-      "integrity": "sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -17172,8 +14790,6 @@
     },
     "node_modules/which-builtin-type": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/which-builtin-type/-/which-builtin-type-1.2.1.tgz",
-      "integrity": "sha512-6iBczoX+kDQ7a3+YJBnh3T+KZRxM/iYNPXicqk66/Qfm1b93iu+yOImkg0zHbj5LNOcNv1TEADiZ0xa34B4q6Q==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -17201,8 +14817,6 @@
     },
     "node_modules/which-collection": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.2.tgz",
-      "integrity": "sha512-K4jVyjnBdgvc86Y6BkaLZEN933SwYOuBFkdmBu9ZfkcAbdVbpITnDmjvZ/aQjRXQrv5EPkTnD1s39GiiqbngCw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -17221,15 +14835,11 @@
     },
     "node_modules/which-module": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.1.tgz",
-      "integrity": "sha512-iBdZ57RDvnOR9AGBhML2vFZf7h8vmBjhoaZqODJBFWHVtKkDmKuHai3cx5PgVMrX5YDNp27AofYbAwctSS+vhQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/which-typed-array": {
       "version": "1.1.19",
-      "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz",
-      "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -17251,8 +14861,6 @@
     },
     "node_modules/word-wrap": {
       "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz",
-      "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==",
       "dev": true,
       "license": "MIT",
       "peer": true,
@@ -17362,8 +14970,6 @@
     },
     "node_modules/wrappy": {
       "version": "1.0.2",
-      "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz",
-      "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -17380,8 +14986,6 @@
     },
     "node_modules/ws": {
       "version": "8.18.3",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-8.18.3.tgz",
-      "integrity": "sha512-PEIGCY5tSlUt50cqyMXfCzX+oOPqN0vuGqWzbcJ2xvnkzkq46oOpz7dQaTDBdfICb4N14+GARUDw2XV2N4tvzg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -17402,8 +15006,6 @@
     },
     "node_modules/xml-name-validator": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/xml-name-validator/-/xml-name-validator-5.0.0.tgz",
-      "integrity": "sha512-EvGK8EJ3DhaHfbRlETOWAS5pO9MZITeauHKJyb8wyajUfQUenkIg2MvLDTZ4T/TgIcm3HU0TFBgWWboAZ30UHg==",
       "dev": true,
       "license": "Apache-2.0",
       "engines": {
@@ -17412,8 +15014,6 @@
     },
     "node_modules/xmlchars": {
       "version": "2.2.0",
-      "resolved": "https://registry.npmjs.org/xmlchars/-/xmlchars-2.2.0.tgz",
-      "integrity": "sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw==",
       "dev": true,
       "license": "MIT"
     },
@@ -17429,8 +15029,6 @@
     },
     "node_modules/y18n": {
       "version": "5.0.8",
-      "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
-      "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -17444,8 +15042,6 @@
     },
     "node_modules/yaml": {
       "version": "2.8.1",
-      "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz",
-      "integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -17457,8 +15053,6 @@
     },
     "node_modules/yargs": {
       "version": "17.7.2",
-      "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz",
-      "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -17476,8 +15070,6 @@
     },
     "node_modules/yargs-parser": {
       "version": "21.1.1",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz",
-      "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -17486,8 +15078,6 @@
     },
     "node_modules/yocto-queue": {
       "version": "1.2.1",
-      "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.2.1.tgz",
-      "integrity": "sha512-AyeEbWOu/TAXdxlV9wmGcR0+yh2j3vYPGOECcIj2S7MkrLyC7ne+oye2BKTItt0ii2PHk4cDy+95+LshzbXnGg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -17499,8 +15089,6 @@
     },
     "node_modules/zwitch": {
       "version": "2.0.4",
-      "resolved": "https://registry.npmjs.org/zwitch/-/zwitch-2.0.4.tgz",
-      "integrity": "sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -17516,7 +15104,7 @@
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-registry": "^1.0.0",
         "@npmcli/promise-spawn": "^8.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "proxy": "^2.1.1",
         "rimraf": "^6.0.1",
         "tap": "^16.3.8",
@@ -17571,7 +15159,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "benchmark": "^2.1.4",
         "minify-registry-metadata": "^4.0.0",
         "nock": "^13.3.3",
@@ -17600,7 +15188,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-globals": "^1.0.0",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -17617,7 +15205,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -17639,7 +15227,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -17676,7 +15264,7 @@
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "bin-links": "^5.0.0",
         "chalk": "^5.2.0",
         "just-extend": "^6.2.0",
@@ -17695,7 +15283,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -17711,7 +15299,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "minipass": "^7.1.1",
         "nock": "^13.3.3",
         "tap": "^16.3.8"
@@ -17731,7 +15319,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "nock": "^13.3.3",
         "spawk": "^1.7.1",
         "tap": "^16.3.8"
@@ -17757,7 +15345,7 @@
         "@npmcli/eslint-config": "^5.0.1",
         "@npmcli/mock-globals": "^1.0.0",
         "@npmcli/mock-registry": "^1.0.0",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "tap": "^16.3.8"
       },
       "engines": {
@@ -17772,7 +15360,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
@@ -17789,7 +15377,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "nock": "^13.3.3",
         "tap": "^16.3.8"
       },
@@ -17809,7 +15397,7 @@
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
-        "@npmcli/template-oss": "4.24.4",
+        "@npmcli/template-oss": "4.25.1",
         "require-inject": "^1.4.4",
         "tap": "^16.3.8"
       },
diff --git a/package.json b/package.json
index f94232a060051..b1130b5891c7c 100644
--- a/package.json
+++ b/package.json
@@ -192,7 +192,7 @@
     "@npmcli/git": "^7.0.0",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "@tufjs/repo-mock": "^4.0.0",
     "ajv": "^8.12.0",
     "ajv-formats": "^2.1.1",
@@ -250,7 +250,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "./scripts/template-oss/root.js"
   },
   "license": "Artistic-2.0",
diff --git a/smoke-tests/package.json b/smoke-tests/package.json
index 3bbfff3742068..11d61b66a53d1 100644
--- a/smoke-tests/package.json
+++ b/smoke-tests/package.json
@@ -22,7 +22,7 @@
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-registry": "^1.0.0",
     "@npmcli/promise-spawn": "^8.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "proxy": "^2.1.1",
     "rimraf": "^6.0.1",
     "tap": "^16.3.8",
@@ -32,7 +32,7 @@
   "license": "ISC",
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/arborist/package.json b/workspaces/arborist/package.json
index 3788403162f0c..007ac6883064f 100644
--- a/workspaces/arborist/package.json
+++ b/workspaces/arborist/package.json
@@ -40,7 +40,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "benchmark": "^2.1.4",
     "minify-registry-metadata": "^4.0.0",
     "nock": "^13.3.3",
@@ -92,7 +92,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   }
 }
diff --git a/workspaces/config/package.json b/workspaces/config/package.json
index 6db1b77174a9b..7b25431171c3b 100644
--- a/workspaces/config/package.json
+++ b/workspaces/config/package.json
@@ -33,7 +33,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-globals": "^1.0.0",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "tap": "^16.3.8"
   },
   "dependencies": {
@@ -51,7 +51,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   }
 }
diff --git a/workspaces/libnpmaccess/package.json b/workspaces/libnpmaccess/package.json
index c4f81159c6e0d..98a991312ea21 100644
--- a/workspaces/libnpmaccess/package.json
+++ b/workspaces/libnpmaccess/package.json
@@ -18,7 +18,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "tap": "^16.3.8"
   },
   "repository": {
@@ -41,7 +41,7 @@
   ],
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index f04552f4f3c9e..4c4901b9c9764 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -43,7 +43,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "tap": "^16.3.8"
   },
   "dependencies": {
@@ -58,7 +58,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmexec/package.json b/workspaces/libnpmexec/package.json
index 2acf608ef3858..3485c945c3036 100644
--- a/workspaces/libnpmexec/package.json
+++ b/workspaces/libnpmexec/package.json
@@ -52,7 +52,7 @@
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "bin-links": "^5.0.0",
     "chalk": "^5.2.0",
     "just-extend": "^6.2.0",
@@ -75,7 +75,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   }
 }
diff --git a/workspaces/libnpmfund/package.json b/workspaces/libnpmfund/package.json
index 10c769275c499..aa94d7dbf7bf7 100644
--- a/workspaces/libnpmfund/package.json
+++ b/workspaces/libnpmfund/package.json
@@ -42,7 +42,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "tap": "^16.3.8"
   },
   "dependencies": {
@@ -53,7 +53,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmorg/package.json b/workspaces/libnpmorg/package.json
index 368cc7fef987d..9c3d1d9effa19 100644
--- a/workspaces/libnpmorg/package.json
+++ b/workspaces/libnpmorg/package.json
@@ -29,7 +29,7 @@
   ],
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "minipass": "^7.1.1",
     "nock": "^13.3.3",
     "tap": "^16.3.8"
@@ -50,7 +50,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmpack/package.json b/workspaces/libnpmpack/package.json
index 29c3fe93375a5..3656850ba356e 100644
--- a/workspaces/libnpmpack/package.json
+++ b/workspaces/libnpmpack/package.json
@@ -24,7 +24,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "nock": "^13.3.3",
     "spawk": "^1.7.1",
     "tap": "^16.3.8"
@@ -47,7 +47,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmpublish/package.json b/workspaces/libnpmpublish/package.json
index d789a3cbabe01..b10c175a26ed6 100644
--- a/workspaces/libnpmpublish/package.json
+++ b/workspaces/libnpmpublish/package.json
@@ -27,7 +27,7 @@
     "@npmcli/eslint-config": "^5.0.1",
     "@npmcli/mock-globals": "^1.0.0",
     "@npmcli/mock-registry": "^1.0.0",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "tap": "^16.3.8"
   },
   "repository": {
@@ -52,7 +52,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmsearch/package.json b/workspaces/libnpmsearch/package.json
index 21fc85e188c12..60075a1624fd2 100644
--- a/workspaces/libnpmsearch/package.json
+++ b/workspaces/libnpmsearch/package.json
@@ -27,7 +27,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
@@ -46,7 +46,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmteam/package.json b/workspaces/libnpmteam/package.json
index 270680bd6e3fe..d89726c5b7cbd 100644
--- a/workspaces/libnpmteam/package.json
+++ b/workspaces/libnpmteam/package.json
@@ -17,7 +17,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "nock": "^13.3.3",
     "tap": "^16.3.8"
   },
@@ -40,7 +40,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   },
   "tap": {
diff --git a/workspaces/libnpmversion/package.json b/workspaces/libnpmversion/package.json
index ff41399b65140..c62ebe4a3337e 100644
--- a/workspaces/libnpmversion/package.json
+++ b/workspaces/libnpmversion/package.json
@@ -33,7 +33,7 @@
   },
   "devDependencies": {
     "@npmcli/eslint-config": "^5.0.1",
-    "@npmcli/template-oss": "4.24.4",
+    "@npmcli/template-oss": "4.25.1",
     "require-inject": "^1.4.4",
     "tap": "^16.3.8"
   },
@@ -49,7 +49,7 @@
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.24.4",
+    "version": "4.25.1",
     "content": "../../scripts/template-oss/index.js"
   }
 }

From ef87ec6612fe5924d3466967aa7e104f3f98bf15 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:14:42 -0700
Subject: [PATCH 46/63] deps: diff@8.0.2

---
 node_modules/diff/CONTRIBUTING.md             |   22 +-
 node_modules/diff/dist/diff.js                | 3628 ++++++++---------
 node_modules/diff/dist/diff.min.js            |   38 +-
 node_modules/diff/eslint.config.mjs           |  182 +
 node_modules/diff/lib/convert/dmp.js          |   27 -
 node_modules/diff/lib/convert/xml.js          |   35 -
 node_modules/diff/lib/diff/array.js           |   39 -
 node_modules/diff/lib/diff/base.js            |  304 --
 node_modules/diff/lib/diff/character.js       |   33 -
 node_modules/diff/lib/diff/css.js             |   36 -
 node_modules/diff/lib/diff/json.js            |  143 -
 node_modules/diff/lib/diff/line.js            |  121 -
 node_modules/diff/lib/diff/sentence.js        |   36 -
 node_modules/diff/lib/diff/word.js            |  543 ---
 node_modules/diff/lib/index.es6.js            | 2041 ----------
 node_modules/diff/lib/index.js                |  217 -
 node_modules/diff/lib/index.mjs               | 2041 ----------
 node_modules/diff/lib/patch/apply.js          |  393 --
 node_modules/diff/lib/patch/create.js         |  369 --
 node_modules/diff/lib/patch/line-endings.js   |  176 -
 node_modules/diff/lib/patch/merge.js          |  535 ---
 node_modules/diff/lib/patch/parse.js          |  151 -
 node_modules/diff/lib/patch/reverse.js        |   58 -
 node_modules/diff/lib/util/array.js           |   27 -
 .../diff/lib/util/distance-iterator.js        |   54 -
 node_modules/diff/lib/util/params.js          |   22 -
 node_modules/diff/lib/util/string.js          |  131 -
 node_modules/diff/libcjs/convert/dmp.js       |   24 +
 node_modules/diff/libcjs/convert/xml.js       |   34 +
 node_modules/diff/libcjs/diff/array.js        |   40 +
 node_modules/diff/libcjs/diff/base.js         |  265 ++
 node_modules/diff/libcjs/diff/character.js    |   31 +
 node_modules/diff/libcjs/diff/css.js          |   34 +
 node_modules/diff/libcjs/diff/json.js         |  105 +
 node_modules/diff/libcjs/diff/line.js         |   89 +
 node_modules/diff/libcjs/diff/sentence.js     |   67 +
 node_modules/diff/libcjs/diff/word.js         |  307 ++
 node_modules/diff/libcjs/index.js             |   61 +
 node_modules/diff/libcjs/package.json         |    1 +
 node_modules/diff/libcjs/patch/apply.js       |  267 ++
 node_modules/diff/libcjs/patch/create.js      |  223 +
 .../diff/libcjs/patch/line-endings.js         |   61 +
 node_modules/diff/libcjs/patch/parse.js       |  133 +
 node_modules/diff/libcjs/patch/reverse.js     |   37 +
 node_modules/diff/libcjs/types.js             |    2 +
 node_modules/diff/libcjs/util/array.js        |   21 +
 .../diff/libcjs/util/distance-iterator.js     |   40 +
 node_modules/diff/libcjs/util/params.js       |   17 +
 node_modules/diff/libcjs/util/string.js       |  141 +
 node_modules/diff/libesm/convert/dmp.js       |   21 +
 node_modules/diff/libesm/convert/xml.js       |   31 +
 node_modules/diff/libesm/diff/array.js        |   16 +
 node_modules/diff/libesm/diff/base.js         |  253 ++
 node_modules/diff/libesm/diff/character.js    |    7 +
 node_modules/diff/libesm/diff/css.js          |   10 +
 node_modules/diff/libesm/diff/json.js         |   78 +
 node_modules/diff/libesm/diff/line.js         |   65 +
 node_modules/diff/libesm/diff/sentence.js     |   43 +
 node_modules/diff/libesm/diff/word.js         |  276 ++
 node_modules/diff/libesm/index.js             |   30 +
 node_modules/diff/libesm/package.json         |    1 +
 node_modules/diff/libesm/patch/apply.js       |  257 ++
 node_modules/diff/libesm/patch/create.js      |  201 +
 .../diff/libesm/patch/line-endings.js         |   44 +
 node_modules/diff/libesm/patch/parse.js       |  130 +
 node_modules/diff/libesm/patch/reverse.js     |   23 +
 node_modules/diff/libesm/types.js             |    1 +
 node_modules/diff/libesm/util/array.js        |   17 +
 .../diff/libesm/util/distance-iterator.js     |   37 +
 node_modules/diff/libesm/util/params.js       |   14 +
 node_modules/diff/libesm/util/string.js       |  128 +
 node_modules/diff/package.json                |  127 +-
 node_modules/diff/release-notes.md            |   44 +-
 node_modules/diff/runtime.js                  |    3 -
 package-lock.json                             |  361 +-
 package.json                                  |    2 +-
 workspaces/libnpmdiff/package.json            |    2 +-
 77 files changed, 5609 insertions(+), 10015 deletions(-)
 create mode 100644 node_modules/diff/eslint.config.mjs
 delete mode 100644 node_modules/diff/lib/convert/dmp.js
 delete mode 100644 node_modules/diff/lib/convert/xml.js
 delete mode 100644 node_modules/diff/lib/diff/array.js
 delete mode 100644 node_modules/diff/lib/diff/base.js
 delete mode 100644 node_modules/diff/lib/diff/character.js
 delete mode 100644 node_modules/diff/lib/diff/css.js
 delete mode 100644 node_modules/diff/lib/diff/json.js
 delete mode 100644 node_modules/diff/lib/diff/line.js
 delete mode 100644 node_modules/diff/lib/diff/sentence.js
 delete mode 100644 node_modules/diff/lib/diff/word.js
 delete mode 100644 node_modules/diff/lib/index.es6.js
 delete mode 100644 node_modules/diff/lib/index.js
 delete mode 100644 node_modules/diff/lib/index.mjs
 delete mode 100644 node_modules/diff/lib/patch/apply.js
 delete mode 100644 node_modules/diff/lib/patch/create.js
 delete mode 100644 node_modules/diff/lib/patch/line-endings.js
 delete mode 100644 node_modules/diff/lib/patch/merge.js
 delete mode 100644 node_modules/diff/lib/patch/parse.js
 delete mode 100644 node_modules/diff/lib/patch/reverse.js
 delete mode 100644 node_modules/diff/lib/util/array.js
 delete mode 100644 node_modules/diff/lib/util/distance-iterator.js
 delete mode 100644 node_modules/diff/lib/util/params.js
 delete mode 100644 node_modules/diff/lib/util/string.js
 create mode 100644 node_modules/diff/libcjs/convert/dmp.js
 create mode 100644 node_modules/diff/libcjs/convert/xml.js
 create mode 100644 node_modules/diff/libcjs/diff/array.js
 create mode 100644 node_modules/diff/libcjs/diff/base.js
 create mode 100644 node_modules/diff/libcjs/diff/character.js
 create mode 100644 node_modules/diff/libcjs/diff/css.js
 create mode 100644 node_modules/diff/libcjs/diff/json.js
 create mode 100644 node_modules/diff/libcjs/diff/line.js
 create mode 100644 node_modules/diff/libcjs/diff/sentence.js
 create mode 100644 node_modules/diff/libcjs/diff/word.js
 create mode 100644 node_modules/diff/libcjs/index.js
 create mode 100644 node_modules/diff/libcjs/package.json
 create mode 100644 node_modules/diff/libcjs/patch/apply.js
 create mode 100644 node_modules/diff/libcjs/patch/create.js
 create mode 100644 node_modules/diff/libcjs/patch/line-endings.js
 create mode 100644 node_modules/diff/libcjs/patch/parse.js
 create mode 100644 node_modules/diff/libcjs/patch/reverse.js
 create mode 100644 node_modules/diff/libcjs/types.js
 create mode 100644 node_modules/diff/libcjs/util/array.js
 create mode 100644 node_modules/diff/libcjs/util/distance-iterator.js
 create mode 100644 node_modules/diff/libcjs/util/params.js
 create mode 100644 node_modules/diff/libcjs/util/string.js
 create mode 100644 node_modules/diff/libesm/convert/dmp.js
 create mode 100644 node_modules/diff/libesm/convert/xml.js
 create mode 100644 node_modules/diff/libesm/diff/array.js
 create mode 100644 node_modules/diff/libesm/diff/base.js
 create mode 100644 node_modules/diff/libesm/diff/character.js
 create mode 100644 node_modules/diff/libesm/diff/css.js
 create mode 100644 node_modules/diff/libesm/diff/json.js
 create mode 100644 node_modules/diff/libesm/diff/line.js
 create mode 100644 node_modules/diff/libesm/diff/sentence.js
 create mode 100644 node_modules/diff/libesm/diff/word.js
 create mode 100644 node_modules/diff/libesm/index.js
 create mode 100644 node_modules/diff/libesm/package.json
 create mode 100644 node_modules/diff/libesm/patch/apply.js
 create mode 100644 node_modules/diff/libesm/patch/create.js
 create mode 100644 node_modules/diff/libesm/patch/line-endings.js
 create mode 100644 node_modules/diff/libesm/patch/parse.js
 create mode 100644 node_modules/diff/libesm/patch/reverse.js
 create mode 100644 node_modules/diff/libesm/types.js
 create mode 100644 node_modules/diff/libesm/util/array.js
 create mode 100644 node_modules/diff/libesm/util/distance-iterator.js
 create mode 100644 node_modules/diff/libesm/util/params.js
 create mode 100644 node_modules/diff/libesm/util/string.js
 delete mode 100644 node_modules/diff/runtime.js

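The hunks below replace the package's old Babel-built `lib/` tree with TypeScript-compiled `libcjs/` and `libesm/` outputs and regenerate `dist/diff.js`; the exported surface the CLI consumes (`diffChars`, `diffWords`, the patch helpers) keeps the same shape. As a rough, illustrative sketch of that surface (not part of the patch itself), the change objects produced carry `value`, `added`, `removed`, and `count`, as seen in `buildValues` below:

```js
// Illustrative only: exercises the diffWords/diffChars API visible in the
// hunks that follow. Each change object carries { value, added, removed, count }.
const { diffWords, diffChars } = require('diff')

const wordChanges = diffWords('foo bar baz', 'foo qux baz')
for (const part of wordChanges) {
  // '+' for insertions, '-' for deletions, ' ' for unchanged runs
  const tag = part.added ? '+' : part.removed ? '-' : ' '
  console.log(tag, JSON.stringify(part.value))
}

// Character-level variant over the same two strings:
console.log(diffChars('kitten', 'sitting').length)
```
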
diff --git a/node_modules/diff/CONTRIBUTING.md b/node_modules/diff/CONTRIBUTING.md
index 199c556c1ffb0..203d0245fc634 100644
--- a/node_modules/diff/CONTRIBUTING.md
+++ b/node_modules/diff/CONTRIBUTING.md
@@ -1,36 +1,24 @@
-# How to Contribute
-
-## Pull Requests
-
-We also accept [pull requests][pull-request]!
-
-Generally we like to see pull requests that
-
-- Maintain the existing code style
-- Are focused on a single change (i.e. avoid large refactoring or style adjustments in untouched code if not the primary goal of the pull request)
-- Have [good commit messages](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html)
-- Have tests
-- Don't decrease the current code coverage (see coverage/lcov-report/index.html)
-
-## Building
+## Building and testing
 
 ```
 yarn
 yarn test
 ```
 
-Running `yarn test -- dev` will watch for tests within Node and `karma start` may be used for manual testing in browsers.
+To run tests in a *browser* (for instance to test compatibility with Firefox, with Safari, or with old browser versions), run `yarn karma start`, then open http://localhost:9876/ in the browser you want to test in. Results of the test run will appear in the terminal where `yarn karma start` is running.
 
 If you notice any problems, please report them to the GitHub issue tracker at
 [http://github.com/kpdecker/jsdiff/issues](http://github.com/kpdecker/jsdiff/issues).
 
 ## Releasing
 
+Run a test in Firefox via the procedure above before releasing.
+
 A full release may be completed by first updating the `"version"` property in package.json, then running the following:
 
 ```
 yarn clean
-yarn grunt release
+yarn build
 yarn publish
 ```
 
diff --git a/node_modules/diff/dist/diff.js b/node_modules/diff/dist/diff.js
index 2c2c33344ecd2..4140e503f1559 100644
--- a/node_modules/diff/dist/diff.js
+++ b/node_modules/diff/dist/diff.js
@@ -1,2106 +1,1674 @@
-/*!
-
- diff v7.0.0
-
-BSD 3-Clause License
-
-Copyright (c) 2009-2015, Kevin Decker 
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
-   contributors may be used to endorse or promote products derived from
-   this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-@license
-*/
 (function (global, factory) {
-  typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
-  typeof define === 'function' && define.amd ? define(['exports'], factory) :
-  (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.Diff = {}));
+    typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :
+    typeof define === 'function' && define.amd ? define(['exports'], factory) :
+    (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.Diff = {}));
 })(this, (function (exports) { 'use strict';
 
-  function Diff() {}
-  Diff.prototype = {
-    diff: function diff(oldString, newString) {
-      var _options$timeout;
-      var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-      var callback = options.callback;
-      if (typeof options === 'function') {
-        callback = options;
-        options = {};
-      }
-      var self = this;
-      function done(value) {
-        value = self.postProcess(value, options);
-        if (callback) {
-          setTimeout(function () {
-            callback(value);
-          }, 0);
-          return true;
-        } else {
-          return value;
-        }
-      }
-
-      // Allow subclasses to massage the input prior to running
-      oldString = this.castInput(oldString, options);
-      newString = this.castInput(newString, options);
-      oldString = this.removeEmpty(this.tokenize(oldString, options));
-      newString = this.removeEmpty(this.tokenize(newString, options));
-      var newLen = newString.length,
-        oldLen = oldString.length;
-      var editLength = 1;
-      var maxEditLength = newLen + oldLen;
-      if (options.maxEditLength != null) {
-        maxEditLength = Math.min(maxEditLength, options.maxEditLength);
-      }
-      var maxExecutionTime = (_options$timeout = options.timeout) !== null && _options$timeout !== void 0 ? _options$timeout : Infinity;
-      var abortAfterTimestamp = Date.now() + maxExecutionTime;
-      var bestPath = [{
-        oldPos: -1,
-        lastComponent: undefined
-      }];
-
-      // Seed editLength = 0, i.e. the content starts with the same values
-      var newPos = this.extractCommon(bestPath[0], newString, oldString, 0, options);
-      if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-        // Identity per the equality and tokenizer
-        return done(buildValues(self, bestPath[0].lastComponent, newString, oldString, self.useLongestToken));
-      }
-
-      // Once we hit the right edge of the edit graph on some diagonal k, we can
-      // definitely reach the end of the edit graph in no more than k edits, so
-      // there's no point in considering any moves to diagonal k+1 any more (from
-      // which we're guaranteed to need at least k+1 more edits).
-      // Similarly, once we've reached the bottom of the edit graph, there's no
-      // point considering moves to lower diagonals.
-      // We record this fact by setting minDiagonalToConsider and
-      // maxDiagonalToConsider to some finite value once we've hit the edge of
-      // the edit graph.
-      // This optimization is not faithful to the original algorithm presented in
-      // Myers's paper, which instead pointlessly extends D-paths off the end of
-      // the edit graph - see page 7 of Myers's paper which notes this point
-      // explicitly and illustrates it with a diagram. This has major performance
-      // implications for some common scenarios. For instance, to compute a diff
-      // where the new text simply appends d characters on the end of the
-      // original text of length n, the true Myers algorithm will take O(n+d^2)
-      // time while this optimization needs only O(n+d) time.
-      var minDiagonalToConsider = -Infinity,
-        maxDiagonalToConsider = Infinity;
-
-      // Main worker method. checks all permutations of a given edit length for acceptance.
-      function execEditLength() {
-        for (var diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
-          var basePath = void 0;
-          var removePath = bestPath[diagonalPath - 1],
-            addPath = bestPath[diagonalPath + 1];
-          if (removePath) {
-            // No one else is going to attempt to use this value, clear it
-            bestPath[diagonalPath - 1] = undefined;
-          }
-          var canAdd = false;
-          if (addPath) {
-            // what newPos will be after we do an insertion:
-            var addPathNewPos = addPath.oldPos - diagonalPath;
-            canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
-          }
-          var canRemove = removePath && removePath.oldPos + 1 < oldLen;
-          if (!canAdd && !canRemove) {
-            // If this path is a terminal then prune
-            bestPath[diagonalPath] = undefined;
-            continue;
-          }
-
-          // Select the diagonal that we want to branch from. We select the prior
-          // path whose position in the old string is the farthest from the origin
-          // and does not pass the bounds of the diff graph
-          if (!canRemove || canAdd && removePath.oldPos < addPath.oldPos) {
-            basePath = self.addToPath(addPath, true, false, 0, options);
-          } else {
-            basePath = self.addToPath(removePath, false, true, 1, options);
-          }
-          newPos = self.extractCommon(basePath, newString, oldString, diagonalPath, options);
-          if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-            // If we have hit the end of both strings, then we are done
-            return done(buildValues(self, basePath.lastComponent, newString, oldString, self.useLongestToken));
-          } else {
-            bestPath[diagonalPath] = basePath;
-            if (basePath.oldPos + 1 >= oldLen) {
-              maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
-            }
-            if (newPos + 1 >= newLen) {
-              minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
-            }
-          }
-        }
-        editLength++;
-      }
-
-      // Performs the length of edit iteration. Is a bit fugly as this has to support the
-      // sync and async mode which is never fun. Loops over execEditLength until a value
-      // is produced, or until the edit length exceeds options.maxEditLength (if given),
-      // in which case it will return undefined.
-      if (callback) {
-        (function exec() {
-          setTimeout(function () {
-            if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
-              return callback();
-            }
-            if (!execEditLength()) {
-              exec();
-            }
-          }, 0);
-        })();
-      } else {
-        while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
-          var ret = execEditLength();
-          if (ret) {
+    class Diff {
+        diff(oldStr, newStr, 
+        // Type below is not accurate/complete - see above for full possibilities - but it compiles
+        options = {}) {
+            let callback;
+            if (typeof options === 'function') {
+                callback = options;
+                options = {};
+            }
+            else if ('callback' in options) {
+                callback = options.callback;
+            }
+            // Allow subclasses to massage the input prior to running
+            const oldString = this.castInput(oldStr, options);
+            const newString = this.castInput(newStr, options);
+            const oldTokens = this.removeEmpty(this.tokenize(oldString, options));
+            const newTokens = this.removeEmpty(this.tokenize(newString, options));
+            return this.diffWithOptionsObj(oldTokens, newTokens, options, callback);
+        }
+        diffWithOptionsObj(oldTokens, newTokens, options, callback) {
+            var _a;
+            const done = (value) => {
+                value = this.postProcess(value, options);
+                if (callback) {
+                    setTimeout(function () { callback(value); }, 0);
+                    return undefined;
+                }
+                else {
+                    return value;
+                }
+            };
+            const newLen = newTokens.length, oldLen = oldTokens.length;
+            let editLength = 1;
+            let maxEditLength = newLen + oldLen;
+            if (options.maxEditLength != null) {
+                maxEditLength = Math.min(maxEditLength, options.maxEditLength);
+            }
+            const maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity;
+            const abortAfterTimestamp = Date.now() + maxExecutionTime;
+            const bestPath = [{ oldPos: -1, lastComponent: undefined }];
+            // Seed editLength = 0, i.e. the content starts with the same values
+            let newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options);
+            if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
+                // Identity per the equality and tokenizer
+                return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens));
+            }
+            // Once we hit the right edge of the edit graph on some diagonal k, we can
+            // definitely reach the end of the edit graph in no more than k edits, so
+            // there's no point in considering any moves to diagonal k+1 any more (from
+            // which we're guaranteed to need at least k+1 more edits).
+            // Similarly, once we've reached the bottom of the edit graph, there's no
+            // point considering moves to lower diagonals.
+            // We record this fact by setting minDiagonalToConsider and
+            // maxDiagonalToConsider to some finite value once we've hit the edge of
+            // the edit graph.
+            // This optimization is not faithful to the original algorithm presented in
+            // Myers's paper, which instead pointlessly extends D-paths off the end of
+            // the edit graph - see page 7 of Myers's paper which notes this point
+            // explicitly and illustrates it with a diagram. This has major performance
+            // implications for some common scenarios. For instance, to compute a diff
+            // where the new text simply appends d characters on the end of the
+            // original text of length n, the true Myers algorithm will take O(n+d^2)
+            // time while this optimization needs only O(n+d) time.
+            let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity;
+            // Main worker method. checks all permutations of a given edit length for acceptance.
+            const execEditLength = () => {
+                for (let diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
+                    let basePath;
+                    const removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1];
+                    if (removePath) {
+                        // No one else is going to attempt to use this value, clear it
+                        // @ts-expect-error - perf optimisation. This type-violating value will never be read.
+                        bestPath[diagonalPath - 1] = undefined;
+                    }
+                    let canAdd = false;
+                    if (addPath) {
+                        // what newPos will be after we do an insertion:
+                        const addPathNewPos = addPath.oldPos - diagonalPath;
+                        canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
+                    }
+                    const canRemove = removePath && removePath.oldPos + 1 < oldLen;
+                    if (!canAdd && !canRemove) {
+                        // If this path is a terminal then prune
+                        // @ts-expect-error - perf optimisation. This type-violating value will never be read.
+                        bestPath[diagonalPath] = undefined;
+                        continue;
+                    }
+                    // Select the diagonal that we want to branch from. We select the prior
+                    // path whose position in the old string is the farthest from the origin
+                    // and does not pass the bounds of the diff graph
+                    if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) {
+                        basePath = this.addToPath(addPath, true, false, 0, options);
+                    }
+                    else {
+                        basePath = this.addToPath(removePath, false, true, 1, options);
+                    }
+                    newPos = this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options);
+                    if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
+                        // If we have hit the end of both strings, then we are done
+                        return done(this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true;
+                    }
+                    else {
+                        bestPath[diagonalPath] = basePath;
+                        if (basePath.oldPos + 1 >= oldLen) {
+                            maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
+                        }
+                        if (newPos + 1 >= newLen) {
+                            minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
+                        }
+                    }
+                }
+                editLength++;
+            };
+            // Performs the length of edit iteration. Is a bit fugly as this has to support the
+            // sync and async mode which is never fun. Loops over execEditLength until a value
+            // is produced, or until the edit length exceeds options.maxEditLength (if given),
+            // in which case it will return undefined.
+            if (callback) {
+                (function exec() {
+                    setTimeout(function () {
+                        if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
+                            return callback(undefined);
+                        }
+                        if (!execEditLength()) {
+                            exec();
+                        }
+                    }, 0);
+                }());
+            }
+            else {
+                while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
+                    const ret = execEditLength();
+                    if (ret) {
+                        return ret;
+                    }
+                }
+            }
+        }
+        addToPath(path, added, removed, oldPosInc, options) {
+            const last = path.lastComponent;
+            if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
+                return {
+                    oldPos: path.oldPos + oldPosInc,
+                    lastComponent: { count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent }
+                };
+            }
+            else {
+                return {
+                    oldPos: path.oldPos + oldPosInc,
+                    lastComponent: { count: 1, added: added, removed: removed, previousComponent: last }
+                };
+            }
+        }
+        extractCommon(basePath, newTokens, oldTokens, diagonalPath, options) {
+            const newLen = newTokens.length, oldLen = oldTokens.length;
+            let oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0;
+            while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) {
+                newPos++;
+                oldPos++;
+                commonCount++;
+                if (options.oneChangePerToken) {
+                    basePath.lastComponent = { count: 1, previousComponent: basePath.lastComponent, added: false, removed: false };
+                }
+            }
+            if (commonCount && !options.oneChangePerToken) {
+                basePath.lastComponent = { count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false };
+            }
+            basePath.oldPos = oldPos;
+            return newPos;
+        }
+        equals(left, right, options) {
+            if (options.comparator) {
+                return options.comparator(left, right);
+            }
+            else {
+                return left === right
+                    || (!!options.ignoreCase && left.toLowerCase() === right.toLowerCase());
+            }
+        }
+        removeEmpty(array) {
+            const ret = [];
+            for (let i = 0; i < array.length; i++) {
+                if (array[i]) {
+                    ret.push(array[i]);
+                }
+            }
             return ret;
-          }
-        }
-      }
-    },
-    addToPath: function addToPath(path, added, removed, oldPosInc, options) {
-      var last = path.lastComponent;
-      if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
-        return {
-          oldPos: path.oldPos + oldPosInc,
-          lastComponent: {
-            count: last.count + 1,
-            added: added,
-            removed: removed,
-            previousComponent: last.previousComponent
-          }
-        };
-      } else {
-        return {
-          oldPos: path.oldPos + oldPosInc,
-          lastComponent: {
-            count: 1,
-            added: added,
-            removed: removed,
-            previousComponent: last
-          }
-        };
-      }
-    },
-    extractCommon: function extractCommon(basePath, newString, oldString, diagonalPath, options) {
-      var newLen = newString.length,
-        oldLen = oldString.length,
-        oldPos = basePath.oldPos,
-        newPos = oldPos - diagonalPath,
-        commonCount = 0;
-      while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldString[oldPos + 1], newString[newPos + 1], options)) {
-        newPos++;
-        oldPos++;
-        commonCount++;
-        if (options.oneChangePerToken) {
-          basePath.lastComponent = {
-            count: 1,
-            previousComponent: basePath.lastComponent,
-            added: false,
-            removed: false
-          };
-        }
-      }
-      if (commonCount && !options.oneChangePerToken) {
-        basePath.lastComponent = {
-          count: commonCount,
-          previousComponent: basePath.lastComponent,
-          added: false,
-          removed: false
-        };
-      }
-      basePath.oldPos = oldPos;
-      return newPos;
-    },
-    equals: function equals(left, right, options) {
-      if (options.comparator) {
-        return options.comparator(left, right);
-      } else {
-        return left === right || options.ignoreCase && left.toLowerCase() === right.toLowerCase();
-      }
-    },
-    removeEmpty: function removeEmpty(array) {
-      var ret = [];
-      for (var i = 0; i < array.length; i++) {
-        if (array[i]) {
-          ret.push(array[i]);
-        }
-      }
-      return ret;
-    },
-    castInput: function castInput(value) {
-      return value;
-    },
-    tokenize: function tokenize(value) {
-      return Array.from(value);
-    },
-    join: function join(chars) {
-      return chars.join('');
-    },
-    postProcess: function postProcess(changeObjects) {
-      return changeObjects;
-    }
-  };
-  function buildValues(diff, lastComponent, newString, oldString, useLongestToken) {
-    // First we convert our linked list of components in reverse order to an
-    // array in the right order:
-    var components = [];
-    var nextComponent;
-    while (lastComponent) {
-      components.push(lastComponent);
-      nextComponent = lastComponent.previousComponent;
-      delete lastComponent.previousComponent;
-      lastComponent = nextComponent;
+        }
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        castInput(value, options) {
+            return value;
+        }
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        tokenize(value, options) {
+            return Array.from(value);
+        }
+        join(chars) {
+            // Assumes ValueT is string, which is the case for most subclasses.
+            // When it's false, e.g. in diffArrays, this method needs to be overridden (e.g. with a no-op)
+            // Yes, the casts are verbose and ugly, because this pattern - of having the base class SORT OF
+            // assume tokens and values are strings, but not completely - is weird and janky.
+            return chars.join('');
+        }
+        postProcess(changeObjects, 
+        // eslint-disable-next-line @typescript-eslint/no-unused-vars
+        options) {
+            return changeObjects;
+        }
+        get useLongestToken() {
+            return false;
+        }
+        buildValues(lastComponent, newTokens, oldTokens) {
+            // First we convert our linked list of components in reverse order to an
+            // array in the right order:
+            const components = [];
+            let nextComponent;
+            while (lastComponent) {
+                components.push(lastComponent);
+                nextComponent = lastComponent.previousComponent;
+                delete lastComponent.previousComponent;
+                lastComponent = nextComponent;
+            }
+            components.reverse();
+            const componentLen = components.length;
+            let componentPos = 0, newPos = 0, oldPos = 0;
+            for (; componentPos < componentLen; componentPos++) {
+                const component = components[componentPos];
+                if (!component.removed) {
+                    if (!component.added && this.useLongestToken) {
+                        let value = newTokens.slice(newPos, newPos + component.count);
+                        value = value.map(function (value, i) {
+                            const oldValue = oldTokens[oldPos + i];
+                            return oldValue.length > value.length ? oldValue : value;
+                        });
+                        component.value = this.join(value);
+                    }
+                    else {
+                        component.value = this.join(newTokens.slice(newPos, newPos + component.count));
+                    }
+                    newPos += component.count;
+                    // Common case
+                    if (!component.added) {
+                        oldPos += component.count;
+                    }
+                }
+                else {
+                    component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count));
+                    oldPos += component.count;
+                }
+            }
+            return components;
+        }
     }
-    components.reverse();
-    var componentPos = 0,
-      componentLen = components.length,
-      newPos = 0,
-      oldPos = 0;
-    for (; componentPos < componentLen; componentPos++) {
-      var component = components[componentPos];
-      if (!component.removed) {
-        if (!component.added && useLongestToken) {
-          var value = newString.slice(newPos, newPos + component.count);
-          value = value.map(function (value, i) {
-            var oldValue = oldString[oldPos + i];
-            return oldValue.length > value.length ? oldValue : value;
-          });
-          component.value = diff.join(value);
-        } else {
-          component.value = diff.join(newString.slice(newPos, newPos + component.count));
-        }
-        newPos += component.count;
 
-        // Common case
-        if (!component.added) {
-          oldPos += component.count;
-        }
-      } else {
-        component.value = diff.join(oldString.slice(oldPos, oldPos + component.count));
-        oldPos += component.count;
-      }
+    class CharacterDiff extends Diff {
     }
-    return components;
-  }
-
-  var characterDiff = new Diff();
-  function diffChars(oldStr, newStr, options) {
-    return characterDiff.diff(oldStr, newStr, options);
-  }
-
-  function longestCommonPrefix(str1, str2) {
-    var i;
-    for (i = 0; i < str1.length && i < str2.length; i++) {
-      if (str1[i] != str2[i]) {
-        return str1.slice(0, i);
-      }
+    const characterDiff = new CharacterDiff();
+    function diffChars(oldStr, newStr, options) {
+        return characterDiff.diff(oldStr, newStr, options);
     }
-    return str1.slice(0, i);
-  }
-  function longestCommonSuffix(str1, str2) {
-    var i;
 
-    // Unlike longestCommonPrefix, we need a special case to handle all scenarios
-    // where we return the empty string since str1.slice(-0) will return the
-    // entire string.
-    if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
-      return '';
+    function longestCommonPrefix(str1, str2) {
+        let i;
+        for (i = 0; i < str1.length && i < str2.length; i++) {
+            if (str1[i] != str2[i]) {
+                return str1.slice(0, i);
+            }
+        }
+        return str1.slice(0, i);
     }
-    for (i = 0; i < str1.length && i < str2.length; i++) {
-      if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
+    function longestCommonSuffix(str1, str2) {
+        let i;
+        // Unlike longestCommonPrefix, we need a special case to handle all scenarios
+        // where we return the empty string since str1.slice(-0) will return the
+        // entire string.
+        if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
+            return '';
+        }
+        for (i = 0; i < str1.length && i < str2.length; i++) {
+            if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
+                return str1.slice(-i);
+            }
+        }
         return str1.slice(-i);
-      }
-    }
-    return str1.slice(-i);
-  }
-  function replacePrefix(string, oldPrefix, newPrefix) {
-    if (string.slice(0, oldPrefix.length) != oldPrefix) {
-      throw Error("string ".concat(JSON.stringify(string), " doesn't start with prefix ").concat(JSON.stringify(oldPrefix), "; this is a bug"));
-    }
-    return newPrefix + string.slice(oldPrefix.length);
-  }
-  function replaceSuffix(string, oldSuffix, newSuffix) {
-    if (!oldSuffix) {
-      return string + newSuffix;
     }
-    if (string.slice(-oldSuffix.length) != oldSuffix) {
-      throw Error("string ".concat(JSON.stringify(string), " doesn't end with suffix ").concat(JSON.stringify(oldSuffix), "; this is a bug"));
+    function replacePrefix(string, oldPrefix, newPrefix) {
+        if (string.slice(0, oldPrefix.length) != oldPrefix) {
+            throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`);
+        }
+        return newPrefix + string.slice(oldPrefix.length);
     }
-    return string.slice(0, -oldSuffix.length) + newSuffix;
-  }
-  function removePrefix(string, oldPrefix) {
-    return replacePrefix(string, oldPrefix, '');
-  }
-  function removeSuffix(string, oldSuffix) {
-    return replaceSuffix(string, oldSuffix, '');
-  }
-  function maximumOverlap(string1, string2) {
-    return string2.slice(0, overlapCount(string1, string2));
-  }
-
-  // Nicked from https://stackoverflow.com/a/60422853/1709587
-  function overlapCount(a, b) {
-    // Deal with cases where the strings differ in length
-    var startA = 0;
-    if (a.length > b.length) {
-      startA = a.length - b.length;
+    function replaceSuffix(string, oldSuffix, newSuffix) {
+        if (!oldSuffix) {
+            return string + newSuffix;
+        }
+        if (string.slice(-oldSuffix.length) != oldSuffix) {
+            throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`);
+        }
+        return string.slice(0, -oldSuffix.length) + newSuffix;
     }
-    var endB = b.length;
-    if (a.length < b.length) {
-      endB = a.length;
+    function removePrefix(string, oldPrefix) {
+        return replacePrefix(string, oldPrefix, '');
     }
-    // Create a back-reference for each index
-    //   that should be followed in case of a mismatch.
-    //   We only need B to make these references:
-    var map = Array(endB);
-    var k = 0; // Index that lags behind j
-    map[0] = 0;
-    for (var j = 1; j < endB; j++) {
-      if (b[j] == b[k]) {
-        map[j] = map[k]; // skip over the same character (optional optimisation)
-      } else {
-        map[j] = k;
-      }
-      while (k > 0 && b[j] != b[k]) {
-        k = map[k];
-      }
-      if (b[j] == b[k]) {
-        k++;
-      }
+    function removeSuffix(string, oldSuffix) {
+        return replaceSuffix(string, oldSuffix, '');
     }
-    // Phase 2: use these references while iterating over A
-    k = 0;
-    for (var i = startA; i < a.length; i++) {
-      while (k > 0 && a[i] != b[k]) {
-        k = map[k];
-      }
-      if (a[i] == b[k]) {
-        k++;
-      }
+    function maximumOverlap(string1, string2) {
+        return string2.slice(0, overlapCount(string1, string2));
     }
-    return k;
-  }
-
-  /**
-   * Returns true if the string consistently uses Windows line endings.
-   */
-  function hasOnlyWinLineEndings(string) {
-    return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
-  }
-
-  /**
-   * Returns true if the string consistently uses Unix line endings.
-   */
-  function hasOnlyUnixLineEndings(string) {
-    return !string.includes('\r\n') && string.includes('\n');
-  }
-
-  // Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
-  //
-  // Ranges and exceptions:
-  // Latin-1 Supplement, 0080–00FF
-  //  - U+00D7  × Multiplication sign
-  //  - U+00F7  ÷ Division sign
-  // Latin Extended-A, 0100–017F
-  // Latin Extended-B, 0180–024F
-  // IPA Extensions, 0250–02AF
-  // Spacing Modifier Letters, 02B0–02FF
-  //  - U+02C7  ˇ ˇ  Caron
-  //  - U+02D8  ˘ ˘  Breve
-  //  - U+02D9  ˙ ˙  Dot Above
-  //  - U+02DA  ˚ ˚  Ring Above
-  //  - U+02DB  ˛ ˛  Ogonek
-  //  - U+02DC  ˜ ˜  Small Tilde
-  //  - U+02DD  ˝ ˝  Double Acute Accent
-  // Latin Extended Additional, 1E00–1EFF
-  var extendedWordChars = "a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}";
-
-  // Each token is one of the following:
-  // - A punctuation mark plus the surrounding whitespace
-  // - A word plus the surrounding whitespace
-  // - Pure whitespace (but only in the special case where this the entire text
-  //   is just whitespace)
-  //
-  // We have to include surrounding whitespace in the tokens because the two
-  // alternative approaches produce horribly broken results:
-  // * If we just discard the whitespace, we can't fully reproduce the original
-  //   text from the sequence of tokens and any attempt to render the diff will
-  //   get the whitespace wrong.
-  // * If we have separate tokens for whitespace, then in a typical text every
-  //   second token will be a single space character. But this often results in
-  //   the optimal diff between two texts being a perverse one that preserves
-  //   the spaces between words but deletes and reinserts actual common words.
-  //   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
-  //   for an example.
-  //
-  // Keeping the surrounding whitespace of course has implications for .equals
-  // and .join, not just .tokenize.
-
-  // This regex does NOT fully implement the tokenization rules described above.
-  // Instead, it gives runs of whitespace their own "token". The tokenize method
-  // then handles stitching whitespace tokens onto adjacent word or punctuation
-  // tokens.
-  var tokenizeIncludingWhitespace = new RegExp("[".concat(extendedWordChars, "]+|\\s+|[^").concat(extendedWordChars, "]"), 'ug');
-  var wordDiff = new Diff();
-  wordDiff.equals = function (left, right, options) {
-    if (options.ignoreCase) {
-      left = left.toLowerCase();
-      right = right.toLowerCase();
-    }
-    return left.trim() === right.trim();
-  };
-  wordDiff.tokenize = function (value) {
-    var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-    var parts;
-    if (options.intlSegmenter) {
-      if (options.intlSegmenter.resolvedOptions().granularity != 'word') {
-        throw new Error('The segmenter passed must have a granularity of "word"');
-      }
-      parts = Array.from(options.intlSegmenter.segment(value), function (segment) {
-        return segment.segment;
-      });
-    } else {
-      parts = value.match(tokenizeIncludingWhitespace) || [];
+    // Nicked from https://stackoverflow.com/a/60422853/1709587
+    function overlapCount(a, b) {
+        // Deal with cases where the strings differ in length
+        let startA = 0;
+        if (a.length > b.length) {
+            startA = a.length - b.length;
+        }
+        let endB = b.length;
+        if (a.length < b.length) {
+            endB = a.length;
+        }
+        // Create a back-reference for each index
+        //   that should be followed in case of a mismatch.
+        //   We only need B to make these references:
+        const map = Array(endB);
+        let k = 0; // Index that lags behind j
+        map[0] = 0;
+        for (let j = 1; j < endB; j++) {
+            if (b[j] == b[k]) {
+                map[j] = map[k]; // skip over the same character (optional optimisation)
+            }
+            else {
+                map[j] = k;
+            }
+            while (k > 0 && b[j] != b[k]) {
+                k = map[k];
+            }
+            if (b[j] == b[k]) {
+                k++;
+            }
+        }
+        // Phase 2: use these references while iterating over A
+        k = 0;
+        for (let i = startA; i < a.length; i++) {
+            while (k > 0 && a[i] != b[k]) {
+                k = map[k];
+            }
+            if (a[i] == b[k]) {
+                k++;
+            }
+        }
+        return k;
     }
-    var tokens = [];
-    var prevPart = null;
-    parts.forEach(function (part) {
-      if (/\s/.test(part)) {
-        if (prevPart == null) {
-          tokens.push(part);
-        } else {
-          tokens.push(tokens.pop() + part);
-        }
-      } else if (/\s/.test(prevPart)) {
-        if (tokens[tokens.length - 1] == prevPart) {
-          tokens.push(tokens.pop() + part);
-        } else {
-          tokens.push(prevPart + part);
-        }
-      } else {
-        tokens.push(part);
-      }
-      prevPart = part;
-    });
-    return tokens;
-  };
-  wordDiff.join = function (tokens) {
-    // Tokens being joined here will always have appeared consecutively in the
-    // same text, so we can simply strip off the leading whitespace from all the
-    // tokens except the first (and except any whitespace-only tokens - but such
-    // a token will always be the first and only token anyway) and then join them
-    // and the whitespace around words and punctuation will end up correct.
-    return tokens.map(function (token, i) {
-      if (i == 0) {
-        return token;
-      } else {
-        return token.replace(/^\s+/, '');
-      }
-    }).join('');
-  };
-  wordDiff.postProcess = function (changes, options) {
-    if (!changes || options.oneChangePerToken) {
-      return changes;
+    /**
+     * Returns true if the string consistently uses Windows line endings.
+     */
+    function hasOnlyWinLineEndings(string) {
+        return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
     }
-    var lastKeep = null;
-    // Change objects representing any insertion or deletion since the last
-    // "keep" change object. There can be at most one of each.
-    var insertion = null;
-    var deletion = null;
-    changes.forEach(function (change) {
-      if (change.added) {
-        insertion = change;
-      } else if (change.removed) {
-        deletion = change;
-      } else {
-        if (insertion || deletion) {
-          // May be false at start of text
-          dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
-        }
-        lastKeep = change;
-        insertion = null;
-        deletion = null;
-      }
-    });
-    if (insertion || deletion) {
-      dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
+    /**
+     * Returns true if the string consistently uses Unix line endings.
+     */
+    function hasOnlyUnixLineEndings(string) {
+        return !string.includes('\r\n') && string.includes('\n');
+    }
+    function trailingWs(string) {
+        // Yes, this looks overcomplicated and dumb - why not replace the whole function with
+        //     return string match(/\s*$/)[0]
+        // you ask? Because:
+        // 1. the trap described at https://markamery.com/blog/quadratic-time-regexes/ would mean doing
+        //    this would cause this function to take O(n²) time in the worst case (specifically when
+        //    there is a massive run of NON-TRAILING whitespace in `string`), and
+        // 2. the fix proposed in the same blog post, of using a negative lookbehind, is incompatible
+        //    with old Safari versions that we'd like to not break if possible (see
+        //    https://github.com/kpdecker/jsdiff/pull/550)
+        // It feels absurd to do this with an explicit loop instead of a regex, but I really can't see a
+        // better way that doesn't result in broken behaviour.
+        let i;
+        for (i = string.length - 1; i >= 0; i--) {
+            if (!string[i].match(/\s/)) {
+                break;
+            }
+        }
+        return string.substring(i + 1);
     }
-    return changes;
-  };
-  function diffWords(oldStr, newStr, options) {
-    // This option has never been documented and never will be (it's clearer to
-    // just call `diffWordsWithSpace` directly if you need that behavior), but
-    // has existed in jsdiff for a long time, so we retain support for it here
-    // for the sake of backwards compatibility.
-    if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
-      return diffWordsWithSpace(oldStr, newStr, options);
+    function leadingWs(string) {
+        // Thankfully the annoying considerations described in trailingWs don't apply here:
+        const match = string.match(/^\s*/);
+        return match ? match[0] : '';
     }
-    return wordDiff.diff(oldStr, newStr, options);
-  }
-  function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
-    // Before returning, we tidy up the leading and trailing whitespace of the
-    // change objects to eliminate cases where trailing whitespace in one object
-    // is repeated as leading whitespace in the next.
-    // Below are examples of the outcomes we want here to explain the code.
-    // I=insert, K=keep, D=delete
-    // 1. diffing 'foo bar baz' vs 'foo baz'
-    //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
-    //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
-    //
-    // 2. Diffing 'foo bar baz' vs 'foo qux baz'
-    //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
-    //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
-    //
-    // 3. Diffing 'foo\nbar baz' vs 'foo baz'
-    //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
-    //    After cleanup, we want K'foo' D:'\nbar' K:' baz'
+
+    // Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
     //
-    // 4. Diffing 'foo baz' vs 'foo\nbar baz'
-    //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
-    //    After cleanup, we ideally want K'foo' I:'\nbar' K:' baz'
-    //    but don't actually manage this currently (the pre-cleanup change
-    //    objects don't contain enough information to make it possible).
+    // Ranges and exceptions:
+    // Latin-1 Supplement, 0080–00FF
+    //  - U+00D7  × Multiplication sign
+    //  - U+00F7  ÷ Division sign
+    // Latin Extended-A, 0100–017F
+    // Latin Extended-B, 0180–024F
+    // IPA Extensions, 0250–02AF
+    // Spacing Modifier Letters, 02B0–02FF
+    //  - U+02C7  ˇ ˇ  Caron
+    //  - U+02D8  ˘ ˘  Breve
+    //  - U+02D9  ˙ ˙  Dot Above
+    //  - U+02DA  ˚ ˚  Ring Above
+    //  - U+02DB  ˛ ˛  Ogonek
+    //  - U+02DC  ˜ ˜  Small Tilde
+    //  - U+02DD  ˝ ˝  Double Acute Accent
+    // Latin Extended Additional, 1E00–1EFF
+    const extendedWordChars = 'a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}';
+    // Each token is one of the following:
+    // - A punctuation mark plus the surrounding whitespace
+    // - A word plus the surrounding whitespace
+    // - Pure whitespace (but only in the special case where this the entire text
+    //   is just whitespace)
     //
-    // 5. Diffing 'foo   bar baz' vs 'foo  baz'
-    //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
-    //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
+    // We have to include surrounding whitespace in the tokens because the two
+    // alternative approaches produce horribly broken results:
+    // * If we just discard the whitespace, we can't fully reproduce the original
+    //   text from the sequence of tokens and any attempt to render the diff will
+    //   get the whitespace wrong.
+    // * If we have separate tokens for whitespace, then in a typical text every
+    //   second token will be a single space character. But this often results in
+    //   the optimal diff between two texts being a perverse one that preserves
+    //   the spaces between words but deletes and reinserts actual common words.
+    //   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
+    //   for an example.
     //
-    // Our handling is unavoidably imperfect in the case where there's a single
-    // indel between keeps and the whitespace has changed. For instance, consider
-    // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
-    // object to represent the insertion of the space character (which isn't even
-    // a token), we have no way to avoid losing information about the texts'
-    // original whitespace in the result we return. Still, we do our best to
-    // output something that will look sensible if we e.g. print it with
-    // insertions in green and deletions in red.
-
-    // Between two "keep" change objects (or before the first or after the last
-    // change object), we can have either:
-    // * A "delete" followed by an "insert"
-    // * Just an "insert"
-    // * Just a "delete"
-    // We handle the three cases separately.
-    if (deletion && insertion) {
-      var oldWsPrefix = deletion.value.match(/^\s*/)[0];
-      var oldWsSuffix = deletion.value.match(/\s*$/)[0];
-      var newWsPrefix = insertion.value.match(/^\s*/)[0];
-      var newWsSuffix = insertion.value.match(/\s*$/)[0];
-      if (startKeep) {
-        var commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
-        startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
-        deletion.value = removePrefix(deletion.value, commonWsPrefix);
-        insertion.value = removePrefix(insertion.value, commonWsPrefix);
-      }
-      if (endKeep) {
-        var commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
-        endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
-        deletion.value = removeSuffix(deletion.value, commonWsSuffix);
-        insertion.value = removeSuffix(insertion.value, commonWsSuffix);
-      }
-    } else if (insertion) {
-      // The whitespaces all reflect what was in the new text rather than
-      // the old, so we essentially have no information about whitespace
-      // insertion or deletion. We just want to dedupe the whitespace.
-      // We do that by having each change object keep its trailing
-      // whitespace and deleting duplicate leading whitespace where
-      // present.
-      if (startKeep) {
-        insertion.value = insertion.value.replace(/^\s*/, '');
-      }
-      if (endKeep) {
-        endKeep.value = endKeep.value.replace(/^\s*/, '');
-      }
-      // otherwise we've got a deletion and no insertion
-    } else if (startKeep && endKeep) {
-      var newWsFull = endKeep.value.match(/^\s*/)[0],
-        delWsStart = deletion.value.match(/^\s*/)[0],
-        delWsEnd = deletion.value.match(/\s*$/)[0];
-
-      // Any whitespace that comes straight after startKeep in both the old and
-      // new texts, assign to startKeep and remove from the deletion.
-      var newWsStart = longestCommonPrefix(newWsFull, delWsStart);
-      deletion.value = removePrefix(deletion.value, newWsStart);
-
-      // Any whitespace that comes straight before endKeep in both the old and
-      // new texts, and hasn't already been assigned to startKeep, assign to
-      // endKeep and remove from the deletion.
-      var newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
-      deletion.value = removeSuffix(deletion.value, newWsEnd);
-      endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
-
-      // If there's any whitespace from the new text that HASN'T already been
-      // assigned, assign it to the start:
-      startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
-    } else if (endKeep) {
-      // We are at the start of the text. Preserve all the whitespace on
-      // endKeep, and just remove whitespace from the end of deletion to the
-      // extent that it overlaps with the start of endKeep.
-      var endKeepWsPrefix = endKeep.value.match(/^\s*/)[0];
-      var deletionWsSuffix = deletion.value.match(/\s*$/)[0];
-      var overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
-      deletion.value = removeSuffix(deletion.value, overlap);
-    } else if (startKeep) {
-      // We are at the END of the text. Preserve all the whitespace on
-      // startKeep, and just remove whitespace from the start of deletion to
-      // the extent that it overlaps with the end of startKeep.
-      var startKeepWsSuffix = startKeep.value.match(/\s*$/)[0];
-      var deletionWsPrefix = deletion.value.match(/^\s*/)[0];
-      var _overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
-      deletion.value = removePrefix(deletion.value, _overlap);
-    }
-  }
-  var wordWithSpaceDiff = new Diff();
-  wordWithSpaceDiff.tokenize = function (value) {
-    // Slightly different to the tokenizeIncludingWhitespace regex used above in
-    // that this one treats each individual newline as a distinct tokens, rather
-    // than merging them into other surrounding whitespace. This was requested
-    // in https://github.com/kpdecker/jsdiff/issues/180 &
-    //    https://github.com/kpdecker/jsdiff/issues/211
-    var regex = new RegExp("(\\r?\\n)|[".concat(extendedWordChars, "]+|[^\\S\\n\\r]+|[^").concat(extendedWordChars, "]"), 'ug');
-    return value.match(regex) || [];
-  };
-  function diffWordsWithSpace(oldStr, newStr, options) {
-    return wordWithSpaceDiff.diff(oldStr, newStr, options);
-  }
-
-  function generateOptions(options, defaults) {
-    if (typeof options === 'function') {
-      defaults.callback = options;
-    } else if (options) {
-      for (var name in options) {
-        /* istanbul ignore else */
-        if (options.hasOwnProperty(name)) {
-          defaults[name] = options[name];
-        }
-      }
-    }
-    return defaults;
-  }
-
-  var lineDiff = new Diff();
-  lineDiff.tokenize = function (value, options) {
-    if (options.stripTrailingCr) {
-      // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
-      value = value.replace(/\r\n/g, '\n');
-    }
-    var retLines = [],
-      linesAndNewlines = value.split(/(\n|\r\n)/);
-
-    // Ignore the final empty token that occurs if the string ends with a new line
-    if (!linesAndNewlines[linesAndNewlines.length - 1]) {
-      linesAndNewlines.pop();
-    }
-
-    // Merge the content and line separators into single tokens
-    for (var i = 0; i < linesAndNewlines.length; i++) {
-      var line = linesAndNewlines[i];
-      if (i % 2 && !options.newlineIsToken) {
-        retLines[retLines.length - 1] += line;
-      } else {
-        retLines.push(line);
-      }
-    }
-    return retLines;
-  };
-  lineDiff.equals = function (left, right, options) {
-    // If we're ignoring whitespace, we need to normalise lines by stripping
-    // whitespace before checking equality. (This has an annoying interaction
-    // with newlineIsToken that requires special handling: if newlines get their
-    // own token, then we DON'T want to trim the *newline* tokens down to empty
-    // strings, since this would cause us to treat whitespace-only line content
-    // as equal to a separator between lines, which would be weird and
-    // inconsistent with the documented behavior of the options.)
-    if (options.ignoreWhitespace) {
-      if (!options.newlineIsToken || !left.includes('\n')) {
-        left = left.trim();
-      }
-      if (!options.newlineIsToken || !right.includes('\n')) {
-        right = right.trim();
-      }
-    } else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
-      if (left.endsWith('\n')) {
-        left = left.slice(0, -1);
-      }
-      if (right.endsWith('\n')) {
-        right = right.slice(0, -1);
-      }
+    // Keeping the surrounding whitespace of course has implications for .equals
+    // and .join, not just .tokenize.
+    // This regex does NOT fully implement the tokenization rules described above.
+    // Instead, it gives runs of whitespace their own "token". The tokenize method
+    // then handles stitching whitespace tokens onto adjacent word or punctuation
+    // tokens.
+    const tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\s+|[^${extendedWordChars}]`, 'ug');
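+    // Illustrative sketch, not part of jsdiff itself: shows roughly what the regex above
+    // produces before tokenize() stitches whitespace onto neighbouring tokens. Words,
+    // whitespace runs and punctuation each come back as separate matches, e.g.
+    // 'foo  bar!' is expected to yield something like ['foo', '  ', 'bar', '!'].
+    // The helper name is hypothetical and the function is never called by the library.
+    function exampleTokenizeIncludingWhitespace(text) {
+        return text.match(tokenizeIncludingWhitespace) || [];
+    }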
+    class WordDiff extends Diff {
+        equals(left, right, options) {
+            if (options.ignoreCase) {
+                left = left.toLowerCase();
+                right = right.toLowerCase();
+            }
+            return left.trim() === right.trim();
+        }
+        tokenize(value, options = {}) {
+            let parts;
+            if (options.intlSegmenter) {
+                const segmenter = options.intlSegmenter;
+                if (segmenter.resolvedOptions().granularity != 'word') {
+                    throw new Error('The segmenter passed must have a granularity of "word"');
+                }
+                parts = Array.from(segmenter.segment(value), segment => segment.segment);
+            }
+            else {
+                parts = value.match(tokenizeIncludingWhitespace) || [];
+            }
+            const tokens = [];
+            let prevPart = null;
+            parts.forEach(part => {
+                if ((/\s/).test(part)) {
+                    if (prevPart == null) {
+                        tokens.push(part);
+                    }
+                    else {
+                        tokens.push(tokens.pop() + part);
+                    }
+                }
+                else if (prevPart != null && (/\s/).test(prevPart)) {
+                    if (tokens[tokens.length - 1] == prevPart) {
+                        tokens.push(tokens.pop() + part);
+                    }
+                    else {
+                        tokens.push(prevPart + part);
+                    }
+                }
+                else {
+                    tokens.push(part);
+                }
+                prevPart = part;
+            });
+            return tokens;
+        }
+        join(tokens) {
+            // Tokens being joined here will always have appeared consecutively in the
+            // same text, so we can simply strip off the leading whitespace from all the
+            // tokens except the first (and except any whitespace-only tokens - but such
+            // a token will always be the first and only token anyway), then join them;
+            // the whitespace around words and punctuation will end up correct.
+            return tokens.map((token, i) => {
+                if (i == 0) {
+                    return token;
+                }
+                else {
+                    return token.replace((/^\s+/), '');
+                }
+            }).join('');
+        }
+        postProcess(changes, options) {
+            if (!changes || options.oneChangePerToken) {
+                return changes;
+            }
+            let lastKeep = null;
+            // Change objects representing any insertion or deletion since the last
+            // "keep" change object. There can be at most one of each.
+            let insertion = null;
+            let deletion = null;
+            changes.forEach(change => {
+                if (change.added) {
+                    insertion = change;
+                }
+                else if (change.removed) {
+                    deletion = change;
+                }
+                else {
+                    if (insertion || deletion) { // May be false at start of text
+                        dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
+                    }
+                    lastKeep = change;
+                    insertion = null;
+                    deletion = null;
+                }
+            });
+            if (insertion || deletion) {
+                dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
+            }
+            return changes;
+        }
     }
-    return Diff.prototype.equals.call(this, left, right, options);
-  };
-  function diffLines(oldStr, newStr, callback) {
-    return lineDiff.diff(oldStr, newStr, callback);
-  }
-
-  // Kept for backwards compatibility. This is a rather arbitrary wrapper method
-  // that just calls `diffLines` with `ignoreWhitespace: true`. It's confusing to
-  // have two ways to do exactly the same thing in the API, so we no longer
-  // document this one (library users should explicitly use `diffLines` with
-  // `ignoreWhitespace: true` instead) but we keep it around to maintain
-  // compatibility with code that used old versions.
-  function diffTrimmedLines(oldStr, newStr, callback) {
-    var options = generateOptions(callback, {
-      ignoreWhitespace: true
-    });
-    return lineDiff.diff(oldStr, newStr, options);
-  }
-
-  var sentenceDiff = new Diff();
-  sentenceDiff.tokenize = function (value) {
-    return value.split(/(\S.+?[.!?])(?=\s+|$)/);
-  };
-  function diffSentences(oldStr, newStr, callback) {
-    return sentenceDiff.diff(oldStr, newStr, callback);
-  }
-
-  var cssDiff = new Diff();
-  cssDiff.tokenize = function (value) {
-    return value.split(/([{}:;,]|\s+)/);
-  };
-  function diffCss(oldStr, newStr, callback) {
-    return cssDiff.diff(oldStr, newStr, callback);
-  }
-
-  function ownKeys(e, r) {
-    var t = Object.keys(e);
-    if (Object.getOwnPropertySymbols) {
-      var o = Object.getOwnPropertySymbols(e);
-      r && (o = o.filter(function (r) {
-        return Object.getOwnPropertyDescriptor(e, r).enumerable;
-      })), t.push.apply(t, o);
+    const wordDiff = new WordDiff();
+    function diffWords(oldStr, newStr, options) {
+        // This option has never been documented and never will be (it's clearer to
+        // just call `diffWordsWithSpace` directly if you need that behavior), but
+        // has existed in jsdiff for a long time, so we retain support for it here
+        // for the sake of backwards compatibility.
+        if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
+            return diffWordsWithSpace(oldStr, newStr, options);
+        }
+        return wordDiff.diff(oldStr, newStr, options);
+    }
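+    // Illustrative sketch, not part of jsdiff itself: a typical diffWords call. With the
+    // whitespace-preserving tokenization above, replacing one word in 'foo bar baz' with
+    // 'qux' is expected to come back roughly as keep 'foo ', remove 'bar', add 'qux',
+    // keep ' baz'. The helper name is hypothetical and the function is never called by
+    // the library.
+    function exampleDiffWords() {
+        const changes = diffWords('foo bar baz', 'foo qux baz');
+        return changes.map(c => (c.added ? '+' : c.removed ? '-' : '=') + c.value);
+    }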
+    function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
+        // Before returning, we tidy up the leading and trailing whitespace of the
+        // change objects to eliminate cases where trailing whitespace in one object
+        // is repeated as leading whitespace in the next.
+        // Below are examples of the outcomes we want here to explain the code.
+        // I=insert, K=keep, D=delete
+        // 1. Diffing 'foo bar baz' vs 'foo baz'
+        //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
+        //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
+        //
+        // 2. Diffing 'foo bar baz' vs 'foo qux baz'
+        //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
+        //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
+        //
+        // 3. Diffing 'foo\nbar baz' vs 'foo baz'
+        //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
+        //    After cleanup, we want K:'foo' D:'\nbar' K:' baz'
+        //
+        // 4. Diffing 'foo baz' vs 'foo\nbar baz'
+        //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
+        //    After cleanup, we ideally want K:'foo' I:'\nbar' K:' baz'
+        //    but don't actually manage this currently (the pre-cleanup change
+        //    objects don't contain enough information to make it possible).
+        //
+        // 5. Diffing 'foo   bar baz' vs 'foo  baz'
+        //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
+        //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
+        //
+        // Our handling is unavoidably imperfect in the case where there's a single
+        // indel between keeps and the whitespace has changed. For instance, consider
+        // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
+        // object to represent the insertion of the space character (which isn't even
+        // a token), we have no way to avoid losing information about the texts'
+        // original whitespace in the result we return. Still, we do our best to
+        // output something that will look sensible if we e.g. print it with
+        // insertions in green and deletions in red.
+        // Between two "keep" change objects (or before the first or after the last
+        // change object), we can have either:
+        // * A "delete" followed by an "insert"
+        // * Just an "insert"
+        // * Just a "delete"
+        // We handle the three cases separately.
+        if (deletion && insertion) {
+            const oldWsPrefix = leadingWs(deletion.value);
+            const oldWsSuffix = trailingWs(deletion.value);
+            const newWsPrefix = leadingWs(insertion.value);
+            const newWsSuffix = trailingWs(insertion.value);
+            if (startKeep) {
+                const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
+                startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
+                deletion.value = removePrefix(deletion.value, commonWsPrefix);
+                insertion.value = removePrefix(insertion.value, commonWsPrefix);
+            }
+            if (endKeep) {
+                const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
+                endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
+                deletion.value = removeSuffix(deletion.value, commonWsSuffix);
+                insertion.value = removeSuffix(insertion.value, commonWsSuffix);
+            }
+        }
+        else if (insertion) {
+            // The whitespaces all reflect what was in the new text rather than
+            // the old, so we essentially have no information about whitespace
+            // insertion or deletion. We just want to dedupe the whitespace.
+            // We do that by having each change object keep its trailing
+            // whitespace and deleting duplicate leading whitespace where
+            // present.
+            if (startKeep) {
+                const ws = leadingWs(insertion.value);
+                insertion.value = insertion.value.substring(ws.length);
+            }
+            if (endKeep) {
+                const ws = leadingWs(endKeep.value);
+                endKeep.value = endKeep.value.substring(ws.length);
+            }
+            // otherwise we've got a deletion and no insertion
+        }
+        else if (startKeep && endKeep) {
+            const newWsFull = leadingWs(endKeep.value), delWsStart = leadingWs(deletion.value), delWsEnd = trailingWs(deletion.value);
+            // Any whitespace that comes straight after startKeep in both the old and
+            // new texts, assign to startKeep and remove from the deletion.
+            const newWsStart = longestCommonPrefix(newWsFull, delWsStart);
+            deletion.value = removePrefix(deletion.value, newWsStart);
+            // Any whitespace that comes straight before endKeep in both the old and
+            // new texts, and hasn't already been assigned to startKeep, assign to
+            // endKeep and remove from the deletion.
+            const newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
+            deletion.value = removeSuffix(deletion.value, newWsEnd);
+            endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
+            // If there's any whitespace from the new text that HASN'T already been
+            // assigned, assign it to the start:
+            startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
+        }
+        else if (endKeep) {
+            // We are at the start of the text. Preserve all the whitespace on
+            // endKeep, and just remove whitespace from the end of deletion to the
+            // extent that it overlaps with the start of endKeep.
+            const endKeepWsPrefix = leadingWs(endKeep.value);
+            const deletionWsSuffix = trailingWs(deletion.value);
+            const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
+            deletion.value = removeSuffix(deletion.value, overlap);
+        }
+        else if (startKeep) {
+            // We are at the END of the text. Preserve all the whitespace on
+            // startKeep, and just remove whitespace from the start of deletion to
+            // the extent that it overlaps with the end of startKeep.
+            const startKeepWsSuffix = trailingWs(startKeep.value);
+            const deletionWsPrefix = leadingWs(deletion.value);
+            const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
+            deletion.value = removePrefix(deletion.value, overlap);
+        }
     }
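+    // Illustrative sketch, not part of jsdiff itself: mirrors example 1 in the comment
+    // above. For a pure deletion the dedupe step is expected to leave the kept pieces as
+    // 'foo ' and 'baz', with the deleted piece carrying 'bar '. Hypothetical helper,
+    // never called by the library.
+    function exampleWhitespaceDedupe() {
+        return diffWords('foo bar baz', 'foo baz');
+    }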
-    return t;
-  }
-  function _objectSpread2(e) {
-    for (var r = 1; r < arguments.length; r++) {
-      var t = null != arguments[r] ? arguments[r] : {};
-      r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
-        _defineProperty(e, r, t[r]);
-      }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
-        Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
-      });
+    class WordsWithSpaceDiff extends Diff {
+        tokenize(value) {
+            // Slightly different to the tokenizeIncludingWhitespace regex used above in
+            // that this one treats each individual newline as a distinct token, rather
+            // than merging them into other surrounding whitespace. This was requested
+            // in https://github.com/kpdecker/jsdiff/issues/180 &
+            //    https://github.com/kpdecker/jsdiff/issues/211
+            const regex = new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`, 'ug');
+            return value.match(regex) || [];
+        }
     }
-    return e;
-  }
-  function _toPrimitive(t, r) {
-    if ("object" != typeof t || !t) return t;
-    var e = t[Symbol.toPrimitive];
-    if (void 0 !== e) {
-      var i = e.call(t, r || "default");
-      if ("object" != typeof i) return i;
-      throw new TypeError("@@toPrimitive must return a primitive value.");
+    const wordsWithSpaceDiff = new WordsWithSpaceDiff();
+    function diffWordsWithSpace(oldStr, newStr, options) {
+        return wordsWithSpaceDiff.diff(oldStr, newStr, options);
     }
-    return ("string" === r ? String : Number)(t);
-  }
-  function _toPropertyKey(t) {
-    var i = _toPrimitive(t, "string");
-    return "symbol" == typeof i ? i : i + "";
-  }
-  function _typeof(o) {
-    "@babel/helpers - typeof";
 
-    return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) {
-      return typeof o;
-    } : function (o) {
-      return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o;
-    }, _typeof(o);
-  }
-  function _defineProperty(obj, key, value) {
-    key = _toPropertyKey(key);
-    if (key in obj) {
-      Object.defineProperty(obj, key, {
-        value: value,
-        enumerable: true,
-        configurable: true,
-        writable: true
-      });
-    } else {
-      obj[key] = value;
+    function generateOptions(options, defaults) {
+        if (typeof options === 'function') {
+            defaults.callback = options;
+        }
+        else if (options) {
+            for (const name in options) {
+                /* istanbul ignore else */
+                if (Object.prototype.hasOwnProperty.call(options, name)) {
+                    defaults[name] = options[name];
+                }
+            }
+        }
+        return defaults;
     }
-    return obj;
-  }
-  function _toConsumableArray(arr) {
-    return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread();
-  }
-  function _arrayWithoutHoles(arr) {
-    if (Array.isArray(arr)) return _arrayLikeToArray(arr);
-  }
-  function _iterableToArray(iter) {
-    if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter);
-  }
-  function _unsupportedIterableToArray(o, minLen) {
-    if (!o) return;
-    if (typeof o === "string") return _arrayLikeToArray(o, minLen);
-    var n = Object.prototype.toString.call(o).slice(8, -1);
-    if (n === "Object" && o.constructor) n = o.constructor.name;
-    if (n === "Map" || n === "Set") return Array.from(o);
-    if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
-  }
-  function _arrayLikeToArray(arr, len) {
-    if (len == null || len > arr.length) len = arr.length;
-    for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
-    return arr2;
-  }
-  function _nonIterableSpread() {
-    throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
-  }
-
-  var jsonDiff = new Diff();
-  // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
-  // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
-  jsonDiff.useLongestToken = true;
-  jsonDiff.tokenize = lineDiff.tokenize;
-  jsonDiff.castInput = function (value, options) {
-    var undefinedReplacement = options.undefinedReplacement,
-      _options$stringifyRep = options.stringifyReplacer,
-      stringifyReplacer = _options$stringifyRep === void 0 ? function (k, v) {
-        return typeof v === 'undefined' ? undefinedReplacement : v;
-      } : _options$stringifyRep;
-    return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), stringifyReplacer, '  ');
-  };
-  jsonDiff.equals = function (left, right, options) {
-    return Diff.prototype.equals.call(jsonDiff, left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options);
-  };
-  function diffJson(oldObj, newObj, options) {
-    return jsonDiff.diff(oldObj, newObj, options);
-  }
 
-  // This function handles the presence of circular references by bailing out when encountering an
-  // object that is already on the "stack" of items being processed. Accepts an optional replacer
-  function canonicalize(obj, stack, replacementStack, replacer, key) {
-    stack = stack || [];
-    replacementStack = replacementStack || [];
-    if (replacer) {
-      obj = replacer(key, obj);
-    }
-    var i;
-    for (i = 0; i < stack.length; i += 1) {
-      if (stack[i] === obj) {
-        return replacementStack[i];
-      }
-    }
-    var canonicalizedObj;
-    if ('[object Array]' === Object.prototype.toString.call(obj)) {
-      stack.push(obj);
-      canonicalizedObj = new Array(obj.length);
-      replacementStack.push(canonicalizedObj);
-      for (i = 0; i < obj.length; i += 1) {
-        canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, key);
-      }
-      stack.pop();
-      replacementStack.pop();
-      return canonicalizedObj;
+    class LineDiff extends Diff {
+        constructor() {
+            super(...arguments);
+            this.tokenize = tokenize;
+        }
+        equals(left, right, options) {
+            // If we're ignoring whitespace, we need to normalise lines by stripping
+            // whitespace before checking equality. (This has an annoying interaction
+            // with newlineIsToken that requires special handling: if newlines get their
+            // own token, then we DON'T want to trim the *newline* tokens down to empty
+            // strings, since this would cause us to treat whitespace-only line content
+            // as equal to a separator between lines, which would be weird and
+            // inconsistent with the documented behavior of the options.)
+            if (options.ignoreWhitespace) {
+                if (!options.newlineIsToken || !left.includes('\n')) {
+                    left = left.trim();
+                }
+                if (!options.newlineIsToken || !right.includes('\n')) {
+                    right = right.trim();
+                }
+            }
+            else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
+                if (left.endsWith('\n')) {
+                    left = left.slice(0, -1);
+                }
+                if (right.endsWith('\n')) {
+                    right = right.slice(0, -1);
+                }
+            }
+            return super.equals(left, right, options);
+        }
     }
-    if (obj && obj.toJSON) {
-      obj = obj.toJSON();
+    const lineDiff = new LineDiff();
+    function diffLines(oldStr, newStr, options) {
+        return lineDiff.diff(oldStr, newStr, options);
     }
-    if (_typeof(obj) === 'object' && obj !== null) {
-      stack.push(obj);
-      canonicalizedObj = {};
-      replacementStack.push(canonicalizedObj);
-      var sortedKeys = [],
-        _key;
-      for (_key in obj) {
-        /* istanbul ignore else */
-        if (Object.prototype.hasOwnProperty.call(obj, _key)) {
-          sortedKeys.push(_key);
-        }
-      }
-      sortedKeys.sort();
-      for (i = 0; i < sortedKeys.length; i += 1) {
-        _key = sortedKeys[i];
-        canonicalizedObj[_key] = canonicalize(obj[_key], stack, replacementStack, replacer, _key);
-      }
-      stack.pop();
-      replacementStack.pop();
-    } else {
-      canonicalizedObj = obj;
+    function diffTrimmedLines(oldStr, newStr, options) {
+        options = generateOptions(options, { ignoreWhitespace: true });
+        return lineDiff.diff(oldStr, newStr, options);
     }
-    return canonicalizedObj;
-  }
-
-  var arrayDiff = new Diff();
-  arrayDiff.tokenize = function (value) {
-    return value.slice();
-  };
-  arrayDiff.join = arrayDiff.removeEmpty = function (value) {
-    return value;
-  };
-  function diffArrays(oldArr, newArr, callback) {
-    return arrayDiff.diff(oldArr, newArr, callback);
-  }
-
-  function unixToWin(patch) {
-    if (Array.isArray(patch)) {
-      return patch.map(unixToWin);
+    // Exported standalone so it can be used from jsonDiff too.
+    function tokenize(value, options) {
+        if (options.stripTrailingCr) {
+            // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
+            value = value.replace(/\r\n/g, '\n');
+        }
+        const retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/);
+        // Ignore the final empty token that occurs if the string ends with a newline
+        if (!linesAndNewlines[linesAndNewlines.length - 1]) {
+            linesAndNewlines.pop();
+        }
+        // Merge the content and line separators into single tokens
+        for (let i = 0; i < linesAndNewlines.length; i++) {
+            const line = linesAndNewlines[i];
+            if (i % 2 && !options.newlineIsToken) {
+                retLines[retLines.length - 1] += line;
+            }
+            else {
+                retLines.push(line);
+            }
+        }
+        return retLines;
+    }
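+    // Illustrative sketch, not part of jsdiff itself: a line diff in which each token is
+    // a whole line including its trailing newline. With ignoreWhitespace set, lines are
+    // trimmed before comparison as described in LineDiff.equals above. Hypothetical
+    // helper, never called by the library.
+    function exampleDiffLines() {
+        const oldText = 'alpha\n  beta\ngamma\n';
+        const newText = 'alpha\nbeta\ndelta\n';
+        return diffLines(oldText, newText, { ignoreWhitespace: true });
+    }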
+
+    function isSentenceEndPunct(char) {
+        return char == '.' || char == '!' || char == '?';
+    }
+    class SentenceDiff extends Diff {
+        tokenize(value) {
+            var _a;
+            // If in future we drop support for environments that don't support lookbehinds, we can replace
+            // this entire function with:
+            //     return value.split(/(?<=[.!?])(\s+|$)/);
+            // but until then, for similar reasons to the trailingWs function in string.ts, we are forced
+            // to do this verbosely "by hand" instead of using a regex.
+            const result = [];
+            let tokenStartI = 0;
+            for (let i = 0; i < value.length; i++) {
+                if (i == value.length - 1) {
+                    result.push(value.slice(tokenStartI));
+                    break;
+                }
+                if (isSentenceEndPunct(value[i]) && value[i + 1].match(/\s/)) {
+                    // We've hit a sentence break - i.e. a punctuation mark followed by whitespace.
+                    // We now want to push TWO tokens to the result:
+                    // 1. the sentence
+                    result.push(value.slice(tokenStartI, i + 1));
+                    // 2. the whitespace
+                    i = tokenStartI = i + 1;
+                    while ((_a = value[i + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) {
+                        i++;
+                    }
+                    result.push(value.slice(tokenStartI, i + 1));
+                    // Then the next token (a sentence) starts on the character after the whitespace.
+                    // (It's okay if this is off the end of the string - then the outer loop will terminate
+                    // here anyway.)
+                    tokenStartI = i + 1;
+                }
+            }
+            return result;
+        }
     }
-    return _objectSpread2(_objectSpread2({}, patch), {}, {
-      hunks: patch.hunks.map(function (hunk) {
-        return _objectSpread2(_objectSpread2({}, hunk), {}, {
-          lines: hunk.lines.map(function (line, i) {
-            var _hunk$lines;
-            return line.startsWith('\\') || line.endsWith('\r') || (_hunk$lines = hunk.lines[i + 1]) !== null && _hunk$lines !== void 0 && _hunk$lines.startsWith('\\') ? line : line + '\r';
-          })
-        });
-      })
-    });
-  }
-  function winToUnix(patch) {
-    if (Array.isArray(patch)) {
-      return patch.map(winToUnix);
+    const sentenceDiff = new SentenceDiff();
+    function diffSentences(oldStr, newStr, options) {
+        return sentenceDiff.diff(oldStr, newStr, options);
     }
-    return _objectSpread2(_objectSpread2({}, patch), {}, {
-      hunks: patch.hunks.map(function (hunk) {
-        return _objectSpread2(_objectSpread2({}, hunk), {}, {
-          lines: hunk.lines.map(function (line) {
-            return line.endsWith('\r') ? line.substring(0, line.length - 1) : line;
-          })
-        });
-      })
-    });
-  }
 
-  /**
-   * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
-   * no line endings).
-   */
-  function isUnix(patch) {
-    if (!Array.isArray(patch)) {
-      patch = [patch];
+    class CssDiff extends Diff {
+        tokenize(value) {
+            return value.split(/([{}:;,]|\s+)/);
+        }
     }
-    return !patch.some(function (index) {
-      return index.hunks.some(function (hunk) {
-        return hunk.lines.some(function (line) {
-          return !line.startsWith('\\') && line.endsWith('\r');
-        });
-      });
-    });
-  }
-
-  /**
-   * Returns true if the patch uses Windows line endings and only Windows line endings.
-   */
-  function isWin(patch) {
-    if (!Array.isArray(patch)) {
-      patch = [patch];
+    const cssDiff = new CssDiff();
+    function diffCss(oldStr, newStr, options) {
+        return cssDiff.diff(oldStr, newStr, options);
     }
-    return patch.some(function (index) {
-      return index.hunks.some(function (hunk) {
-        return hunk.lines.some(function (line) {
-          return line.endsWith('\r');
-        });
-      });
-    }) && patch.every(function (index) {
-      return index.hunks.every(function (hunk) {
-        return hunk.lines.every(function (line, i) {
-          var _hunk$lines2;
-          return line.startsWith('\\') || line.endsWith('\r') || ((_hunk$lines2 = hunk.lines[i + 1]) === null || _hunk$lines2 === void 0 ? void 0 : _hunk$lines2.startsWith('\\'));
-        });
-      });
-    });
-  }
-
-  function parsePatch(uniDiff) {
-    var diffstr = uniDiff.split(/\n/),
-      list = [],
-      i = 0;
-    function parseIndex() {
-      var index = {};
-      list.push(index);
 
-      // Parse diff metadata
-      while (i < diffstr.length) {
-        var line = diffstr[i];
-
-        // File header found, end parsing diff metadata
-        if (/^(\-\-\-|\+\+\+|@@)\s/.test(line)) {
-          break;
+    class JsonDiff extends Diff {
+        constructor() {
+            super(...arguments);
+            this.tokenize = tokenize;
         }
-
-        // Diff index
-        var header = /^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/.exec(line);
-        if (header) {
-          index.index = header[1];
+        get useLongestToken() {
+            // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
+            // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
+            return true;
+        }
+        castInput(value, options) {
+            const { undefinedReplacement, stringifyReplacer = (k, v) => typeof v === 'undefined' ? undefinedReplacement : v } = options;
+            return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, '  ');
+        }
+        equals(left, right, options) {
+            return super.equals(left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options);
         }
-        i++;
-      }
-
-      // Parse file headers if they are defined. Unified diff requires them, but
-      // there's no technical issues to have an isolated hunk without file header
-      parseFileHeader(index);
-      parseFileHeader(index);
-
-      // Parse hunks
-      index.hunks = [];
-      while (i < diffstr.length) {
-        var _line = diffstr[i];
-        if (/^(Index:\s|diff\s|\-\-\-\s|\+\+\+\s|===================================================================)/.test(_line)) {
-          break;
-        } else if (/^@@/.test(_line)) {
-          index.hunks.push(parseHunk());
-        } else if (_line) {
-          throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(_line));
-        } else {
-          i++;
-        }
-      }
-    }
-
-    // Parses the --- and +++ headers, if none are found, no lines
-    // are consumed.
-    function parseFileHeader(index) {
-      var fileHeader = /^(---|\+\+\+)\s+(.*)\r?$/.exec(diffstr[i]);
-      if (fileHeader) {
-        var keyPrefix = fileHeader[1] === '---' ? 'old' : 'new';
-        var data = fileHeader[2].split('\t', 2);
-        var fileName = data[0].replace(/\\\\/g, '\\');
-        if (/^".*"$/.test(fileName)) {
-          fileName = fileName.substr(1, fileName.length - 2);
-        }
-        index[keyPrefix + 'FileName'] = fileName;
-        index[keyPrefix + 'Header'] = (data[1] || '').trim();
-        i++;
-      }
-    }
-
-    // Parses a hunk
-    // This assumes that we are at the start of a hunk.
-    function parseHunk() {
-      var chunkHeaderIndex = i,
-        chunkHeaderLine = diffstr[i++],
-        chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
-      var hunk = {
-        oldStart: +chunkHeader[1],
-        oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
-        newStart: +chunkHeader[3],
-        newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
-        lines: []
-      };
-
-      // Unified Diff Format quirk: If the chunk size is 0,
-      // the first number is one lower than one would expect.
-      // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-      if (hunk.oldLines === 0) {
-        hunk.oldStart += 1;
-      }
-      if (hunk.newLines === 0) {
-        hunk.newStart += 1;
-      }
-      var addCount = 0,
-        removeCount = 0;
-      for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || (_diffstr$i = diffstr[i]) !== null && _diffstr$i !== void 0 && _diffstr$i.startsWith('\\')); i++) {
-        var _diffstr$i;
-        var operation = diffstr[i].length == 0 && i != diffstr.length - 1 ? ' ' : diffstr[i][0];
-        if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
-          hunk.lines.push(diffstr[i]);
-          if (operation === '+') {
-            addCount++;
-          } else if (operation === '-') {
-            removeCount++;
-          } else if (operation === ' ') {
-            addCount++;
-            removeCount++;
-          }
-        } else {
-          throw new Error("Hunk at line ".concat(chunkHeaderIndex + 1, " contained invalid line ").concat(diffstr[i]));
-        }
-      }
-
-      // Handle the empty block count case
-      if (!addCount && hunk.newLines === 1) {
-        hunk.newLines = 0;
-      }
-      if (!removeCount && hunk.oldLines === 1) {
-        hunk.oldLines = 0;
-      }
-
-      // Perform sanity checking
-      if (addCount !== hunk.newLines) {
-        throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-      }
-      if (removeCount !== hunk.oldLines) {
-        throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-      }
-      return hunk;
     }
-    while (i < diffstr.length) {
-      parseIndex();
+    const jsonDiff = new JsonDiff();
+    function diffJson(oldStr, newStr, options) {
+        return jsonDiff.diff(oldStr, newStr, options);
+    }
+    // This function handles the presence of circular references by bailing out when encountering an
+    // object that is already on the "stack" of items being processed. Accepts an optional replacer
+    function canonicalize(obj, stack, replacementStack, replacer, key) {
+        stack = stack || [];
+        replacementStack = replacementStack || [];
+        if (replacer) {
+            obj = replacer(key === undefined ? '' : key, obj);
+        }
+        let i;
+        for (i = 0; i < stack.length; i += 1) {
+            if (stack[i] === obj) {
+                return replacementStack[i];
+            }
+        }
+        let canonicalizedObj;
+        if ('[object Array]' === Object.prototype.toString.call(obj)) {
+            stack.push(obj);
+            canonicalizedObj = new Array(obj.length);
+            replacementStack.push(canonicalizedObj);
+            for (i = 0; i < obj.length; i += 1) {
+                canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, String(i));
+            }
+            stack.pop();
+            replacementStack.pop();
+            return canonicalizedObj;
+        }
+        if (obj && obj.toJSON) {
+            obj = obj.toJSON();
+        }
+        if (typeof obj === 'object' && obj !== null) {
+            stack.push(obj);
+            canonicalizedObj = {};
+            replacementStack.push(canonicalizedObj);
+            const sortedKeys = [];
+            let key;
+            for (key in obj) {
+                /* istanbul ignore else */
+                if (Object.prototype.hasOwnProperty.call(obj, key)) {
+                    sortedKeys.push(key);
+                }
+            }
+            sortedKeys.sort();
+            for (i = 0; i < sortedKeys.length; i += 1) {
+                key = sortedKeys[i];
+                canonicalizedObj[key] = canonicalize(obj[key], stack, replacementStack, replacer, key);
+            }
+            stack.pop();
+            replacementStack.pop();
+        }
+        else {
+            canonicalizedObj = obj;
+        }
+        return canonicalizedObj;
     }
-    return list;
-  }
 
-  // Iterator that traverses in the range of [min, max], stepping
-  // by distance from a given start position. I.e. for [0, 4], with
-  // start of 2, this will iterate 2, 3, 1, 4, 0.
-  function distanceIterator (start, minLine, maxLine) {
-    var wantForward = true,
-      backwardExhausted = false,
-      forwardExhausted = false,
-      localOffset = 1;
-    return function iterator() {
-      if (wantForward && !forwardExhausted) {
-        if (backwardExhausted) {
-          localOffset++;
-        } else {
-          wantForward = false;
+    class ArrayDiff extends Diff {
+        tokenize(value) {
+            return value.slice();
         }
-
-        // Check if trying to fit beyond text length, and if not, check it fits
-        // after offset location (or desired location on first iteration)
-        if (start + localOffset <= maxLine) {
-          return start + localOffset;
+        join(value) {
+            return value;
         }
-        forwardExhausted = true;
-      }
-      if (!backwardExhausted) {
-        if (!forwardExhausted) {
-          wantForward = true;
+        removeEmpty(value) {
+            return value;
         }
-
-        // Check if trying to fit before text beginning, and if not, check it fits
-        // before offset location
-        if (minLine <= start - localOffset) {
-          return start - localOffset++;
-        }
-        backwardExhausted = true;
-        return iterator();
-      }
-
-      // We tried to fit hunk before text beginning and beyond text length, then
-      // hunk can't fit on the text. Return undefined
-    };
-  }
-
-  function applyPatch(source, uniDiff) {
-    var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-    if (typeof uniDiff === 'string') {
-      uniDiff = parsePatch(uniDiff);
-    }
-    if (Array.isArray(uniDiff)) {
-      if (uniDiff.length > 1) {
-        throw new Error('applyPatch only works with a single input.');
-      }
-      uniDiff = uniDiff[0];
-    }
-    if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
-      if (hasOnlyWinLineEndings(source) && isUnix(uniDiff)) {
-        uniDiff = unixToWin(uniDiff);
-      } else if (hasOnlyUnixLineEndings(source) && isWin(uniDiff)) {
-        uniDiff = winToUnix(uniDiff);
-      }
     }
-
-    // Apply the diff to the input
-    var lines = source.split('\n'),
-      hunks = uniDiff.hunks,
-      compareLine = options.compareLine || function (lineNumber, line, operation, patchContent) {
-        return line === patchContent;
-      },
-      fuzzFactor = options.fuzzFactor || 0,
-      minLine = 0;
-    if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
-      throw new Error('fuzzFactor must be a non-negative integer');
+    const arrayDiff = new ArrayDiff();
+    function diffArrays(oldArr, newArr, options) {
+        return arrayDiff.diff(oldArr, newArr, options);
     }
 
-    // Special case for empty patch.
-    if (!hunks.length) {
-      return source;
+    function unixToWin(patch) {
+        if (Array.isArray(patch)) {
+            // It would be cleaner if instead of the line below we could just write
+            //     return patch.map(unixToWin)
+            // but mysteriously TypeScript (v5.7.3 at the time of writing) does not like this and it will
+            // refuse to compile, thinking that unixToWin could then return StructuredPatch[][] and the
+            // result would be incompatible with the overload signatures.
+            // See bug report at https://github.com/microsoft/TypeScript/issues/61398.
+            return patch.map(p => unixToWin(p));
+        }
+        return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line, i) => {
+                    var _a;
+                    return (line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')))
+                        ? line
+                        : line + '\r';
+                }) }))) });
+    }
+    function winToUnix(patch) {
+        if (Array.isArray(patch)) {
+            // (See comment above equivalent line in unixToWin)
+            return patch.map(p => winToUnix(p));
+        }
+        return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map(line => line.endsWith('\r') ? line.substring(0, line.length - 1) : line) }))) });
     }
-
-    // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
-    // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
-    // newline that already exists - then we either return false and fail to apply the patch (if
-    // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
-    // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
-    var prevLine = '',
-      removeEOFNL = false,
-      addEOFNL = false;
-    for (var i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
-      var line = hunks[hunks.length - 1].lines[i];
-      if (line[0] == '\\') {
-        if (prevLine[0] == '+') {
-          removeEOFNL = true;
-        } else if (prevLine[0] == '-') {
-          addEOFNL = true;
-        }
-      }
-      prevLine = line;
+    /**
+     * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
+     * no line endings).
+     */
+    function isUnix(patch) {
+        if (!Array.isArray(patch)) {
+            patch = [patch];
+        }
+        return !patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => !line.startsWith('\\') && line.endsWith('\r'))));
     }
-    if (removeEOFNL) {
-      if (addEOFNL) {
-        // This means the final line gets changed but doesn't have a trailing newline in either the
-        // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
-        // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
-        if (!fuzzFactor && lines[lines.length - 1] == '') {
-          return false;
-        }
-      } else if (lines[lines.length - 1] == '') {
-        lines.pop();
-      } else if (!fuzzFactor) {
-        return false;
-      }
-    } else if (addEOFNL) {
-      if (lines[lines.length - 1] != '') {
-        lines.push('');
-      } else if (!fuzzFactor) {
-        return false;
-      }
+    /**
+     * Returns true if the patch uses Windows line endings and only Windows line endings.
+     */
+    function isWin(patch) {
+        if (!Array.isArray(patch)) {
+            patch = [patch];
+        }
+        return patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => line.endsWith('\r'))))
+            && patch.every(index => index.hunks.every(hunk => hunk.lines.every((line, i) => { var _a; return line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')); })));
     }
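+    // Illustrative sketch, not part of jsdiff itself: a minimal structured patch used to
+    // exercise the line-ending helpers above. isUnix/isWin inspect hunk lines for a
+    // trailing '\r', and unixToWin/winToUnix add or strip it. The patch literal and the
+    // helper name are hypothetical; the function is never called by the library.
+    function exampleLineEndingHelpers() {
+        const patch = {
+            oldFileName: 'a.txt',
+            newFileName: 'a.txt',
+            oldHeader: '',
+            newHeader: '',
+            hunks: [{ oldStart: 1, oldLines: 1, newStart: 1, newLines: 1, lines: ['-old', '+new'] }]
+        };
+        // Expected: isUnix(patch) === true, isWin(patch) === false, and unixToWin(patch)
+        // returns a copy whose hunk lines end with '\r'.
+        return { unix: isUnix(patch), win: isWin(patch), converted: unixToWin(patch) };
+    }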
 
     /**
-     * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
-     * insertions, substitutions, or deletions, while ensuring also that:
-     * - lines deleted in the hunk match exactly, and
-     * - wherever an insertion operation or block of insertion operations appears in the hunk, the
-     *   immediately preceding and following lines of context match exactly
-     *
-     * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
+     * Parses a patch into structured data, in the same structure returned by `structuredPatch`.
      *
-     * If the hunk can be applied, returns an object with properties `oldLineLastI` and
-     * `replacementLines`. Otherwise, returns null.
+     * @return a JSON object representation of a patch, suitable for use with the `applyPatch` method.
      */
-    function applyHunk(hunkLines, toPos, maxErrors) {
-      var hunkLinesI = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;
-      var lastContextLineMatched = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : true;
-      var patchedLines = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : [];
-      var patchedLinesLength = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 0;
-      var nConsecutiveOldContextLines = 0;
-      var nextContextLineMustMatch = false;
-      for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
-        var hunkLine = hunkLines[hunkLinesI],
-          operation = hunkLine.length > 0 ? hunkLine[0] : ' ',
-          content = hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine;
-        if (operation === '-') {
-          if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-            toPos++;
-            nConsecutiveOldContextLines = 0;
-          } else {
-            if (!maxErrors || lines[toPos] == null) {
-              return null;
-            }
-            patchedLines[patchedLinesLength] = lines[toPos];
-            return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
-          }
-        }
-        if (operation === '+') {
-          if (!lastContextLineMatched) {
-            return null;
-          }
-          patchedLines[patchedLinesLength] = content;
-          patchedLinesLength++;
-          nConsecutiveOldContextLines = 0;
-          nextContextLineMustMatch = true;
-        }
-        if (operation === ' ') {
-          nConsecutiveOldContextLines++;
-          patchedLines[patchedLinesLength] = lines[toPos];
-          if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-            patchedLinesLength++;
-            lastContextLineMatched = true;
-            nextContextLineMustMatch = false;
-            toPos++;
-          } else {
-            if (nextContextLineMustMatch || !maxErrors) {
-              return null;
+    function parsePatch(uniDiff) {
+        const diffstr = uniDiff.split(/\n/), list = [];
+        let i = 0;
+        function parseIndex() {
+            const index = {};
+            list.push(index);
+            // Parse diff metadata
+            while (i < diffstr.length) {
+                const line = diffstr[i];
+                // File header found, end parsing diff metadata
+                if ((/^(---|\+\+\+|@@)\s/).test(line)) {
+                    break;
+                }
+                // Diff index
+                const header = (/^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/).exec(line);
+                if (header) {
+                    index.index = header[1];
+                }
+                i++;
             }
-
-            // Consider 3 possibilities in sequence:
-            // 1. lines contains a *substitution* not included in the patch context, or
-            // 2. lines contains an *insertion* not included in the patch context, or
-            // 3. lines contains a *deletion* not included in the patch context
-            // The first two options are of course only possible if the line from lines is non-null -
-            // i.e. only option 3 is possible if we've overrun the end of the old file.
-            return lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength);
-          }
-        }
-      }
-
-      // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
-      // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
-      // that starts in this hunk's trailing context.
-      patchedLinesLength -= nConsecutiveOldContextLines;
-      toPos -= nConsecutiveOldContextLines;
-      patchedLines.length = patchedLinesLength;
-      return {
-        patchedLines: patchedLines,
-        oldLineLastI: toPos - 1
-      };
-    }
-    var resultLines = [];
-
-    // Search best fit offsets for each hunk based on the previous ones
-    var prevHunkOffset = 0;
-    for (var _i = 0; _i < hunks.length; _i++) {
-      var hunk = hunks[_i];
-      var hunkResult = void 0;
-      var maxLine = lines.length - hunk.oldLines + fuzzFactor;
-      var toPos = void 0;
-      for (var maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
-        toPos = hunk.oldStart + prevHunkOffset - 1;
-        var iterator = distanceIterator(toPos, minLine, maxLine);
-        for (; toPos !== undefined; toPos = iterator()) {
-          hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
-          if (hunkResult) {
-            break;
-          }
-        }
-        if (hunkResult) {
-          break;
-        }
-      }
-      if (!hunkResult) {
-        return false;
-      }
-
-      // Copy everything from the end of where we applied the last hunk to the start of this hunk
-      for (var _i2 = minLine; _i2 < toPos; _i2++) {
-        resultLines.push(lines[_i2]);
-      }
-
-      // Add the lines produced by applying the hunk:
-      for (var _i3 = 0; _i3 < hunkResult.patchedLines.length; _i3++) {
-        var _line = hunkResult.patchedLines[_i3];
-        resultLines.push(_line);
-      }
-
-      // Set lower text limit to end of the current hunk, so next ones don't try
-      // to fit over already patched text
-      minLine = hunkResult.oldLineLastI + 1;
-
-      // Note the offset between where the patch said the hunk should've applied and where we
-      // applied it, so we can adjust future hunks accordingly:
-      prevHunkOffset = toPos + 1 - hunk.oldStart;
-    }
-
-    // Copy over the rest of the lines from the old text
-    for (var _i4 = minLine; _i4 < lines.length; _i4++) {
-      resultLines.push(lines[_i4]);
-    }
-    return resultLines.join('\n');
-  }
-
-  // Wrapper that supports multiple file patches via callbacks.
-  function applyPatches(uniDiff, options) {
-    if (typeof uniDiff === 'string') {
-      uniDiff = parsePatch(uniDiff);
-    }
-    var currentIndex = 0;
-    function processIndex() {
-      var index = uniDiff[currentIndex++];
-      if (!index) {
-        return options.complete();
-      }
-      options.loadFile(index, function (err, data) {
-        if (err) {
-          return options.complete(err);
-        }
-        var updatedContent = applyPatch(data, index, options);
-        options.patched(index, updatedContent, function (err) {
-          if (err) {
-            return options.complete(err);
-          }
-          processIndex();
-        });
-      });
-    }
-    processIndex();
-  }
-
-  function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-    if (!options) {
-      options = {};
-    }
-    if (typeof options === 'function') {
-      options = {
-        callback: options
-      };
-    }
-    if (typeof options.context === 'undefined') {
-      options.context = 4;
-    }
-    if (options.newlineIsToken) {
-      throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
-    }
-    if (!options.callback) {
-      return diffLinesResultToPatch(diffLines(oldStr, newStr, options));
-    } else {
-      var _options = options,
-        _callback = _options.callback;
-      diffLines(oldStr, newStr, _objectSpread2(_objectSpread2({}, options), {}, {
-        callback: function callback(diff) {
-          var patch = diffLinesResultToPatch(diff);
-          _callback(patch);
-        }
-      }));
-    }
-    function diffLinesResultToPatch(diff) {
-      // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
-      //         of lines containing trailing newline characters. We'll tidy up later...
-
-      if (!diff) {
-        return;
-      }
-      diff.push({
-        value: '',
-        lines: []
-      }); // Append an empty value to make cleanup easier
-
-      function contextLines(lines) {
-        return lines.map(function (entry) {
-          return ' ' + entry;
-        });
-      }
-      var hunks = [];
-      var oldRangeStart = 0,
-        newRangeStart = 0,
-        curRange = [],
-        oldLine = 1,
-        newLine = 1;
-      var _loop = function _loop() {
-        var current = diff[i],
-          lines = current.lines || splitLines(current.value);
-        current.lines = lines;
-        if (current.added || current.removed) {
-          var _curRange;
-          // If we have previous context, start with that
-          if (!oldRangeStart) {
-            var prev = diff[i - 1];
-            oldRangeStart = oldLine;
-            newRangeStart = newLine;
-            if (prev) {
-              curRange = options.context > 0 ? contextLines(prev.lines.slice(-options.context)) : [];
-              oldRangeStart -= curRange.length;
-              newRangeStart -= curRange.length;
-            }
-          }
-
-          // Output our changes
-          (_curRange = curRange).push.apply(_curRange, _toConsumableArray(lines.map(function (entry) {
-            return (current.added ? '+' : '-') + entry;
-          })));
-
-          // Track the updated file position
-          if (current.added) {
-            newLine += lines.length;
-          } else {
-            oldLine += lines.length;
-          }
-        } else {
-          // Identical context lines. Track line changes
-          if (oldRangeStart) {
-            // Close out any changes that have been output (or join overlapping)
-            if (lines.length <= options.context * 2 && i < diff.length - 2) {
-              var _curRange2;
-              // Overlapping
-              (_curRange2 = curRange).push.apply(_curRange2, _toConsumableArray(contextLines(lines)));
-            } else {
-              var _curRange3;
-              // end the range and output
-              var contextSize = Math.min(lines.length, options.context);
-              (_curRange3 = curRange).push.apply(_curRange3, _toConsumableArray(contextLines(lines.slice(0, contextSize))));
-              var _hunk = {
-                oldStart: oldRangeStart,
-                oldLines: oldLine - oldRangeStart + contextSize,
-                newStart: newRangeStart,
-                newLines: newLine - newRangeStart + contextSize,
-                lines: curRange
-              };
-              hunks.push(_hunk);
-              oldRangeStart = 0;
-              newRangeStart = 0;
-              curRange = [];
-            }
-          }
-          oldLine += lines.length;
-          newLine += lines.length;
-        }
-      };
-      for (var i = 0; i < diff.length; i++) {
-        _loop();
-      }
-
-      // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
-      //         "\ No newline at end of file".
-      for (var _i = 0, _hunks = hunks; _i < _hunks.length; _i++) {
-        var hunk = _hunks[_i];
-        for (var _i2 = 0; _i2 < hunk.lines.length; _i2++) {
-          if (hunk.lines[_i2].endsWith('\n')) {
-            hunk.lines[_i2] = hunk.lines[_i2].slice(0, -1);
-          } else {
-            hunk.lines.splice(_i2 + 1, 0, '\\ No newline at end of file');
-            _i2++; // Skip the line we just added, then continue iterating
-          }
-        }
-      }
-      return {
-        oldFileName: oldFileName,
-        newFileName: newFileName,
-        oldHeader: oldHeader,
-        newHeader: newHeader,
-        hunks: hunks
-      };
-    }
-  }
-  function formatPatch(diff) {
-    if (Array.isArray(diff)) {
-      return diff.map(formatPatch).join('\n');
-    }
-    var ret = [];
-    if (diff.oldFileName == diff.newFileName) {
-      ret.push('Index: ' + diff.oldFileName);
-    }
-    ret.push('===================================================================');
-    ret.push('--- ' + diff.oldFileName + (typeof diff.oldHeader === 'undefined' ? '' : '\t' + diff.oldHeader));
-    ret.push('+++ ' + diff.newFileName + (typeof diff.newHeader === 'undefined' ? '' : '\t' + diff.newHeader));
-    for (var i = 0; i < diff.hunks.length; i++) {
-      var hunk = diff.hunks[i];
-      // Unified Diff Format quirk: If the chunk size is 0,
-      // the first number is one lower than one would expect.
-      // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-      if (hunk.oldLines === 0) {
-        hunk.oldStart -= 1;
-      }
-      if (hunk.newLines === 0) {
-        hunk.newStart -= 1;
-      }
-      ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + ' +' + hunk.newStart + ',' + hunk.newLines + ' @@');
-      ret.push.apply(ret, hunk.lines);
-    }
-    return ret.join('\n') + '\n';
-  }
-  function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-    var _options2;
-    if (typeof options === 'function') {
-      options = {
-        callback: options
-      };
-    }
-    if (!((_options2 = options) !== null && _options2 !== void 0 && _options2.callback)) {
-      var patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
-      if (!patchObj) {
-        return;
-      }
-      return formatPatch(patchObj);
-    } else {
-      var _options3 = options,
-        _callback2 = _options3.callback;
-      structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, _objectSpread2(_objectSpread2({}, options), {}, {
-        callback: function callback(patchObj) {
-          if (!patchObj) {
-            _callback2();
-          } else {
-            _callback2(formatPatch(patchObj));
-          }
-        }
-      }));
-    }
-  }
-  function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
-    return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
-  }
-
-  /**
-   * Split `text` into an array of lines, including the trailing newline character (where present)
-   */
-  function splitLines(text) {
-    var hasTrailingNl = text.endsWith('\n');
-    var result = text.split('\n').map(function (line) {
-      return line + '\n';
-    });
-    if (hasTrailingNl) {
-      result.pop();
-    } else {
-      result.push(result.pop().slice(0, -1));
-    }
-    return result;
-  }
-
-  function arrayEqual(a, b) {
-    if (a.length !== b.length) {
-      return false;
-    }
-    return arrayStartsWith(a, b);
-  }
-  function arrayStartsWith(array, start) {
-    if (start.length > array.length) {
-      return false;
-    }
-    for (var i = 0; i < start.length; i++) {
-      if (start[i] !== array[i]) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  function calcLineCount(hunk) {
-    var _calcOldNewLineCount = calcOldNewLineCount(hunk.lines),
-      oldLines = _calcOldNewLineCount.oldLines,
-      newLines = _calcOldNewLineCount.newLines;
-    if (oldLines !== undefined) {
-      hunk.oldLines = oldLines;
-    } else {
-      delete hunk.oldLines;
-    }
-    if (newLines !== undefined) {
-      hunk.newLines = newLines;
-    } else {
-      delete hunk.newLines;
-    }
-  }
-  function merge(mine, theirs, base) {
-    mine = loadPatch(mine, base);
-    theirs = loadPatch(theirs, base);
-    var ret = {};
-
-    // For index we just let it pass through as it doesn't have any necessary meaning.
-    // Leaving sanity checks on this to the API consumer that may know more about the
-    // meaning in their own context.
-    if (mine.index || theirs.index) {
-      ret.index = mine.index || theirs.index;
-    }
-    if (mine.newFileName || theirs.newFileName) {
-      if (!fileNameChanged(mine)) {
-        // No header or no change in ours, use theirs (and ours if theirs does not exist)
-        ret.oldFileName = theirs.oldFileName || mine.oldFileName;
-        ret.newFileName = theirs.newFileName || mine.newFileName;
-        ret.oldHeader = theirs.oldHeader || mine.oldHeader;
-        ret.newHeader = theirs.newHeader || mine.newHeader;
-      } else if (!fileNameChanged(theirs)) {
-        // No header or no change in theirs, use ours
-        ret.oldFileName = mine.oldFileName;
-        ret.newFileName = mine.newFileName;
-        ret.oldHeader = mine.oldHeader;
-        ret.newHeader = mine.newHeader;
-      } else {
-        // Both changed... figure it out
-        ret.oldFileName = selectField(ret, mine.oldFileName, theirs.oldFileName);
-        ret.newFileName = selectField(ret, mine.newFileName, theirs.newFileName);
-        ret.oldHeader = selectField(ret, mine.oldHeader, theirs.oldHeader);
-        ret.newHeader = selectField(ret, mine.newHeader, theirs.newHeader);
-      }
-    }
-    ret.hunks = [];
-    var mineIndex = 0,
-      theirsIndex = 0,
-      mineOffset = 0,
-      theirsOffset = 0;
-    while (mineIndex < mine.hunks.length || theirsIndex < theirs.hunks.length) {
-      var mineCurrent = mine.hunks[mineIndex] || {
-          oldStart: Infinity
-        },
-        theirsCurrent = theirs.hunks[theirsIndex] || {
-          oldStart: Infinity
-        };
-      if (hunkBefore(mineCurrent, theirsCurrent)) {
-        // This patch does not overlap with any of the others, yay.
-        ret.hunks.push(cloneHunk(mineCurrent, mineOffset));
-        mineIndex++;
-        theirsOffset += mineCurrent.newLines - mineCurrent.oldLines;
-      } else if (hunkBefore(theirsCurrent, mineCurrent)) {
-        // This patch does not overlap with any of the others, yay.
-        ret.hunks.push(cloneHunk(theirsCurrent, theirsOffset));
-        theirsIndex++;
-        mineOffset += theirsCurrent.newLines - theirsCurrent.oldLines;
-      } else {
-        // Overlap, merge as best we can
-        var mergedHunk = {
-          oldStart: Math.min(mineCurrent.oldStart, theirsCurrent.oldStart),
-          oldLines: 0,
-          newStart: Math.min(mineCurrent.newStart + mineOffset, theirsCurrent.oldStart + theirsOffset),
-          newLines: 0,
-          lines: []
+            // Parse file headers if they are defined. Unified diff requires them, but
+            // there's no technical issues to have an isolated hunk without file header
+            parseFileHeader(index);
+            parseFileHeader(index);
+            // Parse hunks
+            index.hunks = [];
+            while (i < diffstr.length) {
+                const line = diffstr[i];
+                if ((/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/).test(line)) {
+                    break;
+                }
+                else if ((/^@@/).test(line)) {
+                    index.hunks.push(parseHunk());
+                }
+                else if (line) {
+                    throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line));
+                }
+                else {
+                    i++;
+                }
+            }
+        }
+        // Parses the --- and +++ headers, if none are found, no lines
+        // are consumed.
+        function parseFileHeader(index) {
+            const fileHeader = (/^(---|\+\+\+)\s+(.*)\r?$/).exec(diffstr[i]);
+            if (fileHeader) {
+                const data = fileHeader[2].split('\t', 2), header = (data[1] || '').trim();
+                let fileName = data[0].replace(/\\\\/g, '\\');
+                if ((/^".*"$/).test(fileName)) {
+                    fileName = fileName.substr(1, fileName.length - 2);
+                }
+                if (fileHeader[1] === '---') {
+                    index.oldFileName = fileName;
+                    index.oldHeader = header;
+                }
+                else {
+                    index.newFileName = fileName;
+                    index.newHeader = header;
+                }
+                i++;
+            }
+        }
+        // Parses a hunk
+        // This assumes that we are at the start of a hunk.
+        function parseHunk() {
+            var _a;
+            const chunkHeaderIndex = i, chunkHeaderLine = diffstr[i++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
+            const hunk = {
+                oldStart: +chunkHeader[1],
+                oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
+                newStart: +chunkHeader[3],
+                newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
+                lines: []
+            };
+            // Unified Diff Format quirk: If the chunk size is 0,
+            // the first number is one lower than one would expect.
+            // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+            if (hunk.oldLines === 0) {
+                hunk.oldStart += 1;
+            }
+            if (hunk.newLines === 0) {
+                hunk.newStart += 1;
+            }
+            let addCount = 0, removeCount = 0;
+            for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))); i++) {
+                const operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ? ' ' : diffstr[i][0];
+                if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
+                    hunk.lines.push(diffstr[i]);
+                    if (operation === '+') {
+                        addCount++;
+                    }
+                    else if (operation === '-') {
+                        removeCount++;
+                    }
+                    else if (operation === ' ') {
+                        addCount++;
+                        removeCount++;
+                    }
+                }
+                else {
+                    throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i]}`);
+                }
+            }
+            // Handle the empty block count case
+            if (!addCount && hunk.newLines === 1) {
+                hunk.newLines = 0;
+            }
+            if (!removeCount && hunk.oldLines === 1) {
+                hunk.oldLines = 0;
+            }
+            // Perform sanity checking
+            if (addCount !== hunk.newLines) {
+                throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
+            }
+            if (removeCount !== hunk.oldLines) {
+                throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
+            }
+            return hunk;
+        }
+        while (i < diffstr.length) {
+            parseIndex();
+        }
+        return list;
+    }
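+    // Illustrative usage sketch (editorial, not part of the upstream jsdiff source): parsePatch
+    // turns unified-diff text into the structured form consumed by the functions below. Assuming
+    // a hypothetical patch string `patchText` touching one file with two hunks:
+    //
+    //   const [file] = parsePatch(patchText);
+    //   file.oldFileName;      // whatever the '---' header named, e.g. 'lib/index.js'
+    //   file.hunks.length;     // 2
+    //   file.hunks[0].lines;   // [' context', '-removed', '+added', ...]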
+
+    // Iterator that traverses in the range of [min, max], stepping
+    // by distance from a given start position. I.e. for [0, 4], with
+    // start of 2, this will iterate 2, 3, 1, 4, 0.
+    function distanceIterator (start, minLine, maxLine) {
+        let wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1;
+        return function iterator() {
+            if (wantForward && !forwardExhausted) {
+                if (backwardExhausted) {
+                    localOffset++;
+                }
+                else {
+                    wantForward = false;
+                }
+                // Check if trying to fit beyond text length, and if not, check it fits
+                // after offset location (or desired location on first iteration)
+                if (start + localOffset <= maxLine) {
+                    return start + localOffset;
+                }
+                forwardExhausted = true;
+            }
+            if (!backwardExhausted) {
+                if (!forwardExhausted) {
+                    wantForward = true;
+                }
+                // Check if trying to fit before text beginning, and if not, check it fits
+                // before offset location
+                if (minLine <= start - localOffset) {
+                    return start - localOffset++;
+                }
+                backwardExhausted = true;
+                return iterator();
+            }
+            // We tried to fit hunk before text beginning and beyond text length, then
+            // hunk can't fit on the text. Return undefined
+            return undefined;
         };
-        mergeLines(mergedHunk, mineCurrent.oldStart, mineCurrent.lines, theirsCurrent.oldStart, theirsCurrent.lines);
-        theirsIndex++;
-        mineIndex++;
-        ret.hunks.push(mergedHunk);
-      }
-    }
-    return ret;
-  }
-  function loadPatch(param, base) {
-    if (typeof param === 'string') {
-      if (/^@@/m.test(param) || /^Index:/m.test(param)) {
-        return parsePatch(param)[0];
-      }
-      if (!base) {
-        throw new Error('Must provide a base reference or pass in a patch');
-      }
-      return structuredPatch(undefined, undefined, base, param);
     }
-    return param;
-  }
-  function fileNameChanged(patch) {
-    return patch.newFileName && patch.newFileName !== patch.oldFileName;
-  }
-  function selectField(index, mine, theirs) {
-    if (mine === theirs) {
-      return mine;
-    } else {
-      index.conflict = true;
-      return {
-        mine: mine,
-        theirs: theirs
-      };
-    }
-  }
-  function hunkBefore(test, check) {
-    return test.oldStart < check.oldStart && test.oldStart + test.oldLines < check.oldStart;
-  }
-  function cloneHunk(hunk, offset) {
-    return {
-      oldStart: hunk.oldStart,
-      oldLines: hunk.oldLines,
-      newStart: hunk.newStart + offset,
-      newLines: hunk.newLines,
-      lines: hunk.lines
-    };
-  }
-  function mergeLines(hunk, mineOffset, mineLines, theirOffset, theirLines) {
-    // This will generally result in a conflicted hunk, but there are cases where the context
-    // is the only overlap where we can successfully merge the content here.
-    var mine = {
-        offset: mineOffset,
-        lines: mineLines,
-        index: 0
-      },
-      their = {
-        offset: theirOffset,
-        lines: theirLines,
-        index: 0
-      };
-
-    // Handle any leading content
-    insertLeading(hunk, mine, their);
-    insertLeading(hunk, their, mine);
 
-    // Now in the overlap content. Scan through and select the best changes from each.
-    while (mine.index < mine.lines.length && their.index < their.lines.length) {
-      var mineCurrent = mine.lines[mine.index],
-        theirCurrent = their.lines[their.index];
-      if ((mineCurrent[0] === '-' || mineCurrent[0] === '+') && (theirCurrent[0] === '-' || theirCurrent[0] === '+')) {
-        // Both modified ...
-        mutualChange(hunk, mine, their);
-      } else if (mineCurrent[0] === '+' && theirCurrent[0] === ' ') {
-        var _hunk$lines;
-        // Mine inserted
-        (_hunk$lines = hunk.lines).push.apply(_hunk$lines, _toConsumableArray(collectChange(mine)));
-      } else if (theirCurrent[0] === '+' && mineCurrent[0] === ' ') {
-        var _hunk$lines2;
-        // Theirs inserted
-        (_hunk$lines2 = hunk.lines).push.apply(_hunk$lines2, _toConsumableArray(collectChange(their)));
-      } else if (mineCurrent[0] === '-' && theirCurrent[0] === ' ') {
-        // Mine removed or edited
-        removal(hunk, mine, their);
-      } else if (theirCurrent[0] === '-' && mineCurrent[0] === ' ') {
-        // Their removed or edited
-        removal(hunk, their, mine, true);
-      } else if (mineCurrent === theirCurrent) {
-        // Context identity
-        hunk.lines.push(mineCurrent);
-        mine.index++;
-        their.index++;
-      } else {
-        // Context mismatch
-        conflict(hunk, collectChange(mine), collectChange(their));
-      }
-    }
-
-    // Now push anything that may be remaining
-    insertTrailing(hunk, mine);
-    insertTrailing(hunk, their);
-    calcLineCount(hunk);
-  }
-  function mutualChange(hunk, mine, their) {
-    var myChanges = collectChange(mine),
-      theirChanges = collectChange(their);
-    if (allRemoves(myChanges) && allRemoves(theirChanges)) {
-      // Special case for remove changes that are supersets of one another
-      if (arrayStartsWith(myChanges, theirChanges) && skipRemoveSuperset(their, myChanges, myChanges.length - theirChanges.length)) {
-        var _hunk$lines3;
-        (_hunk$lines3 = hunk.lines).push.apply(_hunk$lines3, _toConsumableArray(myChanges));
-        return;
-      } else if (arrayStartsWith(theirChanges, myChanges) && skipRemoveSuperset(mine, theirChanges, theirChanges.length - myChanges.length)) {
-        var _hunk$lines4;
-        (_hunk$lines4 = hunk.lines).push.apply(_hunk$lines4, _toConsumableArray(theirChanges));
-        return;
-      }
-    } else if (arrayEqual(myChanges, theirChanges)) {
-      var _hunk$lines5;
-      (_hunk$lines5 = hunk.lines).push.apply(_hunk$lines5, _toConsumableArray(myChanges));
-      return;
-    }
-    conflict(hunk, myChanges, theirChanges);
-  }
-  function removal(hunk, mine, their, swap) {
-    var myChanges = collectChange(mine),
-      theirChanges = collectContext(their, myChanges);
-    if (theirChanges.merged) {
-      var _hunk$lines6;
-      (_hunk$lines6 = hunk.lines).push.apply(_hunk$lines6, _toConsumableArray(theirChanges.merged));
-    } else {
-      conflict(hunk, swap ? theirChanges : myChanges, swap ? myChanges : theirChanges);
-    }
-  }
-  function conflict(hunk, mine, their) {
-    hunk.conflict = true;
-    hunk.lines.push({
-      conflict: true,
-      mine: mine,
-      theirs: their
-    });
-  }
-  function insertLeading(hunk, insert, their) {
-    while (insert.offset < their.offset && insert.index < insert.lines.length) {
-      var line = insert.lines[insert.index++];
-      hunk.lines.push(line);
-      insert.offset++;
+    /**
+     * attempts to apply a unified diff patch.
+     *
+     * Hunks are applied first to last.
+     * `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly.
+     * If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly.
+     * If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match.
+     * Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly.
+     *
+     * Once a hunk is successfully fitted, the process begins again with the next hunk.
+     * Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks.
+     *
+     * If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`.
+     *
+     * If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly.
+     * (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.)
+     *
+     * If the patch was applied successfully, returns a string containing the patched text.
+     * If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false.
+     *
+     * @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods.
+     */
+    function applyPatch(source, patch, options = {}) {
+        let patches;
+        if (typeof patch === 'string') {
+            patches = parsePatch(patch);
+        }
+        else if (Array.isArray(patch)) {
+            patches = patch;
+        }
+        else {
+            patches = [patch];
+        }
+        if (patches.length > 1) {
+            throw new Error('applyPatch only works with a single input.');
+        }
+        return applyStructuredPatch(source, patches[0], options);
     }
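+    // Illustrative usage sketch (editorial, not part of the upstream jsdiff source), matching the
+    // behaviour documented above; `original` and `patchText` are hypothetical values:
+    //
+    //   const patched = applyPatch(original, patchText);
+    //   if (patched === false) {
+    //       // at least one hunk could not be fitted within the allowed fuzzFactor
+    //   }
+    //   applyPatch(original, patchText, { fuzzFactor: 2 }); // tolerate up to 2 mismatched context lines per hunk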
-  }
-  function insertTrailing(hunk, insert) {
-    while (insert.index < insert.lines.length) {
-      var line = insert.lines[insert.index++];
-      hunk.lines.push(line);
+    function applyStructuredPatch(source, patch, options = {}) {
+        if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
+            if (hasOnlyWinLineEndings(source) && isUnix(patch)) {
+                patch = unixToWin(patch);
+            }
+            else if (hasOnlyUnixLineEndings(source) && isWin(patch)) {
+                patch = winToUnix(patch);
+            }
+        }
+        // Apply the diff to the input
+        const lines = source.split('\n'), hunks = patch.hunks, compareLine = options.compareLine || ((lineNumber, line, operation, patchContent) => line === patchContent), fuzzFactor = options.fuzzFactor || 0;
+        let minLine = 0;
+        if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
+            throw new Error('fuzzFactor must be a non-negative integer');
+        }
+        // Special case for empty patch.
+        if (!hunks.length) {
+            return source;
+        }
+        // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
+        // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
+        // newline that already exists - then we either return false and fail to apply the patch (if
+        // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
+        // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
+        let prevLine = '', removeEOFNL = false, addEOFNL = false;
+        for (let i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
+            const line = hunks[hunks.length - 1].lines[i];
+            if (line[0] == '\\') {
+                if (prevLine[0] == '+') {
+                    removeEOFNL = true;
+                }
+                else if (prevLine[0] == '-') {
+                    addEOFNL = true;
+                }
+            }
+            prevLine = line;
+        }
+        if (removeEOFNL) {
+            if (addEOFNL) {
+                // This means the final line gets changed but doesn't have a trailing newline in either the
+                // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
+                // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
+                if (!fuzzFactor && lines[lines.length - 1] == '') {
+                    return false;
+                }
+            }
+            else if (lines[lines.length - 1] == '') {
+                lines.pop();
+            }
+            else if (!fuzzFactor) {
+                return false;
+            }
+        }
+        else if (addEOFNL) {
+            if (lines[lines.length - 1] != '') {
+                lines.push('');
+            }
+            else if (!fuzzFactor) {
+                return false;
+            }
+        }
+        /**
+         * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
+         * insertions, substitutions, or deletions, while ensuring also that:
+         * - lines deleted in the hunk match exactly, and
+         * - wherever an insertion operation or block of insertion operations appears in the hunk, the
+         *   immediately preceding and following lines of context match exactly
+         *
+         * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
+         *
+         * If the hunk can be applied, returns an object with properties `oldLineLastI` and
+         * `replacementLines`. Otherwise, returns null.
+         */
+        function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI = 0, lastContextLineMatched = true, patchedLines = [], patchedLinesLength = 0) {
+            let nConsecutiveOldContextLines = 0;
+            let nextContextLineMustMatch = false;
+            for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
+                const hunkLine = hunkLines[hunkLinesI], operation = (hunkLine.length > 0 ? hunkLine[0] : ' '), content = (hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine);
+                if (operation === '-') {
+                    if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                        toPos++;
+                        nConsecutiveOldContextLines = 0;
+                    }
+                    else {
+                        if (!maxErrors || lines[toPos] == null) {
+                            return null;
+                        }
+                        patchedLines[patchedLinesLength] = lines[toPos];
+                        return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
+                    }
+                }
+                if (operation === '+') {
+                    if (!lastContextLineMatched) {
+                        return null;
+                    }
+                    patchedLines[patchedLinesLength] = content;
+                    patchedLinesLength++;
+                    nConsecutiveOldContextLines = 0;
+                    nextContextLineMustMatch = true;
+                }
+                if (operation === ' ') {
+                    nConsecutiveOldContextLines++;
+                    patchedLines[patchedLinesLength] = lines[toPos];
+                    if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                        patchedLinesLength++;
+                        lastContextLineMatched = true;
+                        nextContextLineMustMatch = false;
+                        toPos++;
+                    }
+                    else {
+                        if (nextContextLineMustMatch || !maxErrors) {
+                            return null;
+                        }
+                        // Consider 3 possibilities in sequence:
+                        // 1. lines contains a *substitution* not included in the patch context, or
+                        // 2. lines contains an *insertion* not included in the patch context, or
+                        // 3. lines contains a *deletion* not included in the patch context
+                        // The first two options are of course only possible if the line from lines is non-null -
+                        // i.e. only option 3 is possible if we've overrun the end of the old file.
+                        return (lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength));
+                    }
+                }
+            }
+            // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
+            // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
+            // that starts in this hunk's trailing context.
+            patchedLinesLength -= nConsecutiveOldContextLines;
+            toPos -= nConsecutiveOldContextLines;
+            patchedLines.length = patchedLinesLength;
+            return {
+                patchedLines,
+                oldLineLastI: toPos - 1
+            };
+        }
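+        // Worked example (editorial illustration, not part of the upstream jsdiff source):
+        // applying hunk lines [' a', '-b', '+B', ' c'] at toPos 0 over source lines
+        // ['a', 'b', 'c'] returns { patchedLines: ['a', 'B'], oldLineLastI: 1 } -- the trailing
+        // context line 'c' is trimmed off as described above, so a later hunk may still match it.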
+        const resultLines = [];
+        // Search best fit offsets for each hunk based on the previous ones
+        let prevHunkOffset = 0;
+        for (let i = 0; i < hunks.length; i++) {
+            const hunk = hunks[i];
+            let hunkResult;
+            const maxLine = lines.length - hunk.oldLines + fuzzFactor;
+            let toPos;
+            for (let maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
+                toPos = hunk.oldStart + prevHunkOffset - 1;
+                const iterator = distanceIterator(toPos, minLine, maxLine);
+                for (; toPos !== undefined; toPos = iterator()) {
+                    hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
+                    if (hunkResult) {
+                        break;
+                    }
+                }
+                if (hunkResult) {
+                    break;
+                }
+            }
+            if (!hunkResult) {
+                return false;
+            }
+            // Copy everything from the end of where we applied the last hunk to the start of this hunk
+            for (let i = minLine; i < toPos; i++) {
+                resultLines.push(lines[i]);
+            }
+            // Add the lines produced by applying the hunk:
+            for (let i = 0; i < hunkResult.patchedLines.length; i++) {
+                const line = hunkResult.patchedLines[i];
+                resultLines.push(line);
+            }
+            // Set lower text limit to end of the current hunk, so next ones don't try
+            // to fit over already patched text
+            minLine = hunkResult.oldLineLastI + 1;
+            // Note the offset between where the patch said the hunk should've applied and where we
+            // applied it, so we can adjust future hunks accordingly:
+            prevHunkOffset = toPos + 1 - hunk.oldStart;
+        }
+        // Copy over the rest of the lines from the old text
+        for (let i = minLine; i < lines.length; i++) {
+            resultLines.push(lines[i]);
+        }
+        return resultLines.join('\n');
     }
-  }
-  function collectChange(state) {
-    var ret = [],
-      operation = state.lines[state.index][0];
-    while (state.index < state.lines.length) {
-      var line = state.lines[state.index];
-
-      // Group additions that are immediately after subtractions and treat them as one "atomic" modify change.
-      if (operation === '-' && line[0] === '+') {
-        operation = '+';
-      }
-      if (operation === line[0]) {
-        ret.push(line);
-        state.index++;
-      } else {
-        break;
-      }
+    /**
+     * applies one or more patches.
+     *
+     * `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files).
+     *
+     * This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is:
+     *
+     * - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
+     * - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution.
+     *
+     * Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
+     */
+    function applyPatches(uniDiff, options) {
+        const spDiff = typeof uniDiff === 'string' ? parsePatch(uniDiff) : uniDiff;
+        let currentIndex = 0;
+        function processIndex() {
+            const index = spDiff[currentIndex++];
+            if (!index) {
+                return options.complete();
+            }
+            options.loadFile(index, function (err, data) {
+                if (err) {
+                    return options.complete(err);
+                }
+                const updatedContent = applyPatch(data, index, options);
+                options.patched(index, updatedContent, function (err) {
+                    if (err) {
+                        return options.complete(err);
+                    }
+                    processIndex();
+                });
+            });
+        }
+        processIndex();
     }
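+    // Illustrative usage sketch (editorial, not part of the upstream jsdiff source); readFile and
+    // writeFile are hypothetical stand-ins for whatever I/O the caller provides:
+    //
+    //   applyPatches(multiFilePatchText, {
+    //       loadFile: (index, callback) => readFile(index.oldFileName, callback),
+    //       patched: (index, content, callback) => {
+    //           if (content === false) {
+    //               return callback(new Error('failed to apply hunks to ' + index.oldFileName));
+    //           }
+    //           writeFile(index.newFileName, content, callback);
+    //       },
+    //       complete: (err) => { if (err) { console.error(err); } },
+    //   });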
-    return ret;
-  }
-  function collectContext(state, matchChanges) {
-    var changes = [],
-      merged = [],
-      matchIndex = 0,
-      contextChanges = false,
-      conflicted = false;
-    while (matchIndex < matchChanges.length && state.index < state.lines.length) {
-      var change = state.lines[state.index],
-        match = matchChanges[matchIndex];
-
-      // Once we've hit our add, then we are done
-      if (match[0] === '+') {
-        break;
-      }
-      contextChanges = contextChanges || change[0] !== ' ';
-      merged.push(match);
-      matchIndex++;
 
-      // Consume any additions in the other block as a conflict to attempt
-      // to pull in the remaining context after this
-      if (change[0] === '+') {
-        conflicted = true;
-        while (change[0] === '+') {
-          changes.push(change);
-          change = state.lines[++state.index];
-        }
-      }
-      if (match.substr(1) === change.substr(1)) {
-        changes.push(change);
-        state.index++;
-      } else {
-        conflicted = true;
-      }
-    }
-    if ((matchChanges[matchIndex] || '')[0] === '+' && contextChanges) {
-      conflicted = true;
-    }
-    if (conflicted) {
-      return changes;
+    function reversePatch(structuredPatch) {
+        if (Array.isArray(structuredPatch)) {
+            // (See comment in unixToWin for why we need the pointless-looking anonymous function here)
+            return structuredPatch.map(patch => reversePatch(patch)).reverse();
+        }
+        return Object.assign(Object.assign({}, structuredPatch), { oldFileName: structuredPatch.newFileName, oldHeader: structuredPatch.newHeader, newFileName: structuredPatch.oldFileName, newHeader: structuredPatch.oldHeader, hunks: structuredPatch.hunks.map(hunk => {
+                return {
+                    oldLines: hunk.newLines,
+                    oldStart: hunk.newStart,
+                    newLines: hunk.oldLines,
+                    newStart: hunk.oldStart,
+                    lines: hunk.lines.map(l => {
+                        if (l.startsWith('-')) {
+                            return `+${l.slice(1)}`;
+                        }
+                        if (l.startsWith('+')) {
+                            return `-${l.slice(1)}`;
+                        }
+                        return l;
+                    })
+                };
+            }) });
+    }
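+    // Illustrative note (editorial, not part of the upstream jsdiff source): reversing a patch
+    // swaps the old/new file names and flips '+'/'-' lines, so applying the reversed patch to
+    // already-patched text undoes the change when the hunks still apply cleanly:
+    //
+    //   const patch = structuredPatch('a.txt', 'a.txt', oldStr, newStr);
+    //   applyPatch(newStr, reversePatch(patch)) === oldStr; // expected for a clean application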
+
+    function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
+        let optionsObj;
+        if (!options) {
+            optionsObj = {};
+        }
+        else if (typeof options === 'function') {
+            optionsObj = { callback: options };
+        }
+        else {
+            optionsObj = options;
+        }
+        if (typeof optionsObj.context === 'undefined') {
+            optionsObj.context = 4;
+        }
+        // We copy this into its own variable to placate TypeScript, which thinks
+        // optionsObj.context might be undefined in the callbacks below.
+        const context = optionsObj.context;
+        // @ts-expect-error (runtime check for something that is correctly a static type error)
+        if (optionsObj.newlineIsToken) {
+            throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
+        }
+        if (!optionsObj.callback) {
+            return diffLinesResultToPatch(diffLines(oldStr, newStr, optionsObj));
+        }
+        else {
+            const { callback } = optionsObj;
+            diffLines(oldStr, newStr, Object.assign(Object.assign({}, optionsObj), { callback: (diff) => {
+                    const patch = diffLinesResultToPatch(diff);
+                    // TypeScript is unhappy without the cast because it does not understand that `patch` may
+                    // be undefined here only if `callback` is StructuredPatchCallbackAbortable:
+                    callback(patch);
+                } }));
+        }
+        function diffLinesResultToPatch(diff) {
+            // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
+            //         of lines containing trailing newline characters. We'll tidy up later...
+            if (!diff) {
+                return;
+            }
+            diff.push({ value: '', lines: [] }); // Append an empty value to make cleanup easier
+            function contextLines(lines) {
+                return lines.map(function (entry) { return ' ' + entry; });
+            }
+            const hunks = [];
+            let oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1;
+            for (let i = 0; i < diff.length; i++) {
+                const current = diff[i], lines = current.lines || splitLines(current.value);
+                current.lines = lines;
+                if (current.added || current.removed) {
+                    // If we have previous context, start with that
+                    if (!oldRangeStart) {
+                        const prev = diff[i - 1];
+                        oldRangeStart = oldLine;
+                        newRangeStart = newLine;
+                        if (prev) {
+                            curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : [];
+                            oldRangeStart -= curRange.length;
+                            newRangeStart -= curRange.length;
+                        }
+                    }
+                    // Output our changes
+                    for (const line of lines) {
+                        curRange.push((current.added ? '+' : '-') + line);
+                    }
+                    // Track the updated file position
+                    if (current.added) {
+                        newLine += lines.length;
+                    }
+                    else {
+                        oldLine += lines.length;
+                    }
+                }
+                else {
+                    // Identical context lines. Track line changes
+                    if (oldRangeStart) {
+                        // Close out any changes that have been output (or join overlapping)
+                        if (lines.length <= context * 2 && i < diff.length - 2) {
+                            // Overlapping
+                            for (const line of contextLines(lines)) {
+                                curRange.push(line);
+                            }
+                        }
+                        else {
+                            // end the range and output
+                            const contextSize = Math.min(lines.length, context);
+                            for (const line of contextLines(lines.slice(0, contextSize))) {
+                                curRange.push(line);
+                            }
+                            const hunk = {
+                                oldStart: oldRangeStart,
+                                oldLines: (oldLine - oldRangeStart + contextSize),
+                                newStart: newRangeStart,
+                                newLines: (newLine - newRangeStart + contextSize),
+                                lines: curRange
+                            };
+                            hunks.push(hunk);
+                            oldRangeStart = 0;
+                            newRangeStart = 0;
+                            curRange = [];
+                        }
+                    }
+                    oldLine += lines.length;
+                    newLine += lines.length;
+                }
+            }
+            // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
+            //         "\ No newline at end of file".
+            for (const hunk of hunks) {
+                for (let i = 0; i < hunk.lines.length; i++) {
+                    if (hunk.lines[i].endsWith('\n')) {
+                        hunk.lines[i] = hunk.lines[i].slice(0, -1);
+                    }
+                    else {
+                        hunk.lines.splice(i + 1, 0, '\\ No newline at end of file');
+                        i++; // Skip the line we just added, then continue iterating
+                    }
+                }
+            }
+            return {
+                oldFileName: oldFileName, newFileName: newFileName,
+                oldHeader: oldHeader, newHeader: newHeader,
+                hunks: hunks
+            };
+        }
     }
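+    // Illustrative sketch of the return shape (editorial, not part of the upstream jsdiff
+    // source), assuming the hypothetical inputs oldStr = 'a\nb\n' and newStr = 'a\nc\n':
+    //
+    //   structuredPatch('f.txt', 'f.txt', oldStr, newStr);
+    //   // => { oldFileName: 'f.txt', newFileName: 'f.txt',
+    //   //      oldHeader: undefined, newHeader: undefined,
+    //   //      hunks: [{ oldStart: 1, oldLines: 2, newStart: 1, newLines: 2,
+    //   //                lines: [' a', '-b', '+c'] }] }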
-    while (matchIndex < matchChanges.length) {
-      merged.push(matchChanges[matchIndex++]);
+    /**
+     * creates a unified diff patch.
+     * @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`)
+     */
+    function formatPatch(patch) {
+        if (Array.isArray(patch)) {
+            return patch.map(formatPatch).join('\n');
+        }
+        const ret = [];
+        if (patch.oldFileName == patch.newFileName) {
+            ret.push('Index: ' + patch.oldFileName);
+        }
+        ret.push('===================================================================');
+        ret.push('--- ' + patch.oldFileName + (typeof patch.oldHeader === 'undefined' ? '' : '\t' + patch.oldHeader));
+        ret.push('+++ ' + patch.newFileName + (typeof patch.newHeader === 'undefined' ? '' : '\t' + patch.newHeader));
+        for (let i = 0; i < patch.hunks.length; i++) {
+            const hunk = patch.hunks[i];
+            // Unified Diff Format quirk: If the chunk size is 0,
+            // the first number is one lower than one would expect.
+            // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+            if (hunk.oldLines === 0) {
+                hunk.oldStart -= 1;
+            }
+            if (hunk.newLines === 0) {
+                hunk.newStart -= 1;
+            }
+            ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines
+                + ' +' + hunk.newStart + ',' + hunk.newLines
+                + ' @@');
+            for (const line of hunk.lines) {
+                ret.push(line);
+            }
+        }
+        return ret.join('\n') + '\n';
     }
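+    // Illustrative sketch of the serialized output (editorial, not part of the upstream jsdiff
+    // source), using the structured patch from the structuredPatch sketch above:
+    //
+    //   formatPatch(patch);
+    //   // => 'Index: f.txt\n' +
+    //   //    '===================================================================\n' +
+    //   //    '--- f.txt\n' +
+    //   //    '+++ f.txt\n' +
+    //   //    '@@ -1,2 +1,2 @@\n a\n-b\n+c\n'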
-    return {
-      merged: merged,
-      changes: changes
-    };
-  }
-  function allRemoves(changes) {
-    return changes.reduce(function (prev, change) {
-      return prev && change[0] === '-';
-    }, true);
-  }
-  function skipRemoveSuperset(state, removeChanges, delta) {
-    for (var i = 0; i < delta; i++) {
-      var changeContent = removeChanges[removeChanges.length - delta + i].substr(1);
-      if (state.lines[state.index + i] !== ' ' + changeContent) {
-        return false;
-      }
+    function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
+        if (typeof options === 'function') {
+            options = { callback: options };
+        }
+        if (!(options === null || options === void 0 ? void 0 : options.callback)) {
+            const patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
+            if (!patchObj) {
+                return;
+            }
+            return formatPatch(patchObj);
+        }
+        else {
+            const { callback } = options;
+            structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, Object.assign(Object.assign({}, options), { callback: patchObj => {
+                    if (!patchObj) {
+                        callback(undefined);
+                    }
+                    else {
+                        callback(formatPatch(patchObj));
+                    }
+                } }));
+        }
     }
-    state.index += delta;
-    return true;
-  }
-  function calcOldNewLineCount(lines) {
-    var oldLines = 0;
-    var newLines = 0;
-    lines.forEach(function (line) {
-      if (typeof line !== 'string') {
-        var myCount = calcOldNewLineCount(line.mine);
-        var theirCount = calcOldNewLineCount(line.theirs);
-        if (oldLines !== undefined) {
-          if (myCount.oldLines === theirCount.oldLines) {
-            oldLines += myCount.oldLines;
-          } else {
-            oldLines = undefined;
-          }
-        }
-        if (newLines !== undefined) {
-          if (myCount.newLines === theirCount.newLines) {
-            newLines += myCount.newLines;
-          } else {
-            newLines = undefined;
-          }
-        }
-      } else {
-        if (newLines !== undefined && (line[0] === '+' || line[0] === ' ')) {
-          newLines++;
-        }
-        if (oldLines !== undefined && (line[0] === '-' || line[0] === ' ')) {
-          oldLines++;
-        }
-      }
-    });
-    return {
-      oldLines: oldLines,
-      newLines: newLines
-    };
-  }
-
-  function reversePatch(structuredPatch) {
-    if (Array.isArray(structuredPatch)) {
-      return structuredPatch.map(reversePatch).reverse();
+    function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
+        return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
     }
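+    // Illustrative note (editorial, not part of the upstream jsdiff source): createTwoFilesPatch
+    // and createPatch are structuredPatch plus formatPatch in one step; createPatch simply reuses
+    // one file name for both sides:
+    //
+    //   createPatch('f.txt', 'a\nb\n', 'a\nc\n');                        // same text as the formatPatch sketch above
+    //   createTwoFilesPatch('old.txt', 'new.txt', 'a\nb\n', 'a\nc\n');   // names differ, so no 'Index:' line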
-    return _objectSpread2(_objectSpread2({}, structuredPatch), {}, {
-      oldFileName: structuredPatch.newFileName,
-      oldHeader: structuredPatch.newHeader,
-      newFileName: structuredPatch.oldFileName,
-      newHeader: structuredPatch.oldHeader,
-      hunks: structuredPatch.hunks.map(function (hunk) {
-        return {
-          oldLines: hunk.newLines,
-          oldStart: hunk.newStart,
-          newLines: hunk.oldLines,
-          newStart: hunk.oldStart,
-          lines: hunk.lines.map(function (l) {
-            if (l.startsWith('-')) {
-              return "+".concat(l.slice(1));
-            }
-            if (l.startsWith('+')) {
-              return "-".concat(l.slice(1));
-            }
-            return l;
-          })
-        };
-      })
-    });
-  }
-
-  // See: http://code.google.com/p/google-diff-match-patch/wiki/API
-  function convertChangesToDMP(changes) {
-    var ret = [],
-      change,
-      operation;
-    for (var i = 0; i < changes.length; i++) {
-      change = changes[i];
-      if (change.added) {
-        operation = 1;
-      } else if (change.removed) {
-        operation = -1;
-      } else {
-        operation = 0;
-      }
-      ret.push([operation, change.value]);
+    /**
+     * Split `text` into an array of lines, including the trailing newline character (where present)
+     */
+    function splitLines(text) {
+        const hasTrailingNl = text.endsWith('\n');
+        const result = text.split('\n').map(line => line + '\n');
+        if (hasTrailingNl) {
+            result.pop();
+        }
+        else {
+            result.push(result.pop().slice(0, -1));
+        }
+        return result;
     }
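+    // Illustrative examples (editorial, not part of the upstream jsdiff source):
+    //
+    //   splitLines('a\nb\n');   // => ['a\n', 'b\n']
+    //   splitLines('a\nb');     // => ['a\n', 'b']
+    //   splitLines('');         // => ['']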
-    return ret;
-  }
 
-  function convertChangesToXML(changes) {
-    var ret = [];
-    for (var i = 0; i < changes.length; i++) {
-      var change = changes[i];
-      if (change.added) {
-        ret.push('<ins>');
-      } else if (change.removed) {
-        ret.push('<del>');
-      }
-      ret.push(escapeHTML(change.value));
-      if (change.added) {
-        ret.push('</ins>');
-      } else if (change.removed) {
-        ret.push('</del>');
-      }
+    /**
+     * converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library
+     */
+    function convertChangesToDMP(changes) {
+        const ret = [];
+        let change, operation;
+        for (let i = 0; i < changes.length; i++) {
+            change = changes[i];
+            if (change.added) {
+                operation = 1;
+            }
+            else if (change.removed) {
+                operation = -1;
+            }
+            else {
+                operation = 0;
+            }
+            ret.push([operation, change.value]);
+        }
+        return ret;
     }
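+    // Illustrative example (editorial, not part of the upstream jsdiff source):
+    //
+    //   convertChangesToDMP(diffChars('cat', 'cart'));
+    //   // => [[0, 'ca'], [1, 'r'], [0, 't']]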
-    return ret.join('');
-  }
-  function escapeHTML(s) {
-    var n = s;
-    n = n.replace(/&/g, '&amp;');
-    n = n.replace(/</g, '&lt;');
-    n = n.replace(/>/g, '&gt;');
-    n = n.replace(/"/g, '&quot;');
-    return n;
-  }
 
-  exports.Diff = Diff;
-  exports.applyPatch = applyPatch;
-  exports.applyPatches = applyPatches;
-  exports.canonicalize = canonicalize;
-  exports.convertChangesToDMP = convertChangesToDMP;
-  exports.convertChangesToXML = convertChangesToXML;
-  exports.createPatch = createPatch;
-  exports.createTwoFilesPatch = createTwoFilesPatch;
-  exports.diffArrays = diffArrays;
-  exports.diffChars = diffChars;
-  exports.diffCss = diffCss;
-  exports.diffJson = diffJson;
-  exports.diffLines = diffLines;
-  exports.diffSentences = diffSentences;
-  exports.diffTrimmedLines = diffTrimmedLines;
-  exports.diffWords = diffWords;
-  exports.diffWordsWithSpace = diffWordsWithSpace;
-  exports.formatPatch = formatPatch;
-  exports.merge = merge;
-  exports.parsePatch = parsePatch;
-  exports.reversePatch = reversePatch;
-  exports.structuredPatch = structuredPatch;
+    /**
+     * converts a list of change objects to a serialized XML format
+     */
+    function convertChangesToXML(changes) {
+        const ret = [];
+        for (let i = 0; i < changes.length; i++) {
+            const change = changes[i];
+            if (change.added) {
+                ret.push('<ins>');
+            }
+            else if (change.removed) {
+                ret.push('<del>');
+            }
+            ret.push(escapeHTML(change.value));
+            if (change.added) {
+                ret.push('</ins>');
+            }
+            else if (change.removed) {
+                ret.push('</del>');
+            }
+        }
+        return ret.join('');
+    }
+    function escapeHTML(s) {
+        let n = s;
+        n = n.replace(/&/g, '&amp;');
+        n = n.replace(/</g, '&lt;');
+        n = n.replace(/>/g, '&gt;');
+        n = n.replace(/"/g, '&quot;');
+        return n;
+    }
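+    // Illustrative example (editorial, not part of the upstream jsdiff source); removals are
+    // expected to precede insertions in the change list, giving roughly:
+    //
+    //   convertChangesToXML(diffWords('a cat', 'a dog'));
+    //   // => 'a <del>cat</del><ins>dog</ins>'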
+
+    exports.Diff = Diff;
+    exports.applyPatch = applyPatch;
+    exports.applyPatches = applyPatches;
+    exports.arrayDiff = arrayDiff;
+    exports.canonicalize = canonicalize;
+    exports.characterDiff = characterDiff;
+    exports.convertChangesToDMP = convertChangesToDMP;
+    exports.convertChangesToXML = convertChangesToXML;
+    exports.createPatch = createPatch;
+    exports.createTwoFilesPatch = createTwoFilesPatch;
+    exports.cssDiff = cssDiff;
+    exports.diffArrays = diffArrays;
+    exports.diffChars = diffChars;
+    exports.diffCss = diffCss;
+    exports.diffJson = diffJson;
+    exports.diffLines = diffLines;
+    exports.diffSentences = diffSentences;
+    exports.diffTrimmedLines = diffTrimmedLines;
+    exports.diffWords = diffWords;
+    exports.diffWordsWithSpace = diffWordsWithSpace;
+    exports.formatPatch = formatPatch;
+    exports.jsonDiff = jsonDiff;
+    exports.lineDiff = lineDiff;
+    exports.parsePatch = parsePatch;
+    exports.reversePatch = reversePatch;
+    exports.sentenceDiff = sentenceDiff;
+    exports.structuredPatch = structuredPatch;
+    exports.wordDiff = wordDiff;
+    exports.wordsWithSpaceDiff = wordsWithSpaceDiff;
 
 }));
diff --git a/node_modules/diff/dist/diff.min.js b/node_modules/diff/dist/diff.min.js
index 4d96b763e537a..6fd5d020d282c 100644
--- a/node_modules/diff/dist/diff.min.js
+++ b/node_modules/diff/dist/diff.min.js
@@ -1,37 +1 @@
-/*!
-
- diff v7.0.0
-
-BSD 3-Clause License
-
-Copyright (c) 2009-2015, Kevin Decker 
-All rights reserved.
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
-   contributors may be used to endorse or promote products derived from
-   this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-@license
-*/
-!function(e,n){"object"==typeof exports&&"undefined"!=typeof module?n(exports):"function"==typeof define&&define.amd?define(["exports"],n):n((e="undefined"!=typeof globalThis?globalThis:e||self).Diff={})}(this,function(e){"use strict";function r(){}function w(e,n,t,r,i){for(var o,l=[];n;)l.push(n),o=n.previousComponent,delete n.previousComponent,n=o;l.reverse();for(var a=0,u=l.length,s=0,f=0;ae.length?n:e}),d.value=e.join(c)):d.value=e.join(t.slice(s,s+d.count)),s+=d.count,d.added||(f+=d.count))}return l}r.prototype={diff:function(l,a){var u=2=d&&c<=v+1)return f(w(s,p[0].lastComponent,a,l,s.useLongestToken));var g=-1/0,m=1/0;function i(){for(var e=Math.max(g,-h);e<=Math.min(m,h);e+=2){var n=void 0,t=p[e-1],r=p[e+1],i=(t&&(p[e-1]=void 0),!1),o=(r&&(o=r.oldPos-e,i=r&&0<=o&&o=d&&c<=v+1)return f(w(s,n.lastComponent,a,l,s.useLongestToken));(p[e]=n).oldPos+1>=d&&(m=Math.min(m,e-1)),c<=v+1&&(g=Math.max(g,e+1))}else p[e]=void 0}h++}if(n)!function e(){setTimeout(function(){if(tr)return n();i()||e()},0)}();else for(;h<=t&&Date.now()<=r;){var o=i();if(o)return o}},addToPath:function(e,n,t,r,i){var o=e.lastComponent;return o&&!i.oneChangePerToken&&o.added===n&&o.removed===t?{oldPos:e.oldPos+r,lastComponent:{count:o.count+1,added:n,removed:t,previousComponent:o.previousComponent}}:{oldPos:e.oldPos+r,lastComponent:{count:1,added:n,removed:t,previousComponent:o}}},extractCommon:function(e,n,t,r,i){for(var o=n.length,l=t.length,a=e.oldPos,u=a-r,s=0;u+1n.length&&(t=e.length-n.length);var r=n.length;e.lengthe.length)&&(n=e.length);for(var t=0,r=new Array(n);te.length)return!1;for(var t=0;t"):r.removed&&n.push(""),n.push(r.value.replace(/&/g,"&").replace(//g,">").replace(/"/g,""")),r.added?n.push(""):r.removed&&n.push("")}return n.join("")},e.createPatch=function(e,n,t,r,i,o){return M(e,e,n,t,r,i,o)},e.createTwoFilesPatch=M,e.diffArrays=function(e,n,t){return F.diff(e,n,t)},e.diffChars=function(e,n,t){return I.diff(e,n,t)},e.diffCss=function(e,n,t){return m.diff(e,n,t)},e.diffJson=function(e,n,t){return x.diff(e,n,t)},e.diffLines=y,e.diffSentences=function(e,n,t){return g.diff(e,n,t)},e.diffTrimmedLines=function(e,n,t){return t=function(e,n){if("function"==typeof e)n.callback=e;else if(e)for(var t in e)e.hasOwnProperty(t)&&(n[t]=e[t]);return n}(t,{ignoreWhitespace:!0}),v.diff(e,n,t)},e.diffWords=function(e,n,t){return null==(null==t?void 0:t.ignoreWhitespace)||t.ignoreWhitespace?i.diff(e,n,t):a(e,n,t)},e.diffWordsWithSpace=a,e.formatPatch=E,e.merge=function(e,n,t){e=J(e,t),n=J(n,t);for(var r={},i=((e.index||n.index)&&(r.index=e.index||n.index),(e.newFileName||n.newFileName)&&(q(e)?q(n)?(r.oldFileName=H(r,e.oldFileName,n.oldFileName),r.newFileName=H(r,e.newFileName,n.newFileName),r.oldHeader=H(r,e.oldHeader,n.oldHeader),r.newHeader=H(r,e.newHeader,n.newHeader)):(r.oldFileName=e.oldFileName,r.newFileName=e.newFileName,r.oldHeader=e.oldHeader,r.newHeader=e.newHeader):(r.oldFileName=n.oldFileName||e.oldFileName,r.newFileName=n.newFileName||e.newFileName,r.oldHeader=n.oldHeader||e.oldHeader,r.newHeader=n.newHeader||e.newHeader)),r.hunks=[],0),o=0,l=0,a=0;i{"object"==typeof exports&&"undefined"!=typeof module?factory(exports):"function"==typeof define&&define.amd?define(["exports"],factory):factory((global="undefined"!=typeof globalThis?globalThis:global||self).Diff={})})(this,function(exports){class Diff{diff(oldStr,newStr,options={}){let callback;"function"==typeof options?(callback=options,options={}):"callback"in 
options&&(callback=options.callback);oldStr=this.castInput(oldStr,options),newStr=this.castInput(newStr,options),oldStr=this.removeEmpty(this.tokenize(oldStr,options)),newStr=this.removeEmpty(this.tokenize(newStr,options));return this.diffWithOptionsObj(oldStr,newStr,options,callback)}diffWithOptionsObj(oldTokens,newTokens,options,callback){let _a,done=value=>{if(value=this.postProcess(value,options),!callback)return value;setTimeout(function(){callback(value)},0)},newLen=newTokens.length,oldLen=oldTokens.length,editLength=1,maxEditLength=newLen+oldLen;null!=options.maxEditLength&&(maxEditLength=Math.min(maxEditLength,options.maxEditLength));var maxExecutionTime=null!=(_a=options.timeout)?_a:1/0;let abortAfterTimestamp=Date.now()+maxExecutionTime,bestPath=[{oldPos:-1,lastComponent:void 0}],newPos=this.extractCommon(bestPath[0],newTokens,oldTokens,0,options);if(bestPath[0].oldPos+1>=oldLen&&newPos+1>=newLen)return done(this.buildValues(bestPath[0].lastComponent,newTokens,oldTokens));let minDiagonalToConsider=-1/0,maxDiagonalToConsider=1/0,execEditLength=()=>{for(let diagonalPath=Math.max(minDiagonalToConsider,-editLength);diagonalPath<=Math.min(maxDiagonalToConsider,editLength);diagonalPath+=2){let basePath;var removePath=bestPath[diagonalPath-1],addPath=bestPath[diagonalPath+1];removePath&&(bestPath[diagonalPath-1]=void 0);let canAdd=!1;addPath&&(addPathNewPos=addPath.oldPos-diagonalPath,canAdd=addPath&&0<=addPathNewPos&&addPathNewPos=oldLen&&newPos+1>=newLen)return done(this.buildValues(basePath.lastComponent,newTokens,oldTokens))||!0;(bestPath[diagonalPath]=basePath).oldPos+1>=oldLen&&(maxDiagonalToConsider=Math.min(maxDiagonalToConsider,diagonalPath-1)),newPos+1>=newLen&&(minDiagonalToConsider=Math.max(minDiagonalToConsider,diagonalPath+1))}else bestPath[diagonalPath]=void 0}editLength++};if(callback)!function exec(){setTimeout(function(){if(editLength>maxEditLength||Date.now()>abortAfterTimestamp)return callback(void 0);execEditLength()||exec()},0)}();else for(;editLength<=maxEditLength&&Date.now()<=abortAfterTimestamp;){var ret=execEditLength();if(ret)return ret}}addToPath(path,added,removed,oldPosInc,options){var last=path.lastComponent;return last&&!options.oneChangePerToken&&last.added===added&&last.removed===removed?{oldPos:path.oldPos+oldPosInc,lastComponent:{count:last.count+1,added:added,removed:removed,previousComponent:last.previousComponent}}:{oldPos:path.oldPos+oldPosInc,lastComponent:{count:1,added:added,removed:removed,previousComponent:last}}}extractCommon(basePath,newTokens,oldTokens,diagonalPath,options){var newLen=newTokens.length,oldLen=oldTokens.length;let oldPos=basePath.oldPos,newPos=oldPos-diagonalPath,commonCount=0;for(;newPos+1value.length?i:value}),component.value=this.join(value)}else component.value=this.join(newTokens.slice(newPos,newPos+component.count));newPos+=component.count,component.added||(oldPos+=component.count)}}return components}}class CharacterDiff extends Diff{}let characterDiff=new CharacterDiff;function longestCommonPrefix(str1,str2){let i;for(i=0;i{let startA=0,endB=(a.length>b.length&&(startA=a.length-b.length),b.length),map=(a.lengthsegment.segment)}else parts=value.match(tokenizeIncludingWhitespace)||[];let tokens=[],prevPart=null;return parts.forEach(part=>{/\s/.test(part)?null==prevPart?tokens.push(part):tokens.push(tokens.pop()+part):null!=prevPart&&/\s/.test(prevPart)?tokens[tokens.length-1]==prevPart?tokens.push(tokens.pop()+part):tokens.push(prevPart+part):tokens.push(part),prevPart=part}),tokens}join(tokens){return 
tokens.map((token,i)=>0==i?token:token.replace(/^\s+/,"")).join("")}postProcess(changes,options){if(changes&&!options.oneChangePerToken){let lastKeep=null,insertion=null,deletion=null;changes.forEach(change=>{change.added?insertion=change:deletion=change.removed?change:((insertion||deletion)&&dedupeWhitespaceInChangeObjects(lastKeep,deletion,insertion,change),lastKeep=change,insertion=null)}),(insertion||deletion)&&dedupeWhitespaceInChangeObjects(lastKeep,deletion,insertion,null)}return changes}}let wordDiff=new WordDiff;function dedupeWhitespaceInChangeObjects(startKeep,deletion,insertion,endKeep){if(deletion&&insertion){var oldWsPrefix=leadingWs(deletion.value),oldWsSuffix=trailingWs(deletion.value),newWsPrefix=leadingWs(insertion.value),newWsSuffix=trailingWs(insertion.value);startKeep&&(oldWsPrefix=longestCommonPrefix(oldWsPrefix,newWsPrefix),startKeep.value=replaceSuffix(startKeep.value,newWsPrefix,oldWsPrefix),deletion.value=removePrefix(deletion.value,oldWsPrefix),insertion.value=removePrefix(insertion.value,oldWsPrefix)),endKeep&&(newWsPrefix=longestCommonSuffix(oldWsSuffix,newWsSuffix),endKeep.value=replacePrefix(endKeep.value,newWsSuffix,newWsPrefix),deletion.value=removeSuffix(deletion.value,newWsPrefix),insertion.value=removeSuffix(insertion.value,newWsPrefix))}else if(insertion){if(startKeep&&(oldWsPrefix=leadingWs(insertion.value),insertion.value=insertion.value.substring(oldWsPrefix.length)),endKeep){let ws=leadingWs(endKeep.value);endKeep.value=endKeep.value.substring(ws.length)}}else if(startKeep&&endKeep){oldWsSuffix=leadingWs(endKeep.value),newWsSuffix=leadingWs(deletion.value),newWsPrefix=trailingWs(deletion.value),insertion=longestCommonPrefix(oldWsSuffix,newWsSuffix),oldWsPrefix=(deletion.value=removePrefix(deletion.value,insertion),longestCommonSuffix(removePrefix(oldWsSuffix,insertion),newWsPrefix));deletion.value=removeSuffix(deletion.value,oldWsPrefix),endKeep.value=replacePrefix(endKeep.value,oldWsSuffix,oldWsPrefix),startKeep.value=replaceSuffix(startKeep.value,oldWsSuffix,oldWsSuffix.slice(0,oldWsSuffix.length-oldWsPrefix.length))}else if(endKeep){newWsSuffix=leadingWs(endKeep.value),insertion=maximumOverlap(trailingWs(deletion.value),newWsSuffix);deletion.value=removeSuffix(deletion.value,insertion)}else if(startKeep){let overlap=maximumOverlap(trailingWs(startKeep.value),leadingWs(deletion.value));deletion.value=removePrefix(deletion.value,overlap)}}class WordsWithSpaceDiff extends Diff{tokenize(value){var regex=new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`,"ug");return value.match(regex)||[]}}let wordsWithSpaceDiff=new WordsWithSpaceDiff;function diffWordsWithSpace(oldStr,newStr,options){return wordsWithSpaceDiff.diff(oldStr,newStr,options)}class LineDiff extends Diff{constructor(){super(...arguments),this.tokenize=tokenize}equals(left,right,options){return options.ignoreWhitespace?(options.newlineIsToken&&left.includes("\n")||(left=left.trim()),options.newlineIsToken&&right.includes("\n")||(right=right.trim())):options.ignoreNewlineAtEof&&!options.newlineIsToken&&(left.endsWith("\n")&&(left=left.slice(0,-1)),right.endsWith("\n"))&&(right=right.slice(0,-1)),super.equals(left,right,options)}}let lineDiff=new LineDiff;function diffLines(oldStr,newStr,options){return lineDiff.diff(oldStr,newStr,options)}function tokenize(value,options){var 
retLines=[],linesAndNewlines=(value=options.stripTrailingCr?value.replace(/\r\n/g,"\n"):value).split(/(\n|\r\n)/);linesAndNewlines[linesAndNewlines.length-1]||linesAndNewlines.pop();for(let i=0;ivoid 0===v?undefinedReplacement:v}=options;return"string"==typeof value?value:JSON.stringify(canonicalize(value,null,null,stringifyReplacer),null,"  ")}equals(left,right,options){return super.equals(left.replace(/,([\r\n])/g,"$1"),right.replace(/,([\r\n])/g,"$1"),options)}}let jsonDiff=new JsonDiff;function canonicalize(obj,stack,replacementStack,replacer,key){stack=stack||[],replacementStack=replacementStack||[],replacer&&(obj=replacer(void 0===key?"":key,obj));let i;for(i=0;i{var chunkHeaderIndex=i,chunkHeaderLine=diffstr[i++],hunk={oldStart:+(chunkHeaderLine=chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/))[1],oldLines:void 0===chunkHeaderLine[2]?1:+chunkHeaderLine[2],newStart:+chunkHeaderLine[3],newLines:void 0===chunkHeaderLine[4]?1:+chunkHeaderLine[4],lines:[]};0===hunk.oldLines&&(hunk.oldStart+=1),0===hunk.newLines&&(hunk.newStart+=1);let addCount=0,removeCount=0;for(;i{!options.autoConvertLineEndings&&null!=options.autoConvertLineEndings||((string=>string.includes("\r\n")&&!string.startsWith("\n")&&!string.match(/[^\r]\n/))(source)&&(patch=>!(patch=Array.isArray(patch)?patch:[patch]).some(index=>index.hunks.some(hunk=>hunk.lines.some(line=>!line.startsWith("\\")&&line.endsWith("\r")))))(patch)?patch=function unixToWin(patch){return Array.isArray(patch)?patch.map(p=>unixToWin(p)):Object.assign(Object.assign({},patch),{hunks:patch.hunks.map(hunk=>Object.assign(Object.assign({},hunk),{lines:hunk.lines.map((line,i)=>line.startsWith("\\")||line.endsWith("\r")||null!=(i=hunk.lines[i+1])&&i.startsWith("\\")?line:line+"\r")}))})}(patch):(string=>!string.includes("\r\n")&&string.includes("\n"))(source)&&(patch=>(patch=Array.isArray(patch)?patch:[patch]).some(index=>index.hunks.some(hunk=>hunk.lines.some(line=>line.endsWith("\r"))))&&patch.every(index=>index.hunks.every(hunk=>hunk.lines.every((line,i)=>line.startsWith("\\")||line.endsWith("\r")||(null==(line=hunk.lines[i+1])?void 0:line.startsWith("\\"))))))(patch)&&(patch=function winToUnix(patch){return Array.isArray(patch)?patch.map(p=>winToUnix(p)):Object.assign(Object.assign({},patch),{hunks:patch.hunks.map(hunk=>Object.assign(Object.assign({},hunk),{lines:hunk.lines.map(line=>line.endsWith("\r")?line.substring(0,line.length-1):line)}))})}(patch)));let lines=source.split("\n"),hunks=patch.hunks,compareLine=options.compareLine||((lineNumber,line,operation,patchContent)=>line===patchContent),fuzzFactor=options.fuzzFactor||0,minLine=0;if(fuzzFactor<0||!Number.isInteger(fuzzFactor))throw new Error("fuzzFactor must be a non-negative integer");if(!hunks.length)return source;let prevLine="",removeEOFNL=!1,addEOFNL=!1;for(let i=0;i{let wantForward=!0,backwardExhausted=!1,forwardExhausted=!1,localOffset=1;return function iterator(){if(wantForward&&!forwardExhausted){if(backwardExhausted?localOffset++:wantForward=!1,start+localOffset<=maxLine)return start+localOffset;forwardExhausted=!0}if(!backwardExhausted)return forwardExhausted||(wantForward=!0),minLine<=start-localOffset?start-localOffset++:(backwardExhausted=!0,iterator())}})(toPos=hunk.oldStart+prevHunkOffset-1,minLine,maxLine);void 0!==toPos&&!(hunkResult=function applyHunk(hunkLines,toPos,maxErrors,hunkLinesI=0,lastContextLineMatched=!0,patchedLines=[],patchedLinesLength=0){let 
nConsecutiveOldContextLines=0,nextContextLineMustMatch=!1;for(;hunkLinesI{diff=diffLinesResultToPatch(diff);callback(diff)}}))}function diffLinesResultToPatch(diff){if(diff){diff.push({value:"",lines:[]});var hunks=[];let oldRangeStart=0,newRangeStart=0,curRange=[],oldLine=1,newLine=1;for(let i=0;i{var hasTrailingNl=text.endsWith("\n"),text=text.split("\n").map(line=>line+"\n");return hasTrailingNl?text.pop():text.push(text.pop().slice(0,-1)),text})(current.value);if(current.lines=lines,current.added||current.removed){oldRangeStart||(prev=diff[i-1],oldRangeStart=oldLine,newRangeStart=newLine,prev&&(curRange=0{patchObj?callback(formatPatch(patchObj)):callback(void 0)}}))}else{oldFileName=structuredPatch(oldFileName,newFileName,oldStr,newStr,oldHeader,newHeader,options);if(oldFileName)return formatPatch(oldFileName)}}exports.Diff=Diff,exports.applyPatch=applyPatch,exports.applyPatches=function(uniDiff,options){let spDiff="string"==typeof uniDiff?parsePatch(uniDiff):uniDiff,currentIndex=0;!function processIndex(){let index=spDiff[currentIndex++];if(!index)return options.complete();options.loadFile(index,function(err,data){if(err)return options.complete(err);err=applyPatch(data,index,options),options.patched(index,err,function(err){if(err)return options.complete(err);processIndex()})})}()},exports.arrayDiff=arrayDiff,exports.canonicalize=canonicalize,exports.characterDiff=characterDiff,exports.convertChangesToDMP=function(changes){var ret=[];let change,operation;for(let i=0;i"):change.removed&&ret.push(""),ret.push((s=>{let n=s;return n=(n=(n=(n=n.replace(/&/g,"&")).replace(//g,">")).replace(/"/g,""")})(change.value)),change.added?ret.push(""):change.removed&&ret.push("")}return ret.join("")},exports.createPatch=function(fileName,oldStr,newStr,oldHeader,newHeader,options){return createTwoFilesPatch(fileName,fileName,oldStr,newStr,oldHeader,newHeader,options)},exports.createTwoFilesPatch=createTwoFilesPatch,exports.cssDiff=cssDiff,exports.diffArrays=function(oldArr,newArr,options){return arrayDiff.diff(oldArr,newArr,options)},exports.diffChars=function(oldStr,newStr,options){return characterDiff.diff(oldStr,newStr,options)},exports.diffCss=function(oldStr,newStr,options){return cssDiff.diff(oldStr,newStr,options)},exports.diffJson=function(oldStr,newStr,options){return jsonDiff.diff(oldStr,newStr,options)},exports.diffLines=diffLines,exports.diffSentences=function(oldStr,newStr,options){return sentenceDiff.diff(oldStr,newStr,options)},exports.diffTrimmedLines=function(oldStr,newStr,options){return options=((options,defaults)=>{if("function"==typeof options)defaults.callback=options;else if(options)for(var name in options)Object.prototype.hasOwnProperty.call(options,name)&&(defaults[name]=options[name]);return defaults})(options,{ignoreWhitespace:!0}),lineDiff.diff(oldStr,newStr,options)},exports.diffWords=function(oldStr,newStr,options){return null==(null==options?void 0:options.ignoreWhitespace)||options.ignoreWhitespace?wordDiff.diff(oldStr,newStr,options):diffWordsWithSpace(oldStr,newStr,options)},exports.diffWordsWithSpace=diffWordsWithSpace,exports.formatPatch=formatPatch,exports.jsonDiff=jsonDiff,exports.lineDiff=lineDiff,exports.parsePatch=parsePatch,exports.reversePatch=function reversePatch(structuredPatch){return 
Array.isArray(structuredPatch)?structuredPatch.map(patch=>reversePatch(patch)).reverse():Object.assign(Object.assign({},structuredPatch),{oldFileName:structuredPatch.newFileName,oldHeader:structuredPatch.newHeader,newFileName:structuredPatch.oldFileName,newHeader:structuredPatch.oldHeader,hunks:structuredPatch.hunks.map(hunk=>({oldLines:hunk.newLines,oldStart:hunk.newStart,newLines:hunk.oldLines,newStart:hunk.oldStart,lines:hunk.lines.map(l=>l.startsWith("-")?"+"+l.slice(1):l.startsWith("+")?"-"+l.slice(1):l)}))})},exports.sentenceDiff=sentenceDiff,exports.structuredPatch=structuredPatch,exports.wordDiff=wordDiff,exports.wordsWithSpaceDiff=wordsWithSpaceDiff});
\ No newline at end of file
diff --git a/node_modules/diff/eslint.config.mjs b/node_modules/diff/eslint.config.mjs
new file mode 100644
index 0000000000000..ea1c73566ea89
--- /dev/null
+++ b/node_modules/diff/eslint.config.mjs
@@ -0,0 +1,182 @@
+// @ts-check
+
+import eslint from '@eslint/js';
+import tseslint from 'typescript-eslint';
+import globals from "globals";
+
+export default tseslint.config(
+  {
+    ignores: [
+      "**/*", // ignore everything...
+      "!src/**/", "!src/**/*.ts", // ... except our TypeScript source files...
+      "!test/**/", "!test/**/*.js", // ... and our tests
+    ],
+  },
+  eslint.configs.recommended,
+  tseslint.configs.recommended,
+  {
+    files: ['src/**/*.ts'],
+    languageOptions: {
+      parserOptions: {
+        projectService: true,
+        tsconfigRootDir: import.meta.dirname,
+      },
+    },
+    extends: [tseslint.configs.recommendedTypeChecked],
+    rules: {
+      // Not sure if these actually serve a purpose, but they provide a way to enforce SOME of what
+      // would be imposed by having "verbatimModuleSyntax": true in our tsconfig.json without
+      // actually doing that.
+      "@typescript-eslint/consistent-type-imports": 2,
+      "@typescript-eslint/consistent-type-exports": 2,
+
+      // Things from the recommendedTypeChecked shared config that are disabled simply because they
+      // caused lots of errors in our existing code when tried. Plausibly useful to turn on if
+      // possible and somebody fancies doing the work:
+      "@typescript-eslint/no-unsafe-argument": 0,
+      "@typescript-eslint/no-unsafe-assignment": 0,
+      "@typescript-eslint/no-unsafe-call": 0,
+      "@typescript-eslint/no-unsafe-member-access": 0,
+      "@typescript-eslint/no-unsafe-return": 0,
+    }
+  },
+  {
+    languageOptions: {
+      globals: {
+        ...globals.browser,
+      },
+    },
+
+    rules: {
+      // Possible Errors //
+      //-----------------//
+      "comma-dangle": [2, "never"],
+      "no-console": 1, // Allow for debugging
+      "no-debugger": 1, // Allow for debugging
+      "no-extra-parens": [2, "functions"],
+      "no-extra-semi": 2,
+      "no-negated-in-lhs": 2,
+      "no-unreachable": 1, // Optimizer and coverage will handle/highlight this and can be useful for debugging
+
+      // Best Practices //
+      //----------------//
+      curly: 2,
+      "default-case": 1,
+      "dot-notation": [2, {
+        allowKeywords: false,
+      }],
+      "guard-for-in": 1,
+      "no-alert": 2,
+      "no-caller": 2,
+      "no-div-regex": 1,
+      "no-eval": 2,
+      "no-extend-native": 2,
+      "no-extra-bind": 2,
+      "no-floating-decimal": 2,
+      "no-implied-eval": 2,
+      "no-iterator": 2,
+      "no-labels": 2,
+      "no-lone-blocks": 2,
+      "no-multi-spaces": 2,
+      "no-multi-str": 1,
+      "no-native-reassign": 2,
+      "no-new": 2,
+      "no-new-func": 2,
+      "no-new-wrappers": 2,
+      "no-octal-escape": 2,
+      "no-process-env": 2,
+      "no-proto": 2,
+      "no-return-assign": 2,
+      "no-script-url": 2,
+      "no-self-compare": 2,
+      "no-sequences": 2,
+      "no-throw-literal": 2,
+      "no-unused-expressions": 2,
+      "no-warning-comments": 1,
+      radix: 2,
+      "wrap-iife": 2,
+
+      // Variables //
+      //-----------//
+      "no-catch-shadow": 2,
+      "no-label-var": 2,
+      "no-undef-init": 2,
+
+      // Node.js //
+      //---------//
+
+      // Stylistic //
+      //-----------//
+      "brace-style": [2, "1tbs", {
+        allowSingleLine: true,
+      }],
+      camelcase: 2,
+      "comma-spacing": [2, {
+        before: false,
+        after: true,
+      }],
+      "comma-style": [2, "last"],
+      "consistent-this": [1, "self"],
+      "eol-last": 2,
+      "func-style": [2, "declaration"],
+      "key-spacing": [2, {
+        beforeColon: false,
+        afterColon: true,
+      }],
+      "new-cap": 2,
+      "new-parens": 2,
+      "no-array-constructor": 2,
+      "no-lonely-if": 2,
+      "no-mixed-spaces-and-tabs": 2,
+      "no-nested-ternary": 1,
+      "no-new-object": 2,
+      "no-spaced-func": 2,
+      "no-trailing-spaces": 2,
+      "quote-props": [2, "as-needed", {
+        keywords: true,
+      }],
+      quotes: [2, "single", "avoid-escape"],
+      semi: 2,
+      "semi-spacing": [2, {
+        before: false,
+        after: true,
+      }],
+      "space-before-blocks": [2, "always"],
+      "space-before-function-paren": [2, {
+        anonymous: "never",
+        named: "never",
+      }],
+      "space-in-parens": [2, "never"],
+      "space-infix-ops": 2,
+      "space-unary-ops": 2,
+      "spaced-comment": [2, "always"],
+      "wrap-regex": 1,
+      "no-var": 2,
+
+      // Typescript //
+      //------------//
+      "@typescript-eslint/no-explicit-any": 0, // Very strict rule, incompatible with our code
+
+      // We use these intentionally - e.g.
+      //     export interface DiffCssOptions extends CommonDiffOptions {}
+      // for the options argument to diffCss which currently takes no options beyond the ones
+      // common to all diffFoo functions. Doing this allows consistency (one options interface per
+      // diffFoo function) and future-proofs against the API having to change in future if we add a
+      // non-common option to one of these functions.
+      "@typescript-eslint/no-empty-object-type": [2, {allowInterfaces: 'with-single-extends'}],
+    },
+  },
+  {
+    files: ['test/**/*.js'],
+    languageOptions: {
+      globals: {
+        ...globals.node,
+        ...globals.mocha,
+      },
+    },
+    rules: {
+      "no-unused-expressions": 0, // Needs disabling to support Chai `.to.be.undefined` etc syntax
+      "@typescript-eslint/no-unused-expressions": 0, // (as above)
+    },
+  }
+);
diff --git a/node_modules/diff/lib/convert/dmp.js b/node_modules/diff/lib/convert/dmp.js
deleted file mode 100644
index 4f9081a59b9cd..0000000000000
--- a/node_modules/diff/lib/convert/dmp.js
+++ /dev/null
@@ -1,27 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.convertChangesToDMP = convertChangesToDMP;
-/*istanbul ignore end*/
-// See: http://code.google.com/p/google-diff-match-patch/wiki/API
-function convertChangesToDMP(changes) {
-  var ret = [],
-    change,
-    operation;
-  for (var i = 0; i < changes.length; i++) {
-    change = changes[i];
-    if (change.added) {
-      operation = 1;
-    } else if (change.removed) {
-      operation = -1;
-    } else {
-      operation = 0;
-    }
-    ret.push([operation, change.value]);
-  }
-  return ret;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJjb252ZXJ0Q2hhbmdlc1RvRE1QIiwiY2hhbmdlcyIsInJldCIsImNoYW5nZSIsIm9wZXJhdGlvbiIsImkiLCJsZW5ndGgiLCJhZGRlZCIsInJlbW92ZWQiLCJwdXNoIiwidmFsdWUiXSwic291cmNlcyI6WyIuLi8uLi9zcmMvY29udmVydC9kbXAuanMiXSwic291cmNlc0NvbnRlbnQiOlsiLy8gU2VlOiBodHRwOi8vY29kZS5nb29nbGUuY29tL3AvZ29vZ2xlLWRpZmYtbWF0Y2gtcGF0Y2gvd2lraS9BUElcbmV4cG9ydCBmdW5jdGlvbiBjb252ZXJ0Q2hhbmdlc1RvRE1QKGNoYW5nZXMpIHtcbiAgbGV0IHJldCA9IFtdLFxuICAgICAgY2hhbmdlLFxuICAgICAgb3BlcmF0aW9uO1xuICBmb3IgKGxldCBpID0gMDsgaSA8IGNoYW5nZXMubGVuZ3RoOyBpKyspIHtcbiAgICBjaGFuZ2UgPSBjaGFuZ2VzW2ldO1xuICAgIGlmIChjaGFuZ2UuYWRkZWQpIHtcbiAgICAgIG9wZXJhdGlvbiA9IDE7XG4gICAgfSBlbHNlIGlmIChjaGFuZ2UucmVtb3ZlZCkge1xuICAgICAgb3BlcmF0aW9uID0gLTE7XG4gICAgfSBlbHNlIHtcbiAgICAgIG9wZXJhdGlvbiA9IDA7XG4gICAgfVxuXG4gICAgcmV0LnB1c2goW29wZXJhdGlvbiwgY2hhbmdlLnZhbHVlXSk7XG4gIH1cbiAgcmV0dXJuIHJldDtcbn1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7QUFBQTtBQUNPLFNBQVNBLG1CQUFtQkEsQ0FBQ0MsT0FBTyxFQUFFO0VBQzNDLElBQUlDLEdBQUcsR0FBRyxFQUFFO0lBQ1JDLE1BQU07SUFDTkMsU0FBUztFQUNiLEtBQUssSUFBSUMsQ0FBQyxHQUFHLENBQUMsRUFBRUEsQ0FBQyxHQUFHSixPQUFPLENBQUNLLE1BQU0sRUFBRUQsQ0FBQyxFQUFFLEVBQUU7SUFDdkNGLE1BQU0sR0FBR0YsT0FBTyxDQUFDSSxDQUFDLENBQUM7SUFDbkIsSUFBSUYsTUFBTSxDQUFDSSxLQUFLLEVBQUU7TUFDaEJILFNBQVMsR0FBRyxDQUFDO0lBQ2YsQ0FBQyxNQUFNLElBQUlELE1BQU0sQ0FBQ0ssT0FBTyxFQUFFO01BQ3pCSixTQUFTLEdBQUcsQ0FBQyxDQUFDO0lBQ2hCLENBQUMsTUFBTTtNQUNMQSxTQUFTLEdBQUcsQ0FBQztJQUNmO0lBRUFGLEdBQUcsQ0FBQ08sSUFBSSxDQUFDLENBQUNMLFNBQVMsRUFBRUQsTUFBTSxDQUFDTyxLQUFLLENBQUMsQ0FBQztFQUNyQztFQUNBLE9BQU9SLEdBQUc7QUFDWiIsImlnbm9yZUxpc3QiOltdfQ==
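
For reference (not part of the patch): the deleted lib/convert/dmp.js only shipped convertChangesToDMP, which remains available from the dist bundles above. It maps change objects to diff-match-patch style [operation, value] pairs: 1 for added, -1 for removed, 0 for unchanged. A minimal sketch:

    // Illustrative only; the ordering of the removed/added pair
    // depends on the computed diff.
    const { diffChars, convertChangesToDMP } = require('diff');
    console.log(convertChangesToDMP(diffChars('cat', 'cap')));
    // => e.g. [[0, 'ca'], [-1, 't'], [1, 'p']]
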
diff --git a/node_modules/diff/lib/convert/xml.js b/node_modules/diff/lib/convert/xml.js
deleted file mode 100644
index d21b7d35638e7..0000000000000
--- a/node_modules/diff/lib/convert/xml.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.convertChangesToXML = convertChangesToXML;
-/*istanbul ignore end*/
-function convertChangesToXML(changes) {
-  var ret = [];
-  for (var i = 0; i < changes.length; i++) {
-    var change = changes[i];
-    if (change.added) {
-      ret.push('<ins>');
-    } else if (change.removed) {
-      ret.push('<del>');
-    }
-    ret.push(escapeHTML(change.value));
-    if (change.added) {
-      ret.push('</ins>');
-    } else if (change.removed) {
-      ret.push('</del>');
-    }
-  }
-  return ret.join('');
-}
-function escapeHTML(s) {
-  var n = s;
-  n = n.replace(/&/g, '&amp;');
-  n = n.replace(/</g, '&lt;');
-  n = n.replace(/>/g, '&gt;');
-  n = n.replace(/"/g, '&quot;');
-  return n;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJjb252ZXJ0Q2hhbmdlc1RvWE1MIiwiY2hhbmdlcyIsInJldCIsImkiLCJsZW5ndGgiLCJjaGFuZ2UiLCJhZGRlZCIsInB1c2giLCJyZW1vdmVkIiwiZXNjYXBlSFRNTCIsInZhbHVlIiwiam9pbiIsInMiLCJuIiwicmVwbGFjZSJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9jb252ZXJ0L3htbC5qcyJdLCJzb3VyY2VzQ29udGVudCI6WyJleHBvcnQgZnVuY3Rpb24gY29udmVydENoYW5nZXNUb1hNTChjaGFuZ2VzKSB7XG4gIGxldCByZXQgPSBbXTtcbiAgZm9yIChsZXQgaSA9IDA7IGkgPCBjaGFuZ2VzLmxlbmd0aDsgaSsrKSB7XG4gICAgbGV0IGNoYW5nZSA9IGNoYW5nZXNbaV07XG4gICAgaWYgKGNoYW5nZS5hZGRlZCkge1xuICAgICAgcmV0LnB1c2goJzxpbnM+Jyk7XG4gICAgfSBlbHNlIGlmIChjaGFuZ2UucmVtb3ZlZCkge1xuICAgICAgcmV0LnB1c2goJzxkZWw+Jyk7XG4gICAgfVxuXG4gICAgcmV0LnB1c2goZXNjYXBlSFRNTChjaGFuZ2UudmFsdWUpKTtcblxuICAgIGlmIChjaGFuZ2UuYWRkZWQpIHtcbiAgICAgIHJldC5wdXNoKCc8L2lucz4nKTtcbiAgICB9IGVsc2UgaWYgKGNoYW5nZS5yZW1vdmVkKSB7XG4gICAgICByZXQucHVzaCgnPC9kZWw+Jyk7XG4gICAgfVxuICB9XG4gIHJldHVybiByZXQuam9pbignJyk7XG59XG5cbmZ1bmN0aW9uIGVzY2FwZUhUTUwocykge1xuICBsZXQgbiA9IHM7XG4gIG4gPSBuLnJlcGxhY2UoLyYvZywgJyZhbXA7Jyk7XG4gIG4gPSBuLnJlcGxhY2UoLzwvZywgJyZsdDsnKTtcbiAgbiA9IG4ucmVwbGFjZSgvPi9nLCAnJmd0OycpO1xuICBuID0gbi5yZXBsYWNlKC9cIi9nLCAnJnF1b3Q7Jyk7XG5cbiAgcmV0dXJuIG47XG59XG4iXSwibWFwcGluZ3MiOiI7Ozs7Ozs7O0FBQU8sU0FBU0EsbUJBQW1CQSxDQUFDQyxPQUFPLEVBQUU7RUFDM0MsSUFBSUMsR0FBRyxHQUFHLEVBQUU7RUFDWixLQUFLLElBQUlDLENBQUMsR0FBRyxDQUFDLEVBQUVBLENBQUMsR0FBR0YsT0FBTyxDQUFDRyxNQUFNLEVBQUVELENBQUMsRUFBRSxFQUFFO0lBQ3ZDLElBQUlFLE1BQU0sR0FBR0osT0FBTyxDQUFDRSxDQUFDLENBQUM7SUFDdkIsSUFBSUUsTUFBTSxDQUFDQyxLQUFLLEVBQUU7TUFDaEJKLEdBQUcsQ0FBQ0ssSUFBSSxDQUFDLE9BQU8sQ0FBQztJQUNuQixDQUFDLE1BQU0sSUFBSUYsTUFBTSxDQUFDRyxPQUFPLEVBQUU7TUFDekJOLEdBQUcsQ0FBQ0ssSUFBSSxDQUFDLE9BQU8sQ0FBQztJQUNuQjtJQUVBTCxHQUFHLENBQUNLLElBQUksQ0FBQ0UsVUFBVSxDQUFDSixNQUFNLENBQUNLLEtBQUssQ0FBQyxDQUFDO0lBRWxDLElBQUlMLE1BQU0sQ0FBQ0MsS0FBSyxFQUFFO01BQ2hCSixHQUFHLENBQUNLLElBQUksQ0FBQyxRQUFRLENBQUM7SUFDcEIsQ0FBQyxNQUFNLElBQUlGLE1BQU0sQ0FBQ0csT0FBTyxFQUFFO01BQ3pCTixHQUFHLENBQUNLLElBQUksQ0FBQyxRQUFRLENBQUM7SUFDcEI7RUFDRjtFQUNBLE9BQU9MLEdBQUcsQ0FBQ1MsSUFBSSxDQUFDLEVBQUUsQ0FBQztBQUNyQjtBQUVBLFNBQVNGLFVBQVVBLENBQUNHLENBQUMsRUFBRTtFQUNyQixJQUFJQyxDQUFDLEdBQUdELENBQUM7RUFDVEMsQ0FBQyxHQUFHQSxDQUFDLENBQUNDLE9BQU8sQ0FBQyxJQUFJLEVBQUUsT0FBTyxDQUFDO0VBQzVCRCxDQUFDLEdBQUdBLENBQUMsQ0FBQ0MsT0FBTyxDQUFDLElBQUksRUFBRSxNQUFNLENBQUM7RUFDM0JELENBQUMsR0FBR0EsQ0FBQyxDQUFDQyxPQUFPLENBQUMsSUFBSSxFQUFFLE1BQU0sQ0FBQztFQUMzQkQsQ0FBQyxHQUFHQSxDQUFDLENBQUNDLE9BQU8sQ0FBQyxJQUFJLEVBQUUsUUFBUSxDQUFDO0VBRTdCLE9BQU9ELENBQUM7QUFDViIsImlnbm9yZUxpc3QiOltdfQ==
diff --git a/node_modules/diff/lib/diff/array.js b/node_modules/diff/lib/diff/array.js
deleted file mode 100644
index bd0802db42ec2..0000000000000
--- a/node_modules/diff/lib/diff/array.js
+++ /dev/null
@@ -1,39 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.arrayDiff = void 0;
-exports.diffArrays = diffArrays;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-var arrayDiff =
-/*istanbul ignore start*/
-exports.arrayDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-arrayDiff.tokenize = function (value) {
-  return value.slice();
-};
-arrayDiff.join = arrayDiff.removeEmpty = function (value) {
-  return value;
-};
-function diffArrays(oldArr, newArr, callback) {
-  return arrayDiff.diff(oldArr, newArr, callback);
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfYmFzZSIsIl9pbnRlcm9wUmVxdWlyZURlZmF1bHQiLCJyZXF1aXJlIiwib2JqIiwiX19lc01vZHVsZSIsImFycmF5RGlmZiIsImV4cG9ydHMiLCJEaWZmIiwidG9rZW5pemUiLCJ2YWx1ZSIsInNsaWNlIiwiam9pbiIsInJlbW92ZUVtcHR5IiwiZGlmZkFycmF5cyIsIm9sZEFyciIsIm5ld0FyciIsImNhbGxiYWNrIiwiZGlmZiJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9kaWZmL2FycmF5LmpzIl0sInNvdXJjZXNDb250ZW50IjpbImltcG9ydCBEaWZmIGZyb20gJy4vYmFzZSc7XG5cbmV4cG9ydCBjb25zdCBhcnJheURpZmYgPSBuZXcgRGlmZigpO1xuYXJyYXlEaWZmLnRva2VuaXplID0gZnVuY3Rpb24odmFsdWUpIHtcbiAgcmV0dXJuIHZhbHVlLnNsaWNlKCk7XG59O1xuYXJyYXlEaWZmLmpvaW4gPSBhcnJheURpZmYucmVtb3ZlRW1wdHkgPSBmdW5jdGlvbih2YWx1ZSkge1xuICByZXR1cm4gdmFsdWU7XG59O1xuXG5leHBvcnQgZnVuY3Rpb24gZGlmZkFycmF5cyhvbGRBcnIsIG5ld0FyciwgY2FsbGJhY2spIHsgcmV0dXJuIGFycmF5RGlmZi5kaWZmKG9sZEFyciwgbmV3QXJyLCBjYWxsYmFjayk7IH1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O0FBQUE7QUFBQTtBQUFBQSxLQUFBLEdBQUFDLHNCQUFBLENBQUFDLE9BQUE7QUFBQTtBQUFBO0FBQTBCLG1DQUFBRCx1QkFBQUUsR0FBQSxXQUFBQSxHQUFBLElBQUFBLEdBQUEsQ0FBQUMsVUFBQSxHQUFBRCxHQUFBLGdCQUFBQSxHQUFBO0FBQUE7QUFFbkIsSUFBTUUsU0FBUztBQUFBO0FBQUFDLE9BQUEsQ0FBQUQsU0FBQTtBQUFBO0FBQUc7QUFBSUU7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUEsQ0FBSSxDQUFDLENBQUM7QUFDbkNGLFNBQVMsQ0FBQ0csUUFBUSxHQUFHLFVBQVNDLEtBQUssRUFBRTtFQUNuQyxPQUFPQSxLQUFLLENBQUNDLEtBQUssQ0FBQyxDQUFDO0FBQ3RCLENBQUM7QUFDREwsU0FBUyxDQUFDTSxJQUFJLEdBQUdOLFNBQVMsQ0FBQ08sV0FBVyxHQUFHLFVBQVNILEtBQUssRUFBRTtFQUN2RCxPQUFPQSxLQUFLO0FBQ2QsQ0FBQztBQUVNLFNBQVNJLFVBQVVBLENBQUNDLE1BQU0sRUFBRUMsTUFBTSxFQUFFQyxRQUFRLEVBQUU7RUFBRSxPQUFPWCxTQUFTLENBQUNZLElBQUksQ0FBQ0gsTUFBTSxFQUFFQyxNQUFNLEVBQUVDLFFBQVEsQ0FBQztBQUFFIiwiaWdub3JlTGlzdCI6W119
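
For reference (not part of the patch): the deleted lib/diff/array.js wrapped a Diff instance whose tokenize is value.slice() and whose join/removeEmpty are identity functions, so array elements are compared with === unless an options.comparator is supplied. A minimal sketch of the still-exported diffArrays:

    // Illustrative only.
    const { diffArrays } = require('diff');
    const result = diffArrays(['a', 'b', 'c'], ['a', 'c', 'd']);
    // result is a list of change objects whose value fields are sub-arrays,
    // e.g. { count: 1, value: ['b'], removed: true } and { count: 1, value: ['d'], added: true }.
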
diff --git a/node_modules/diff/lib/diff/base.js b/node_modules/diff/lib/diff/base.js
deleted file mode 100644
index d2b4b447f51fe..0000000000000
--- a/node_modules/diff/lib/diff/base.js
+++ /dev/null
@@ -1,304 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports["default"] = Diff;
-/*istanbul ignore end*/
-function Diff() {}
-Diff.prototype = {
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  diff: function diff(oldString, newString) {
-    /*istanbul ignore start*/
-    var _options$timeout;
-    var
-    /*istanbul ignore end*/
-    options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-    var callback = options.callback;
-    if (typeof options === 'function') {
-      callback = options;
-      options = {};
-    }
-    var self = this;
-    function done(value) {
-      value = self.postProcess(value, options);
-      if (callback) {
-        setTimeout(function () {
-          callback(value);
-        }, 0);
-        return true;
-      } else {
-        return value;
-      }
-    }
-
-    // Allow subclasses to massage the input prior to running
-    oldString = this.castInput(oldString, options);
-    newString = this.castInput(newString, options);
-    oldString = this.removeEmpty(this.tokenize(oldString, options));
-    newString = this.removeEmpty(this.tokenize(newString, options));
-    var newLen = newString.length,
-      oldLen = oldString.length;
-    var editLength = 1;
-    var maxEditLength = newLen + oldLen;
-    if (options.maxEditLength != null) {
-      maxEditLength = Math.min(maxEditLength, options.maxEditLength);
-    }
-    var maxExecutionTime =
-    /*istanbul ignore start*/
-    (_options$timeout =
-    /*istanbul ignore end*/
-    options.timeout) !== null && _options$timeout !== void 0 ? _options$timeout : Infinity;
-    var abortAfterTimestamp = Date.now() + maxExecutionTime;
-    var bestPath = [{
-      oldPos: -1,
-      lastComponent: undefined
-    }];
-
-    // Seed editLength = 0, i.e. the content starts with the same values
-    var newPos = this.extractCommon(bestPath[0], newString, oldString, 0, options);
-    if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-      // Identity per the equality and tokenizer
-      return done(buildValues(self, bestPath[0].lastComponent, newString, oldString, self.useLongestToken));
-    }
-
-    // Once we hit the right edge of the edit graph on some diagonal k, we can
-    // definitely reach the end of the edit graph in no more than k edits, so
-    // there's no point in considering any moves to diagonal k+1 any more (from
-    // which we're guaranteed to need at least k+1 more edits).
-    // Similarly, once we've reached the bottom of the edit graph, there's no
-    // point considering moves to lower diagonals.
-    // We record this fact by setting minDiagonalToConsider and
-    // maxDiagonalToConsider to some finite value once we've hit the edge of
-    // the edit graph.
-    // This optimization is not faithful to the original algorithm presented in
-    // Myers's paper, which instead pointlessly extends D-paths off the end of
-    // the edit graph - see page 7 of Myers's paper which notes this point
-    // explicitly and illustrates it with a diagram. This has major performance
-    // implications for some common scenarios. For instance, to compute a diff
-    // where the new text simply appends d characters on the end of the
-    // original text of length n, the true Myers algorithm will take O(n+d^2)
-    // time while this optimization needs only O(n+d) time.
-    var minDiagonalToConsider = -Infinity,
-      maxDiagonalToConsider = Infinity;
-
-    // Main worker method. checks all permutations of a given edit length for acceptance.
-    function execEditLength() {
-      for (var diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
-        var basePath =
-        /*istanbul ignore start*/
-        void 0
-        /*istanbul ignore end*/
-        ;
-        var removePath = bestPath[diagonalPath - 1],
-          addPath = bestPath[diagonalPath + 1];
-        if (removePath) {
-          // No one else is going to attempt to use this value, clear it
-          bestPath[diagonalPath - 1] = undefined;
-        }
-        var canAdd = false;
-        if (addPath) {
-          // what newPos will be after we do an insertion:
-          var addPathNewPos = addPath.oldPos - diagonalPath;
-          canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
-        }
-        var canRemove = removePath && removePath.oldPos + 1 < oldLen;
-        if (!canAdd && !canRemove) {
-          // If this path is a terminal then prune
-          bestPath[diagonalPath] = undefined;
-          continue;
-        }
-
-        // Select the diagonal that we want to branch from. We select the prior
-        // path whose position in the old string is the farthest from the origin
-        // and does not pass the bounds of the diff graph
-        if (!canRemove || canAdd && removePath.oldPos < addPath.oldPos) {
-          basePath = self.addToPath(addPath, true, false, 0, options);
-        } else {
-          basePath = self.addToPath(removePath, false, true, 1, options);
-        }
-        newPos = self.extractCommon(basePath, newString, oldString, diagonalPath, options);
-        if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-          // If we have hit the end of both strings, then we are done
-          return done(buildValues(self, basePath.lastComponent, newString, oldString, self.useLongestToken));
-        } else {
-          bestPath[diagonalPath] = basePath;
-          if (basePath.oldPos + 1 >= oldLen) {
-            maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
-          }
-          if (newPos + 1 >= newLen) {
-            minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
-          }
-        }
-      }
-      editLength++;
-    }
-
-    // Performs the length of edit iteration. Is a bit fugly as this has to support the
-    // sync and async mode which is never fun. Loops over execEditLength until a value
-    // is produced, or until the edit length exceeds options.maxEditLength (if given),
-    // in which case it will return undefined.
-    if (callback) {
-      (function exec() {
-        setTimeout(function () {
-          if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
-            return callback();
-          }
-          if (!execEditLength()) {
-            exec();
-          }
-        }, 0);
-      })();
-    } else {
-      while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
-        var ret = execEditLength();
-        if (ret) {
-          return ret;
-        }
-      }
-    }
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  addToPath: function addToPath(path, added, removed, oldPosInc, options) {
-    var last = path.lastComponent;
-    if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
-      return {
-        oldPos: path.oldPos + oldPosInc,
-        lastComponent: {
-          count: last.count + 1,
-          added: added,
-          removed: removed,
-          previousComponent: last.previousComponent
-        }
-      };
-    } else {
-      return {
-        oldPos: path.oldPos + oldPosInc,
-        lastComponent: {
-          count: 1,
-          added: added,
-          removed: removed,
-          previousComponent: last
-        }
-      };
-    }
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  extractCommon: function extractCommon(basePath, newString, oldString, diagonalPath, options) {
-    var newLen = newString.length,
-      oldLen = oldString.length,
-      oldPos = basePath.oldPos,
-      newPos = oldPos - diagonalPath,
-      commonCount = 0;
-    while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldString[oldPos + 1], newString[newPos + 1], options)) {
-      newPos++;
-      oldPos++;
-      commonCount++;
-      if (options.oneChangePerToken) {
-        basePath.lastComponent = {
-          count: 1,
-          previousComponent: basePath.lastComponent,
-          added: false,
-          removed: false
-        };
-      }
-    }
-    if (commonCount && !options.oneChangePerToken) {
-      basePath.lastComponent = {
-        count: commonCount,
-        previousComponent: basePath.lastComponent,
-        added: false,
-        removed: false
-      };
-    }
-    basePath.oldPos = oldPos;
-    return newPos;
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  equals: function equals(left, right, options) {
-    if (options.comparator) {
-      return options.comparator(left, right);
-    } else {
-      return left === right || options.ignoreCase && left.toLowerCase() === right.toLowerCase();
-    }
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  removeEmpty: function removeEmpty(array) {
-    var ret = [];
-    for (var i = 0; i < array.length; i++) {
-      if (array[i]) {
-        ret.push(array[i]);
-      }
-    }
-    return ret;
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  castInput: function castInput(value) {
-    return value;
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  tokenize: function tokenize(value) {
-    return Array.from(value);
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  join: function join(chars) {
-    return chars.join('');
-  },
-  /*istanbul ignore start*/
-  /*istanbul ignore end*/
-  postProcess: function postProcess(changeObjects) {
-    return changeObjects;
-  }
-};
-function buildValues(diff, lastComponent, newString, oldString, useLongestToken) {
-  // First we convert our linked list of components in reverse order to an
-  // array in the right order:
-  var components = [];
-  var nextComponent;
-  while (lastComponent) {
-    components.push(lastComponent);
-    nextComponent = lastComponent.previousComponent;
-    delete lastComponent.previousComponent;
-    lastComponent = nextComponent;
-  }
-  components.reverse();
-  var componentPos = 0,
-    componentLen = components.length,
-    newPos = 0,
-    oldPos = 0;
-  for (; componentPos < componentLen; componentPos++) {
-    var component = components[componentPos];
-    if (!component.removed) {
-      if (!component.added && useLongestToken) {
-        var value = newString.slice(newPos, newPos + component.count);
-        value = value.map(function (value, i) {
-          var oldValue = oldString[oldPos + i];
-          return oldValue.length > value.length ? oldValue : value;
-        });
-        component.value = diff.join(value);
-      } else {
-        component.value = diff.join(newString.slice(newPos, newPos + component.count));
-      }
-      newPos += component.count;
-
-      // Common case
-      if (!component.added) {
-        oldPos += component.count;
-      }
-    } else {
-      component.value = diff.join(oldString.slice(oldPos, oldPos + component.count));
-      oldPos += component.count;
-    }
-  }
-  return components;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["Diff","prototype","diff","oldString","newString","_options$timeout","options","arguments","length","undefined","callback","self","done","value","postProcess","setTimeout","castInput","removeEmpty","tokenize","newLen","oldLen","editLength","maxEditLength","Math","min","maxExecutionTime","timeout","Infinity","abortAfterTimestamp","Date","now","bestPath","oldPos","lastComponent","newPos","extractCommon","buildValues","useLongestToken","minDiagonalToConsider","maxDiagonalToConsider","execEditLength","diagonalPath","max","basePath","removePath","addPath","canAdd","addPathNewPos","canRemove","addToPath","exec","ret","path","added","removed","oldPosInc","last","oneChangePerToken","count","previousComponent","commonCount","equals","left","right","comparator","ignoreCase","toLowerCase","array","i","push","Array","from","join","chars","changeObjects","components","nextComponent","reverse","componentPos","componentLen","component","slice","map","oldValue"],"sources":["../../src/diff/base.js"],"sourcesContent":["export default function Diff() {}\n\nDiff.prototype = {\n  diff(oldString, newString, options = {}) {\n    let callback = options.callback;\n    if (typeof options === 'function') {\n      callback = options;\n      options = {};\n    }\n\n    let self = this;\n\n    function done(value) {\n      value = self.postProcess(value, options);\n      if (callback) {\n        setTimeout(function() { callback(value); }, 0);\n        return true;\n      } else {\n        return value;\n      }\n    }\n\n    // Allow subclasses to massage the input prior to running\n    oldString = this.castInput(oldString, options);\n    newString = this.castInput(newString, options);\n\n    oldString = this.removeEmpty(this.tokenize(oldString, options));\n    newString = this.removeEmpty(this.tokenize(newString, options));\n\n    let newLen = newString.length, oldLen = oldString.length;\n    let editLength = 1;\n    let maxEditLength = newLen + oldLen;\n    if(options.maxEditLength != null) {\n      maxEditLength = Math.min(maxEditLength, options.maxEditLength);\n    }\n    const maxExecutionTime = options.timeout ?? Infinity;\n    const abortAfterTimestamp = Date.now() + maxExecutionTime;\n\n    let bestPath = [{ oldPos: -1, lastComponent: undefined }];\n\n    // Seed editLength = 0, i.e. 
the content starts with the same values\n    let newPos = this.extractCommon(bestPath[0], newString, oldString, 0, options);\n    if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {\n      // Identity per the equality and tokenizer\n      return done(buildValues(self, bestPath[0].lastComponent, newString, oldString, self.useLongestToken));\n    }\n\n    // Once we hit the right edge of the edit graph on some diagonal k, we can\n    // definitely reach the end of the edit graph in no more than k edits, so\n    // there's no point in considering any moves to diagonal k+1 any more (from\n    // which we're guaranteed to need at least k+1 more edits).\n    // Similarly, once we've reached the bottom of the edit graph, there's no\n    // point considering moves to lower diagonals.\n    // We record this fact by setting minDiagonalToConsider and\n    // maxDiagonalToConsider to some finite value once we've hit the edge of\n    // the edit graph.\n    // This optimization is not faithful to the original algorithm presented in\n    // Myers's paper, which instead pointlessly extends D-paths off the end of\n    // the edit graph - see page 7 of Myers's paper which notes this point\n    // explicitly and illustrates it with a diagram. This has major performance\n    // implications for some common scenarios. For instance, to compute a diff\n    // where the new text simply appends d characters on the end of the\n    // original text of length n, the true Myers algorithm will take O(n+d^2)\n    // time while this optimization needs only O(n+d) time.\n    let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity;\n\n    // Main worker method. checks all permutations of a given edit length for acceptance.\n    function execEditLength() {\n      for (\n        let diagonalPath = Math.max(minDiagonalToConsider, -editLength);\n        diagonalPath <= Math.min(maxDiagonalToConsider, editLength);\n        diagonalPath += 2\n      ) {\n        let basePath;\n        let removePath = bestPath[diagonalPath - 1],\n            addPath = bestPath[diagonalPath + 1];\n        if (removePath) {\n          // No one else is going to attempt to use this value, clear it\n          bestPath[diagonalPath - 1] = undefined;\n        }\n\n        let canAdd = false;\n        if (addPath) {\n          // what newPos will be after we do an insertion:\n          const addPathNewPos = addPath.oldPos - diagonalPath;\n          canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;\n        }\n\n        let canRemove = removePath && removePath.oldPos + 1 < oldLen;\n        if (!canAdd && !canRemove) {\n          // If this path is a terminal then prune\n          bestPath[diagonalPath] = undefined;\n          continue;\n        }\n\n        // Select the diagonal that we want to branch from. 
We select the prior\n        // path whose position in the old string is the farthest from the origin\n        // and does not pass the bounds of the diff graph\n        if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) {\n          basePath = self.addToPath(addPath, true, false, 0, options);\n        } else {\n          basePath = self.addToPath(removePath, false, true, 1, options);\n        }\n\n        newPos = self.extractCommon(basePath, newString, oldString, diagonalPath, options);\n\n        if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {\n          // If we have hit the end of both strings, then we are done\n          return done(buildValues(self, basePath.lastComponent, newString, oldString, self.useLongestToken));\n        } else {\n          bestPath[diagonalPath] = basePath;\n          if (basePath.oldPos + 1 >= oldLen) {\n            maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);\n          }\n          if (newPos + 1 >= newLen) {\n            minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);\n          }\n        }\n      }\n\n      editLength++;\n    }\n\n    // Performs the length of edit iteration. Is a bit fugly as this has to support the\n    // sync and async mode which is never fun. Loops over execEditLength until a value\n    // is produced, or until the edit length exceeds options.maxEditLength (if given),\n    // in which case it will return undefined.\n    if (callback) {\n      (function exec() {\n        setTimeout(function() {\n          if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {\n            return callback();\n          }\n\n          if (!execEditLength()) {\n            exec();\n          }\n        }, 0);\n      }());\n    } else {\n      while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {\n        let ret = execEditLength();\n        if (ret) {\n          return ret;\n        }\n      }\n    }\n  },\n\n  addToPath(path, added, removed, oldPosInc, options) {\n    let last = path.lastComponent;\n    if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {\n      return {\n        oldPos: path.oldPos + oldPosInc,\n        lastComponent: {count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent }\n      };\n    } else {\n      return {\n        oldPos: path.oldPos + oldPosInc,\n        lastComponent: {count: 1, added: added, removed: removed, previousComponent: last }\n      };\n    }\n  },\n  extractCommon(basePath, newString, oldString, diagonalPath, options) {\n    let newLen = newString.length,\n        oldLen = oldString.length,\n        oldPos = basePath.oldPos,\n        newPos = oldPos - diagonalPath,\n\n        commonCount = 0;\n    while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldString[oldPos + 1], newString[newPos + 1], options)) {\n      newPos++;\n      oldPos++;\n      commonCount++;\n      if (options.oneChangePerToken) {\n        basePath.lastComponent = {count: 1, previousComponent: basePath.lastComponent, added: false, removed: false};\n      }\n    }\n\n    if (commonCount && !options.oneChangePerToken) {\n      basePath.lastComponent = {count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false};\n    }\n\n    basePath.oldPos = oldPos;\n    return newPos;\n  },\n\n  equals(left, right, options) {\n    if (options.comparator) {\n      return options.comparator(left, right);\n    } else 
{\n      return left === right\n        || (options.ignoreCase && left.toLowerCase() === right.toLowerCase());\n    }\n  },\n  removeEmpty(array) {\n    let ret = [];\n    for (let i = 0; i < array.length; i++) {\n      if (array[i]) {\n        ret.push(array[i]);\n      }\n    }\n    return ret;\n  },\n  castInput(value) {\n    return value;\n  },\n  tokenize(value) {\n    return Array.from(value);\n  },\n  join(chars) {\n    return chars.join('');\n  },\n  postProcess(changeObjects) {\n    return changeObjects;\n  }\n};\n\nfunction buildValues(diff, lastComponent, newString, oldString, useLongestToken) {\n  // First we convert our linked list of components in reverse order to an\n  // array in the right order:\n  const components = [];\n  let nextComponent;\n  while (lastComponent) {\n    components.push(lastComponent);\n    nextComponent = lastComponent.previousComponent;\n    delete lastComponent.previousComponent;\n    lastComponent = nextComponent;\n  }\n  components.reverse();\n\n  let componentPos = 0,\n      componentLen = components.length,\n      newPos = 0,\n      oldPos = 0;\n\n  for (; componentPos < componentLen; componentPos++) {\n    let component = components[componentPos];\n    if (!component.removed) {\n      if (!component.added && useLongestToken) {\n        let value = newString.slice(newPos, newPos + component.count);\n        value = value.map(function(value, i) {\n          let oldValue = oldString[oldPos + i];\n          return oldValue.length > value.length ? oldValue : value;\n        });\n\n        component.value = diff.join(value);\n      } else {\n        component.value = diff.join(newString.slice(newPos, newPos + component.count));\n      }\n      newPos += component.count;\n\n      // Common case\n      if (!component.added) {\n        oldPos += component.count;\n      }\n    } else {\n      component.value = diff.join(oldString.slice(oldPos, oldPos + component.count));\n      oldPos += component.count;\n    }\n  }\n\n  return 
components;\n}\n"],"mappings":";;;;;;;;AAAe,SAASA,IAAIA,CAAA,EAAG,CAAC;AAEhCA,IAAI,CAACC,SAAS,GAAG;EAAA;EAAA;EACfC,IAAI,WAAAA,KAACC,SAAS,EAAEC,SAAS,EAAgB;IAAA;IAAA,IAAAC,gBAAA;IAAA;IAAA;IAAdC,OAAO,GAAAC,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;IACrC,IAAIG,QAAQ,GAAGJ,OAAO,CAACI,QAAQ;IAC/B,IAAI,OAAOJ,OAAO,KAAK,UAAU,EAAE;MACjCI,QAAQ,GAAGJ,OAAO;MAClBA,OAAO,GAAG,CAAC,CAAC;IACd;IAEA,IAAIK,IAAI,GAAG,IAAI;IAEf,SAASC,IAAIA,CAACC,KAAK,EAAE;MACnBA,KAAK,GAAGF,IAAI,CAACG,WAAW,CAACD,KAAK,EAAEP,OAAO,CAAC;MACxC,IAAII,QAAQ,EAAE;QACZK,UAAU,CAAC,YAAW;UAAEL,QAAQ,CAACG,KAAK,CAAC;QAAE,CAAC,EAAE,CAAC,CAAC;QAC9C,OAAO,IAAI;MACb,CAAC,MAAM;QACL,OAAOA,KAAK;MACd;IACF;;IAEA;IACAV,SAAS,GAAG,IAAI,CAACa,SAAS,CAACb,SAAS,EAAEG,OAAO,CAAC;IAC9CF,SAAS,GAAG,IAAI,CAACY,SAAS,CAACZ,SAAS,EAAEE,OAAO,CAAC;IAE9CH,SAAS,GAAG,IAAI,CAACc,WAAW,CAAC,IAAI,CAACC,QAAQ,CAACf,SAAS,EAAEG,OAAO,CAAC,CAAC;IAC/DF,SAAS,GAAG,IAAI,CAACa,WAAW,CAAC,IAAI,CAACC,QAAQ,CAACd,SAAS,EAAEE,OAAO,CAAC,CAAC;IAE/D,IAAIa,MAAM,GAAGf,SAAS,CAACI,MAAM;MAAEY,MAAM,GAAGjB,SAAS,CAACK,MAAM;IACxD,IAAIa,UAAU,GAAG,CAAC;IAClB,IAAIC,aAAa,GAAGH,MAAM,GAAGC,MAAM;IACnC,IAAGd,OAAO,CAACgB,aAAa,IAAI,IAAI,EAAE;MAChCA,aAAa,GAAGC,IAAI,CAACC,GAAG,CAACF,aAAa,EAAEhB,OAAO,CAACgB,aAAa,CAAC;IAChE;IACA,IAAMG,gBAAgB;IAAA;IAAA,CAAApB,gBAAA;IAAA;IAAGC,OAAO,CAACoB,OAAO,cAAArB,gBAAA,cAAAA,gBAAA,GAAIsB,QAAQ;IACpD,IAAMC,mBAAmB,GAAGC,IAAI,CAACC,GAAG,CAAC,CAAC,GAAGL,gBAAgB;IAEzD,IAAIM,QAAQ,GAAG,CAAC;MAAEC,MAAM,EAAE,CAAC,CAAC;MAAEC,aAAa,EAAExB;IAAU,CAAC,CAAC;;IAEzD;IACA,IAAIyB,MAAM,GAAG,IAAI,CAACC,aAAa,CAACJ,QAAQ,CAAC,CAAC,CAAC,EAAE3B,SAAS,EAAED,SAAS,EAAE,CAAC,EAAEG,OAAO,CAAC;IAC9E,IAAIyB,QAAQ,CAAC,CAAC,CAAC,CAACC,MAAM,GAAG,CAAC,IAAIZ,MAAM,IAAIc,MAAM,GAAG,CAAC,IAAIf,MAAM,EAAE;MAC5D;MACA,OAAOP,IAAI,CAACwB,WAAW,CAACzB,IAAI,EAAEoB,QAAQ,CAAC,CAAC,CAAC,CAACE,aAAa,EAAE7B,SAAS,EAAED,SAAS,EAAEQ,IAAI,CAAC0B,eAAe,CAAC,CAAC;IACvG;;IAEA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA;IACA,IAAIC,qBAAqB,GAAG,CAACX,QAAQ;MAAEY,qBAAqB,GAAGZ,QAAQ;;IAEvE;IACA,SAASa,cAAcA,CAAA,EAAG;MACxB,KACE,IAAIC,YAAY,GAAGlB,IAAI,CAACmB,GAAG,CAACJ,qBAAqB,EAAE,CAACjB,UAAU,CAAC,EAC/DoB,YAAY,IAAIlB,IAAI,CAACC,GAAG,CAACe,qBAAqB,EAAElB,UAAU,CAAC,EAC3DoB,YAAY,IAAI,CAAC,EACjB;QACA,IAAIE,QAAQ;QAAA;QAAA;QAAA;QAAA;QACZ,IAAIC,UAAU,GAAGb,QAAQ,CAACU,YAAY,GAAG,CAAC,CAAC;UACvCI,OAAO,GAAGd,QAAQ,CAACU,YAAY,GAAG,CAAC,CAAC;QACxC,IAAIG,UAAU,EAAE;UACd;UACAb,QAAQ,CAACU,YAAY,GAAG,CAAC,CAAC,GAAGhC,SAAS;QACxC;QAEA,IAAIqC,MAAM,GAAG,KAAK;QAClB,IAAID,OAAO,EAAE;UACX;UACA,IAAME,aAAa,GAAGF,OAAO,CAACb,MAAM,GAAGS,YAAY;UACnDK,MAAM,GAAGD,OAAO,IAAI,CAAC,IAAIE,aAAa,IAAIA,aAAa,GAAG5B,MAAM;QAClE;QAEA,IAAI6B,SAAS,GAAGJ,UAAU,IAAIA,UAAU,CAACZ,MAAM,GAAG,CAAC,GAAGZ,MAAM;QAC5D,IAAI,CAAC0B,MAAM,IAAI,CAACE,SAAS,EAAE;UACzB;UACAjB,QAAQ,CAACU,YAAY,CAAC,GAAGhC,SAAS;UAClC;QACF;;QAEA;QACA;QACA;QACA,IAAI,CAACuC,SAAS,IAAKF,MAAM,IAAIF,UAAU,CAACZ,MAAM,GAAGa,OAAO,CAACb,MAAO,EAAE;UAChEW,QAAQ,GAAGhC,IAAI,CAACsC,SAAS,CAACJ,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,CAAC,EAAEvC,OAAO,CAAC;QAC7D,CAAC,MAAM;UACLqC,QAAQ,GAAGhC,IAAI,CAACsC,SAAS,CAACL,UAAU,EAAE,KAAK,EAAE,IAAI,EAAE,CAAC,EAAEtC,OAAO,CAAC;QAChE;QAEA4B,MAAM,GAAGvB,IAAI,CAACwB,aAAa,CAACQ,QAAQ,EAAEvC,SAAS,EAAED,SAAS,EAAEsC,YAAY,EAAEnC,OAAO,CAAC;QAElF,IAAIqC,QAAQ,CAACX,MAAM,GAAG,CAAC,IAAIZ,MAAM,IAAIc,MAAM,GAAG,CAAC,IAAIf,MAAM,EAAE;UACzD;UACA,OAAOP,IAAI,CAACwB,WAAW,CAACzB,IAAI,EAAEgC,QAAQ,CAACV,aAAa,EAAE7B,SAAS,EAAED,SAAS,EAAEQ,IAAI,CAAC0B,eAAe,CAAC,CAAC;QACpG,CAAC,MAAM;UACLN,QAAQ,CAACU,YAAY,CAAC,GAAGE,QAAQ;UACjC,IAAIA,QAAQ,CAACX,MAAM,GAAG,CAAC,IAAIZ,MAAM,EAAE;YACjCmB,qBAAqB,GAAGhB,IAAI,CAACC,GAAG,CAACe,qBAAqB,E
AAEE,YAAY,GAAG,CAAC,CAAC;UAC3E;UACA,IAAIP,MAAM,GAAG,CAAC,IAAIf,MAAM,EAAE;YACxBmB,qBAAqB,GAAGf,IAAI,CAACmB,GAAG,CAACJ,qBAAqB,EAAEG,YAAY,GAAG,CAAC,CAAC;UAC3E;QACF;MACF;MAEApB,UAAU,EAAE;IACd;;IAEA;IACA;IACA;IACA;IACA,IAAIX,QAAQ,EAAE;MACX,UAASwC,IAAIA,CAAA,EAAG;QACfnC,UAAU,CAAC,YAAW;UACpB,IAAIM,UAAU,GAAGC,aAAa,IAAIO,IAAI,CAACC,GAAG,CAAC,CAAC,GAAGF,mBAAmB,EAAE;YAClE,OAAOlB,QAAQ,CAAC,CAAC;UACnB;UAEA,IAAI,CAAC8B,cAAc,CAAC,CAAC,EAAE;YACrBU,IAAI,CAAC,CAAC;UACR;QACF,CAAC,EAAE,CAAC,CAAC;MACP,CAAC,EAAC,CAAC;IACL,CAAC,MAAM;MACL,OAAO7B,UAAU,IAAIC,aAAa,IAAIO,IAAI,CAACC,GAAG,CAAC,CAAC,IAAIF,mBAAmB,EAAE;QACvE,IAAIuB,GAAG,GAAGX,cAAc,CAAC,CAAC;QAC1B,IAAIW,GAAG,EAAE;UACP,OAAOA,GAAG;QACZ;MACF;IACF;EACF,CAAC;EAAA;EAAA;EAEDF,SAAS,WAAAA,UAACG,IAAI,EAAEC,KAAK,EAAEC,OAAO,EAAEC,SAAS,EAAEjD,OAAO,EAAE;IAClD,IAAIkD,IAAI,GAAGJ,IAAI,CAACnB,aAAa;IAC7B,IAAIuB,IAAI,IAAI,CAAClD,OAAO,CAACmD,iBAAiB,IAAID,IAAI,CAACH,KAAK,KAAKA,KAAK,IAAIG,IAAI,CAACF,OAAO,KAAKA,OAAO,EAAE;MAC1F,OAAO;QACLtB,MAAM,EAAEoB,IAAI,CAACpB,MAAM,GAAGuB,SAAS;QAC/BtB,aAAa,EAAE;UAACyB,KAAK,EAAEF,IAAI,CAACE,KAAK,GAAG,CAAC;UAAEL,KAAK,EAAEA,KAAK;UAAEC,OAAO,EAAEA,OAAO;UAAEK,iBAAiB,EAAEH,IAAI,CAACG;QAAkB;MACnH,CAAC;IACH,CAAC,MAAM;MACL,OAAO;QACL3B,MAAM,EAAEoB,IAAI,CAACpB,MAAM,GAAGuB,SAAS;QAC/BtB,aAAa,EAAE;UAACyB,KAAK,EAAE,CAAC;UAAEL,KAAK,EAAEA,KAAK;UAAEC,OAAO,EAAEA,OAAO;UAAEK,iBAAiB,EAAEH;QAAK;MACpF,CAAC;IACH;EACF,CAAC;EAAA;EAAA;EACDrB,aAAa,WAAAA,cAACQ,QAAQ,EAAEvC,SAAS,EAAED,SAAS,EAAEsC,YAAY,EAAEnC,OAAO,EAAE;IACnE,IAAIa,MAAM,GAAGf,SAAS,CAACI,MAAM;MACzBY,MAAM,GAAGjB,SAAS,CAACK,MAAM;MACzBwB,MAAM,GAAGW,QAAQ,CAACX,MAAM;MACxBE,MAAM,GAAGF,MAAM,GAAGS,YAAY;MAE9BmB,WAAW,GAAG,CAAC;IACnB,OAAO1B,MAAM,GAAG,CAAC,GAAGf,MAAM,IAAIa,MAAM,GAAG,CAAC,GAAGZ,MAAM,IAAI,IAAI,CAACyC,MAAM,CAAC1D,SAAS,CAAC6B,MAAM,GAAG,CAAC,CAAC,EAAE5B,SAAS,CAAC8B,MAAM,GAAG,CAAC,CAAC,EAAE5B,OAAO,CAAC,EAAE;MACvH4B,MAAM,EAAE;MACRF,MAAM,EAAE;MACR4B,WAAW,EAAE;MACb,IAAItD,OAAO,CAACmD,iBAAiB,EAAE;QAC7Bd,QAAQ,CAACV,aAAa,GAAG;UAACyB,KAAK,EAAE,CAAC;UAAEC,iBAAiB,EAAEhB,QAAQ,CAACV,aAAa;UAAEoB,KAAK,EAAE,KAAK;UAAEC,OAAO,EAAE;QAAK,CAAC;MAC9G;IACF;IAEA,IAAIM,WAAW,IAAI,CAACtD,OAAO,CAACmD,iBAAiB,EAAE;MAC7Cd,QAAQ,CAACV,aAAa,GAAG;QAACyB,KAAK,EAAEE,WAAW;QAAED,iBAAiB,EAAEhB,QAAQ,CAACV,aAAa;QAAEoB,KAAK,EAAE,KAAK;QAAEC,OAAO,EAAE;MAAK,CAAC;IACxH;IAEAX,QAAQ,CAACX,MAAM,GAAGA,MAAM;IACxB,OAAOE,MAAM;EACf,CAAC;EAAA;EAAA;EAED2B,MAAM,WAAAA,OAACC,IAAI,EAAEC,KAAK,EAAEzD,OAAO,EAAE;IAC3B,IAAIA,OAAO,CAAC0D,UAAU,EAAE;MACtB,OAAO1D,OAAO,CAAC0D,UAAU,CAACF,IAAI,EAAEC,KAAK,CAAC;IACxC,CAAC,MAAM;MACL,OAAOD,IAAI,KAAKC,KAAK,IACfzD,OAAO,CAAC2D,UAAU,IAAIH,IAAI,CAACI,WAAW,CAAC,CAAC,KAAKH,KAAK,CAACG,WAAW,CAAC,CAAE;IACzE;EACF,CAAC;EAAA;EAAA;EACDjD,WAAW,WAAAA,YAACkD,KAAK,EAAE;IACjB,IAAIhB,GAAG,GAAG,EAAE;IACZ,KAAK,IAAIiB,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGD,KAAK,CAAC3D,MAAM,EAAE4D,CAAC,EAAE,EAAE;MACrC,IAAID,KAAK,CAACC,CAAC,CAAC,EAAE;QACZjB,GAAG,CAACkB,IAAI,CAACF,KAAK,CAACC,CAAC,CAAC,CAAC;MACpB;IACF;IACA,OAAOjB,GAAG;EACZ,CAAC;EAAA;EAAA;EACDnC,SAAS,WAAAA,UAACH,KAAK,EAAE;IACf,OAAOA,KAAK;EACd,CAAC;EAAA;EAAA;EACDK,QAAQ,WAAAA,SAACL,KAAK,EAAE;IACd,OAAOyD,KAAK,CAACC,IAAI,CAAC1D,KAAK,CAAC;EAC1B,CAAC;EAAA;EAAA;EACD2D,IAAI,WAAAA,KAACC,KAAK,EAAE;IACV,OAAOA,KAAK,CAACD,IAAI,CAAC,EAAE,CAAC;EACvB,CAAC;EAAA;EAAA;EACD1D,WAAW,WAAAA,YAAC4D,aAAa,EAAE;IACzB,OAAOA,aAAa;EACtB;AACF,CAAC;AAED,SAAStC,WAAWA,CAAClC,IAAI,EAAE+B,aAAa,EAAE7B,SAAS,EAAED,SAAS,EAAEkC,eAAe,EAAE;EAC/E;EACA;EACA,IAAMsC,UAAU,GAAG,EAAE;EACrB,IAAIC,aAAa;EACjB,OAAO3C,aAAa,EAAE;IACpB0C,UAAU,CAACN,IAAI,CAACpC,aAAa,CAAC;IAC9B2C,aAAa,GAAG3C,aAAa,CAAC0B,iBAAiB;IAC/C,OAAO1B,aAAa,CAAC0B,iBAAiB;IACtC1B,aAAa,GAAG2C,aAAa;EAC/B;EACAD,UAAU,
CAACE,OAAO,CAAC,CAAC;EAEpB,IAAIC,YAAY,GAAG,CAAC;IAChBC,YAAY,GAAGJ,UAAU,CAACnE,MAAM;IAChC0B,MAAM,GAAG,CAAC;IACVF,MAAM,GAAG,CAAC;EAEd,OAAO8C,YAAY,GAAGC,YAAY,EAAED,YAAY,EAAE,EAAE;IAClD,IAAIE,SAAS,GAAGL,UAAU,CAACG,YAAY,CAAC;IACxC,IAAI,CAACE,SAAS,CAAC1B,OAAO,EAAE;MACtB,IAAI,CAAC0B,SAAS,CAAC3B,KAAK,IAAIhB,eAAe,EAAE;QACvC,IAAIxB,KAAK,GAAGT,SAAS,CAAC6E,KAAK,CAAC/C,MAAM,EAAEA,MAAM,GAAG8C,SAAS,CAACtB,KAAK,CAAC;QAC7D7C,KAAK,GAAGA,KAAK,CAACqE,GAAG,CAAC,UAASrE,KAAK,EAAEuD,CAAC,EAAE;UACnC,IAAIe,QAAQ,GAAGhF,SAAS,CAAC6B,MAAM,GAAGoC,CAAC,CAAC;UACpC,OAAOe,QAAQ,CAAC3E,MAAM,GAAGK,KAAK,CAACL,MAAM,GAAG2E,QAAQ,GAAGtE,KAAK;QAC1D,CAAC,CAAC;QAEFmE,SAAS,CAACnE,KAAK,GAAGX,IAAI,CAACsE,IAAI,CAAC3D,KAAK,CAAC;MACpC,CAAC,MAAM;QACLmE,SAAS,CAACnE,KAAK,GAAGX,IAAI,CAACsE,IAAI,CAACpE,SAAS,CAAC6E,KAAK,CAAC/C,MAAM,EAAEA,MAAM,GAAG8C,SAAS,CAACtB,KAAK,CAAC,CAAC;MAChF;MACAxB,MAAM,IAAI8C,SAAS,CAACtB,KAAK;;MAEzB;MACA,IAAI,CAACsB,SAAS,CAAC3B,KAAK,EAAE;QACpBrB,MAAM,IAAIgD,SAAS,CAACtB,KAAK;MAC3B;IACF,CAAC,MAAM;MACLsB,SAAS,CAACnE,KAAK,GAAGX,IAAI,CAACsE,IAAI,CAACrE,SAAS,CAAC8E,KAAK,CAACjD,MAAM,EAAEA,MAAM,GAAGgD,SAAS,CAACtB,KAAK,CAAC,CAAC;MAC9E1B,MAAM,IAAIgD,SAAS,CAACtB,KAAK;IAC3B;EACF;EAEA,OAAOiB,UAAU;AACnB","ignoreList":[]}
diff --git a/node_modules/diff/lib/diff/character.js b/node_modules/diff/lib/diff/character.js
deleted file mode 100644
index 6a3cf1c4d76d8..0000000000000
--- a/node_modules/diff/lib/diff/character.js
+++ /dev/null
@@ -1,33 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.characterDiff = void 0;
-exports.diffChars = diffChars;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-var characterDiff =
-/*istanbul ignore start*/
-exports.characterDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-function diffChars(oldStr, newStr, options) {
-  return characterDiff.diff(oldStr, newStr, options);
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfYmFzZSIsIl9pbnRlcm9wUmVxdWlyZURlZmF1bHQiLCJyZXF1aXJlIiwib2JqIiwiX19lc01vZHVsZSIsImNoYXJhY3RlckRpZmYiLCJleHBvcnRzIiwiRGlmZiIsImRpZmZDaGFycyIsIm9sZFN0ciIsIm5ld1N0ciIsIm9wdGlvbnMiLCJkaWZmIl0sInNvdXJjZXMiOlsiLi4vLi4vc3JjL2RpZmYvY2hhcmFjdGVyLmpzIl0sInNvdXJjZXNDb250ZW50IjpbImltcG9ydCBEaWZmIGZyb20gJy4vYmFzZSc7XG5cbmV4cG9ydCBjb25zdCBjaGFyYWN0ZXJEaWZmID0gbmV3IERpZmYoKTtcbmV4cG9ydCBmdW5jdGlvbiBkaWZmQ2hhcnMob2xkU3RyLCBuZXdTdHIsIG9wdGlvbnMpIHsgcmV0dXJuIGNoYXJhY3RlckRpZmYuZGlmZihvbGRTdHIsIG5ld1N0ciwgb3B0aW9ucyk7IH1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O0FBQUE7QUFBQTtBQUFBQSxLQUFBLEdBQUFDLHNCQUFBLENBQUFDLE9BQUE7QUFBQTtBQUFBO0FBQTBCLG1DQUFBRCx1QkFBQUUsR0FBQSxXQUFBQSxHQUFBLElBQUFBLEdBQUEsQ0FBQUMsVUFBQSxHQUFBRCxHQUFBLGdCQUFBQSxHQUFBO0FBQUE7QUFFbkIsSUFBTUUsYUFBYTtBQUFBO0FBQUFDLE9BQUEsQ0FBQUQsYUFBQTtBQUFBO0FBQUc7QUFBSUU7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUEsQ0FBSSxDQUFDLENBQUM7QUFDaEMsU0FBU0MsU0FBU0EsQ0FBQ0MsTUFBTSxFQUFFQyxNQUFNLEVBQUVDLE9BQU8sRUFBRTtFQUFFLE9BQU9OLGFBQWEsQ0FBQ08sSUFBSSxDQUFDSCxNQUFNLEVBQUVDLE1BQU0sRUFBRUMsT0FBTyxDQUFDO0FBQUUiLCJpZ25vcmVMaXN0IjpbXX0=
diff --git a/node_modules/diff/lib/diff/css.js b/node_modules/diff/lib/diff/css.js
deleted file mode 100644
index 6321827818347..0000000000000
--- a/node_modules/diff/lib/diff/css.js
+++ /dev/null
@@ -1,36 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.cssDiff = void 0;
-exports.diffCss = diffCss;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-var cssDiff =
-/*istanbul ignore start*/
-exports.cssDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-cssDiff.tokenize = function (value) {
-  return value.split(/([{}:;,]|\s+)/);
-};
-function diffCss(oldStr, newStr, callback) {
-  return cssDiff.diff(oldStr, newStr, callback);
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfYmFzZSIsIl9pbnRlcm9wUmVxdWlyZURlZmF1bHQiLCJyZXF1aXJlIiwib2JqIiwiX19lc01vZHVsZSIsImNzc0RpZmYiLCJleHBvcnRzIiwiRGlmZiIsInRva2VuaXplIiwidmFsdWUiLCJzcGxpdCIsImRpZmZDc3MiLCJvbGRTdHIiLCJuZXdTdHIiLCJjYWxsYmFjayIsImRpZmYiXSwic291cmNlcyI6WyIuLi8uLi9zcmMvZGlmZi9jc3MuanMiXSwic291cmNlc0NvbnRlbnQiOlsiaW1wb3J0IERpZmYgZnJvbSAnLi9iYXNlJztcblxuZXhwb3J0IGNvbnN0IGNzc0RpZmYgPSBuZXcgRGlmZigpO1xuY3NzRGlmZi50b2tlbml6ZSA9IGZ1bmN0aW9uKHZhbHVlKSB7XG4gIHJldHVybiB2YWx1ZS5zcGxpdCgvKFt7fTo7LF18XFxzKykvKTtcbn07XG5cbmV4cG9ydCBmdW5jdGlvbiBkaWZmQ3NzKG9sZFN0ciwgbmV3U3RyLCBjYWxsYmFjaykgeyByZXR1cm4gY3NzRGlmZi5kaWZmKG9sZFN0ciwgbmV3U3RyLCBjYWxsYmFjayk7IH1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O0FBQUE7QUFBQTtBQUFBQSxLQUFBLEdBQUFDLHNCQUFBLENBQUFDLE9BQUE7QUFBQTtBQUFBO0FBQTBCLG1DQUFBRCx1QkFBQUUsR0FBQSxXQUFBQSxHQUFBLElBQUFBLEdBQUEsQ0FBQUMsVUFBQSxHQUFBRCxHQUFBLGdCQUFBQSxHQUFBO0FBQUE7QUFFbkIsSUFBTUUsT0FBTztBQUFBO0FBQUFDLE9BQUEsQ0FBQUQsT0FBQTtBQUFBO0FBQUc7QUFBSUU7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUEsQ0FBSSxDQUFDLENBQUM7QUFDakNGLE9BQU8sQ0FBQ0csUUFBUSxHQUFHLFVBQVNDLEtBQUssRUFBRTtFQUNqQyxPQUFPQSxLQUFLLENBQUNDLEtBQUssQ0FBQyxlQUFlLENBQUM7QUFDckMsQ0FBQztBQUVNLFNBQVNDLE9BQU9BLENBQUNDLE1BQU0sRUFBRUMsTUFBTSxFQUFFQyxRQUFRLEVBQUU7RUFBRSxPQUFPVCxPQUFPLENBQUNVLElBQUksQ0FBQ0gsTUFBTSxFQUFFQyxNQUFNLEVBQUVDLFFBQVEsQ0FBQztBQUFFIiwiaWdub3JlTGlzdCI6W119
diff --git a/node_modules/diff/lib/diff/json.js b/node_modules/diff/lib/diff/json.js
deleted file mode 100644
index a3f07480ee7dd..0000000000000
--- a/node_modules/diff/lib/diff/json.js
+++ /dev/null
@@ -1,143 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.canonicalize = canonicalize;
-exports.diffJson = diffJson;
-exports.jsonDiff = void 0;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_line = require("./line")
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
-/*istanbul ignore end*/
-var jsonDiff =
-/*istanbul ignore start*/
-exports.jsonDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-// Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
-// dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
-jsonDiff.useLongestToken = true;
-jsonDiff.tokenize =
-/*istanbul ignore start*/
-_line
-/*istanbul ignore end*/
-.
-/*istanbul ignore start*/
-lineDiff
-/*istanbul ignore end*/
-.tokenize;
-jsonDiff.castInput = function (value, options) {
-  var
-    /*istanbul ignore start*/
-    /*istanbul ignore end*/
-    undefinedReplacement = options.undefinedReplacement,
-    /*istanbul ignore start*/
-    _options$stringifyRep =
-    /*istanbul ignore end*/
-    options.stringifyReplacer,
-    /*istanbul ignore start*/
-    /*istanbul ignore end*/
-    stringifyReplacer = _options$stringifyRep === void 0 ? function (k, v)
-    /*istanbul ignore start*/
-    {
-      return (
-        /*istanbul ignore end*/
-        typeof v === 'undefined' ? undefinedReplacement : v
-      );
-    } : _options$stringifyRep;
-  return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), stringifyReplacer, '  ');
-};
-jsonDiff.equals = function (left, right, options) {
-  return (
-    /*istanbul ignore start*/
-    _base
-    /*istanbul ignore end*/
-    [
-    /*istanbul ignore start*/
-    "default"
-    /*istanbul ignore end*/
-    ].prototype.equals.call(jsonDiff, left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options)
-  );
-};
-function diffJson(oldObj, newObj, options) {
-  return jsonDiff.diff(oldObj, newObj, options);
-}
-
-// This function handles the presence of circular references by bailing out when encountering an
-// object that is already on the "stack" of items being processed. Accepts an optional replacer
-function canonicalize(obj, stack, replacementStack, replacer, key) {
-  stack = stack || [];
-  replacementStack = replacementStack || [];
-  if (replacer) {
-    obj = replacer(key, obj);
-  }
-  var i;
-  for (i = 0; i < stack.length; i += 1) {
-    if (stack[i] === obj) {
-      return replacementStack[i];
-    }
-  }
-  var canonicalizedObj;
-  if ('[object Array]' === Object.prototype.toString.call(obj)) {
-    stack.push(obj);
-    canonicalizedObj = new Array(obj.length);
-    replacementStack.push(canonicalizedObj);
-    for (i = 0; i < obj.length; i += 1) {
-      canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, key);
-    }
-    stack.pop();
-    replacementStack.pop();
-    return canonicalizedObj;
-  }
-  if (obj && obj.toJSON) {
-    obj = obj.toJSON();
-  }
-  if (
-  /*istanbul ignore start*/
-  _typeof(
-  /*istanbul ignore end*/
-  obj) === 'object' && obj !== null) {
-    stack.push(obj);
-    canonicalizedObj = {};
-    replacementStack.push(canonicalizedObj);
-    var sortedKeys = [],
-      _key;
-    for (_key in obj) {
-      /* istanbul ignore else */
-      if (Object.prototype.hasOwnProperty.call(obj, _key)) {
-        sortedKeys.push(_key);
-      }
-    }
-    sortedKeys.sort();
-    for (i = 0; i < sortedKeys.length; i += 1) {
-      _key = sortedKeys[i];
-      canonicalizedObj[_key] = canonicalize(obj[_key], stack, replacementStack, replacer, _key);
-    }
-    stack.pop();
-    replacementStack.pop();
-  } else {
-    canonicalizedObj = obj;
-  }
-  return canonicalizedObj;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["_base","_interopRequireDefault","require","_line","obj","__esModule","_typeof","o","Symbol","iterator","constructor","prototype","jsonDiff","exports","Diff","useLongestToken","tokenize","lineDiff","castInput","value","options","undefinedReplacement","_options$stringifyRep","stringifyReplacer","k","v","JSON","stringify","canonicalize","equals","left","right","call","replace","diffJson","oldObj","newObj","diff","stack","replacementStack","replacer","key","i","length","canonicalizedObj","Object","toString","push","Array","pop","toJSON","sortedKeys","hasOwnProperty","sort"],"sources":["../../src/diff/json.js"],"sourcesContent":["import Diff from './base';\nimport {lineDiff} from './line';\n\nexport const jsonDiff = new Diff();\n// Discriminate between two lines of pretty-printed, serialized JSON where one of them has a\n// dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:\njsonDiff.useLongestToken = true;\n\njsonDiff.tokenize = lineDiff.tokenize;\njsonDiff.castInput = function(value, options) {\n  const {undefinedReplacement, stringifyReplacer = (k, v) => typeof v === 'undefined' ? undefinedReplacement : v} = options;\n\n  return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), stringifyReplacer, '  ');\n};\njsonDiff.equals = function(left, right, options) {\n  return Diff.prototype.equals.call(jsonDiff, left.replace(/,([\\r\\n])/g, '$1'), right.replace(/,([\\r\\n])/g, '$1'), options);\n};\n\nexport function diffJson(oldObj, newObj, options) { return jsonDiff.diff(oldObj, newObj, options); }\n\n// This function handles the presence of circular references by bailing out when encountering an\n// object that is already on the \"stack\" of items being processed. 
Accepts an optional replacer\nexport function canonicalize(obj, stack, replacementStack, replacer, key) {\n  stack = stack || [];\n  replacementStack = replacementStack || [];\n\n  if (replacer) {\n    obj = replacer(key, obj);\n  }\n\n  let i;\n\n  for (i = 0; i < stack.length; i += 1) {\n    if (stack[i] === obj) {\n      return replacementStack[i];\n    }\n  }\n\n  let canonicalizedObj;\n\n  if ('[object Array]' === Object.prototype.toString.call(obj)) {\n    stack.push(obj);\n    canonicalizedObj = new Array(obj.length);\n    replacementStack.push(canonicalizedObj);\n    for (i = 0; i < obj.length; i += 1) {\n      canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, key);\n    }\n    stack.pop();\n    replacementStack.pop();\n    return canonicalizedObj;\n  }\n\n  if (obj && obj.toJSON) {\n    obj = obj.toJSON();\n  }\n\n  if (typeof obj === 'object' && obj !== null) {\n    stack.push(obj);\n    canonicalizedObj = {};\n    replacementStack.push(canonicalizedObj);\n    let sortedKeys = [],\n        key;\n    for (key in obj) {\n      /* istanbul ignore else */\n      if (Object.prototype.hasOwnProperty.call(obj, key)) {\n        sortedKeys.push(key);\n      }\n    }\n    sortedKeys.sort();\n    for (i = 0; i < sortedKeys.length; i += 1) {\n      key = sortedKeys[i];\n      canonicalizedObj[key] = canonicalize(obj[key], stack, replacementStack, replacer, key);\n    }\n    stack.pop();\n    replacementStack.pop();\n  } else {\n    canonicalizedObj = obj;\n  }\n  return canonicalizedObj;\n}\n"],"mappings":";;;;;;;;;;AAAA;AAAA;AAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AAAA;AAAA;AACA;AAAA;AAAAC,KAAA,GAAAD,OAAA;AAAA;AAAA;AAAgC,mCAAAD,uBAAAG,GAAA,WAAAA,GAAA,IAAAA,GAAA,CAAAC,UAAA,GAAAD,GAAA,gBAAAA,GAAA;AAAA,SAAAE,QAAAC,CAAA,sCAAAD,OAAA,wBAAAE,MAAA,uBAAAA,MAAA,CAAAC,QAAA,aAAAF,CAAA,kBAAAA,CAAA,gBAAAA,CAAA,WAAAA,CAAA,yBAAAC,MAAA,IAAAD,CAAA,CAAAG,WAAA,KAAAF,MAAA,IAAAD,CAAA,KAAAC,MAAA,CAAAG,SAAA,qBAAAJ,CAAA,KAAAD,OAAA,CAAAC,CAAA;AAAA;AAEzB,IAAMK,QAAQ;AAAA;AAAAC,OAAA,CAAAD,QAAA;AAAA;AAAG;AAAIE;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA,CAAI,CAAC,CAAC;AAClC;AACA;AACAF,QAAQ,CAACG,eAAe,GAAG,IAAI;AAE/BH,QAAQ,CAACI,QAAQ;AAAGC;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAQ;AAAA,CAACD,QAAQ;AACrCJ,QAAQ,CAACM,SAAS,GAAG,UAASC,KAAK,EAAEC,OAAO,EAAE;EAC5C;IAAA;IAAA;IAAOC,oBAAoB,GAAuFD,OAAO,CAAlHC,oBAAoB;IAAA;IAAAC,qBAAA;IAAA;IAAuFF,OAAO,CAA5FG,iBAAiB;IAAA;IAAA;IAAjBA,iBAAiB,GAAAD,qBAAA,cAAG,UAACE,CAAC,EAAEC,CAAC;IAAA;IAAA;MAAA;QAAA;QAAK,OAAOA,CAAC,KAAK,WAAW,GAAGJ,oBAAoB,GAAGI;MAAC;IAAA,IAAAH,qBAAA;EAE9G,OAAO,OAAOH,KAAK,KAAK,QAAQ,GAAGA,KAAK,GAAGO,IAAI,CAACC,SAAS,CAACC,YAAY,CAACT,KAAK,EAAE,IAAI,EAAE,IAAI,EAAEI,iBAAiB,CAAC,EAAEA,iBAAiB,EAAE,IAAI,CAAC;AACxI,CAAC;AACDX,QAAQ,CAACiB,MAAM,GAAG,UAASC,IAAI,EAAEC,KAAK,EAAEX,OAAO,EAAE;EAC/C,OAAON;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,CAAI,CAACH,SAAS,CAACkB,MAAM,CAACG,IAAI,CAACpB,QAAQ,EAAEkB,IAAI,CAACG,OAAO,CAAC,YAAY,EAAE,IAAI,CAAC,EAAEF,KAAK,CAACE,OAAO,CAAC,YAAY,EAAE,IAAI,CAAC,EAAEb,OAAO;EAAC;AAC3H,CAAC;AAEM,SAASc,QAAQA,CAACC,MAAM,EAAEC,MAAM,EAAEhB,OAAO,EAAE;EAAE,OAAOR,QAAQ,CAACyB,IAAI,CAACF,MAAM,EAAEC,MAAM,EAAEhB,OAAO,CAAC;AAAE;;AAEnG;AACA;AACO,SAASQ,YAAYA,CAACxB,GAAG,EAAEkC,KAAK,EAAEC,gBAAgB,EAAEC,QAAQ,EAAEC,GAAG,EAAE;EACxEH,KAAK,GAAGA,KAAK,IAAI,EAAE;EACnBC,gBAAgB,GAAGA,gBAAgB,IAAI,EAAE;EAEzC,IAAIC,QAAQ,EAAE;IACZpC,GAAG,GAAGoC,QAAQ,CAACC,GAAG,EAAErC,GAAG,CAAC;EAC1B;EAEA,IAAIsC,CAAC;EAEL,KAAKA,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGJ,KAAK,CAACK,MAAM,EAAED,CAAC,IAAI,CAAC,EAAE;IACpC,IAAIJ,KAAK,CAACI,CAAC,CAAC,KAAKtC,GAAG,EAAE;MACpB,OAAOmC,gBAAgB,CAACG,CAAC,CAAC;IAC5B;EA
CF;EAEA,IAAIE,gBAAgB;EAEpB,IAAI,gBAAgB,KAAKC,MAAM,CAAClC,SAAS,CAACmC,QAAQ,CAACd,IAAI,CAAC5B,GAAG,CAAC,EAAE;IAC5DkC,KAAK,CAACS,IAAI,CAAC3C,GAAG,CAAC;IACfwC,gBAAgB,GAAG,IAAII,KAAK,CAAC5C,GAAG,CAACuC,MAAM,CAAC;IACxCJ,gBAAgB,CAACQ,IAAI,CAACH,gBAAgB,CAAC;IACvC,KAAKF,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGtC,GAAG,CAACuC,MAAM,EAAED,CAAC,IAAI,CAAC,EAAE;MAClCE,gBAAgB,CAACF,CAAC,CAAC,GAAGd,YAAY,CAACxB,GAAG,CAACsC,CAAC,CAAC,EAAEJ,KAAK,EAAEC,gBAAgB,EAAEC,QAAQ,EAAEC,GAAG,CAAC;IACpF;IACAH,KAAK,CAACW,GAAG,CAAC,CAAC;IACXV,gBAAgB,CAACU,GAAG,CAAC,CAAC;IACtB,OAAOL,gBAAgB;EACzB;EAEA,IAAIxC,GAAG,IAAIA,GAAG,CAAC8C,MAAM,EAAE;IACrB9C,GAAG,GAAGA,GAAG,CAAC8C,MAAM,CAAC,CAAC;EACpB;EAEA;EAAI;EAAA5C,OAAA;EAAA;EAAOF,GAAG,MAAK,QAAQ,IAAIA,GAAG,KAAK,IAAI,EAAE;IAC3CkC,KAAK,CAACS,IAAI,CAAC3C,GAAG,CAAC;IACfwC,gBAAgB,GAAG,CAAC,CAAC;IACrBL,gBAAgB,CAACQ,IAAI,CAACH,gBAAgB,CAAC;IACvC,IAAIO,UAAU,GAAG,EAAE;MACfV,IAAG;IACP,KAAKA,IAAG,IAAIrC,GAAG,EAAE;MACf;MACA,IAAIyC,MAAM,CAAClC,SAAS,CAACyC,cAAc,CAACpB,IAAI,CAAC5B,GAAG,EAAEqC,IAAG,CAAC,EAAE;QAClDU,UAAU,CAACJ,IAAI,CAACN,IAAG,CAAC;MACtB;IACF;IACAU,UAAU,CAACE,IAAI,CAAC,CAAC;IACjB,KAAKX,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGS,UAAU,CAACR,MAAM,EAAED,CAAC,IAAI,CAAC,EAAE;MACzCD,IAAG,GAAGU,UAAU,CAACT,CAAC,CAAC;MACnBE,gBAAgB,CAACH,IAAG,CAAC,GAAGb,YAAY,CAACxB,GAAG,CAACqC,IAAG,CAAC,EAAEH,KAAK,EAAEC,gBAAgB,EAAEC,QAAQ,EAAEC,IAAG,CAAC;IACxF;IACAH,KAAK,CAACW,GAAG,CAAC,CAAC;IACXV,gBAAgB,CAACU,GAAG,CAAC,CAAC;EACxB,CAAC,MAAM;IACLL,gBAAgB,GAAGxC,GAAG;EACxB;EACA,OAAOwC,gBAAgB;AACzB","ignoreList":[]}
diff --git a/node_modules/diff/lib/diff/line.js b/node_modules/diff/lib/diff/line.js
deleted file mode 100644
index 71f3f2471d109..0000000000000
--- a/node_modules/diff/lib/diff/line.js
+++ /dev/null
@@ -1,121 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.diffLines = diffLines;
-exports.diffTrimmedLines = diffTrimmedLines;
-exports.lineDiff = void 0;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_params = require("../util/params")
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-var lineDiff =
-/*istanbul ignore start*/
-exports.lineDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-lineDiff.tokenize = function (value, options) {
-  if (options.stripTrailingCr) {
-    // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
-    value = value.replace(/\r\n/g, '\n');
-  }
-  var retLines = [],
-    linesAndNewlines = value.split(/(\n|\r\n)/);
-
-  // Ignore the final empty token that occurs if the string ends with a new line
-  if (!linesAndNewlines[linesAndNewlines.length - 1]) {
-    linesAndNewlines.pop();
-  }
-
-  // Merge the content and line separators into single tokens
-  for (var i = 0; i < linesAndNewlines.length; i++) {
-    var line = linesAndNewlines[i];
-    if (i % 2 && !options.newlineIsToken) {
-      retLines[retLines.length - 1] += line;
-    } else {
-      retLines.push(line);
-    }
-  }
-  return retLines;
-};
-lineDiff.equals = function (left, right, options) {
-  // If we're ignoring whitespace, we need to normalise lines by stripping
-  // whitespace before checking equality. (This has an annoying interaction
-  // with newlineIsToken that requires special handling: if newlines get their
-  // own token, then we DON'T want to trim the *newline* tokens down to empty
-  // strings, since this would cause us to treat whitespace-only line content
-  // as equal to a separator between lines, which would be weird and
-  // inconsistent with the documented behavior of the options.)
-  if (options.ignoreWhitespace) {
-    if (!options.newlineIsToken || !left.includes('\n')) {
-      left = left.trim();
-    }
-    if (!options.newlineIsToken || !right.includes('\n')) {
-      right = right.trim();
-    }
-  } else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
-    if (left.endsWith('\n')) {
-      left = left.slice(0, -1);
-    }
-    if (right.endsWith('\n')) {
-      right = right.slice(0, -1);
-    }
-  }
-  return (
-    /*istanbul ignore start*/
-    _base
-    /*istanbul ignore end*/
-    [
-    /*istanbul ignore start*/
-    "default"
-    /*istanbul ignore end*/
-    ].prototype.equals.call(this, left, right, options)
-  );
-};
-function diffLines(oldStr, newStr, callback) {
-  return lineDiff.diff(oldStr, newStr, callback);
-}
-
-// Kept for backwards compatibility. This is a rather arbitrary wrapper method
-// that just calls `diffLines` with `ignoreWhitespace: true`. It's confusing to
-// have two ways to do exactly the same thing in the API, so we no longer
-// document this one (library users should explicitly use `diffLines` with
-// `ignoreWhitespace: true` instead) but we keep it around to maintain
-// compatibility with code that used old versions.
-function diffTrimmedLines(oldStr, newStr, callback) {
-  var options =
-  /*istanbul ignore start*/
-  (0,
-  /*istanbul ignore end*/
-  /*istanbul ignore start*/
-  _params
-  /*istanbul ignore end*/
-  .
-  /*istanbul ignore start*/
-  generateOptions)
-  /*istanbul ignore end*/
-  (callback, {
-    ignoreWhitespace: true
-  });
-  return lineDiff.diff(oldStr, newStr, options);
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfYmFzZSIsIl9pbnRlcm9wUmVxdWlyZURlZmF1bHQiLCJyZXF1aXJlIiwiX3BhcmFtcyIsIm9iaiIsIl9fZXNNb2R1bGUiLCJsaW5lRGlmZiIsImV4cG9ydHMiLCJEaWZmIiwidG9rZW5pemUiLCJ2YWx1ZSIsIm9wdGlvbnMiLCJzdHJpcFRyYWlsaW5nQ3IiLCJyZXBsYWNlIiwicmV0TGluZXMiLCJsaW5lc0FuZE5ld2xpbmVzIiwic3BsaXQiLCJsZW5ndGgiLCJwb3AiLCJpIiwibGluZSIsIm5ld2xpbmVJc1Rva2VuIiwicHVzaCIsImVxdWFscyIsImxlZnQiLCJyaWdodCIsImlnbm9yZVdoaXRlc3BhY2UiLCJpbmNsdWRlcyIsInRyaW0iLCJpZ25vcmVOZXdsaW5lQXRFb2YiLCJlbmRzV2l0aCIsInNsaWNlIiwicHJvdG90eXBlIiwiY2FsbCIsImRpZmZMaW5lcyIsIm9sZFN0ciIsIm5ld1N0ciIsImNhbGxiYWNrIiwiZGlmZiIsImRpZmZUcmltbWVkTGluZXMiLCJnZW5lcmF0ZU9wdGlvbnMiXSwic291cmNlcyI6WyIuLi8uLi9zcmMvZGlmZi9saW5lLmpzIl0sInNvdXJjZXNDb250ZW50IjpbImltcG9ydCBEaWZmIGZyb20gJy4vYmFzZSc7XG5pbXBvcnQge2dlbmVyYXRlT3B0aW9uc30gZnJvbSAnLi4vdXRpbC9wYXJhbXMnO1xuXG5leHBvcnQgY29uc3QgbGluZURpZmYgPSBuZXcgRGlmZigpO1xubGluZURpZmYudG9rZW5pemUgPSBmdW5jdGlvbih2YWx1ZSwgb3B0aW9ucykge1xuICBpZihvcHRpb25zLnN0cmlwVHJhaWxpbmdDcikge1xuICAgIC8vIHJlbW92ZSBvbmUgXFxyIGJlZm9yZSBcXG4gdG8gbWF0Y2ggR05VIGRpZmYncyAtLXN0cmlwLXRyYWlsaW5nLWNyIGJlaGF2aW9yXG4gICAgdmFsdWUgPSB2YWx1ZS5yZXBsYWNlKC9cXHJcXG4vZywgJ1xcbicpO1xuICB9XG5cbiAgbGV0IHJldExpbmVzID0gW10sXG4gICAgICBsaW5lc0FuZE5ld2xpbmVzID0gdmFsdWUuc3BsaXQoLyhcXG58XFxyXFxuKS8pO1xuXG4gIC8vIElnbm9yZSB0aGUgZmluYWwgZW1wdHkgdG9rZW4gdGhhdCBvY2N1cnMgaWYgdGhlIHN0cmluZyBlbmRzIHdpdGggYSBuZXcgbGluZVxuICBpZiAoIWxpbmVzQW5kTmV3bGluZXNbbGluZXNBbmROZXdsaW5lcy5sZW5ndGggLSAxXSkge1xuICAgIGxpbmVzQW5kTmV3bGluZXMucG9wKCk7XG4gIH1cblxuICAvLyBNZXJnZSB0aGUgY29udGVudCBhbmQgbGluZSBzZXBhcmF0b3JzIGludG8gc2luZ2xlIHRva2Vuc1xuICBmb3IgKGxldCBpID0gMDsgaSA8IGxpbmVzQW5kTmV3bGluZXMubGVuZ3RoOyBpKyspIHtcbiAgICBsZXQgbGluZSA9IGxpbmVzQW5kTmV3bGluZXNbaV07XG5cbiAgICBpZiAoaSAlIDIgJiYgIW9wdGlvbnMubmV3bGluZUlzVG9rZW4pIHtcbiAgICAgIHJldExpbmVzW3JldExpbmVzLmxlbmd0aCAtIDFdICs9IGxpbmU7XG4gICAgfSBlbHNlIHtcbiAgICAgIHJldExpbmVzLnB1c2gobGluZSk7XG4gICAgfVxuICB9XG5cbiAgcmV0dXJuIHJldExpbmVzO1xufTtcblxubGluZURpZmYuZXF1YWxzID0gZnVuY3Rpb24obGVmdCwgcmlnaHQsIG9wdGlvbnMpIHtcbiAgLy8gSWYgd2UncmUgaWdub3Jpbmcgd2hpdGVzcGFjZSwgd2UgbmVlZCB0byBub3JtYWxpc2UgbGluZXMgYnkgc3RyaXBwaW5nXG4gIC8vIHdoaXRlc3BhY2UgYmVmb3JlIGNoZWNraW5nIGVxdWFsaXR5LiAoVGhpcyBoYXMgYW4gYW5ub3lpbmcgaW50ZXJhY3Rpb25cbiAgLy8gd2l0aCBuZXdsaW5lSXNUb2tlbiB0aGF0IHJlcXVpcmVzIHNwZWNpYWwgaGFuZGxpbmc6IGlmIG5ld2xpbmVzIGdldCB0aGVpclxuICAvLyBvd24gdG9rZW4sIHRoZW4gd2UgRE9OJ1Qgd2FudCB0byB0cmltIHRoZSAqbmV3bGluZSogdG9rZW5zIGRvd24gdG8gZW1wdHlcbiAgLy8gc3RyaW5ncywgc2luY2UgdGhpcyB3b3VsZCBjYXVzZSB1cyB0byB0cmVhdCB3aGl0ZXNwYWNlLW9ubHkgbGluZSBjb250ZW50XG4gIC8vIGFzIGVxdWFsIHRvIGEgc2VwYXJhdG9yIGJldHdlZW4gbGluZXMsIHdoaWNoIHdvdWxkIGJlIHdlaXJkIGFuZFxuICAvLyBpbmNvbnNpc3RlbnQgd2l0aCB0aGUgZG9jdW1lbnRlZCBiZWhhdmlvciBvZiB0aGUgb3B0aW9ucy4pXG4gIGlmIChvcHRpb25zLmlnbm9yZVdoaXRlc3BhY2UpIHtcbiAgICBpZiAoIW9wdGlvbnMubmV3bGluZUlzVG9rZW4gfHwgIWxlZnQuaW5jbHVkZXMoJ1xcbicpKSB7XG4gICAgICBsZWZ0ID0gbGVmdC50cmltKCk7XG4gICAgfVxuICAgIGlmICghb3B0aW9ucy5uZXdsaW5lSXNUb2tlbiB8fCAhcmlnaHQuaW5jbHVkZXMoJ1xcbicpKSB7XG4gICAgICByaWdodCA9IHJpZ2h0LnRyaW0oKTtcbiAgICB9XG4gIH0gZWxzZSBpZiAob3B0aW9ucy5pZ25vcmVOZXdsaW5lQXRFb2YgJiYgIW9wdGlvbnMubmV3bGluZUlzVG9rZW4pIHtcbiAgICBpZiAobGVmdC5lbmRzV2l0aCgnXFxuJykpIHtcbiAgICAgIGxlZnQgPSBsZWZ0LnNsaWNlKDAsIC0xKTtcbiAgICB9XG4gICAgaWYgKHJpZ2h0LmVuZHNXaXRoKCdcXG4nKSkge1xuICAgICAgcmlnaHQgPSByaWdodC5zbGljZSgwLCAtMSk7XG4gICAgfVxuICB9XG4gIHJldHVybiBEaWZmLnByb3RvdHlwZS5lcXVhbHMuY2FsbCh0aGlzLCBsZWZ0LCByaWdodCwgb3B0aW9ucyk7XG59O1xuXG5leHBvcnQgZnVuY3Rpb24gZGlmZkxpbmVzKG9sZFN0ciwgbmV3U3RyLCBjYWxsYmFjaykgeyByZXR1cm4gbGluZURpZm
YuZGlmZihvbGRTdHIsIG5ld1N0ciwgY2FsbGJhY2spOyB9XG5cbi8vIEtlcHQgZm9yIGJhY2t3YXJkcyBjb21wYXRpYmlsaXR5LiBUaGlzIGlzIGEgcmF0aGVyIGFyYml0cmFyeSB3cmFwcGVyIG1ldGhvZFxuLy8gdGhhdCBqdXN0IGNhbGxzIGBkaWZmTGluZXNgIHdpdGggYGlnbm9yZVdoaXRlc3BhY2U6IHRydWVgLiBJdCdzIGNvbmZ1c2luZyB0b1xuLy8gaGF2ZSB0d28gd2F5cyB0byBkbyBleGFjdGx5IHRoZSBzYW1lIHRoaW5nIGluIHRoZSBBUEksIHNvIHdlIG5vIGxvbmdlclxuLy8gZG9jdW1lbnQgdGhpcyBvbmUgKGxpYnJhcnkgdXNlcnMgc2hvdWxkIGV4cGxpY2l0bHkgdXNlIGBkaWZmTGluZXNgIHdpdGhcbi8vIGBpZ25vcmVXaGl0ZXNwYWNlOiB0cnVlYCBpbnN0ZWFkKSBidXQgd2Uga2VlcCBpdCBhcm91bmQgdG8gbWFpbnRhaW5cbi8vIGNvbXBhdGliaWxpdHkgd2l0aCBjb2RlIHRoYXQgdXNlZCBvbGQgdmVyc2lvbnMuXG5leHBvcnQgZnVuY3Rpb24gZGlmZlRyaW1tZWRMaW5lcyhvbGRTdHIsIG5ld1N0ciwgY2FsbGJhY2spIHtcbiAgbGV0IG9wdGlvbnMgPSBnZW5lcmF0ZU9wdGlvbnMoY2FsbGJhY2ssIHtpZ25vcmVXaGl0ZXNwYWNlOiB0cnVlfSk7XG4gIHJldHVybiBsaW5lRGlmZi5kaWZmKG9sZFN0ciwgbmV3U3RyLCBvcHRpb25zKTtcbn1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7OztBQUFBO0FBQUE7QUFBQUEsS0FBQSxHQUFBQyxzQkFBQSxDQUFBQyxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQUMsT0FBQSxHQUFBRCxPQUFBO0FBQUE7QUFBQTtBQUErQyxtQ0FBQUQsdUJBQUFHLEdBQUEsV0FBQUEsR0FBQSxJQUFBQSxHQUFBLENBQUFDLFVBQUEsR0FBQUQsR0FBQSxnQkFBQUEsR0FBQTtBQUFBO0FBRXhDLElBQU1FLFFBQVE7QUFBQTtBQUFBQyxPQUFBLENBQUFELFFBQUE7QUFBQTtBQUFHO0FBQUlFO0FBQUFBO0FBQUFBO0FBQUFBO0FBQUFBO0FBQUFBO0FBQUFBO0FBQUFBLENBQUksQ0FBQyxDQUFDO0FBQ2xDRixRQUFRLENBQUNHLFFBQVEsR0FBRyxVQUFTQyxLQUFLLEVBQUVDLE9BQU8sRUFBRTtFQUMzQyxJQUFHQSxPQUFPLENBQUNDLGVBQWUsRUFBRTtJQUMxQjtJQUNBRixLQUFLLEdBQUdBLEtBQUssQ0FBQ0csT0FBTyxDQUFDLE9BQU8sRUFBRSxJQUFJLENBQUM7RUFDdEM7RUFFQSxJQUFJQyxRQUFRLEdBQUcsRUFBRTtJQUNiQyxnQkFBZ0IsR0FBR0wsS0FBSyxDQUFDTSxLQUFLLENBQUMsV0FBVyxDQUFDOztFQUUvQztFQUNBLElBQUksQ0FBQ0QsZ0JBQWdCLENBQUNBLGdCQUFnQixDQUFDRSxNQUFNLEdBQUcsQ0FBQyxDQUFDLEVBQUU7SUFDbERGLGdCQUFnQixDQUFDRyxHQUFHLENBQUMsQ0FBQztFQUN4Qjs7RUFFQTtFQUNBLEtBQUssSUFBSUMsQ0FBQyxHQUFHLENBQUMsRUFBRUEsQ0FBQyxHQUFHSixnQkFBZ0IsQ0FBQ0UsTUFBTSxFQUFFRSxDQUFDLEVBQUUsRUFBRTtJQUNoRCxJQUFJQyxJQUFJLEdBQUdMLGdCQUFnQixDQUFDSSxDQUFDLENBQUM7SUFFOUIsSUFBSUEsQ0FBQyxHQUFHLENBQUMsSUFBSSxDQUFDUixPQUFPLENBQUNVLGNBQWMsRUFBRTtNQUNwQ1AsUUFBUSxDQUFDQSxRQUFRLENBQUNHLE1BQU0sR0FBRyxDQUFDLENBQUMsSUFBSUcsSUFBSTtJQUN2QyxDQUFDLE1BQU07TUFDTE4sUUFBUSxDQUFDUSxJQUFJLENBQUNGLElBQUksQ0FBQztJQUNyQjtFQUNGO0VBRUEsT0FBT04sUUFBUTtBQUNqQixDQUFDO0FBRURSLFFBQVEsQ0FBQ2lCLE1BQU0sR0FBRyxVQUFTQyxJQUFJLEVBQUVDLEtBQUssRUFBRWQsT0FBTyxFQUFFO0VBQy9DO0VBQ0E7RUFDQTtFQUNBO0VBQ0E7RUFDQTtFQUNBO0VBQ0EsSUFBSUEsT0FBTyxDQUFDZSxnQkFBZ0IsRUFBRTtJQUM1QixJQUFJLENBQUNmLE9BQU8sQ0FBQ1UsY0FBYyxJQUFJLENBQUNHLElBQUksQ0FBQ0csUUFBUSxDQUFDLElBQUksQ0FBQyxFQUFFO01BQ25ESCxJQUFJLEdBQUdBLElBQUksQ0FBQ0ksSUFBSSxDQUFDLENBQUM7SUFDcEI7SUFDQSxJQUFJLENBQUNqQixPQUFPLENBQUNVLGNBQWMsSUFBSSxDQUFDSSxLQUFLLENBQUNFLFFBQVEsQ0FBQyxJQUFJLENBQUMsRUFBRTtNQUNwREYsS0FBSyxHQUFHQSxLQUFLLENBQUNHLElBQUksQ0FBQyxDQUFDO0lBQ3RCO0VBQ0YsQ0FBQyxNQUFNLElBQUlqQixPQUFPLENBQUNrQixrQkFBa0IsSUFBSSxDQUFDbEIsT0FBTyxDQUFDVSxjQUFjLEVBQUU7SUFDaEUsSUFBSUcsSUFBSSxDQUFDTSxRQUFRLENBQUMsSUFBSSxDQUFDLEVBQUU7TUFDdkJOLElBQUksR0FBR0EsSUFBSSxDQUFDTyxLQUFLLENBQUMsQ0FBQyxFQUFFLENBQUMsQ0FBQyxDQUFDO0lBQzFCO0lBQ0EsSUFBSU4sS0FBSyxDQUFDSyxRQUFRLENBQUMsSUFBSSxDQUFDLEVBQUU7TUFDeEJMLEtBQUssR0FBR0EsS0FBSyxDQUFDTSxLQUFLLENBQUMsQ0FBQyxFQUFFLENBQUMsQ0FBQyxDQUFDO0lBQzVCO0VBQ0Y7RUFDQSxPQUFPdkI7SUFBQUE7SUFBQUE7SUFBQUE7SUFBQUE7SUFBQUE7SUFBQUE7SUFBQUE7SUFBQUEsQ0FBSSxDQUFDd0IsU0FBUyxDQUFDVCxNQUFNLENBQUNVLElBQUksQ0FBQyxJQUFJLEVBQUVULElBQUksRUFBRUMsS0FBSyxFQUFFZCxPQUFPO0VBQUM7QUFDL0QsQ0FBQztBQUVNLFNBQVN1QixTQUFTQSxDQUFDQyxNQUFNLEVBQUVDLE1BQU0sRUFBRUMsUUFBUSxFQUFFO0VBQUUsT0FBTy9CLFFBQVEsQ0FBQ2dDLElBQUksQ0FBQ0gsTUFBTSxFQUFFQyxNQUFNLEVBQUVDLFFBQ
VEsQ0FBQztBQUFFOztBQUV0RztBQUNBO0FBQ0E7QUFDQTtBQUNBO0FBQ0E7QUFDTyxTQUFTRSxnQkFBZ0JBLENBQUNKLE1BQU0sRUFBRUMsTUFBTSxFQUFFQyxRQUFRLEVBQUU7RUFDekQsSUFBSTFCLE9BQU87RUFBRztFQUFBO0VBQUE7RUFBQTZCO0VBQUFBO0VBQUFBO0VBQUFBO0VBQUFBO0VBQUFBLGVBQWU7RUFBQTtFQUFBLENBQUNILFFBQVEsRUFBRTtJQUFDWCxnQkFBZ0IsRUFBRTtFQUFJLENBQUMsQ0FBQztFQUNqRSxPQUFPcEIsUUFBUSxDQUFDZ0MsSUFBSSxDQUFDSCxNQUFNLEVBQUVDLE1BQU0sRUFBRXpCLE9BQU8sQ0FBQztBQUMvQyIsImlnbm9yZUxpc3QiOltdfQ==
diff --git a/node_modules/diff/lib/diff/sentence.js b/node_modules/diff/lib/diff/sentence.js
deleted file mode 100644
index 66d8ece266938..0000000000000
--- a/node_modules/diff/lib/diff/sentence.js
+++ /dev/null
@@ -1,36 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.diffSentences = diffSentences;
-exports.sentenceDiff = void 0;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-var sentenceDiff =
-/*istanbul ignore start*/
-exports.sentenceDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-sentenceDiff.tokenize = function (value) {
-  return value.split(/(\S.+?[.!?])(?=\s+|$)/);
-};
-function diffSentences(oldStr, newStr, callback) {
-  return sentenceDiff.diff(oldStr, newStr, callback);
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfYmFzZSIsIl9pbnRlcm9wUmVxdWlyZURlZmF1bHQiLCJyZXF1aXJlIiwib2JqIiwiX19lc01vZHVsZSIsInNlbnRlbmNlRGlmZiIsImV4cG9ydHMiLCJEaWZmIiwidG9rZW5pemUiLCJ2YWx1ZSIsInNwbGl0IiwiZGlmZlNlbnRlbmNlcyIsIm9sZFN0ciIsIm5ld1N0ciIsImNhbGxiYWNrIiwiZGlmZiJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9kaWZmL3NlbnRlbmNlLmpzIl0sInNvdXJjZXNDb250ZW50IjpbImltcG9ydCBEaWZmIGZyb20gJy4vYmFzZSc7XG5cblxuZXhwb3J0IGNvbnN0IHNlbnRlbmNlRGlmZiA9IG5ldyBEaWZmKCk7XG5zZW50ZW5jZURpZmYudG9rZW5pemUgPSBmdW5jdGlvbih2YWx1ZSkge1xuICByZXR1cm4gdmFsdWUuc3BsaXQoLyhcXFMuKz9bLiE/XSkoPz1cXHMrfCQpLyk7XG59O1xuXG5leHBvcnQgZnVuY3Rpb24gZGlmZlNlbnRlbmNlcyhvbGRTdHIsIG5ld1N0ciwgY2FsbGJhY2spIHsgcmV0dXJuIHNlbnRlbmNlRGlmZi5kaWZmKG9sZFN0ciwgbmV3U3RyLCBjYWxsYmFjayk7IH1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7O0FBQUE7QUFBQTtBQUFBQSxLQUFBLEdBQUFDLHNCQUFBLENBQUFDLE9BQUE7QUFBQTtBQUFBO0FBQTBCLG1DQUFBRCx1QkFBQUUsR0FBQSxXQUFBQSxHQUFBLElBQUFBLEdBQUEsQ0FBQUMsVUFBQSxHQUFBRCxHQUFBLGdCQUFBQSxHQUFBO0FBQUE7QUFHbkIsSUFBTUUsWUFBWTtBQUFBO0FBQUFDLE9BQUEsQ0FBQUQsWUFBQTtBQUFBO0FBQUc7QUFBSUU7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUE7QUFBQUEsQ0FBSSxDQUFDLENBQUM7QUFDdENGLFlBQVksQ0FBQ0csUUFBUSxHQUFHLFVBQVNDLEtBQUssRUFBRTtFQUN0QyxPQUFPQSxLQUFLLENBQUNDLEtBQUssQ0FBQyx1QkFBdUIsQ0FBQztBQUM3QyxDQUFDO0FBRU0sU0FBU0MsYUFBYUEsQ0FBQ0MsTUFBTSxFQUFFQyxNQUFNLEVBQUVDLFFBQVEsRUFBRTtFQUFFLE9BQU9ULFlBQVksQ0FBQ1UsSUFBSSxDQUFDSCxNQUFNLEVBQUVDLE1BQU0sRUFBRUMsUUFBUSxDQUFDO0FBQUUiLCJpZ25vcmVMaXN0IjpbXX0=
diff --git a/node_modules/diff/lib/diff/word.js b/node_modules/diff/lib/diff/word.js
deleted file mode 100644
index 64919db4f6ff9..0000000000000
--- a/node_modules/diff/lib/diff/word.js
+++ /dev/null
@@ -1,543 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.diffWords = diffWords;
-exports.diffWordsWithSpace = diffWordsWithSpace;
-exports.wordWithSpaceDiff = exports.wordDiff = void 0;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./base"))
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_string = require("../util/string")
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
-//
-// Ranges and exceptions:
-// Latin-1 Supplement, 0080–00FF
-//  - U+00D7  × Multiplication sign
-//  - U+00F7  ÷ Division sign
-// Latin Extended-A, 0100–017F
-// Latin Extended-B, 0180–024F
-// IPA Extensions, 0250–02AF
-// Spacing Modifier Letters, 02B0–02FF
-//  - U+02C7  ˇ ˇ  Caron
-//  - U+02D8  ˘ ˘  Breve
-//  - U+02D9  ˙ ˙  Dot Above
-//  - U+02DA  ˚ ˚  Ring Above
-//  - U+02DB  ˛ ˛  Ogonek
-//  - U+02DC  ˜ ˜  Small Tilde
-//  - U+02DD  ˝ ˝  Double Acute Accent
-// Latin Extended Additional, 1E00–1EFF
-var extendedWordChars = "a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}";
-
-// Each token is one of the following:
-// - A punctuation mark plus the surrounding whitespace
-// - A word plus the surrounding whitespace
-// - Pure whitespace (but only in the special case where this the entire text
-//   is just whitespace)
-//
-// We have to include surrounding whitespace in the tokens because the two
-// alternative approaches produce horribly broken results:
-// * If we just discard the whitespace, we can't fully reproduce the original
-//   text from the sequence of tokens and any attempt to render the diff will
-//   get the whitespace wrong.
-// * If we have separate tokens for whitespace, then in a typical text every
-//   second token will be a single space character. But this often results in
-//   the optimal diff between two texts being a perverse one that preserves
-//   the spaces between words but deletes and reinserts actual common words.
-//   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
-//   for an example.
-//
-// Keeping the surrounding whitespace of course has implications for .equals
-// and .join, not just .tokenize.
-
-// This regex does NOT fully implement the tokenization rules described above.
-// Instead, it gives runs of whitespace their own "token". The tokenize method
-// then handles stitching whitespace tokens onto adjacent word or punctuation
-// tokens.
-var tokenizeIncludingWhitespace = new RegExp(
-/*istanbul ignore start*/
-"[".concat(
-/*istanbul ignore end*/
-extendedWordChars, "]+|\\s+|[^").concat(extendedWordChars, "]"), 'ug');
-var wordDiff =
-/*istanbul ignore start*/
-exports.wordDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-wordDiff.equals = function (left, right, options) {
-  if (options.ignoreCase) {
-    left = left.toLowerCase();
-    right = right.toLowerCase();
-  }
-  return left.trim() === right.trim();
-};
-wordDiff.tokenize = function (value) {
-  /*istanbul ignore start*/
-  var
-  /*istanbul ignore end*/
-  options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-  var parts;
-  if (options.intlSegmenter) {
-    if (options.intlSegmenter.resolvedOptions().granularity != 'word') {
-      throw new Error('The segmenter passed must have a granularity of "word"');
-    }
-    parts = Array.from(options.intlSegmenter.segment(value), function (segment)
-    /*istanbul ignore start*/
-    {
-      return (
-        /*istanbul ignore end*/
-        segment.segment
-      );
-    });
-  } else {
-    parts = value.match(tokenizeIncludingWhitespace) || [];
-  }
-  var tokens = [];
-  var prevPart = null;
-  parts.forEach(function (part) {
-    if (/\s/.test(part)) {
-      if (prevPart == null) {
-        tokens.push(part);
-      } else {
-        tokens.push(tokens.pop() + part);
-      }
-    } else if (/\s/.test(prevPart)) {
-      if (tokens[tokens.length - 1] == prevPart) {
-        tokens.push(tokens.pop() + part);
-      } else {
-        tokens.push(prevPart + part);
-      }
-    } else {
-      tokens.push(part);
-    }
-    prevPart = part;
-  });
-  return tokens;
-};
-wordDiff.join = function (tokens) {
-  // Tokens being joined here will always have appeared consecutively in the
-  // same text, so we can simply strip off the leading whitespace from all the
-  // tokens except the first (and except any whitespace-only tokens - but such
-  // a token will always be the first and only token anyway) and then join them
-  // and the whitespace around words and punctuation will end up correct.
-  return tokens.map(function (token, i) {
-    if (i == 0) {
-      return token;
-    } else {
-      return token.replace(/^\s+/, '');
-    }
-  }).join('');
-};
-wordDiff.postProcess = function (changes, options) {
-  if (!changes || options.oneChangePerToken) {
-    return changes;
-  }
-  var lastKeep = null;
-  // Change objects representing any insertion or deletion since the last
-  // "keep" change object. There can be at most one of each.
-  var insertion = null;
-  var deletion = null;
-  changes.forEach(function (change) {
-    if (change.added) {
-      insertion = change;
-    } else if (change.removed) {
-      deletion = change;
-    } else {
-      if (insertion || deletion) {
-        // May be false at start of text
-        dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
-      }
-      lastKeep = change;
-      insertion = null;
-      deletion = null;
-    }
-  });
-  if (insertion || deletion) {
-    dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
-  }
-  return changes;
-};
-function diffWords(oldStr, newStr, options) {
-  // This option has never been documented and never will be (it's clearer to
-  // just call `diffWordsWithSpace` directly if you need that behavior), but
-  // has existed in jsdiff for a long time, so we retain support for it here
-  // for the sake of backwards compatibility.
-  if (
-  /*istanbul ignore start*/
-  (
-  /*istanbul ignore end*/
-  options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
-    return diffWordsWithSpace(oldStr, newStr, options);
-  }
-  return wordDiff.diff(oldStr, newStr, options);
-}
-function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
-  // Before returning, we tidy up the leading and trailing whitespace of the
-  // change objects to eliminate cases where trailing whitespace in one object
-  // is repeated as leading whitespace in the next.
-  // Below are examples of the outcomes we want here to explain the code.
-  // I=insert, K=keep, D=delete
-  // 1. diffing 'foo bar baz' vs 'foo baz'
-  //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
-  //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
-  //
-  // 2. Diffing 'foo bar baz' vs 'foo qux baz'
-  //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
-  //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
-  //
-  // 3. Diffing 'foo\nbar baz' vs 'foo baz'
-  //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
-  //    After cleanup, we want K'foo' D:'\nbar' K:' baz'
-  //
-  // 4. Diffing 'foo baz' vs 'foo\nbar baz'
-  //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
-  //    After cleanup, we ideally want K'foo' I:'\nbar' K:' baz'
-  //    but don't actually manage this currently (the pre-cleanup change
-  //    objects don't contain enough information to make it possible).
-  //
-  // 5. Diffing 'foo   bar baz' vs 'foo  baz'
-  //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
-  //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
-  //
-  // Our handling is unavoidably imperfect in the case where there's a single
-  // indel between keeps and the whitespace has changed. For instance, consider
-  // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
-  // object to represent the insertion of the space character (which isn't even
-  // a token), we have no way to avoid losing information about the texts'
-  // original whitespace in the result we return. Still, we do our best to
-  // output something that will look sensible if we e.g. print it with
-  // insertions in green and deletions in red.
-
-  // Between two "keep" change objects (or before the first or after the last
-  // change object), we can have either:
-  // * A "delete" followed by an "insert"
-  // * Just an "insert"
-  // * Just a "delete"
-  // We handle the three cases separately.
-  if (deletion && insertion) {
-    var oldWsPrefix = deletion.value.match(/^\s*/)[0];
-    var oldWsSuffix = deletion.value.match(/\s*$/)[0];
-    var newWsPrefix = insertion.value.match(/^\s*/)[0];
-    var newWsSuffix = insertion.value.match(/\s*$/)[0];
-    if (startKeep) {
-      var commonWsPrefix =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      longestCommonPrefix)
-      /*istanbul ignore end*/
-      (oldWsPrefix, newWsPrefix);
-      startKeep.value =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      replaceSuffix)
-      /*istanbul ignore end*/
-      (startKeep.value, newWsPrefix, commonWsPrefix);
-      deletion.value =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      removePrefix)
-      /*istanbul ignore end*/
-      (deletion.value, commonWsPrefix);
-      insertion.value =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      removePrefix)
-      /*istanbul ignore end*/
-      (insertion.value, commonWsPrefix);
-    }
-    if (endKeep) {
-      var commonWsSuffix =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      longestCommonSuffix)
-      /*istanbul ignore end*/
-      (oldWsSuffix, newWsSuffix);
-      endKeep.value =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      replacePrefix)
-      /*istanbul ignore end*/
-      (endKeep.value, newWsSuffix, commonWsSuffix);
-      deletion.value =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      removeSuffix)
-      /*istanbul ignore end*/
-      (deletion.value, commonWsSuffix);
-      insertion.value =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _string
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      removeSuffix)
-      /*istanbul ignore end*/
-      (insertion.value, commonWsSuffix);
-    }
-  } else if (insertion) {
-    // The whitespaces all reflect what was in the new text rather than
-    // the old, so we essentially have no information about whitespace
-    // insertion or deletion. We just want to dedupe the whitespace.
-    // We do that by having each change object keep its trailing
-    // whitespace and deleting duplicate leading whitespace where
-    // present.
-    if (startKeep) {
-      insertion.value = insertion.value.replace(/^\s*/, '');
-    }
-    if (endKeep) {
-      endKeep.value = endKeep.value.replace(/^\s*/, '');
-    }
-    // otherwise we've got a deletion and no insertion
-  } else if (startKeep && endKeep) {
-    var newWsFull = endKeep.value.match(/^\s*/)[0],
-      delWsStart = deletion.value.match(/^\s*/)[0],
-      delWsEnd = deletion.value.match(/\s*$/)[0];
-
-    // Any whitespace that comes straight after startKeep in both the old and
-    // new texts, assign to startKeep and remove from the deletion.
-    var newWsStart =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    longestCommonPrefix)
-    /*istanbul ignore end*/
-    (newWsFull, delWsStart);
-    deletion.value =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    removePrefix)
-    /*istanbul ignore end*/
-    (deletion.value, newWsStart);
-
-    // Any whitespace that comes straight before endKeep in both the old and
-    // new texts, and hasn't already been assigned to startKeep, assign to
-    // endKeep and remove from the deletion.
-    var newWsEnd =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    longestCommonSuffix)
-    /*istanbul ignore end*/
-    (
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    removePrefix)
-    /*istanbul ignore end*/
-    (newWsFull, newWsStart), delWsEnd);
-    deletion.value =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    removeSuffix)
-    /*istanbul ignore end*/
-    (deletion.value, newWsEnd);
-    endKeep.value =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    replacePrefix)
-    /*istanbul ignore end*/
-    (endKeep.value, newWsFull, newWsEnd);
-
-    // If there's any whitespace from the new text that HASN'T already been
-    // assigned, assign it to the start:
-    startKeep.value =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    replaceSuffix)
-    /*istanbul ignore end*/
-    (startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
-  } else if (endKeep) {
-    // We are at the start of the text. Preserve all the whitespace on
-    // endKeep, and just remove whitespace from the end of deletion to the
-    // extent that it overlaps with the start of endKeep.
-    var endKeepWsPrefix = endKeep.value.match(/^\s*/)[0];
-    var deletionWsSuffix = deletion.value.match(/\s*$/)[0];
-    var overlap =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    maximumOverlap)
-    /*istanbul ignore end*/
-    (deletionWsSuffix, endKeepWsPrefix);
-    deletion.value =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    removeSuffix)
-    /*istanbul ignore end*/
-    (deletion.value, overlap);
-  } else if (startKeep) {
-    // We are at the END of the text. Preserve all the whitespace on
-    // startKeep, and just remove whitespace from the start of deletion to
-    // the extent that it overlaps with the end of startKeep.
-    var startKeepWsSuffix = startKeep.value.match(/\s*$/)[0];
-    var deletionWsPrefix = deletion.value.match(/^\s*/)[0];
-    var _overlap =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    maximumOverlap)
-    /*istanbul ignore end*/
-    (startKeepWsSuffix, deletionWsPrefix);
-    deletion.value =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    removePrefix)
-    /*istanbul ignore end*/
-    (deletion.value, _overlap);
-  }
-}
-var wordWithSpaceDiff =
-/*istanbul ignore start*/
-exports.wordWithSpaceDiff =
-/*istanbul ignore end*/
-new
-/*istanbul ignore start*/
-_base
-/*istanbul ignore end*/
-[
-/*istanbul ignore start*/
-"default"
-/*istanbul ignore end*/
-]();
-wordWithSpaceDiff.tokenize = function (value) {
-  // Slightly different to the tokenizeIncludingWhitespace regex used above in
-  // that this one treats each individual newline as a distinct tokens, rather
-  // than merging them into other surrounding whitespace. This was requested
-  // in https://github.com/kpdecker/jsdiff/issues/180 &
-  //    https://github.com/kpdecker/jsdiff/issues/211
-  var regex = new RegExp(
-  /*istanbul ignore start*/
-  "(\\r?\\n)|[".concat(
-  /*istanbul ignore end*/
-  extendedWordChars, "]+|[^\\S\\n\\r]+|[^").concat(extendedWordChars, "]"), 'ug');
-  return value.match(regex) || [];
-};
-function diffWordsWithSpace(oldStr, newStr, options) {
-  return wordWithSpaceDiff.diff(oldStr, newStr, options);
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["_base","_interopRequireDefault","require","_string","obj","__esModule","extendedWordChars","tokenizeIncludingWhitespace","RegExp","concat","wordDiff","exports","Diff","equals","left","right","options","ignoreCase","toLowerCase","trim","tokenize","value","arguments","length","undefined","parts","intlSegmenter","resolvedOptions","granularity","Error","Array","from","segment","match","tokens","prevPart","forEach","part","test","push","pop","join","map","token","i","replace","postProcess","changes","oneChangePerToken","lastKeep","insertion","deletion","change","added","removed","dedupeWhitespaceInChangeObjects","diffWords","oldStr","newStr","ignoreWhitespace","diffWordsWithSpace","diff","startKeep","endKeep","oldWsPrefix","oldWsSuffix","newWsPrefix","newWsSuffix","commonWsPrefix","longestCommonPrefix","replaceSuffix","removePrefix","commonWsSuffix","longestCommonSuffix","replacePrefix","removeSuffix","newWsFull","delWsStart","delWsEnd","newWsStart","newWsEnd","slice","endKeepWsPrefix","deletionWsSuffix","overlap","maximumOverlap","startKeepWsSuffix","deletionWsPrefix","wordWithSpaceDiff","regex"],"sources":["../../src/diff/word.js"],"sourcesContent":["import Diff from './base';\nimport { longestCommonPrefix, longestCommonSuffix, replacePrefix, replaceSuffix, removePrefix, removeSuffix, maximumOverlap } from '../util/string';\n\n// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode\n//\n// Ranges and exceptions:\n// Latin-1 Supplement, 0080–00FF\n//  - U+00D7  × Multiplication sign\n//  - U+00F7  ÷ Division sign\n// Latin Extended-A, 0100–017F\n// Latin Extended-B, 0180–024F\n// IPA Extensions, 0250–02AF\n// Spacing Modifier Letters, 02B0–02FF\n//  - U+02C7  ˇ &#711;  Caron\n//  - U+02D8  ˘ &#728;  Breve\n//  - U+02D9  ˙ &#729;  Dot Above\n//  - U+02DA  ˚ &#730;  Ring Above\n//  - U+02DB  ˛ &#731;  Ogonek\n//  - U+02DC  ˜ &#732;  Small Tilde\n//  - U+02DD  ˝ &#733;  Double Acute Accent\n// Latin Extended Additional, 1E00–1EFF\nconst extendedWordChars = 'a-zA-Z0-9_\\\\u{C0}-\\\\u{FF}\\\\u{D8}-\\\\u{F6}\\\\u{F8}-\\\\u{2C6}\\\\u{2C8}-\\\\u{2D7}\\\\u{2DE}-\\\\u{2FF}\\\\u{1E00}-\\\\u{1EFF}';\n\n// Each token is one of the following:\n// - A punctuation mark plus the surrounding whitespace\n// - A word plus the surrounding whitespace\n// - Pure whitespace (but only in the special case where this the entire text\n//   is just whitespace)\n//\n// We have to include surrounding whitespace in the tokens because the two\n// alternative approaches produce horribly broken results:\n// * If we just discard the whitespace, we can't fully reproduce the original\n//   text from the sequence of tokens and any attempt to render the diff will\n//   get the whitespace wrong.\n// * If we have separate tokens for whitespace, then in a typical text every\n//   second token will be a single space character. But this often results in\n//   the optimal diff between two texts being a perverse one that preserves\n//   the spaces between words but deletes and reinserts actual common words.\n//   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640\n//   for an example.\n//\n// Keeping the surrounding whitespace of course has implications for .equals\n// and .join, not just .tokenize.\n\n// This regex does NOT fully implement the tokenization rules described above.\n// Instead, it gives runs of whitespace their own \"token\". 
The tokenize method\n// then handles stitching whitespace tokens onto adjacent word or punctuation\n// tokens.\nconst tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\\\s+|[^${extendedWordChars}]`, 'ug');\n\nexport const wordDiff = new Diff();\nwordDiff.equals = function(left, right, options) {\n  if (options.ignoreCase) {\n    left = left.toLowerCase();\n    right = right.toLowerCase();\n  }\n\n  return left.trim() === right.trim();\n};\n\nwordDiff.tokenize = function(value, options = {}) {\n  let parts;\n  if (options.intlSegmenter) {\n    if (options.intlSegmenter.resolvedOptions().granularity != 'word') {\n      throw new Error('The segmenter passed must have a granularity of \"word\"');\n    }\n    parts = Array.from(options.intlSegmenter.segment(value), segment => segment.segment);\n  } else {\n    parts = value.match(tokenizeIncludingWhitespace) || [];\n  }\n  const tokens = [];\n  let prevPart = null;\n  parts.forEach(part => {\n    if ((/\\s/).test(part)) {\n      if (prevPart == null) {\n        tokens.push(part);\n      } else {\n        tokens.push(tokens.pop() + part);\n      }\n    } else if ((/\\s/).test(prevPart)) {\n      if (tokens[tokens.length - 1] == prevPart) {\n        tokens.push(tokens.pop() + part);\n      } else {\n        tokens.push(prevPart + part);\n      }\n    } else {\n      tokens.push(part);\n    }\n\n    prevPart = part;\n  });\n  return tokens;\n};\n\nwordDiff.join = function(tokens) {\n  // Tokens being joined here will always have appeared consecutively in the\n  // same text, so we can simply strip off the leading whitespace from all the\n  // tokens except the first (and except any whitespace-only tokens - but such\n  // a token will always be the first and only token anyway) and then join them\n  // and the whitespace around words and punctuation will end up correct.\n  return tokens.map((token, i) => {\n    if (i == 0) {\n      return token;\n    } else {\n      return token.replace((/^\\s+/), '');\n    }\n  }).join('');\n};\n\nwordDiff.postProcess = function(changes, options) {\n  if (!changes || options.oneChangePerToken) {\n    return changes;\n  }\n\n  let lastKeep = null;\n  // Change objects representing any insertion or deletion since the last\n  // \"keep\" change object. 
There can be at most one of each.\n  let insertion = null;\n  let deletion = null;\n  changes.forEach(change => {\n    if (change.added) {\n      insertion = change;\n    } else if (change.removed) {\n      deletion = change;\n    } else {\n      if (insertion || deletion) { // May be false at start of text\n        dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);\n      }\n      lastKeep = change;\n      insertion = null;\n      deletion = null;\n    }\n  });\n  if (insertion || deletion) {\n    dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);\n  }\n  return changes;\n};\n\nexport function diffWords(oldStr, newStr, options) {\n  // This option has never been documented and never will be (it's clearer to\n  // just call `diffWordsWithSpace` directly if you need that behavior), but\n  // has existed in jsdiff for a long time, so we retain support for it here\n  // for the sake of backwards compatibility.\n  if (options?.ignoreWhitespace != null && !options.ignoreWhitespace) {\n    return diffWordsWithSpace(oldStr, newStr, options);\n  }\n\n  return wordDiff.diff(oldStr, newStr, options);\n}\n\nfunction dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {\n  // Before returning, we tidy up the leading and trailing whitespace of the\n  // change objects to eliminate cases where trailing whitespace in one object\n  // is repeated as leading whitespace in the next.\n  // Below are examples of the outcomes we want here to explain the code.\n  // I=insert, K=keep, D=delete\n  // 1. diffing 'foo bar baz' vs 'foo baz'\n  //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'\n  //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'\n  //\n  // 2. Diffing 'foo bar baz' vs 'foo qux baz'\n  //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'\n  //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'\n  //\n  // 3. Diffing 'foo\\nbar baz' vs 'foo baz'\n  //    Prior to cleanup, we have K:'foo ' D:'\\nbar ' K:' baz'\n  //    After cleanup, we want K'foo' D:'\\nbar' K:' baz'\n  //\n  // 4. Diffing 'foo baz' vs 'foo\\nbar baz'\n  //    Prior to cleanup, we have K:'foo\\n' I:'\\nbar ' K:' baz'\n  //    After cleanup, we ideally want K'foo' I:'\\nbar' K:' baz'\n  //    but don't actually manage this currently (the pre-cleanup change\n  //    objects don't contain enough information to make it possible).\n  //\n  // 5. Diffing 'foo   bar baz' vs 'foo  baz'\n  //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'\n  //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'\n  //\n  // Our handling is unavoidably imperfect in the case where there's a single\n  // indel between keeps and the whitespace has changed. For instance, consider\n  // diffing 'foo\\tbar\\nbaz' vs 'foo baz'. Unless we create an extra change\n  // object to represent the insertion of the space character (which isn't even\n  // a token), we have no way to avoid losing information about the texts'\n  // original whitespace in the result we return. Still, we do our best to\n  // output something that will look sensible if we e.g. 
print it with\n  // insertions in green and deletions in red.\n\n  // Between two \"keep\" change objects (or before the first or after the last\n  // change object), we can have either:\n  // * A \"delete\" followed by an \"insert\"\n  // * Just an \"insert\"\n  // * Just a \"delete\"\n  // We handle the three cases separately.\n  if (deletion && insertion) {\n    const oldWsPrefix = deletion.value.match(/^\\s*/)[0];\n    const oldWsSuffix = deletion.value.match(/\\s*$/)[0];\n    const newWsPrefix = insertion.value.match(/^\\s*/)[0];\n    const newWsSuffix = insertion.value.match(/\\s*$/)[0];\n\n    if (startKeep) {\n      const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);\n      startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);\n      deletion.value = removePrefix(deletion.value, commonWsPrefix);\n      insertion.value = removePrefix(insertion.value, commonWsPrefix);\n    }\n    if (endKeep) {\n      const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);\n      endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);\n      deletion.value = removeSuffix(deletion.value, commonWsSuffix);\n      insertion.value = removeSuffix(insertion.value, commonWsSuffix);\n    }\n  } else if (insertion) {\n    // The whitespaces all reflect what was in the new text rather than\n    // the old, so we essentially have no information about whitespace\n    // insertion or deletion. We just want to dedupe the whitespace.\n    // We do that by having each change object keep its trailing\n    // whitespace and deleting duplicate leading whitespace where\n    // present.\n    if (startKeep) {\n      insertion.value = insertion.value.replace(/^\\s*/, '');\n    }\n    if (endKeep) {\n      endKeep.value = endKeep.value.replace(/^\\s*/, '');\n    }\n  // otherwise we've got a deletion and no insertion\n  } else if (startKeep && endKeep) {\n    const newWsFull = endKeep.value.match(/^\\s*/)[0],\n        delWsStart = deletion.value.match(/^\\s*/)[0],\n        delWsEnd = deletion.value.match(/\\s*$/)[0];\n\n    // Any whitespace that comes straight after startKeep in both the old and\n    // new texts, assign to startKeep and remove from the deletion.\n    const newWsStart = longestCommonPrefix(newWsFull, delWsStart);\n    deletion.value = removePrefix(deletion.value, newWsStart);\n\n    // Any whitespace that comes straight before endKeep in both the old and\n    // new texts, and hasn't already been assigned to startKeep, assign to\n    // endKeep and remove from the deletion.\n    const newWsEnd = longestCommonSuffix(\n      removePrefix(newWsFull, newWsStart),\n      delWsEnd\n    );\n    deletion.value = removeSuffix(deletion.value, newWsEnd);\n    endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);\n\n    // If there's any whitespace from the new text that HASN'T already been\n    // assigned, assign it to the start:\n    startKeep.value = replaceSuffix(\n      startKeep.value,\n      newWsFull,\n      newWsFull.slice(0, newWsFull.length - newWsEnd.length)\n    );\n  } else if (endKeep) {\n    // We are at the start of the text. 
Preserve all the whitespace on\n    // endKeep, and just remove whitespace from the end of deletion to the\n    // extent that it overlaps with the start of endKeep.\n    const endKeepWsPrefix = endKeep.value.match(/^\\s*/)[0];\n    const deletionWsSuffix = deletion.value.match(/\\s*$/)[0];\n    const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);\n    deletion.value = removeSuffix(deletion.value, overlap);\n  } else if (startKeep) {\n    // We are at the END of the text. Preserve all the whitespace on\n    // startKeep, and just remove whitespace from the start of deletion to\n    // the extent that it overlaps with the end of startKeep.\n    const startKeepWsSuffix = startKeep.value.match(/\\s*$/)[0];\n    const deletionWsPrefix = deletion.value.match(/^\\s*/)[0];\n    const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);\n    deletion.value = removePrefix(deletion.value, overlap);\n  }\n}\n\n\nexport const wordWithSpaceDiff = new Diff();\nwordWithSpaceDiff.tokenize = function(value) {\n  // Slightly different to the tokenizeIncludingWhitespace regex used above in\n  // that this one treats each individual newline as a distinct tokens, rather\n  // than merging them into other surrounding whitespace. This was requested\n  // in https://github.com/kpdecker/jsdiff/issues/180 &\n  //    https://github.com/kpdecker/jsdiff/issues/211\n  const regex = new RegExp(`(\\\\r?\\\\n)|[${extendedWordChars}]+|[^\\\\S\\\\n\\\\r]+|[^${extendedWordChars}]`, 'ug');\n  return value.match(regex) || [];\n};\nexport function diffWordsWithSpace(oldStr, newStr, options) {\n  return wordWithSpaceDiff.diff(oldStr, newStr, options);\n}\n"],"mappings":";;;;;;;;;;AAAA;AAAA;AAAAA,KAAA,GAAAC,sBAAA,CAAAC,OAAA;AAAA;AAAA;AACA;AAAA;AAAAC,OAAA,GAAAD,OAAA;AAAA;AAAA;AAAoJ,mCAAAD,uBAAAG,GAAA,WAAAA,GAAA,IAAAA,GAAA,CAAAC,UAAA,GAAAD,GAAA,gBAAAA,GAAA;AAAA;AAEpJ;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA,IAAME,iBAAiB,GAAG,+GAA+G;;AAEzI;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;AAEA;AACA;AACA;AACA;AACA,IAAMC,2BAA2B,GAAG,IAAIC,MAAM;AAAA;AAAA,IAAAC,MAAA;AAAA;AAAKH,iBAAiB,gBAAAG,MAAA,CAAaH,iBAAiB,QAAK,IAAI,CAAC;AAErG,IAAMI,QAAQ;AAAA;AAAAC,OAAA,CAAAD,QAAA;AAAA;AAAG;AAAIE;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA,CAAI,CAAC,CAAC;AAClCF,QAAQ,CAACG,MAAM,GAAG,UAASC,IAAI,EAAEC,KAAK,EAAEC,OAAO,EAAE;EAC/C,IAAIA,OAAO,CAACC,UAAU,EAAE;IACtBH,IAAI,GAAGA,IAAI,CAACI,WAAW,CAAC,CAAC;IACzBH,KAAK,GAAGA,KAAK,CAACG,WAAW,CAAC,CAAC;EAC7B;EAEA,OAAOJ,IAAI,CAACK,IAAI,CAAC,CAAC,KAAKJ,KAAK,CAACI,IAAI,CAAC,CAAC;AACrC,CAAC;AAEDT,QAAQ,CAACU,QAAQ,GAAG,UAASC,KAAK,EAAgB;EAAA;EAAA;EAAA;EAAdL,OAAO,GAAAM,SAAA,CAAAC,MAAA,QAAAD,SAAA,QAAAE,SAAA,GAAAF,SAAA,MAAG,CAAC,CAAC;EAC9C,IAAIG,KAAK;EACT,IAAIT,OAAO,CAACU,aAAa,EAAE;IACzB,IAAIV,OAAO,CAACU,aAAa,CAACC,eAAe,CAAC,CAAC,CAACC,WAAW,IAAI,MAAM,EAAE;MACjE,MAAM,IAAIC,KAAK,CAAC,wDAAwD,CAAC;IAC3E;IACAJ,KAAK,GAAGK,KAAK,CAACC,IAAI,CAACf,OAAO,CAACU,aAAa,CAACM,OAAO,CAACX,KAAK,CAAC,EAAE,UAAAW,OAAO;IAAA;IAAA;MAAA;QAAA;QAAIA,OAAO,CAACA;MAAO;IAAA,EAAC;EACtF,CAAC,MAAM;IACLP,KAAK,GAAGJ,KAAK,CAACY,KAAK,CAAC1B,2BAA2B,CAAC,IAAI,EAAE;EACxD;EACA,IAAM2B,MAAM,GAAG,EAAE;EACjB,IAAIC,QAAQ,GAAG,IAAI;EACnBV,KAAK,CAACW,OAAO,CAAC,UAAAC,IAAI,EAAI;IACpB,IAAK,IAAI,CAAEC,IAAI,CAACD,IAAI,CAAC,EAAE;MACrB,IAAIF,QAAQ,IAAI,IAAI,EAAE;QACpBD,MAAM,CAACK,IAAI,CAACF,IAAI,CAAC;MACnB,CAAC,MAAM;QACLH,MAAM,CAACK,IAAI,CAACL,MAAM,CAACM,GAAG,CAAC,CAAC,GAAGH,IAAI,CAAC;MAClC;IACF,CAAC,MAAM,IAAK,IAAI,CAAEC,IAAI,CAACH,QAAQ,CAAC,EAAE;MAChC,IAAID,MAAM,CAACA,M
AAM,CAACX,MAAM,GAAG,CAAC,CAAC,IAAIY,QAAQ,EAAE;QACzCD,MAAM,CAACK,IAAI,CAACL,MAAM,CAACM,GAAG,CAAC,CAAC,GAAGH,IAAI,CAAC;MAClC,CAAC,MAAM;QACLH,MAAM,CAACK,IAAI,CAACJ,QAAQ,GAAGE,IAAI,CAAC;MAC9B;IACF,CAAC,MAAM;MACLH,MAAM,CAACK,IAAI,CAACF,IAAI,CAAC;IACnB;IAEAF,QAAQ,GAAGE,IAAI;EACjB,CAAC,CAAC;EACF,OAAOH,MAAM;AACf,CAAC;AAEDxB,QAAQ,CAAC+B,IAAI,GAAG,UAASP,MAAM,EAAE;EAC/B;EACA;EACA;EACA;EACA;EACA,OAAOA,MAAM,CAACQ,GAAG,CAAC,UAACC,KAAK,EAAEC,CAAC,EAAK;IAC9B,IAAIA,CAAC,IAAI,CAAC,EAAE;MACV,OAAOD,KAAK;IACd,CAAC,MAAM;MACL,OAAOA,KAAK,CAACE,OAAO,CAAE,MAAM,EAAG,EAAE,CAAC;IACpC;EACF,CAAC,CAAC,CAACJ,IAAI,CAAC,EAAE,CAAC;AACb,CAAC;AAED/B,QAAQ,CAACoC,WAAW,GAAG,UAASC,OAAO,EAAE/B,OAAO,EAAE;EAChD,IAAI,CAAC+B,OAAO,IAAI/B,OAAO,CAACgC,iBAAiB,EAAE;IACzC,OAAOD,OAAO;EAChB;EAEA,IAAIE,QAAQ,GAAG,IAAI;EACnB;EACA;EACA,IAAIC,SAAS,GAAG,IAAI;EACpB,IAAIC,QAAQ,GAAG,IAAI;EACnBJ,OAAO,CAACX,OAAO,CAAC,UAAAgB,MAAM,EAAI;IACxB,IAAIA,MAAM,CAACC,KAAK,EAAE;MAChBH,SAAS,GAAGE,MAAM;IACpB,CAAC,MAAM,IAAIA,MAAM,CAACE,OAAO,EAAE;MACzBH,QAAQ,GAAGC,MAAM;IACnB,CAAC,MAAM;MACL,IAAIF,SAAS,IAAIC,QAAQ,EAAE;QAAE;QAC3BI,+BAA+B,CAACN,QAAQ,EAAEE,QAAQ,EAAED,SAAS,EAAEE,MAAM,CAAC;MACxE;MACAH,QAAQ,GAAGG,MAAM;MACjBF,SAAS,GAAG,IAAI;MAChBC,QAAQ,GAAG,IAAI;IACjB;EACF,CAAC,CAAC;EACF,IAAID,SAAS,IAAIC,QAAQ,EAAE;IACzBI,+BAA+B,CAACN,QAAQ,EAAEE,QAAQ,EAAED,SAAS,EAAE,IAAI,CAAC;EACtE;EACA,OAAOH,OAAO;AAChB,CAAC;AAEM,SAASS,SAASA,CAACC,MAAM,EAAEC,MAAM,EAAE1C,OAAO,EAAE;EACjD;EACA;EACA;EACA;EACA;EAAI;EAAA;EAAA;EAAAA,OAAO,aAAPA,OAAO,uBAAPA,OAAO,CAAE2C,gBAAgB,KAAI,IAAI,IAAI,CAAC3C,OAAO,CAAC2C,gBAAgB,EAAE;IAClE,OAAOC,kBAAkB,CAACH,MAAM,EAAEC,MAAM,EAAE1C,OAAO,CAAC;EACpD;EAEA,OAAON,QAAQ,CAACmD,IAAI,CAACJ,MAAM,EAAEC,MAAM,EAAE1C,OAAO,CAAC;AAC/C;AAEA,SAASuC,+BAA+BA,CAACO,SAAS,EAAEX,QAAQ,EAAED,SAAS,EAAEa,OAAO,EAAE;EAChF;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;EACA;;EAEA;EACA;EACA;EACA;EACA;EACA;EACA,IAAIZ,QAAQ,IAAID,SAAS,EAAE;IACzB,IAAMc,WAAW,GAAGb,QAAQ,CAAC9B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACnD,IAAMgC,WAAW,GAAGd,QAAQ,CAAC9B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACnD,IAAMiC,WAAW,GAAGhB,SAAS,CAAC7B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACpD,IAAMkC,WAAW,GAAGjB,SAAS,CAAC7B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAEpD,IAAI6B,SAAS,EAAE;MACb,IAAMM,cAAc;MAAG;MAAA;MAAA;MAAAC;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,mBAAmB;MAAA;MAAA,CAACL,WAAW,EAAEE,WAAW,CAAC;MACpEJ,SAAS,CAACzC,KAAK;MAAG;MAAA;MAAA;MAAAiD;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,aAAa;MAAA;MAAA,CAACR,SAAS,CAACzC,KAAK,EAAE6C,WAAW,EAAEE,cAAc,CAAC;MAC7EjB,QAAQ,CAAC9B,KAAK;MAAG;MAAA;MAAA;MAAAkD;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,YAAY;MAAA;MAAA,CAACpB,QAAQ,CAAC9B,KAAK,EAAE+C,cAAc,CAAC;MAC7DlB,SAAS,CAAC7B,KAAK;MAAG;MAAA;MAAA;MAAAkD;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,YAAY;MAAA;MAAA,CAACrB,SAAS,CAAC7B,KAAK,EAAE+C,cAAc,CAAC;IACjE;IACA,IAAIL,OAAO,EAAE;MACX,IAAMS,cAAc;MAAG;MAAA;MAAA;MAAAC;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,mBAAmB;MAAA;MAAA,CAACR,WAAW,EAAEE,WAAW,CAAC;MACpEJ,OAAO,CAAC1C,KAAK;MAAG;MAAA;MAAA;MAAAqD;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,aAAa;MAAA;MAAA,CAACX,OAAO,CAAC1C,KAAK,EAAE8C,WAAW,EAAEK,cAAc,CAAC;MACzErB,QAAQ,CAAC9B,KAAK;MAAG;MAAA;MAAA;MAAAsD;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,YAAY;MAAA;MAAA,CAACxB,QAAQ,CAAC9B,KAAK,EAAEmD,cAAc,CAAC;MAC7DtB,SAAS,CAAC7B,KAAK;MAAG;MAAA;MAAA;MAAAsD;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,YAAY;MAAA;MAAA,CAACzB,SAAS,CAAC7B,KAAK,EAAEmD,cAAc,CAAC;IACjE;EACF,CAAC,MAAM,IAAItB,SAAS,EAAE;IACpB;IACA;IACA;IACA;IACA;IACA;IACA,IAAIY,SAAS,EAAE;MACbZ,SAAS,CAAC7B,KAAK,GAAG6B,SAAS,CAAC7B,KAAK,CAACwB,OAAO,CAAC,MAAM,EAA
E,EAAE,CAAC;IACvD;IACA,IAAIkB,OAAO,EAAE;MACXA,OAAO,CAAC1C,KAAK,GAAG0C,OAAO,CAAC1C,KAAK,CAACwB,OAAO,CAAC,MAAM,EAAE,EAAE,CAAC;IACnD;IACF;EACA,CAAC,MAAM,IAAIiB,SAAS,IAAIC,OAAO,EAAE;IAC/B,IAAMa,SAAS,GAAGb,OAAO,CAAC1C,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;MAC5C4C,UAAU,GAAG1B,QAAQ,CAAC9B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;MAC5C6C,QAAQ,GAAG3B,QAAQ,CAAC9B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;;IAE9C;IACA;IACA,IAAM8C,UAAU;IAAG;IAAA;IAAA;IAAAV;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,mBAAmB;IAAA;IAAA,CAACO,SAAS,EAAEC,UAAU,CAAC;IAC7D1B,QAAQ,CAAC9B,KAAK;IAAG;IAAA;IAAA;IAAAkD;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,YAAY;IAAA;IAAA,CAACpB,QAAQ,CAAC9B,KAAK,EAAE0D,UAAU,CAAC;;IAEzD;IACA;IACA;IACA,IAAMC,QAAQ;IAAG;IAAA;IAAA;IAAAP;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,mBAAmB;IAAA;IAAA;IAClC;IAAA;IAAA;IAAAF;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,YAAY;IAAA;IAAA,CAACK,SAAS,EAAEG,UAAU,CAAC,EACnCD,QACF,CAAC;IACD3B,QAAQ,CAAC9B,KAAK;IAAG;IAAA;IAAA;IAAAsD;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,YAAY;IAAA;IAAA,CAACxB,QAAQ,CAAC9B,KAAK,EAAE2D,QAAQ,CAAC;IACvDjB,OAAO,CAAC1C,KAAK;IAAG;IAAA;IAAA;IAAAqD;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,aAAa;IAAA;IAAA,CAACX,OAAO,CAAC1C,KAAK,EAAEuD,SAAS,EAAEI,QAAQ,CAAC;;IAEjE;IACA;IACAlB,SAAS,CAACzC,KAAK;IAAG;IAAA;IAAA;IAAAiD;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,aAAa;IAAA;IAAA,CAC7BR,SAAS,CAACzC,KAAK,EACfuD,SAAS,EACTA,SAAS,CAACK,KAAK,CAAC,CAAC,EAAEL,SAAS,CAACrD,MAAM,GAAGyD,QAAQ,CAACzD,MAAM,CACvD,CAAC;EACH,CAAC,MAAM,IAAIwC,OAAO,EAAE;IAClB;IACA;IACA;IACA,IAAMmB,eAAe,GAAGnB,OAAO,CAAC1C,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACtD,IAAMkD,gBAAgB,GAAGhC,QAAQ,CAAC9B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACxD,IAAMmD,OAAO;IAAG;IAAA;IAAA;IAAAC;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,cAAc;IAAA;IAAA,CAACF,gBAAgB,EAAED,eAAe,CAAC;IACjE/B,QAAQ,CAAC9B,KAAK;IAAG;IAAA;IAAA;IAAAsD;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,YAAY;IAAA;IAAA,CAACxB,QAAQ,CAAC9B,KAAK,EAAE+D,OAAO,CAAC;EACxD,CAAC,MAAM,IAAItB,SAAS,EAAE;IACpB;IACA;IACA;IACA,IAAMwB,iBAAiB,GAAGxB,SAAS,CAACzC,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IAC1D,IAAMsD,gBAAgB,GAAGpC,QAAQ,CAAC9B,KAAK,CAACY,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,CAAC;IACxD,IAAMmD,QAAO;IAAG;IAAA;IAAA;IAAAC;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,cAAc;IAAA;IAAA,CAACC,iBAAiB,EAAEC,gBAAgB,CAAC;IACnEpC,QAAQ,CAAC9B,KAAK;IAAG;IAAA;IAAA;IAAAkD;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,YAAY;IAAA;IAAA,CAACpB,QAAQ,CAAC9B,KAAK,EAAE+D,QAAO,CAAC;EACxD;AACF;AAGO,IAAMI,iBAAiB;AAAA;AAAA7E,OAAA,CAAA6E,iBAAA;AAAA;AAAG;AAAI5E;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA;AAAAA,CAAI,CAAC,CAAC;AAC3C4E,iBAAiB,CAACpE,QAAQ,GAAG,UAASC,KAAK,EAAE;EAC3C;EACA;EACA;EACA;EACA;EACA,IAAMoE,KAAK,GAAG,IAAIjF,MAAM;EAAA;EAAA,cAAAC,MAAA;EAAA;EAAeH,iBAAiB,yBAAAG,MAAA,CAAsBH,iBAAiB,QAAK,IAAI,CAAC;EACzG,OAAOe,KAAK,CAACY,KAAK,CAACwD,KAAK,CAAC,IAAI,EAAE;AACjC,CAAC;AACM,SAAS7B,kBAAkBA,CAACH,MAAM,EAAEC,MAAM,EAAE1C,OAAO,EAAE;EAC1D,OAAOwE,iBAAiB,CAAC3B,IAAI,CAACJ,MAAM,EAAEC,MAAM,EAAE1C,OAAO,CAAC;AACxD","ignoreList":[]}
diff --git a/node_modules/diff/lib/index.es6.js b/node_modules/diff/lib/index.es6.js
deleted file mode 100644
index 6e872723d8581..0000000000000
--- a/node_modules/diff/lib/index.es6.js
+++ /dev/null
@@ -1,2041 +0,0 @@
-function Diff() {}
-Diff.prototype = {
-  diff: function diff(oldString, newString) {
-    var _options$timeout;
-    var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-    var callback = options.callback;
-    if (typeof options === 'function') {
-      callback = options;
-      options = {};
-    }
-    var self = this;
-    function done(value) {
-      value = self.postProcess(value, options);
-      if (callback) {
-        setTimeout(function () {
-          callback(value);
-        }, 0);
-        return true;
-      } else {
-        return value;
-      }
-    }
-
-    // Allow subclasses to massage the input prior to running
-    oldString = this.castInput(oldString, options);
-    newString = this.castInput(newString, options);
-    oldString = this.removeEmpty(this.tokenize(oldString, options));
-    newString = this.removeEmpty(this.tokenize(newString, options));
-    var newLen = newString.length,
-      oldLen = oldString.length;
-    var editLength = 1;
-    var maxEditLength = newLen + oldLen;
-    if (options.maxEditLength != null) {
-      maxEditLength = Math.min(maxEditLength, options.maxEditLength);
-    }
-    var maxExecutionTime = (_options$timeout = options.timeout) !== null && _options$timeout !== void 0 ? _options$timeout : Infinity;
-    var abortAfterTimestamp = Date.now() + maxExecutionTime;
-    var bestPath = [{
-      oldPos: -1,
-      lastComponent: undefined
-    }];
-
-    // Seed editLength = 0, i.e. the content starts with the same values
-    var newPos = this.extractCommon(bestPath[0], newString, oldString, 0, options);
-    if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-      // Identity per the equality and tokenizer
-      return done(buildValues(self, bestPath[0].lastComponent, newString, oldString, self.useLongestToken));
-    }
-
-    // Once we hit the right edge of the edit graph on some diagonal k, we can
-    // definitely reach the end of the edit graph in no more than k edits, so
-    // there's no point in considering any moves to diagonal k+1 any more (from
-    // which we're guaranteed to need at least k+1 more edits).
-    // Similarly, once we've reached the bottom of the edit graph, there's no
-    // point considering moves to lower diagonals.
-    // We record this fact by setting minDiagonalToConsider and
-    // maxDiagonalToConsider to some finite value once we've hit the edge of
-    // the edit graph.
-    // This optimization is not faithful to the original algorithm presented in
-    // Myers's paper, which instead pointlessly extends D-paths off the end of
-    // the edit graph - see page 7 of Myers's paper which notes this point
-    // explicitly and illustrates it with a diagram. This has major performance
-    // implications for some common scenarios. For instance, to compute a diff
-    // where the new text simply appends d characters on the end of the
-    // original text of length n, the true Myers algorithm will take O(n+d^2)
-    // time while this optimization needs only O(n+d) time.
-    var minDiagonalToConsider = -Infinity,
-      maxDiagonalToConsider = Infinity;
-
-    // Main worker method. checks all permutations of a given edit length for acceptance.
-    function execEditLength() {
-      for (var diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
-        var basePath = void 0;
-        var removePath = bestPath[diagonalPath - 1],
-          addPath = bestPath[diagonalPath + 1];
-        if (removePath) {
-          // No one else is going to attempt to use this value, clear it
-          bestPath[diagonalPath - 1] = undefined;
-        }
-        var canAdd = false;
-        if (addPath) {
-          // what newPos will be after we do an insertion:
-          var addPathNewPos = addPath.oldPos - diagonalPath;
-          canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
-        }
-        var canRemove = removePath && removePath.oldPos + 1 < oldLen;
-        if (!canAdd && !canRemove) {
-          // If this path is a terminal then prune
-          bestPath[diagonalPath] = undefined;
-          continue;
-        }
-
-        // Select the diagonal that we want to branch from. We select the prior
-        // path whose position in the old string is the farthest from the origin
-        // and does not pass the bounds of the diff graph
-        if (!canRemove || canAdd && removePath.oldPos < addPath.oldPos) {
-          basePath = self.addToPath(addPath, true, false, 0, options);
-        } else {
-          basePath = self.addToPath(removePath, false, true, 1, options);
-        }
-        newPos = self.extractCommon(basePath, newString, oldString, diagonalPath, options);
-        if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-          // If we have hit the end of both strings, then we are done
-          return done(buildValues(self, basePath.lastComponent, newString, oldString, self.useLongestToken));
-        } else {
-          bestPath[diagonalPath] = basePath;
-          if (basePath.oldPos + 1 >= oldLen) {
-            maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
-          }
-          if (newPos + 1 >= newLen) {
-            minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
-          }
-        }
-      }
-      editLength++;
-    }
-
-    // Performs the length of edit iteration. Is a bit fugly as this has to support the
-    // sync and async mode which is never fun. Loops over execEditLength until a value
-    // is produced, or until the edit length exceeds options.maxEditLength (if given),
-    // in which case it will return undefined.
-    if (callback) {
-      (function exec() {
-        setTimeout(function () {
-          if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
-            return callback();
-          }
-          if (!execEditLength()) {
-            exec();
-          }
-        }, 0);
-      })();
-    } else {
-      while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
-        var ret = execEditLength();
-        if (ret) {
-          return ret;
-        }
-      }
-    }
-  },
-  addToPath: function addToPath(path, added, removed, oldPosInc, options) {
-    var last = path.lastComponent;
-    if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
-      return {
-        oldPos: path.oldPos + oldPosInc,
-        lastComponent: {
-          count: last.count + 1,
-          added: added,
-          removed: removed,
-          previousComponent: last.previousComponent
-        }
-      };
-    } else {
-      return {
-        oldPos: path.oldPos + oldPosInc,
-        lastComponent: {
-          count: 1,
-          added: added,
-          removed: removed,
-          previousComponent: last
-        }
-      };
-    }
-  },
-  extractCommon: function extractCommon(basePath, newString, oldString, diagonalPath, options) {
-    var newLen = newString.length,
-      oldLen = oldString.length,
-      oldPos = basePath.oldPos,
-      newPos = oldPos - diagonalPath,
-      commonCount = 0;
-    while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldString[oldPos + 1], newString[newPos + 1], options)) {
-      newPos++;
-      oldPos++;
-      commonCount++;
-      if (options.oneChangePerToken) {
-        basePath.lastComponent = {
-          count: 1,
-          previousComponent: basePath.lastComponent,
-          added: false,
-          removed: false
-        };
-      }
-    }
-    if (commonCount && !options.oneChangePerToken) {
-      basePath.lastComponent = {
-        count: commonCount,
-        previousComponent: basePath.lastComponent,
-        added: false,
-        removed: false
-      };
-    }
-    basePath.oldPos = oldPos;
-    return newPos;
-  },
-  equals: function equals(left, right, options) {
-    if (options.comparator) {
-      return options.comparator(left, right);
-    } else {
-      return left === right || options.ignoreCase && left.toLowerCase() === right.toLowerCase();
-    }
-  },
-  removeEmpty: function removeEmpty(array) {
-    var ret = [];
-    for (var i = 0; i < array.length; i++) {
-      if (array[i]) {
-        ret.push(array[i]);
-      }
-    }
-    return ret;
-  },
-  castInput: function castInput(value) {
-    return value;
-  },
-  tokenize: function tokenize(value) {
-    return Array.from(value);
-  },
-  join: function join(chars) {
-    return chars.join('');
-  },
-  postProcess: function postProcess(changeObjects) {
-    return changeObjects;
-  }
-};
-function buildValues(diff, lastComponent, newString, oldString, useLongestToken) {
-  // First we convert our linked list of components in reverse order to an
-  // array in the right order:
-  var components = [];
-  var nextComponent;
-  while (lastComponent) {
-    components.push(lastComponent);
-    nextComponent = lastComponent.previousComponent;
-    delete lastComponent.previousComponent;
-    lastComponent = nextComponent;
-  }
-  components.reverse();
-  var componentPos = 0,
-    componentLen = components.length,
-    newPos = 0,
-    oldPos = 0;
-  for (; componentPos < componentLen; componentPos++) {
-    var component = components[componentPos];
-    if (!component.removed) {
-      if (!component.added && useLongestToken) {
-        var value = newString.slice(newPos, newPos + component.count);
-        value = value.map(function (value, i) {
-          var oldValue = oldString[oldPos + i];
-          return oldValue.length > value.length ? oldValue : value;
-        });
-        component.value = diff.join(value);
-      } else {
-        component.value = diff.join(newString.slice(newPos, newPos + component.count));
-      }
-      newPos += component.count;
-
-      // Common case
-      if (!component.added) {
-        oldPos += component.count;
-      }
-    } else {
-      component.value = diff.join(oldString.slice(oldPos, oldPos + component.count));
-      oldPos += component.count;
-    }
-  }
-  return components;
-}
-
-var characterDiff = new Diff();
-function diffChars(oldStr, newStr, options) {
-  return characterDiff.diff(oldStr, newStr, options);
-}
-
-function longestCommonPrefix(str1, str2) {
-  var i;
-  for (i = 0; i < str1.length && i < str2.length; i++) {
-    if (str1[i] != str2[i]) {
-      return str1.slice(0, i);
-    }
-  }
-  return str1.slice(0, i);
-}
-function longestCommonSuffix(str1, str2) {
-  var i;
-
-  // Unlike longestCommonPrefix, we need a special case to handle all scenarios
-  // where we return the empty string since str1.slice(-0) will return the
-  // entire string.
-  if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
-    return '';
-  }
-  for (i = 0; i < str1.length && i < str2.length; i++) {
-    if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
-      return str1.slice(-i);
-    }
-  }
-  return str1.slice(-i);
-}
-function replacePrefix(string, oldPrefix, newPrefix) {
-  if (string.slice(0, oldPrefix.length) != oldPrefix) {
-    throw Error("string ".concat(JSON.stringify(string), " doesn't start with prefix ").concat(JSON.stringify(oldPrefix), "; this is a bug"));
-  }
-  return newPrefix + string.slice(oldPrefix.length);
-}
-function replaceSuffix(string, oldSuffix, newSuffix) {
-  if (!oldSuffix) {
-    return string + newSuffix;
-  }
-  if (string.slice(-oldSuffix.length) != oldSuffix) {
-    throw Error("string ".concat(JSON.stringify(string), " doesn't end with suffix ").concat(JSON.stringify(oldSuffix), "; this is a bug"));
-  }
-  return string.slice(0, -oldSuffix.length) + newSuffix;
-}
-function removePrefix(string, oldPrefix) {
-  return replacePrefix(string, oldPrefix, '');
-}
-function removeSuffix(string, oldSuffix) {
-  return replaceSuffix(string, oldSuffix, '');
-}
-function maximumOverlap(string1, string2) {
-  return string2.slice(0, overlapCount(string1, string2));
-}
-
-// Nicked from https://stackoverflow.com/a/60422853/1709587
-function overlapCount(a, b) {
-  // Deal with cases where the strings differ in length
-  var startA = 0;
-  if (a.length > b.length) {
-    startA = a.length - b.length;
-  }
-  var endB = b.length;
-  if (a.length < b.length) {
-    endB = a.length;
-  }
-  // Create a back-reference for each index
-  //   that should be followed in case of a mismatch.
-  //   We only need B to make these references:
-  var map = Array(endB);
-  var k = 0; // Index that lags behind j
-  map[0] = 0;
-  for (var j = 1; j < endB; j++) {
-    if (b[j] == b[k]) {
-      map[j] = map[k]; // skip over the same character (optional optimisation)
-    } else {
-      map[j] = k;
-    }
-    while (k > 0 && b[j] != b[k]) {
-      k = map[k];
-    }
-    if (b[j] == b[k]) {
-      k++;
-    }
-  }
-  // Phase 2: use these references while iterating over A
-  k = 0;
-  for (var i = startA; i < a.length; i++) {
-    while (k > 0 && a[i] != b[k]) {
-      k = map[k];
-    }
-    if (a[i] == b[k]) {
-      k++;
-    }
-  }
-  return k;
-}
-
-/**
- * Returns true if the string consistently uses Windows line endings.
- */
-function hasOnlyWinLineEndings(string) {
-  return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
-}
-
-/**
- * Returns true if the string consistently uses Unix line endings.
- */
-function hasOnlyUnixLineEndings(string) {
-  return !string.includes('\r\n') && string.includes('\n');
-}
-
-// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
-//
-// Ranges and exceptions:
-// Latin-1 Supplement, 0080–00FF
-//  - U+00D7  × Multiplication sign
-//  - U+00F7  ÷ Division sign
-// Latin Extended-A, 0100–017F
-// Latin Extended-B, 0180–024F
-// IPA Extensions, 0250–02AF
-// Spacing Modifier Letters, 02B0–02FF
-//  - U+02C7  ˇ ˇ  Caron
-//  - U+02D8  ˘ ˘  Breve
-//  - U+02D9  ˙ ˙  Dot Above
-//  - U+02DA  ˚ ˚  Ring Above
-//  - U+02DB  ˛ ˛  Ogonek
-//  - U+02DC  ˜ ˜  Small Tilde
-//  - U+02DD  ˝ ˝  Double Acute Accent
-// Latin Extended Additional, 1E00–1EFF
-var extendedWordChars = "a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}";
-
-// Each token is one of the following:
-// - A punctuation mark plus the surrounding whitespace
-// - A word plus the surrounding whitespace
-// - Pure whitespace (but only in the special case where this the entire text
-//   is just whitespace)
-//
-// We have to include surrounding whitespace in the tokens because the two
-// alternative approaches produce horribly broken results:
-// * If we just discard the whitespace, we can't fully reproduce the original
-//   text from the sequence of tokens and any attempt to render the diff will
-//   get the whitespace wrong.
-// * If we have separate tokens for whitespace, then in a typical text every
-//   second token will be a single space character. But this often results in
-//   the optimal diff between two texts being a perverse one that preserves
-//   the spaces between words but deletes and reinserts actual common words.
-//   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
-//   for an example.
-//
-// Keeping the surrounding whitespace of course has implications for .equals
-// and .join, not just .tokenize.
-
-// This regex does NOT fully implement the tokenization rules described above.
-// Instead, it gives runs of whitespace their own "token". The tokenize method
-// then handles stitching whitespace tokens onto adjacent word or punctuation
-// tokens.
-var tokenizeIncludingWhitespace = new RegExp("[".concat(extendedWordChars, "]+|\\s+|[^").concat(extendedWordChars, "]"), 'ug');
-var wordDiff = new Diff();
-wordDiff.equals = function (left, right, options) {
-  if (options.ignoreCase) {
-    left = left.toLowerCase();
-    right = right.toLowerCase();
-  }
-  return left.trim() === right.trim();
-};
-wordDiff.tokenize = function (value) {
-  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-  var parts;
-  if (options.intlSegmenter) {
-    if (options.intlSegmenter.resolvedOptions().granularity != 'word') {
-      throw new Error('The segmenter passed must have a granularity of "word"');
-    }
-    parts = Array.from(options.intlSegmenter.segment(value), function (segment) {
-      return segment.segment;
-    });
-  } else {
-    parts = value.match(tokenizeIncludingWhitespace) || [];
-  }
-  var tokens = [];
-  var prevPart = null;
-  parts.forEach(function (part) {
-    if (/\s/.test(part)) {
-      if (prevPart == null) {
-        tokens.push(part);
-      } else {
-        tokens.push(tokens.pop() + part);
-      }
-    } else if (/\s/.test(prevPart)) {
-      if (tokens[tokens.length - 1] == prevPart) {
-        tokens.push(tokens.pop() + part);
-      } else {
-        tokens.push(prevPart + part);
-      }
-    } else {
-      tokens.push(part);
-    }
-    prevPart = part;
-  });
-  return tokens;
-};
-wordDiff.join = function (tokens) {
-  // Tokens being joined here will always have appeared consecutively in the
-  // same text, so we can simply strip off the leading whitespace from all the
-  // tokens except the first (and except any whitespace-only tokens - but such
-  // a token will always be the first and only token anyway) and then join them
-  // and the whitespace around words and punctuation will end up correct.
-  return tokens.map(function (token, i) {
-    if (i == 0) {
-      return token;
-    } else {
-      return token.replace(/^\s+/, '');
-    }
-  }).join('');
-};
-wordDiff.postProcess = function (changes, options) {
-  if (!changes || options.oneChangePerToken) {
-    return changes;
-  }
-  var lastKeep = null;
-  // Change objects representing any insertion or deletion since the last
-  // "keep" change object. There can be at most one of each.
-  var insertion = null;
-  var deletion = null;
-  changes.forEach(function (change) {
-    if (change.added) {
-      insertion = change;
-    } else if (change.removed) {
-      deletion = change;
-    } else {
-      if (insertion || deletion) {
-        // May be false at start of text
-        dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
-      }
-      lastKeep = change;
-      insertion = null;
-      deletion = null;
-    }
-  });
-  if (insertion || deletion) {
-    dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
-  }
-  return changes;
-};
-function diffWords(oldStr, newStr, options) {
-  // This option has never been documented and never will be (it's clearer to
-  // just call `diffWordsWithSpace` directly if you need that behavior), but
-  // has existed in jsdiff for a long time, so we retain support for it here
-  // for the sake of backwards compatibility.
-  if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
-    return diffWordsWithSpace(oldStr, newStr, options);
-  }
-  return wordDiff.diff(oldStr, newStr, options);
-}
-function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
-  // Before returning, we tidy up the leading and trailing whitespace of the
-  // change objects to eliminate cases where trailing whitespace in one object
-  // is repeated as leading whitespace in the next.
-  // Below are examples of the outcomes we want here to explain the code.
-  // I=insert, K=keep, D=delete
-  // 1. diffing 'foo bar baz' vs 'foo baz'
-  //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
-  //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
-  //
-  // 2. Diffing 'foo bar baz' vs 'foo qux baz'
-  //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
-  //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
-  //
-  // 3. Diffing 'foo\nbar baz' vs 'foo baz'
-  //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
-  //    After cleanup, we want K'foo' D:'\nbar' K:' baz'
-  //
-  // 4. Diffing 'foo baz' vs 'foo\nbar baz'
-  //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
-  //    After cleanup, we ideally want K'foo' I:'\nbar' K:' baz'
-  //    but don't actually manage this currently (the pre-cleanup change
-  //    objects don't contain enough information to make it possible).
-  //
-  // 5. Diffing 'foo   bar baz' vs 'foo  baz'
-  //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
-  //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
-  //
-  // Our handling is unavoidably imperfect in the case where there's a single
-  // indel between keeps and the whitespace has changed. For instance, consider
-  // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
-  // object to represent the insertion of the space character (which isn't even
-  // a token), we have no way to avoid losing information about the texts'
-  // original whitespace in the result we return. Still, we do our best to
-  // output something that will look sensible if we e.g. print it with
-  // insertions in green and deletions in red.
-
-  // Between two "keep" change objects (or before the first or after the last
-  // change object), we can have either:
-  // * A "delete" followed by an "insert"
-  // * Just an "insert"
-  // * Just a "delete"
-  // We handle the three cases separately.
-  if (deletion && insertion) {
-    var oldWsPrefix = deletion.value.match(/^\s*/)[0];
-    var oldWsSuffix = deletion.value.match(/\s*$/)[0];
-    var newWsPrefix = insertion.value.match(/^\s*/)[0];
-    var newWsSuffix = insertion.value.match(/\s*$/)[0];
-    if (startKeep) {
-      var commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
-      startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
-      deletion.value = removePrefix(deletion.value, commonWsPrefix);
-      insertion.value = removePrefix(insertion.value, commonWsPrefix);
-    }
-    if (endKeep) {
-      var commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
-      endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
-      deletion.value = removeSuffix(deletion.value, commonWsSuffix);
-      insertion.value = removeSuffix(insertion.value, commonWsSuffix);
-    }
-  } else if (insertion) {
-    // The whitespaces all reflect what was in the new text rather than
-    // the old, so we essentially have no information about whitespace
-    // insertion or deletion. We just want to dedupe the whitespace.
-    // We do that by having each change object keep its trailing
-    // whitespace and deleting duplicate leading whitespace where
-    // present.
-    if (startKeep) {
-      insertion.value = insertion.value.replace(/^\s*/, '');
-    }
-    if (endKeep) {
-      endKeep.value = endKeep.value.replace(/^\s*/, '');
-    }
-    // otherwise we've got a deletion and no insertion
-  } else if (startKeep && endKeep) {
-    var newWsFull = endKeep.value.match(/^\s*/)[0],
-      delWsStart = deletion.value.match(/^\s*/)[0],
-      delWsEnd = deletion.value.match(/\s*$/)[0];
-
-    // Any whitespace that comes straight after startKeep in both the old and
-    // new texts, assign to startKeep and remove from the deletion.
-    var newWsStart = longestCommonPrefix(newWsFull, delWsStart);
-    deletion.value = removePrefix(deletion.value, newWsStart);
-
-    // Any whitespace that comes straight before endKeep in both the old and
-    // new texts, and hasn't already been assigned to startKeep, assign to
-    // endKeep and remove from the deletion.
-    var newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
-    deletion.value = removeSuffix(deletion.value, newWsEnd);
-    endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
-
-    // If there's any whitespace from the new text that HASN'T already been
-    // assigned, assign it to the start:
-    startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
-  } else if (endKeep) {
-    // We are at the start of the text. Preserve all the whitespace on
-    // endKeep, and just remove whitespace from the end of deletion to the
-    // extent that it overlaps with the start of endKeep.
-    var endKeepWsPrefix = endKeep.value.match(/^\s*/)[0];
-    var deletionWsSuffix = deletion.value.match(/\s*$/)[0];
-    var overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
-    deletion.value = removeSuffix(deletion.value, overlap);
-  } else if (startKeep) {
-    // We are at the END of the text. Preserve all the whitespace on
-    // startKeep, and just remove whitespace from the start of deletion to
-    // the extent that it overlaps with the end of startKeep.
-    var startKeepWsSuffix = startKeep.value.match(/\s*$/)[0];
-    var deletionWsPrefix = deletion.value.match(/^\s*/)[0];
-    var _overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
-    deletion.value = removePrefix(deletion.value, _overlap);
-  }
-}
-var wordWithSpaceDiff = new Diff();
-wordWithSpaceDiff.tokenize = function (value) {
-  // Slightly different to the tokenizeIncludingWhitespace regex used above in
-  // that this one treats each individual newline as a distinct tokens, rather
-  // than merging them into other surrounding whitespace. This was requested
-  // in https://github.com/kpdecker/jsdiff/issues/180 &
-  //    https://github.com/kpdecker/jsdiff/issues/211
-  var regex = new RegExp("(\\r?\\n)|[".concat(extendedWordChars, "]+|[^\\S\\n\\r]+|[^").concat(extendedWordChars, "]"), 'ug');
-  return value.match(regex) || [];
-};
-function diffWordsWithSpace(oldStr, newStr, options) {
-  return wordWithSpaceDiff.diff(oldStr, newStr, options);
-}
-
-function generateOptions(options, defaults) {
-  if (typeof options === 'function') {
-    defaults.callback = options;
-  } else if (options) {
-    for (var name in options) {
-      /* istanbul ignore else */
-      if (options.hasOwnProperty(name)) {
-        defaults[name] = options[name];
-      }
-    }
-  }
-  return defaults;
-}
-
-var lineDiff = new Diff();
-lineDiff.tokenize = function (value, options) {
-  if (options.stripTrailingCr) {
-    // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
-    value = value.replace(/\r\n/g, '\n');
-  }
-  var retLines = [],
-    linesAndNewlines = value.split(/(\n|\r\n)/);
-
-  // Ignore the final empty token that occurs if the string ends with a new line
-  if (!linesAndNewlines[linesAndNewlines.length - 1]) {
-    linesAndNewlines.pop();
-  }
-
-  // Merge the content and line separators into single tokens
-  for (var i = 0; i < linesAndNewlines.length; i++) {
-    var line = linesAndNewlines[i];
-    if (i % 2 && !options.newlineIsToken) {
-      retLines[retLines.length - 1] += line;
-    } else {
-      retLines.push(line);
-    }
-  }
-  return retLines;
-};
-lineDiff.equals = function (left, right, options) {
-  // If we're ignoring whitespace, we need to normalise lines by stripping
-  // whitespace before checking equality. (This has an annoying interaction
-  // with newlineIsToken that requires special handling: if newlines get their
-  // own token, then we DON'T want to trim the *newline* tokens down to empty
-  // strings, since this would cause us to treat whitespace-only line content
-  // as equal to a separator between lines, which would be weird and
-  // inconsistent with the documented behavior of the options.)
-  if (options.ignoreWhitespace) {
-    if (!options.newlineIsToken || !left.includes('\n')) {
-      left = left.trim();
-    }
-    if (!options.newlineIsToken || !right.includes('\n')) {
-      right = right.trim();
-    }
-  } else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
-    if (left.endsWith('\n')) {
-      left = left.slice(0, -1);
-    }
-    if (right.endsWith('\n')) {
-      right = right.slice(0, -1);
-    }
-  }
-  return Diff.prototype.equals.call(this, left, right, options);
-};
-function diffLines(oldStr, newStr, callback) {
-  return lineDiff.diff(oldStr, newStr, callback);
-}
-
-// Kept for backwards compatibility. This is a rather arbitrary wrapper method
-// that just calls `diffLines` with `ignoreWhitespace: true`. It's confusing to
-// have two ways to do exactly the same thing in the API, so we no longer
-// document this one (library users should explicitly use `diffLines` with
-// `ignoreWhitespace: true` instead) but we keep it around to maintain
-// compatibility with code that used old versions.
-function diffTrimmedLines(oldStr, newStr, callback) {
-  var options = generateOptions(callback, {
-    ignoreWhitespace: true
-  });
-  return lineDiff.diff(oldStr, newStr, options);
-}
-
-var sentenceDiff = new Diff();
-sentenceDiff.tokenize = function (value) {
-  return value.split(/(\S.+?[.!?])(?=\s+|$)/);
-};
-function diffSentences(oldStr, newStr, callback) {
-  return sentenceDiff.diff(oldStr, newStr, callback);
-}
-
-var cssDiff = new Diff();
-cssDiff.tokenize = function (value) {
-  return value.split(/([{}:;,]|\s+)/);
-};
-function diffCss(oldStr, newStr, callback) {
-  return cssDiff.diff(oldStr, newStr, callback);
-}
-
-function ownKeys(e, r) {
-  var t = Object.keys(e);
-  if (Object.getOwnPropertySymbols) {
-    var o = Object.getOwnPropertySymbols(e);
-    r && (o = o.filter(function (r) {
-      return Object.getOwnPropertyDescriptor(e, r).enumerable;
-    })), t.push.apply(t, o);
-  }
-  return t;
-}
-function _objectSpread2(e) {
-  for (var r = 1; r < arguments.length; r++) {
-    var t = null != arguments[r] ? arguments[r] : {};
-    r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
-      _defineProperty(e, r, t[r]);
-    }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
-      Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
-    });
-  }
-  return e;
-}
-function _toPrimitive(t, r) {
-  if ("object" != typeof t || !t) return t;
-  var e = t[Symbol.toPrimitive];
-  if (void 0 !== e) {
-    var i = e.call(t, r || "default");
-    if ("object" != typeof i) return i;
-    throw new TypeError("@@toPrimitive must return a primitive value.");
-  }
-  return ("string" === r ? String : Number)(t);
-}
-function _toPropertyKey(t) {
-  var i = _toPrimitive(t, "string");
-  return "symbol" == typeof i ? i : i + "";
-}
-function _typeof(o) {
-  "@babel/helpers - typeof";
-
-  return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) {
-    return typeof o;
-  } : function (o) {
-    return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o;
-  }, _typeof(o);
-}
-function _defineProperty(obj, key, value) {
-  key = _toPropertyKey(key);
-  if (key in obj) {
-    Object.defineProperty(obj, key, {
-      value: value,
-      enumerable: true,
-      configurable: true,
-      writable: true
-    });
-  } else {
-    obj[key] = value;
-  }
-  return obj;
-}
-function _toConsumableArray(arr) {
-  return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread();
-}
-function _arrayWithoutHoles(arr) {
-  if (Array.isArray(arr)) return _arrayLikeToArray(arr);
-}
-function _iterableToArray(iter) {
-  if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter);
-}
-function _unsupportedIterableToArray(o, minLen) {
-  if (!o) return;
-  if (typeof o === "string") return _arrayLikeToArray(o, minLen);
-  var n = Object.prototype.toString.call(o).slice(8, -1);
-  if (n === "Object" && o.constructor) n = o.constructor.name;
-  if (n === "Map" || n === "Set") return Array.from(o);
-  if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
-}
-function _arrayLikeToArray(arr, len) {
-  if (len == null || len > arr.length) len = arr.length;
-  for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
-  return arr2;
-}
-function _nonIterableSpread() {
-  throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
-}
-
-var jsonDiff = new Diff();
-// Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
-// dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
-jsonDiff.useLongestToken = true;
-jsonDiff.tokenize = lineDiff.tokenize;
-jsonDiff.castInput = function (value, options) {
-  var undefinedReplacement = options.undefinedReplacement,
-    _options$stringifyRep = options.stringifyReplacer,
-    stringifyReplacer = _options$stringifyRep === void 0 ? function (k, v) {
-      return typeof v === 'undefined' ? undefinedReplacement : v;
-    } : _options$stringifyRep;
-  return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), stringifyReplacer, '  ');
-};
-jsonDiff.equals = function (left, right, options) {
-  return Diff.prototype.equals.call(jsonDiff, left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options);
-};
-function diffJson(oldObj, newObj, options) {
-  return jsonDiff.diff(oldObj, newObj, options);
-}
-
-// This function handles the presence of circular references by bailing out when encountering an
-// object that is already on the "stack" of items being processed. Accepts an optional replacer
-function canonicalize(obj, stack, replacementStack, replacer, key) {
-  stack = stack || [];
-  replacementStack = replacementStack || [];
-  if (replacer) {
-    obj = replacer(key, obj);
-  }
-  var i;
-  for (i = 0; i < stack.length; i += 1) {
-    if (stack[i] === obj) {
-      return replacementStack[i];
-    }
-  }
-  var canonicalizedObj;
-  if ('[object Array]' === Object.prototype.toString.call(obj)) {
-    stack.push(obj);
-    canonicalizedObj = new Array(obj.length);
-    replacementStack.push(canonicalizedObj);
-    for (i = 0; i < obj.length; i += 1) {
-      canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, key);
-    }
-    stack.pop();
-    replacementStack.pop();
-    return canonicalizedObj;
-  }
-  if (obj && obj.toJSON) {
-    obj = obj.toJSON();
-  }
-  if (_typeof(obj) === 'object' && obj !== null) {
-    stack.push(obj);
-    canonicalizedObj = {};
-    replacementStack.push(canonicalizedObj);
-    var sortedKeys = [],
-      _key;
-    for (_key in obj) {
-      /* istanbul ignore else */
-      if (Object.prototype.hasOwnProperty.call(obj, _key)) {
-        sortedKeys.push(_key);
-      }
-    }
-    sortedKeys.sort();
-    for (i = 0; i < sortedKeys.length; i += 1) {
-      _key = sortedKeys[i];
-      canonicalizedObj[_key] = canonicalize(obj[_key], stack, replacementStack, replacer, _key);
-    }
-    stack.pop();
-    replacementStack.pop();
-  } else {
-    canonicalizedObj = obj;
-  }
-  return canonicalizedObj;
-}
-
-var arrayDiff = new Diff();
-arrayDiff.tokenize = function (value) {
-  return value.slice();
-};
-arrayDiff.join = arrayDiff.removeEmpty = function (value) {
-  return value;
-};
-function diffArrays(oldArr, newArr, callback) {
-  return arrayDiff.diff(oldArr, newArr, callback);
-}
-
-function unixToWin(patch) {
-  if (Array.isArray(patch)) {
-    return patch.map(unixToWin);
-  }
-  return _objectSpread2(_objectSpread2({}, patch), {}, {
-    hunks: patch.hunks.map(function (hunk) {
-      return _objectSpread2(_objectSpread2({}, hunk), {}, {
-        lines: hunk.lines.map(function (line, i) {
-          var _hunk$lines;
-          return line.startsWith('\\') || line.endsWith('\r') || (_hunk$lines = hunk.lines[i + 1]) !== null && _hunk$lines !== void 0 && _hunk$lines.startsWith('\\') ? line : line + '\r';
-        })
-      });
-    })
-  });
-}
-function winToUnix(patch) {
-  if (Array.isArray(patch)) {
-    return patch.map(winToUnix);
-  }
-  return _objectSpread2(_objectSpread2({}, patch), {}, {
-    hunks: patch.hunks.map(function (hunk) {
-      return _objectSpread2(_objectSpread2({}, hunk), {}, {
-        lines: hunk.lines.map(function (line) {
-          return line.endsWith('\r') ? line.substring(0, line.length - 1) : line;
-        })
-      });
-    })
-  });
-}
-
-/**
- * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
- * no line endings).
- */
-function isUnix(patch) {
-  if (!Array.isArray(patch)) {
-    patch = [patch];
-  }
-  return !patch.some(function (index) {
-    return index.hunks.some(function (hunk) {
-      return hunk.lines.some(function (line) {
-        return !line.startsWith('\\') && line.endsWith('\r');
-      });
-    });
-  });
-}
-
-/**
- * Returns true if the patch uses Windows line endings and only Windows line endings.
- */
-function isWin(patch) {
-  if (!Array.isArray(patch)) {
-    patch = [patch];
-  }
-  return patch.some(function (index) {
-    return index.hunks.some(function (hunk) {
-      return hunk.lines.some(function (line) {
-        return line.endsWith('\r');
-      });
-    });
-  }) && patch.every(function (index) {
-    return index.hunks.every(function (hunk) {
-      return hunk.lines.every(function (line, i) {
-        var _hunk$lines2;
-        return line.startsWith('\\') || line.endsWith('\r') || ((_hunk$lines2 = hunk.lines[i + 1]) === null || _hunk$lines2 === void 0 ? void 0 : _hunk$lines2.startsWith('\\'));
-      });
-    });
-  });
-}
-
-function parsePatch(uniDiff) {
-  var diffstr = uniDiff.split(/\n/),
-    list = [],
-    i = 0;
-  function parseIndex() {
-    var index = {};
-    list.push(index);
-
-    // Parse diff metadata
-    while (i < diffstr.length) {
-      var line = diffstr[i];
-
-      // File header found, end parsing diff metadata
-      if (/^(\-\-\-|\+\+\+|@@)\s/.test(line)) {
-        break;
-      }
-
-      // Diff index
-      var header = /^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/.exec(line);
-      if (header) {
-        index.index = header[1];
-      }
-      i++;
-    }
-
-    // Parse file headers if they are defined. Unified diff requires them, but
-    // there's no technical issues to have an isolated hunk without file header
-    parseFileHeader(index);
-    parseFileHeader(index);
-
-    // Parse hunks
-    index.hunks = [];
-    while (i < diffstr.length) {
-      var _line = diffstr[i];
-      if (/^(Index:\s|diff\s|\-\-\-\s|\+\+\+\s|===================================================================)/.test(_line)) {
-        break;
-      } else if (/^@@/.test(_line)) {
-        index.hunks.push(parseHunk());
-      } else if (_line) {
-        throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(_line));
-      } else {
-        i++;
-      }
-    }
-  }
-
-  // Parses the --- and +++ headers, if none are found, no lines
-  // are consumed.
-  function parseFileHeader(index) {
-    var fileHeader = /^(---|\+\+\+)\s+(.*)\r?$/.exec(diffstr[i]);
-    if (fileHeader) {
-      var keyPrefix = fileHeader[1] === '---' ? 'old' : 'new';
-      var data = fileHeader[2].split('\t', 2);
-      var fileName = data[0].replace(/\\\\/g, '\\');
-      if (/^".*"$/.test(fileName)) {
-        fileName = fileName.substr(1, fileName.length - 2);
-      }
-      index[keyPrefix + 'FileName'] = fileName;
-      index[keyPrefix + 'Header'] = (data[1] || '').trim();
-      i++;
-    }
-  }
-
-  // Parses a hunk
-  // This assumes that we are at the start of a hunk.
-  function parseHunk() {
-    var chunkHeaderIndex = i,
-      chunkHeaderLine = diffstr[i++],
-      chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
-    var hunk = {
-      oldStart: +chunkHeader[1],
-      oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
-      newStart: +chunkHeader[3],
-      newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
-      lines: []
-    };
-
-    // Unified Diff Format quirk: If the chunk size is 0,
-    // the first number is one lower than one would expect.
-    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-    if (hunk.oldLines === 0) {
-      hunk.oldStart += 1;
-    }
-    if (hunk.newLines === 0) {
-      hunk.newStart += 1;
-    }
-    var addCount = 0,
-      removeCount = 0;
-    for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || (_diffstr$i = diffstr[i]) !== null && _diffstr$i !== void 0 && _diffstr$i.startsWith('\\')); i++) {
-      var _diffstr$i;
-      var operation = diffstr[i].length == 0 && i != diffstr.length - 1 ? ' ' : diffstr[i][0];
-      if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
-        hunk.lines.push(diffstr[i]);
-        if (operation === '+') {
-          addCount++;
-        } else if (operation === '-') {
-          removeCount++;
-        } else if (operation === ' ') {
-          addCount++;
-          removeCount++;
-        }
-      } else {
-        throw new Error("Hunk at line ".concat(chunkHeaderIndex + 1, " contained invalid line ").concat(diffstr[i]));
-      }
-    }
-
-    // Handle the empty block count case
-    if (!addCount && hunk.newLines === 1) {
-      hunk.newLines = 0;
-    }
-    if (!removeCount && hunk.oldLines === 1) {
-      hunk.oldLines = 0;
-    }
-
-    // Perform sanity checking
-    if (addCount !== hunk.newLines) {
-      throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-    }
-    if (removeCount !== hunk.oldLines) {
-      throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-    }
-    return hunk;
-  }
-  while (i < diffstr.length) {
-    parseIndex();
-  }
-  return list;
-}
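
A small usage sketch of parsePatch above, assuming the published `diff` entry point; it returns one index object per file, each carrying the parsed file names and hunks.

import { parsePatch } from 'diff';

const [index] = parsePatch(
  '--- a/greeting.txt\n+++ b/greeting.txt\n@@ -1,1 +1,1 @@\n-hello\n+hi\n'
);
console.log(index.oldFileName);    // 'a/greeting.txt'
console.log(index.hunks[0].lines); // ['-hello', '+hi']
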
-
-// Iterator that traverses in the range of [min, max], stepping
-// by distance from a given start position. I.e. for [0, 4], with
-// start of 2, this will iterate 2, 3, 1, 4, 0.
-function distanceIterator (start, minLine, maxLine) {
-  var wantForward = true,
-    backwardExhausted = false,
-    forwardExhausted = false,
-    localOffset = 1;
-  return function iterator() {
-    if (wantForward && !forwardExhausted) {
-      if (backwardExhausted) {
-        localOffset++;
-      } else {
-        wantForward = false;
-      }
-
-      // Check if trying to fit beyond text length, and if not, check it fits
-      // after offset location (or desired location on first iteration)
-      if (start + localOffset <= maxLine) {
-        return start + localOffset;
-      }
-      forwardExhausted = true;
-    }
-    if (!backwardExhausted) {
-      if (!forwardExhausted) {
-        wantForward = true;
-      }
-
-      // Check if trying to fit before text beginning, and if not, check it fits
-      // before offset location
-      if (minLine <= start - localOffset) {
-        return start - localOffset++;
-      }
-      backwardExhausted = true;
-      return iterator();
-    }
-
-    // We tried to fit hunk before text beginning and beyond text length, then
-    // hunk can't fit on the text. Return undefined
-  };
-}
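
distanceIterator is internal to this module, but exercising it directly makes the documented visiting order concrete; note that applyPatch tries the start position itself before asking the iterator for alternatives.

const next = distanceIterator(2, 0, 4);
const visited = [2]; // the caller tries the start position first
for (let pos = next(); pos !== undefined; pos = next()) {
  visited.push(pos);
}
console.log(visited); // [2, 3, 1, 4, 0]
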
-
-function applyPatch(source, uniDiff) {
-  var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-  if (typeof uniDiff === 'string') {
-    uniDiff = parsePatch(uniDiff);
-  }
-  if (Array.isArray(uniDiff)) {
-    if (uniDiff.length > 1) {
-      throw new Error('applyPatch only works with a single input.');
-    }
-    uniDiff = uniDiff[0];
-  }
-  if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
-    if (hasOnlyWinLineEndings(source) && isUnix(uniDiff)) {
-      uniDiff = unixToWin(uniDiff);
-    } else if (hasOnlyUnixLineEndings(source) && isWin(uniDiff)) {
-      uniDiff = winToUnix(uniDiff);
-    }
-  }
-
-  // Apply the diff to the input
-  var lines = source.split('\n'),
-    hunks = uniDiff.hunks,
-    compareLine = options.compareLine || function (lineNumber, line, operation, patchContent) {
-      return line === patchContent;
-    },
-    fuzzFactor = options.fuzzFactor || 0,
-    minLine = 0;
-  if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
-    throw new Error('fuzzFactor must be a non-negative integer');
-  }
-
-  // Special case for empty patch.
-  if (!hunks.length) {
-    return source;
-  }
-
-  // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
-  // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
-  // newline that already exists - then we either return false and fail to apply the patch (if
-  // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
-  // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
-  var prevLine = '',
-    removeEOFNL = false,
-    addEOFNL = false;
-  for (var i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
-    var line = hunks[hunks.length - 1].lines[i];
-    if (line[0] == '\\') {
-      if (prevLine[0] == '+') {
-        removeEOFNL = true;
-      } else if (prevLine[0] == '-') {
-        addEOFNL = true;
-      }
-    }
-    prevLine = line;
-  }
-  if (removeEOFNL) {
-    if (addEOFNL) {
-      // This means the final line gets changed but doesn't have a trailing newline in either the
-      // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
-      // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
-      if (!fuzzFactor && lines[lines.length - 1] == '') {
-        return false;
-      }
-    } else if (lines[lines.length - 1] == '') {
-      lines.pop();
-    } else if (!fuzzFactor) {
-      return false;
-    }
-  } else if (addEOFNL) {
-    if (lines[lines.length - 1] != '') {
-      lines.push('');
-    } else if (!fuzzFactor) {
-      return false;
-    }
-  }
-
-  /**
-   * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
-   * insertions, substitutions, or deletions, while ensuring also that:
-   * - lines deleted in the hunk match exactly, and
-   * - wherever an insertion operation or block of insertion operations appears in the hunk, the
-   *   immediately preceding and following lines of context match exactly
-   *
-   * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
-   *
-   * If the hunk can be applied, returns an object with properties `oldLineLastI` and
-   * `replacementLines`. Otherwise, returns null.
-   */
-  function applyHunk(hunkLines, toPos, maxErrors) {
-    var hunkLinesI = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;
-    var lastContextLineMatched = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : true;
-    var patchedLines = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : [];
-    var patchedLinesLength = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 0;
-    var nConsecutiveOldContextLines = 0;
-    var nextContextLineMustMatch = false;
-    for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
-      var hunkLine = hunkLines[hunkLinesI],
-        operation = hunkLine.length > 0 ? hunkLine[0] : ' ',
-        content = hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine;
-      if (operation === '-') {
-        if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-          toPos++;
-          nConsecutiveOldContextLines = 0;
-        } else {
-          if (!maxErrors || lines[toPos] == null) {
-            return null;
-          }
-          patchedLines[patchedLinesLength] = lines[toPos];
-          return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
-        }
-      }
-      if (operation === '+') {
-        if (!lastContextLineMatched) {
-          return null;
-        }
-        patchedLines[patchedLinesLength] = content;
-        patchedLinesLength++;
-        nConsecutiveOldContextLines = 0;
-        nextContextLineMustMatch = true;
-      }
-      if (operation === ' ') {
-        nConsecutiveOldContextLines++;
-        patchedLines[patchedLinesLength] = lines[toPos];
-        if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-          patchedLinesLength++;
-          lastContextLineMatched = true;
-          nextContextLineMustMatch = false;
-          toPos++;
-        } else {
-          if (nextContextLineMustMatch || !maxErrors) {
-            return null;
-          }
-
-          // Consider 3 possibilities in sequence:
-          // 1. lines contains a *substitution* not included in the patch context, or
-          // 2. lines contains an *insertion* not included in the patch context, or
-          // 3. lines contains a *deletion* not included in the patch context
-          // The first two options are of course only possible if the line from lines is non-null -
-          // i.e. only option 3 is possible if we've overrun the end of the old file.
-          return lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength);
-        }
-      }
-    }
-
-    // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
-    // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
-    // that starts in this hunk's trailing context.
-    patchedLinesLength -= nConsecutiveOldContextLines;
-    toPos -= nConsecutiveOldContextLines;
-    patchedLines.length = patchedLinesLength;
-    return {
-      patchedLines: patchedLines,
-      oldLineLastI: toPos - 1
-    };
-  }
-  var resultLines = [];
-
-  // Search best fit offsets for each hunk based on the previous ones
-  var prevHunkOffset = 0;
-  for (var _i = 0; _i < hunks.length; _i++) {
-    var hunk = hunks[_i];
-    var hunkResult = void 0;
-    var maxLine = lines.length - hunk.oldLines + fuzzFactor;
-    var toPos = void 0;
-    for (var maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
-      toPos = hunk.oldStart + prevHunkOffset - 1;
-      var iterator = distanceIterator(toPos, minLine, maxLine);
-      for (; toPos !== undefined; toPos = iterator()) {
-        hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
-        if (hunkResult) {
-          break;
-        }
-      }
-      if (hunkResult) {
-        break;
-      }
-    }
-    if (!hunkResult) {
-      return false;
-    }
-
-    // Copy everything from the end of where we applied the last hunk to the start of this hunk
-    for (var _i2 = minLine; _i2 < toPos; _i2++) {
-      resultLines.push(lines[_i2]);
-    }
-
-    // Add the lines produced by applying the hunk:
-    for (var _i3 = 0; _i3 < hunkResult.patchedLines.length; _i3++) {
-      var _line = hunkResult.patchedLines[_i3];
-      resultLines.push(_line);
-    }
-
-    // Set lower text limit to end of the current hunk, so next ones don't try
-    // to fit over already patched text
-    minLine = hunkResult.oldLineLastI + 1;
-
-    // Note the offset between where the patch said the hunk should've applied and where we
-    // applied it, so we can adjust future hunks accordingly:
-    prevHunkOffset = toPos + 1 - hunk.oldStart;
-  }
-
-  // Copy over the rest of the lines from the old text
-  for (var _i4 = minLine; _i4 < lines.length; _i4++) {
-    resultLines.push(lines[_i4]);
-  }
-  return resultLines.join('\n');
-}
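
A usage sketch of applyPatch above, assuming the published `diff` entry point; with the default fuzzFactor of 0 every context line must match exactly, and a non-zero fuzzFactor tolerates that many errors per hunk as described in applyHunk.

import { applyPatch } from 'diff';

const source = 'one\ntwo\nthree\n';
const patch = [
  '--- a/nums.txt',
  '+++ b/nums.txt',
  '@@ -1,3 +1,3 @@',
  ' one',
  '-two',
  '+2',
  ' three',
  '',
].join('\n');

console.log(applyPatch(source, patch));                    // 'one\n2\nthree\n'
console.log(applyPatch('unrelated\n', patch));             // false (hunk does not fit)
console.log(applyPatch(source, patch, { fuzzFactor: 2 })); // same result, more tolerant matching
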
-
-// Wrapper that supports multiple file patches via callbacks.
-function applyPatches(uniDiff, options) {
-  if (typeof uniDiff === 'string') {
-    uniDiff = parsePatch(uniDiff);
-  }
-  var currentIndex = 0;
-  function processIndex() {
-    var index = uniDiff[currentIndex++];
-    if (!index) {
-      return options.complete();
-    }
-    options.loadFile(index, function (err, data) {
-      if (err) {
-        return options.complete(err);
-      }
-      var updatedContent = applyPatch(data, index, options);
-      options.patched(index, updatedContent, function (err) {
-        if (err) {
-          return options.complete(err);
-        }
-        processIndex();
-      });
-    });
-  }
-  processIndex();
-}
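
A sketch of the callback protocol applyPatches drives, assuming the published `diff` entry point; `multiFilePatch` and the fs round-trip are illustrative placeholders, not part of the library.

import { readFile, writeFile } from 'node:fs';
import { applyPatches } from 'diff';

applyPatches(multiFilePatch, {
  loadFile(index, done) {
    readFile(index.oldFileName, 'utf8', done);      // done(err, data)
  },
  patched(index, content, done) {
    if (content === false) {
      return done(new Error(`could not apply hunks for ${index.oldFileName}`));
    }
    writeFile(index.newFileName, content, done);    // done(err)
  },
  complete(err) {
    if (err) {
      console.error('patching aborted:', err);
    }
  },
});
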
-
-function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-  if (!options) {
-    options = {};
-  }
-  if (typeof options === 'function') {
-    options = {
-      callback: options
-    };
-  }
-  if (typeof options.context === 'undefined') {
-    options.context = 4;
-  }
-  if (options.newlineIsToken) {
-    throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
-  }
-  if (!options.callback) {
-    return diffLinesResultToPatch(diffLines(oldStr, newStr, options));
-  } else {
-    var _options = options,
-      _callback = _options.callback;
-    diffLines(oldStr, newStr, _objectSpread2(_objectSpread2({}, options), {}, {
-      callback: function callback(diff) {
-        var patch = diffLinesResultToPatch(diff);
-        _callback(patch);
-      }
-    }));
-  }
-  function diffLinesResultToPatch(diff) {
-    // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
-    //         of lines containing trailing newline characters. We'll tidy up later...
-
-    if (!diff) {
-      return;
-    }
-    diff.push({
-      value: '',
-      lines: []
-    }); // Append an empty value to make cleanup easier
-
-    function contextLines(lines) {
-      return lines.map(function (entry) {
-        return ' ' + entry;
-      });
-    }
-    var hunks = [];
-    var oldRangeStart = 0,
-      newRangeStart = 0,
-      curRange = [],
-      oldLine = 1,
-      newLine = 1;
-    var _loop = function _loop() {
-      var current = diff[i],
-        lines = current.lines || splitLines(current.value);
-      current.lines = lines;
-      if (current.added || current.removed) {
-        var _curRange;
-        // If we have previous context, start with that
-        if (!oldRangeStart) {
-          var prev = diff[i - 1];
-          oldRangeStart = oldLine;
-          newRangeStart = newLine;
-          if (prev) {
-            curRange = options.context > 0 ? contextLines(prev.lines.slice(-options.context)) : [];
-            oldRangeStart -= curRange.length;
-            newRangeStart -= curRange.length;
-          }
-        }
-
-        // Output our changes
-        (_curRange = curRange).push.apply(_curRange, _toConsumableArray(lines.map(function (entry) {
-          return (current.added ? '+' : '-') + entry;
-        })));
-
-        // Track the updated file position
-        if (current.added) {
-          newLine += lines.length;
-        } else {
-          oldLine += lines.length;
-        }
-      } else {
-        // Identical context lines. Track line changes
-        if (oldRangeStart) {
-          // Close out any changes that have been output (or join overlapping)
-          if (lines.length <= options.context * 2 && i < diff.length - 2) {
-            var _curRange2;
-            // Overlapping
-            (_curRange2 = curRange).push.apply(_curRange2, _toConsumableArray(contextLines(lines)));
-          } else {
-            var _curRange3;
-            // end the range and output
-            var contextSize = Math.min(lines.length, options.context);
-            (_curRange3 = curRange).push.apply(_curRange3, _toConsumableArray(contextLines(lines.slice(0, contextSize))));
-            var _hunk = {
-              oldStart: oldRangeStart,
-              oldLines: oldLine - oldRangeStart + contextSize,
-              newStart: newRangeStart,
-              newLines: newLine - newRangeStart + contextSize,
-              lines: curRange
-            };
-            hunks.push(_hunk);
-            oldRangeStart = 0;
-            newRangeStart = 0;
-            curRange = [];
-          }
-        }
-        oldLine += lines.length;
-        newLine += lines.length;
-      }
-    };
-    for (var i = 0; i < diff.length; i++) {
-      _loop();
-    }
-
-    // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
-    //         "\ No newline at end of file".
-    for (var _i = 0, _hunks = hunks; _i < _hunks.length; _i++) {
-      var hunk = _hunks[_i];
-      for (var _i2 = 0; _i2 < hunk.lines.length; _i2++) {
-        if (hunk.lines[_i2].endsWith('\n')) {
-          hunk.lines[_i2] = hunk.lines[_i2].slice(0, -1);
-        } else {
-          hunk.lines.splice(_i2 + 1, 0, '\\ No newline at end of file');
-          _i2++; // Skip the line we just added, then continue iterating
-        }
-      }
-    }
-    return {
-      oldFileName: oldFileName,
-      newFileName: newFileName,
-      oldHeader: oldHeader,
-      newHeader: newHeader,
-      hunks: hunks
-    };
-  }
-}
-function formatPatch(diff) {
-  if (Array.isArray(diff)) {
-    return diff.map(formatPatch).join('\n');
-  }
-  var ret = [];
-  if (diff.oldFileName == diff.newFileName) {
-    ret.push('Index: ' + diff.oldFileName);
-  }
-  ret.push('===================================================================');
-  ret.push('--- ' + diff.oldFileName + (typeof diff.oldHeader === 'undefined' ? '' : '\t' + diff.oldHeader));
-  ret.push('+++ ' + diff.newFileName + (typeof diff.newHeader === 'undefined' ? '' : '\t' + diff.newHeader));
-  for (var i = 0; i < diff.hunks.length; i++) {
-    var hunk = diff.hunks[i];
-    // Unified Diff Format quirk: If the chunk size is 0,
-    // the first number is one lower than one would expect.
-    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-    if (hunk.oldLines === 0) {
-      hunk.oldStart -= 1;
-    }
-    if (hunk.newLines === 0) {
-      hunk.newStart -= 1;
-    }
-    ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + ' +' + hunk.newStart + ',' + hunk.newLines + ' @@');
-    ret.push.apply(ret, hunk.lines);
-  }
-  return ret.join('\n') + '\n';
-}
-function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-  var _options2;
-  if (typeof options === 'function') {
-    options = {
-      callback: options
-    };
-  }
-  if (!((_options2 = options) !== null && _options2 !== void 0 && _options2.callback)) {
-    var patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
-    if (!patchObj) {
-      return;
-    }
-    return formatPatch(patchObj);
-  } else {
-    var _options3 = options,
-      _callback2 = _options3.callback;
-    structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, _objectSpread2(_objectSpread2({}, options), {}, {
-      callback: function callback(patchObj) {
-        if (!patchObj) {
-          _callback2();
-        } else {
-          _callback2(formatPatch(patchObj));
-        }
-      }
-    }));
-  }
-}
-function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
-  return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
-}
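
A usage sketch tying the entry points above together, assuming the published `diff` package: structuredPatch returns the hunk objects, formatPatch renders them as unified-diff text, and createTwoFilesPatch/createPatch combine the two steps.

import { structuredPatch, formatPatch } from 'diff';

const patchObj = structuredPatch('old.txt', 'new.txt', 'a\nb\n', 'a\nc\n', '', '');
console.log(patchObj.hunks[0]);
// { oldStart: 1, oldLines: 2, newStart: 1, newLines: 2, lines: [' a', '-b', '+c'] }
console.log(formatPatch(patchObj)); // unified-diff text for old.txt -> new.txt
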
-
-/**
- * Split `text` into an array of lines, including the trailing newline character (where present)
- */
-function splitLines(text) {
-  var hasTrailingNl = text.endsWith('\n');
-  var result = text.split('\n').map(function (line) {
-    return line + '\n';
-  });
-  if (hasTrailingNl) {
-    result.pop();
-  } else {
-    result.push(result.pop().slice(0, -1));
-  }
-  return result;
-}
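
The helper above keeps each trailing newline attached to its line; a quick check of both cases:

console.log(splitLines('a\nb\n')); // ['a\n', 'b\n']
console.log(splitLines('a\nb'));   // ['a\n', 'b']
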
-
-function arrayEqual(a, b) {
-  if (a.length !== b.length) {
-    return false;
-  }
-  return arrayStartsWith(a, b);
-}
-function arrayStartsWith(array, start) {
-  if (start.length > array.length) {
-    return false;
-  }
-  for (var i = 0; i < start.length; i++) {
-    if (start[i] !== array[i]) {
-      return false;
-    }
-  }
-  return true;
-}
-
-function calcLineCount(hunk) {
-  var _calcOldNewLineCount = calcOldNewLineCount(hunk.lines),
-    oldLines = _calcOldNewLineCount.oldLines,
-    newLines = _calcOldNewLineCount.newLines;
-  if (oldLines !== undefined) {
-    hunk.oldLines = oldLines;
-  } else {
-    delete hunk.oldLines;
-  }
-  if (newLines !== undefined) {
-    hunk.newLines = newLines;
-  } else {
-    delete hunk.newLines;
-  }
-}
-function merge(mine, theirs, base) {
-  mine = loadPatch(mine, base);
-  theirs = loadPatch(theirs, base);
-  var ret = {};
-
-  // For index we just let it pass through as it doesn't have any necessary meaning.
-  // Leaving sanity checks on this to the API consumer that may know more about the
-  // meaning in their own context.
-  if (mine.index || theirs.index) {
-    ret.index = mine.index || theirs.index;
-  }
-  if (mine.newFileName || theirs.newFileName) {
-    if (!fileNameChanged(mine)) {
-      // No header or no change in ours, use theirs (and ours if theirs does not exist)
-      ret.oldFileName = theirs.oldFileName || mine.oldFileName;
-      ret.newFileName = theirs.newFileName || mine.newFileName;
-      ret.oldHeader = theirs.oldHeader || mine.oldHeader;
-      ret.newHeader = theirs.newHeader || mine.newHeader;
-    } else if (!fileNameChanged(theirs)) {
-      // No header or no change in theirs, use ours
-      ret.oldFileName = mine.oldFileName;
-      ret.newFileName = mine.newFileName;
-      ret.oldHeader = mine.oldHeader;
-      ret.newHeader = mine.newHeader;
-    } else {
-      // Both changed... figure it out
-      ret.oldFileName = selectField(ret, mine.oldFileName, theirs.oldFileName);
-      ret.newFileName = selectField(ret, mine.newFileName, theirs.newFileName);
-      ret.oldHeader = selectField(ret, mine.oldHeader, theirs.oldHeader);
-      ret.newHeader = selectField(ret, mine.newHeader, theirs.newHeader);
-    }
-  }
-  ret.hunks = [];
-  var mineIndex = 0,
-    theirsIndex = 0,
-    mineOffset = 0,
-    theirsOffset = 0;
-  while (mineIndex < mine.hunks.length || theirsIndex < theirs.hunks.length) {
-    var mineCurrent = mine.hunks[mineIndex] || {
-        oldStart: Infinity
-      },
-      theirsCurrent = theirs.hunks[theirsIndex] || {
-        oldStart: Infinity
-      };
-    if (hunkBefore(mineCurrent, theirsCurrent)) {
-      // This patch does not overlap with any of the others, yay.
-      ret.hunks.push(cloneHunk(mineCurrent, mineOffset));
-      mineIndex++;
-      theirsOffset += mineCurrent.newLines - mineCurrent.oldLines;
-    } else if (hunkBefore(theirsCurrent, mineCurrent)) {
-      // This patch does not overlap with any of the others, yay.
-      ret.hunks.push(cloneHunk(theirsCurrent, theirsOffset));
-      theirsIndex++;
-      mineOffset += theirsCurrent.newLines - theirsCurrent.oldLines;
-    } else {
-      // Overlap, merge as best we can
-      var mergedHunk = {
-        oldStart: Math.min(mineCurrent.oldStart, theirsCurrent.oldStart),
-        oldLines: 0,
-        newStart: Math.min(mineCurrent.newStart + mineOffset, theirsCurrent.oldStart + theirsOffset),
-        newLines: 0,
-        lines: []
-      };
-      mergeLines(mergedHunk, mineCurrent.oldStart, mineCurrent.lines, theirsCurrent.oldStart, theirsCurrent.lines);
-      theirsIndex++;
-      mineIndex++;
-      ret.hunks.push(mergedHunk);
-    }
-  }
-  return ret;
-}
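
A usage sketch of merge above, assuming the published `diff` entry point; the two inputs here touch different lines so they merge cleanly, while genuinely overlapping edits would instead show up as `{ conflict: true, mine, theirs }` entries inside a hunk's lines.

import { createPatch, merge } from 'diff';

const base = 'one\ntwo\nthree\n';
const mine = createPatch('f.txt', base, 'one\n2\nthree\n');
const theirs = createPatch('f.txt', base, 'one\ntwo\n3\n');

const merged = merge(mine, theirs, base);
for (const hunk of merged.hunks) {
  if (hunk.conflict) {
    // hunk.lines mixes plain strings with
    // { conflict: true, mine: [...], theirs: [...] } objects
  }
}
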
-function loadPatch(param, base) {
-  if (typeof param === 'string') {
-    if (/^@@/m.test(param) || /^Index:/m.test(param)) {
-      return parsePatch(param)[0];
-    }
-    if (!base) {
-      throw new Error('Must provide a base reference or pass in a patch');
-    }
-    return structuredPatch(undefined, undefined, base, param);
-  }
-  return param;
-}
-function fileNameChanged(patch) {
-  return patch.newFileName && patch.newFileName !== patch.oldFileName;
-}
-function selectField(index, mine, theirs) {
-  if (mine === theirs) {
-    return mine;
-  } else {
-    index.conflict = true;
-    return {
-      mine: mine,
-      theirs: theirs
-    };
-  }
-}
-function hunkBefore(test, check) {
-  return test.oldStart < check.oldStart && test.oldStart + test.oldLines < check.oldStart;
-}
-function cloneHunk(hunk, offset) {
-  return {
-    oldStart: hunk.oldStart,
-    oldLines: hunk.oldLines,
-    newStart: hunk.newStart + offset,
-    newLines: hunk.newLines,
-    lines: hunk.lines
-  };
-}
-function mergeLines(hunk, mineOffset, mineLines, theirOffset, theirLines) {
-  // This will generally result in a conflicted hunk, but there are cases where the context
-  // is the only overlap where we can successfully merge the content here.
-  var mine = {
-      offset: mineOffset,
-      lines: mineLines,
-      index: 0
-    },
-    their = {
-      offset: theirOffset,
-      lines: theirLines,
-      index: 0
-    };
-
-  // Handle any leading content
-  insertLeading(hunk, mine, their);
-  insertLeading(hunk, their, mine);
-
-  // Now in the overlap content. Scan through and select the best changes from each.
-  while (mine.index < mine.lines.length && their.index < their.lines.length) {
-    var mineCurrent = mine.lines[mine.index],
-      theirCurrent = their.lines[their.index];
-    if ((mineCurrent[0] === '-' || mineCurrent[0] === '+') && (theirCurrent[0] === '-' || theirCurrent[0] === '+')) {
-      // Both modified ...
-      mutualChange(hunk, mine, their);
-    } else if (mineCurrent[0] === '+' && theirCurrent[0] === ' ') {
-      var _hunk$lines;
-      // Mine inserted
-      (_hunk$lines = hunk.lines).push.apply(_hunk$lines, _toConsumableArray(collectChange(mine)));
-    } else if (theirCurrent[0] === '+' && mineCurrent[0] === ' ') {
-      var _hunk$lines2;
-      // Theirs inserted
-      (_hunk$lines2 = hunk.lines).push.apply(_hunk$lines2, _toConsumableArray(collectChange(their)));
-    } else if (mineCurrent[0] === '-' && theirCurrent[0] === ' ') {
-      // Mine removed or edited
-      removal(hunk, mine, their);
-    } else if (theirCurrent[0] === '-' && mineCurrent[0] === ' ') {
-      // Their removed or edited
-      removal(hunk, their, mine, true);
-    } else if (mineCurrent === theirCurrent) {
-      // Context identity
-      hunk.lines.push(mineCurrent);
-      mine.index++;
-      their.index++;
-    } else {
-      // Context mismatch
-      conflict(hunk, collectChange(mine), collectChange(their));
-    }
-  }
-
-  // Now push anything that may be remaining
-  insertTrailing(hunk, mine);
-  insertTrailing(hunk, their);
-  calcLineCount(hunk);
-}
-function mutualChange(hunk, mine, their) {
-  var myChanges = collectChange(mine),
-    theirChanges = collectChange(their);
-  if (allRemoves(myChanges) && allRemoves(theirChanges)) {
-    // Special case for remove changes that are supersets of one another
-    if (arrayStartsWith(myChanges, theirChanges) && skipRemoveSuperset(their, myChanges, myChanges.length - theirChanges.length)) {
-      var _hunk$lines3;
-      (_hunk$lines3 = hunk.lines).push.apply(_hunk$lines3, _toConsumableArray(myChanges));
-      return;
-    } else if (arrayStartsWith(theirChanges, myChanges) && skipRemoveSuperset(mine, theirChanges, theirChanges.length - myChanges.length)) {
-      var _hunk$lines4;
-      (_hunk$lines4 = hunk.lines).push.apply(_hunk$lines4, _toConsumableArray(theirChanges));
-      return;
-    }
-  } else if (arrayEqual(myChanges, theirChanges)) {
-    var _hunk$lines5;
-    (_hunk$lines5 = hunk.lines).push.apply(_hunk$lines5, _toConsumableArray(myChanges));
-    return;
-  }
-  conflict(hunk, myChanges, theirChanges);
-}
-function removal(hunk, mine, their, swap) {
-  var myChanges = collectChange(mine),
-    theirChanges = collectContext(their, myChanges);
-  if (theirChanges.merged) {
-    var _hunk$lines6;
-    (_hunk$lines6 = hunk.lines).push.apply(_hunk$lines6, _toConsumableArray(theirChanges.merged));
-  } else {
-    conflict(hunk, swap ? theirChanges : myChanges, swap ? myChanges : theirChanges);
-  }
-}
-function conflict(hunk, mine, their) {
-  hunk.conflict = true;
-  hunk.lines.push({
-    conflict: true,
-    mine: mine,
-    theirs: their
-  });
-}
-function insertLeading(hunk, insert, their) {
-  while (insert.offset < their.offset && insert.index < insert.lines.length) {
-    var line = insert.lines[insert.index++];
-    hunk.lines.push(line);
-    insert.offset++;
-  }
-}
-function insertTrailing(hunk, insert) {
-  while (insert.index < insert.lines.length) {
-    var line = insert.lines[insert.index++];
-    hunk.lines.push(line);
-  }
-}
-function collectChange(state) {
-  var ret = [],
-    operation = state.lines[state.index][0];
-  while (state.index < state.lines.length) {
-    var line = state.lines[state.index];
-
-    // Group additions that are immediately after subtractions and treat them as one "atomic" modify change.
-    if (operation === '-' && line[0] === '+') {
-      operation = '+';
-    }
-    if (operation === line[0]) {
-      ret.push(line);
-      state.index++;
-    } else {
-      break;
-    }
-  }
-  return ret;
-}
-function collectContext(state, matchChanges) {
-  var changes = [],
-    merged = [],
-    matchIndex = 0,
-    contextChanges = false,
-    conflicted = false;
-  while (matchIndex < matchChanges.length && state.index < state.lines.length) {
-    var change = state.lines[state.index],
-      match = matchChanges[matchIndex];
-
-    // Once we've hit our add, then we are done
-    if (match[0] === '+') {
-      break;
-    }
-    contextChanges = contextChanges || change[0] !== ' ';
-    merged.push(match);
-    matchIndex++;
-
-    // Consume any additions in the other block as a conflict to attempt
-    // to pull in the remaining context after this
-    if (change[0] === '+') {
-      conflicted = true;
-      while (change[0] === '+') {
-        changes.push(change);
-        change = state.lines[++state.index];
-      }
-    }
-    if (match.substr(1) === change.substr(1)) {
-      changes.push(change);
-      state.index++;
-    } else {
-      conflicted = true;
-    }
-  }
-  if ((matchChanges[matchIndex] || '')[0] === '+' && contextChanges) {
-    conflicted = true;
-  }
-  if (conflicted) {
-    return changes;
-  }
-  while (matchIndex < matchChanges.length) {
-    merged.push(matchChanges[matchIndex++]);
-  }
-  return {
-    merged: merged,
-    changes: changes
-  };
-}
-function allRemoves(changes) {
-  return changes.reduce(function (prev, change) {
-    return prev && change[0] === '-';
-  }, true);
-}
-function skipRemoveSuperset(state, removeChanges, delta) {
-  for (var i = 0; i < delta; i++) {
-    var changeContent = removeChanges[removeChanges.length - delta + i].substr(1);
-    if (state.lines[state.index + i] !== ' ' + changeContent) {
-      return false;
-    }
-  }
-  state.index += delta;
-  return true;
-}
-function calcOldNewLineCount(lines) {
-  var oldLines = 0;
-  var newLines = 0;
-  lines.forEach(function (line) {
-    if (typeof line !== 'string') {
-      var myCount = calcOldNewLineCount(line.mine);
-      var theirCount = calcOldNewLineCount(line.theirs);
-      if (oldLines !== undefined) {
-        if (myCount.oldLines === theirCount.oldLines) {
-          oldLines += myCount.oldLines;
-        } else {
-          oldLines = undefined;
-        }
-      }
-      if (newLines !== undefined) {
-        if (myCount.newLines === theirCount.newLines) {
-          newLines += myCount.newLines;
-        } else {
-          newLines = undefined;
-        }
-      }
-    } else {
-      if (newLines !== undefined && (line[0] === '+' || line[0] === ' ')) {
-        newLines++;
-      }
-      if (oldLines !== undefined && (line[0] === '-' || line[0] === ' ')) {
-        oldLines++;
-      }
-    }
-  });
-  return {
-    oldLines: oldLines,
-    newLines: newLines
-  };
-}
-
-function reversePatch(structuredPatch) {
-  if (Array.isArray(structuredPatch)) {
-    return structuredPatch.map(reversePatch).reverse();
-  }
-  return _objectSpread2(_objectSpread2({}, structuredPatch), {}, {
-    oldFileName: structuredPatch.newFileName,
-    oldHeader: structuredPatch.newHeader,
-    newFileName: structuredPatch.oldFileName,
-    newHeader: structuredPatch.oldHeader,
-    hunks: structuredPatch.hunks.map(function (hunk) {
-      return {
-        oldLines: hunk.newLines,
-        oldStart: hunk.newStart,
-        newLines: hunk.oldLines,
-        newStart: hunk.oldStart,
-        lines: hunk.lines.map(function (l) {
-          if (l.startsWith('-')) {
-            return "+".concat(l.slice(1));
-          }
-          if (l.startsWith('+')) {
-            return "-".concat(l.slice(1));
-          }
-          return l;
-        })
-      };
-    })
-  });
-}
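
A round-trip sketch of reversePatch above: reversing swaps the old/new metadata and flips added and removed lines, so applying the reversed patch to the new text recovers the old one (assuming the published `diff` entry point).

import { structuredPatch, reversePatch, applyPatch } from 'diff';

const fwd = structuredPatch('a.txt', 'b.txt', 'x\ny\n', 'x\nz\n', '', '');
const back = reversePatch(fwd);
console.log(applyPatch('x\nz\n', back)); // 'x\ny\n'
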
-
-// See: http://code.google.com/p/google-diff-match-patch/wiki/API
-function convertChangesToDMP(changes) {
-  var ret = [],
-    change,
-    operation;
-  for (var i = 0; i < changes.length; i++) {
-    change = changes[i];
-    if (change.added) {
-      operation = 1;
-    } else if (change.removed) {
-      operation = -1;
-    } else {
-      operation = 0;
-    }
-    ret.push([operation, change.value]);
-  }
-  return ret;
-}
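
A usage sketch of the converter above: the result is an array of [operation, text] pairs in diff-match-patch style, where 1 means inserted, -1 removed, and 0 unchanged.

import { diffChars, convertChangesToDMP } from 'diff';

console.log(convertChangesToDMP(diffChars('cat', 'cart')));
// [[0, 'ca'], [1, 'r'], [0, 't']]
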
-
-function convertChangesToXML(changes) {
-  var ret = [];
-  for (var i = 0; i < changes.length; i++) {
-    var change = changes[i];
-    if (change.added) {
-      ret.push('<ins>');
-    } else if (change.removed) {
-      ret.push('<del>');
-    }
-    ret.push(escapeHTML(change.value));
-    if (change.added) {
-      ret.push('</ins>');
-    } else if (change.removed) {
-      ret.push('</del>');
-    }
-  }
-  return ret.join('');
-}
-function escapeHTML(s) {
-  var n = s;
-  n = n.replace(/&/g, '&amp;');
-  n = n.replace(/</g, '&lt;');
-  n = n.replace(/>/g, '&gt;');
-  n = n.replace(/"/g, '&quot;');
-  return n;
-}
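
A usage sketch of the XML converter above; added runs are wrapped in <ins>, removed runs in <del>, and values pass through escapeHTML.

import { diffChars, convertChangesToXML } from 'diff';

console.log(convertChangesToXML(diffChars('abc', 'axc')));
// 'a<del>b</del><ins>x</ins>c'
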
-
-export { Diff, applyPatch, applyPatches, canonicalize, convertChangesToDMP, convertChangesToXML, createPatch, createTwoFilesPatch, diffArrays, diffChars, diffCss, diffJson, diffLines, diffSentences, diffTrimmedLines, diffWords, diffWordsWithSpace, formatPatch, merge, parsePatch, reversePatch, structuredPatch };
diff --git a/node_modules/diff/lib/index.js b/node_modules/diff/lib/index.js
deleted file mode 100644
index 518b3dee33d30..0000000000000
--- a/node_modules/diff/lib/index.js
+++ /dev/null
@@ -1,217 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-Object.defineProperty(exports, "Diff", {
-  enumerable: true,
-  get: function get() {
-    return _base["default"];
-  }
-});
-Object.defineProperty(exports, "applyPatch", {
-  enumerable: true,
-  get: function get() {
-    return _apply.applyPatch;
-  }
-});
-Object.defineProperty(exports, "applyPatches", {
-  enumerable: true,
-  get: function get() {
-    return _apply.applyPatches;
-  }
-});
-Object.defineProperty(exports, "canonicalize", {
-  enumerable: true,
-  get: function get() {
-    return _json.canonicalize;
-  }
-});
-Object.defineProperty(exports, "convertChangesToDMP", {
-  enumerable: true,
-  get: function get() {
-    return _dmp.convertChangesToDMP;
-  }
-});
-Object.defineProperty(exports, "convertChangesToXML", {
-  enumerable: true,
-  get: function get() {
-    return _xml.convertChangesToXML;
-  }
-});
-Object.defineProperty(exports, "createPatch", {
-  enumerable: true,
-  get: function get() {
-    return _create.createPatch;
-  }
-});
-Object.defineProperty(exports, "createTwoFilesPatch", {
-  enumerable: true,
-  get: function get() {
-    return _create.createTwoFilesPatch;
-  }
-});
-Object.defineProperty(exports, "diffArrays", {
-  enumerable: true,
-  get: function get() {
-    return _array.diffArrays;
-  }
-});
-Object.defineProperty(exports, "diffChars", {
-  enumerable: true,
-  get: function get() {
-    return _character.diffChars;
-  }
-});
-Object.defineProperty(exports, "diffCss", {
-  enumerable: true,
-  get: function get() {
-    return _css.diffCss;
-  }
-});
-Object.defineProperty(exports, "diffJson", {
-  enumerable: true,
-  get: function get() {
-    return _json.diffJson;
-  }
-});
-Object.defineProperty(exports, "diffLines", {
-  enumerable: true,
-  get: function get() {
-    return _line.diffLines;
-  }
-});
-Object.defineProperty(exports, "diffSentences", {
-  enumerable: true,
-  get: function get() {
-    return _sentence.diffSentences;
-  }
-});
-Object.defineProperty(exports, "diffTrimmedLines", {
-  enumerable: true,
-  get: function get() {
-    return _line.diffTrimmedLines;
-  }
-});
-Object.defineProperty(exports, "diffWords", {
-  enumerable: true,
-  get: function get() {
-    return _word.diffWords;
-  }
-});
-Object.defineProperty(exports, "diffWordsWithSpace", {
-  enumerable: true,
-  get: function get() {
-    return _word.diffWordsWithSpace;
-  }
-});
-Object.defineProperty(exports, "formatPatch", {
-  enumerable: true,
-  get: function get() {
-    return _create.formatPatch;
-  }
-});
-Object.defineProperty(exports, "merge", {
-  enumerable: true,
-  get: function get() {
-    return _merge.merge;
-  }
-});
-Object.defineProperty(exports, "parsePatch", {
-  enumerable: true,
-  get: function get() {
-    return _parse.parsePatch;
-  }
-});
-Object.defineProperty(exports, "reversePatch", {
-  enumerable: true,
-  get: function get() {
-    return _reverse.reversePatch;
-  }
-});
-Object.defineProperty(exports, "structuredPatch", {
-  enumerable: true,
-  get: function get() {
-    return _create.structuredPatch;
-  }
-});
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_base = _interopRequireDefault(require("./diff/base"))
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_character = require("./diff/character")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_word = require("./diff/word")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_line = require("./diff/line")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_sentence = require("./diff/sentence")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_css = require("./diff/css")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_json = require("./diff/json")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_array = require("./diff/array")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_apply = require("./patch/apply")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_parse = require("./patch/parse")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_merge = require("./patch/merge")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_reverse = require("./patch/reverse")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_create = require("./patch/create")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_dmp = require("./convert/dmp")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_xml = require("./convert/xml")
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfYmFzZSIsIl9pbnRlcm9wUmVxdWlyZURlZmF1bHQiLCJyZXF1aXJlIiwiX2NoYXJhY3RlciIsIl93b3JkIiwiX2xpbmUiLCJfc2VudGVuY2UiLCJfY3NzIiwiX2pzb24iLCJfYXJyYXkiLCJfYXBwbHkiLCJfcGFyc2UiLCJfbWVyZ2UiLCJfcmV2ZXJzZSIsIl9jcmVhdGUiLCJfZG1wIiwiX3htbCIsIm9iaiIsIl9fZXNNb2R1bGUiXSwic291cmNlcyI6WyIuLi9zcmMvaW5kZXguanMiXSwic291cmNlc0NvbnRlbnQiOlsiLyogU2VlIExJQ0VOU0UgZmlsZSBmb3IgdGVybXMgb2YgdXNlICovXG5cbi8qXG4gKiBUZXh0IGRpZmYgaW1wbGVtZW50YXRpb24uXG4gKlxuICogVGhpcyBsaWJyYXJ5IHN1cHBvcnRzIHRoZSBmb2xsb3dpbmcgQVBJczpcbiAqIERpZmYuZGlmZkNoYXJzOiBDaGFyYWN0ZXIgYnkgY2hhcmFjdGVyIGRpZmZcbiAqIERpZmYuZGlmZldvcmRzOiBXb3JkIChhcyBkZWZpbmVkIGJ5IFxcYiByZWdleCkgZGlmZiB3aGljaCBpZ25vcmVzIHdoaXRlc3BhY2VcbiAqIERpZmYuZGlmZkxpbmVzOiBMaW5lIGJhc2VkIGRpZmZcbiAqXG4gKiBEaWZmLmRpZmZDc3M6IERpZmYgdGFyZ2V0ZWQgYXQgQ1NTIGNvbnRlbnRcbiAqXG4gKiBUaGVzZSBtZXRob2RzIGFyZSBiYXNlZCBvbiB0aGUgaW1wbGVtZW50YXRpb24gcHJvcG9zZWQgaW5cbiAqIFwiQW4gTyhORCkgRGlmZmVyZW5jZSBBbGdvcml0aG0gYW5kIGl0cyBWYXJpYXRpb25zXCIgKE15ZXJzLCAxOTg2KS5cbiAqIGh0dHA6Ly9jaXRlc2VlcnguaXN0LnBzdS5lZHUvdmlld2RvYy9zdW1tYXJ5P2RvaT0xMC4xLjEuNC42OTI3XG4gKi9cbmltcG9ydCBEaWZmIGZyb20gJy4vZGlmZi9iYXNlJztcbmltcG9ydCB7ZGlmZkNoYXJzfSBmcm9tICcuL2RpZmYvY2hhcmFjdGVyJztcbmltcG9ydCB7ZGlmZldvcmRzLCBkaWZmV29yZHNXaXRoU3BhY2V9IGZyb20gJy4vZGlmZi93b3JkJztcbmltcG9ydCB7ZGlmZkxpbmVzLCBkaWZmVHJpbW1lZExpbmVzfSBmcm9tICcuL2RpZmYvbGluZSc7XG5pbXBvcnQge2RpZmZTZW50ZW5jZXN9IGZyb20gJy4vZGlmZi9zZW50ZW5jZSc7XG5cbmltcG9ydCB7ZGlmZkNzc30gZnJvbSAnLi9kaWZmL2Nzcyc7XG5pbXBvcnQge2RpZmZKc29uLCBjYW5vbmljYWxpemV9IGZyb20gJy4vZGlmZi9qc29uJztcblxuaW1wb3J0IHtkaWZmQXJyYXlzfSBmcm9tICcuL2RpZmYvYXJyYXknO1xuXG5pbXBvcnQge2FwcGx5UGF0Y2gsIGFwcGx5UGF0Y2hlc30gZnJvbSAnLi9wYXRjaC9hcHBseSc7XG5pbXBvcnQge3BhcnNlUGF0Y2h9IGZyb20gJy4vcGF0Y2gvcGFyc2UnO1xuaW1wb3J0IHttZXJnZX0gZnJvbSAnLi9wYXRjaC9tZXJnZSc7XG5pbXBvcnQge3JldmVyc2VQYXRjaH0gZnJvbSAnLi9wYXRjaC9yZXZlcnNlJztcbmltcG9ydCB7c3RydWN0dXJlZFBhdGNoLCBjcmVhdGVUd29GaWxlc1BhdGNoLCBjcmVhdGVQYXRjaCwgZm9ybWF0UGF0Y2h9IGZyb20gJy4vcGF0Y2gvY3JlYXRlJztcblxuaW1wb3J0IHtjb252ZXJ0Q2hhbmdlc1RvRE1QfSBmcm9tICcuL2NvbnZlcnQvZG1wJztcbmltcG9ydCB7Y29udmVydENoYW5nZXNUb1hNTH0gZnJvbSAnLi9jb252ZXJ0L3htbCc7XG5cbmV4cG9ydCB7XG4gIERpZmYsXG5cbiAgZGlmZkNoYXJzLFxuICBkaWZmV29yZHMsXG4gIGRpZmZXb3Jkc1dpdGhTcGFjZSxcbiAgZGlmZkxpbmVzLFxuICBkaWZmVHJpbW1lZExpbmVzLFxuICBkaWZmU2VudGVuY2VzLFxuXG4gIGRpZmZDc3MsXG4gIGRpZmZKc29uLFxuXG4gIGRpZmZBcnJheXMsXG5cbiAgc3RydWN0dXJlZFBhdGNoLFxuICBjcmVhdGVUd29GaWxlc1BhdGNoLFxuICBjcmVhdGVQYXRjaCxcbiAgZm9ybWF0UGF0Y2gsXG4gIGFwcGx5UGF0Y2gsXG4gIGFwcGx5UGF0Y2hlcyxcbiAgcGFyc2VQYXRjaCxcbiAgbWVyZ2UsXG4gIHJldmVyc2VQYXRjaCxcbiAgY29udmVydENoYW5nZXNUb0RNUCxcbiAgY29udmVydENoYW5nZXNUb1hNTCxcbiAgY2Fub25pY2FsaXplXG59O1xuIl0sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7O0FBZ0JBO0FBQUE7QUFBQUEsS0FBQSxHQUFBQyxzQkFBQSxDQUFBQyxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQUMsVUFBQSxHQUFBRCxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQUUsS0FBQSxHQUFBRixPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQUcsS0FBQSxHQUFBSCxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQUksU0FBQSxHQUFBSixPQUFBO0FBQUE7QUFBQTtBQUVBO0FBQUE7QUFBQUssSUFBQSxHQUFBTCxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQU0sS0FBQSxHQUFBTixPQUFBO0FBQUE7QUFBQTtBQUVBO0FBQUE7QUFBQU8sTUFBQSxHQUFBUCxPQUFBO0FBQUE7QUFBQTtBQUVBO0FBQUE7QUFBQVEsTUFBQSxHQUFBUixPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQVMsTUFBQSxHQUFBVCxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQVUsTUFBQSxHQUFBVixPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQV
csUUFBQSxHQUFBWCxPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQVksT0FBQSxHQUFBWixPQUFBO0FBQUE7QUFBQTtBQUVBO0FBQUE7QUFBQWEsSUFBQSxHQUFBYixPQUFBO0FBQUE7QUFBQTtBQUNBO0FBQUE7QUFBQWMsSUFBQSxHQUFBZCxPQUFBO0FBQUE7QUFBQTtBQUFrRCxtQ0FBQUQsdUJBQUFnQixHQUFBLFdBQUFBLEdBQUEsSUFBQUEsR0FBQSxDQUFBQyxVQUFBLEdBQUFELEdBQUEsZ0JBQUFBLEdBQUE7QUFBQSIsImlnbm9yZUxpc3QiOltdfQ==
diff --git a/node_modules/diff/lib/index.mjs b/node_modules/diff/lib/index.mjs
deleted file mode 100644
index 6e872723d8581..0000000000000
--- a/node_modules/diff/lib/index.mjs
+++ /dev/null
@@ -1,2041 +0,0 @@
-function Diff() {}
-Diff.prototype = {
-  diff: function diff(oldString, newString) {
-    var _options$timeout;
-    var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-    var callback = options.callback;
-    if (typeof options === 'function') {
-      callback = options;
-      options = {};
-    }
-    var self = this;
-    function done(value) {
-      value = self.postProcess(value, options);
-      if (callback) {
-        setTimeout(function () {
-          callback(value);
-        }, 0);
-        return true;
-      } else {
-        return value;
-      }
-    }
-
-    // Allow subclasses to massage the input prior to running
-    oldString = this.castInput(oldString, options);
-    newString = this.castInput(newString, options);
-    oldString = this.removeEmpty(this.tokenize(oldString, options));
-    newString = this.removeEmpty(this.tokenize(newString, options));
-    var newLen = newString.length,
-      oldLen = oldString.length;
-    var editLength = 1;
-    var maxEditLength = newLen + oldLen;
-    if (options.maxEditLength != null) {
-      maxEditLength = Math.min(maxEditLength, options.maxEditLength);
-    }
-    var maxExecutionTime = (_options$timeout = options.timeout) !== null && _options$timeout !== void 0 ? _options$timeout : Infinity;
-    var abortAfterTimestamp = Date.now() + maxExecutionTime;
-    var bestPath = [{
-      oldPos: -1,
-      lastComponent: undefined
-    }];
-
-    // Seed editLength = 0, i.e. the content starts with the same values
-    var newPos = this.extractCommon(bestPath[0], newString, oldString, 0, options);
-    if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-      // Identity per the equality and tokenizer
-      return done(buildValues(self, bestPath[0].lastComponent, newString, oldString, self.useLongestToken));
-    }
-
-    // Once we hit the right edge of the edit graph on some diagonal k, we can
-    // definitely reach the end of the edit graph in no more than k edits, so
-    // there's no point in considering any moves to diagonal k+1 any more (from
-    // which we're guaranteed to need at least k+1 more edits).
-    // Similarly, once we've reached the bottom of the edit graph, there's no
-    // point considering moves to lower diagonals.
-    // We record this fact by setting minDiagonalToConsider and
-    // maxDiagonalToConsider to some finite value once we've hit the edge of
-    // the edit graph.
-    // This optimization is not faithful to the original algorithm presented in
-    // Myers's paper, which instead pointlessly extends D-paths off the end of
-    // the edit graph - see page 7 of Myers's paper which notes this point
-    // explicitly and illustrates it with a diagram. This has major performance
-    // implications for some common scenarios. For instance, to compute a diff
-    // where the new text simply appends d characters on the end of the
-    // original text of length n, the true Myers algorithm will take O(n+d^2)
-    // time while this optimization needs only O(n+d) time.
-    var minDiagonalToConsider = -Infinity,
-      maxDiagonalToConsider = Infinity;
-
-    // Main worker method. checks all permutations of a given edit length for acceptance.
-    function execEditLength() {
-      for (var diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
-        var basePath = void 0;
-        var removePath = bestPath[diagonalPath - 1],
-          addPath = bestPath[diagonalPath + 1];
-        if (removePath) {
-          // No one else is going to attempt to use this value, clear it
-          bestPath[diagonalPath - 1] = undefined;
-        }
-        var canAdd = false;
-        if (addPath) {
-          // what newPos will be after we do an insertion:
-          var addPathNewPos = addPath.oldPos - diagonalPath;
-          canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
-        }
-        var canRemove = removePath && removePath.oldPos + 1 < oldLen;
-        if (!canAdd && !canRemove) {
-          // If this path is a terminal then prune
-          bestPath[diagonalPath] = undefined;
-          continue;
-        }
-
-        // Select the diagonal that we want to branch from. We select the prior
-        // path whose position in the old string is the farthest from the origin
-        // and does not pass the bounds of the diff graph
-        if (!canRemove || canAdd && removePath.oldPos < addPath.oldPos) {
-          basePath = self.addToPath(addPath, true, false, 0, options);
-        } else {
-          basePath = self.addToPath(removePath, false, true, 1, options);
-        }
-        newPos = self.extractCommon(basePath, newString, oldString, diagonalPath, options);
-        if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
-          // If we have hit the end of both strings, then we are done
-          return done(buildValues(self, basePath.lastComponent, newString, oldString, self.useLongestToken));
-        } else {
-          bestPath[diagonalPath] = basePath;
-          if (basePath.oldPos + 1 >= oldLen) {
-            maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
-          }
-          if (newPos + 1 >= newLen) {
-            minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
-          }
-        }
-      }
-      editLength++;
-    }
-
-    // Performs the length of edit iteration. Is a bit fugly as this has to support the
-    // sync and async mode which is never fun. Loops over execEditLength until a value
-    // is produced, or until the edit length exceeds options.maxEditLength (if given),
-    // in which case it will return undefined.
-    if (callback) {
-      (function exec() {
-        setTimeout(function () {
-          if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
-            return callback();
-          }
-          if (!execEditLength()) {
-            exec();
-          }
-        }, 0);
-      })();
-    } else {
-      while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
-        var ret = execEditLength();
-        if (ret) {
-          return ret;
-        }
-      }
-    }
-  },
-  addToPath: function addToPath(path, added, removed, oldPosInc, options) {
-    var last = path.lastComponent;
-    if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
-      return {
-        oldPos: path.oldPos + oldPosInc,
-        lastComponent: {
-          count: last.count + 1,
-          added: added,
-          removed: removed,
-          previousComponent: last.previousComponent
-        }
-      };
-    } else {
-      return {
-        oldPos: path.oldPos + oldPosInc,
-        lastComponent: {
-          count: 1,
-          added: added,
-          removed: removed,
-          previousComponent: last
-        }
-      };
-    }
-  },
-  extractCommon: function extractCommon(basePath, newString, oldString, diagonalPath, options) {
-    var newLen = newString.length,
-      oldLen = oldString.length,
-      oldPos = basePath.oldPos,
-      newPos = oldPos - diagonalPath,
-      commonCount = 0;
-    while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldString[oldPos + 1], newString[newPos + 1], options)) {
-      newPos++;
-      oldPos++;
-      commonCount++;
-      if (options.oneChangePerToken) {
-        basePath.lastComponent = {
-          count: 1,
-          previousComponent: basePath.lastComponent,
-          added: false,
-          removed: false
-        };
-      }
-    }
-    if (commonCount && !options.oneChangePerToken) {
-      basePath.lastComponent = {
-        count: commonCount,
-        previousComponent: basePath.lastComponent,
-        added: false,
-        removed: false
-      };
-    }
-    basePath.oldPos = oldPos;
-    return newPos;
-  },
-  equals: function equals(left, right, options) {
-    if (options.comparator) {
-      return options.comparator(left, right);
-    } else {
-      return left === right || options.ignoreCase && left.toLowerCase() === right.toLowerCase();
-    }
-  },
-  removeEmpty: function removeEmpty(array) {
-    var ret = [];
-    for (var i = 0; i < array.length; i++) {
-      if (array[i]) {
-        ret.push(array[i]);
-      }
-    }
-    return ret;
-  },
-  castInput: function castInput(value) {
-    return value;
-  },
-  tokenize: function tokenize(value) {
-    return Array.from(value);
-  },
-  join: function join(chars) {
-    return chars.join('');
-  },
-  postProcess: function postProcess(changeObjects) {
-    return changeObjects;
-  }
-};
-function buildValues(diff, lastComponent, newString, oldString, useLongestToken) {
-  // First we convert our linked list of components in reverse order to an
-  // array in the right order:
-  var components = [];
-  var nextComponent;
-  while (lastComponent) {
-    components.push(lastComponent);
-    nextComponent = lastComponent.previousComponent;
-    delete lastComponent.previousComponent;
-    lastComponent = nextComponent;
-  }
-  components.reverse();
-  var componentPos = 0,
-    componentLen = components.length,
-    newPos = 0,
-    oldPos = 0;
-  for (; componentPos < componentLen; componentPos++) {
-    var component = components[componentPos];
-    if (!component.removed) {
-      if (!component.added && useLongestToken) {
-        var value = newString.slice(newPos, newPos + component.count);
-        value = value.map(function (value, i) {
-          var oldValue = oldString[oldPos + i];
-          return oldValue.length > value.length ? oldValue : value;
-        });
-        component.value = diff.join(value);
-      } else {
-        component.value = diff.join(newString.slice(newPos, newPos + component.count));
-      }
-      newPos += component.count;
-
-      // Common case
-      if (!component.added) {
-        oldPos += component.count;
-      }
-    } else {
-      component.value = diff.join(oldString.slice(oldPos, oldPos + component.count));
-      oldPos += component.count;
-    }
-  }
-  return components;
-}
-
-var characterDiff = new Diff();
-function diffChars(oldStr, newStr, options) {
-  return characterDiff.diff(oldStr, newStr, options);
-}
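
A usage sketch of the core driver via diffChars, assuming the published `diff` entry point; maxEditLength (and timeout) make diff() give up and return undefined once the edit length grows past the limit, matching the loop above.

import { diffChars } from 'diff';

console.log(diffChars('kitten', 'sitting'));
// change objects with { value, count, added, removed }

console.log(diffChars('kitten', 'sitting', { maxEditLength: 1 })); // undefined

// Async form: pass a callback instead of using the return value.
diffChars('kitten', 'sitting', changes => console.log(changes.length));
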
-
-function longestCommonPrefix(str1, str2) {
-  var i;
-  for (i = 0; i < str1.length && i < str2.length; i++) {
-    if (str1[i] != str2[i]) {
-      return str1.slice(0, i);
-    }
-  }
-  return str1.slice(0, i);
-}
-function longestCommonSuffix(str1, str2) {
-  var i;
-
-  // Unlike longestCommonPrefix, we need a special case to handle all scenarios
-  // where we return the empty string since str1.slice(-0) will return the
-  // entire string.
-  if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
-    return '';
-  }
-  for (i = 0; i < str1.length && i < str2.length; i++) {
-    if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
-      return str1.slice(-i);
-    }
-  }
-  return str1.slice(-i);
-}
-function replacePrefix(string, oldPrefix, newPrefix) {
-  if (string.slice(0, oldPrefix.length) != oldPrefix) {
-    throw Error("string ".concat(JSON.stringify(string), " doesn't start with prefix ").concat(JSON.stringify(oldPrefix), "; this is a bug"));
-  }
-  return newPrefix + string.slice(oldPrefix.length);
-}
-function replaceSuffix(string, oldSuffix, newSuffix) {
-  if (!oldSuffix) {
-    return string + newSuffix;
-  }
-  if (string.slice(-oldSuffix.length) != oldSuffix) {
-    throw Error("string ".concat(JSON.stringify(string), " doesn't end with suffix ").concat(JSON.stringify(oldSuffix), "; this is a bug"));
-  }
-  return string.slice(0, -oldSuffix.length) + newSuffix;
-}
-function removePrefix(string, oldPrefix) {
-  return replacePrefix(string, oldPrefix, '');
-}
-function removeSuffix(string, oldSuffix) {
-  return replaceSuffix(string, oldSuffix, '');
-}
-function maximumOverlap(string1, string2) {
-  return string2.slice(0, overlapCount(string1, string2));
-}
-
-// Nicked from https://stackoverflow.com/a/60422853/1709587
-function overlapCount(a, b) {
-  // Deal with cases where the strings differ in length
-  var startA = 0;
-  if (a.length > b.length) {
-    startA = a.length - b.length;
-  }
-  var endB = b.length;
-  if (a.length < b.length) {
-    endB = a.length;
-  }
-  // Create a back-reference for each index
-  //   that should be followed in case of a mismatch.
-  //   We only need B to make these references:
-  var map = Array(endB);
-  var k = 0; // Index that lags behind j
-  map[0] = 0;
-  for (var j = 1; j < endB; j++) {
-    if (b[j] == b[k]) {
-      map[j] = map[k]; // skip over the same character (optional optimisation)
-    } else {
-      map[j] = k;
-    }
-    while (k > 0 && b[j] != b[k]) {
-      k = map[k];
-    }
-    if (b[j] == b[k]) {
-      k++;
-    }
-  }
-  // Phase 2: use these references while iterating over A
-  k = 0;
-  for (var i = startA; i < a.length; i++) {
-    while (k > 0 && a[i] != b[k]) {
-      k = map[k];
-    }
-    if (a[i] == b[k]) {
-      k++;
-    }
-  }
-  return k;
-}
-
-/**
- * Returns true if the string consistently uses Windows line endings.
- */
-function hasOnlyWinLineEndings(string) {
-  return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
-}
-
-/**
- * Returns true if the string consistently uses Unix line endings.
- */
-function hasOnlyUnixLineEndings(string) {
-  return !string.includes('\r\n') && string.includes('\n');
-}
-
-// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
-//
-// Ranges and exceptions:
-// Latin-1 Supplement, 0080–00FF
-//  - U+00D7  × Multiplication sign
-//  - U+00F7  ÷ Division sign
-// Latin Extended-A, 0100–017F
-// Latin Extended-B, 0180–024F
-// IPA Extensions, 0250–02AF
-// Spacing Modifier Letters, 02B0–02FF
-//  - U+02C7  ˇ ˇ  Caron
-//  - U+02D8  ˘ ˘  Breve
-//  - U+02D9  ˙ ˙  Dot Above
-//  - U+02DA  ˚ ˚  Ring Above
-//  - U+02DB  ˛ ˛  Ogonek
-//  - U+02DC  ˜ ˜  Small Tilde
-//  - U+02DD  ˝ ˝  Double Acute Accent
-// Latin Extended Additional, 1E00–1EFF
-var extendedWordChars = "a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}";
-
-// Each token is one of the following:
-// - A punctuation mark plus the surrounding whitespace
-// - A word plus the surrounding whitespace
-// - Pure whitespace (but only in the special case where the entire text
-//   is just whitespace)
-//
-// We have to include surrounding whitespace in the tokens because the two
-// alternative approaches produce horribly broken results:
-// * If we just discard the whitespace, we can't fully reproduce the original
-//   text from the sequence of tokens and any attempt to render the diff will
-//   get the whitespace wrong.
-// * If we have separate tokens for whitespace, then in a typical text every
-//   second token will be a single space character. But this often results in
-//   the optimal diff between two texts being a perverse one that preserves
-//   the spaces between words but deletes and reinserts actual common words.
-//   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
-//   for an example.
-//
-// Keeping the surrounding whitespace of course has implications for .equals
-// and .join, not just .tokenize.
-
-// This regex does NOT fully implement the tokenization rules described above.
-// Instead, it gives runs of whitespace their own "token". The tokenize method
-// then handles stitching whitespace tokens onto adjacent word or punctuation
-// tokens.
-var tokenizeIncludingWhitespace = new RegExp("[".concat(extendedWordChars, "]+|\\s+|[^").concat(extendedWordChars, "]"), 'ug');
-var wordDiff = new Diff();
-wordDiff.equals = function (left, right, options) {
-  if (options.ignoreCase) {
-    left = left.toLowerCase();
-    right = right.toLowerCase();
-  }
-  return left.trim() === right.trim();
-};
-wordDiff.tokenize = function (value) {
-  var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-  var parts;
-  if (options.intlSegmenter) {
-    if (options.intlSegmenter.resolvedOptions().granularity != 'word') {
-      throw new Error('The segmenter passed must have a granularity of "word"');
-    }
-    parts = Array.from(options.intlSegmenter.segment(value), function (segment) {
-      return segment.segment;
-    });
-  } else {
-    parts = value.match(tokenizeIncludingWhitespace) || [];
-  }
-  var tokens = [];
-  var prevPart = null;
-  parts.forEach(function (part) {
-    if (/\s/.test(part)) {
-      if (prevPart == null) {
-        tokens.push(part);
-      } else {
-        tokens.push(tokens.pop() + part);
-      }
-    } else if (/\s/.test(prevPart)) {
-      if (tokens[tokens.length - 1] == prevPart) {
-        tokens.push(tokens.pop() + part);
-      } else {
-        tokens.push(prevPart + part);
-      }
-    } else {
-      tokens.push(part);
-    }
-    prevPart = part;
-  });
-  return tokens;
-};
-wordDiff.join = function (tokens) {
-  // Tokens being joined here will always have appeared consecutively in the
-  // same text, so we can simply strip off the leading whitespace from all the
-  // tokens except the first (and except any whitespace-only tokens - but such
-  // a token will always be the first and only token anyway) and then join them
-  // and the whitespace around words and punctuation will end up correct.
-  return tokens.map(function (token, i) {
-    if (i == 0) {
-      return token;
-    } else {
-      return token.replace(/^\s+/, '');
-    }
-  }).join('');
-};
-wordDiff.postProcess = function (changes, options) {
-  if (!changes || options.oneChangePerToken) {
-    return changes;
-  }
-  var lastKeep = null;
-  // Change objects representing any insertion or deletion since the last
-  // "keep" change object. There can be at most one of each.
-  var insertion = null;
-  var deletion = null;
-  changes.forEach(function (change) {
-    if (change.added) {
-      insertion = change;
-    } else if (change.removed) {
-      deletion = change;
-    } else {
-      if (insertion || deletion) {
-        // May be false at start of text
-        dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
-      }
-      lastKeep = change;
-      insertion = null;
-      deletion = null;
-    }
-  });
-  if (insertion || deletion) {
-    dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
-  }
-  return changes;
-};
-function diffWords(oldStr, newStr, options) {
-  // This option has never been documented and never will be (it's clearer to
-  // just call `diffWordsWithSpace` directly if you need that behavior), but
-  // has existed in jsdiff for a long time, so we retain support for it here
-  // for the sake of backwards compatibility.
-  if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
-    return diffWordsWithSpace(oldStr, newStr, options);
-  }
-  return wordDiff.diff(oldStr, newStr, options);
-}
-function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
-  // Before returning, we tidy up the leading and trailing whitespace of the
-  // change objects to eliminate cases where trailing whitespace in one object
-  // is repeated as leading whitespace in the next.
-  // Below are examples of the outcomes we want here to explain the code.
-  // I=insert, K=keep, D=delete
-  // 1. diffing 'foo bar baz' vs 'foo baz'
-  //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
-  //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
-  //
-  // 2. Diffing 'foo bar baz' vs 'foo qux baz'
-  //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
-  //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
-  //
-  // 3. Diffing 'foo\nbar baz' vs 'foo baz'
-  //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
-  //    After cleanup, we want K:'foo' D:'\nbar' K:' baz'
-  //
-  // 4. Diffing 'foo baz' vs 'foo\nbar baz'
-  //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
-  //    After cleanup, we ideally want K:'foo' I:'\nbar' K:' baz'
-  //    but don't actually manage this currently (the pre-cleanup change
-  //    objects don't contain enough information to make it possible).
-  //
-  // 5. Diffing 'foo   bar baz' vs 'foo  baz'
-  //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
-  //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
-  //
-  // Our handling is unavoidably imperfect in the case where there's a single
-  // indel between keeps and the whitespace has changed. For instance, consider
-  // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
-  // object to represent the insertion of the space character (which isn't even
-  // a token), we have no way to avoid losing information about the texts'
-  // original whitespace in the result we return. Still, we do our best to
-  // output something that will look sensible if we e.g. print it with
-  // insertions in green and deletions in red.
-
-  // Between two "keep" change objects (or before the first or after the last
-  // change object), we can have either:
-  // * A "delete" followed by an "insert"
-  // * Just an "insert"
-  // * Just a "delete"
-  // We handle the three cases separately.
-  if (deletion && insertion) {
-    var oldWsPrefix = deletion.value.match(/^\s*/)[0];
-    var oldWsSuffix = deletion.value.match(/\s*$/)[0];
-    var newWsPrefix = insertion.value.match(/^\s*/)[0];
-    var newWsSuffix = insertion.value.match(/\s*$/)[0];
-    if (startKeep) {
-      var commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
-      startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
-      deletion.value = removePrefix(deletion.value, commonWsPrefix);
-      insertion.value = removePrefix(insertion.value, commonWsPrefix);
-    }
-    if (endKeep) {
-      var commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
-      endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
-      deletion.value = removeSuffix(deletion.value, commonWsSuffix);
-      insertion.value = removeSuffix(insertion.value, commonWsSuffix);
-    }
-  } else if (insertion) {
-    // The whitespaces all reflect what was in the new text rather than
-    // the old, so we essentially have no information about whitespace
-    // insertion or deletion. We just want to dedupe the whitespace.
-    // We do that by having each change object keep its trailing
-    // whitespace and deleting duplicate leading whitespace where
-    // present.
-    if (startKeep) {
-      insertion.value = insertion.value.replace(/^\s*/, '');
-    }
-    if (endKeep) {
-      endKeep.value = endKeep.value.replace(/^\s*/, '');
-    }
-    // otherwise we've got a deletion and no insertion
-  } else if (startKeep && endKeep) {
-    var newWsFull = endKeep.value.match(/^\s*/)[0],
-      delWsStart = deletion.value.match(/^\s*/)[0],
-      delWsEnd = deletion.value.match(/\s*$/)[0];
-
-    // Any whitespace that comes straight after startKeep in both the old and
-    // new texts, assign to startKeep and remove from the deletion.
-    var newWsStart = longestCommonPrefix(newWsFull, delWsStart);
-    deletion.value = removePrefix(deletion.value, newWsStart);
-
-    // Any whitespace that comes straight before endKeep in both the old and
-    // new texts, and hasn't already been assigned to startKeep, assign to
-    // endKeep and remove from the deletion.
-    var newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
-    deletion.value = removeSuffix(deletion.value, newWsEnd);
-    endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
-
-    // If there's any whitespace from the new text that HASN'T already been
-    // assigned, assign it to the start:
-    startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
-  } else if (endKeep) {
-    // We are at the start of the text. Preserve all the whitespace on
-    // endKeep, and just remove whitespace from the end of deletion to the
-    // extent that it overlaps with the start of endKeep.
-    var endKeepWsPrefix = endKeep.value.match(/^\s*/)[0];
-    var deletionWsSuffix = deletion.value.match(/\s*$/)[0];
-    var overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
-    deletion.value = removeSuffix(deletion.value, overlap);
-  } else if (startKeep) {
-    // We are at the END of the text. Preserve all the whitespace on
-    // startKeep, and just remove whitespace from the start of deletion to
-    // the extent that it overlaps with the end of startKeep.
-    var startKeepWsSuffix = startKeep.value.match(/\s*$/)[0];
-    var deletionWsPrefix = deletion.value.match(/^\s*/)[0];
-    var _overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
-    deletion.value = removePrefix(deletion.value, _overlap);
-  }
-}
-var wordWithSpaceDiff = new Diff();
-wordWithSpaceDiff.tokenize = function (value) {
-  // Slightly different to the tokenizeIncludingWhitespace regex used above in
-  // that this one treats each individual newline as a distinct token, rather
-  // than merging them into other surrounding whitespace. This was requested
-  // in https://github.com/kpdecker/jsdiff/issues/180 &
-  //    https://github.com/kpdecker/jsdiff/issues/211
-  var regex = new RegExp("(\\r?\\n)|[".concat(extendedWordChars, "]+|[^\\S\\n\\r]+|[^").concat(extendedWordChars, "]"), 'ug');
-  return value.match(regex) || [];
-};
-function diffWordsWithSpace(oldStr, newStr, options) {
-  return wordWithSpaceDiff.diff(oldStr, newStr, options);
-}
-
-function generateOptions(options, defaults) {
-  if (typeof options === 'function') {
-    defaults.callback = options;
-  } else if (options) {
-    for (var name in options) {
-      /* istanbul ignore else */
-      if (options.hasOwnProperty(name)) {
-        defaults[name] = options[name];
-      }
-    }
-  }
-  return defaults;
-}
-
-var lineDiff = new Diff();
-lineDiff.tokenize = function (value, options) {
-  if (options.stripTrailingCr) {
-    // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
-    value = value.replace(/\r\n/g, '\n');
-  }
-  var retLines = [],
-    linesAndNewlines = value.split(/(\n|\r\n)/);
-
-  // Ignore the final empty token that occurs if the string ends with a new line
-  if (!linesAndNewlines[linesAndNewlines.length - 1]) {
-    linesAndNewlines.pop();
-  }
-
-  // Merge the content and line separators into single tokens
-  for (var i = 0; i < linesAndNewlines.length; i++) {
-    var line = linesAndNewlines[i];
-    if (i % 2 && !options.newlineIsToken) {
-      retLines[retLines.length - 1] += line;
-    } else {
-      retLines.push(line);
-    }
-  }
-  return retLines;
-};
-lineDiff.equals = function (left, right, options) {
-  // If we're ignoring whitespace, we need to normalise lines by stripping
-  // whitespace before checking equality. (This has an annoying interaction
-  // with newlineIsToken that requires special handling: if newlines get their
-  // own token, then we DON'T want to trim the *newline* tokens down to empty
-  // strings, since this would cause us to treat whitespace-only line content
-  // as equal to a separator between lines, which would be weird and
-  // inconsistent with the documented behavior of the options.)
-  if (options.ignoreWhitespace) {
-    if (!options.newlineIsToken || !left.includes('\n')) {
-      left = left.trim();
-    }
-    if (!options.newlineIsToken || !right.includes('\n')) {
-      right = right.trim();
-    }
-  } else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
-    if (left.endsWith('\n')) {
-      left = left.slice(0, -1);
-    }
-    if (right.endsWith('\n')) {
-      right = right.slice(0, -1);
-    }
-  }
-  return Diff.prototype.equals.call(this, left, right, options);
-};
-function diffLines(oldStr, newStr, callback) {
-  return lineDiff.diff(oldStr, newStr, callback);
-}
-
-// Kept for backwards compatibility. This is a rather arbitrary wrapper method
-// that just calls `diffLines` with `ignoreWhitespace: true`. It's confusing to
-// have two ways to do exactly the same thing in the API, so we no longer
-// document this one (library users should explicitly use `diffLines` with
-// `ignoreWhitespace: true` instead) but we keep it around to maintain
-// compatibility with code that used old versions.
-function diffTrimmedLines(oldStr, newStr, callback) {
-  var options = generateOptions(callback, {
-    ignoreWhitespace: true
-  });
-  return lineDiff.diff(oldStr, newStr, options);
-}
-
-var sentenceDiff = new Diff();
-sentenceDiff.tokenize = function (value) {
-  return value.split(/(\S.+?[.!?])(?=\s+|$)/);
-};
-function diffSentences(oldStr, newStr, callback) {
-  return sentenceDiff.diff(oldStr, newStr, callback);
-}
-
-var cssDiff = new Diff();
-cssDiff.tokenize = function (value) {
-  return value.split(/([{}:;,]|\s+)/);
-};
-function diffCss(oldStr, newStr, callback) {
-  return cssDiff.diff(oldStr, newStr, callback);
-}
-
-function ownKeys(e, r) {
-  var t = Object.keys(e);
-  if (Object.getOwnPropertySymbols) {
-    var o = Object.getOwnPropertySymbols(e);
-    r && (o = o.filter(function (r) {
-      return Object.getOwnPropertyDescriptor(e, r).enumerable;
-    })), t.push.apply(t, o);
-  }
-  return t;
-}
-function _objectSpread2(e) {
-  for (var r = 1; r < arguments.length; r++) {
-    var t = null != arguments[r] ? arguments[r] : {};
-    r % 2 ? ownKeys(Object(t), !0).forEach(function (r) {
-      _defineProperty(e, r, t[r]);
-    }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) {
-      Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r));
-    });
-  }
-  return e;
-}
-function _toPrimitive(t, r) {
-  if ("object" != typeof t || !t) return t;
-  var e = t[Symbol.toPrimitive];
-  if (void 0 !== e) {
-    var i = e.call(t, r || "default");
-    if ("object" != typeof i) return i;
-    throw new TypeError("@@toPrimitive must return a primitive value.");
-  }
-  return ("string" === r ? String : Number)(t);
-}
-function _toPropertyKey(t) {
-  var i = _toPrimitive(t, "string");
-  return "symbol" == typeof i ? i : i + "";
-}
-function _typeof(o) {
-  "@babel/helpers - typeof";
-
-  return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) {
-    return typeof o;
-  } : function (o) {
-    return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o;
-  }, _typeof(o);
-}
-function _defineProperty(obj, key, value) {
-  key = _toPropertyKey(key);
-  if (key in obj) {
-    Object.defineProperty(obj, key, {
-      value: value,
-      enumerable: true,
-      configurable: true,
-      writable: true
-    });
-  } else {
-    obj[key] = value;
-  }
-  return obj;
-}
-function _toConsumableArray(arr) {
-  return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread();
-}
-function _arrayWithoutHoles(arr) {
-  if (Array.isArray(arr)) return _arrayLikeToArray(arr);
-}
-function _iterableToArray(iter) {
-  if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter);
-}
-function _unsupportedIterableToArray(o, minLen) {
-  if (!o) return;
-  if (typeof o === "string") return _arrayLikeToArray(o, minLen);
-  var n = Object.prototype.toString.call(o).slice(8, -1);
-  if (n === "Object" && o.constructor) n = o.constructor.name;
-  if (n === "Map" || n === "Set") return Array.from(o);
-  if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen);
-}
-function _arrayLikeToArray(arr, len) {
-  if (len == null || len > arr.length) len = arr.length;
-  for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i];
-  return arr2;
-}
-function _nonIterableSpread() {
-  throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.");
-}
-
-var jsonDiff = new Diff();
-// Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
-// dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
-jsonDiff.useLongestToken = true;
-jsonDiff.tokenize = lineDiff.tokenize;
-jsonDiff.castInput = function (value, options) {
-  var undefinedReplacement = options.undefinedReplacement,
-    _options$stringifyRep = options.stringifyReplacer,
-    stringifyReplacer = _options$stringifyRep === void 0 ? function (k, v) {
-      return typeof v === 'undefined' ? undefinedReplacement : v;
-    } : _options$stringifyRep;
-  return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), stringifyReplacer, '  ');
-};
-jsonDiff.equals = function (left, right, options) {
-  return Diff.prototype.equals.call(jsonDiff, left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options);
-};
-function diffJson(oldObj, newObj, options) {
-  return jsonDiff.diff(oldObj, newObj, options);
-}
-
-// This function handles the presence of circular references by bailing out when encountering an
-// object that is already on the "stack" of items being processed. Accepts an optional replacer
-function canonicalize(obj, stack, replacementStack, replacer, key) {
-  stack = stack || [];
-  replacementStack = replacementStack || [];
-  if (replacer) {
-    obj = replacer(key, obj);
-  }
-  var i;
-  for (i = 0; i < stack.length; i += 1) {
-    if (stack[i] === obj) {
-      return replacementStack[i];
-    }
-  }
-  var canonicalizedObj;
-  if ('[object Array]' === Object.prototype.toString.call(obj)) {
-    stack.push(obj);
-    canonicalizedObj = new Array(obj.length);
-    replacementStack.push(canonicalizedObj);
-    for (i = 0; i < obj.length; i += 1) {
-      canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, key);
-    }
-    stack.pop();
-    replacementStack.pop();
-    return canonicalizedObj;
-  }
-  if (obj && obj.toJSON) {
-    obj = obj.toJSON();
-  }
-  if (_typeof(obj) === 'object' && obj !== null) {
-    stack.push(obj);
-    canonicalizedObj = {};
-    replacementStack.push(canonicalizedObj);
-    var sortedKeys = [],
-      _key;
-    for (_key in obj) {
-      /* istanbul ignore else */
-      if (Object.prototype.hasOwnProperty.call(obj, _key)) {
-        sortedKeys.push(_key);
-      }
-    }
-    sortedKeys.sort();
-    for (i = 0; i < sortedKeys.length; i += 1) {
-      _key = sortedKeys[i];
-      canonicalizedObj[_key] = canonicalize(obj[_key], stack, replacementStack, replacer, _key);
-    }
-    stack.pop();
-    replacementStack.pop();
-  } else {
-    canonicalizedObj = obj;
-  }
-  return canonicalizedObj;
-}
-
-var arrayDiff = new Diff();
-arrayDiff.tokenize = function (value) {
-  return value.slice();
-};
-arrayDiff.join = arrayDiff.removeEmpty = function (value) {
-  return value;
-};
-function diffArrays(oldArr, newArr, callback) {
-  return arrayDiff.diff(oldArr, newArr, callback);
-}
-
-function unixToWin(patch) {
-  if (Array.isArray(patch)) {
-    return patch.map(unixToWin);
-  }
-  return _objectSpread2(_objectSpread2({}, patch), {}, {
-    hunks: patch.hunks.map(function (hunk) {
-      return _objectSpread2(_objectSpread2({}, hunk), {}, {
-        lines: hunk.lines.map(function (line, i) {
-          var _hunk$lines;
-          return line.startsWith('\\') || line.endsWith('\r') || (_hunk$lines = hunk.lines[i + 1]) !== null && _hunk$lines !== void 0 && _hunk$lines.startsWith('\\') ? line : line + '\r';
-        })
-      });
-    })
-  });
-}
-function winToUnix(patch) {
-  if (Array.isArray(patch)) {
-    return patch.map(winToUnix);
-  }
-  return _objectSpread2(_objectSpread2({}, patch), {}, {
-    hunks: patch.hunks.map(function (hunk) {
-      return _objectSpread2(_objectSpread2({}, hunk), {}, {
-        lines: hunk.lines.map(function (line) {
-          return line.endsWith('\r') ? line.substring(0, line.length - 1) : line;
-        })
-      });
-    })
-  });
-}
-
-/**
- * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
- * no line endings).
- */
-function isUnix(patch) {
-  if (!Array.isArray(patch)) {
-    patch = [patch];
-  }
-  return !patch.some(function (index) {
-    return index.hunks.some(function (hunk) {
-      return hunk.lines.some(function (line) {
-        return !line.startsWith('\\') && line.endsWith('\r');
-      });
-    });
-  });
-}
-
-/**
- * Returns true if the patch uses Windows line endings and only Windows line endings.
- */
-function isWin(patch) {
-  if (!Array.isArray(patch)) {
-    patch = [patch];
-  }
-  return patch.some(function (index) {
-    return index.hunks.some(function (hunk) {
-      return hunk.lines.some(function (line) {
-        return line.endsWith('\r');
-      });
-    });
-  }) && patch.every(function (index) {
-    return index.hunks.every(function (hunk) {
-      return hunk.lines.every(function (line, i) {
-        var _hunk$lines2;
-        return line.startsWith('\\') || line.endsWith('\r') || ((_hunk$lines2 = hunk.lines[i + 1]) === null || _hunk$lines2 === void 0 ? void 0 : _hunk$lines2.startsWith('\\'));
-      });
-    });
-  });
-}
-
-function parsePatch(uniDiff) {
-  var diffstr = uniDiff.split(/\n/),
-    list = [],
-    i = 0;
-  function parseIndex() {
-    var index = {};
-    list.push(index);
-
-    // Parse diff metadata
-    while (i < diffstr.length) {
-      var line = diffstr[i];
-
-      // File header found, end parsing diff metadata
-      if (/^(\-\-\-|\+\+\+|@@)\s/.test(line)) {
-        break;
-      }
-
-      // Diff index
-      var header = /^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/.exec(line);
-      if (header) {
-        index.index = header[1];
-      }
-      i++;
-    }
-
-    // Parse file headers if they are defined. Unified diff requires them, but
-    // there's no technical issue with having an isolated hunk without a file header
-    parseFileHeader(index);
-    parseFileHeader(index);
-
-    // Parse hunks
-    index.hunks = [];
-    while (i < diffstr.length) {
-      var _line = diffstr[i];
-      if (/^(Index:\s|diff\s|\-\-\-\s|\+\+\+\s|===================================================================)/.test(_line)) {
-        break;
-      } else if (/^@@/.test(_line)) {
-        index.hunks.push(parseHunk());
-      } else if (_line) {
-        throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(_line));
-      } else {
-        i++;
-      }
-    }
-  }
-
-  // Parses the --- and +++ headers; if none are found, no lines
-  // are consumed.
-  function parseFileHeader(index) {
-    var fileHeader = /^(---|\+\+\+)\s+(.*)\r?$/.exec(diffstr[i]);
-    if (fileHeader) {
-      var keyPrefix = fileHeader[1] === '---' ? 'old' : 'new';
-      var data = fileHeader[2].split('\t', 2);
-      var fileName = data[0].replace(/\\\\/g, '\\');
-      if (/^".*"$/.test(fileName)) {
-        fileName = fileName.substr(1, fileName.length - 2);
-      }
-      index[keyPrefix + 'FileName'] = fileName;
-      index[keyPrefix + 'Header'] = (data[1] || '').trim();
-      i++;
-    }
-  }
-
-  // Parses a hunk
-  // This assumes that we are at the start of a hunk.
-  function parseHunk() {
-    var chunkHeaderIndex = i,
-      chunkHeaderLine = diffstr[i++],
-      chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
-    var hunk = {
-      oldStart: +chunkHeader[1],
-      oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
-      newStart: +chunkHeader[3],
-      newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
-      lines: []
-    };
-
-    // Unified Diff Format quirk: If the chunk size is 0,
-    // the first number is one lower than one would expect.
-    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-    if (hunk.oldLines === 0) {
-      hunk.oldStart += 1;
-    }
-    if (hunk.newLines === 0) {
-      hunk.newStart += 1;
-    }
-    var addCount = 0,
-      removeCount = 0;
-    for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || (_diffstr$i = diffstr[i]) !== null && _diffstr$i !== void 0 && _diffstr$i.startsWith('\\')); i++) {
-      var _diffstr$i;
-      var operation = diffstr[i].length == 0 && i != diffstr.length - 1 ? ' ' : diffstr[i][0];
-      if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
-        hunk.lines.push(diffstr[i]);
-        if (operation === '+') {
-          addCount++;
-        } else if (operation === '-') {
-          removeCount++;
-        } else if (operation === ' ') {
-          addCount++;
-          removeCount++;
-        }
-      } else {
-        throw new Error("Hunk at line ".concat(chunkHeaderIndex + 1, " contained invalid line ").concat(diffstr[i]));
-      }
-    }
-
-    // Handle the empty block count case
-    if (!addCount && hunk.newLines === 1) {
-      hunk.newLines = 0;
-    }
-    if (!removeCount && hunk.oldLines === 1) {
-      hunk.oldLines = 0;
-    }
-
-    // Perform sanity checking
-    if (addCount !== hunk.newLines) {
-      throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-    }
-    if (removeCount !== hunk.oldLines) {
-      throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-    }
-    return hunk;
-  }
-  while (i < diffstr.length) {
-    parseIndex();
-  }
-  return list;
-}
-
-// Iterator that traverses in the range of [min, max], stepping
-// by distance from a given start position. I.e. for [0, 4], with
-// start of 2, this will iterate 2, 3, 1, 4, 0.
-function distanceIterator (start, minLine, maxLine) {
-  var wantForward = true,
-    backwardExhausted = false,
-    forwardExhausted = false,
-    localOffset = 1;
-  return function iterator() {
-    if (wantForward && !forwardExhausted) {
-      if (backwardExhausted) {
-        localOffset++;
-      } else {
-        wantForward = false;
-      }
-
-      // Check if trying to fit beyond text length, and if not, check it fits
-      // after offset location (or desired location on first iteration)
-      if (start + localOffset <= maxLine) {
-        return start + localOffset;
-      }
-      forwardExhausted = true;
-    }
-    if (!backwardExhausted) {
-      if (!forwardExhausted) {
-        wantForward = true;
-      }
-
-      // Check if trying to fit before text beginning, and if not, check it fits
-      // before offset location
-      if (minLine <= start - localOffset) {
-        return start - localOffset++;
-      }
-      backwardExhausted = true;
-      return iterator();
-    }
-
-    // We tried to fit hunk before text beginning and beyond text length, then
-    // hunk can't fit on the text. Return undefined
-  };
-}
-
-function applyPatch(source, uniDiff) {
-  var options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-  if (typeof uniDiff === 'string') {
-    uniDiff = parsePatch(uniDiff);
-  }
-  if (Array.isArray(uniDiff)) {
-    if (uniDiff.length > 1) {
-      throw new Error('applyPatch only works with a single input.');
-    }
-    uniDiff = uniDiff[0];
-  }
-  if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
-    if (hasOnlyWinLineEndings(source) && isUnix(uniDiff)) {
-      uniDiff = unixToWin(uniDiff);
-    } else if (hasOnlyUnixLineEndings(source) && isWin(uniDiff)) {
-      uniDiff = winToUnix(uniDiff);
-    }
-  }
-
-  // Apply the diff to the input
-  var lines = source.split('\n'),
-    hunks = uniDiff.hunks,
-    compareLine = options.compareLine || function (lineNumber, line, operation, patchContent) {
-      return line === patchContent;
-    },
-    fuzzFactor = options.fuzzFactor || 0,
-    minLine = 0;
-  if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
-    throw new Error('fuzzFactor must be a non-negative integer');
-  }
-
-  // Special case for empty patch.
-  if (!hunks.length) {
-    return source;
-  }
-
-  // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
-  // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
-  // newline that already exists - then we either return false and fail to apply the patch (if
-  // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
-  // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
-  var prevLine = '',
-    removeEOFNL = false,
-    addEOFNL = false;
-  for (var i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
-    var line = hunks[hunks.length - 1].lines[i];
-    if (line[0] == '\\') {
-      if (prevLine[0] == '+') {
-        removeEOFNL = true;
-      } else if (prevLine[0] == '-') {
-        addEOFNL = true;
-      }
-    }
-    prevLine = line;
-  }
-  if (removeEOFNL) {
-    if (addEOFNL) {
-      // This means the final line gets changed but doesn't have a trailing newline in either the
-      // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
-      // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
-      if (!fuzzFactor && lines[lines.length - 1] == '') {
-        return false;
-      }
-    } else if (lines[lines.length - 1] == '') {
-      lines.pop();
-    } else if (!fuzzFactor) {
-      return false;
-    }
-  } else if (addEOFNL) {
-    if (lines[lines.length - 1] != '') {
-      lines.push('');
-    } else if (!fuzzFactor) {
-      return false;
-    }
-  }
-
-  /**
-   * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
-   * insertions, substitutions, or deletions, while ensuring also that:
-   * - lines deleted in the hunk match exactly, and
-   * - wherever an insertion operation or block of insertion operations appears in the hunk, the
-   *   immediately preceding and following lines of context match exactly
-   *
-   * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
-   *
-   * If the hunk can be applied, returns an object with properties `oldLineLastI` and
-   * `patchedLines`. Otherwise, returns null.
-   */
-  function applyHunk(hunkLines, toPos, maxErrors) {
-    var hunkLinesI = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;
-    var lastContextLineMatched = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : true;
-    var patchedLines = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : [];
-    var patchedLinesLength = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 0;
-    var nConsecutiveOldContextLines = 0;
-    var nextContextLineMustMatch = false;
-    for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
-      var hunkLine = hunkLines[hunkLinesI],
-        operation = hunkLine.length > 0 ? hunkLine[0] : ' ',
-        content = hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine;
-      if (operation === '-') {
-        if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-          toPos++;
-          nConsecutiveOldContextLines = 0;
-        } else {
-          if (!maxErrors || lines[toPos] == null) {
-            return null;
-          }
-          patchedLines[patchedLinesLength] = lines[toPos];
-          return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
-        }
-      }
-      if (operation === '+') {
-        if (!lastContextLineMatched) {
-          return null;
-        }
-        patchedLines[patchedLinesLength] = content;
-        patchedLinesLength++;
-        nConsecutiveOldContextLines = 0;
-        nextContextLineMustMatch = true;
-      }
-      if (operation === ' ') {
-        nConsecutiveOldContextLines++;
-        patchedLines[patchedLinesLength] = lines[toPos];
-        if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-          patchedLinesLength++;
-          lastContextLineMatched = true;
-          nextContextLineMustMatch = false;
-          toPos++;
-        } else {
-          if (nextContextLineMustMatch || !maxErrors) {
-            return null;
-          }
-
-          // Consider 3 possibilities in sequence:
-          // 1. lines contains a *substitution* not included in the patch context, or
-          // 2. lines contains an *insertion* not included in the patch context, or
-          // 3. lines contains a *deletion* not included in the patch context
-          // The first two options are of course only possible if the line from lines is non-null -
-          // i.e. only option 3 is possible if we've overrun the end of the old file.
-          return lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength);
-        }
-      }
-    }
-
-    // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
-    // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
-    // that starts in this hunk's trailing context.
-    patchedLinesLength -= nConsecutiveOldContextLines;
-    toPos -= nConsecutiveOldContextLines;
-    patchedLines.length = patchedLinesLength;
-    return {
-      patchedLines: patchedLines,
-      oldLineLastI: toPos - 1
-    };
-  }
-  var resultLines = [];
-
-  // Search best fit offsets for each hunk based on the previous ones
-  var prevHunkOffset = 0;
-  for (var _i = 0; _i < hunks.length; _i++) {
-    var hunk = hunks[_i];
-    var hunkResult = void 0;
-    var maxLine = lines.length - hunk.oldLines + fuzzFactor;
-    var toPos = void 0;
-    for (var maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
-      toPos = hunk.oldStart + prevHunkOffset - 1;
-      var iterator = distanceIterator(toPos, minLine, maxLine);
-      for (; toPos !== undefined; toPos = iterator()) {
-        hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
-        if (hunkResult) {
-          break;
-        }
-      }
-      if (hunkResult) {
-        break;
-      }
-    }
-    if (!hunkResult) {
-      return false;
-    }
-
-    // Copy everything from the end of where we applied the last hunk to the start of this hunk
-    for (var _i2 = minLine; _i2 < toPos; _i2++) {
-      resultLines.push(lines[_i2]);
-    }
-
-    // Add the lines produced by applying the hunk:
-    for (var _i3 = 0; _i3 < hunkResult.patchedLines.length; _i3++) {
-      var _line = hunkResult.patchedLines[_i3];
-      resultLines.push(_line);
-    }
-
-    // Set lower text limit to end of the current hunk, so next ones don't try
-    // to fit over already patched text
-    minLine = hunkResult.oldLineLastI + 1;
-
-    // Note the offset between where the patch said the hunk should've applied and where we
-    // applied it, so we can adjust future hunks accordingly:
-    prevHunkOffset = toPos + 1 - hunk.oldStart;
-  }
-
-  // Copy over the rest of the lines from the old text
-  for (var _i4 = minLine; _i4 < lines.length; _i4++) {
-    resultLines.push(lines[_i4]);
-  }
-  return resultLines.join('\n');
-}
-
-// Wrapper that supports multiple file patches via callbacks.
-function applyPatches(uniDiff, options) {
-  if (typeof uniDiff === 'string') {
-    uniDiff = parsePatch(uniDiff);
-  }
-  var currentIndex = 0;
-  function processIndex() {
-    var index = uniDiff[currentIndex++];
-    if (!index) {
-      return options.complete();
-    }
-    options.loadFile(index, function (err, data) {
-      if (err) {
-        return options.complete(err);
-      }
-      var updatedContent = applyPatch(data, index, options);
-      options.patched(index, updatedContent, function (err) {
-        if (err) {
-          return options.complete(err);
-        }
-        processIndex();
-      });
-    });
-  }
-  processIndex();
-}
-
-function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-  if (!options) {
-    options = {};
-  }
-  if (typeof options === 'function') {
-    options = {
-      callback: options
-    };
-  }
-  if (typeof options.context === 'undefined') {
-    options.context = 4;
-  }
-  if (options.newlineIsToken) {
-    throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
-  }
-  if (!options.callback) {
-    return diffLinesResultToPatch(diffLines(oldStr, newStr, options));
-  } else {
-    var _options = options,
-      _callback = _options.callback;
-    diffLines(oldStr, newStr, _objectSpread2(_objectSpread2({}, options), {}, {
-      callback: function callback(diff) {
-        var patch = diffLinesResultToPatch(diff);
-        _callback(patch);
-      }
-    }));
-  }
-  function diffLinesResultToPatch(diff) {
-    // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
-    //         of lines containing trailing newline characters. We'll tidy up later...
-
-    if (!diff) {
-      return;
-    }
-    diff.push({
-      value: '',
-      lines: []
-    }); // Append an empty value to make cleanup easier
-
-    function contextLines(lines) {
-      return lines.map(function (entry) {
-        return ' ' + entry;
-      });
-    }
-    var hunks = [];
-    var oldRangeStart = 0,
-      newRangeStart = 0,
-      curRange = [],
-      oldLine = 1,
-      newLine = 1;
-    var _loop = function _loop() {
-      var current = diff[i],
-        lines = current.lines || splitLines(current.value);
-      current.lines = lines;
-      if (current.added || current.removed) {
-        var _curRange;
-        // If we have previous context, start with that
-        if (!oldRangeStart) {
-          var prev = diff[i - 1];
-          oldRangeStart = oldLine;
-          newRangeStart = newLine;
-          if (prev) {
-            curRange = options.context > 0 ? contextLines(prev.lines.slice(-options.context)) : [];
-            oldRangeStart -= curRange.length;
-            newRangeStart -= curRange.length;
-          }
-        }
-
-        // Output our changes
-        (_curRange = curRange).push.apply(_curRange, _toConsumableArray(lines.map(function (entry) {
-          return (current.added ? '+' : '-') + entry;
-        })));
-
-        // Track the updated file position
-        if (current.added) {
-          newLine += lines.length;
-        } else {
-          oldLine += lines.length;
-        }
-      } else {
-        // Identical context lines. Track line changes
-        if (oldRangeStart) {
-          // Close out any changes that have been output (or join overlapping)
-          if (lines.length <= options.context * 2 && i < diff.length - 2) {
-            var _curRange2;
-            // Overlapping
-            (_curRange2 = curRange).push.apply(_curRange2, _toConsumableArray(contextLines(lines)));
-          } else {
-            var _curRange3;
-            // end the range and output
-            var contextSize = Math.min(lines.length, options.context);
-            (_curRange3 = curRange).push.apply(_curRange3, _toConsumableArray(contextLines(lines.slice(0, contextSize))));
-            var _hunk = {
-              oldStart: oldRangeStart,
-              oldLines: oldLine - oldRangeStart + contextSize,
-              newStart: newRangeStart,
-              newLines: newLine - newRangeStart + contextSize,
-              lines: curRange
-            };
-            hunks.push(_hunk);
-            oldRangeStart = 0;
-            newRangeStart = 0;
-            curRange = [];
-          }
-        }
-        oldLine += lines.length;
-        newLine += lines.length;
-      }
-    };
-    for (var i = 0; i < diff.length; i++) {
-      _loop();
-    }
-
-    // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
-    //         "\ No newline at end of file".
-    for (var _i = 0, _hunks = hunks; _i < _hunks.length; _i++) {
-      var hunk = _hunks[_i];
-      for (var _i2 = 0; _i2 < hunk.lines.length; _i2++) {
-        if (hunk.lines[_i2].endsWith('\n')) {
-          hunk.lines[_i2] = hunk.lines[_i2].slice(0, -1);
-        } else {
-          hunk.lines.splice(_i2 + 1, 0, '\\ No newline at end of file');
-          _i2++; // Skip the line we just added, then continue iterating
-        }
-      }
-    }
-    return {
-      oldFileName: oldFileName,
-      newFileName: newFileName,
-      oldHeader: oldHeader,
-      newHeader: newHeader,
-      hunks: hunks
-    };
-  }
-}
-function formatPatch(diff) {
-  if (Array.isArray(diff)) {
-    return diff.map(formatPatch).join('\n');
-  }
-  var ret = [];
-  if (diff.oldFileName == diff.newFileName) {
-    ret.push('Index: ' + diff.oldFileName);
-  }
-  ret.push('===================================================================');
-  ret.push('--- ' + diff.oldFileName + (typeof diff.oldHeader === 'undefined' ? '' : '\t' + diff.oldHeader));
-  ret.push('+++ ' + diff.newFileName + (typeof diff.newHeader === 'undefined' ? '' : '\t' + diff.newHeader));
-  for (var i = 0; i < diff.hunks.length; i++) {
-    var hunk = diff.hunks[i];
-    // Unified Diff Format quirk: If the chunk size is 0,
-    // the first number is one lower than one would expect.
-    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-    if (hunk.oldLines === 0) {
-      hunk.oldStart -= 1;
-    }
-    if (hunk.newLines === 0) {
-      hunk.newStart -= 1;
-    }
-    ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + ' +' + hunk.newStart + ',' + hunk.newLines + ' @@');
-    ret.push.apply(ret, hunk.lines);
-  }
-  return ret.join('\n') + '\n';
-}
-function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-  var _options2;
-  if (typeof options === 'function') {
-    options = {
-      callback: options
-    };
-  }
-  if (!((_options2 = options) !== null && _options2 !== void 0 && _options2.callback)) {
-    var patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
-    if (!patchObj) {
-      return;
-    }
-    return formatPatch(patchObj);
-  } else {
-    var _options3 = options,
-      _callback2 = _options3.callback;
-    structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, _objectSpread2(_objectSpread2({}, options), {}, {
-      callback: function callback(patchObj) {
-        if (!patchObj) {
-          _callback2();
-        } else {
-          _callback2(formatPatch(patchObj));
-        }
-      }
-    }));
-  }
-}
-function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
-  return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
-}
-
-/**
- * Split `text` into an array of lines, including the trailing newline character (where present)
- */
-function splitLines(text) {
-  var hasTrailingNl = text.endsWith('\n');
-  var result = text.split('\n').map(function (line) {
-    return line + '\n';
-  });
-  if (hasTrailingNl) {
-    result.pop();
-  } else {
-    result.push(result.pop().slice(0, -1));
-  }
-  return result;
-}
-
-function arrayEqual(a, b) {
-  if (a.length !== b.length) {
-    return false;
-  }
-  return arrayStartsWith(a, b);
-}
-function arrayStartsWith(array, start) {
-  if (start.length > array.length) {
-    return false;
-  }
-  for (var i = 0; i < start.length; i++) {
-    if (start[i] !== array[i]) {
-      return false;
-    }
-  }
-  return true;
-}
-
-function calcLineCount(hunk) {
-  var _calcOldNewLineCount = calcOldNewLineCount(hunk.lines),
-    oldLines = _calcOldNewLineCount.oldLines,
-    newLines = _calcOldNewLineCount.newLines;
-  if (oldLines !== undefined) {
-    hunk.oldLines = oldLines;
-  } else {
-    delete hunk.oldLines;
-  }
-  if (newLines !== undefined) {
-    hunk.newLines = newLines;
-  } else {
-    delete hunk.newLines;
-  }
-}
-function merge(mine, theirs, base) {
-  mine = loadPatch(mine, base);
-  theirs = loadPatch(theirs, base);
-  var ret = {};
-
-  // For index we just let it pass through as it doesn't have any necessary meaning.
-  // Leaving sanity checks on this to the API consumer that may know more about the
-  // meaning in their own context.
-  if (mine.index || theirs.index) {
-    ret.index = mine.index || theirs.index;
-  }
-  if (mine.newFileName || theirs.newFileName) {
-    if (!fileNameChanged(mine)) {
-      // No header or no change in ours, use theirs (and ours if theirs does not exist)
-      ret.oldFileName = theirs.oldFileName || mine.oldFileName;
-      ret.newFileName = theirs.newFileName || mine.newFileName;
-      ret.oldHeader = theirs.oldHeader || mine.oldHeader;
-      ret.newHeader = theirs.newHeader || mine.newHeader;
-    } else if (!fileNameChanged(theirs)) {
-      // No header or no change in theirs, use ours
-      ret.oldFileName = mine.oldFileName;
-      ret.newFileName = mine.newFileName;
-      ret.oldHeader = mine.oldHeader;
-      ret.newHeader = mine.newHeader;
-    } else {
-      // Both changed... figure it out
-      ret.oldFileName = selectField(ret, mine.oldFileName, theirs.oldFileName);
-      ret.newFileName = selectField(ret, mine.newFileName, theirs.newFileName);
-      ret.oldHeader = selectField(ret, mine.oldHeader, theirs.oldHeader);
-      ret.newHeader = selectField(ret, mine.newHeader, theirs.newHeader);
-    }
-  }
-  ret.hunks = [];
-  var mineIndex = 0,
-    theirsIndex = 0,
-    mineOffset = 0,
-    theirsOffset = 0;
-  while (mineIndex < mine.hunks.length || theirsIndex < theirs.hunks.length) {
-    var mineCurrent = mine.hunks[mineIndex] || {
-        oldStart: Infinity
-      },
-      theirsCurrent = theirs.hunks[theirsIndex] || {
-        oldStart: Infinity
-      };
-    if (hunkBefore(mineCurrent, theirsCurrent)) {
-      // This patch does not overlap with any of the others, yay.
-      ret.hunks.push(cloneHunk(mineCurrent, mineOffset));
-      mineIndex++;
-      theirsOffset += mineCurrent.newLines - mineCurrent.oldLines;
-    } else if (hunkBefore(theirsCurrent, mineCurrent)) {
-      // This patch does not overlap with any of the others, yay.
-      ret.hunks.push(cloneHunk(theirsCurrent, theirsOffset));
-      theirsIndex++;
-      mineOffset += theirsCurrent.newLines - theirsCurrent.oldLines;
-    } else {
-      // Overlap, merge as best we can
-      var mergedHunk = {
-        oldStart: Math.min(mineCurrent.oldStart, theirsCurrent.oldStart),
-        oldLines: 0,
-        newStart: Math.min(mineCurrent.newStart + mineOffset, theirsCurrent.oldStart + theirsOffset),
-        newLines: 0,
-        lines: []
-      };
-      mergeLines(mergedHunk, mineCurrent.oldStart, mineCurrent.lines, theirsCurrent.oldStart, theirsCurrent.lines);
-      theirsIndex++;
-      mineIndex++;
-      ret.hunks.push(mergedHunk);
-    }
-  }
-  return ret;
-}
-function loadPatch(param, base) {
-  if (typeof param === 'string') {
-    if (/^@@/m.test(param) || /^Index:/m.test(param)) {
-      return parsePatch(param)[0];
-    }
-    if (!base) {
-      throw new Error('Must provide a base reference or pass in a patch');
-    }
-    return structuredPatch(undefined, undefined, base, param);
-  }
-  return param;
-}
-function fileNameChanged(patch) {
-  return patch.newFileName && patch.newFileName !== patch.oldFileName;
-}
-function selectField(index, mine, theirs) {
-  if (mine === theirs) {
-    return mine;
-  } else {
-    index.conflict = true;
-    return {
-      mine: mine,
-      theirs: theirs
-    };
-  }
-}
-function hunkBefore(test, check) {
-  return test.oldStart < check.oldStart && test.oldStart + test.oldLines < check.oldStart;
-}
-function cloneHunk(hunk, offset) {
-  return {
-    oldStart: hunk.oldStart,
-    oldLines: hunk.oldLines,
-    newStart: hunk.newStart + offset,
-    newLines: hunk.newLines,
-    lines: hunk.lines
-  };
-}
-function mergeLines(hunk, mineOffset, mineLines, theirOffset, theirLines) {
-  // This will generally result in a conflicted hunk, but there are cases where the context
-  // is the only overlap where we can successfully merge the content here.
-  var mine = {
-      offset: mineOffset,
-      lines: mineLines,
-      index: 0
-    },
-    their = {
-      offset: theirOffset,
-      lines: theirLines,
-      index: 0
-    };
-
-  // Handle any leading content
-  insertLeading(hunk, mine, their);
-  insertLeading(hunk, their, mine);
-
-  // Now in the overlap content. Scan through and select the best changes from each.
-  while (mine.index < mine.lines.length && their.index < their.lines.length) {
-    var mineCurrent = mine.lines[mine.index],
-      theirCurrent = their.lines[their.index];
-    if ((mineCurrent[0] === '-' || mineCurrent[0] === '+') && (theirCurrent[0] === '-' || theirCurrent[0] === '+')) {
-      // Both modified ...
-      mutualChange(hunk, mine, their);
-    } else if (mineCurrent[0] === '+' && theirCurrent[0] === ' ') {
-      var _hunk$lines;
-      // Mine inserted
-      (_hunk$lines = hunk.lines).push.apply(_hunk$lines, _toConsumableArray(collectChange(mine)));
-    } else if (theirCurrent[0] === '+' && mineCurrent[0] === ' ') {
-      var _hunk$lines2;
-      // Theirs inserted
-      (_hunk$lines2 = hunk.lines).push.apply(_hunk$lines2, _toConsumableArray(collectChange(their)));
-    } else if (mineCurrent[0] === '-' && theirCurrent[0] === ' ') {
-      // Mine removed or edited
-      removal(hunk, mine, their);
-    } else if (theirCurrent[0] === '-' && mineCurrent[0] === ' ') {
-      // Their removed or edited
-      removal(hunk, their, mine, true);
-    } else if (mineCurrent === theirCurrent) {
-      // Context identity
-      hunk.lines.push(mineCurrent);
-      mine.index++;
-      their.index++;
-    } else {
-      // Context mismatch
-      conflict(hunk, collectChange(mine), collectChange(their));
-    }
-  }
-
-  // Now push anything that may be remaining
-  insertTrailing(hunk, mine);
-  insertTrailing(hunk, their);
-  calcLineCount(hunk);
-}
-function mutualChange(hunk, mine, their) {
-  var myChanges = collectChange(mine),
-    theirChanges = collectChange(their);
-  if (allRemoves(myChanges) && allRemoves(theirChanges)) {
-    // Special case for remove changes that are supersets of one another
-    if (arrayStartsWith(myChanges, theirChanges) && skipRemoveSuperset(their, myChanges, myChanges.length - theirChanges.length)) {
-      var _hunk$lines3;
-      (_hunk$lines3 = hunk.lines).push.apply(_hunk$lines3, _toConsumableArray(myChanges));
-      return;
-    } else if (arrayStartsWith(theirChanges, myChanges) && skipRemoveSuperset(mine, theirChanges, theirChanges.length - myChanges.length)) {
-      var _hunk$lines4;
-      (_hunk$lines4 = hunk.lines).push.apply(_hunk$lines4, _toConsumableArray(theirChanges));
-      return;
-    }
-  } else if (arrayEqual(myChanges, theirChanges)) {
-    var _hunk$lines5;
-    (_hunk$lines5 = hunk.lines).push.apply(_hunk$lines5, _toConsumableArray(myChanges));
-    return;
-  }
-  conflict(hunk, myChanges, theirChanges);
-}
-function removal(hunk, mine, their, swap) {
-  var myChanges = collectChange(mine),
-    theirChanges = collectContext(their, myChanges);
-  if (theirChanges.merged) {
-    var _hunk$lines6;
-    (_hunk$lines6 = hunk.lines).push.apply(_hunk$lines6, _toConsumableArray(theirChanges.merged));
-  } else {
-    conflict(hunk, swap ? theirChanges : myChanges, swap ? myChanges : theirChanges);
-  }
-}
-function conflict(hunk, mine, their) {
-  hunk.conflict = true;
-  hunk.lines.push({
-    conflict: true,
-    mine: mine,
-    theirs: their
-  });
-}
-function insertLeading(hunk, insert, their) {
-  while (insert.offset < their.offset && insert.index < insert.lines.length) {
-    var line = insert.lines[insert.index++];
-    hunk.lines.push(line);
-    insert.offset++;
-  }
-}
-function insertTrailing(hunk, insert) {
-  while (insert.index < insert.lines.length) {
-    var line = insert.lines[insert.index++];
-    hunk.lines.push(line);
-  }
-}
-function collectChange(state) {
-  var ret = [],
-    operation = state.lines[state.index][0];
-  while (state.index < state.lines.length) {
-    var line = state.lines[state.index];
-
-    // Group additions that are immediately after subtractions and treat them as one "atomic" modify change.
-    if (operation === '-' && line[0] === '+') {
-      operation = '+';
-    }
-    if (operation === line[0]) {
-      ret.push(line);
-      state.index++;
-    } else {
-      break;
-    }
-  }
-  return ret;
-}
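
collectChange gathers a run of same-operation lines, with the one twist noted in the comment: a '+' run that immediately follows a '-' run is folded into the same change so the pair reads as a single edit. A small hand-traced sketch using the state shape seen above:

    const state = { index: 0, lines: ['-old 1', '-old 2', '+new', ' ctx'] };
    // collectChange(state) returns ['-old 1', '-old 2', '+new']
    // and leaves state.index === 3, pointing at the ' ctx' line.
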
-function collectContext(state, matchChanges) {
-  var changes = [],
-    merged = [],
-    matchIndex = 0,
-    contextChanges = false,
-    conflicted = false;
-  while (matchIndex < matchChanges.length && state.index < state.lines.length) {
-    var change = state.lines[state.index],
-      match = matchChanges[matchIndex];
-
-    // Once we've hit our add, then we are done
-    if (match[0] === '+') {
-      break;
-    }
-    contextChanges = contextChanges || change[0] !== ' ';
-    merged.push(match);
-    matchIndex++;
-
-    // Consume any additions in the other block as a conflict to attempt
-    // to pull in the remaining context after this
-    if (change[0] === '+') {
-      conflicted = true;
-      while (change[0] === '+') {
-        changes.push(change);
-        change = state.lines[++state.index];
-      }
-    }
-    if (match.substr(1) === change.substr(1)) {
-      changes.push(change);
-      state.index++;
-    } else {
-      conflicted = true;
-    }
-  }
-  if ((matchChanges[matchIndex] || '')[0] === '+' && contextChanges) {
-    conflicted = true;
-  }
-  if (conflicted) {
-    return changes;
-  }
-  while (matchIndex < matchChanges.length) {
-    merged.push(matchChanges[matchIndex++]);
-  }
-  return {
-    merged: merged,
-    changes: changes
-  };
-}
-function allRemoves(changes) {
-  return changes.reduce(function (prev, change) {
-    return prev && change[0] === '-';
-  }, true);
-}
-function skipRemoveSuperset(state, removeChanges, delta) {
-  for (var i = 0; i < delta; i++) {
-    var changeContent = removeChanges[removeChanges.length - delta + i].substr(1);
-    if (state.lines[state.index + i] !== ' ' + changeContent) {
-      return false;
-    }
-  }
-  state.index += delta;
-  return true;
-}
-function calcOldNewLineCount(lines) {
-  var oldLines = 0;
-  var newLines = 0;
-  lines.forEach(function (line) {
-    if (typeof line !== 'string') {
-      var myCount = calcOldNewLineCount(line.mine);
-      var theirCount = calcOldNewLineCount(line.theirs);
-      if (oldLines !== undefined) {
-        if (myCount.oldLines === theirCount.oldLines) {
-          oldLines += myCount.oldLines;
-        } else {
-          oldLines = undefined;
-        }
-      }
-      if (newLines !== undefined) {
-        if (myCount.newLines === theirCount.newLines) {
-          newLines += myCount.newLines;
-        } else {
-          newLines = undefined;
-        }
-      }
-    } else {
-      if (newLines !== undefined && (line[0] === '+' || line[0] === ' ')) {
-        newLines++;
-      }
-      if (oldLines !== undefined && (line[0] === '-' || line[0] === ' ')) {
-        oldLines++;
-      }
-    }
-  });
-  return {
-    oldLines: oldLines,
-    newLines: newLines
-  };
-}
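
calcOldNewLineCount tolerates conflict entries by counting each side separately and only keeping a total when both sides agree; a disagreement makes that count undefined, signalling that the merged hunk's size cannot be stated. A hand-worked example of the rule:

    const lines = [
      ' ctx',
      { conflict: true, mine: ['+a', '+b'], theirs: ['+x'] } // 2 added vs 1 added
    ];
    // calcOldNewLineCount(lines) -> { oldLines: 1, newLines: undefined }
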
-
-function reversePatch(structuredPatch) {
-  if (Array.isArray(structuredPatch)) {
-    return structuredPatch.map(reversePatch).reverse();
-  }
-  return _objectSpread2(_objectSpread2({}, structuredPatch), {}, {
-    oldFileName: structuredPatch.newFileName,
-    oldHeader: structuredPatch.newHeader,
-    newFileName: structuredPatch.oldFileName,
-    newHeader: structuredPatch.oldHeader,
-    hunks: structuredPatch.hunks.map(function (hunk) {
-      return {
-        oldLines: hunk.newLines,
-        oldStart: hunk.newStart,
-        newLines: hunk.oldLines,
-        newStart: hunk.oldStart,
-        lines: hunk.lines.map(function (l) {
-          if (l.startsWith('-')) {
-            return "+".concat(l.slice(1));
-          }
-          if (l.startsWith('+')) {
-            return "-".concat(l.slice(1));
-          }
-          return l;
-        })
-      };
-    })
-  });
-}
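
reversePatch returns a flipped copy of a structured patch: old/new file names, headers, hunk ranges, and the +/- direction of every line are swapped, so applying the reversed patch undoes the original one. A minimal round-trip sketch:

    const { createPatch, parsePatch, reversePatch, applyPatch } = require('diff');

    const oldStr = 'a\nb\n';
    const newStr = 'a\nB\n';

    const forward = parsePatch(createPatch('f.txt', oldStr, newStr))[0];
    const undone  = applyPatch(newStr, reversePatch(forward)); // 'a\nb\n' (or false on failure)
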
-
-// See: http://code.google.com/p/google-diff-match-patch/wiki/API
-function convertChangesToDMP(changes) {
-  var ret = [],
-    change,
-    operation;
-  for (var i = 0; i < changes.length; i++) {
-    change = changes[i];
-    if (change.added) {
-      operation = 1;
-    } else if (change.removed) {
-      operation = -1;
-    } else {
-      operation = 0;
-    }
-    ret.push([operation, change.value]);
-  }
-  return ret;
-}
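
convertChangesToDMP reshapes a change list into the [operation, text] tuples used by Google's diff-match-patch: 1 for additions, -1 for removals, 0 for unchanged text. For example:

    const { diffChars, convertChangesToDMP } = require('diff');

    // e.g. [[0, 'ca'], [-1, 't'], [1, 'r']]
    const tuples = convertChangesToDMP(diffChars('cat', 'car'));
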
-
-function convertChangesToXML(changes) {
-  var ret = [];
-  for (var i = 0; i < changes.length; i++) {
-    var change = changes[i];
-    if (change.added) {
-      ret.push('<ins>');
-    } else if (change.removed) {
-      ret.push('<del>');
-    }
-    ret.push(escapeHTML(change.value));
-    if (change.added) {
-      ret.push('</ins>');
-    } else if (change.removed) {
-      ret.push('</del>');
-    }
-  }
-  return ret.join('');
-}
-function escapeHTML(s) {
-  var n = s;
-  n = n.replace(/&/g, '&amp;');
-  n = n.replace(/</g, '&lt;');
-  n = n.replace(/>/g, '&gt;');
-  n = n.replace(/"/g, '&quot;');
-  return n;
-}
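
convertChangesToXML wraps additions in <ins> and removals in <del>, escaping the values through escapeHTML on the way. For example:

    const { diffWords, convertChangesToXML } = require('diff');

    // '<del>old</del><ins>new</ins> text'
    const html = convertChangesToXML(diffWords('old text', 'new text'));
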
-
-export { Diff, applyPatch, applyPatches, canonicalize, convertChangesToDMP, convertChangesToXML, createPatch, createTwoFilesPatch, diffArrays, diffChars, diffCss, diffJson, diffLines, diffSentences, diffTrimmedLines, diffWords, diffWordsWithSpace, formatPatch, merge, parsePatch, reversePatch, structuredPatch };
diff --git a/node_modules/diff/lib/patch/apply.js b/node_modules/diff/lib/patch/apply.js
deleted file mode 100644
index 619def1f48efa..0000000000000
--- a/node_modules/diff/lib/patch/apply.js
+++ /dev/null
@@ -1,393 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.applyPatch = applyPatch;
-exports.applyPatches = applyPatches;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_string = require("../util/string")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_lineEndings = require("./line-endings")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_parse = require("./parse")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_distanceIterator = _interopRequireDefault(require("../util/distance-iterator"))
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { "default": obj }; }
-/*istanbul ignore end*/
-function applyPatch(source, uniDiff) {
-  /*istanbul ignore start*/
-  var
-  /*istanbul ignore end*/
-  options = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : {};
-  if (typeof uniDiff === 'string') {
-    uniDiff =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _parse
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    parsePatch)
-    /*istanbul ignore end*/
-    (uniDiff);
-  }
-  if (Array.isArray(uniDiff)) {
-    if (uniDiff.length > 1) {
-      throw new Error('applyPatch only works with a single input.');
-    }
-    uniDiff = uniDiff[0];
-  }
-  if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
-    if (
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    hasOnlyWinLineEndings)
-    /*istanbul ignore end*/
-    (source) &&
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _lineEndings
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    isUnix)
-    /*istanbul ignore end*/
-    (uniDiff)) {
-      uniDiff =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _lineEndings
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      unixToWin)
-      /*istanbul ignore end*/
-      (uniDiff);
-    } else if (
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _string
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    hasOnlyUnixLineEndings)
-    /*istanbul ignore end*/
-    (source) &&
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _lineEndings
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    isWin)
-    /*istanbul ignore end*/
-    (uniDiff)) {
-      uniDiff =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _lineEndings
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      winToUnix)
-      /*istanbul ignore end*/
-      (uniDiff);
-    }
-  }
-
-  // Apply the diff to the input
-  var lines = source.split('\n'),
-    hunks = uniDiff.hunks,
-    compareLine = options.compareLine || function (lineNumber, line, operation, patchContent)
-    /*istanbul ignore start*/
-    {
-      return (
-        /*istanbul ignore end*/
-        line === patchContent
-      );
-    },
-    fuzzFactor = options.fuzzFactor || 0,
-    minLine = 0;
-  if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
-    throw new Error('fuzzFactor must be a non-negative integer');
-  }
-
-  // Special case for empty patch.
-  if (!hunks.length) {
-    return source;
-  }
-
-  // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
-  // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
-  // newline that already exists - then we either return false and fail to apply the patch (if
-  // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
-  // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
-  var prevLine = '',
-    removeEOFNL = false,
-    addEOFNL = false;
-  for (var i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
-    var line = hunks[hunks.length - 1].lines[i];
-    if (line[0] == '\\') {
-      if (prevLine[0] == '+') {
-        removeEOFNL = true;
-      } else if (prevLine[0] == '-') {
-        addEOFNL = true;
-      }
-    }
-    prevLine = line;
-  }
-  if (removeEOFNL) {
-    if (addEOFNL) {
-      // This means the final line gets changed but doesn't have a trailing newline in either the
-      // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
-      // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
-      if (!fuzzFactor && lines[lines.length - 1] == '') {
-        return false;
-      }
-    } else if (lines[lines.length - 1] == '') {
-      lines.pop();
-    } else if (!fuzzFactor) {
-      return false;
-    }
-  } else if (addEOFNL) {
-    if (lines[lines.length - 1] != '') {
-      lines.push('');
-    } else if (!fuzzFactor) {
-      return false;
-    }
-  }
-
-  /**
-   * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
-   * insertions, substitutions, or deletions, while ensuring also that:
-   * - lines deleted in the hunk match exactly, and
-   * - wherever an insertion operation or block of insertion operations appears in the hunk, the
-   *   immediately preceding and following lines of context match exactly
-   *
-   * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
-   *
-   * If the hunk can be applied, returns an object with properties `oldLineLastI` and
-   * `replacementLines`. Otherwise, returns null.
-   */
-  function applyHunk(hunkLines, toPos, maxErrors) {
-    /*istanbul ignore start*/
-    var
-    /*istanbul ignore end*/
-    hunkLinesI = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : 0;
-    /*istanbul ignore start*/
-    var
-    /*istanbul ignore end*/
-    lastContextLineMatched = arguments.length > 4 && arguments[4] !== undefined ? arguments[4] : true;
-    /*istanbul ignore start*/
-    var
-    /*istanbul ignore end*/
-    patchedLines = arguments.length > 5 && arguments[5] !== undefined ? arguments[5] : [];
-    /*istanbul ignore start*/
-    var
-    /*istanbul ignore end*/
-    patchedLinesLength = arguments.length > 6 && arguments[6] !== undefined ? arguments[6] : 0;
-    var nConsecutiveOldContextLines = 0;
-    var nextContextLineMustMatch = false;
-    for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
-      var hunkLine = hunkLines[hunkLinesI],
-        operation = hunkLine.length > 0 ? hunkLine[0] : ' ',
-        content = hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine;
-      if (operation === '-') {
-        if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-          toPos++;
-          nConsecutiveOldContextLines = 0;
-        } else {
-          if (!maxErrors || lines[toPos] == null) {
-            return null;
-          }
-          patchedLines[patchedLinesLength] = lines[toPos];
-          return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
-        }
-      }
-      if (operation === '+') {
-        if (!lastContextLineMatched) {
-          return null;
-        }
-        patchedLines[patchedLinesLength] = content;
-        patchedLinesLength++;
-        nConsecutiveOldContextLines = 0;
-        nextContextLineMustMatch = true;
-      }
-      if (operation === ' ') {
-        nConsecutiveOldContextLines++;
-        patchedLines[patchedLinesLength] = lines[toPos];
-        if (compareLine(toPos + 1, lines[toPos], operation, content)) {
-          patchedLinesLength++;
-          lastContextLineMatched = true;
-          nextContextLineMustMatch = false;
-          toPos++;
-        } else {
-          if (nextContextLineMustMatch || !maxErrors) {
-            return null;
-          }
-
-          // Consider 3 possibilities in sequence:
-          // 1. lines contains a *substitution* not included in the patch context, or
-          // 2. lines contains an *insertion* not included in the patch context, or
-          // 3. lines contains a *deletion* not included in the patch context
-          // The first two options are of course only possible if the line from lines is non-null -
-          // i.e. only option 3 is possible if we've overrun the end of the old file.
-          return lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength);
-        }
-      }
-    }
-
-    // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
-    // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
-    // that starts in this hunk's trailing context.
-    patchedLinesLength -= nConsecutiveOldContextLines;
-    toPos -= nConsecutiveOldContextLines;
-    patchedLines.length = patchedLinesLength;
-    return {
-      patchedLines: patchedLines,
-      oldLineLastI: toPos - 1
-    };
-  }
-  var resultLines = [];
-
-  // Search best fit offsets for each hunk based on the previous ones
-  var prevHunkOffset = 0;
-  for (var _i = 0; _i < hunks.length; _i++) {
-    var hunk = hunks[_i];
-    var hunkResult =
-    /*istanbul ignore start*/
-    void 0
-    /*istanbul ignore end*/
-    ;
-    var maxLine = lines.length - hunk.oldLines + fuzzFactor;
-    var toPos =
-    /*istanbul ignore start*/
-    void 0
-    /*istanbul ignore end*/
-    ;
-    for (var maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
-      toPos = hunk.oldStart + prevHunkOffset - 1;
-      var iterator =
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _distanceIterator
-      /*istanbul ignore end*/
-      [
-      /*istanbul ignore start*/
-      "default"
-      /*istanbul ignore end*/
-      ])(toPos, minLine, maxLine);
-      for (; toPos !== undefined; toPos = iterator()) {
-        hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
-        if (hunkResult) {
-          break;
-        }
-      }
-      if (hunkResult) {
-        break;
-      }
-    }
-    if (!hunkResult) {
-      return false;
-    }
-
-    // Copy everything from the end of where we applied the last hunk to the start of this hunk
-    for (var _i2 = minLine; _i2 < toPos; _i2++) {
-      resultLines.push(lines[_i2]);
-    }
-
-    // Add the lines produced by applying the hunk:
-    for (var _i3 = 0; _i3 < hunkResult.patchedLines.length; _i3++) {
-      var _line = hunkResult.patchedLines[_i3];
-      resultLines.push(_line);
-    }
-
-    // Set lower text limit to end of the current hunk, so next ones don't try
-    // to fit over already patched text
-    minLine = hunkResult.oldLineLastI + 1;
-
-    // Note the offset between where the patch said the hunk should've applied and where we
-    // applied it, so we can adjust future hunks accordingly:
-    prevHunkOffset = toPos + 1 - hunk.oldStart;
-  }
-
-  // Copy over the rest of the lines from the old text
-  for (var _i4 = minLine; _i4 < lines.length; _i4++) {
-    resultLines.push(lines[_i4]);
-  }
-  return resultLines.join('\n');
-}
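
In practice the fuzzFactor machinery above means a patch can still land when the target has drifted on a context line, as long as deleted lines and the context around insertions still match exactly. A minimal sketch (file contents invented for illustration):

    const { createPatch, applyPatch } = require('diff');

    const oldStr = 'a\nb\nc\nd\ne\nf\n';
    const newStr = 'a\nB\nc\nd\ne\nf\n';
    const patch  = createPatch('f.txt', oldStr, newStr);

    // The target drifted on a context line ('e' became 'E'):
    const drifted = 'a\nb\nc\nd\nE\nf\n';

    applyPatch(drifted, patch);                    // false: context must match exactly
    applyPatch(drifted, patch, { fuzzFactor: 1 }); // 'a\nB\nc\nd\nE\nf\n'
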
-
-// Wrapper that supports multiple file patches via callbacks.
-function applyPatches(uniDiff, options) {
-  if (typeof uniDiff === 'string') {
-    uniDiff =
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _parse
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    parsePatch)
-    /*istanbul ignore end*/
-    (uniDiff);
-  }
-  var currentIndex = 0;
-  function processIndex() {
-    var index = uniDiff[currentIndex++];
-    if (!index) {
-      return options.complete();
-    }
-    options.loadFile(index, function (err, data) {
-      if (err) {
-        return options.complete(err);
-      }
-      var updatedContent = applyPatch(data, index, options);
-      options.patched(index, updatedContent, function (err) {
-        if (err) {
-          return options.complete(err);
-        }
-        processIndex();
-      });
-    });
-  }
-  processIndex();
-}
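
applyPatches leaves all I/O to the caller: loadFile supplies the original contents for each parsed file patch, patched receives the result (or false when a hunk would not apply), and complete fires once at the end or on the first error. A rough usage sketch with Node's fs (file-name handling simplified; multiFilePatchText is assumed to be a multi-file unified diff string):

    const { applyPatches } = require('diff');
    const fs = require('node:fs');

    applyPatches(multiFilePatchText, {
      loadFile (index, done) {
        fs.readFile(index.oldFileName, 'utf8', done);
      },
      patched (index, content, done) {
        if (content === false) {
          return done(new Error(`patch failed for ${index.newFileName}`));
        }
        fs.writeFile(index.newFileName, content, done);
      },
      complete (err) {
        if (err) {
          console.error(err);
        }
      },
    });
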
diff --git a/node_modules/diff/lib/patch/create.js b/node_modules/diff/lib/patch/create.js
deleted file mode 100644
index 10ec2d46ff6e8..0000000000000
--- a/node_modules/diff/lib/patch/create.js
+++ /dev/null
@@ -1,369 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.createPatch = createPatch;
-exports.createTwoFilesPatch = createTwoFilesPatch;
-exports.formatPatch = formatPatch;
-exports.structuredPatch = structuredPatch;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_line = require("../diff/line")
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
-function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }
-function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter); }
-function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
-function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
-function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
-function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
-function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
-/*istanbul ignore end*/
-function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-  if (!options) {
-    options = {};
-  }
-  if (typeof options === 'function') {
-    options = {
-      callback: options
-    };
-  }
-  if (typeof options.context === 'undefined') {
-    options.context = 4;
-  }
-  if (options.newlineIsToken) {
-    throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
-  }
-  if (!options.callback) {
-    return diffLinesResultToPatch(
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _line
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    diffLines)
-    /*istanbul ignore end*/
-    (oldStr, newStr, options));
-  } else {
-    var
-      /*istanbul ignore start*/
-      _options =
-      /*istanbul ignore end*/
-      options,
-      /*istanbul ignore start*/
-      /*istanbul ignore end*/
-      _callback = _options.callback;
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _line
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    diffLines)
-    /*istanbul ignore end*/
-    (oldStr, newStr,
-    /*istanbul ignore start*/
-    _objectSpread(_objectSpread({},
-    /*istanbul ignore end*/
-    options), {}, {
-      callback: function
-      /*istanbul ignore start*/
-      callback
-      /*istanbul ignore end*/
-      (diff) {
-        var patch = diffLinesResultToPatch(diff);
-        _callback(patch);
-      }
-    }));
-  }
-  function diffLinesResultToPatch(diff) {
-    // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
-    //         of lines containing trailing newline characters. We'll tidy up later...
-
-    if (!diff) {
-      return;
-    }
-    diff.push({
-      value: '',
-      lines: []
-    }); // Append an empty value to make cleanup easier
-
-    function contextLines(lines) {
-      return lines.map(function (entry) {
-        return ' ' + entry;
-      });
-    }
-    var hunks = [];
-    var oldRangeStart = 0,
-      newRangeStart = 0,
-      curRange = [],
-      oldLine = 1,
-      newLine = 1;
-    /*istanbul ignore start*/
-    var _loop = function _loop()
-    /*istanbul ignore end*/
-    {
-      var current = diff[i],
-        lines = current.lines || splitLines(current.value);
-      current.lines = lines;
-      if (current.added || current.removed) {
-        /*istanbul ignore start*/
-        var _curRange;
-        /*istanbul ignore end*/
-        // If we have previous context, start with that
-        if (!oldRangeStart) {
-          var prev = diff[i - 1];
-          oldRangeStart = oldLine;
-          newRangeStart = newLine;
-          if (prev) {
-            curRange = options.context > 0 ? contextLines(prev.lines.slice(-options.context)) : [];
-            oldRangeStart -= curRange.length;
-            newRangeStart -= curRange.length;
-          }
-        }
-
-        // Output our changes
-        /*istanbul ignore start*/
-        /*istanbul ignore end*/
-        /*istanbul ignore start*/
-        (_curRange =
-        /*istanbul ignore end*/
-        curRange).push.apply(
-        /*istanbul ignore start*/
-        _curRange
-        /*istanbul ignore end*/
-        ,
-        /*istanbul ignore start*/
-        _toConsumableArray(
-        /*istanbul ignore end*/
-        lines.map(function (entry) {
-          return (current.added ? '+' : '-') + entry;
-        })));
-
-        // Track the updated file position
-        if (current.added) {
-          newLine += lines.length;
-        } else {
-          oldLine += lines.length;
-        }
-      } else {
-        // Identical context lines. Track line changes
-        if (oldRangeStart) {
-          // Close out any changes that have been output (or join overlapping)
-          if (lines.length <= options.context * 2 && i < diff.length - 2) {
-            /*istanbul ignore start*/
-            var _curRange2;
-            /*istanbul ignore end*/
-            // Overlapping
-            /*istanbul ignore start*/
-            /*istanbul ignore end*/
-            /*istanbul ignore start*/
-            (_curRange2 =
-            /*istanbul ignore end*/
-            curRange).push.apply(
-            /*istanbul ignore start*/
-            _curRange2
-            /*istanbul ignore end*/
-            ,
-            /*istanbul ignore start*/
-            _toConsumableArray(
-            /*istanbul ignore end*/
-            contextLines(lines)));
-          } else {
-            /*istanbul ignore start*/
-            var _curRange3;
-            /*istanbul ignore end*/
-            // end the range and output
-            var contextSize = Math.min(lines.length, options.context);
-            /*istanbul ignore start*/
-            /*istanbul ignore end*/
-            /*istanbul ignore start*/
-            (_curRange3 =
-            /*istanbul ignore end*/
-            curRange).push.apply(
-            /*istanbul ignore start*/
-            _curRange3
-            /*istanbul ignore end*/
-            ,
-            /*istanbul ignore start*/
-            _toConsumableArray(
-            /*istanbul ignore end*/
-            contextLines(lines.slice(0, contextSize))));
-            var _hunk = {
-              oldStart: oldRangeStart,
-              oldLines: oldLine - oldRangeStart + contextSize,
-              newStart: newRangeStart,
-              newLines: newLine - newRangeStart + contextSize,
-              lines: curRange
-            };
-            hunks.push(_hunk);
-            oldRangeStart = 0;
-            newRangeStart = 0;
-            curRange = [];
-          }
-        }
-        oldLine += lines.length;
-        newLine += lines.length;
-      }
-    };
-    for (var i = 0; i < diff.length; i++)
-    /*istanbul ignore start*/
-    {
-      _loop();
-    }
-
-    // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
-    //         "\ No newline at end of file".
-    /*istanbul ignore end*/
-    for (
-    /*istanbul ignore start*/
-    var _i = 0, _hunks =
-      /*istanbul ignore end*/
-      hunks;
-    /*istanbul ignore start*/
-    _i < _hunks.length
-    /*istanbul ignore end*/
-    ;
-    /*istanbul ignore start*/
-    _i++
-    /*istanbul ignore end*/
-    ) {
-      var hunk =
-      /*istanbul ignore start*/
-      _hunks[_i]
-      /*istanbul ignore end*/
-      ;
-      for (var _i2 = 0; _i2 < hunk.lines.length; _i2++) {
-        if (hunk.lines[_i2].endsWith('\n')) {
-          hunk.lines[_i2] = hunk.lines[_i2].slice(0, -1);
-        } else {
-          hunk.lines.splice(_i2 + 1, 0, '\\ No newline at end of file');
-          _i2++; // Skip the line we just added, then continue iterating
-        }
-      }
-    }
-    return {
-      oldFileName: oldFileName,
-      newFileName: newFileName,
-      oldHeader: oldHeader,
-      newHeader: newHeader,
-      hunks: hunks
-    };
-  }
-}
-function formatPatch(diff) {
-  if (Array.isArray(diff)) {
-    return diff.map(formatPatch).join('\n');
-  }
-  var ret = [];
-  if (diff.oldFileName == diff.newFileName) {
-    ret.push('Index: ' + diff.oldFileName);
-  }
-  ret.push('===================================================================');
-  ret.push('--- ' + diff.oldFileName + (typeof diff.oldHeader === 'undefined' ? '' : '\t' + diff.oldHeader));
-  ret.push('+++ ' + diff.newFileName + (typeof diff.newHeader === 'undefined' ? '' : '\t' + diff.newHeader));
-  for (var i = 0; i < diff.hunks.length; i++) {
-    var hunk = diff.hunks[i];
-    // Unified Diff Format quirk: If the chunk size is 0,
-    // the first number is one lower than one would expect.
-    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-    if (hunk.oldLines === 0) {
-      hunk.oldStart -= 1;
-    }
-    if (hunk.newLines === 0) {
-      hunk.newStart -= 1;
-    }
-    ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines + ' +' + hunk.newStart + ',' + hunk.newLines + ' @@');
-    ret.push.apply(ret, hunk.lines);
-  }
-  return ret.join('\n') + '\n';
-}
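
The zero-size quirk called out in the comment above is easiest to see with a hunk that only inserts lines: oldLines is 0, so the printed old start is one lower than the insertion point. A hand-worked example:

    const { formatPatch } = require('diff');

    const patch = {
      oldFileName: 'f', newFileName: 'f',
      hunks: [{ oldStart: 3, oldLines: 0, newStart: 3, newLines: 1, lines: ['+inserted'] }],
    };
    // The hunk header in formatPatch(patch) comes out as: @@ -2,0 +3,1 @@
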
-function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
-  /*istanbul ignore start*/
-  var _options2;
-  /*istanbul ignore end*/
-  if (typeof options === 'function') {
-    options = {
-      callback: options
-    };
-  }
-  if (!
-  /*istanbul ignore start*/
-  ((_options2 =
-  /*istanbul ignore end*/
-  options) !== null && _options2 !== void 0 &&
-  /*istanbul ignore start*/
-  _options2
-  /*istanbul ignore end*/
-  .callback)) {
-    var patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
-    if (!patchObj) {
-      return;
-    }
-    return formatPatch(patchObj);
-  } else {
-    var
-      /*istanbul ignore start*/
-      _options3 =
-      /*istanbul ignore end*/
-      options,
-      /*istanbul ignore start*/
-      /*istanbul ignore end*/
-      _callback2 = _options3.callback;
-    structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader,
-    /*istanbul ignore start*/
-    _objectSpread(_objectSpread({},
-    /*istanbul ignore end*/
-    options), {}, {
-      callback: function
-      /*istanbul ignore start*/
-      callback
-      /*istanbul ignore end*/
-      (patchObj) {
-        if (!patchObj) {
-          _callback2();
-        } else {
-          _callback2(formatPatch(patchObj));
-        }
-      }
-    }));
-  }
-}
-function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
-  return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
-}
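
createPatch is just the single-file convenience wrapper shown above; when the file was renamed, createTwoFilesPatch takes both names explicitly. For example:

    const { createTwoFilesPatch } = require('diff');

    const patch = createTwoFilesPatch(
      'a/config.json', 'b/config.json',
      '{\n  "debug": false\n}\n',
      '{\n  "debug": true\n}\n',
      undefined, undefined,
      { context: 1 }
    );
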
-
-/**
- * Split `text` into an array of lines, including the trailing newline character (where present)
- */
-function splitLines(text) {
-  var hasTrailingNl = text.endsWith('\n');
-  var result = text.split('\n').map(function (line)
-  /*istanbul ignore start*/
-  {
-    return (
-      /*istanbul ignore end*/
-      line + '\n'
-    );
-  });
-  if (hasTrailingNl) {
-    result.pop();
-  } else {
-    result.push(result.pop().slice(0, -1));
-  }
-  return result;
-}
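
splitLines keeps the newline on every line except a final unterminated one, which is what lets the hunk builder above decide later where a "\ No newline at end of file" marker is needed. Hand-traced:

    // splitLines('a\nb')   -> ['a\n', 'b']
    // splitLines('a\nb\n') -> ['a\n', 'b\n']
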
diff --git a/node_modules/diff/lib/patch/line-endings.js b/node_modules/diff/lib/patch/line-endings.js
deleted file mode 100644
index 8d00bd22030ab..0000000000000
--- a/node_modules/diff/lib/patch/line-endings.js
+++ /dev/null
@@ -1,176 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.isUnix = isUnix;
-exports.isWin = isWin;
-exports.unixToWin = unixToWin;
-exports.winToUnix = winToUnix;
-function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
-function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
-function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
-function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
-function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
-/*istanbul ignore end*/
-function unixToWin(patch) {
-  if (Array.isArray(patch)) {
-    return patch.map(unixToWin);
-  }
-  return (
-    /*istanbul ignore start*/
-    _objectSpread(_objectSpread({},
-    /*istanbul ignore end*/
-    patch), {}, {
-      hunks: patch.hunks.map(function (hunk)
-      /*istanbul ignore start*/
-      {
-        return _objectSpread(_objectSpread({},
-        /*istanbul ignore end*/
-        hunk), {}, {
-          lines: hunk.lines.map(function (line, i)
-          /*istanbul ignore start*/
-          {
-            var _hunk$lines;
-            return (
-              /*istanbul ignore end*/
-              line.startsWith('\\') || line.endsWith('\r') ||
-              /*istanbul ignore start*/
-              (_hunk$lines =
-              /*istanbul ignore end*/
-              hunk.lines[i + 1]) !== null && _hunk$lines !== void 0 &&
-              /*istanbul ignore start*/
-              _hunk$lines
-              /*istanbul ignore end*/
-              .startsWith('\\') ? line : line + '\r'
-            );
-          })
-        });
-      })
-    })
-  );
-}
-function winToUnix(patch) {
-  if (Array.isArray(patch)) {
-    return patch.map(winToUnix);
-  }
-  return (
-    /*istanbul ignore start*/
-    _objectSpread(_objectSpread({},
-    /*istanbul ignore end*/
-    patch), {}, {
-      hunks: patch.hunks.map(function (hunk)
-      /*istanbul ignore start*/
-      {
-        return _objectSpread(_objectSpread({},
-        /*istanbul ignore end*/
-        hunk), {}, {
-          lines: hunk.lines.map(function (line)
-          /*istanbul ignore start*/
-          {
-            return (
-              /*istanbul ignore end*/
-              line.endsWith('\r') ? line.substring(0, line.length - 1) : line
-            );
-          })
-        });
-      })
-    })
-  );
-}
-
-/**
- * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
- * no line endings).
- */
-function isUnix(patch) {
-  if (!Array.isArray(patch)) {
-    patch = [patch];
-  }
-  return !patch.some(function (index)
-  /*istanbul ignore start*/
-  {
-    return (
-      /*istanbul ignore end*/
-      index.hunks.some(function (hunk)
-      /*istanbul ignore start*/
-      {
-        return (
-          /*istanbul ignore end*/
-          hunk.lines.some(function (line)
-          /*istanbul ignore start*/
-          {
-            return (
-              /*istanbul ignore end*/
-              !line.startsWith('\\') && line.endsWith('\r')
-            );
-          })
-        );
-      })
-    );
-  });
-}
-
-/**
- * Returns true if the patch uses Windows line endings and only Windows line endings.
- */
-function isWin(patch) {
-  if (!Array.isArray(patch)) {
-    patch = [patch];
-  }
-  return patch.some(function (index)
-  /*istanbul ignore start*/
-  {
-    return (
-      /*istanbul ignore end*/
-      index.hunks.some(function (hunk)
-      /*istanbul ignore start*/
-      {
-        return (
-          /*istanbul ignore end*/
-          hunk.lines.some(function (line)
-          /*istanbul ignore start*/
-          {
-            return (
-              /*istanbul ignore end*/
-              line.endsWith('\r')
-            );
-          })
-        );
-      })
-    );
-  }) && patch.every(function (index)
-  /*istanbul ignore start*/
-  {
-    return (
-      /*istanbul ignore end*/
-      index.hunks.every(function (hunk)
-      /*istanbul ignore start*/
-      {
-        return (
-          /*istanbul ignore end*/
-          hunk.lines.every(function (line, i)
-          /*istanbul ignore start*/
-          {
-            var _hunk$lines2;
-            return (
-              /*istanbul ignore end*/
-              line.startsWith('\\') || line.endsWith('\r') ||
-              /*istanbul ignore start*/
-              ((_hunk$lines2 =
-              /*istanbul ignore end*/
-              hunk.lines[i + 1]) === null || _hunk$lines2 === void 0 ? void 0 :
-              /*istanbul ignore start*/
-              _hunk$lines2
-              /*istanbul ignore end*/
-              .startsWith('\\'))
-            );
-          })
-        );
-      })
-    );
-  });
-}
diff --git a/node_modules/diff/lib/patch/merge.js b/node_modules/diff/lib/patch/merge.js
deleted file mode 100644
index fead4e011df0d..0000000000000
--- a/node_modules/diff/lib/patch/merge.js
+++ /dev/null
@@ -1,535 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.calcLineCount = calcLineCount;
-exports.merge = merge;
-/*istanbul ignore end*/
-var
-/*istanbul ignore start*/
-_create = require("./create")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_parse = require("./parse")
-/*istanbul ignore end*/
-;
-var
-/*istanbul ignore start*/
-_array = require("../util/array")
-/*istanbul ignore end*/
-;
-/*istanbul ignore start*/ function _toConsumableArray(arr) { return _arrayWithoutHoles(arr) || _iterableToArray(arr) || _unsupportedIterableToArray(arr) || _nonIterableSpread(); }
-function _nonIterableSpread() { throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); }
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-function _iterableToArray(iter) { if (typeof Symbol !== "undefined" && iter[Symbol.iterator] != null || iter["@@iterator"] != null) return Array.from(iter); }
-function _arrayWithoutHoles(arr) { if (Array.isArray(arr)) return _arrayLikeToArray(arr); }
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) arr2[i] = arr[i]; return arr2; }
-/*istanbul ignore end*/
-function calcLineCount(hunk) {
-  var
-    /*istanbul ignore start*/
-    _calcOldNewLineCount =
-    /*istanbul ignore end*/
-    calcOldNewLineCount(hunk.lines),
-    /*istanbul ignore start*/
-    /*istanbul ignore end*/
-    oldLines = _calcOldNewLineCount.oldLines,
-    /*istanbul ignore start*/
-    /*istanbul ignore end*/
-    newLines = _calcOldNewLineCount.newLines;
-  if (oldLines !== undefined) {
-    hunk.oldLines = oldLines;
-  } else {
-    delete hunk.oldLines;
-  }
-  if (newLines !== undefined) {
-    hunk.newLines = newLines;
-  } else {
-    delete hunk.newLines;
-  }
-}
-function merge(mine, theirs, base) {
-  mine = loadPatch(mine, base);
-  theirs = loadPatch(theirs, base);
-  var ret = {};
-
-  // For index we just let it pass through as it doesn't have any necessary meaning.
-  // Leaving sanity checks on this to the API consumer that may know more about the
-  // meaning in their own context.
-  if (mine.index || theirs.index) {
-    ret.index = mine.index || theirs.index;
-  }
-  if (mine.newFileName || theirs.newFileName) {
-    if (!fileNameChanged(mine)) {
-      // No header or no change in ours, use theirs (and ours if theirs does not exist)
-      ret.oldFileName = theirs.oldFileName || mine.oldFileName;
-      ret.newFileName = theirs.newFileName || mine.newFileName;
-      ret.oldHeader = theirs.oldHeader || mine.oldHeader;
-      ret.newHeader = theirs.newHeader || mine.newHeader;
-    } else if (!fileNameChanged(theirs)) {
-      // No header or no change in theirs, use ours
-      ret.oldFileName = mine.oldFileName;
-      ret.newFileName = mine.newFileName;
-      ret.oldHeader = mine.oldHeader;
-      ret.newHeader = mine.newHeader;
-    } else {
-      // Both changed... figure it out
-      ret.oldFileName = selectField(ret, mine.oldFileName, theirs.oldFileName);
-      ret.newFileName = selectField(ret, mine.newFileName, theirs.newFileName);
-      ret.oldHeader = selectField(ret, mine.oldHeader, theirs.oldHeader);
-      ret.newHeader = selectField(ret, mine.newHeader, theirs.newHeader);
-    }
-  }
-  ret.hunks = [];
-  var mineIndex = 0,
-    theirsIndex = 0,
-    mineOffset = 0,
-    theirsOffset = 0;
-  while (mineIndex < mine.hunks.length || theirsIndex < theirs.hunks.length) {
-    var mineCurrent = mine.hunks[mineIndex] || {
-        oldStart: Infinity
-      },
-      theirsCurrent = theirs.hunks[theirsIndex] || {
-        oldStart: Infinity
-      };
-    if (hunkBefore(mineCurrent, theirsCurrent)) {
-      // This patch does not overlap with any of the others, yay.
-      ret.hunks.push(cloneHunk(mineCurrent, mineOffset));
-      mineIndex++;
-      theirsOffset += mineCurrent.newLines - mineCurrent.oldLines;
-    } else if (hunkBefore(theirsCurrent, mineCurrent)) {
-      // This patch does not overlap with any of the others, yay.
-      ret.hunks.push(cloneHunk(theirsCurrent, theirsOffset));
-      theirsIndex++;
-      mineOffset += theirsCurrent.newLines - theirsCurrent.oldLines;
-    } else {
-      // Overlap, merge as best we can
-      var mergedHunk = {
-        oldStart: Math.min(mineCurrent.oldStart, theirsCurrent.oldStart),
-        oldLines: 0,
-        newStart: Math.min(mineCurrent.newStart + mineOffset, theirsCurrent.oldStart + theirsOffset),
-        newLines: 0,
-        lines: []
-      };
-      mergeLines(mergedHunk, mineCurrent.oldStart, mineCurrent.lines, theirsCurrent.oldStart, theirsCurrent.lines);
-      theirsIndex++;
-      mineIndex++;
-      ret.hunks.push(mergedHunk);
-    }
-  }
-  return ret;
-}
-function loadPatch(param, base) {
-  if (typeof param === 'string') {
-    if (/^@@/m.test(param) || /^Index:/m.test(param)) {
-      return (
-        /*istanbul ignore start*/
-        (0,
-        /*istanbul ignore end*/
-        /*istanbul ignore start*/
-        _parse
-        /*istanbul ignore end*/
-        .
-        /*istanbul ignore start*/
-        parsePatch)
-        /*istanbul ignore end*/
-        (param)[0]
-      );
-    }
-    if (!base) {
-      throw new Error('Must provide a base reference or pass in a patch');
-    }
-    return (
-      /*istanbul ignore start*/
-      (0,
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      _create
-      /*istanbul ignore end*/
-      .
-      /*istanbul ignore start*/
-      structuredPatch)
-      /*istanbul ignore end*/
-      (undefined, undefined, base, param)
-    );
-  }
-  return param;
-}
-function fileNameChanged(patch) {
-  return patch.newFileName && patch.newFileName !== patch.oldFileName;
-}
-function selectField(index, mine, theirs) {
-  if (mine === theirs) {
-    return mine;
-  } else {
-    index.conflict = true;
-    return {
-      mine: mine,
-      theirs: theirs
-    };
-  }
-}
-function hunkBefore(test, check) {
-  return test.oldStart < check.oldStart && test.oldStart + test.oldLines < check.oldStart;
-}
-function cloneHunk(hunk, offset) {
-  return {
-    oldStart: hunk.oldStart,
-    oldLines: hunk.oldLines,
-    newStart: hunk.newStart + offset,
-    newLines: hunk.newLines,
-    lines: hunk.lines
-  };
-}
-function mergeLines(hunk, mineOffset, mineLines, theirOffset, theirLines) {
-  // This will generally result in a conflicted hunk, but there are cases where the context
-  // is the only overlap where we can successfully merge the content here.
-  var mine = {
-      offset: mineOffset,
-      lines: mineLines,
-      index: 0
-    },
-    their = {
-      offset: theirOffset,
-      lines: theirLines,
-      index: 0
-    };
-
-  // Handle any leading content
-  insertLeading(hunk, mine, their);
-  insertLeading(hunk, their, mine);
-
-  // Now in the overlap content. Scan through and select the best changes from each.
-  while (mine.index < mine.lines.length && their.index < their.lines.length) {
-    var mineCurrent = mine.lines[mine.index],
-      theirCurrent = their.lines[their.index];
-    if ((mineCurrent[0] === '-' || mineCurrent[0] === '+') && (theirCurrent[0] === '-' || theirCurrent[0] === '+')) {
-      // Both modified ...
-      mutualChange(hunk, mine, their);
-    } else if (mineCurrent[0] === '+' && theirCurrent[0] === ' ') {
-      /*istanbul ignore start*/
-      var _hunk$lines;
-      /*istanbul ignore end*/
-      // Mine inserted
-      /*istanbul ignore start*/
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      (_hunk$lines =
-      /*istanbul ignore end*/
-      hunk.lines).push.apply(
-      /*istanbul ignore start*/
-      _hunk$lines
-      /*istanbul ignore end*/
-      ,
-      /*istanbul ignore start*/
-      _toConsumableArray(
-      /*istanbul ignore end*/
-      collectChange(mine)));
-    } else if (theirCurrent[0] === '+' && mineCurrent[0] === ' ') {
-      /*istanbul ignore start*/
-      var _hunk$lines2;
-      /*istanbul ignore end*/
-      // Theirs inserted
-      /*istanbul ignore start*/
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      (_hunk$lines2 =
-      /*istanbul ignore end*/
-      hunk.lines).push.apply(
-      /*istanbul ignore start*/
-      _hunk$lines2
-      /*istanbul ignore end*/
-      ,
-      /*istanbul ignore start*/
-      _toConsumableArray(
-      /*istanbul ignore end*/
-      collectChange(their)));
-    } else if (mineCurrent[0] === '-' && theirCurrent[0] === ' ') {
-      // Mine removed or edited
-      removal(hunk, mine, their);
-    } else if (theirCurrent[0] === '-' && mineCurrent[0] === ' ') {
-      // Their removed or edited
-      removal(hunk, their, mine, true);
-    } else if (mineCurrent === theirCurrent) {
-      // Context identity
-      hunk.lines.push(mineCurrent);
-      mine.index++;
-      their.index++;
-    } else {
-      // Context mismatch
-      conflict(hunk, collectChange(mine), collectChange(their));
-    }
-  }
-
-  // Now push anything that may be remaining
-  insertTrailing(hunk, mine);
-  insertTrailing(hunk, their);
-  calcLineCount(hunk);
-}
-function mutualChange(hunk, mine, their) {
-  var myChanges = collectChange(mine),
-    theirChanges = collectChange(their);
-  if (allRemoves(myChanges) && allRemoves(theirChanges)) {
-    // Special case for remove changes that are supersets of one another
-    if (
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _array
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    arrayStartsWith)
-    /*istanbul ignore end*/
-    (myChanges, theirChanges) && skipRemoveSuperset(their, myChanges, myChanges.length - theirChanges.length)) {
-      /*istanbul ignore start*/
-      var _hunk$lines3;
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      (_hunk$lines3 =
-      /*istanbul ignore end*/
-      hunk.lines).push.apply(
-      /*istanbul ignore start*/
-      _hunk$lines3
-      /*istanbul ignore end*/
-      ,
-      /*istanbul ignore start*/
-      _toConsumableArray(
-      /*istanbul ignore end*/
-      myChanges));
-      return;
-    } else if (
-    /*istanbul ignore start*/
-    (0,
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    _array
-    /*istanbul ignore end*/
-    .
-    /*istanbul ignore start*/
-    arrayStartsWith)
-    /*istanbul ignore end*/
-    (theirChanges, myChanges) && skipRemoveSuperset(mine, theirChanges, theirChanges.length - myChanges.length)) {
-      /*istanbul ignore start*/
-      var _hunk$lines4;
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      /*istanbul ignore end*/
-      /*istanbul ignore start*/
-      (_hunk$lines4 =
-      /*istanbul ignore end*/
-      hunk.lines).push.apply(
-      /*istanbul ignore start*/
-      _hunk$lines4
-      /*istanbul ignore end*/
-      ,
-      /*istanbul ignore start*/
-      _toConsumableArray(
-      /*istanbul ignore end*/
-      theirChanges));
-      return;
-    }
-  } else if (
-  /*istanbul ignore start*/
-  (0,
-  /*istanbul ignore end*/
-  /*istanbul ignore start*/
-  _array
-  /*istanbul ignore end*/
-  .
-  /*istanbul ignore start*/
-  arrayEqual)
-  /*istanbul ignore end*/
-  (myChanges, theirChanges)) {
-    /*istanbul ignore start*/
-    var _hunk$lines5;
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    (_hunk$lines5 =
-    /*istanbul ignore end*/
-    hunk.lines).push.apply(
-    /*istanbul ignore start*/
-    _hunk$lines5
-    /*istanbul ignore end*/
-    ,
-    /*istanbul ignore start*/
-    _toConsumableArray(
-    /*istanbul ignore end*/
-    myChanges));
-    return;
-  }
-  conflict(hunk, myChanges, theirChanges);
-}
-function removal(hunk, mine, their, swap) {
-  var myChanges = collectChange(mine),
-    theirChanges = collectContext(their, myChanges);
-  if (theirChanges.merged) {
-    /*istanbul ignore start*/
-    var _hunk$lines6;
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    /*istanbul ignore end*/
-    /*istanbul ignore start*/
-    (_hunk$lines6 =
-    /*istanbul ignore end*/
-    hunk.lines).push.apply(
-    /*istanbul ignore start*/
-    _hunk$lines6
-    /*istanbul ignore end*/
-    ,
-    /*istanbul ignore start*/
-    _toConsumableArray(
-    /*istanbul ignore end*/
-    theirChanges.merged));
-  } else {
-    conflict(hunk, swap ? theirChanges : myChanges, swap ? myChanges : theirChanges);
-  }
-}
-function conflict(hunk, mine, their) {
-  hunk.conflict = true;
-  hunk.lines.push({
-    conflict: true,
-    mine: mine,
-    theirs: their
-  });
-}
-function insertLeading(hunk, insert, their) {
-  while (insert.offset < their.offset && insert.index < insert.lines.length) {
-    var line = insert.lines[insert.index++];
-    hunk.lines.push(line);
-    insert.offset++;
-  }
-}
-function insertTrailing(hunk, insert) {
-  while (insert.index < insert.lines.length) {
-    var line = insert.lines[insert.index++];
-    hunk.lines.push(line);
-  }
-}
-function collectChange(state) {
-  var ret = [],
-    operation = state.lines[state.index][0];
-  while (state.index < state.lines.length) {
-    var line = state.lines[state.index];
-
-    // Group additions that are immediately after subtractions and treat them as one "atomic" modify change.
-    if (operation === '-' && line[0] === '+') {
-      operation = '+';
-    }
-    if (operation === line[0]) {
-      ret.push(line);
-      state.index++;
-    } else {
-      break;
-    }
-  }
-  return ret;
-}
-function collectContext(state, matchChanges) {
-  var changes = [],
-    merged = [],
-    matchIndex = 0,
-    contextChanges = false,
-    conflicted = false;
-  while (matchIndex < matchChanges.length && state.index < state.lines.length) {
-    var change = state.lines[state.index],
-      match = matchChanges[matchIndex];
-
-    // Once we've hit our add, then we are done
-    if (match[0] === '+') {
-      break;
-    }
-    contextChanges = contextChanges || change[0] !== ' ';
-    merged.push(match);
-    matchIndex++;
-
-    // Consume any additions in the other block as a conflict to attempt
-    // to pull in the remaining context after this
-    if (change[0] === '+') {
-      conflicted = true;
-      while (change[0] === '+') {
-        changes.push(change);
-        change = state.lines[++state.index];
-      }
-    }
-    if (match.substr(1) === change.substr(1)) {
-      changes.push(change);
-      state.index++;
-    } else {
-      conflicted = true;
-    }
-  }
-  if ((matchChanges[matchIndex] || '')[0] === '+' && contextChanges) {
-    conflicted = true;
-  }
-  if (conflicted) {
-    return changes;
-  }
-  while (matchIndex < matchChanges.length) {
-    merged.push(matchChanges[matchIndex++]);
-  }
-  return {
-    merged: merged,
-    changes: changes
-  };
-}
-function allRemoves(changes) {
-  return changes.reduce(function (prev, change) {
-    return prev && change[0] === '-';
-  }, true);
-}
-function skipRemoveSuperset(state, removeChanges, delta) {
-  for (var i = 0; i < delta; i++) {
-    var changeContent = removeChanges[removeChanges.length - delta + i].substr(1);
-    if (state.lines[state.index + i] !== ' ' + changeContent) {
-      return false;
-    }
-  }
-  state.index += delta;
-  return true;
-}
-function calcOldNewLineCount(lines) {
-  var oldLines = 0;
-  var newLines = 0;
-  lines.forEach(function (line) {
-    if (typeof line !== 'string') {
-      var myCount = calcOldNewLineCount(line.mine);
-      var theirCount = calcOldNewLineCount(line.theirs);
-      if (oldLines !== undefined) {
-        if (myCount.oldLines === theirCount.oldLines) {
-          oldLines += myCount.oldLines;
-        } else {
-          oldLines = undefined;
-        }
-      }
-      if (newLines !== undefined) {
-        if (myCount.newLines === theirCount.newLines) {
-          newLines += myCount.newLines;
-        } else {
-          newLines = undefined;
-        }
-      }
-    } else {
-      if (newLines !== undefined && (line[0] === '+' || line[0] === ' ')) {
-        newLines++;
-      }
-      if (oldLines !== undefined && (line[0] === '-' || line[0] === ' ')) {
-        oldLines++;
-      }
-    }
-  });
-  return {
-    oldLines: oldLines,
-    newLines: newLines
-  };
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["_create","require","_parse","_array","_toConsumableArray","arr","_arrayWithoutHoles","_iterableToArray","_unsupportedIterableToArray","_nonIterableSpread","TypeError","o","minLen","_arrayLikeToArray","n","Object","prototype","toString","call","slice","constructor","name","Array","from","test","iter","Symbol","iterator","isArray","len","length","i","arr2","calcLineCount","hunk","_calcOldNewLineCount","calcOldNewLineCount","lines","oldLines","newLines","undefined","merge","mine","theirs","base","loadPatch","ret","index","newFileName","fileNameChanged","oldFileName","oldHeader","newHeader","selectField","hunks","mineIndex","theirsIndex","mineOffset","theirsOffset","mineCurrent","oldStart","Infinity","theirsCurrent","hunkBefore","push","cloneHunk","mergedHunk","Math","min","newStart","mergeLines","param","parsePatch","Error","structuredPatch","patch","conflict","check","offset","mineLines","theirOffset","theirLines","their","insertLeading","theirCurrent","mutualChange","_hunk$lines","apply","collectChange","_hunk$lines2","removal","insertTrailing","myChanges","theirChanges","allRemoves","arrayStartsWith","skipRemoveSuperset","_hunk$lines3","_hunk$lines4","arrayEqual","_hunk$lines5","swap","collectContext","merged","_hunk$lines6","insert","line","state","operation","matchChanges","changes","matchIndex","contextChanges","conflicted","change","match","substr","reduce","prev","removeChanges","delta","changeContent","forEach","myCount","theirCount"],"sources":["../../src/patch/merge.js"],"sourcesContent":["import {structuredPatch} from './create';\nimport {parsePatch} from './parse';\n\nimport {arrayEqual, arrayStartsWith} from '../util/array';\n\nexport function calcLineCount(hunk) {\n  const {oldLines, newLines} = calcOldNewLineCount(hunk.lines);\n\n  if (oldLines !== undefined) {\n    hunk.oldLines = oldLines;\n  } else {\n    delete hunk.oldLines;\n  }\n\n  if (newLines !== undefined) {\n    hunk.newLines = newLines;\n  } else {\n    delete hunk.newLines;\n  }\n}\n\nexport function merge(mine, theirs, base) {\n  mine = loadPatch(mine, base);\n  theirs = loadPatch(theirs, base);\n\n  let ret = {};\n\n  // For index we just let it pass through as it doesn't have any necessary meaning.\n  // Leaving sanity checks on this to the API consumer that may know more about the\n  // meaning in their own context.\n  if (mine.index || theirs.index) {\n    ret.index = mine.index || theirs.index;\n  }\n\n  if (mine.newFileName || theirs.newFileName) {\n    if (!fileNameChanged(mine)) {\n      // No header or no change in ours, use theirs (and ours if theirs does not exist)\n      ret.oldFileName = theirs.oldFileName || mine.oldFileName;\n      ret.newFileName = theirs.newFileName || mine.newFileName;\n      ret.oldHeader = theirs.oldHeader || mine.oldHeader;\n      ret.newHeader = theirs.newHeader || mine.newHeader;\n    } else if (!fileNameChanged(theirs)) {\n      // No header or no change in theirs, use ours\n      ret.oldFileName = mine.oldFileName;\n      ret.newFileName = mine.newFileName;\n      ret.oldHeader = mine.oldHeader;\n      ret.newHeader = mine.newHeader;\n    } else {\n      // Both changed... 
figure it out\n      ret.oldFileName = selectField(ret, mine.oldFileName, theirs.oldFileName);\n      ret.newFileName = selectField(ret, mine.newFileName, theirs.newFileName);\n      ret.oldHeader = selectField(ret, mine.oldHeader, theirs.oldHeader);\n      ret.newHeader = selectField(ret, mine.newHeader, theirs.newHeader);\n    }\n  }\n\n  ret.hunks = [];\n\n  let mineIndex = 0,\n      theirsIndex = 0,\n      mineOffset = 0,\n      theirsOffset = 0;\n\n  while (mineIndex < mine.hunks.length || theirsIndex < theirs.hunks.length) {\n    let mineCurrent = mine.hunks[mineIndex] || {oldStart: Infinity},\n        theirsCurrent = theirs.hunks[theirsIndex] || {oldStart: Infinity};\n\n    if (hunkBefore(mineCurrent, theirsCurrent)) {\n      // This patch does not overlap with any of the others, yay.\n      ret.hunks.push(cloneHunk(mineCurrent, mineOffset));\n      mineIndex++;\n      theirsOffset += mineCurrent.newLines - mineCurrent.oldLines;\n    } else if (hunkBefore(theirsCurrent, mineCurrent)) {\n      // This patch does not overlap with any of the others, yay.\n      ret.hunks.push(cloneHunk(theirsCurrent, theirsOffset));\n      theirsIndex++;\n      mineOffset += theirsCurrent.newLines - theirsCurrent.oldLines;\n    } else {\n      // Overlap, merge as best we can\n      let mergedHunk = {\n        oldStart: Math.min(mineCurrent.oldStart, theirsCurrent.oldStart),\n        oldLines: 0,\n        newStart: Math.min(mineCurrent.newStart + mineOffset, theirsCurrent.oldStart + theirsOffset),\n        newLines: 0,\n        lines: []\n      };\n      mergeLines(mergedHunk, mineCurrent.oldStart, mineCurrent.lines, theirsCurrent.oldStart, theirsCurrent.lines);\n      theirsIndex++;\n      mineIndex++;\n\n      ret.hunks.push(mergedHunk);\n    }\n  }\n\n  return ret;\n}\n\nfunction loadPatch(param, base) {\n  if (typeof param === 'string') {\n    if ((/^@@/m).test(param) || ((/^Index:/m).test(param))) {\n      return parsePatch(param)[0];\n    }\n\n    if (!base) {\n      throw new Error('Must provide a base reference or pass in a patch');\n    }\n    return structuredPatch(undefined, undefined, base, param);\n  }\n\n  return param;\n}\n\nfunction fileNameChanged(patch) {\n  return patch.newFileName && patch.newFileName !== patch.oldFileName;\n}\n\nfunction selectField(index, mine, theirs) {\n  if (mine === theirs) {\n    return mine;\n  } else {\n    index.conflict = true;\n    return {mine, theirs};\n  }\n}\n\nfunction hunkBefore(test, check) {\n  return test.oldStart < check.oldStart\n    && (test.oldStart + test.oldLines) < check.oldStart;\n}\n\nfunction cloneHunk(hunk, offset) {\n  return {\n    oldStart: hunk.oldStart, oldLines: hunk.oldLines,\n    newStart: hunk.newStart + offset, newLines: hunk.newLines,\n    lines: hunk.lines\n  };\n}\n\nfunction mergeLines(hunk, mineOffset, mineLines, theirOffset, theirLines) {\n  // This will generally result in a conflicted hunk, but there are cases where the context\n  // is the only overlap where we can successfully merge the content here.\n  let mine = {offset: mineOffset, lines: mineLines, index: 0},\n      their = {offset: theirOffset, lines: theirLines, index: 0};\n\n  // Handle any leading content\n  insertLeading(hunk, mine, their);\n  insertLeading(hunk, their, mine);\n\n  // Now in the overlap content. 
Scan through and select the best changes from each.\n  while (mine.index < mine.lines.length && their.index < their.lines.length) {\n    let mineCurrent = mine.lines[mine.index],\n        theirCurrent = their.lines[their.index];\n\n    if ((mineCurrent[0] === '-' || mineCurrent[0] === '+')\n        && (theirCurrent[0] === '-' || theirCurrent[0] === '+')) {\n      // Both modified ...\n      mutualChange(hunk, mine, their);\n    } else if (mineCurrent[0] === '+' && theirCurrent[0] === ' ') {\n      // Mine inserted\n      hunk.lines.push(... collectChange(mine));\n    } else if (theirCurrent[0] === '+' && mineCurrent[0] === ' ') {\n      // Theirs inserted\n      hunk.lines.push(... collectChange(their));\n    } else if (mineCurrent[0] === '-' && theirCurrent[0] === ' ') {\n      // Mine removed or edited\n      removal(hunk, mine, their);\n    } else if (theirCurrent[0] === '-' && mineCurrent[0] === ' ') {\n      // Their removed or edited\n      removal(hunk, their, mine, true);\n    } else if (mineCurrent === theirCurrent) {\n      // Context identity\n      hunk.lines.push(mineCurrent);\n      mine.index++;\n      their.index++;\n    } else {\n      // Context mismatch\n      conflict(hunk, collectChange(mine), collectChange(their));\n    }\n  }\n\n  // Now push anything that may be remaining\n  insertTrailing(hunk, mine);\n  insertTrailing(hunk, their);\n\n  calcLineCount(hunk);\n}\n\nfunction mutualChange(hunk, mine, their) {\n  let myChanges = collectChange(mine),\n      theirChanges = collectChange(their);\n\n  if (allRemoves(myChanges) && allRemoves(theirChanges)) {\n    // Special case for remove changes that are supersets of one another\n    if (arrayStartsWith(myChanges, theirChanges)\n        && skipRemoveSuperset(their, myChanges, myChanges.length - theirChanges.length)) {\n      hunk.lines.push(... myChanges);\n      return;\n    } else if (arrayStartsWith(theirChanges, myChanges)\n        && skipRemoveSuperset(mine, theirChanges, theirChanges.length - myChanges.length)) {\n      hunk.lines.push(... theirChanges);\n      return;\n    }\n  } else if (arrayEqual(myChanges, theirChanges)) {\n    hunk.lines.push(... myChanges);\n    return;\n  }\n\n  conflict(hunk, myChanges, theirChanges);\n}\n\nfunction removal(hunk, mine, their, swap) {\n  let myChanges = collectChange(mine),\n      theirChanges = collectContext(their, myChanges);\n  if (theirChanges.merged) {\n    hunk.lines.push(... theirChanges.merged);\n  } else {\n    conflict(hunk, swap ? theirChanges : myChanges, swap ? 
myChanges : theirChanges);\n  }\n}\n\nfunction conflict(hunk, mine, their) {\n  hunk.conflict = true;\n  hunk.lines.push({\n    conflict: true,\n    mine: mine,\n    theirs: their\n  });\n}\n\nfunction insertLeading(hunk, insert, their) {\n  while (insert.offset < their.offset && insert.index < insert.lines.length) {\n    let line = insert.lines[insert.index++];\n    hunk.lines.push(line);\n    insert.offset++;\n  }\n}\nfunction insertTrailing(hunk, insert) {\n  while (insert.index < insert.lines.length) {\n    let line = insert.lines[insert.index++];\n    hunk.lines.push(line);\n  }\n}\n\nfunction collectChange(state) {\n  let ret = [],\n      operation = state.lines[state.index][0];\n  while (state.index < state.lines.length) {\n    let line = state.lines[state.index];\n\n    // Group additions that are immediately after subtractions and treat them as one \"atomic\" modify change.\n    if (operation === '-' && line[0] === '+') {\n      operation = '+';\n    }\n\n    if (operation === line[0]) {\n      ret.push(line);\n      state.index++;\n    } else {\n      break;\n    }\n  }\n\n  return ret;\n}\nfunction collectContext(state, matchChanges) {\n  let changes = [],\n      merged = [],\n      matchIndex = 0,\n      contextChanges = false,\n      conflicted = false;\n  while (matchIndex < matchChanges.length\n        && state.index < state.lines.length) {\n    let change = state.lines[state.index],\n        match = matchChanges[matchIndex];\n\n    // Once we've hit our add, then we are done\n    if (match[0] === '+') {\n      break;\n    }\n\n    contextChanges = contextChanges || change[0] !== ' ';\n\n    merged.push(match);\n    matchIndex++;\n\n    // Consume any additions in the other block as a conflict to attempt\n    // to pull in the remaining context after this\n    if (change[0] === '+') {\n      conflicted = true;\n\n      while (change[0] === '+') {\n        changes.push(change);\n        change = state.lines[++state.index];\n      }\n    }\n\n    if (match.substr(1) === change.substr(1)) {\n      changes.push(change);\n      state.index++;\n    } else {\n      conflicted = true;\n    }\n  }\n\n  if ((matchChanges[matchIndex] || '')[0] === '+'\n      && contextChanges) {\n    conflicted = true;\n  }\n\n  if (conflicted) {\n    return changes;\n  }\n\n  while (matchIndex < matchChanges.length) {\n    merged.push(matchChanges[matchIndex++]);\n  }\n\n  return {\n    merged,\n    changes\n  };\n}\n\nfunction allRemoves(changes) {\n  return changes.reduce(function(prev, change) {\n    return prev && change[0] === '-';\n  }, true);\n}\nfunction skipRemoveSuperset(state, removeChanges, delta) {\n  for (let i = 0; i < delta; i++) {\n    let changeContent = removeChanges[removeChanges.length - delta + i].substr(1);\n    if (state.lines[state.index + i] !== ' ' + changeContent) {\n      return false;\n    }\n  }\n\n  state.index += delta;\n  return true;\n}\n\nfunction calcOldNewLineCount(lines) {\n  let oldLines = 0;\n  let newLines = 0;\n\n  lines.forEach(function(line) {\n    if (typeof line !== 'string') {\n      let myCount = calcOldNewLineCount(line.mine);\n      let theirCount = calcOldNewLineCount(line.theirs);\n\n      if (oldLines !== undefined) {\n        if (myCount.oldLines === theirCount.oldLines) {\n          oldLines += myCount.oldLines;\n        } else {\n          oldLines = undefined;\n        }\n      }\n\n      if (newLines !== undefined) {\n        if (myCount.newLines === theirCount.newLines) {\n          newLines += myCount.newLines;\n        } else {\n          
newLines = undefined;\n        }\n      }\n    } else {\n      if (newLines !== undefined && (line[0] === '+' || line[0] === ' ')) {\n        newLines++;\n      }\n      if (oldLines !== undefined && (line[0] === '-' || line[0] === ' ')) {\n        oldLines++;\n      }\n    }\n  });\n\n  return {oldLines, newLines};\n}\n"],"mappings":";;;;;;;;;AAAA;AAAA;AAAAA,OAAA,GAAAC,OAAA;AAAA;AAAA;AACA;AAAA;AAAAC,MAAA,GAAAD,OAAA;AAAA;AAAA;AAEA;AAAA;AAAAE,MAAA,GAAAF,OAAA;AAAA;AAAA;AAA0D,mCAAAG,mBAAAC,GAAA,WAAAC,kBAAA,CAAAD,GAAA,KAAAE,gBAAA,CAAAF,GAAA,KAAAG,2BAAA,CAAAH,GAAA,KAAAI,kBAAA;AAAA,SAAAA,mBAAA,cAAAC,SAAA;AAAA,SAAAF,4BAAAG,CAAA,EAAAC,MAAA,SAAAD,CAAA,qBAAAA,CAAA,sBAAAE,iBAAA,CAAAF,CAAA,EAAAC,MAAA,OAAAE,CAAA,GAAAC,MAAA,CAAAC,SAAA,CAAAC,QAAA,CAAAC,IAAA,CAAAP,CAAA,EAAAQ,KAAA,aAAAL,CAAA,iBAAAH,CAAA,CAAAS,WAAA,EAAAN,CAAA,GAAAH,CAAA,CAAAS,WAAA,CAAAC,IAAA,MAAAP,CAAA,cAAAA,CAAA,mBAAAQ,KAAA,CAAAC,IAAA,CAAAZ,CAAA,OAAAG,CAAA,+DAAAU,IAAA,CAAAV,CAAA,UAAAD,iBAAA,CAAAF,CAAA,EAAAC,MAAA;AAAA,SAAAL,iBAAAkB,IAAA,eAAAC,MAAA,oBAAAD,IAAA,CAAAC,MAAA,CAAAC,QAAA,aAAAF,IAAA,+BAAAH,KAAA,CAAAC,IAAA,CAAAE,IAAA;AAAA,SAAAnB,mBAAAD,GAAA,QAAAiB,KAAA,CAAAM,OAAA,CAAAvB,GAAA,UAAAQ,iBAAA,CAAAR,GAAA;AAAA,SAAAQ,kBAAAR,GAAA,EAAAwB,GAAA,QAAAA,GAAA,YAAAA,GAAA,GAAAxB,GAAA,CAAAyB,MAAA,EAAAD,GAAA,GAAAxB,GAAA,CAAAyB,MAAA,WAAAC,CAAA,MAAAC,IAAA,OAAAV,KAAA,CAAAO,GAAA,GAAAE,CAAA,GAAAF,GAAA,EAAAE,CAAA,IAAAC,IAAA,CAAAD,CAAA,IAAA1B,GAAA,CAAA0B,CAAA,UAAAC,IAAA;AAAA;AAEnD,SAASC,aAAaA,CAACC,IAAI,EAAE;EAClC;IAAA;IAAAC,oBAAA;IAAA;IAA6BC,mBAAmB,CAACF,IAAI,CAACG,KAAK,CAAC;IAAA;IAAA;IAArDC,QAAQ,GAAAH,oBAAA,CAARG,QAAQ;IAAA;IAAA;IAAEC,QAAQ,GAAAJ,oBAAA,CAARI,QAAQ;EAEzB,IAAID,QAAQ,KAAKE,SAAS,EAAE;IAC1BN,IAAI,CAACI,QAAQ,GAAGA,QAAQ;EAC1B,CAAC,MAAM;IACL,OAAOJ,IAAI,CAACI,QAAQ;EACtB;EAEA,IAAIC,QAAQ,KAAKC,SAAS,EAAE;IAC1BN,IAAI,CAACK,QAAQ,GAAGA,QAAQ;EAC1B,CAAC,MAAM;IACL,OAAOL,IAAI,CAACK,QAAQ;EACtB;AACF;AAEO,SAASE,KAAKA,CAACC,IAAI,EAAEC,MAAM,EAAEC,IAAI,EAAE;EACxCF,IAAI,GAAGG,SAAS,CAACH,IAAI,EAAEE,IAAI,CAAC;EAC5BD,MAAM,GAAGE,SAAS,CAACF,MAAM,EAAEC,IAAI,CAAC;EAEhC,IAAIE,GAAG,GAAG,CAAC,CAAC;;EAEZ;EACA;EACA;EACA,IAAIJ,IAAI,CAACK,KAAK,IAAIJ,MAAM,CAACI,KAAK,EAAE;IAC9BD,GAAG,CAACC,KAAK,GAAGL,IAAI,CAACK,KAAK,IAAIJ,MAAM,CAACI,KAAK;EACxC;EAEA,IAAIL,IAAI,CAACM,WAAW,IAAIL,MAAM,CAACK,WAAW,EAAE;IAC1C,IAAI,CAACC,eAAe,CAACP,IAAI,CAAC,EAAE;MAC1B;MACAI,GAAG,CAACI,WAAW,GAAGP,MAAM,CAACO,WAAW,IAAIR,IAAI,CAACQ,WAAW;MACxDJ,GAAG,CAACE,WAAW,GAAGL,MAAM,CAACK,WAAW,IAAIN,IAAI,CAACM,WAAW;MACxDF,GAAG,CAACK,SAAS,GAAGR,MAAM,CAACQ,SAAS,IAAIT,IAAI,CAACS,SAAS;MAClDL,GAAG,CAACM,SAAS,GAAGT,MAAM,CAACS,SAAS,IAAIV,IAAI,CAACU,SAAS;IACpD,CAAC,MAAM,IAAI,CAACH,eAAe,CAACN,MAAM,CAAC,EAAE;MACnC;MACAG,GAAG,CAACI,WAAW,GAAGR,IAAI,CAACQ,WAAW;MAClCJ,GAAG,CAACE,WAAW,GAAGN,IAAI,CAACM,WAAW;MAClCF,GAAG,CAACK,SAAS,GAAGT,IAAI,CAACS,SAAS;MAC9BL,GAAG,CAACM,SAAS,GAAGV,IAAI,CAACU,SAAS;IAChC,CAAC,MAAM;MACL;MACAN,GAAG,CAACI,WAAW,GAAGG,WAAW,CAACP,GAAG,EAAEJ,IAAI,CAACQ,WAAW,EAAEP,MAAM,CAACO,WAAW,CAAC;MACxEJ,GAAG,CAACE,WAAW,GAAGK,WAAW,CAACP,GAAG,EAAEJ,IAAI,CAACM,WAAW,EAAEL,MAAM,CAACK,WAAW,CAAC;MACxEF,GAAG,CAACK,SAAS,GAAGE,WAAW,CAACP,GAAG,EAAEJ,IAAI,CAACS,SAAS,EAAER,MAAM,CAACQ,SAAS,CAAC;MAClEL,GAAG,CAACM,SAAS,GAAGC,WAAW,CAACP,GAAG,EAAEJ,IAAI,CAACU,SAAS,EAAET,MAAM,CAACS,SAAS,CAAC;IACpE;EACF;EAEAN,GAAG,CAACQ,KAAK,GAAG,EAAE;EAEd,IAAIC,SAAS,GAAG,CAAC;IACbC,WAAW,GAAG,CAAC;IACfC,UAAU,GAAG,CAAC;IACdC,YAAY,GAAG,CAAC;EAEpB,OAAOH,SAAS,GAAGb,IAAI,CAACY,KAAK,CAACxB,MAAM,IAAI0B,WAAW,GAAGb,MAAM,CAACW,KAAK,CAACxB,MAAM,EAAE;IACzE,IAAI6B,WAAW,GAAGjB,IAAI,CAACY,KAAK,CAACC,SAAS,CAAC,IAAI;QAACK,QAAQ,EAAEC;MAAQ,CAAC;MAC3DC,aAAa,GAAGnB,MAAM,CAACW,KAAK,CAACE,W
AAW,CAAC,IAAI;QAACI,QAAQ,EAAEC;MAAQ,CAAC;IAErE,IAAIE,UAAU,CAACJ,WAAW,EAAEG,aAAa,CAAC,EAAE;MAC1C;MACAhB,GAAG,CAACQ,KAAK,CAACU,IAAI,CAACC,SAAS,CAACN,WAAW,EAAEF,UAAU,CAAC,CAAC;MAClDF,SAAS,EAAE;MACXG,YAAY,IAAIC,WAAW,CAACpB,QAAQ,GAAGoB,WAAW,CAACrB,QAAQ;IAC7D,CAAC,MAAM,IAAIyB,UAAU,CAACD,aAAa,EAAEH,WAAW,CAAC,EAAE;MACjD;MACAb,GAAG,CAACQ,KAAK,CAACU,IAAI,CAACC,SAAS,CAACH,aAAa,EAAEJ,YAAY,CAAC,CAAC;MACtDF,WAAW,EAAE;MACbC,UAAU,IAAIK,aAAa,CAACvB,QAAQ,GAAGuB,aAAa,CAACxB,QAAQ;IAC/D,CAAC,MAAM;MACL;MACA,IAAI4B,UAAU,GAAG;QACfN,QAAQ,EAAEO,IAAI,CAACC,GAAG,CAACT,WAAW,CAACC,QAAQ,EAAEE,aAAa,CAACF,QAAQ,CAAC;QAChEtB,QAAQ,EAAE,CAAC;QACX+B,QAAQ,EAAEF,IAAI,CAACC,GAAG,CAACT,WAAW,CAACU,QAAQ,GAAGZ,UAAU,EAAEK,aAAa,CAACF,QAAQ,GAAGF,YAAY,CAAC;QAC5FnB,QAAQ,EAAE,CAAC;QACXF,KAAK,EAAE;MACT,CAAC;MACDiC,UAAU,CAACJ,UAAU,EAAEP,WAAW,CAACC,QAAQ,EAAED,WAAW,CAACtB,KAAK,EAAEyB,aAAa,CAACF,QAAQ,EAAEE,aAAa,CAACzB,KAAK,CAAC;MAC5GmB,WAAW,EAAE;MACbD,SAAS,EAAE;MAEXT,GAAG,CAACQ,KAAK,CAACU,IAAI,CAACE,UAAU,CAAC;IAC5B;EACF;EAEA,OAAOpB,GAAG;AACZ;AAEA,SAASD,SAASA,CAAC0B,KAAK,EAAE3B,IAAI,EAAE;EAC9B,IAAI,OAAO2B,KAAK,KAAK,QAAQ,EAAE;IAC7B,IAAK,MAAM,CAAE/C,IAAI,CAAC+C,KAAK,CAAC,IAAM,UAAU,CAAE/C,IAAI,CAAC+C,KAAK,CAAE,EAAE;MACtD,OAAO;QAAA;QAAA;QAAA;QAAAC;QAAAA;QAAAA;QAAAA;QAAAA;QAAAA,UAAU;QAAA;QAAA,CAACD,KAAK,CAAC,CAAC,CAAC;MAAC;IAC7B;IAEA,IAAI,CAAC3B,IAAI,EAAE;MACT,MAAM,IAAI6B,KAAK,CAAC,kDAAkD,CAAC;IACrE;IACA,OAAO;MAAA;MAAA;MAAA;MAAAC;MAAAA;MAAAA;MAAAA;MAAAA;MAAAA,eAAe;MAAA;MAAA,CAAClC,SAAS,EAAEA,SAAS,EAAEI,IAAI,EAAE2B,KAAK;IAAC;EAC3D;EAEA,OAAOA,KAAK;AACd;AAEA,SAAStB,eAAeA,CAAC0B,KAAK,EAAE;EAC9B,OAAOA,KAAK,CAAC3B,WAAW,IAAI2B,KAAK,CAAC3B,WAAW,KAAK2B,KAAK,CAACzB,WAAW;AACrE;AAEA,SAASG,WAAWA,CAACN,KAAK,EAAEL,IAAI,EAAEC,MAAM,EAAE;EACxC,IAAID,IAAI,KAAKC,MAAM,EAAE;IACnB,OAAOD,IAAI;EACb,CAAC,MAAM;IACLK,KAAK,CAAC6B,QAAQ,GAAG,IAAI;IACrB,OAAO;MAAClC,IAAI,EAAJA,IAAI;MAAEC,MAAM,EAANA;IAAM,CAAC;EACvB;AACF;AAEA,SAASoB,UAAUA,CAACvC,IAAI,EAAEqD,KAAK,EAAE;EAC/B,OAAOrD,IAAI,CAACoC,QAAQ,GAAGiB,KAAK,CAACjB,QAAQ,IAC/BpC,IAAI,CAACoC,QAAQ,GAAGpC,IAAI,CAACc,QAAQ,GAAIuC,KAAK,CAACjB,QAAQ;AACvD;AAEA,SAASK,SAASA,CAAC/B,IAAI,EAAE4C,MAAM,EAAE;EAC/B,OAAO;IACLlB,QAAQ,EAAE1B,IAAI,CAAC0B,QAAQ;IAAEtB,QAAQ,EAAEJ,IAAI,CAACI,QAAQ;IAChD+B,QAAQ,EAAEnC,IAAI,CAACmC,QAAQ,GAAGS,MAAM;IAAEvC,QAAQ,EAAEL,IAAI,CAACK,QAAQ;IACzDF,KAAK,EAAEH,IAAI,CAACG;EACd,CAAC;AACH;AAEA,SAASiC,UAAUA,CAACpC,IAAI,EAAEuB,UAAU,EAAEsB,SAAS,EAAEC,WAAW,EAAEC,UAAU,EAAE;EACxE;EACA;EACA,IAAIvC,IAAI,GAAG;MAACoC,MAAM,EAAErB,UAAU;MAAEpB,KAAK,EAAE0C,SAAS;MAAEhC,KAAK,EAAE;IAAC,CAAC;IACvDmC,KAAK,GAAG;MAACJ,MAAM,EAAEE,WAAW;MAAE3C,KAAK,EAAE4C,UAAU;MAAElC,KAAK,EAAE;IAAC,CAAC;;EAE9D;EACAoC,aAAa,CAACjD,IAAI,EAAEQ,IAAI,EAAEwC,KAAK,CAAC;EAChCC,aAAa,CAACjD,IAAI,EAAEgD,KAAK,EAAExC,IAAI,CAAC;;EAEhC;EACA,OAAOA,IAAI,CAACK,KAAK,GAAGL,IAAI,CAACL,KAAK,CAACP,MAAM,IAAIoD,KAAK,CAACnC,KAAK,GAAGmC,KAAK,CAAC7C,KAAK,CAACP,MAAM,EAAE;IACzE,IAAI6B,WAAW,GAAGjB,IAAI,CAACL,KAAK,CAACK,IAAI,CAACK,KAAK,CAAC;MACpCqC,YAAY,GAAGF,KAAK,CAAC7C,KAAK,CAAC6C,KAAK,CAACnC,KAAK,CAAC;IAE3C,IAAI,CAACY,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIA,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,MAC7CyB,YAAY,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIA,YAAY,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;MAC3D;MACAC,YAAY,CAACnD,IAAI,EAAEQ,IAAI,EAAEwC,KAAK,CAAC;IACjC,CAAC,MAAM,IAAIvB,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIyB,YAAY,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MAAA;MAAA,IAAAE,WAAA;MAAA;MAC5D;MACA;MAAA;MAAA;MAAA,CAAAA,WAAA;MAAA;MAAApD,IAAI,CAACG,KAAK,EAAC2B,IAAI,CAAAuB,KAAA;MAAA;MAAAD;MAAA;MAAA;MAAA;MAAAlF,kBAAA;MAAA;MAAKoF,aAAa,CAAC9C,IAAI,CAAC,EAAC;IAC1C,CAAC,MAAM,IAAI0C,YAAY,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIzB,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MA
AA;MAAA,IAAA8B,YAAA;MAAA;MAC5D;MACA;MAAA;MAAA;MAAA,CAAAA,YAAA;MAAA;MAAAvD,IAAI,CAACG,KAAK,EAAC2B,IAAI,CAAAuB,KAAA;MAAA;MAAAE;MAAA;MAAA;MAAA;MAAArF,kBAAA;MAAA;MAAKoF,aAAa,CAACN,KAAK,CAAC,EAAC;IAC3C,CAAC,MAAM,IAAIvB,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIyB,YAAY,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MAC5D;MACAM,OAAO,CAACxD,IAAI,EAAEQ,IAAI,EAAEwC,KAAK,CAAC;IAC5B,CAAC,MAAM,IAAIE,YAAY,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIzB,WAAW,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MAC5D;MACA+B,OAAO,CAACxD,IAAI,EAAEgD,KAAK,EAAExC,IAAI,EAAE,IAAI,CAAC;IAClC,CAAC,MAAM,IAAIiB,WAAW,KAAKyB,YAAY,EAAE;MACvC;MACAlD,IAAI,CAACG,KAAK,CAAC2B,IAAI,CAACL,WAAW,CAAC;MAC5BjB,IAAI,CAACK,KAAK,EAAE;MACZmC,KAAK,CAACnC,KAAK,EAAE;IACf,CAAC,MAAM;MACL;MACA6B,QAAQ,CAAC1C,IAAI,EAAEsD,aAAa,CAAC9C,IAAI,CAAC,EAAE8C,aAAa,CAACN,KAAK,CAAC,CAAC;IAC3D;EACF;;EAEA;EACAS,cAAc,CAACzD,IAAI,EAAEQ,IAAI,CAAC;EAC1BiD,cAAc,CAACzD,IAAI,EAAEgD,KAAK,CAAC;EAE3BjD,aAAa,CAACC,IAAI,CAAC;AACrB;AAEA,SAASmD,YAAYA,CAACnD,IAAI,EAAEQ,IAAI,EAAEwC,KAAK,EAAE;EACvC,IAAIU,SAAS,GAAGJ,aAAa,CAAC9C,IAAI,CAAC;IAC/BmD,YAAY,GAAGL,aAAa,CAACN,KAAK,CAAC;EAEvC,IAAIY,UAAU,CAACF,SAAS,CAAC,IAAIE,UAAU,CAACD,YAAY,CAAC,EAAE;IACrD;IACA;IAAI;IAAA;IAAA;IAAAE;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,eAAe;IAAA;IAAA,CAACH,SAAS,EAAEC,YAAY,CAAC,IACrCG,kBAAkB,CAACd,KAAK,EAAEU,SAAS,EAAEA,SAAS,CAAC9D,MAAM,GAAG+D,YAAY,CAAC/D,MAAM,CAAC,EAAE;MAAA;MAAA,IAAAmE,YAAA;MAAA;MACnF;MAAA;MAAA;MAAA,CAAAA,YAAA;MAAA;MAAA/D,IAAI,CAACG,KAAK,EAAC2B,IAAI,CAAAuB,KAAA;MAAA;MAAAU;MAAA;MAAA;MAAA;MAAA7F,kBAAA;MAAA;MAAKwF,SAAS,EAAC;MAC9B;IACF,CAAC,MAAM;IAAI;IAAA;IAAA;IAAAG;IAAAA;IAAAA;IAAAA;IAAAA;IAAAA,eAAe;IAAA;IAAA,CAACF,YAAY,EAAED,SAAS,CAAC,IAC5CI,kBAAkB,CAACtD,IAAI,EAAEmD,YAAY,EAAEA,YAAY,CAAC/D,MAAM,GAAG8D,SAAS,CAAC9D,MAAM,CAAC,EAAE;MAAA;MAAA,IAAAoE,YAAA;MAAA;MACrF;MAAA;MAAA;MAAA,CAAAA,YAAA;MAAA;MAAAhE,IAAI,CAACG,KAAK,EAAC2B,IAAI,CAAAuB,KAAA;MAAA;MAAAW;MAAA;MAAA;MAAA;MAAA9F,kBAAA;MAAA;MAAKyF,YAAY,EAAC;MACjC;IACF;EACF,CAAC,MAAM;EAAI;EAAA;EAAA;EAAAM;EAAAA;EAAAA;EAAAA;EAAAA;EAAAA,UAAU;EAAA;EAAA,CAACP,SAAS,EAAEC,YAAY,CAAC,EAAE;IAAA;IAAA,IAAAO,YAAA;IAAA;IAC9C;IAAA;IAAA;IAAA,CAAAA,YAAA;IAAA;IAAAlE,IAAI,CAACG,KAAK,EAAC2B,IAAI,CAAAuB,KAAA;IAAA;IAAAa;IAAA;IAAA;IAAA;IAAAhG,kBAAA;IAAA;IAAKwF,SAAS,EAAC;IAC9B;EACF;EAEAhB,QAAQ,CAAC1C,IAAI,EAAE0D,SAAS,EAAEC,YAAY,CAAC;AACzC;AAEA,SAASH,OAAOA,CAACxD,IAAI,EAAEQ,IAAI,EAAEwC,KAAK,EAAEmB,IAAI,EAAE;EACxC,IAAIT,SAAS,GAAGJ,aAAa,CAAC9C,IAAI,CAAC;IAC/BmD,YAAY,GAAGS,cAAc,CAACpB,KAAK,EAAEU,SAAS,CAAC;EACnD,IAAIC,YAAY,CAACU,MAAM,EAAE;IAAA;IAAA,IAAAC,YAAA;IAAA;IACvB;IAAA;IAAA;IAAA,CAAAA,YAAA;IAAA;IAAAtE,IAAI,CAACG,KAAK,EAAC2B,IAAI,CAAAuB,KAAA;IAAA;IAAAiB;IAAA;IAAA;IAAA;IAAApG,kBAAA;IAAA;IAAKyF,YAAY,CAACU,MAAM,EAAC;EAC1C,CAAC,MAAM;IACL3B,QAAQ,CAAC1C,IAAI,EAAEmE,IAAI,GAAGR,YAAY,GAAGD,SAAS,EAAES,IAAI,GAAGT,SAAS,GAAGC,YAAY,CAAC;EAClF;AACF;AAEA,SAASjB,QAAQA,CAAC1C,IAAI,EAAEQ,IAAI,EAAEwC,KAAK,EAAE;EACnChD,IAAI,CAAC0C,QAAQ,GAAG,IAAI;EACpB1C,IAAI,CAACG,KAAK,CAAC2B,IAAI,CAAC;IACdY,QAAQ,EAAE,IAAI;IACdlC,IAAI,EAAEA,IAAI;IACVC,MAAM,EAAEuC;EACV,CAAC,CAAC;AACJ;AAEA,SAASC,aAAaA,CAACjD,IAAI,EAAEuE,MAAM,EAAEvB,KAAK,EAAE;EAC1C,OAAOuB,MAAM,CAAC3B,MAAM,GAAGI,KAAK,CAACJ,MAAM,IAAI2B,MAAM,CAAC1D,KAAK,GAAG0D,MAAM,CAACpE,KAAK,CAACP,MAAM,EAAE;IACzE,IAAI4E,IAAI,GAAGD,MAAM,CAACpE,KAAK,CAACoE,MAAM,CAAC1D,KAAK,EAAE,CAAC;IACvCb,IAAI,CAACG,KAAK,CAAC2B,IAAI,CAAC0C,IAAI,CAAC;IACrBD,MAAM,CAAC3B,MAAM,EAAE;EACjB;AACF;AACA,SAASa,cAAcA,CAACzD,IAAI,EAAEuE,MAAM,EAAE;EACpC,OAAOA,MAAM,CAAC1D,KAAK,GAAG0D,MAAM,CAACpE,KAAK,CAACP,MAAM,EAAE;IACzC,IAAI4E,IAAI,GAAGD,MAAM,CAACpE,KAAK,CAACoE,MAAM,CAAC1D,KAAK,EAAE,CAAC;IACvCb,IAAI,CAACG,KAAK,CAAC2B,IAAI,CAAC0C,IAAI,CAAC;EACvB;AACF;AAEA,SAASlB,a
AAaA,CAACmB,KAAK,EAAE;EAC5B,IAAI7D,GAAG,GAAG,EAAE;IACR8D,SAAS,GAAGD,KAAK,CAACtE,KAAK,CAACsE,KAAK,CAAC5D,KAAK,CAAC,CAAC,CAAC,CAAC;EAC3C,OAAO4D,KAAK,CAAC5D,KAAK,GAAG4D,KAAK,CAACtE,KAAK,CAACP,MAAM,EAAE;IACvC,IAAI4E,IAAI,GAAGC,KAAK,CAACtE,KAAK,CAACsE,KAAK,CAAC5D,KAAK,CAAC;;IAEnC;IACA,IAAI6D,SAAS,KAAK,GAAG,IAAIF,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MACxCE,SAAS,GAAG,GAAG;IACjB;IAEA,IAAIA,SAAS,KAAKF,IAAI,CAAC,CAAC,CAAC,EAAE;MACzB5D,GAAG,CAACkB,IAAI,CAAC0C,IAAI,CAAC;MACdC,KAAK,CAAC5D,KAAK,EAAE;IACf,CAAC,MAAM;MACL;IACF;EACF;EAEA,OAAOD,GAAG;AACZ;AACA,SAASwD,cAAcA,CAACK,KAAK,EAAEE,YAAY,EAAE;EAC3C,IAAIC,OAAO,GAAG,EAAE;IACZP,MAAM,GAAG,EAAE;IACXQ,UAAU,GAAG,CAAC;IACdC,cAAc,GAAG,KAAK;IACtBC,UAAU,GAAG,KAAK;EACtB,OAAOF,UAAU,GAAGF,YAAY,CAAC/E,MAAM,IAC9B6E,KAAK,CAAC5D,KAAK,GAAG4D,KAAK,CAACtE,KAAK,CAACP,MAAM,EAAE;IACzC,IAAIoF,MAAM,GAAGP,KAAK,CAACtE,KAAK,CAACsE,KAAK,CAAC5D,KAAK,CAAC;MACjCoE,KAAK,GAAGN,YAAY,CAACE,UAAU,CAAC;;IAEpC;IACA,IAAII,KAAK,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MACpB;IACF;IAEAH,cAAc,GAAGA,cAAc,IAAIE,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG;IAEpDX,MAAM,CAACvC,IAAI,CAACmD,KAAK,CAAC;IAClBJ,UAAU,EAAE;;IAEZ;IACA;IACA,IAAIG,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;MACrBD,UAAU,GAAG,IAAI;MAEjB,OAAOC,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG,EAAE;QACxBJ,OAAO,CAAC9C,IAAI,CAACkD,MAAM,CAAC;QACpBA,MAAM,GAAGP,KAAK,CAACtE,KAAK,CAAC,EAAEsE,KAAK,CAAC5D,KAAK,CAAC;MACrC;IACF;IAEA,IAAIoE,KAAK,CAACC,MAAM,CAAC,CAAC,CAAC,KAAKF,MAAM,CAACE,MAAM,CAAC,CAAC,CAAC,EAAE;MACxCN,OAAO,CAAC9C,IAAI,CAACkD,MAAM,CAAC;MACpBP,KAAK,CAAC5D,KAAK,EAAE;IACf,CAAC,MAAM;MACLkE,UAAU,GAAG,IAAI;IACnB;EACF;EAEA,IAAI,CAACJ,YAAY,CAACE,UAAU,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,GAAG,IACxCC,cAAc,EAAE;IACrBC,UAAU,GAAG,IAAI;EACnB;EAEA,IAAIA,UAAU,EAAE;IACd,OAAOH,OAAO;EAChB;EAEA,OAAOC,UAAU,GAAGF,YAAY,CAAC/E,MAAM,EAAE;IACvCyE,MAAM,CAACvC,IAAI,CAAC6C,YAAY,CAACE,UAAU,EAAE,CAAC,CAAC;EACzC;EAEA,OAAO;IACLR,MAAM,EAANA,MAAM;IACNO,OAAO,EAAPA;EACF,CAAC;AACH;AAEA,SAAShB,UAAUA,CAACgB,OAAO,EAAE;EAC3B,OAAOA,OAAO,CAACO,MAAM,CAAC,UAASC,IAAI,EAAEJ,MAAM,EAAE;IAC3C,OAAOI,IAAI,IAAIJ,MAAM,CAAC,CAAC,CAAC,KAAK,GAAG;EAClC,CAAC,EAAE,IAAI,CAAC;AACV;AACA,SAASlB,kBAAkBA,CAACW,KAAK,EAAEY,aAAa,EAAEC,KAAK,EAAE;EACvD,KAAK,IAAIzF,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGyF,KAAK,EAAEzF,CAAC,EAAE,EAAE;IAC9B,IAAI0F,aAAa,GAAGF,aAAa,CAACA,aAAa,CAACzF,MAAM,GAAG0F,KAAK,GAAGzF,CAAC,CAAC,CAACqF,MAAM,CAAC,CAAC,CAAC;IAC7E,IAAIT,KAAK,CAACtE,KAAK,CAACsE,KAAK,CAAC5D,KAAK,GAAGhB,CAAC,CAAC,KAAK,GAAG,GAAG0F,aAAa,EAAE;MACxD,OAAO,KAAK;IACd;EACF;EAEAd,KAAK,CAAC5D,KAAK,IAAIyE,KAAK;EACpB,OAAO,IAAI;AACb;AAEA,SAASpF,mBAAmBA,CAACC,KAAK,EAAE;EAClC,IAAIC,QAAQ,GAAG,CAAC;EAChB,IAAIC,QAAQ,GAAG,CAAC;EAEhBF,KAAK,CAACqF,OAAO,CAAC,UAAShB,IAAI,EAAE;IAC3B,IAAI,OAAOA,IAAI,KAAK,QAAQ,EAAE;MAC5B,IAAIiB,OAAO,GAAGvF,mBAAmB,CAACsE,IAAI,CAAChE,IAAI,CAAC;MAC5C,IAAIkF,UAAU,GAAGxF,mBAAmB,CAACsE,IAAI,CAAC/D,MAAM,CAAC;MAEjD,IAAIL,QAAQ,KAAKE,SAAS,EAAE;QAC1B,IAAImF,OAAO,CAACrF,QAAQ,KAAKsF,UAAU,CAACtF,QAAQ,EAAE;UAC5CA,QAAQ,IAAIqF,OAAO,CAACrF,QAAQ;QAC9B,CAAC,MAAM;UACLA,QAAQ,GAAGE,SAAS;QACtB;MACF;MAEA,IAAID,QAAQ,KAAKC,SAAS,EAAE;QAC1B,IAAImF,OAAO,CAACpF,QAAQ,KAAKqF,UAAU,CAACrF,QAAQ,EAAE;UAC5CA,QAAQ,IAAIoF,OAAO,CAACpF,QAAQ;QAC9B,CAAC,MAAM;UACLA,QAAQ,GAAGC,SAAS;QACtB;MACF;IACF,CAAC,MAAM;MACL,IAAID,QAAQ,KAAKC,SAAS,KAAKkE,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIA,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;QAClEnE,QAAQ,EAAE;MACZ;MACA,IAAID,QAAQ,KAAKE,SAAS,KAAKkE,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,IAAIA,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,EAAE;QAClEpE,QAAQ,EAAE;MACZ;IACF;EACF,CAAC,CAAC;EAEF,OAAO;IAACA,QAAQ,EAARA,QAAQ;IAAEC,QAAQ,EAARA;EAAQ,CAAC;AAC7B","ignoreList":[]}
diff --git a/node_modules/diff/lib/patch/parse.js b/node_modules/diff/lib/patch/parse.js
deleted file mode 100644
index 15acdd9a0e1c2..0000000000000
--- a/node_modules/diff/lib/patch/parse.js
+++ /dev/null
@@ -1,151 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.parsePatch = parsePatch;
-/*istanbul ignore end*/
-function parsePatch(uniDiff) {
-  var diffstr = uniDiff.split(/\n/),
-    list = [],
-    i = 0;
-  function parseIndex() {
-    var index = {};
-    list.push(index);
-
-    // Parse diff metadata
-    while (i < diffstr.length) {
-      var line = diffstr[i];
-
-      // File header found, end parsing diff metadata
-      if (/^(\-\-\-|\+\+\+|@@)\s/.test(line)) {
-        break;
-      }
-
-      // Diff index
-      var header = /^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/.exec(line);
-      if (header) {
-        index.index = header[1];
-      }
-      i++;
-    }
-
-    // Parse file headers if they are defined. Unified diff requires them, but
-    // there's no technical issues to have an isolated hunk without file header
-    parseFileHeader(index);
-    parseFileHeader(index);
-
-    // Parse hunks
-    index.hunks = [];
-    while (i < diffstr.length) {
-      var _line = diffstr[i];
-      if (/^(Index:\s|diff\s|\-\-\-\s|\+\+\+\s|===================================================================)/.test(_line)) {
-        break;
-      } else if (/^@@/.test(_line)) {
-        index.hunks.push(parseHunk());
-      } else if (_line) {
-        throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(_line));
-      } else {
-        i++;
-      }
-    }
-  }
-
-  // Parses the --- and +++ headers, if none are found, no lines
-  // are consumed.
-  function parseFileHeader(index) {
-    var fileHeader = /^(---|\+\+\+)\s+(.*)\r?$/.exec(diffstr[i]);
-    if (fileHeader) {
-      var keyPrefix = fileHeader[1] === '---' ? 'old' : 'new';
-      var data = fileHeader[2].split('\t', 2);
-      var fileName = data[0].replace(/\\\\/g, '\\');
-      if (/^".*"$/.test(fileName)) {
-        fileName = fileName.substr(1, fileName.length - 2);
-      }
-      index[keyPrefix + 'FileName'] = fileName;
-      index[keyPrefix + 'Header'] = (data[1] || '').trim();
-      i++;
-    }
-  }
-
-  // Parses a hunk
-  // This assumes that we are at the start of a hunk.
-  function parseHunk() {
-    var chunkHeaderIndex = i,
-      chunkHeaderLine = diffstr[i++],
-      chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
-    var hunk = {
-      oldStart: +chunkHeader[1],
-      oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
-      newStart: +chunkHeader[3],
-      newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
-      lines: []
-    };
-
-    // Unified Diff Format quirk: If the chunk size is 0,
-    // the first number is one lower than one would expect.
-    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
-    if (hunk.oldLines === 0) {
-      hunk.oldStart += 1;
-    }
-    if (hunk.newLines === 0) {
-      hunk.newStart += 1;
-    }
-    var addCount = 0,
-      removeCount = 0;
-    for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines ||
-    /*istanbul ignore start*/
-    (_diffstr$i =
-    /*istanbul ignore end*/
-    diffstr[i]) !== null && _diffstr$i !== void 0 &&
-    /*istanbul ignore start*/
-    _diffstr$i
-    /*istanbul ignore end*/
-    .startsWith('\\')); i++) {
-      /*istanbul ignore start*/
-      var _diffstr$i;
-      /*istanbul ignore end*/
-      var operation = diffstr[i].length == 0 && i != diffstr.length - 1 ? ' ' : diffstr[i][0];
-      if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
-        hunk.lines.push(diffstr[i]);
-        if (operation === '+') {
-          addCount++;
-        } else if (operation === '-') {
-          removeCount++;
-        } else if (operation === ' ') {
-          addCount++;
-          removeCount++;
-        }
-      } else {
-        throw new Error(
-        /*istanbul ignore start*/
-        "Hunk at line ".concat(
-        /*istanbul ignore end*/
-        chunkHeaderIndex + 1, " contained invalid line ").concat(diffstr[i]));
-      }
-    }
-
-    // Handle the empty block count case
-    if (!addCount && hunk.newLines === 1) {
-      hunk.newLines = 0;
-    }
-    if (!removeCount && hunk.oldLines === 1) {
-      hunk.oldLines = 0;
-    }
-
-    // Perform sanity checking
-    if (addCount !== hunk.newLines) {
-      throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-    }
-    if (removeCount !== hunk.oldLines) {
-      throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
-    }
-    return hunk;
-  }
-  while (i < diffstr.length) {
-    parseIndex();
-  }
-  return list;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["parsePatch","uniDiff","diffstr","split","list","i","parseIndex","index","push","length","line","test","header","exec","parseFileHeader","hunks","parseHunk","Error","JSON","stringify","fileHeader","keyPrefix","data","fileName","replace","substr","trim","chunkHeaderIndex","chunkHeaderLine","chunkHeader","hunk","oldStart","oldLines","newStart","newLines","lines","addCount","removeCount","_diffstr$i","startsWith","operation","concat"],"sources":["../../src/patch/parse.js"],"sourcesContent":["export function parsePatch(uniDiff) {\n  let diffstr = uniDiff.split(/\\n/),\n      list = [],\n      i = 0;\n\n  function parseIndex() {\n    let index = {};\n    list.push(index);\n\n    // Parse diff metadata\n    while (i < diffstr.length) {\n      let line = diffstr[i];\n\n      // File header found, end parsing diff metadata\n      if ((/^(\\-\\-\\-|\\+\\+\\+|@@)\\s/).test(line)) {\n        break;\n      }\n\n      // Diff index\n      let header = (/^(?:Index:|diff(?: -r \\w+)+)\\s+(.+?)\\s*$/).exec(line);\n      if (header) {\n        index.index = header[1];\n      }\n\n      i++;\n    }\n\n    // Parse file headers if they are defined. Unified diff requires them, but\n    // there's no technical issues to have an isolated hunk without file header\n    parseFileHeader(index);\n    parseFileHeader(index);\n\n    // Parse hunks\n    index.hunks = [];\n\n    while (i < diffstr.length) {\n      let line = diffstr[i];\n      if ((/^(Index:\\s|diff\\s|\\-\\-\\-\\s|\\+\\+\\+\\s|===================================================================)/).test(line)) {\n        break;\n      } else if ((/^@@/).test(line)) {\n        index.hunks.push(parseHunk());\n      } else if (line) {\n        throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line));\n      } else {\n        i++;\n      }\n    }\n  }\n\n  // Parses the --- and +++ headers, if none are found, no lines\n  // are consumed.\n  function parseFileHeader(index) {\n    const fileHeader = (/^(---|\\+\\+\\+)\\s+(.*)\\r?$/).exec(diffstr[i]);\n    if (fileHeader) {\n      let keyPrefix = fileHeader[1] === '---' ? 'old' : 'new';\n      const data = fileHeader[2].split('\\t', 2);\n      let fileName = data[0].replace(/\\\\\\\\/g, '\\\\');\n      if ((/^\".*\"$/).test(fileName)) {\n        fileName = fileName.substr(1, fileName.length - 2);\n      }\n      index[keyPrefix + 'FileName'] = fileName;\n      index[keyPrefix + 'Header'] = (data[1] || '').trim();\n\n      i++;\n    }\n  }\n\n  // Parses a hunk\n  // This assumes that we are at the start of a hunk.\n  function parseHunk() {\n    let chunkHeaderIndex = i,\n        chunkHeaderLine = diffstr[i++],\n        chunkHeader = chunkHeaderLine.split(/@@ -(\\d+)(?:,(\\d+))? \\+(\\d+)(?:,(\\d+))? @@/);\n\n    let hunk = {\n      oldStart: +chunkHeader[1],\n      oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],\n      newStart: +chunkHeader[3],\n      newLines: typeof chunkHeader[4] === 'undefined' ? 
1 : +chunkHeader[4],\n      lines: []\n    };\n\n    // Unified Diff Format quirk: If the chunk size is 0,\n    // the first number is one lower than one would expect.\n    // https://www.artima.com/weblogs/viewpost.jsp?thread=164293\n    if (hunk.oldLines === 0) {\n      hunk.oldStart += 1;\n    }\n    if (hunk.newLines === 0) {\n      hunk.newStart += 1;\n    }\n\n    let addCount = 0,\n        removeCount = 0;\n    for (\n      ;\n      i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || diffstr[i]?.startsWith('\\\\'));\n      i++\n    ) {\n      let operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ? ' ' : diffstr[i][0];\n      if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\\\') {\n        hunk.lines.push(diffstr[i]);\n\n        if (operation === '+') {\n          addCount++;\n        } else if (operation === '-') {\n          removeCount++;\n        } else if (operation === ' ') {\n          addCount++;\n          removeCount++;\n        }\n      } else {\n        throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i]}`);\n      }\n    }\n\n    // Handle the empty block count case\n    if (!addCount && hunk.newLines === 1) {\n      hunk.newLines = 0;\n    }\n    if (!removeCount && hunk.oldLines === 1) {\n      hunk.oldLines = 0;\n    }\n\n    // Perform sanity checking\n    if (addCount !== hunk.newLines) {\n      throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));\n    }\n    if (removeCount !== hunk.oldLines) {\n      throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));\n    }\n\n    return hunk;\n  }\n\n  while (i < diffstr.length) {\n    parseIndex();\n  }\n\n  return 
list;\n}\n"],"mappings":";;;;;;;;AAAO,SAASA,UAAUA,CAACC,OAAO,EAAE;EAClC,IAAIC,OAAO,GAAGD,OAAO,CAACE,KAAK,CAAC,IAAI,CAAC;IAC7BC,IAAI,GAAG,EAAE;IACTC,CAAC,GAAG,CAAC;EAET,SAASC,UAAUA,CAAA,EAAG;IACpB,IAAIC,KAAK,GAAG,CAAC,CAAC;IACdH,IAAI,CAACI,IAAI,CAACD,KAAK,CAAC;;IAEhB;IACA,OAAOF,CAAC,GAAGH,OAAO,CAACO,MAAM,EAAE;MACzB,IAAIC,IAAI,GAAGR,OAAO,CAACG,CAAC,CAAC;;MAErB;MACA,IAAK,uBAAuB,CAAEM,IAAI,CAACD,IAAI,CAAC,EAAE;QACxC;MACF;;MAEA;MACA,IAAIE,MAAM,GAAI,0CAA0C,CAAEC,IAAI,CAACH,IAAI,CAAC;MACpE,IAAIE,MAAM,EAAE;QACVL,KAAK,CAACA,KAAK,GAAGK,MAAM,CAAC,CAAC,CAAC;MACzB;MAEAP,CAAC,EAAE;IACL;;IAEA;IACA;IACAS,eAAe,CAACP,KAAK,CAAC;IACtBO,eAAe,CAACP,KAAK,CAAC;;IAEtB;IACAA,KAAK,CAACQ,KAAK,GAAG,EAAE;IAEhB,OAAOV,CAAC,GAAGH,OAAO,CAACO,MAAM,EAAE;MACzB,IAAIC,KAAI,GAAGR,OAAO,CAACG,CAAC,CAAC;MACrB,IAAK,0GAA0G,CAAEM,IAAI,CAACD,KAAI,CAAC,EAAE;QAC3H;MACF,CAAC,MAAM,IAAK,KAAK,CAAEC,IAAI,CAACD,KAAI,CAAC,EAAE;QAC7BH,KAAK,CAACQ,KAAK,CAACP,IAAI,CAACQ,SAAS,CAAC,CAAC,CAAC;MAC/B,CAAC,MAAM,IAAIN,KAAI,EAAE;QACf,MAAM,IAAIO,KAAK,CAAC,eAAe,IAAIZ,CAAC,GAAG,CAAC,CAAC,GAAG,GAAG,GAAGa,IAAI,CAACC,SAAS,CAACT,KAAI,CAAC,CAAC;MACzE,CAAC,MAAM;QACLL,CAAC,EAAE;MACL;IACF;EACF;;EAEA;EACA;EACA,SAASS,eAAeA,CAACP,KAAK,EAAE;IAC9B,IAAMa,UAAU,GAAI,0BAA0B,CAAEP,IAAI,CAACX,OAAO,CAACG,CAAC,CAAC,CAAC;IAChE,IAAIe,UAAU,EAAE;MACd,IAAIC,SAAS,GAAGD,UAAU,CAAC,CAAC,CAAC,KAAK,KAAK,GAAG,KAAK,GAAG,KAAK;MACvD,IAAME,IAAI,GAAGF,UAAU,CAAC,CAAC,CAAC,CAACjB,KAAK,CAAC,IAAI,EAAE,CAAC,CAAC;MACzC,IAAIoB,QAAQ,GAAGD,IAAI,CAAC,CAAC,CAAC,CAACE,OAAO,CAAC,OAAO,EAAE,IAAI,CAAC;MAC7C,IAAK,QAAQ,CAAEb,IAAI,CAACY,QAAQ,CAAC,EAAE;QAC7BA,QAAQ,GAAGA,QAAQ,CAACE,MAAM,CAAC,CAAC,EAAEF,QAAQ,CAACd,MAAM,GAAG,CAAC,CAAC;MACpD;MACAF,KAAK,CAACc,SAAS,GAAG,UAAU,CAAC,GAAGE,QAAQ;MACxChB,KAAK,CAACc,SAAS,GAAG,QAAQ,CAAC,GAAG,CAACC,IAAI,CAAC,CAAC,CAAC,IAAI,EAAE,EAAEI,IAAI,CAAC,CAAC;MAEpDrB,CAAC,EAAE;IACL;EACF;;EAEA;EACA;EACA,SAASW,SAASA,CAAA,EAAG;IACnB,IAAIW,gBAAgB,GAAGtB,CAAC;MACpBuB,eAAe,GAAG1B,OAAO,CAACG,CAAC,EAAE,CAAC;MAC9BwB,WAAW,GAAGD,eAAe,CAACzB,KAAK,CAAC,4CAA4C,CAAC;IAErF,IAAI2B,IAAI,GAAG;MACTC,QAAQ,EAAE,CAACF,WAAW,CAAC,CAAC,CAAC;MACzBG,QAAQ,EAAE,OAAOH,WAAW,CAAC,CAAC,CAAC,KAAK,WAAW,GAAG,CAAC,GAAG,CAACA,WAAW,CAAC,CAAC,CAAC;MACrEI,QAAQ,EAAE,CAACJ,WAAW,CAAC,CAAC,CAAC;MACzBK,QAAQ,EAAE,OAAOL,WAAW,CAAC,CAAC,CAAC,KAAK,WAAW,GAAG,CAAC,GAAG,CAACA,WAAW,CAAC,CAAC,CAAC;MACrEM,KAAK,EAAE;IACT,CAAC;;IAED;IACA;IACA;IACA,IAAIL,IAAI,CAACE,QAAQ,KAAK,CAAC,EAAE;MACvBF,IAAI,CAACC,QAAQ,IAAI,CAAC;IACpB;IACA,IAAID,IAAI,CAACI,QAAQ,KAAK,CAAC,EAAE;MACvBJ,IAAI,CAACG,QAAQ,IAAI,CAAC;IACpB;IAEA,IAAIG,QAAQ,GAAG,CAAC;MACZC,WAAW,GAAG,CAAC;IACnB,OAEEhC,CAAC,GAAGH,OAAO,CAACO,MAAM,KAAK4B,WAAW,GAAGP,IAAI,CAACE,QAAQ,IAAII,QAAQ,GAAGN,IAAI,CAACI,QAAQ;IAAA;IAAA,CAAAI,UAAA;IAAA;IAAIpC,OAAO,CAACG,CAAC,CAAC,cAAAiC,UAAA;IAAV;IAAAA;IAAA;IAAA,CAAYC,UAAU,CAAC,IAAI,CAAC,CAAC,EAC/GlC,CAAC,EAAE,EACH;MAAA;MAAA,IAAAiC,UAAA;MAAA;MACA,IAAIE,SAAS,GAAItC,OAAO,CAACG,CAAC,CAAC,CAACI,MAAM,IAAI,CAAC,IAAIJ,CAAC,IAAKH,OAAO,CAACO,MAAM,GAAG,CAAE,GAAI,GAAG,GAAGP,OAAO,CAACG,CAAC,CAAC,CAAC,CAAC,CAAC;MAC3F,IAAImC,SAAS,KAAK,GAAG,IAAIA,SAAS,KAAK,GAAG,IAAIA,SAAS,KAAK,GAAG,IAAIA,SAAS,KAAK,IAAI,EAAE;QACrFV,IAAI,CAACK,KAAK,CAAC3B,IAAI,CAACN,OAAO,CAACG,CAAC,CAAC,CAAC;QAE3B,IAAImC,SAAS,KAAK,GAAG,EAAE;UACrBJ,QAAQ,EAAE;QACZ,CAAC,MAAM,IAAII,SAAS,KAAK,GAAG,EAAE;UAC5BH,WAAW,EAAE;QACf,CAAC,MAAM,IAAIG,SAAS,KAAK,GAAG,EAAE;UAC5BJ,QAAQ,EAAE;UACVC,WAAW,EAAE;QACf;MACF,CAAC,MAAM;QACL,MAAM,IAAIpB,KAAK;QAAA;QAAA,gBAAAwB,MAAA;QAAA;QAAiBd,gBAAgB,GAAG,CAAC,8BAAAc,MAAA,CAA2BvC,OAAO,CAACG,CAAC,CAAC,CAAE,CAAC;MAC9F;IACF;;IAEA;IACA,IAAI,CAAC+B,QAAQ,IAAIN,IAAI,CAACI,QAAQ,KAAK,CAAC,EAAE;MACpCJ,IAAI,CAACI,QAAQ
,GAAG,CAAC;IACnB;IACA,IAAI,CAACG,WAAW,IAAIP,IAAI,CAACE,QAAQ,KAAK,CAAC,EAAE;MACvCF,IAAI,CAACE,QAAQ,GAAG,CAAC;IACnB;;IAEA;IACA,IAAII,QAAQ,KAAKN,IAAI,CAACI,QAAQ,EAAE;MAC9B,MAAM,IAAIjB,KAAK,CAAC,kDAAkD,IAAIU,gBAAgB,GAAG,CAAC,CAAC,CAAC;IAC9F;IACA,IAAIU,WAAW,KAAKP,IAAI,CAACE,QAAQ,EAAE;MACjC,MAAM,IAAIf,KAAK,CAAC,oDAAoD,IAAIU,gBAAgB,GAAG,CAAC,CAAC,CAAC;IAChG;IAEA,OAAOG,IAAI;EACb;EAEA,OAAOzB,CAAC,GAAGH,OAAO,CAACO,MAAM,EAAE;IACzBH,UAAU,CAAC,CAAC;EACd;EAEA,OAAOF,IAAI;AACb","ignoreList":[]}
diff --git a/node_modules/diff/lib/patch/reverse.js b/node_modules/diff/lib/patch/reverse.js
deleted file mode 100644
index 3c8723e4d5fe6..0000000000000
--- a/node_modules/diff/lib/patch/reverse.js
+++ /dev/null
@@ -1,58 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.reversePatch = reversePatch;
-function _typeof(o) { "@babel/helpers - typeof"; return _typeof = "function" == typeof Symbol && "symbol" == typeof Symbol.iterator ? function (o) { return typeof o; } : function (o) { return o && "function" == typeof Symbol && o.constructor === Symbol && o !== Symbol.prototype ? "symbol" : typeof o; }, _typeof(o); }
-function ownKeys(e, r) { var t = Object.keys(e); if (Object.getOwnPropertySymbols) { var o = Object.getOwnPropertySymbols(e); r && (o = o.filter(function (r) { return Object.getOwnPropertyDescriptor(e, r).enumerable; })), t.push.apply(t, o); } return t; }
-function _objectSpread(e) { for (var r = 1; r < arguments.length; r++) { var t = null != arguments[r] ? arguments[r] : {}; r % 2 ? ownKeys(Object(t), !0).forEach(function (r) { _defineProperty(e, r, t[r]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(e, Object.getOwnPropertyDescriptors(t)) : ownKeys(Object(t)).forEach(function (r) { Object.defineProperty(e, r, Object.getOwnPropertyDescriptor(t, r)); }); } return e; }
-function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; }
-function _toPropertyKey(t) { var i = _toPrimitive(t, "string"); return "symbol" == _typeof(i) ? i : i + ""; }
-function _toPrimitive(t, r) { if ("object" != _typeof(t) || !t) return t; var e = t[Symbol.toPrimitive]; if (void 0 !== e) { var i = e.call(t, r || "default"); if ("object" != _typeof(i)) return i; throw new TypeError("@@toPrimitive must return a primitive value."); } return ("string" === r ? String : Number)(t); }
-/*istanbul ignore end*/
-function reversePatch(structuredPatch) {
-  if (Array.isArray(structuredPatch)) {
-    return structuredPatch.map(reversePatch).reverse();
-  }
-  return (
-    /*istanbul ignore start*/
-    _objectSpread(_objectSpread({},
-    /*istanbul ignore end*/
-    structuredPatch), {}, {
-      oldFileName: structuredPatch.newFileName,
-      oldHeader: structuredPatch.newHeader,
-      newFileName: structuredPatch.oldFileName,
-      newHeader: structuredPatch.oldHeader,
-      hunks: structuredPatch.hunks.map(function (hunk) {
-        return {
-          oldLines: hunk.newLines,
-          oldStart: hunk.newStart,
-          newLines: hunk.oldLines,
-          newStart: hunk.oldStart,
-          lines: hunk.lines.map(function (l) {
-            if (l.startsWith('-')) {
-              return (
-                /*istanbul ignore start*/
-                "+".concat(
-                /*istanbul ignore end*/
-                l.slice(1))
-              );
-            }
-            if (l.startsWith('+')) {
-              return (
-                /*istanbul ignore start*/
-                "-".concat(
-                /*istanbul ignore end*/
-                l.slice(1))
-              );
-            }
-            return l;
-          })
-        };
-      })
-    })
-  );
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJyZXZlcnNlUGF0Y2giLCJzdHJ1Y3R1cmVkUGF0Y2giLCJBcnJheSIsImlzQXJyYXkiLCJtYXAiLCJyZXZlcnNlIiwiX29iamVjdFNwcmVhZCIsIm9sZEZpbGVOYW1lIiwibmV3RmlsZU5hbWUiLCJvbGRIZWFkZXIiLCJuZXdIZWFkZXIiLCJodW5rcyIsImh1bmsiLCJvbGRMaW5lcyIsIm5ld0xpbmVzIiwib2xkU3RhcnQiLCJuZXdTdGFydCIsImxpbmVzIiwibCIsInN0YXJ0c1dpdGgiLCJjb25jYXQiLCJzbGljZSJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy9wYXRjaC9yZXZlcnNlLmpzIl0sInNvdXJjZXNDb250ZW50IjpbImV4cG9ydCBmdW5jdGlvbiByZXZlcnNlUGF0Y2goc3RydWN0dXJlZFBhdGNoKSB7XG4gIGlmIChBcnJheS5pc0FycmF5KHN0cnVjdHVyZWRQYXRjaCkpIHtcbiAgICByZXR1cm4gc3RydWN0dXJlZFBhdGNoLm1hcChyZXZlcnNlUGF0Y2gpLnJldmVyc2UoKTtcbiAgfVxuXG4gIHJldHVybiB7XG4gICAgLi4uc3RydWN0dXJlZFBhdGNoLFxuICAgIG9sZEZpbGVOYW1lOiBzdHJ1Y3R1cmVkUGF0Y2gubmV3RmlsZU5hbWUsXG4gICAgb2xkSGVhZGVyOiBzdHJ1Y3R1cmVkUGF0Y2gubmV3SGVhZGVyLFxuICAgIG5ld0ZpbGVOYW1lOiBzdHJ1Y3R1cmVkUGF0Y2gub2xkRmlsZU5hbWUsXG4gICAgbmV3SGVhZGVyOiBzdHJ1Y3R1cmVkUGF0Y2gub2xkSGVhZGVyLFxuICAgIGh1bmtzOiBzdHJ1Y3R1cmVkUGF0Y2guaHVua3MubWFwKGh1bmsgPT4ge1xuICAgICAgcmV0dXJuIHtcbiAgICAgICAgb2xkTGluZXM6IGh1bmsubmV3TGluZXMsXG4gICAgICAgIG9sZFN0YXJ0OiBodW5rLm5ld1N0YXJ0LFxuICAgICAgICBuZXdMaW5lczogaHVuay5vbGRMaW5lcyxcbiAgICAgICAgbmV3U3RhcnQ6IGh1bmsub2xkU3RhcnQsXG4gICAgICAgIGxpbmVzOiBodW5rLmxpbmVzLm1hcChsID0+IHtcbiAgICAgICAgICBpZiAobC5zdGFydHNXaXRoKCctJykpIHsgcmV0dXJuIGArJHtsLnNsaWNlKDEpfWA7IH1cbiAgICAgICAgICBpZiAobC5zdGFydHNXaXRoKCcrJykpIHsgcmV0dXJuIGAtJHtsLnNsaWNlKDEpfWA7IH1cbiAgICAgICAgICByZXR1cm4gbDtcbiAgICAgICAgfSlcbiAgICAgIH07XG4gICAgfSlcbiAgfTtcbn1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7Ozs7Ozs7QUFBTyxTQUFTQSxZQUFZQSxDQUFDQyxlQUFlLEVBQUU7RUFDNUMsSUFBSUMsS0FBSyxDQUFDQyxPQUFPLENBQUNGLGVBQWUsQ0FBQyxFQUFFO0lBQ2xDLE9BQU9BLGVBQWUsQ0FBQ0csR0FBRyxDQUFDSixZQUFZLENBQUMsQ0FBQ0ssT0FBTyxDQUFDLENBQUM7RUFDcEQ7RUFFQTtJQUFBO0lBQUFDLGFBQUEsQ0FBQUEsYUFBQTtJQUFBO0lBQ0tMLGVBQWU7TUFDbEJNLFdBQVcsRUFBRU4sZUFBZSxDQUFDTyxXQUFXO01BQ3hDQyxTQUFTLEVBQUVSLGVBQWUsQ0FBQ1MsU0FBUztNQUNwQ0YsV0FBVyxFQUFFUCxlQUFlLENBQUNNLFdBQVc7TUFDeENHLFNBQVMsRUFBRVQsZUFBZSxDQUFDUSxTQUFTO01BQ3BDRSxLQUFLLEVBQUVWLGVBQWUsQ0FBQ1UsS0FBSyxDQUFDUCxHQUFHLENBQUMsVUFBQVEsSUFBSSxFQUFJO1FBQ3ZDLE9BQU87VUFDTEMsUUFBUSxFQUFFRCxJQUFJLENBQUNFLFFBQVE7VUFDdkJDLFFBQVEsRUFBRUgsSUFBSSxDQUFDSSxRQUFRO1VBQ3ZCRixRQUFRLEVBQUVGLElBQUksQ0FBQ0MsUUFBUTtVQUN2QkcsUUFBUSxFQUFFSixJQUFJLENBQUNHLFFBQVE7VUFDdkJFLEtBQUssRUFBRUwsSUFBSSxDQUFDSyxLQUFLLENBQUNiLEdBQUcsQ0FBQyxVQUFBYyxDQUFDLEVBQUk7WUFDekIsSUFBSUEsQ0FBQyxDQUFDQyxVQUFVLENBQUMsR0FBRyxDQUFDLEVBQUU7Y0FBRTtnQkFBQTtnQkFBQSxJQUFBQyxNQUFBO2dCQUFBO2dCQUFXRixDQUFDLENBQUNHLEtBQUssQ0FBQyxDQUFDLENBQUM7Y0FBQTtZQUFJO1lBQ2xELElBQUlILENBQUMsQ0FBQ0MsVUFBVSxDQUFDLEdBQUcsQ0FBQyxFQUFFO2NBQUU7Z0JBQUE7Z0JBQUEsSUFBQUMsTUFBQTtnQkFBQTtnQkFBV0YsQ0FBQyxDQUFDRyxLQUFLLENBQUMsQ0FBQyxDQUFDO2NBQUE7WUFBSTtZQUNsRCxPQUFPSCxDQUFDO1VBQ1YsQ0FBQztRQUNILENBQUM7TUFDSCxDQUFDO0lBQUM7RUFBQTtBQUVOIiwiaWdub3JlTGlzdCI6W119
diff --git a/node_modules/diff/lib/util/array.js b/node_modules/diff/lib/util/array.js
deleted file mode 100644
index af10977a70ac6..0000000000000
--- a/node_modules/diff/lib/util/array.js
+++ /dev/null
@@ -1,27 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.arrayEqual = arrayEqual;
-exports.arrayStartsWith = arrayStartsWith;
-/*istanbul ignore end*/
-function arrayEqual(a, b) {
-  if (a.length !== b.length) {
-    return false;
-  }
-  return arrayStartsWith(a, b);
-}
-function arrayStartsWith(array, start) {
-  if (start.length > array.length) {
-    return false;
-  }
-  for (var i = 0; i < start.length; i++) {
-    if (start[i] !== array[i]) {
-      return false;
-    }
-  }
-  return true;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJhcnJheUVxdWFsIiwiYSIsImIiLCJsZW5ndGgiLCJhcnJheVN0YXJ0c1dpdGgiLCJhcnJheSIsInN0YXJ0IiwiaSJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy91dGlsL2FycmF5LmpzIl0sInNvdXJjZXNDb250ZW50IjpbImV4cG9ydCBmdW5jdGlvbiBhcnJheUVxdWFsKGEsIGIpIHtcbiAgaWYgKGEubGVuZ3RoICE9PSBiLmxlbmd0aCkge1xuICAgIHJldHVybiBmYWxzZTtcbiAgfVxuXG4gIHJldHVybiBhcnJheVN0YXJ0c1dpdGgoYSwgYik7XG59XG5cbmV4cG9ydCBmdW5jdGlvbiBhcnJheVN0YXJ0c1dpdGgoYXJyYXksIHN0YXJ0KSB7XG4gIGlmIChzdGFydC5sZW5ndGggPiBhcnJheS5sZW5ndGgpIHtcbiAgICByZXR1cm4gZmFsc2U7XG4gIH1cblxuICBmb3IgKGxldCBpID0gMDsgaSA8IHN0YXJ0Lmxlbmd0aDsgaSsrKSB7XG4gICAgaWYgKHN0YXJ0W2ldICE9PSBhcnJheVtpXSkge1xuICAgICAgcmV0dXJuIGZhbHNlO1xuICAgIH1cbiAgfVxuXG4gIHJldHVybiB0cnVlO1xufVxuIl0sIm1hcHBpbmdzIjoiOzs7Ozs7Ozs7QUFBTyxTQUFTQSxVQUFVQSxDQUFDQyxDQUFDLEVBQUVDLENBQUMsRUFBRTtFQUMvQixJQUFJRCxDQUFDLENBQUNFLE1BQU0sS0FBS0QsQ0FBQyxDQUFDQyxNQUFNLEVBQUU7SUFDekIsT0FBTyxLQUFLO0VBQ2Q7RUFFQSxPQUFPQyxlQUFlLENBQUNILENBQUMsRUFBRUMsQ0FBQyxDQUFDO0FBQzlCO0FBRU8sU0FBU0UsZUFBZUEsQ0FBQ0MsS0FBSyxFQUFFQyxLQUFLLEVBQUU7RUFDNUMsSUFBSUEsS0FBSyxDQUFDSCxNQUFNLEdBQUdFLEtBQUssQ0FBQ0YsTUFBTSxFQUFFO0lBQy9CLE9BQU8sS0FBSztFQUNkO0VBRUEsS0FBSyxJQUFJSSxDQUFDLEdBQUcsQ0FBQyxFQUFFQSxDQUFDLEdBQUdELEtBQUssQ0FBQ0gsTUFBTSxFQUFFSSxDQUFDLEVBQUUsRUFBRTtJQUNyQyxJQUFJRCxLQUFLLENBQUNDLENBQUMsQ0FBQyxLQUFLRixLQUFLLENBQUNFLENBQUMsQ0FBQyxFQUFFO01BQ3pCLE9BQU8sS0FBSztJQUNkO0VBQ0Y7RUFFQSxPQUFPLElBQUk7QUFDYiIsImlnbm9yZUxpc3QiOltdfQ==
diff --git a/node_modules/diff/lib/util/distance-iterator.js b/node_modules/diff/lib/util/distance-iterator.js
deleted file mode 100644
index 63893731fb150..0000000000000
--- a/node_modules/diff/lib/util/distance-iterator.js
+++ /dev/null
@@ -1,54 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports["default"] = _default;
-/*istanbul ignore end*/
-// Iterator that traverses in the range of [min, max], stepping
-// by distance from a given start position. I.e. for [0, 4], with
-// start of 2, this will iterate 2, 3, 1, 4, 0.
-function
-/*istanbul ignore start*/
-_default
-/*istanbul ignore end*/
-(start, minLine, maxLine) {
-  var wantForward = true,
-    backwardExhausted = false,
-    forwardExhausted = false,
-    localOffset = 1;
-  return function iterator() {
-    if (wantForward && !forwardExhausted) {
-      if (backwardExhausted) {
-        localOffset++;
-      } else {
-        wantForward = false;
-      }
-
-      // Check if trying to fit beyond text length, and if not, check it fits
-      // after offset location (or desired location on first iteration)
-      if (start + localOffset <= maxLine) {
-        return start + localOffset;
-      }
-      forwardExhausted = true;
-    }
-    if (!backwardExhausted) {
-      if (!forwardExhausted) {
-        wantForward = true;
-      }
-
-      // Check if trying to fit before text beginning, and if not, check it fits
-      // before offset location
-      if (minLine <= start - localOffset) {
-        return start - localOffset++;
-      }
-      backwardExhausted = true;
-      return iterator();
-    }
-
-    // We tried to fit hunk before text beginning and beyond text length, then
-    // hunk can't fit on the text. Return undefined
-  };
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJfZGVmYXVsdCIsInN0YXJ0IiwibWluTGluZSIsIm1heExpbmUiLCJ3YW50Rm9yd2FyZCIsImJhY2t3YXJkRXhoYXVzdGVkIiwiZm9yd2FyZEV4aGF1c3RlZCIsImxvY2FsT2Zmc2V0IiwiaXRlcmF0b3IiXSwic291cmNlcyI6WyIuLi8uLi9zcmMvdXRpbC9kaXN0YW5jZS1pdGVyYXRvci5qcyJdLCJzb3VyY2VzQ29udGVudCI6WyIvLyBJdGVyYXRvciB0aGF0IHRyYXZlcnNlcyBpbiB0aGUgcmFuZ2Ugb2YgW21pbiwgbWF4XSwgc3RlcHBpbmdcbi8vIGJ5IGRpc3RhbmNlIGZyb20gYSBnaXZlbiBzdGFydCBwb3NpdGlvbi4gSS5lLiBmb3IgWzAsIDRdLCB3aXRoXG4vLyBzdGFydCBvZiAyLCB0aGlzIHdpbGwgaXRlcmF0ZSAyLCAzLCAxLCA0LCAwLlxuZXhwb3J0IGRlZmF1bHQgZnVuY3Rpb24oc3RhcnQsIG1pbkxpbmUsIG1heExpbmUpIHtcbiAgbGV0IHdhbnRGb3J3YXJkID0gdHJ1ZSxcbiAgICAgIGJhY2t3YXJkRXhoYXVzdGVkID0gZmFsc2UsXG4gICAgICBmb3J3YXJkRXhoYXVzdGVkID0gZmFsc2UsXG4gICAgICBsb2NhbE9mZnNldCA9IDE7XG5cbiAgcmV0dXJuIGZ1bmN0aW9uIGl0ZXJhdG9yKCkge1xuICAgIGlmICh3YW50Rm9yd2FyZCAmJiAhZm9yd2FyZEV4aGF1c3RlZCkge1xuICAgICAgaWYgKGJhY2t3YXJkRXhoYXVzdGVkKSB7XG4gICAgICAgIGxvY2FsT2Zmc2V0Kys7XG4gICAgICB9IGVsc2Uge1xuICAgICAgICB3YW50Rm9yd2FyZCA9IGZhbHNlO1xuICAgICAgfVxuXG4gICAgICAvLyBDaGVjayBpZiB0cnlpbmcgdG8gZml0IGJleW9uZCB0ZXh0IGxlbmd0aCwgYW5kIGlmIG5vdCwgY2hlY2sgaXQgZml0c1xuICAgICAgLy8gYWZ0ZXIgb2Zmc2V0IGxvY2F0aW9uIChvciBkZXNpcmVkIGxvY2F0aW9uIG9uIGZpcnN0IGl0ZXJhdGlvbilcbiAgICAgIGlmIChzdGFydCArIGxvY2FsT2Zmc2V0IDw9IG1heExpbmUpIHtcbiAgICAgICAgcmV0dXJuIHN0YXJ0ICsgbG9jYWxPZmZzZXQ7XG4gICAgICB9XG5cbiAgICAgIGZvcndhcmRFeGhhdXN0ZWQgPSB0cnVlO1xuICAgIH1cblxuICAgIGlmICghYmFja3dhcmRFeGhhdXN0ZWQpIHtcbiAgICAgIGlmICghZm9yd2FyZEV4aGF1c3RlZCkge1xuICAgICAgICB3YW50Rm9yd2FyZCA9IHRydWU7XG4gICAgICB9XG5cbiAgICAgIC8vIENoZWNrIGlmIHRyeWluZyB0byBmaXQgYmVmb3JlIHRleHQgYmVnaW5uaW5nLCBhbmQgaWYgbm90LCBjaGVjayBpdCBmaXRzXG4gICAgICAvLyBiZWZvcmUgb2Zmc2V0IGxvY2F0aW9uXG4gICAgICBpZiAobWluTGluZSA8PSBzdGFydCAtIGxvY2FsT2Zmc2V0KSB7XG4gICAgICAgIHJldHVybiBzdGFydCAtIGxvY2FsT2Zmc2V0Kys7XG4gICAgICB9XG5cbiAgICAgIGJhY2t3YXJkRXhoYXVzdGVkID0gdHJ1ZTtcbiAgICAgIHJldHVybiBpdGVyYXRvcigpO1xuICAgIH1cblxuICAgIC8vIFdlIHRyaWVkIHRvIGZpdCBodW5rIGJlZm9yZSB0ZXh0IGJlZ2lubmluZyBhbmQgYmV5b25kIHRleHQgbGVuZ3RoLCB0aGVuXG4gICAgLy8gaHVuayBjYW4ndCBmaXQgb24gdGhlIHRleHQuIFJldHVybiB1bmRlZmluZWRcbiAgfTtcbn1cbiJdLCJtYXBwaW5ncyI6Ijs7Ozs7Ozs7QUFBQTtBQUNBO0FBQ0E7QUFDZTtBQUFBO0FBQUFBO0FBQUFBO0FBQUEsQ0FBU0MsS0FBSyxFQUFFQyxPQUFPLEVBQUVDLE9BQU8sRUFBRTtFQUMvQyxJQUFJQyxXQUFXLEdBQUcsSUFBSTtJQUNsQkMsaUJBQWlCLEdBQUcsS0FBSztJQUN6QkMsZ0JBQWdCLEdBQUcsS0FBSztJQUN4QkMsV0FBVyxHQUFHLENBQUM7RUFFbkIsT0FBTyxTQUFTQyxRQUFRQSxDQUFBLEVBQUc7SUFDekIsSUFBSUosV0FBVyxJQUFJLENBQUNFLGdCQUFnQixFQUFFO01BQ3BDLElBQUlELGlCQUFpQixFQUFFO1FBQ3JCRSxXQUFXLEVBQUU7TUFDZixDQUFDLE1BQU07UUFDTEgsV0FBVyxHQUFHLEtBQUs7TUFDckI7O01BRUE7TUFDQTtNQUNBLElBQUlILEtBQUssR0FBR00sV0FBVyxJQUFJSixPQUFPLEVBQUU7UUFDbEMsT0FBT0YsS0FBSyxHQUFHTSxXQUFXO01BQzVCO01BRUFELGdCQUFnQixHQUFHLElBQUk7SUFDekI7SUFFQSxJQUFJLENBQUNELGlCQUFpQixFQUFFO01BQ3RCLElBQUksQ0FBQ0MsZ0JBQWdCLEVBQUU7UUFDckJGLFdBQVcsR0FBRyxJQUFJO01BQ3BCOztNQUVBO01BQ0E7TUFDQSxJQUFJRixPQUFPLElBQUlELEtBQUssR0FBR00sV0FBVyxFQUFFO1FBQ2xDLE9BQU9OLEtBQUssR0FBR00sV0FBVyxFQUFFO01BQzlCO01BRUFGLGlCQUFpQixHQUFHLElBQUk7TUFDeEIsT0FBT0csUUFBUSxDQUFDLENBQUM7SUFDbkI7O0lBRUE7SUFDQTtFQUNGLENBQUM7QUFDSCIsImlnbm9yZUxpc3QiOltdfQ==
diff --git a/node_modules/diff/lib/util/params.js b/node_modules/diff/lib/util/params.js
deleted file mode 100644
index 283c2472bc601..0000000000000
--- a/node_modules/diff/lib/util/params.js
+++ /dev/null
@@ -1,22 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.generateOptions = generateOptions;
-/*istanbul ignore end*/
-function generateOptions(options, defaults) {
-  if (typeof options === 'function') {
-    defaults.callback = options;
-  } else if (options) {
-    for (var name in options) {
-      /* istanbul ignore else */
-      if (options.hasOwnProperty(name)) {
-        defaults[name] = options[name];
-      }
-    }
-  }
-  return defaults;
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,eyJ2ZXJzaW9uIjozLCJuYW1lcyI6WyJnZW5lcmF0ZU9wdGlvbnMiLCJvcHRpb25zIiwiZGVmYXVsdHMiLCJjYWxsYmFjayIsIm5hbWUiLCJoYXNPd25Qcm9wZXJ0eSJdLCJzb3VyY2VzIjpbIi4uLy4uL3NyYy91dGlsL3BhcmFtcy5qcyJdLCJzb3VyY2VzQ29udGVudCI6WyJleHBvcnQgZnVuY3Rpb24gZ2VuZXJhdGVPcHRpb25zKG9wdGlvbnMsIGRlZmF1bHRzKSB7XG4gIGlmICh0eXBlb2Ygb3B0aW9ucyA9PT0gJ2Z1bmN0aW9uJykge1xuICAgIGRlZmF1bHRzLmNhbGxiYWNrID0gb3B0aW9ucztcbiAgfSBlbHNlIGlmIChvcHRpb25zKSB7XG4gICAgZm9yIChsZXQgbmFtZSBpbiBvcHRpb25zKSB7XG4gICAgICAvKiBpc3RhbmJ1bCBpZ25vcmUgZWxzZSAqL1xuICAgICAgaWYgKG9wdGlvbnMuaGFzT3duUHJvcGVydHkobmFtZSkpIHtcbiAgICAgICAgZGVmYXVsdHNbbmFtZV0gPSBvcHRpb25zW25hbWVdO1xuICAgICAgfVxuICAgIH1cbiAgfVxuICByZXR1cm4gZGVmYXVsdHM7XG59XG4iXSwibWFwcGluZ3MiOiI7Ozs7Ozs7O0FBQU8sU0FBU0EsZUFBZUEsQ0FBQ0MsT0FBTyxFQUFFQyxRQUFRLEVBQUU7RUFDakQsSUFBSSxPQUFPRCxPQUFPLEtBQUssVUFBVSxFQUFFO0lBQ2pDQyxRQUFRLENBQUNDLFFBQVEsR0FBR0YsT0FBTztFQUM3QixDQUFDLE1BQU0sSUFBSUEsT0FBTyxFQUFFO0lBQ2xCLEtBQUssSUFBSUcsSUFBSSxJQUFJSCxPQUFPLEVBQUU7TUFDeEI7TUFDQSxJQUFJQSxPQUFPLENBQUNJLGNBQWMsQ0FBQ0QsSUFBSSxDQUFDLEVBQUU7UUFDaENGLFFBQVEsQ0FBQ0UsSUFBSSxDQUFDLEdBQUdILE9BQU8sQ0FBQ0csSUFBSSxDQUFDO01BQ2hDO0lBQ0Y7RUFDRjtFQUNBLE9BQU9GLFFBQVE7QUFDakIiLCJpZ25vcmVMaXN0IjpbXX0=
diff --git a/node_modules/diff/lib/util/string.js b/node_modules/diff/lib/util/string.js
deleted file mode 100644
index f81c6827be731..0000000000000
--- a/node_modules/diff/lib/util/string.js
+++ /dev/null
@@ -1,131 +0,0 @@
-/*istanbul ignore start*/
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.hasOnlyUnixLineEndings = hasOnlyUnixLineEndings;
-exports.hasOnlyWinLineEndings = hasOnlyWinLineEndings;
-exports.longestCommonPrefix = longestCommonPrefix;
-exports.longestCommonSuffix = longestCommonSuffix;
-exports.maximumOverlap = maximumOverlap;
-exports.removePrefix = removePrefix;
-exports.removeSuffix = removeSuffix;
-exports.replacePrefix = replacePrefix;
-exports.replaceSuffix = replaceSuffix;
-/*istanbul ignore end*/
-function longestCommonPrefix(str1, str2) {
-  var i;
-  for (i = 0; i < str1.length && i < str2.length; i++) {
-    if (str1[i] != str2[i]) {
-      return str1.slice(0, i);
-    }
-  }
-  return str1.slice(0, i);
-}
-function longestCommonSuffix(str1, str2) {
-  var i;
-
-  // Unlike longestCommonPrefix, we need a special case to handle all scenarios
-  // where we return the empty string since str1.slice(-0) will return the
-  // entire string.
-  if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
-    return '';
-  }
-  for (i = 0; i < str1.length && i < str2.length; i++) {
-    if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
-      return str1.slice(-i);
-    }
-  }
-  return str1.slice(-i);
-}
-function replacePrefix(string, oldPrefix, newPrefix) {
-  if (string.slice(0, oldPrefix.length) != oldPrefix) {
-    throw Error(
-    /*istanbul ignore start*/
-    "string ".concat(
-    /*istanbul ignore end*/
-    JSON.stringify(string), " doesn't start with prefix ").concat(JSON.stringify(oldPrefix), "; this is a bug"));
-  }
-  return newPrefix + string.slice(oldPrefix.length);
-}
-function replaceSuffix(string, oldSuffix, newSuffix) {
-  if (!oldSuffix) {
-    return string + newSuffix;
-  }
-  if (string.slice(-oldSuffix.length) != oldSuffix) {
-    throw Error(
-    /*istanbul ignore start*/
-    "string ".concat(
-    /*istanbul ignore end*/
-    JSON.stringify(string), " doesn't end with suffix ").concat(JSON.stringify(oldSuffix), "; this is a bug"));
-  }
-  return string.slice(0, -oldSuffix.length) + newSuffix;
-}
-function removePrefix(string, oldPrefix) {
-  return replacePrefix(string, oldPrefix, '');
-}
-function removeSuffix(string, oldSuffix) {
-  return replaceSuffix(string, oldSuffix, '');
-}
-function maximumOverlap(string1, string2) {
-  return string2.slice(0, overlapCount(string1, string2));
-}
-
-// Nicked from https://stackoverflow.com/a/60422853/1709587
-function overlapCount(a, b) {
-  // Deal with cases where the strings differ in length
-  var startA = 0;
-  if (a.length > b.length) {
-    startA = a.length - b.length;
-  }
-  var endB = b.length;
-  if (a.length < b.length) {
-    endB = a.length;
-  }
-  // Create a back-reference for each index
-  //   that should be followed in case of a mismatch.
-  //   We only need B to make these references:
-  var map = Array(endB);
-  var k = 0; // Index that lags behind j
-  map[0] = 0;
-  for (var j = 1; j < endB; j++) {
-    if (b[j] == b[k]) {
-      map[j] = map[k]; // skip over the same character (optional optimisation)
-    } else {
-      map[j] = k;
-    }
-    while (k > 0 && b[j] != b[k]) {
-      k = map[k];
-    }
-    if (b[j] == b[k]) {
-      k++;
-    }
-  }
-  // Phase 2: use these references while iterating over A
-  k = 0;
-  for (var i = startA; i < a.length; i++) {
-    while (k > 0 && a[i] != b[k]) {
-      k = map[k];
-    }
-    if (a[i] == b[k]) {
-      k++;
-    }
-  }
-  return k;
-}
-
-/**
- * Returns true if the string consistently uses Windows line endings.
- */
-function hasOnlyWinLineEndings(string) {
-  return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
-}
-
-/**
- * Returns true if the string consistently uses Unix line endings.
- */
-function hasOnlyUnixLineEndings(string) {
-  return !string.includes('\r\n') && string.includes('\n');
-}
-//# sourceMappingURL=data:application/json;charset=utf-8;base64,{"version":3,"names":["longestCommonPrefix","str1","str2","i","length","slice","longestCommonSuffix","replacePrefix","string","oldPrefix","newPrefix","Error","concat","JSON","stringify","replaceSuffix","oldSuffix","newSuffix","removePrefix","removeSuffix","maximumOverlap","string1","string2","overlapCount","a","b","startA","endB","map","Array","k","j","hasOnlyWinLineEndings","includes","startsWith","match","hasOnlyUnixLineEndings"],"sources":["../../src/util/string.js"],"sourcesContent":["export function longestCommonPrefix(str1, str2) {\n  let i;\n  for (i = 0; i < str1.length && i < str2.length; i++) {\n    if (str1[i] != str2[i]) {\n      return str1.slice(0, i);\n    }\n  }\n  return str1.slice(0, i);\n}\n\nexport function longestCommonSuffix(str1, str2) {\n  let i;\n\n  // Unlike longestCommonPrefix, we need a special case to handle all scenarios\n  // where we return the empty string since str1.slice(-0) will return the\n  // entire string.\n  if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {\n    return '';\n  }\n\n  for (i = 0; i < str1.length && i < str2.length; i++) {\n    if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {\n      return str1.slice(-i);\n    }\n  }\n  return str1.slice(-i);\n}\n\nexport function replacePrefix(string, oldPrefix, newPrefix) {\n  if (string.slice(0, oldPrefix.length) != oldPrefix) {\n    throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`);\n  }\n  return newPrefix + string.slice(oldPrefix.length);\n}\n\nexport function replaceSuffix(string, oldSuffix, newSuffix) {\n  if (!oldSuffix) {\n    return string + newSuffix;\n  }\n\n  if (string.slice(-oldSuffix.length) != oldSuffix) {\n    throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`);\n  }\n  return string.slice(0, -oldSuffix.length) + newSuffix;\n}\n\nexport function removePrefix(string, oldPrefix) {\n  return replacePrefix(string, oldPrefix, '');\n}\n\nexport function removeSuffix(string, oldSuffix) {\n  return replaceSuffix(string, oldSuffix, '');\n}\n\nexport function maximumOverlap(string1, string2) {\n  return string2.slice(0, overlapCount(string1, string2));\n}\n\n// Nicked from https://stackoverflow.com/a/60422853/1709587\nfunction overlapCount(a, b) {\n  // Deal with cases where the strings differ in length\n  let startA = 0;\n  if (a.length > b.length) { startA = a.length - b.length; }\n  let endB = b.length;\n  if (a.length < b.length) { endB = a.length; }\n  // Create a back-reference for each index\n  //   that should be followed in case of a mismatch.\n  //   We only need B to make these references:\n  let map = Array(endB);\n  let k = 0; // Index that lags behind j\n  map[0] = 0;\n  for (let j = 1; j < endB; j++) {\n      if (b[j] == b[k]) {\n          map[j] = map[k]; // skip over the same character (optional optimisation)\n      } else {\n          map[j] = k;\n      }\n      while (k > 0 && b[j] != b[k]) { k = map[k]; }\n      if (b[j] == b[k]) { k++; }\n  }\n  // Phase 2: use these references while iterating over A\n  k = 0;\n  for (let i = startA; i < a.length; i++) {\n      while (k > 0 && a[i] != b[k]) { k = map[k]; }\n      if (a[i] == b[k]) { k++; }\n  }\n  return k;\n}\n\n\n/**\n * Returns true if the string consistently uses Windows line endings.\n */\nexport function hasOnlyWinLineEndings(string) {\n  return string.includes('\\r\\n') && 
!string.startsWith('\\n') && !string.match(/[^\\r]\\n/);\n}\n\n/**\n * Returns true if the string consistently uses Unix line endings.\n */\nexport function hasOnlyUnixLineEndings(string) {\n  return !string.includes('\\r\\n') && string.includes('\\n');\n}\n"],"mappings":";;;;;;;;;;;;;;;;AAAO,SAASA,mBAAmBA,CAACC,IAAI,EAAEC,IAAI,EAAE;EAC9C,IAAIC,CAAC;EACL,KAAKA,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,IAAI,CAACG,MAAM,IAAID,CAAC,GAAGD,IAAI,CAACE,MAAM,EAAED,CAAC,EAAE,EAAE;IACnD,IAAIF,IAAI,CAACE,CAAC,CAAC,IAAID,IAAI,CAACC,CAAC,CAAC,EAAE;MACtB,OAAOF,IAAI,CAACI,KAAK,CAAC,CAAC,EAAEF,CAAC,CAAC;IACzB;EACF;EACA,OAAOF,IAAI,CAACI,KAAK,CAAC,CAAC,EAAEF,CAAC,CAAC;AACzB;AAEO,SAASG,mBAAmBA,CAACL,IAAI,EAAEC,IAAI,EAAE;EAC9C,IAAIC,CAAC;;EAEL;EACA;EACA;EACA,IAAI,CAACF,IAAI,IAAI,CAACC,IAAI,IAAID,IAAI,CAACA,IAAI,CAACG,MAAM,GAAG,CAAC,CAAC,IAAIF,IAAI,CAACA,IAAI,CAACE,MAAM,GAAG,CAAC,CAAC,EAAE;IACpE,OAAO,EAAE;EACX;EAEA,KAAKD,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGF,IAAI,CAACG,MAAM,IAAID,CAAC,GAAGD,IAAI,CAACE,MAAM,EAAED,CAAC,EAAE,EAAE;IACnD,IAAIF,IAAI,CAACA,IAAI,CAACG,MAAM,IAAID,CAAC,GAAG,CAAC,CAAC,CAAC,IAAID,IAAI,CAACA,IAAI,CAACE,MAAM,IAAID,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE;MAC9D,OAAOF,IAAI,CAACI,KAAK,CAAC,CAACF,CAAC,CAAC;IACvB;EACF;EACA,OAAOF,IAAI,CAACI,KAAK,CAAC,CAACF,CAAC,CAAC;AACvB;AAEO,SAASI,aAAaA,CAACC,MAAM,EAAEC,SAAS,EAAEC,SAAS,EAAE;EAC1D,IAAIF,MAAM,CAACH,KAAK,CAAC,CAAC,EAAEI,SAAS,CAACL,MAAM,CAAC,IAAIK,SAAS,EAAE;IAClD,MAAME,KAAK;IAAA;IAAA,UAAAC,MAAA;IAAA;IAAWC,IAAI,CAACC,SAAS,CAACN,MAAM,CAAC,iCAAAI,MAAA,CAA8BC,IAAI,CAACC,SAAS,CAACL,SAAS,CAAC,oBAAiB,CAAC;EACvH;EACA,OAAOC,SAAS,GAAGF,MAAM,CAACH,KAAK,CAACI,SAAS,CAACL,MAAM,CAAC;AACnD;AAEO,SAASW,aAAaA,CAACP,MAAM,EAAEQ,SAAS,EAAEC,SAAS,EAAE;EAC1D,IAAI,CAACD,SAAS,EAAE;IACd,OAAOR,MAAM,GAAGS,SAAS;EAC3B;EAEA,IAAIT,MAAM,CAACH,KAAK,CAAC,CAACW,SAAS,CAACZ,MAAM,CAAC,IAAIY,SAAS,EAAE;IAChD,MAAML,KAAK;IAAA;IAAA,UAAAC,MAAA;IAAA;IAAWC,IAAI,CAACC,SAAS,CAACN,MAAM,CAAC,+BAAAI,MAAA,CAA4BC,IAAI,CAACC,SAAS,CAACE,SAAS,CAAC,oBAAiB,CAAC;EACrH;EACA,OAAOR,MAAM,CAACH,KAAK,CAAC,CAAC,EAAE,CAACW,SAAS,CAACZ,MAAM,CAAC,GAAGa,SAAS;AACvD;AAEO,SAASC,YAAYA,CAACV,MAAM,EAAEC,SAAS,EAAE;EAC9C,OAAOF,aAAa,CAACC,MAAM,EAAEC,SAAS,EAAE,EAAE,CAAC;AAC7C;AAEO,SAASU,YAAYA,CAACX,MAAM,EAAEQ,SAAS,EAAE;EAC9C,OAAOD,aAAa,CAACP,MAAM,EAAEQ,SAAS,EAAE,EAAE,CAAC;AAC7C;AAEO,SAASI,cAAcA,CAACC,OAAO,EAAEC,OAAO,EAAE;EAC/C,OAAOA,OAAO,CAACjB,KAAK,CAAC,CAAC,EAAEkB,YAAY,CAACF,OAAO,EAAEC,OAAO,CAAC,CAAC;AACzD;;AAEA;AACA,SAASC,YAAYA,CAACC,CAAC,EAAEC,CAAC,EAAE;EAC1B;EACA,IAAIC,MAAM,GAAG,CAAC;EACd,IAAIF,CAAC,CAACpB,MAAM,GAAGqB,CAAC,CAACrB,MAAM,EAAE;IAAEsB,MAAM,GAAGF,CAAC,CAACpB,MAAM,GAAGqB,CAAC,CAACrB,MAAM;EAAE;EACzD,IAAIuB,IAAI,GAAGF,CAAC,CAACrB,MAAM;EACnB,IAAIoB,CAAC,CAACpB,MAAM,GAAGqB,CAAC,CAACrB,MAAM,EAAE;IAAEuB,IAAI,GAAGH,CAAC,CAACpB,MAAM;EAAE;EAC5C;EACA;EACA;EACA,IAAIwB,GAAG,GAAGC,KAAK,CAACF,IAAI,CAAC;EACrB,IAAIG,CAAC,GAAG,CAAC,CAAC,CAAC;EACXF,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC;EACV,KAAK,IAAIG,CAAC,GAAG,CAAC,EAAEA,CAAC,GAAGJ,IAAI,EAAEI,CAAC,EAAE,EAAE;IAC3B,IAAIN,CAAC,CAACM,CAAC,CAAC,IAAIN,CAAC,CAACK,CAAC,CAAC,EAAE;MACdF,GAAG,CAACG,CAAC,CAAC,GAAGH,GAAG,CAACE,CAAC,CAAC,CAAC,CAAC;IACrB,CAAC,MAAM;MACHF,GAAG,CAACG,CAAC,CAAC,GAAGD,CAAC;IACd;IACA,OAAOA,CAAC,GAAG,CAAC,IAAIL,CAAC,CAACM,CAAC,CAAC,IAAIN,CAAC,CAACK,CAAC,CAAC,EAAE;MAAEA,CAAC,GAAGF,GAAG,CAACE,CAAC,CAAC;IAAE;IAC5C,IAAIL,CAAC,CAACM,CAAC,CAAC,IAAIN,CAAC,CAACK,CAAC,CAAC,EAAE;MAAEA,CAAC,EAAE;IAAE;EAC7B;EACA;EACAA,CAAC,GAAG,CAAC;EACL,KAAK,IAAI3B,CAAC,GAAGuB,MAAM,EAAEvB,CAAC,GAAGqB,CAAC,CAACpB,MAAM,EAAED,CAAC,EAAE,EAAE;IACpC,OAAO2B,CAAC,GAAG,CAAC,IAAIN,CAAC,CAACrB,CAAC,CAAC,IAAIsB,CAAC,CAACK,CAAC,CAAC,EAAE;MAAEA,CAA
C,GAAGF,GAAG,CAACE,CAAC,CAAC;IAAE;IAC5C,IAAIN,CAAC,CAACrB,CAAC,CAAC,IAAIsB,CAAC,CAACK,CAAC,CAAC,EAAE;MAAEA,CAAC,EAAE;IAAE;EAC7B;EACA,OAAOA,CAAC;AACV;;AAGA;AACA;AACA;AACO,SAASE,qBAAqBA,CAACxB,MAAM,EAAE;EAC5C,OAAOA,MAAM,CAACyB,QAAQ,CAAC,MAAM,CAAC,IAAI,CAACzB,MAAM,CAAC0B,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC1B,MAAM,CAAC2B,KAAK,CAAC,SAAS,CAAC;AACxF;;AAEA;AACA;AACA;AACO,SAASC,sBAAsBA,CAAC5B,MAAM,EAAE;EAC7C,OAAO,CAACA,MAAM,CAACyB,QAAQ,CAAC,MAAM,CAAC,IAAIzB,MAAM,CAACyB,QAAQ,CAAC,IAAI,CAAC;AAC1D","ignoreList":[]}
diff --git a/node_modules/diff/libcjs/convert/dmp.js b/node_modules/diff/libcjs/convert/dmp.js
new file mode 100644
index 0000000000000..10680ff38801f
--- /dev/null
+++ b/node_modules/diff/libcjs/convert/dmp.js
@@ -0,0 +1,24 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.convertChangesToDMP = convertChangesToDMP;
+/**
+ * converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library
+ */
+function convertChangesToDMP(changes) {
+    var ret = [];
+    var change, operation;
+    for (var i = 0; i < changes.length; i++) {
+        change = changes[i];
+        if (change.added) {
+            operation = 1;
+        }
+        else if (change.removed) {
+            operation = -1;
+        }
+        else {
+            operation = 0;
+        }
+        ret.push([operation, change.value]);
+    }
+    return ret;
+}
diff --git a/node_modules/diff/libcjs/convert/xml.js b/node_modules/diff/libcjs/convert/xml.js
new file mode 100644
index 0000000000000..5ecd8aa255b86
--- /dev/null
+++ b/node_modules/diff/libcjs/convert/xml.js
@@ -0,0 +1,34 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.convertChangesToXML = convertChangesToXML;
+/**
+ * converts a list of change objects to a serialized XML format
+ */
+function convertChangesToXML(changes) {
+    var ret = [];
+    for (var i = 0; i < changes.length; i++) {
+        var change = changes[i];
+        if (change.added) {
+            ret.push('<ins>');
+        }
+        else if (change.removed) {
+            ret.push('<del>');
+        }
+        ret.push(escapeHTML(change.value));
+        if (change.added) {
+            ret.push('</ins>');
+        }
+        else if (change.removed) {
+            ret.push('</del>');
+        }
+    }
+    return ret.join('');
+}
+function escapeHTML(s) {
+    var n = s;
+    n = n.replace(/&/g, '&amp;');
+    n = n.replace(/</g, '&lt;');
+    n = n.replace(/>/g, '&gt;');
+    n = n.replace(/"/g, '&quot;');
+    return n;
+}
diff --git a/node_modules/diff/libcjs/diff/array.js b/node_modules/diff/libcjs/diff/array.js
new file mode 100644
index 0000000000000..2050261be823f
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/array.js
@@ -0,0 +1,40 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.arrayDiff = void 0;
+exports.diffArrays = diffArrays;
+var base_js_1 = require("./base.js");
+var ArrayDiff = /** @class */ (function (_super) {
+    __extends(ArrayDiff, _super);
+    function ArrayDiff() {
+        return _super !== null && _super.apply(this, arguments) || this;
+    }
+    ArrayDiff.prototype.tokenize = function (value) {
+        return value.slice();
+    };
+    ArrayDiff.prototype.join = function (value) {
+        return value;
+    };
+    ArrayDiff.prototype.removeEmpty = function (value) {
+        return value;
+    };
+    return ArrayDiff;
+}(base_js_1.default));
+exports.arrayDiff = new ArrayDiff();
+function diffArrays(oldArr, newArr, options) {
+    return exports.arrayDiff.diff(oldArr, newArr, options);
+}
diff --git a/node_modules/diff/libcjs/diff/base.js b/node_modules/diff/libcjs/diff/base.js
new file mode 100644
index 0000000000000..5248d95693009
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/base.js
@@ -0,0 +1,265 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+var Diff = /** @class */ (function () {
+    function Diff() {
+    }
+    Diff.prototype.diff = function (oldStr, newStr, 
+    // Type below is not accurate/complete - see above for full possibilities - but it compiles
+    options) {
+        if (options === void 0) { options = {}; }
+        var callback;
+        if (typeof options === 'function') {
+            callback = options;
+            options = {};
+        }
+        else if ('callback' in options) {
+            callback = options.callback;
+        }
+        // Allow subclasses to massage the input prior to running
+        var oldString = this.castInput(oldStr, options);
+        var newString = this.castInput(newStr, options);
+        var oldTokens = this.removeEmpty(this.tokenize(oldString, options));
+        var newTokens = this.removeEmpty(this.tokenize(newString, options));
+        return this.diffWithOptionsObj(oldTokens, newTokens, options, callback);
+    };
+    Diff.prototype.diffWithOptionsObj = function (oldTokens, newTokens, options, callback) {
+        var _this = this;
+        var _a;
+        var done = function (value) {
+            value = _this.postProcess(value, options);
+            if (callback) {
+                setTimeout(function () { callback(value); }, 0);
+                return undefined;
+            }
+            else {
+                return value;
+            }
+        };
+        var newLen = newTokens.length, oldLen = oldTokens.length;
+        var editLength = 1;
+        var maxEditLength = newLen + oldLen;
+        if (options.maxEditLength != null) {
+            maxEditLength = Math.min(maxEditLength, options.maxEditLength);
+        }
+        var maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity;
+        var abortAfterTimestamp = Date.now() + maxExecutionTime;
+        var bestPath = [{ oldPos: -1, lastComponent: undefined }];
+        // Seed editLength = 0, i.e. the content starts with the same values
+        var newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options);
+        if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
+            // Identity per the equality and tokenizer
+            return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens));
+        }
+        // Once we hit the right edge of the edit graph on some diagonal k, we can
+        // definitely reach the end of the edit graph in no more than k edits, so
+        // there's no point in considering any moves to diagonal k+1 any more (from
+        // which we're guaranteed to need at least k+1 more edits).
+        // Similarly, once we've reached the bottom of the edit graph, there's no
+        // point considering moves to lower diagonals.
+        // We record this fact by setting minDiagonalToConsider and
+        // maxDiagonalToConsider to some finite value once we've hit the edge of
+        // the edit graph.
+        // This optimization is not faithful to the original algorithm presented in
+        // Myers's paper, which instead pointlessly extends D-paths off the end of
+        // the edit graph - see page 7 of Myers's paper which notes this point
+        // explicitly and illustrates it with a diagram. This has major performance
+        // implications for some common scenarios. For instance, to compute a diff
+        // where the new text simply appends d characters on the end of the
+        // original text of length n, the true Myers algorithm will take O(n+d^2)
+        // time while this optimization needs only O(n+d) time.
+        var minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity;
+        // Main worker method. checks all permutations of a given edit length for acceptance.
+        var execEditLength = function () {
+            for (var diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
+                var basePath = void 0;
+                var removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1];
+                if (removePath) {
+                    // No one else is going to attempt to use this value, clear it
+                    // @ts-expect-error - perf optimisation. This type-violating value will never be read.
+                    bestPath[diagonalPath - 1] = undefined;
+                }
+                var canAdd = false;
+                if (addPath) {
+                    // what newPos will be after we do an insertion:
+                    var addPathNewPos = addPath.oldPos - diagonalPath;
+                    canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
+                }
+                var canRemove = removePath && removePath.oldPos + 1 < oldLen;
+                if (!canAdd && !canRemove) {
+                    // If this path is a terminal then prune
+                    // @ts-expect-error - perf optimisation. This type-violating value will never be read.
+                    bestPath[diagonalPath] = undefined;
+                    continue;
+                }
+                // Select the diagonal that we want to branch from. We select the prior
+                // path whose position in the old string is the farthest from the origin
+                // and does not pass the bounds of the diff graph
+                if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) {
+                    basePath = _this.addToPath(addPath, true, false, 0, options);
+                }
+                else {
+                    basePath = _this.addToPath(removePath, false, true, 1, options);
+                }
+                newPos = _this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options);
+                if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
+                    // If we have hit the end of both strings, then we are done
+                    return done(_this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true;
+                }
+                else {
+                    bestPath[diagonalPath] = basePath;
+                    if (basePath.oldPos + 1 >= oldLen) {
+                        maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
+                    }
+                    if (newPos + 1 >= newLen) {
+                        minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
+                    }
+                }
+            }
+            editLength++;
+        };
+        // Performs the length of edit iteration. Is a bit fugly as this has to support the
+        // sync and async mode which is never fun. Loops over execEditLength until a value
+        // is produced, or until the edit length exceeds options.maxEditLength (if given),
+        // in which case it will return undefined.
+        if (callback) {
+            (function exec() {
+                setTimeout(function () {
+                    if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
+                        return callback(undefined);
+                    }
+                    if (!execEditLength()) {
+                        exec();
+                    }
+                }, 0);
+            }());
+        }
+        else {
+            while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
+                var ret = execEditLength();
+                if (ret) {
+                    return ret;
+                }
+            }
+        }
+    };
+    Diff.prototype.addToPath = function (path, added, removed, oldPosInc, options) {
+        var last = path.lastComponent;
+        if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
+            return {
+                oldPos: path.oldPos + oldPosInc,
+                lastComponent: { count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent }
+            };
+        }
+        else {
+            return {
+                oldPos: path.oldPos + oldPosInc,
+                lastComponent: { count: 1, added: added, removed: removed, previousComponent: last }
+            };
+        }
+    };
+    Diff.prototype.extractCommon = function (basePath, newTokens, oldTokens, diagonalPath, options) {
+        var newLen = newTokens.length, oldLen = oldTokens.length;
+        var oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0;
+        while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) {
+            newPos++;
+            oldPos++;
+            commonCount++;
+            if (options.oneChangePerToken) {
+                basePath.lastComponent = { count: 1, previousComponent: basePath.lastComponent, added: false, removed: false };
+            }
+        }
+        if (commonCount && !options.oneChangePerToken) {
+            basePath.lastComponent = { count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false };
+        }
+        basePath.oldPos = oldPos;
+        return newPos;
+    };
+    Diff.prototype.equals = function (left, right, options) {
+        if (options.comparator) {
+            return options.comparator(left, right);
+        }
+        else {
+            return left === right
+                || (!!options.ignoreCase && left.toLowerCase() === right.toLowerCase());
+        }
+    };
+    Diff.prototype.removeEmpty = function (array) {
+        var ret = [];
+        for (var i = 0; i < array.length; i++) {
+            if (array[i]) {
+                ret.push(array[i]);
+            }
+        }
+        return ret;
+    };
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    Diff.prototype.castInput = function (value, options) {
+        return value;
+    };
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    Diff.prototype.tokenize = function (value, options) {
+        return Array.from(value);
+    };
+    Diff.prototype.join = function (chars) {
+        // Assumes ValueT is string, which is the case for most subclasses.
+        // When it's false, e.g. in diffArrays, this method needs to be overridden (e.g. with a no-op)
+        // Yes, the casts are verbose and ugly, because this pattern - of having the base class SORT OF
+        // assume tokens and values are strings, but not completely - is weird and janky.
+        return chars.join('');
+    };
+    Diff.prototype.postProcess = function (changeObjects, 
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    options) {
+        return changeObjects;
+    };
+    Object.defineProperty(Diff.prototype, "useLongestToken", {
+        get: function () {
+            return false;
+        },
+        enumerable: false,
+        configurable: true
+    });
+    Diff.prototype.buildValues = function (lastComponent, newTokens, oldTokens) {
+        // First we convert our linked list of components in reverse order to an
+        // array in the right order:
+        var components = [];
+        var nextComponent;
+        while (lastComponent) {
+            components.push(lastComponent);
+            nextComponent = lastComponent.previousComponent;
+            delete lastComponent.previousComponent;
+            lastComponent = nextComponent;
+        }
+        components.reverse();
+        var componentLen = components.length;
+        var componentPos = 0, newPos = 0, oldPos = 0;
+        for (; componentPos < componentLen; componentPos++) {
+            var component = components[componentPos];
+            if (!component.removed) {
+                if (!component.added && this.useLongestToken) {
+                    var value = newTokens.slice(newPos, newPos + component.count);
+                    value = value.map(function (value, i) {
+                        var oldValue = oldTokens[oldPos + i];
+                        return oldValue.length > value.length ? oldValue : value;
+                    });
+                    component.value = this.join(value);
+                }
+                else {
+                    component.value = this.join(newTokens.slice(newPos, newPos + component.count));
+                }
+                newPos += component.count;
+                // Common case
+                if (!component.added) {
+                    oldPos += component.count;
+                }
+            }
+            else {
+                component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count));
+                oldPos += component.count;
+            }
+        }
+        return components;
+    };
+    return Diff;
+}());
+exports.default = Diff;
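
The base class above is meant to be specialized by overriding tokenize, join, and equals, exactly as the concrete diff modules that follow do. As a sketch, a hypothetical CsvDiff (not part of the package) built on the vendored export, assuming require('diff') resolves to this build:

    const { Diff } = require('diff');

    // Compare CSV rows field by field by tokenizing on commas
    // (the capture group keeps the separators so join() can reassemble the text).
    class CsvDiff extends Diff {
      tokenize(value) {
        return value.split(/(,)/);
      }
    }

    const changes = new CsvDiff().diff('a,b,c', 'a,x,c');
    // => change objects ({ value, count, added, removed }) built by buildValues above
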
diff --git a/node_modules/diff/libcjs/diff/character.js b/node_modules/diff/libcjs/diff/character.js
new file mode 100644
index 0000000000000..8e974ef9ad551
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/character.js
@@ -0,0 +1,31 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.characterDiff = void 0;
+exports.diffChars = diffChars;
+var base_js_1 = require("./base.js");
+var CharacterDiff = /** @class */ (function (_super) {
+    __extends(CharacterDiff, _super);
+    function CharacterDiff() {
+        return _super !== null && _super.apply(this, arguments) || this;
+    }
+    return CharacterDiff;
+}(base_js_1.default));
+exports.characterDiff = new CharacterDiff();
+function diffChars(oldStr, newStr, options) {
+    return exports.characterDiff.diff(oldStr, newStr, options);
+}
diff --git a/node_modules/diff/libcjs/diff/css.js b/node_modules/diff/libcjs/diff/css.js
new file mode 100644
index 0000000000000..45c5559c00cc1
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/css.js
@@ -0,0 +1,34 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.cssDiff = void 0;
+exports.diffCss = diffCss;
+var base_js_1 = require("./base.js");
+var CssDiff = /** @class */ (function (_super) {
+    __extends(CssDiff, _super);
+    function CssDiff() {
+        return _super !== null && _super.apply(this, arguments) || this;
+    }
+    CssDiff.prototype.tokenize = function (value) {
+        return value.split(/([{}:;,]|\s+)/);
+    };
+    return CssDiff;
+}(base_js_1.default));
+exports.cssDiff = new CssDiff();
+function diffCss(oldStr, newStr, options) {
+    return exports.cssDiff.diff(oldStr, newStr, options);
+}
diff --git a/node_modules/diff/libcjs/diff/json.js b/node_modules/diff/libcjs/diff/json.js
new file mode 100644
index 0000000000000..15f942b4b9168
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/json.js
@@ -0,0 +1,105 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.jsonDiff = void 0;
+exports.diffJson = diffJson;
+exports.canonicalize = canonicalize;
+var base_js_1 = require("./base.js");
+var line_js_1 = require("./line.js");
+var JsonDiff = /** @class */ (function (_super) {
+    __extends(JsonDiff, _super);
+    function JsonDiff() {
+        var _this = _super !== null && _super.apply(this, arguments) || this;
+        _this.tokenize = line_js_1.tokenize;
+        return _this;
+    }
+    Object.defineProperty(JsonDiff.prototype, "useLongestToken", {
+        get: function () {
+            // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
+            // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
+            return true;
+        },
+        enumerable: false,
+        configurable: true
+    });
+    JsonDiff.prototype.castInput = function (value, options) {
+        var undefinedReplacement = options.undefinedReplacement, _a = options.stringifyReplacer, stringifyReplacer = _a === void 0 ? function (k, v) { return typeof v === 'undefined' ? undefinedReplacement : v; } : _a;
+        return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, '  ');
+    };
+    JsonDiff.prototype.equals = function (left, right, options) {
+        return _super.prototype.equals.call(this, left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options);
+    };
+    return JsonDiff;
+}(base_js_1.default));
+exports.jsonDiff = new JsonDiff();
+function diffJson(oldStr, newStr, options) {
+    return exports.jsonDiff.diff(oldStr, newStr, options);
+}
+// This function handles the presence of circular references by bailing out when encountering an
+// object that is already on the "stack" of items being processed. Accepts an optional replacer
+function canonicalize(obj, stack, replacementStack, replacer, key) {
+    stack = stack || [];
+    replacementStack = replacementStack || [];
+    if (replacer) {
+        obj = replacer(key === undefined ? '' : key, obj);
+    }
+    var i;
+    for (i = 0; i < stack.length; i += 1) {
+        if (stack[i] === obj) {
+            return replacementStack[i];
+        }
+    }
+    var canonicalizedObj;
+    if ('[object Array]' === Object.prototype.toString.call(obj)) {
+        stack.push(obj);
+        canonicalizedObj = new Array(obj.length);
+        replacementStack.push(canonicalizedObj);
+        for (i = 0; i < obj.length; i += 1) {
+            canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, String(i));
+        }
+        stack.pop();
+        replacementStack.pop();
+        return canonicalizedObj;
+    }
+    if (obj && obj.toJSON) {
+        obj = obj.toJSON();
+    }
+    if (typeof obj === 'object' && obj !== null) {
+        stack.push(obj);
+        canonicalizedObj = {};
+        replacementStack.push(canonicalizedObj);
+        var sortedKeys = [];
+        var key_1;
+        for (key_1 in obj) {
+            /* istanbul ignore else */
+            if (Object.prototype.hasOwnProperty.call(obj, key_1)) {
+                sortedKeys.push(key_1);
+            }
+        }
+        sortedKeys.sort();
+        for (i = 0; i < sortedKeys.length; i += 1) {
+            key_1 = sortedKeys[i];
+            canonicalizedObj[key_1] = canonicalize(obj[key_1], stack, replacementStack, replacer, key_1);
+        }
+        stack.pop();
+        replacementStack.pop();
+    }
+    else {
+        canonicalizedObj = obj;
+    }
+    return canonicalizedObj;
+}
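
A brief sketch of how the JSON diff above is typically consumed, assuming the vendored build resolves as require('diff'); the expected output is approximate:

    const { diffJson } = require('diff');

    // Inputs are canonicalized (keys sorted, circular references handled) and
    // pretty-printed, then compared with the line tokenizer.
    const changes = diffJson({ a: 1, b: 2 }, { b: 2, a: 1, c: 3 });
    console.log(changes.filter(part => part.added).map(part => part.value));
    // expected: [ '  "c": 3\n' ]  (key order alone produces no changes)
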
diff --git a/node_modules/diff/libcjs/diff/line.js b/node_modules/diff/libcjs/diff/line.js
new file mode 100644
index 0000000000000..8f4a1f412c171
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/line.js
@@ -0,0 +1,89 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.lineDiff = void 0;
+exports.diffLines = diffLines;
+exports.diffTrimmedLines = diffTrimmedLines;
+exports.tokenize = tokenize;
+var base_js_1 = require("./base.js");
+var params_js_1 = require("../util/params.js");
+var LineDiff = /** @class */ (function (_super) {
+    __extends(LineDiff, _super);
+    function LineDiff() {
+        var _this = _super !== null && _super.apply(this, arguments) || this;
+        _this.tokenize = tokenize;
+        return _this;
+    }
+    LineDiff.prototype.equals = function (left, right, options) {
+        // If we're ignoring whitespace, we need to normalise lines by stripping
+        // whitespace before checking equality. (This has an annoying interaction
+        // with newlineIsToken that requires special handling: if newlines get their
+        // own token, then we DON'T want to trim the *newline* tokens down to empty
+        // strings, since this would cause us to treat whitespace-only line content
+        // as equal to a separator between lines, which would be weird and
+        // inconsistent with the documented behavior of the options.)
+        if (options.ignoreWhitespace) {
+            if (!options.newlineIsToken || !left.includes('\n')) {
+                left = left.trim();
+            }
+            if (!options.newlineIsToken || !right.includes('\n')) {
+                right = right.trim();
+            }
+        }
+        else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
+            if (left.endsWith('\n')) {
+                left = left.slice(0, -1);
+            }
+            if (right.endsWith('\n')) {
+                right = right.slice(0, -1);
+            }
+        }
+        return _super.prototype.equals.call(this, left, right, options);
+    };
+    return LineDiff;
+}(base_js_1.default));
+exports.lineDiff = new LineDiff();
+function diffLines(oldStr, newStr, options) {
+    return exports.lineDiff.diff(oldStr, newStr, options);
+}
+function diffTrimmedLines(oldStr, newStr, options) {
+    options = (0, params_js_1.generateOptions)(options, { ignoreWhitespace: true });
+    return exports.lineDiff.diff(oldStr, newStr, options);
+}
+// Exported standalone so it can be used from jsonDiff too.
+function tokenize(value, options) {
+    if (options.stripTrailingCr) {
+        // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
+        value = value.replace(/\r\n/g, '\n');
+    }
+    var retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/);
+    // Ignore the final empty token that occurs if the string ends with a new line
+    if (!linesAndNewlines[linesAndNewlines.length - 1]) {
+        linesAndNewlines.pop();
+    }
+    // Merge the content and line separators into single tokens
+    for (var i = 0; i < linesAndNewlines.length; i++) {
+        var line = linesAndNewlines[i];
+        if (i % 2 && !options.newlineIsToken) {
+            retLines[retLines.length - 1] += line;
+        }
+        else {
+            retLines.push(line);
+        }
+    }
+    return retLines;
+}
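
For reference, a minimal sketch of consuming the line diff above, assuming require('diff') resolves to this vendored build:

    const { diffLines } = require('diff');

    for (const part of diffLines('one\ntwo\nthree\n', 'one\n2\nthree\n')) {
      // each part.value holds one or more complete lines, newlines included
      const prefix = part.added ? '+' : part.removed ? '-' : ' ';
      for (const line of part.value.split('\n').filter(Boolean)) {
        console.log(prefix, line);
      }
    }
    // prints roughly:  ' one', '- two', '+ 2', ' three'
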
diff --git a/node_modules/diff/libcjs/diff/sentence.js b/node_modules/diff/libcjs/diff/sentence.js
new file mode 100644
index 0000000000000..dac837fbdc90a
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/sentence.js
@@ -0,0 +1,67 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sentenceDiff = void 0;
+exports.diffSentences = diffSentences;
+var base_js_1 = require("./base.js");
+function isSentenceEndPunct(char) {
+    return char == '.' || char == '!' || char == '?';
+}
+var SentenceDiff = /** @class */ (function (_super) {
+    __extends(SentenceDiff, _super);
+    function SentenceDiff() {
+        return _super !== null && _super.apply(this, arguments) || this;
+    }
+    SentenceDiff.prototype.tokenize = function (value) {
+        var _a;
+        // If in future we drop support for environments that don't support lookbehinds, we can replace
+        // this entire function with:
+        //     return value.split(/(?<=[.!?])(\s+|$)/);
+        // but until then, for similar reasons to the trailingWs function in string.ts, we are forced
+        // to do this verbosely "by hand" instead of using a regex.
+        var result = [];
+        var tokenStartI = 0;
+        for (var i = 0; i < value.length; i++) {
+            if (i == value.length - 1) {
+                result.push(value.slice(tokenStartI));
+                break;
+            }
+            if (isSentenceEndPunct(value[i]) && value[i + 1].match(/\s/)) {
+                // We've hit a sentence break - i.e. a punctuation mark followed by whitespace.
+                // We now want to push TWO tokens to the result:
+                // 1. the sentence
+                result.push(value.slice(tokenStartI, i + 1));
+                // 2. the whitespace
+                i = tokenStartI = i + 1;
+                while ((_a = value[i + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) {
+                    i++;
+                }
+                result.push(value.slice(tokenStartI, i + 1));
+                // Then the next token (a sentence) starts on the character after the whitespace.
+                // (It's okay if this is off the end of the string - then the outer loop will terminate
+                // here anyway.)
+                tokenStartI = i + 1;
+            }
+        }
+        return result;
+    };
+    return SentenceDiff;
+}(base_js_1.default));
+exports.sentenceDiff = new SentenceDiff();
+function diffSentences(oldStr, newStr, options) {
+    return exports.sentenceDiff.diff(oldStr, newStr, options);
+}
diff --git a/node_modules/diff/libcjs/diff/word.js b/node_modules/diff/libcjs/diff/word.js
new file mode 100644
index 0000000000000..8c76eb2691a64
--- /dev/null
+++ b/node_modules/diff/libcjs/diff/word.js
@@ -0,0 +1,307 @@
+"use strict";
+var __extends = (this && this.__extends) || (function () {
+    var extendStatics = function (d, b) {
+        extendStatics = Object.setPrototypeOf ||
+            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
+            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+        return extendStatics(d, b);
+    };
+    return function (d, b) {
+        if (typeof b !== "function" && b !== null)
+            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        extendStatics(d, b);
+        function __() { this.constructor = d; }
+        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
+    };
+})();
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.wordsWithSpaceDiff = exports.wordDiff = void 0;
+exports.diffWords = diffWords;
+exports.diffWordsWithSpace = diffWordsWithSpace;
+var base_js_1 = require("./base.js");
+var string_js_1 = require("../util/string.js");
+// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
+//
+// Ranges and exceptions:
+// Latin-1 Supplement, 0080–00FF
+//  - U+00D7  × Multiplication sign
+//  - U+00F7  ÷ Division sign
+// Latin Extended-A, 0100–017F
+// Latin Extended-B, 0180–024F
+// IPA Extensions, 0250–02AF
+// Spacing Modifier Letters, 02B0–02FF
+//  - U+02C7  ˇ ˇ  Caron
+//  - U+02D8  ˘ ˘  Breve
+//  - U+02D9  ˙ ˙  Dot Above
+//  - U+02DA  ˚ ˚  Ring Above
+//  - U+02DB  ˛ ˛  Ogonek
+//  - U+02DC  ˜ ˜  Small Tilde
+//  - U+02DD  ˝ ˝  Double Acute Accent
+// Latin Extended Additional, 1E00–1EFF
+var extendedWordChars = 'a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}';
+// Each token is one of the following:
+// - A punctuation mark plus the surrounding whitespace
+// - A word plus the surrounding whitespace
+// - Pure whitespace (but only in the special case where this is the entire text
+//   is just whitespace)
+//
+// We have to include surrounding whitespace in the tokens because the two
+// alternative approaches produce horribly broken results:
+// * If we just discard the whitespace, we can't fully reproduce the original
+//   text from the sequence of tokens and any attempt to render the diff will
+//   get the whitespace wrong.
+// * If we have separate tokens for whitespace, then in a typical text every
+//   second token will be a single space character. But this often results in
+//   the optimal diff between two texts being a perverse one that preserves
+//   the spaces between words but deletes and reinserts actual common words.
+//   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
+//   for an example.
+//
+// Keeping the surrounding whitespace of course has implications for .equals
+// and .join, not just .tokenize.
+// This regex does NOT fully implement the tokenization rules described above.
+// Instead, it gives runs of whitespace their own "token". The tokenize method
+// then handles stitching whitespace tokens onto adjacent word or punctuation
+// tokens.
+var tokenizeIncludingWhitespace = new RegExp("[".concat(extendedWordChars, "]+|\\s+|[^").concat(extendedWordChars, "]"), 'ug');
+var WordDiff = /** @class */ (function (_super) {
+    __extends(WordDiff, _super);
+    function WordDiff() {
+        return _super !== null && _super.apply(this, arguments) || this;
+    }
+    WordDiff.prototype.equals = function (left, right, options) {
+        if (options.ignoreCase) {
+            left = left.toLowerCase();
+            right = right.toLowerCase();
+        }
+        return left.trim() === right.trim();
+    };
+    WordDiff.prototype.tokenize = function (value, options) {
+        if (options === void 0) { options = {}; }
+        var parts;
+        if (options.intlSegmenter) {
+            var segmenter = options.intlSegmenter;
+            if (segmenter.resolvedOptions().granularity != 'word') {
+                throw new Error('The segmenter passed must have a granularity of "word"');
+            }
+            parts = Array.from(segmenter.segment(value), function (segment) { return segment.segment; });
+        }
+        else {
+            parts = value.match(tokenizeIncludingWhitespace) || [];
+        }
+        var tokens = [];
+        var prevPart = null;
+        parts.forEach(function (part) {
+            if ((/\s/).test(part)) {
+                if (prevPart == null) {
+                    tokens.push(part);
+                }
+                else {
+                    tokens.push(tokens.pop() + part);
+                }
+            }
+            else if (prevPart != null && (/\s/).test(prevPart)) {
+                if (tokens[tokens.length - 1] == prevPart) {
+                    tokens.push(tokens.pop() + part);
+                }
+                else {
+                    tokens.push(prevPart + part);
+                }
+            }
+            else {
+                tokens.push(part);
+            }
+            prevPart = part;
+        });
+        return tokens;
+    };
+    WordDiff.prototype.join = function (tokens) {
+        // Tokens being joined here will always have appeared consecutively in the
+        // same text, so we can simply strip off the leading whitespace from all the
+        // tokens except the first (and except any whitespace-only tokens - but such
+        // a token will always be the first and only token anyway) and then join them
+        // and the whitespace around words and punctuation will end up correct.
+        return tokens.map(function (token, i) {
+            if (i == 0) {
+                return token;
+            }
+            else {
+                return token.replace((/^\s+/), '');
+            }
+        }).join('');
+    };
+    WordDiff.prototype.postProcess = function (changes, options) {
+        if (!changes || options.oneChangePerToken) {
+            return changes;
+        }
+        var lastKeep = null;
+        // Change objects representing any insertion or deletion since the last
+        // "keep" change object. There can be at most one of each.
+        var insertion = null;
+        var deletion = null;
+        changes.forEach(function (change) {
+            if (change.added) {
+                insertion = change;
+            }
+            else if (change.removed) {
+                deletion = change;
+            }
+            else {
+                if (insertion || deletion) { // May be false at start of text
+                    dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
+                }
+                lastKeep = change;
+                insertion = null;
+                deletion = null;
+            }
+        });
+        if (insertion || deletion) {
+            dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
+        }
+        return changes;
+    };
+    return WordDiff;
+}(base_js_1.default));
+exports.wordDiff = new WordDiff();
+function diffWords(oldStr, newStr, options) {
+    // This option has never been documented and never will be (it's clearer to
+    // just call `diffWordsWithSpace` directly if you need that behavior), but
+    // has existed in jsdiff for a long time, so we retain support for it here
+    // for the sake of backwards compatibility.
+    if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
+        return diffWordsWithSpace(oldStr, newStr, options);
+    }
+    return exports.wordDiff.diff(oldStr, newStr, options);
+}
+function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
+    // Before returning, we tidy up the leading and trailing whitespace of the
+    // change objects to eliminate cases where trailing whitespace in one object
+    // is repeated as leading whitespace in the next.
+    // Below are examples of the outcomes we want here to explain the code.
+    // I=insert, K=keep, D=delete
+    // 1. diffing 'foo bar baz' vs 'foo baz'
+    //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
+    //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
+    //
+    // 2. Diffing 'foo bar baz' vs 'foo qux baz'
+    //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
+    //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
+    //
+    // 3. Diffing 'foo\nbar baz' vs 'foo baz'
+    //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
+    //    After cleanup, we want K'foo' D:'\nbar' K:' baz'
+    //
+    // 4. Diffing 'foo baz' vs 'foo\nbar baz'
+    //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
+    //    After cleanup, we ideally want K'foo' I:'\nbar' K:' baz'
+    //    but don't actually manage this currently (the pre-cleanup change
+    //    objects don't contain enough information to make it possible).
+    //
+    // 5. Diffing 'foo   bar baz' vs 'foo  baz'
+    //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
+    //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
+    //
+    // Our handling is unavoidably imperfect in the case where there's a single
+    // indel between keeps and the whitespace has changed. For instance, consider
+    // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
+    // object to represent the insertion of the space character (which isn't even
+    // a token), we have no way to avoid losing information about the texts'
+    // original whitespace in the result we return. Still, we do our best to
+    // output something that will look sensible if we e.g. print it with
+    // insertions in green and deletions in red.
+    // Between two "keep" change objects (or before the first or after the last
+    // change object), we can have either:
+    // * A "delete" followed by an "insert"
+    // * Just an "insert"
+    // * Just a "delete"
+    // We handle the three cases separately.
+    if (deletion && insertion) {
+        var oldWsPrefix = (0, string_js_1.leadingWs)(deletion.value);
+        var oldWsSuffix = (0, string_js_1.trailingWs)(deletion.value);
+        var newWsPrefix = (0, string_js_1.leadingWs)(insertion.value);
+        var newWsSuffix = (0, string_js_1.trailingWs)(insertion.value);
+        if (startKeep) {
+            var commonWsPrefix = (0, string_js_1.longestCommonPrefix)(oldWsPrefix, newWsPrefix);
+            startKeep.value = (0, string_js_1.replaceSuffix)(startKeep.value, newWsPrefix, commonWsPrefix);
+            deletion.value = (0, string_js_1.removePrefix)(deletion.value, commonWsPrefix);
+            insertion.value = (0, string_js_1.removePrefix)(insertion.value, commonWsPrefix);
+        }
+        if (endKeep) {
+            var commonWsSuffix = (0, string_js_1.longestCommonSuffix)(oldWsSuffix, newWsSuffix);
+            endKeep.value = (0, string_js_1.replacePrefix)(endKeep.value, newWsSuffix, commonWsSuffix);
+            deletion.value = (0, string_js_1.removeSuffix)(deletion.value, commonWsSuffix);
+            insertion.value = (0, string_js_1.removeSuffix)(insertion.value, commonWsSuffix);
+        }
+    }
+    else if (insertion) {
+        // The whitespaces all reflect what was in the new text rather than
+        // the old, so we essentially have no information about whitespace
+        // insertion or deletion. We just want to dedupe the whitespace.
+        // We do that by having each change object keep its trailing
+        // whitespace and deleting duplicate leading whitespace where
+        // present.
+        if (startKeep) {
+            var ws = (0, string_js_1.leadingWs)(insertion.value);
+            insertion.value = insertion.value.substring(ws.length);
+        }
+        if (endKeep) {
+            var ws = (0, string_js_1.leadingWs)(endKeep.value);
+            endKeep.value = endKeep.value.substring(ws.length);
+        }
+        // otherwise we've got a deletion and no insertion
+    }
+    else if (startKeep && endKeep) {
+        var newWsFull = (0, string_js_1.leadingWs)(endKeep.value), delWsStart = (0, string_js_1.leadingWs)(deletion.value), delWsEnd = (0, string_js_1.trailingWs)(deletion.value);
+        // Any whitespace that comes straight after startKeep in both the old and
+        // new texts, assign to startKeep and remove from the deletion.
+        var newWsStart = (0, string_js_1.longestCommonPrefix)(newWsFull, delWsStart);
+        deletion.value = (0, string_js_1.removePrefix)(deletion.value, newWsStart);
+        // Any whitespace that comes straight before endKeep in both the old and
+        // new texts, and hasn't already been assigned to startKeep, assign to
+        // endKeep and remove from the deletion.
+        var newWsEnd = (0, string_js_1.longestCommonSuffix)((0, string_js_1.removePrefix)(newWsFull, newWsStart), delWsEnd);
+        deletion.value = (0, string_js_1.removeSuffix)(deletion.value, newWsEnd);
+        endKeep.value = (0, string_js_1.replacePrefix)(endKeep.value, newWsFull, newWsEnd);
+        // If there's any whitespace from the new text that HASN'T already been
+        // assigned, assign it to the start:
+        startKeep.value = (0, string_js_1.replaceSuffix)(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
+    }
+    else if (endKeep) {
+        // We are at the start of the text. Preserve all the whitespace on
+        // endKeep, and just remove whitespace from the end of deletion to the
+        // extent that it overlaps with the start of endKeep.
+        var endKeepWsPrefix = (0, string_js_1.leadingWs)(endKeep.value);
+        var deletionWsSuffix = (0, string_js_1.trailingWs)(deletion.value);
+        var overlap = (0, string_js_1.maximumOverlap)(deletionWsSuffix, endKeepWsPrefix);
+        deletion.value = (0, string_js_1.removeSuffix)(deletion.value, overlap);
+    }
+    else if (startKeep) {
+        // We are at the END of the text. Preserve all the whitespace on
+        // startKeep, and just remove whitespace from the start of deletion to
+        // the extent that it overlaps with the end of startKeep.
+        var startKeepWsSuffix = (0, string_js_1.trailingWs)(startKeep.value);
+        var deletionWsPrefix = (0, string_js_1.leadingWs)(deletion.value);
+        var overlap = (0, string_js_1.maximumOverlap)(startKeepWsSuffix, deletionWsPrefix);
+        deletion.value = (0, string_js_1.removePrefix)(deletion.value, overlap);
+    }
+}
+var WordsWithSpaceDiff = /** @class */ (function (_super) {
+    __extends(WordsWithSpaceDiff, _super);
+    function WordsWithSpaceDiff() {
+        return _super !== null && _super.apply(this, arguments) || this;
+    }
+    WordsWithSpaceDiff.prototype.tokenize = function (value) {
+        // Slightly different to the tokenizeIncludingWhitespace regex used above in
+        // that this one treats each individual newline as a distinct token, rather
+        // than merging them into other surrounding whitespace. This was requested
+        // in https://github.com/kpdecker/jsdiff/issues/180 &
+        //    https://github.com/kpdecker/jsdiff/issues/211
+        var regex = new RegExp("(\\r?\\n)|[".concat(extendedWordChars, "]+|[^\\S\\n\\r]+|[^").concat(extendedWordChars, "]"), 'ug');
+        return value.match(regex) || [];
+    };
+    return WordsWithSpaceDiff;
+}(base_js_1.default));
+exports.wordsWithSpaceDiff = new WordsWithSpaceDiff();
+function diffWordsWithSpace(oldStr, newStr, options) {
+    return exports.wordsWithSpaceDiff.diff(oldStr, newStr, options);
+}
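
To illustrate the whitespace handling described in the comments above, a small sketch, assuming require('diff') resolves to this vendored build:

    const { diffWords, diffWordsWithSpace } = require('diff');

    // diffWords treats whitespace-only differences as equal; diffWordsWithSpace does not.
    const a = 'foo  bar';
    const b = 'foo bar';
    console.log(diffWords(a, b).some(p => p.added || p.removed));          // expected: false
    console.log(diffWordsWithSpace(a, b).some(p => p.added || p.removed)); // expected: true
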
diff --git a/node_modules/diff/libcjs/index.js b/node_modules/diff/libcjs/index.js
new file mode 100644
index 0000000000000..e07c46b0dd404
--- /dev/null
+++ b/node_modules/diff/libcjs/index.js
@@ -0,0 +1,61 @@
+"use strict";
+/* See LICENSE file for terms of use */
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.canonicalize = exports.convertChangesToXML = exports.convertChangesToDMP = exports.reversePatch = exports.parsePatch = exports.applyPatches = exports.applyPatch = exports.formatPatch = exports.createPatch = exports.createTwoFilesPatch = exports.structuredPatch = exports.arrayDiff = exports.diffArrays = exports.jsonDiff = exports.diffJson = exports.cssDiff = exports.diffCss = exports.sentenceDiff = exports.diffSentences = exports.diffTrimmedLines = exports.lineDiff = exports.diffLines = exports.wordsWithSpaceDiff = exports.diffWordsWithSpace = exports.wordDiff = exports.diffWords = exports.characterDiff = exports.diffChars = exports.Diff = void 0;
+/*
+ * Text diff implementation.
+ *
+ * This library supports the following APIs:
+ * Diff.diffChars: Character by character diff
+ * Diff.diffWords: Word (as defined by \b regex) diff which ignores whitespace
+ * Diff.diffLines: Line based diff
+ *
+ * Diff.diffCss: Diff targeted at CSS content
+ *
+ * These methods are based on the implementation proposed in
+ * "An O(ND) Difference Algorithm and its Variations" (Myers, 1986).
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927
+ */
+var base_js_1 = require("./diff/base.js");
+exports.Diff = base_js_1.default;
+var character_js_1 = require("./diff/character.js");
+Object.defineProperty(exports, "diffChars", { enumerable: true, get: function () { return character_js_1.diffChars; } });
+Object.defineProperty(exports, "characterDiff", { enumerable: true, get: function () { return character_js_1.characterDiff; } });
+var word_js_1 = require("./diff/word.js");
+Object.defineProperty(exports, "diffWords", { enumerable: true, get: function () { return word_js_1.diffWords; } });
+Object.defineProperty(exports, "diffWordsWithSpace", { enumerable: true, get: function () { return word_js_1.diffWordsWithSpace; } });
+Object.defineProperty(exports, "wordDiff", { enumerable: true, get: function () { return word_js_1.wordDiff; } });
+Object.defineProperty(exports, "wordsWithSpaceDiff", { enumerable: true, get: function () { return word_js_1.wordsWithSpaceDiff; } });
+var line_js_1 = require("./diff/line.js");
+Object.defineProperty(exports, "diffLines", { enumerable: true, get: function () { return line_js_1.diffLines; } });
+Object.defineProperty(exports, "diffTrimmedLines", { enumerable: true, get: function () { return line_js_1.diffTrimmedLines; } });
+Object.defineProperty(exports, "lineDiff", { enumerable: true, get: function () { return line_js_1.lineDiff; } });
+var sentence_js_1 = require("./diff/sentence.js");
+Object.defineProperty(exports, "diffSentences", { enumerable: true, get: function () { return sentence_js_1.diffSentences; } });
+Object.defineProperty(exports, "sentenceDiff", { enumerable: true, get: function () { return sentence_js_1.sentenceDiff; } });
+var css_js_1 = require("./diff/css.js");
+Object.defineProperty(exports, "diffCss", { enumerable: true, get: function () { return css_js_1.diffCss; } });
+Object.defineProperty(exports, "cssDiff", { enumerable: true, get: function () { return css_js_1.cssDiff; } });
+var json_js_1 = require("./diff/json.js");
+Object.defineProperty(exports, "diffJson", { enumerable: true, get: function () { return json_js_1.diffJson; } });
+Object.defineProperty(exports, "canonicalize", { enumerable: true, get: function () { return json_js_1.canonicalize; } });
+Object.defineProperty(exports, "jsonDiff", { enumerable: true, get: function () { return json_js_1.jsonDiff; } });
+var array_js_1 = require("./diff/array.js");
+Object.defineProperty(exports, "diffArrays", { enumerable: true, get: function () { return array_js_1.diffArrays; } });
+Object.defineProperty(exports, "arrayDiff", { enumerable: true, get: function () { return array_js_1.arrayDiff; } });
+var apply_js_1 = require("./patch/apply.js");
+Object.defineProperty(exports, "applyPatch", { enumerable: true, get: function () { return apply_js_1.applyPatch; } });
+Object.defineProperty(exports, "applyPatches", { enumerable: true, get: function () { return apply_js_1.applyPatches; } });
+var parse_js_1 = require("./patch/parse.js");
+Object.defineProperty(exports, "parsePatch", { enumerable: true, get: function () { return parse_js_1.parsePatch; } });
+var reverse_js_1 = require("./patch/reverse.js");
+Object.defineProperty(exports, "reversePatch", { enumerable: true, get: function () { return reverse_js_1.reversePatch; } });
+var create_js_1 = require("./patch/create.js");
+Object.defineProperty(exports, "structuredPatch", { enumerable: true, get: function () { return create_js_1.structuredPatch; } });
+Object.defineProperty(exports, "createTwoFilesPatch", { enumerable: true, get: function () { return create_js_1.createTwoFilesPatch; } });
+Object.defineProperty(exports, "createPatch", { enumerable: true, get: function () { return create_js_1.createPatch; } });
+Object.defineProperty(exports, "formatPatch", { enumerable: true, get: function () { return create_js_1.formatPatch; } });
+var dmp_js_1 = require("./convert/dmp.js");
+Object.defineProperty(exports, "convertChangesToDMP", { enumerable: true, get: function () { return dmp_js_1.convertChangesToDMP; } });
+var xml_js_1 = require("./convert/xml.js");
+Object.defineProperty(exports, "convertChangesToXML", { enumerable: true, get: function () { return xml_js_1.convertChangesToXML; } });
diff --git a/node_modules/diff/libcjs/package.json b/node_modules/diff/libcjs/package.json
new file mode 100644
index 0000000000000..731cf3f1d319d
--- /dev/null
+++ b/node_modules/diff/libcjs/package.json
@@ -0,0 +1 @@
+{"type":"commonjs","sideEffects":false}
\ No newline at end of file
diff --git a/node_modules/diff/libcjs/patch/apply.js b/node_modules/diff/libcjs/patch/apply.js
new file mode 100644
index 0000000000000..4f49c7c6d08b4
--- /dev/null
+++ b/node_modules/diff/libcjs/patch/apply.js
@@ -0,0 +1,267 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.applyPatch = applyPatch;
+exports.applyPatches = applyPatches;
+var string_js_1 = require("../util/string.js");
+var line_endings_js_1 = require("./line-endings.js");
+var parse_js_1 = require("./parse.js");
+var distance_iterator_js_1 = require("../util/distance-iterator.js");
+/**
+ * attempts to apply a unified diff patch.
+ *
+ * Hunks are applied first to last.
+ * `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly.
+ * If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly.
+ * If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match.
+ * Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly.
+ *
+ * Once a hunk is successfully fitted, the process begins again with the next hunk.
+ * Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks.
+ *
+ * If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`.
+ *
+ * If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly.
+ * (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.)
+ *
+ * If the patch was applied successfully, returns a string containing the patched text.
+ * If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false.
+ *
+ * @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods.
+ */
+function applyPatch(source, patch, options) {
+    if (options === void 0) { options = {}; }
+    var patches;
+    if (typeof patch === 'string') {
+        patches = (0, parse_js_1.parsePatch)(patch);
+    }
+    else if (Array.isArray(patch)) {
+        patches = patch;
+    }
+    else {
+        patches = [patch];
+    }
+    if (patches.length > 1) {
+        throw new Error('applyPatch only works with a single input.');
+    }
+    return applyStructuredPatch(source, patches[0], options);
+}
+function applyStructuredPatch(source, patch, options) {
+    if (options === void 0) { options = {}; }
+    if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
+        if ((0, string_js_1.hasOnlyWinLineEndings)(source) && (0, line_endings_js_1.isUnix)(patch)) {
+            patch = (0, line_endings_js_1.unixToWin)(patch);
+        }
+        else if ((0, string_js_1.hasOnlyUnixLineEndings)(source) && (0, line_endings_js_1.isWin)(patch)) {
+            patch = (0, line_endings_js_1.winToUnix)(patch);
+        }
+    }
+    // Apply the diff to the input
+    var lines = source.split('\n'), hunks = patch.hunks, compareLine = options.compareLine || (function (lineNumber, line, operation, patchContent) { return line === patchContent; }), fuzzFactor = options.fuzzFactor || 0;
+    var minLine = 0;
+    if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
+        throw new Error('fuzzFactor must be a non-negative integer');
+    }
+    // Special case for empty patch.
+    if (!hunks.length) {
+        return source;
+    }
+    // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
+    // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
+    // newline that already exists - then we either return false and fail to apply the patch (if
+    // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
+    // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
+    var prevLine = '', removeEOFNL = false, addEOFNL = false;
+    for (var i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
+        var line = hunks[hunks.length - 1].lines[i];
+        if (line[0] == '\\') {
+            if (prevLine[0] == '+') {
+                removeEOFNL = true;
+            }
+            else if (prevLine[0] == '-') {
+                addEOFNL = true;
+            }
+        }
+        prevLine = line;
+    }
+    if (removeEOFNL) {
+        if (addEOFNL) {
+            // This means the final line gets changed but doesn't have a trailing newline in either the
+            // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
+            // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
+            if (!fuzzFactor && lines[lines.length - 1] == '') {
+                return false;
+            }
+        }
+        else if (lines[lines.length - 1] == '') {
+            lines.pop();
+        }
+        else if (!fuzzFactor) {
+            return false;
+        }
+    }
+    else if (addEOFNL) {
+        if (lines[lines.length - 1] != '') {
+            lines.push('');
+        }
+        else if (!fuzzFactor) {
+            return false;
+        }
+    }
+    /**
+     * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
+     * insertions, substitutions, or deletions, while ensuring also that:
+     * - lines deleted in the hunk match exactly, and
+     * - wherever an insertion operation or block of insertion operations appears in the hunk, the
+     *   immediately preceding and following lines of context match exactly
+     *
+     * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
+     *
+     * If the hunk can be applied, returns an object with properties `oldLineLastI` and
+     * `replacementLines`. Otherwise, returns null.
+     */
+    function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI, lastContextLineMatched, patchedLines, patchedLinesLength) {
+        if (hunkLinesI === void 0) { hunkLinesI = 0; }
+        if (lastContextLineMatched === void 0) { lastContextLineMatched = true; }
+        if (patchedLines === void 0) { patchedLines = []; }
+        if (patchedLinesLength === void 0) { patchedLinesLength = 0; }
+        var nConsecutiveOldContextLines = 0;
+        var nextContextLineMustMatch = false;
+        for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
+            var hunkLine = hunkLines[hunkLinesI], operation = (hunkLine.length > 0 ? hunkLine[0] : ' '), content = (hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine);
+            if (operation === '-') {
+                if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                    toPos++;
+                    nConsecutiveOldContextLines = 0;
+                }
+                else {
+                    if (!maxErrors || lines[toPos] == null) {
+                        return null;
+                    }
+                    patchedLines[patchedLinesLength] = lines[toPos];
+                    return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
+                }
+            }
+            if (operation === '+') {
+                if (!lastContextLineMatched) {
+                    return null;
+                }
+                patchedLines[patchedLinesLength] = content;
+                patchedLinesLength++;
+                nConsecutiveOldContextLines = 0;
+                nextContextLineMustMatch = true;
+            }
+            if (operation === ' ') {
+                nConsecutiveOldContextLines++;
+                patchedLines[patchedLinesLength] = lines[toPos];
+                if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                    patchedLinesLength++;
+                    lastContextLineMatched = true;
+                    nextContextLineMustMatch = false;
+                    toPos++;
+                }
+                else {
+                    if (nextContextLineMustMatch || !maxErrors) {
+                        return null;
+                    }
+                    // Consider 3 possibilities in sequence:
+                    // 1. lines contains a *substitution* not included in the patch context, or
+                    // 2. lines contains an *insertion* not included in the patch context, or
+                    // 3. lines contains a *deletion* not included in the patch context
+                    // The first two options are of course only possible if the line from lines is non-null -
+                    // i.e. only option 3 is possible if we've overrun the end of the old file.
+                    return (lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength));
+                }
+            }
+        }
+        // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
+        // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
+        // that starts in this hunk's trailing context.
+        patchedLinesLength -= nConsecutiveOldContextLines;
+        toPos -= nConsecutiveOldContextLines;
+        patchedLines.length = patchedLinesLength;
+        return {
+            patchedLines: patchedLines,
+            oldLineLastI: toPos - 1
+        };
+    }
+    var resultLines = [];
+    // Search best fit offsets for each hunk based on the previous ones
+    var prevHunkOffset = 0;
+    for (var i = 0; i < hunks.length; i++) {
+        var hunk = hunks[i];
+        var hunkResult = void 0;
+        var maxLine = lines.length - hunk.oldLines + fuzzFactor;
+        var toPos = void 0;
+        for (var maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
+            toPos = hunk.oldStart + prevHunkOffset - 1;
+            var iterator = (0, distance_iterator_js_1.default)(toPos, minLine, maxLine);
+            for (; toPos !== undefined; toPos = iterator()) {
+                hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
+                if (hunkResult) {
+                    break;
+                }
+            }
+            if (hunkResult) {
+                break;
+            }
+        }
+        if (!hunkResult) {
+            return false;
+        }
+        // Copy everything from the end of where we applied the last hunk to the start of this hunk
+        for (var i_1 = minLine; i_1 < toPos; i_1++) {
+            resultLines.push(lines[i_1]);
+        }
+        // Add the lines produced by applying the hunk:
+        for (var i_2 = 0; i_2 < hunkResult.patchedLines.length; i_2++) {
+            var line = hunkResult.patchedLines[i_2];
+            resultLines.push(line);
+        }
+        // Set lower text limit to end of the current hunk, so next ones don't try
+        // to fit over already patched text
+        minLine = hunkResult.oldLineLastI + 1;
+        // Note the offset between where the patch said the hunk should've applied and where we
+        // applied it, so we can adjust future hunks accordingly:
+        prevHunkOffset = toPos + 1 - hunk.oldStart;
+    }
+    // Copy over the rest of the lines from the old text
+    for (var i = minLine; i < lines.length; i++) {
+        resultLines.push(lines[i]);
+    }
+    return resultLines.join('\n');
+}
+/**
+ * applies one or more patches.
+ *
+ * `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files).
+ *
+ * This method will iterate over the contents of the patch and apply each file's diff to data provided through callbacks. The general flow for each patch index is:
+ *
+ * - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
+ * - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution.
+ *
+ * Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
+ */
+function applyPatches(uniDiff, options) {
+    var spDiff = typeof uniDiff === 'string' ? (0, parse_js_1.parsePatch)(uniDiff) : uniDiff;
+    var currentIndex = 0;
+    function processIndex() {
+        var index = spDiff[currentIndex++];
+        if (!index) {
+            return options.complete();
+        }
+        options.loadFile(index, function (err, data) {
+            if (err) {
+                return options.complete(err);
+            }
+            var updatedContent = applyPatch(data, index, options);
+            options.patched(index, updatedContent, function (err) {
+                if (err) {
+                    return options.complete(err);
+                }
+                processIndex();
+            });
+        });
+    }
+    processIndex();
+}
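+// Editor's note: an illustrative sketch (not part of the library) of driving applyPatches
+// through the callback contract described above. Reading and writing with the fs module and
+// using index.oldFileName/newFileName as paths are assumptions of this example, not
+// requirements of the API.
+//
+//   var fs = require('fs');
+//   applyPatches(patchText, {
+//       loadFile: function (index, callback) {
+//           fs.readFile(index.oldFileName, 'utf8', callback);
+//       },
+//       patched: function (index, content, callback) {
+//           if (content === false) {
+//               return callback(new Error('failed to apply patch to ' + index.oldFileName));
+//           }
+//           fs.writeFile(index.newFileName, content, callback);
+//       },
+//       complete: function (err) {
+//           if (err) {
+//               console.error(err);
+//           }
+//       }
+//   });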
diff --git a/node_modules/diff/libcjs/patch/create.js b/node_modules/diff/libcjs/patch/create.js
new file mode 100644
index 0000000000000..0f0a9ee723928
--- /dev/null
+++ b/node_modules/diff/libcjs/patch/create.js
@@ -0,0 +1,223 @@
+"use strict";
+var __assign = (this && this.__assign) || function () {
+    __assign = Object.assign || function(t) {
+        for (var s, i = 1, n = arguments.length; i < n; i++) {
+            s = arguments[i];
+            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
+                t[p] = s[p];
+        }
+        return t;
+    };
+    return __assign.apply(this, arguments);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.structuredPatch = structuredPatch;
+exports.formatPatch = formatPatch;
+exports.createTwoFilesPatch = createTwoFilesPatch;
+exports.createPatch = createPatch;
+var line_js_1 = require("../diff/line.js");
+function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
+    var optionsObj;
+    if (!options) {
+        optionsObj = {};
+    }
+    else if (typeof options === 'function') {
+        optionsObj = { callback: options };
+    }
+    else {
+        optionsObj = options;
+    }
+    if (typeof optionsObj.context === 'undefined') {
+        optionsObj.context = 4;
+    }
+    // We copy this into its own variable to placate TypeScript, which thinks
+    // optionsObj.context might be undefined in the callbacks below.
+    var context = optionsObj.context;
+    // @ts-expect-error (runtime check for something that is correctly a static type error)
+    if (optionsObj.newlineIsToken) {
+        throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
+    }
+    if (!optionsObj.callback) {
+        return diffLinesResultToPatch((0, line_js_1.diffLines)(oldStr, newStr, optionsObj));
+    }
+    else {
+        var callback_1 = optionsObj.callback;
+        (0, line_js_1.diffLines)(oldStr, newStr, __assign(__assign({}, optionsObj), { callback: function (diff) {
+                var patch = diffLinesResultToPatch(diff);
+                // TypeScript is unhappy without the cast because it does not understand that `patch` may
+                // be undefined here only if `callback` is StructuredPatchCallbackAbortable:
+                callback_1(patch);
+            } }));
+    }
+    function diffLinesResultToPatch(diff) {
+        // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
+        //         of lines containing trailing newline characters. We'll tidy up later...
+        if (!diff) {
+            return;
+        }
+        diff.push({ value: '', lines: [] }); // Append an empty value to make cleanup easier
+        function contextLines(lines) {
+            return lines.map(function (entry) { return ' ' + entry; });
+        }
+        var hunks = [];
+        var oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1;
+        for (var i = 0; i < diff.length; i++) {
+            var current = diff[i], lines = current.lines || splitLines(current.value);
+            current.lines = lines;
+            if (current.added || current.removed) {
+                // If we have previous context, start with that
+                if (!oldRangeStart) {
+                    var prev = diff[i - 1];
+                    oldRangeStart = oldLine;
+                    newRangeStart = newLine;
+                    if (prev) {
+                        curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : [];
+                        oldRangeStart -= curRange.length;
+                        newRangeStart -= curRange.length;
+                    }
+                }
+                // Output our changes
+                for (var _i = 0, lines_1 = lines; _i < lines_1.length; _i++) {
+                    var line = lines_1[_i];
+                    curRange.push((current.added ? '+' : '-') + line);
+                }
+                // Track the updated file position
+                if (current.added) {
+                    newLine += lines.length;
+                }
+                else {
+                    oldLine += lines.length;
+                }
+            }
+            else {
+                // Identical context lines. Track line changes
+                if (oldRangeStart) {
+                    // Close out any changes that have been output (or join overlapping)
+                    if (lines.length <= context * 2 && i < diff.length - 2) {
+                        // Overlapping
+                        for (var _a = 0, _b = contextLines(lines); _a < _b.length; _a++) {
+                            var line = _b[_a];
+                            curRange.push(line);
+                        }
+                    }
+                    else {
+                        // end the range and output
+                        var contextSize = Math.min(lines.length, context);
+                        for (var _c = 0, _d = contextLines(lines.slice(0, contextSize)); _c < _d.length; _c++) {
+                            var line = _d[_c];
+                            curRange.push(line);
+                        }
+                        var hunk = {
+                            oldStart: oldRangeStart,
+                            oldLines: (oldLine - oldRangeStart + contextSize),
+                            newStart: newRangeStart,
+                            newLines: (newLine - newRangeStart + contextSize),
+                            lines: curRange
+                        };
+                        hunks.push(hunk);
+                        oldRangeStart = 0;
+                        newRangeStart = 0;
+                        curRange = [];
+                    }
+                }
+                oldLine += lines.length;
+                newLine += lines.length;
+            }
+        }
+        // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
+        //         "\ No newline at end of file".
+        for (var _e = 0, hunks_1 = hunks; _e < hunks_1.length; _e++) {
+            var hunk = hunks_1[_e];
+            for (var i = 0; i < hunk.lines.length; i++) {
+                if (hunk.lines[i].endsWith('\n')) {
+                    hunk.lines[i] = hunk.lines[i].slice(0, -1);
+                }
+                else {
+                    hunk.lines.splice(i + 1, 0, '\\ No newline at end of file');
+                    i++; // Skip the line we just added, then continue iterating
+                }
+            }
+        }
+        return {
+            oldFileName: oldFileName, newFileName: newFileName,
+            oldHeader: oldHeader, newHeader: newHeader,
+            hunks: hunks
+        };
+    }
+}
+/**
+ * creates a unified diff patch.
+ * @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`)
+ */
+function formatPatch(patch) {
+    if (Array.isArray(patch)) {
+        return patch.map(formatPatch).join('\n');
+    }
+    var ret = [];
+    if (patch.oldFileName == patch.newFileName) {
+        ret.push('Index: ' + patch.oldFileName);
+    }
+    ret.push('===================================================================');
+    ret.push('--- ' + patch.oldFileName + (typeof patch.oldHeader === 'undefined' ? '' : '\t' + patch.oldHeader));
+    ret.push('+++ ' + patch.newFileName + (typeof patch.newHeader === 'undefined' ? '' : '\t' + patch.newHeader));
+    for (var i = 0; i < patch.hunks.length; i++) {
+        var hunk = patch.hunks[i];
+        // Unified Diff Format quirk: If the chunk size is 0,
+        // the first number is one lower than one would expect.
+        // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+        if (hunk.oldLines === 0) {
+            hunk.oldStart -= 1;
+        }
+        if (hunk.newLines === 0) {
+            hunk.newStart -= 1;
+        }
+        ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines
+            + ' +' + hunk.newStart + ',' + hunk.newLines
+            + ' @@');
+        for (var _i = 0, _a = hunk.lines; _i < _a.length; _i++) {
+            var line = _a[_i];
+            ret.push(line);
+        }
+    }
+    return ret.join('\n') + '\n';
+}
+function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
+    if (typeof options === 'function') {
+        options = { callback: options };
+    }
+    if (!(options === null || options === void 0 ? void 0 : options.callback)) {
+        var patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
+        if (!patchObj) {
+            return;
+        }
+        return formatPatch(patchObj);
+    }
+    else {
+        var callback_2 = options.callback;
+        structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, __assign(__assign({}, options), { callback: function (patchObj) {
+                if (!patchObj) {
+                    callback_2(undefined);
+                }
+                else {
+                    callback_2(formatPatch(patchObj));
+                }
+            } }));
+    }
+}
+function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
+    return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
+}
+/**
+ * Split `text` into an array of lines, including the trailing newline character (where present)
+ */
+function splitLines(text) {
+    var hasTrailingNl = text.endsWith('\n');
+    var result = text.split('\n').map(function (line) { return line + '\n'; });
+    if (hasTrailingNl) {
+        result.pop();
+    }
+    else {
+        result.push(result.pop().slice(0, -1));
+    }
+    return result;
+}
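+// Editor's note: an illustrative sketch (not part of the library) of the patch-creation
+// helpers above. The file names and contents are made up for the example.
+//
+//   var patchText = createPatch('greeting.txt', 'hello\nworld\n', 'hello\nthere\nworld\n');
+//   // patchText starts with "Index: greeting.txt", a ---/+++ header pair, and one
+//   // "@@ -1,2 +1,3 @@" hunk whose lines are [' hello', '+there', ' world'].
+//
+//   var patchObj = structuredPatch('a.txt', 'b.txt', 'x\n', 'y\n');
+//   // patchObj.hunks[0].lines is ['-x', '+y']; formatPatch(patchObj) renders it as text.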
diff --git a/node_modules/diff/libcjs/patch/line-endings.js b/node_modules/diff/libcjs/patch/line-endings.js
new file mode 100644
index 0000000000000..be45f0c8a326f
--- /dev/null
+++ b/node_modules/diff/libcjs/patch/line-endings.js
@@ -0,0 +1,61 @@
+"use strict";
+var __assign = (this && this.__assign) || function () {
+    __assign = Object.assign || function(t) {
+        for (var s, i = 1, n = arguments.length; i < n; i++) {
+            s = arguments[i];
+            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
+                t[p] = s[p];
+        }
+        return t;
+    };
+    return __assign.apply(this, arguments);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.unixToWin = unixToWin;
+exports.winToUnix = winToUnix;
+exports.isUnix = isUnix;
+exports.isWin = isWin;
+function unixToWin(patch) {
+    if (Array.isArray(patch)) {
+        // It would be cleaner if instead of the line below we could just write
+        //     return patch.map(unixToWin)
+        // but mysteriously TypeScript (v5.7.3 at the time of writing) does not like this and it will
+        // refuse to compile, thinking that unixToWin could then return StructuredPatch[][] and the
+        // result would be incompatible with the overload signatures.
+        // See bug report at https://github.com/microsoft/TypeScript/issues/61398.
+        return patch.map(function (p) { return unixToWin(p); });
+    }
+    return __assign(__assign({}, patch), { hunks: patch.hunks.map(function (hunk) { return (__assign(__assign({}, hunk), { lines: hunk.lines.map(function (line, i) {
+                var _a;
+                return (line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')))
+                    ? line
+                    : line + '\r';
+            }) })); }) });
+}
+function winToUnix(patch) {
+    if (Array.isArray(patch)) {
+        // (See comment above equivalent line in unixToWin)
+        return patch.map(function (p) { return winToUnix(p); });
+    }
+    return __assign(__assign({}, patch), { hunks: patch.hunks.map(function (hunk) { return (__assign(__assign({}, hunk), { lines: hunk.lines.map(function (line) { return line.endsWith('\r') ? line.substring(0, line.length - 1) : line; }) })); }) });
+}
+/**
+ * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
+ * no line endings).
+ */
+function isUnix(patch) {
+    if (!Array.isArray(patch)) {
+        patch = [patch];
+    }
+    return !patch.some(function (index) { return index.hunks.some(function (hunk) { return hunk.lines.some(function (line) { return !line.startsWith('\\') && line.endsWith('\r'); }); }); });
+}
+/**
+ * Returns true if the patch uses Windows line endings and only Windows line endings.
+ */
+function isWin(patch) {
+    if (!Array.isArray(patch)) {
+        patch = [patch];
+    }
+    return patch.some(function (index) { return index.hunks.some(function (hunk) { return hunk.lines.some(function (line) { return line.endsWith('\r'); }); }); })
+        && patch.every(function (index) { return index.hunks.every(function (hunk) { return hunk.lines.every(function (line, i) { var _a; return line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')); }); }); });
+}
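+// Editor's note: an illustrative sketch (not part of the library). `patch` stands for any
+// structured patch, e.g. one produced by parsePatch in ./parse.js.
+//
+//   var winPatch = unixToWin(patch);   // content lines gain a trailing '\r' (except '\'
+//                                      // markers and the lines immediately before them)
+//   isWin(winPatch);                   // => true, assuming the patch has content lines
+//   isUnix(winToUnix(winPatch));       // => true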
diff --git a/node_modules/diff/libcjs/patch/parse.js b/node_modules/diff/libcjs/patch/parse.js
new file mode 100644
index 0000000000000..247262032e34a
--- /dev/null
+++ b/node_modules/diff/libcjs/patch/parse.js
@@ -0,0 +1,133 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.parsePatch = parsePatch;
+/**
+ * Parses a patch into structured data, in the same structure returned by `structuredPatch`.
+ *
+ * @return a JSON object representation of the patch, suitable for use with the `applyPatch` method.
+ */
+function parsePatch(uniDiff) {
+    var diffstr = uniDiff.split(/\n/), list = [];
+    var i = 0;
+    function parseIndex() {
+        var index = {};
+        list.push(index);
+        // Parse diff metadata
+        while (i < diffstr.length) {
+            var line = diffstr[i];
+            // File header found, end parsing diff metadata
+            if ((/^(---|\+\+\+|@@)\s/).test(line)) {
+                break;
+            }
+            // Diff index
+            var header = (/^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/).exec(line);
+            if (header) {
+                index.index = header[1];
+            }
+            i++;
+        }
+        // Parse file headers if they are defined. Unified diff requires them, but
+        // there's no technical issue with having an isolated hunk without a file header
+        parseFileHeader(index);
+        parseFileHeader(index);
+        // Parse hunks
+        index.hunks = [];
+        while (i < diffstr.length) {
+            var line = diffstr[i];
+            if ((/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/).test(line)) {
+                break;
+            }
+            else if ((/^@@/).test(line)) {
+                index.hunks.push(parseHunk());
+            }
+            else if (line) {
+                throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line));
+            }
+            else {
+                i++;
+            }
+        }
+    }
+    // Parses the --- and +++ headers, if none are found, no lines
+    // are consumed.
+    function parseFileHeader(index) {
+        var fileHeader = (/^(---|\+\+\+)\s+(.*)\r?$/).exec(diffstr[i]);
+        if (fileHeader) {
+            var data = fileHeader[2].split('\t', 2), header = (data[1] || '').trim();
+            var fileName = data[0].replace(/\\\\/g, '\\');
+            if ((/^".*"$/).test(fileName)) {
+                fileName = fileName.substr(1, fileName.length - 2);
+            }
+            if (fileHeader[1] === '---') {
+                index.oldFileName = fileName;
+                index.oldHeader = header;
+            }
+            else {
+                index.newFileName = fileName;
+                index.newHeader = header;
+            }
+            i++;
+        }
+    }
+    // Parses a hunk
+    // This assumes that we are at the start of a hunk.
+    function parseHunk() {
+        var _a;
+        var chunkHeaderIndex = i, chunkHeaderLine = diffstr[i++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
+        var hunk = {
+            oldStart: +chunkHeader[1],
+            oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
+            newStart: +chunkHeader[3],
+            newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
+            lines: []
+        };
+        // Unified Diff Format quirk: If the chunk size is 0,
+        // the first number is one lower than one would expect.
+        // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+        if (hunk.oldLines === 0) {
+            hunk.oldStart += 1;
+        }
+        if (hunk.newLines === 0) {
+            hunk.newStart += 1;
+        }
+        var addCount = 0, removeCount = 0;
+        for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))); i++) {
+            var operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ? ' ' : diffstr[i][0];
+            if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
+                hunk.lines.push(diffstr[i]);
+                if (operation === '+') {
+                    addCount++;
+                }
+                else if (operation === '-') {
+                    removeCount++;
+                }
+                else if (operation === ' ') {
+                    addCount++;
+                    removeCount++;
+                }
+            }
+            else {
+                throw new Error("Hunk at line ".concat(chunkHeaderIndex + 1, " contained invalid line ").concat(diffstr[i]));
+            }
+        }
+        // Handle the empty block count case
+        if (!addCount && hunk.newLines === 1) {
+            hunk.newLines = 0;
+        }
+        if (!removeCount && hunk.oldLines === 1) {
+            hunk.oldLines = 0;
+        }
+        // Perform sanity checking
+        if (addCount !== hunk.newLines) {
+            throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
+        }
+        if (removeCount !== hunk.oldLines) {
+            throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
+        }
+        return hunk;
+    }
+    while (i < diffstr.length) {
+        parseIndex();
+    }
+    return list;
+}
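+// Editor's note: an illustrative sketch (not part of the library) of the structure this
+// parser produces for a minimal one-hunk patch.
+//
+//   var files = parsePatch('--- a.txt\n+++ b.txt\n@@ -1,1 +1,1 @@\n-old line\n+new line\n');
+//   // files[0].oldFileName === 'a.txt', files[0].newFileName === 'b.txt'
+//   // files[0].hunks[0] is { oldStart: 1, oldLines: 1, newStart: 1, newLines: 1,
+//   //                        lines: ['-old line', '+new line'] }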
diff --git a/node_modules/diff/libcjs/patch/reverse.js b/node_modules/diff/libcjs/patch/reverse.js
new file mode 100644
index 0000000000000..078fcdaea0bbc
--- /dev/null
+++ b/node_modules/diff/libcjs/patch/reverse.js
@@ -0,0 +1,37 @@
+"use strict";
+var __assign = (this && this.__assign) || function () {
+    __assign = Object.assign || function(t) {
+        for (var s, i = 1, n = arguments.length; i < n; i++) {
+            s = arguments[i];
+            for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p))
+                t[p] = s[p];
+        }
+        return t;
+    };
+    return __assign.apply(this, arguments);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.reversePatch = reversePatch;
+function reversePatch(structuredPatch) {
+    if (Array.isArray(structuredPatch)) {
+        // (See comment in unixToWin for why we need the pointless-looking anonymous function here)
+        return structuredPatch.map(function (patch) { return reversePatch(patch); }).reverse();
+    }
+    return __assign(__assign({}, structuredPatch), { oldFileName: structuredPatch.newFileName, oldHeader: structuredPatch.newHeader, newFileName: structuredPatch.oldFileName, newHeader: structuredPatch.oldHeader, hunks: structuredPatch.hunks.map(function (hunk) {
+            return {
+                oldLines: hunk.newLines,
+                oldStart: hunk.newStart,
+                newLines: hunk.oldLines,
+                newStart: hunk.oldStart,
+                lines: hunk.lines.map(function (l) {
+                    if (l.startsWith('-')) {
+                        return "+".concat(l.slice(1));
+                    }
+                    if (l.startsWith('+')) {
+                        return "-".concat(l.slice(1));
+                    }
+                    return l;
+                })
+            };
+        }) });
+}
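+// Editor's note: an illustrative sketch (not part of the library): a reversed patch undoes
+// the original one. structuredPatch and applyPatch live in sibling modules of this package.
+//
+//   var patch = structuredPatch('a.txt', 'a.txt', oldText, newText);
+//   applyPatch(newText, reversePatch(patch));
+//   // => oldText, provided the forward patch applied cleanly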
diff --git a/node_modules/diff/libcjs/types.js b/node_modules/diff/libcjs/types.js
new file mode 100644
index 0000000000000..c8ad2e549bdc6
--- /dev/null
+++ b/node_modules/diff/libcjs/types.js
@@ -0,0 +1,2 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
diff --git a/node_modules/diff/libcjs/util/array.js b/node_modules/diff/libcjs/util/array.js
new file mode 100644
index 0000000000000..c21937ee0fe51
--- /dev/null
+++ b/node_modules/diff/libcjs/util/array.js
@@ -0,0 +1,21 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.arrayEqual = arrayEqual;
+exports.arrayStartsWith = arrayStartsWith;
+function arrayEqual(a, b) {
+    if (a.length !== b.length) {
+        return false;
+    }
+    return arrayStartsWith(a, b);
+}
+function arrayStartsWith(array, start) {
+    if (start.length > array.length) {
+        return false;
+    }
+    for (var i = 0; i < start.length; i++) {
+        if (start[i] !== array[i]) {
+            return false;
+        }
+    }
+    return true;
+}
diff --git a/node_modules/diff/libcjs/util/distance-iterator.js b/node_modules/diff/libcjs/util/distance-iterator.js
new file mode 100644
index 0000000000000..2421553c444ea
--- /dev/null
+++ b/node_modules/diff/libcjs/util/distance-iterator.js
@@ -0,0 +1,40 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.default = default_1;
+// Iterator that traverses in the range of [min, max], stepping
+// by distance from a given start position. I.e. for [0, 4], with
+// start of 2, this will iterate 2, 3, 1, 4, 0.
+function default_1(start, minLine, maxLine) {
+    var wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1;
+    return function iterator() {
+        if (wantForward && !forwardExhausted) {
+            if (backwardExhausted) {
+                localOffset++;
+            }
+            else {
+                wantForward = false;
+            }
+            // Check if trying to fit beyond text length, and if not, check it fits
+            // after offset location (or desired location on first iteration)
+            if (start + localOffset <= maxLine) {
+                return start + localOffset;
+            }
+            forwardExhausted = true;
+        }
+        if (!backwardExhausted) {
+            if (!forwardExhausted) {
+                wantForward = true;
+            }
+            // Check if trying to fit before text beginning, and if not, check it fits
+            // before offset location
+            if (minLine <= start - localOffset) {
+                return start - localOffset++;
+            }
+            backwardExhausted = true;
+            return iterator();
+        }
+        // We tried to fit hunk before text beginning and beyond text length, then
+        // hunk can't fit on the text. Return undefined
+        return undefined;
+    };
+}
diff --git a/node_modules/diff/libcjs/util/params.js b/node_modules/diff/libcjs/util/params.js
new file mode 100644
index 0000000000000..6eefddba7922c
--- /dev/null
+++ b/node_modules/diff/libcjs/util/params.js
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.generateOptions = generateOptions;
+function generateOptions(options, defaults) {
+    if (typeof options === 'function') {
+        defaults.callback = options;
+    }
+    else if (options) {
+        for (var name in options) {
+            /* istanbul ignore else */
+            if (Object.prototype.hasOwnProperty.call(options, name)) {
+                defaults[name] = options[name];
+            }
+        }
+    }
+    return defaults;
+}
diff --git a/node_modules/diff/libcjs/util/string.js b/node_modules/diff/libcjs/util/string.js
new file mode 100644
index 0000000000000..847ec88a88f5d
--- /dev/null
+++ b/node_modules/diff/libcjs/util/string.js
@@ -0,0 +1,141 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.longestCommonPrefix = longestCommonPrefix;
+exports.longestCommonSuffix = longestCommonSuffix;
+exports.replacePrefix = replacePrefix;
+exports.replaceSuffix = replaceSuffix;
+exports.removePrefix = removePrefix;
+exports.removeSuffix = removeSuffix;
+exports.maximumOverlap = maximumOverlap;
+exports.hasOnlyWinLineEndings = hasOnlyWinLineEndings;
+exports.hasOnlyUnixLineEndings = hasOnlyUnixLineEndings;
+exports.trailingWs = trailingWs;
+exports.leadingWs = leadingWs;
+function longestCommonPrefix(str1, str2) {
+    var i;
+    for (i = 0; i < str1.length && i < str2.length; i++) {
+        if (str1[i] != str2[i]) {
+            return str1.slice(0, i);
+        }
+    }
+    return str1.slice(0, i);
+}
+function longestCommonSuffix(str1, str2) {
+    var i;
+    // Unlike longestCommonPrefix, we need a special case to handle all scenarios
+    // where we return the empty string since str1.slice(-0) will return the
+    // entire string.
+    if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
+        return '';
+    }
+    for (i = 0; i < str1.length && i < str2.length; i++) {
+        if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
+            return str1.slice(-i);
+        }
+    }
+    return str1.slice(-i);
+}
+function replacePrefix(string, oldPrefix, newPrefix) {
+    if (string.slice(0, oldPrefix.length) != oldPrefix) {
+        throw Error("string ".concat(JSON.stringify(string), " doesn't start with prefix ").concat(JSON.stringify(oldPrefix), "; this is a bug"));
+    }
+    return newPrefix + string.slice(oldPrefix.length);
+}
+function replaceSuffix(string, oldSuffix, newSuffix) {
+    if (!oldSuffix) {
+        return string + newSuffix;
+    }
+    if (string.slice(-oldSuffix.length) != oldSuffix) {
+        throw Error("string ".concat(JSON.stringify(string), " doesn't end with suffix ").concat(JSON.stringify(oldSuffix), "; this is a bug"));
+    }
+    return string.slice(0, -oldSuffix.length) + newSuffix;
+}
+function removePrefix(string, oldPrefix) {
+    return replacePrefix(string, oldPrefix, '');
+}
+function removeSuffix(string, oldSuffix) {
+    return replaceSuffix(string, oldSuffix, '');
+}
+function maximumOverlap(string1, string2) {
+    return string2.slice(0, overlapCount(string1, string2));
+}
+// Nicked from https://stackoverflow.com/a/60422853/1709587
+function overlapCount(a, b) {
+    // Deal with cases where the strings differ in length
+    var startA = 0;
+    if (a.length > b.length) {
+        startA = a.length - b.length;
+    }
+    var endB = b.length;
+    if (a.length < b.length) {
+        endB = a.length;
+    }
+    // Create a back-reference for each index
+    //   that should be followed in case of a mismatch.
+    //   We only need B to make these references:
+    var map = Array(endB);
+    var k = 0; // Index that lags behind j
+    map[0] = 0;
+    for (var j = 1; j < endB; j++) {
+        if (b[j] == b[k]) {
+            map[j] = map[k]; // skip over the same character (optional optimisation)
+        }
+        else {
+            map[j] = k;
+        }
+        while (k > 0 && b[j] != b[k]) {
+            k = map[k];
+        }
+        if (b[j] == b[k]) {
+            k++;
+        }
+    }
+    // Phase 2: use these references while iterating over A
+    k = 0;
+    for (var i = startA; i < a.length; i++) {
+        while (k > 0 && a[i] != b[k]) {
+            k = map[k];
+        }
+        if (a[i] == b[k]) {
+            k++;
+        }
+    }
+    return k;
+}
+/**
+ * Returns true if the string consistently uses Windows line endings.
+ */
+function hasOnlyWinLineEndings(string) {
+    return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
+}
+/**
+ * Returns true if the string consistently uses Unix line endings.
+ */
+function hasOnlyUnixLineEndings(string) {
+    return !string.includes('\r\n') && string.includes('\n');
+}
+function trailingWs(string) {
+    // Yes, this looks overcomplicated and dumb - why not replace the whole function with
+    //     return string.match(/\s*$/)[0]
+    // you ask? Because:
+    // 1. the trap described at https://markamery.com/blog/quadratic-time-regexes/ would mean doing
+    //    this would cause this function to take O(n²) time in the worst case (specifically when
+    //    there is a massive run of NON-TRAILING whitespace in `string`), and
+    // 2. the fix proposed in the same blog post, of using a negative lookbehind, is incompatible
+    //    with old Safari versions that we'd like to not break if possible (see
+    //    https://github.com/kpdecker/jsdiff/pull/550)
+    // It feels absurd to do this with an explicit loop instead of a regex, but I really can't see a
+    // better way that doesn't result in broken behaviour.
+    var i;
+    for (i = string.length - 1; i >= 0; i--) {
+        if (!string[i].match(/\s/)) {
+            break;
+        }
+    }
+    return string.substring(i + 1);
+}
+function leadingWs(string) {
+    // Thankfully the annoying considerations described in trailingWs don't apply here:
+    var match = string.match(/^\s*/);
+    return match ? match[0] : '';
+}
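+// Editor's note: illustrative examples (not part of the library) of the helpers above:
+//
+//   maximumOverlap('documen', 'menu');     // => 'men' (longest suffix of the first string
+//                                          //    that is a prefix of the second)
+//   trailingWs('foo  \t');                 // => '  \t'
+//   leadingWs('  bar');                    // => '  '
+//   hasOnlyWinLineEndings('a\r\nb\r\n');   // => true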
diff --git a/node_modules/diff/libesm/convert/dmp.js b/node_modules/diff/libesm/convert/dmp.js
new file mode 100644
index 0000000000000..44d2841465887
--- /dev/null
+++ b/node_modules/diff/libesm/convert/dmp.js
@@ -0,0 +1,21 @@
+/**
+ * converts a list of change objects to the format returned by Google's [diff-match-patch](https://github.com/google/diff-match-patch) library
+ */
+export function convertChangesToDMP(changes) {
+    const ret = [];
+    let change, operation;
+    for (let i = 0; i < changes.length; i++) {
+        change = changes[i];
+        if (change.added) {
+            operation = 1;
+        }
+        else if (change.removed) {
+            operation = -1;
+        }
+        else {
+            operation = 0;
+        }
+        ret.push([operation, change.value]);
+    }
+    return ret;
+}
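+// Editor's note: an illustrative example (not part of the library) of the tuple format
+// produced above: 0 = unchanged, -1 = removed, 1 = added.
+//
+//   convertChangesToDMP([
+//       { value: 'beep ', added: false, removed: false },
+//       { value: 'boop', added: false, removed: true },
+//       { value: 'boob', added: true, removed: false },
+//   ]);
+//   // => [[0, 'beep '], [-1, 'boop'], [1, 'boob']]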
diff --git a/node_modules/diff/libesm/convert/xml.js b/node_modules/diff/libesm/convert/xml.js
new file mode 100644
index 0000000000000..90ea8a2b8c667
--- /dev/null
+++ b/node_modules/diff/libesm/convert/xml.js
@@ -0,0 +1,31 @@
+/**
+ * converts a list of change objects to a serialized XML format
+ */
+export function convertChangesToXML(changes) {
+    const ret = [];
+    for (let i = 0; i < changes.length; i++) {
+        const change = changes[i];
+        if (change.added) {
+            ret.push('<ins>');
+        }
+        else if (change.removed) {
+            ret.push('<del>');
+        }
+        ret.push(escapeHTML(change.value));
+        if (change.added) {
+            ret.push('</ins>');
+        }
+        else if (change.removed) {
+            ret.push('</del>');
+        }
+    }
+    return ret.join('');
+}
+function escapeHTML(s) {
+    let n = s;
+    n = n.replace(/&/g, '&amp;');
+    n = n.replace(/</g, '&lt;');
+    n = n.replace(/>/g, '&gt;');
+    n = n.replace(/"/g, '&quot;');
+    return n;
+}
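+// Editor's note: an illustrative example (not part of the library). diffChars here comes
+// from ../diff/character.js; values containing markup are escaped by escapeHTML above.
+//
+//   convertChangesToXML(diffChars('cat', 'cap'));
+//   // => 'ca<del>t</del><ins>p</ins>'
+//
+//   convertChangesToXML([{ value: 'a<b', added: true, removed: false }]);
+//   // => '<ins>a&lt;b</ins>'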
diff --git a/node_modules/diff/libesm/diff/array.js b/node_modules/diff/libesm/diff/array.js
new file mode 100644
index 0000000000000..d92aeb485682d
--- /dev/null
+++ b/node_modules/diff/libesm/diff/array.js
@@ -0,0 +1,16 @@
+import Diff from './base.js';
+class ArrayDiff extends Diff {
+    tokenize(value) {
+        return value.slice();
+    }
+    join(value) {
+        return value;
+    }
+    removeEmpty(value) {
+        return value;
+    }
+}
+export const arrayDiff = new ArrayDiff();
+export function diffArrays(oldArr, newArr, options) {
+    return arrayDiff.diff(oldArr, newArr, options);
+}
diff --git a/node_modules/diff/libesm/diff/base.js b/node_modules/diff/libesm/diff/base.js
new file mode 100644
index 0000000000000..db02845d419b9
--- /dev/null
+++ b/node_modules/diff/libesm/diff/base.js
@@ -0,0 +1,253 @@
+export default class Diff {
+    diff(oldStr, newStr, 
+    // Type below is not accurate/complete - see above for full possibilities - but it compiles
+    options = {}) {
+        let callback;
+        if (typeof options === 'function') {
+            callback = options;
+            options = {};
+        }
+        else if ('callback' in options) {
+            callback = options.callback;
+        }
+        // Allow subclasses to massage the input prior to running
+        const oldString = this.castInput(oldStr, options);
+        const newString = this.castInput(newStr, options);
+        const oldTokens = this.removeEmpty(this.tokenize(oldString, options));
+        const newTokens = this.removeEmpty(this.tokenize(newString, options));
+        return this.diffWithOptionsObj(oldTokens, newTokens, options, callback);
+    }
+    diffWithOptionsObj(oldTokens, newTokens, options, callback) {
+        var _a;
+        const done = (value) => {
+            value = this.postProcess(value, options);
+            if (callback) {
+                setTimeout(function () { callback(value); }, 0);
+                return undefined;
+            }
+            else {
+                return value;
+            }
+        };
+        const newLen = newTokens.length, oldLen = oldTokens.length;
+        let editLength = 1;
+        let maxEditLength = newLen + oldLen;
+        if (options.maxEditLength != null) {
+            maxEditLength = Math.min(maxEditLength, options.maxEditLength);
+        }
+        const maxExecutionTime = (_a = options.timeout) !== null && _a !== void 0 ? _a : Infinity;
+        const abortAfterTimestamp = Date.now() + maxExecutionTime;
+        const bestPath = [{ oldPos: -1, lastComponent: undefined }];
+        // Seed editLength = 0, i.e. the content starts with the same values
+        let newPos = this.extractCommon(bestPath[0], newTokens, oldTokens, 0, options);
+        if (bestPath[0].oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
+            // Identity per the equality and tokenizer
+            return done(this.buildValues(bestPath[0].lastComponent, newTokens, oldTokens));
+        }
+        // Once we hit the right edge of the edit graph on some diagonal k, we can
+        // definitely reach the end of the edit graph in no more than k edits, so
+        // there's no point in considering any moves to diagonal k+1 any more (from
+        // which we're guaranteed to need at least k+1 more edits).
+        // Similarly, once we've reached the bottom of the edit graph, there's no
+        // point considering moves to lower diagonals.
+        // We record this fact by setting minDiagonalToConsider and
+        // maxDiagonalToConsider to some finite value once we've hit the edge of
+        // the edit graph.
+        // This optimization is not faithful to the original algorithm presented in
+        // Myers's paper, which instead pointlessly extends D-paths off the end of
+        // the edit graph - see page 7 of Myers's paper which notes this point
+        // explicitly and illustrates it with a diagram. This has major performance
+        // implications for some common scenarios. For instance, to compute a diff
+        // where the new text simply appends d characters on the end of the
+        // original text of length n, the true Myers algorithm will take O(n+d^2)
+        // time while this optimization needs only O(n+d) time.
+        let minDiagonalToConsider = -Infinity, maxDiagonalToConsider = Infinity;
+        // Main worker method. checks all permutations of a given edit length for acceptance.
+        const execEditLength = () => {
+            for (let diagonalPath = Math.max(minDiagonalToConsider, -editLength); diagonalPath <= Math.min(maxDiagonalToConsider, editLength); diagonalPath += 2) {
+                let basePath;
+                const removePath = bestPath[diagonalPath - 1], addPath = bestPath[diagonalPath + 1];
+                if (removePath) {
+                    // No one else is going to attempt to use this value, clear it
+                    // @ts-expect-error - perf optimisation. This type-violating value will never be read.
+                    bestPath[diagonalPath - 1] = undefined;
+                }
+                let canAdd = false;
+                if (addPath) {
+                    // what newPos will be after we do an insertion:
+                    const addPathNewPos = addPath.oldPos - diagonalPath;
+                    canAdd = addPath && 0 <= addPathNewPos && addPathNewPos < newLen;
+                }
+                const canRemove = removePath && removePath.oldPos + 1 < oldLen;
+                if (!canAdd && !canRemove) {
+                    // If this path is a terminal then prune
+                    // @ts-expect-error - perf optimisation. This type-violating value will never be read.
+                    bestPath[diagonalPath] = undefined;
+                    continue;
+                }
+                // Select the diagonal that we want to branch from. We select the prior
+                // path whose position in the old string is the farthest from the origin
+                // and does not pass the bounds of the diff graph
+                if (!canRemove || (canAdd && removePath.oldPos < addPath.oldPos)) {
+                    basePath = this.addToPath(addPath, true, false, 0, options);
+                }
+                else {
+                    basePath = this.addToPath(removePath, false, true, 1, options);
+                }
+                newPos = this.extractCommon(basePath, newTokens, oldTokens, diagonalPath, options);
+                if (basePath.oldPos + 1 >= oldLen && newPos + 1 >= newLen) {
+                    // If we have hit the end of both strings, then we are done
+                    return done(this.buildValues(basePath.lastComponent, newTokens, oldTokens)) || true;
+                }
+                else {
+                    bestPath[diagonalPath] = basePath;
+                    if (basePath.oldPos + 1 >= oldLen) {
+                        maxDiagonalToConsider = Math.min(maxDiagonalToConsider, diagonalPath - 1);
+                    }
+                    if (newPos + 1 >= newLen) {
+                        minDiagonalToConsider = Math.max(minDiagonalToConsider, diagonalPath + 1);
+                    }
+                }
+            }
+            editLength++;
+        };
+        // Performs the length of edit iteration. Is a bit fugly as this has to support the
+        // sync and async mode which is never fun. Loops over execEditLength until a value
+        // is produced, or until the edit length exceeds options.maxEditLength (if given),
+        // in which case it will return undefined.
+        if (callback) {
+            (function exec() {
+                setTimeout(function () {
+                    if (editLength > maxEditLength || Date.now() > abortAfterTimestamp) {
+                        return callback(undefined);
+                    }
+                    if (!execEditLength()) {
+                        exec();
+                    }
+                }, 0);
+            }());
+        }
+        else {
+            while (editLength <= maxEditLength && Date.now() <= abortAfterTimestamp) {
+                const ret = execEditLength();
+                if (ret) {
+                    return ret;
+                }
+            }
+        }
+    }
+    addToPath(path, added, removed, oldPosInc, options) {
+        const last = path.lastComponent;
+        if (last && !options.oneChangePerToken && last.added === added && last.removed === removed) {
+            return {
+                oldPos: path.oldPos + oldPosInc,
+                lastComponent: { count: last.count + 1, added: added, removed: removed, previousComponent: last.previousComponent }
+            };
+        }
+        else {
+            return {
+                oldPos: path.oldPos + oldPosInc,
+                lastComponent: { count: 1, added: added, removed: removed, previousComponent: last }
+            };
+        }
+    }
+    extractCommon(basePath, newTokens, oldTokens, diagonalPath, options) {
+        const newLen = newTokens.length, oldLen = oldTokens.length;
+        let oldPos = basePath.oldPos, newPos = oldPos - diagonalPath, commonCount = 0;
+        while (newPos + 1 < newLen && oldPos + 1 < oldLen && this.equals(oldTokens[oldPos + 1], newTokens[newPos + 1], options)) {
+            newPos++;
+            oldPos++;
+            commonCount++;
+            if (options.oneChangePerToken) {
+                basePath.lastComponent = { count: 1, previousComponent: basePath.lastComponent, added: false, removed: false };
+            }
+        }
+        if (commonCount && !options.oneChangePerToken) {
+            basePath.lastComponent = { count: commonCount, previousComponent: basePath.lastComponent, added: false, removed: false };
+        }
+        basePath.oldPos = oldPos;
+        return newPos;
+    }
+    equals(left, right, options) {
+        if (options.comparator) {
+            return options.comparator(left, right);
+        }
+        else {
+            return left === right
+                || (!!options.ignoreCase && left.toLowerCase() === right.toLowerCase());
+        }
+    }
+    removeEmpty(array) {
+        const ret = [];
+        for (let i = 0; i < array.length; i++) {
+            if (array[i]) {
+                ret.push(array[i]);
+            }
+        }
+        return ret;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    castInput(value, options) {
+        return value;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    tokenize(value, options) {
+        return Array.from(value);
+    }
+    join(chars) {
+        // Assumes ValueT is string, which is the case for most subclasses.
+        // When it's false, e.g. in diffArrays, this method needs to be overridden (e.g. with a no-op)
+        // Yes, the casts are verbose and ugly, because this pattern - of having the base class SORT OF
+        // assume tokens and values are strings, but not completely - is weird and janky.
+        return chars.join('');
+    }
+    postProcess(changeObjects, 
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    options) {
+        return changeObjects;
+    }
+    get useLongestToken() {
+        return false;
+    }
+    buildValues(lastComponent, newTokens, oldTokens) {
+        // First we convert our linked list of components in reverse order to an
+        // array in the right order:
+        const components = [];
+        let nextComponent;
+        while (lastComponent) {
+            components.push(lastComponent);
+            nextComponent = lastComponent.previousComponent;
+            delete lastComponent.previousComponent;
+            lastComponent = nextComponent;
+        }
+        components.reverse();
+        const componentLen = components.length;
+        let componentPos = 0, newPos = 0, oldPos = 0;
+        for (; componentPos < componentLen; componentPos++) {
+            const component = components[componentPos];
+            if (!component.removed) {
+                if (!component.added && this.useLongestToken) {
+                    let value = newTokens.slice(newPos, newPos + component.count);
+                    value = value.map(function (value, i) {
+                        const oldValue = oldTokens[oldPos + i];
+                        return oldValue.length > value.length ? oldValue : value;
+                    });
+                    component.value = this.join(value);
+                }
+                else {
+                    component.value = this.join(newTokens.slice(newPos, newPos + component.count));
+                }
+                newPos += component.count;
+                // Common case
+                if (!component.added) {
+                    oldPos += component.count;
+                }
+            }
+            else {
+                component.value = this.join(oldTokens.slice(oldPos, oldPos + component.count));
+                oldPos += component.count;
+            }
+        }
+        return components;
+    }
+}
diff --git a/node_modules/diff/libesm/diff/character.js b/node_modules/diff/libesm/diff/character.js
new file mode 100644
index 0000000000000..ca70d065d37cb
--- /dev/null
+++ b/node_modules/diff/libesm/diff/character.js
@@ -0,0 +1,7 @@
+import Diff from './base.js';
+class CharacterDiff extends Diff {
+}
+export const characterDiff = new CharacterDiff();
+export function diffChars(oldStr, newStr, options) {
+    return characterDiff.diff(oldStr, newStr, options);
+}
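+// Editor's note: an illustrative sketch (not part of the library) of the two calling styles
+// supported by the diff() machinery in ./base.js.
+//
+//   diffChars('cat', 'cap');
+//   // => [{ value: 'ca', count: 2, added: false, removed: false },
+//   //     { value: 't', count: 1, added: false, removed: true },
+//   //     { value: 'p', count: 1, added: true, removed: false }]
+//
+//   // Passing a function (or options.callback) runs the diff asynchronously:
+//   diffChars('cat', 'cap', (changes) => console.log(changes.length));   // logs 3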
diff --git a/node_modules/diff/libesm/diff/css.js b/node_modules/diff/libesm/diff/css.js
new file mode 100644
index 0000000000000..2e7adcc3c2c3d
--- /dev/null
+++ b/node_modules/diff/libesm/diff/css.js
@@ -0,0 +1,10 @@
+import Diff from './base.js';
+class CssDiff extends Diff {
+    tokenize(value) {
+        return value.split(/([{}:;,]|\s+)/);
+    }
+}
+export const cssDiff = new CssDiff();
+export function diffCss(oldStr, newStr, options) {
+    return cssDiff.diff(oldStr, newStr, options);
+}
diff --git a/node_modules/diff/libesm/diff/json.js b/node_modules/diff/libesm/diff/json.js
new file mode 100644
index 0000000000000..be9f7617df997
--- /dev/null
+++ b/node_modules/diff/libesm/diff/json.js
@@ -0,0 +1,78 @@
+import Diff from './base.js';
+import { tokenize } from './line.js';
+class JsonDiff extends Diff {
+    constructor() {
+        super(...arguments);
+        this.tokenize = tokenize;
+    }
+    get useLongestToken() {
+        // Discriminate between two lines of pretty-printed, serialized JSON where one of them has a
+        // dangling comma and the other doesn't. Turns out including the dangling comma yields the nicest output:
+        return true;
+    }
+    castInput(value, options) {
+        const { undefinedReplacement, stringifyReplacer = (k, v) => typeof v === 'undefined' ? undefinedReplacement : v } = options;
+        return typeof value === 'string' ? value : JSON.stringify(canonicalize(value, null, null, stringifyReplacer), null, '  ');
+    }
+    equals(left, right, options) {
+        return super.equals(left.replace(/,([\r\n])/g, '$1'), right.replace(/,([\r\n])/g, '$1'), options);
+    }
+}
+export const jsonDiff = new JsonDiff();
+export function diffJson(oldStr, newStr, options) {
+    return jsonDiff.diff(oldStr, newStr, options);
+}
+// This function handles the presence of circular references by bailing out when encountering an
+// object that is already on the "stack" of items being processed. Accepts an optional replacer
+export function canonicalize(obj, stack, replacementStack, replacer, key) {
+    stack = stack || [];
+    replacementStack = replacementStack || [];
+    if (replacer) {
+        obj = replacer(key === undefined ? '' : key, obj);
+    }
+    let i;
+    for (i = 0; i < stack.length; i += 1) {
+        if (stack[i] === obj) {
+            return replacementStack[i];
+        }
+    }
+    let canonicalizedObj;
+    if ('[object Array]' === Object.prototype.toString.call(obj)) {
+        stack.push(obj);
+        canonicalizedObj = new Array(obj.length);
+        replacementStack.push(canonicalizedObj);
+        for (i = 0; i < obj.length; i += 1) {
+            canonicalizedObj[i] = canonicalize(obj[i], stack, replacementStack, replacer, String(i));
+        }
+        stack.pop();
+        replacementStack.pop();
+        return canonicalizedObj;
+    }
+    if (obj && obj.toJSON) {
+        obj = obj.toJSON();
+    }
+    if (typeof obj === 'object' && obj !== null) {
+        stack.push(obj);
+        canonicalizedObj = {};
+        replacementStack.push(canonicalizedObj);
+        const sortedKeys = [];
+        let key;
+        for (key in obj) {
+            /* istanbul ignore else */
+            if (Object.prototype.hasOwnProperty.call(obj, key)) {
+                sortedKeys.push(key);
+            }
+        }
+        sortedKeys.sort();
+        for (i = 0; i < sortedKeys.length; i += 1) {
+            key = sortedKeys[i];
+            canonicalizedObj[key] = canonicalize(obj[key], stack, replacementStack, replacer, key);
+        }
+        stack.pop();
+        replacementStack.pop();
+    }
+    else {
+        canonicalizedObj = obj;
+    }
+    return canonicalizedObj;
+}
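+// Editor's note: an illustrative sketch (not part of the library). Non-string inputs are
+// serialized via canonicalize (keys in sorted order) before the line diff runs.
+//
+//   diffJson({ b: 2, a: 1 }, { a: 1, b: 3 });
+//   // Both sides stringify with "a" before "b", so only the "b" line is reported
+//   // as removed and added.
+//
+//   canonicalize({ b: 2, a: 1 });   // => object whose keys are emitted in sorted order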
diff --git a/node_modules/diff/libesm/diff/line.js b/node_modules/diff/libesm/diff/line.js
new file mode 100644
index 0000000000000..0675d4fb003f9
--- /dev/null
+++ b/node_modules/diff/libesm/diff/line.js
@@ -0,0 +1,65 @@
+import Diff from './base.js';
+import { generateOptions } from '../util/params.js';
+class LineDiff extends Diff {
+    constructor() {
+        super(...arguments);
+        this.tokenize = tokenize;
+    }
+    equals(left, right, options) {
+        // If we're ignoring whitespace, we need to normalise lines by stripping
+        // whitespace before checking equality. (This has an annoying interaction
+        // with newlineIsToken that requires special handling: if newlines get their
+        // own token, then we DON'T want to trim the *newline* tokens down to empty
+        // strings, since this would cause us to treat whitespace-only line content
+        // as equal to a separator between lines, which would be weird and
+        // inconsistent with the documented behavior of the options.)
+        if (options.ignoreWhitespace) {
+            if (!options.newlineIsToken || !left.includes('\n')) {
+                left = left.trim();
+            }
+            if (!options.newlineIsToken || !right.includes('\n')) {
+                right = right.trim();
+            }
+        }
+        else if (options.ignoreNewlineAtEof && !options.newlineIsToken) {
+            if (left.endsWith('\n')) {
+                left = left.slice(0, -1);
+            }
+            if (right.endsWith('\n')) {
+                right = right.slice(0, -1);
+            }
+        }
+        return super.equals(left, right, options);
+    }
+}
+export const lineDiff = new LineDiff();
+export function diffLines(oldStr, newStr, options) {
+    return lineDiff.diff(oldStr, newStr, options);
+}
+export function diffTrimmedLines(oldStr, newStr, options) {
+    options = generateOptions(options, { ignoreWhitespace: true });
+    return lineDiff.diff(oldStr, newStr, options);
+}
+// Exported standalone so it can be used from jsonDiff too.
+export function tokenize(value, options) {
+    if (options.stripTrailingCr) {
+        // remove one \r before \n to match GNU diff's --strip-trailing-cr behavior
+        value = value.replace(/\r\n/g, '\n');
+    }
+    const retLines = [], linesAndNewlines = value.split(/(\n|\r\n)/);
+    // Ignore the final empty token that occurs if the string ends with a new line
+    if (!linesAndNewlines[linesAndNewlines.length - 1]) {
+        linesAndNewlines.pop();
+    }
+    // Merge the content and line separators into single tokens
+    for (let i = 0; i < linesAndNewlines.length; i++) {
+        const line = linesAndNewlines[i];
+        if (i % 2 && !options.newlineIsToken) {
+            retLines[retLines.length - 1] += line;
+        }
+        else {
+            retLines.push(line);
+        }
+    }
+    return retLines;
+}
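A small sketch of diffLines with the ignoreWhitespace handling implemented in the equals override above; it assumes 'diff' resolves to this build and that change objects carry value/added/removed as elsewhere in the library:

import { diffLines } from 'diff';

const oldText = 'one\ntwo\nthree\n';
const newText = 'one\ntwo\nTHREE \n';

for (const part of diffLines(oldText, newText, { ignoreWhitespace: true })) {
  const tag = part.added ? '+' : part.removed ? '-' : ' ';
  process.stdout.write(tag + part.value);
}
// 'three' vs 'THREE ' still differs by case, so it shows up as a -/+ pair;
// with identical casing the trailing space alone would be ignored.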
diff --git a/node_modules/diff/libesm/diff/sentence.js b/node_modules/diff/libesm/diff/sentence.js
new file mode 100644
index 0000000000000..db37010ef6472
--- /dev/null
+++ b/node_modules/diff/libesm/diff/sentence.js
@@ -0,0 +1,43 @@
+import Diff from './base.js';
+function isSentenceEndPunct(char) {
+    return char == '.' || char == '!' || char == '?';
+}
+class SentenceDiff extends Diff {
+    tokenize(value) {
+        var _a;
+        // If in future we drop support for environments that don't support lookbehinds, we can replace
+        // this entire function with:
+        //     return value.split(/(?<=[.!?])(\s+|$)/);
+        // but until then, for similar reasons to the trailingWs function in string.ts, we are forced
+        // to do this verbosely "by hand" instead of using a regex.
+        const result = [];
+        let tokenStartI = 0;
+        for (let i = 0; i < value.length; i++) {
+            if (i == value.length - 1) {
+                result.push(value.slice(tokenStartI));
+                break;
+            }
+            if (isSentenceEndPunct(value[i]) && value[i + 1].match(/\s/)) {
+                // We've hit a sentence break - i.e. a punctuation mark followed by whitespace.
+                // We now want to push TWO tokens to the result:
+                // 1. the sentence
+                result.push(value.slice(tokenStartI, i + 1));
+                // 2. the whitespace
+                i = tokenStartI = i + 1;
+                while ((_a = value[i + 1]) === null || _a === void 0 ? void 0 : _a.match(/\s/)) {
+                    i++;
+                }
+                result.push(value.slice(tokenStartI, i + 1));
+                // Then the next token (a sentence) starts on the character after the whitespace.
+                // (It's okay if this is off the end of the string - then the outer loop will terminate
+                // here anyway.)
+                tokenStartI = i + 1;
+            }
+        }
+        return result;
+    }
+}
+export const sentenceDiff = new SentenceDiff();
+export function diffSentences(oldStr, newStr, options) {
+    return sentenceDiff.diff(oldStr, newStr, options);
+}
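A quick illustration of the sentence tokenization above (punctuation ends a sentence, the following whitespace becomes its own token), assuming the package resolves as 'diff':

import { diffSentences } from 'diff';

const parts = diffSentences(
  'It works. Mostly fine.',
  'It works. Fine, mostly.'
);
// Expect an unchanged part covering 'It works.' plus the following space,
// then a removed 'Mostly fine.' and an added 'Fine, mostly.'.
console.log(parts.map(p => [p.value, !!p.added, !!p.removed]));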
diff --git a/node_modules/diff/libesm/diff/word.js b/node_modules/diff/libesm/diff/word.js
new file mode 100644
index 0000000000000..5f8e03a09283e
--- /dev/null
+++ b/node_modules/diff/libesm/diff/word.js
@@ -0,0 +1,276 @@
+import Diff from './base.js';
+import { longestCommonPrefix, longestCommonSuffix, replacePrefix, replaceSuffix, removePrefix, removeSuffix, maximumOverlap, leadingWs, trailingWs } from '../util/string.js';
+// Based on https://en.wikipedia.org/wiki/Latin_script_in_Unicode
+//
+// Ranges and exceptions:
+// Latin-1 Supplement, 0080–00FF
+//  - U+00D7  × Multiplication sign
+//  - U+00F7  ÷ Division sign
+// Latin Extended-A, 0100–017F
+// Latin Extended-B, 0180–024F
+// IPA Extensions, 0250–02AF
+// Spacing Modifier Letters, 02B0–02FF
+//  - U+02C7  ˇ ˇ  Caron
+//  - U+02D8  ˘ ˘  Breve
+//  - U+02D9  ˙ ˙  Dot Above
+//  - U+02DA  ˚ ˚  Ring Above
+//  - U+02DB  ˛ ˛  Ogonek
+//  - U+02DC  ˜ ˜  Small Tilde
+//  - U+02DD  ˝ ˝  Double Acute Accent
+// Latin Extended Additional, 1E00–1EFF
+const extendedWordChars = 'a-zA-Z0-9_\\u{C0}-\\u{FF}\\u{D8}-\\u{F6}\\u{F8}-\\u{2C6}\\u{2C8}-\\u{2D7}\\u{2DE}-\\u{2FF}\\u{1E00}-\\u{1EFF}';
+// Each token is one of the following:
+// - A punctuation mark plus the surrounding whitespace
+// - A word plus the surrounding whitespace
+// - Pure whitespace (but only in the special case where the entire text
+//   is just whitespace)
+//
+// We have to include surrounding whitespace in the tokens because the two
+// alternative approaches produce horribly broken results:
+// * If we just discard the whitespace, we can't fully reproduce the original
+//   text from the sequence of tokens and any attempt to render the diff will
+//   get the whitespace wrong.
+// * If we have separate tokens for whitespace, then in a typical text every
+//   second token will be a single space character. But this often results in
+//   the optimal diff between two texts being a perverse one that preserves
+//   the spaces between words but deletes and reinserts actual common words.
+//   See https://github.com/kpdecker/jsdiff/issues/160#issuecomment-1866099640
+//   for an example.
+//
+// Keeping the surrounding whitespace of course has implications for .equals
+// and .join, not just .tokenize.
+// This regex does NOT fully implement the tokenization rules described above.
+// Instead, it gives runs of whitespace their own "token". The tokenize method
+// then handles stitching whitespace tokens onto adjacent word or punctuation
+// tokens.
+const tokenizeIncludingWhitespace = new RegExp(`[${extendedWordChars}]+|\\s+|[^${extendedWordChars}]`, 'ug');
+class WordDiff extends Diff {
+    equals(left, right, options) {
+        if (options.ignoreCase) {
+            left = left.toLowerCase();
+            right = right.toLowerCase();
+        }
+        return left.trim() === right.trim();
+    }
+    tokenize(value, options = {}) {
+        let parts;
+        if (options.intlSegmenter) {
+            const segmenter = options.intlSegmenter;
+            if (segmenter.resolvedOptions().granularity != 'word') {
+                throw new Error('The segmenter passed must have a granularity of "word"');
+            }
+            parts = Array.from(segmenter.segment(value), segment => segment.segment);
+        }
+        else {
+            parts = value.match(tokenizeIncludingWhitespace) || [];
+        }
+        const tokens = [];
+        let prevPart = null;
+        parts.forEach(part => {
+            if ((/\s/).test(part)) {
+                if (prevPart == null) {
+                    tokens.push(part);
+                }
+                else {
+                    tokens.push(tokens.pop() + part);
+                }
+            }
+            else if (prevPart != null && (/\s/).test(prevPart)) {
+                if (tokens[tokens.length - 1] == prevPart) {
+                    tokens.push(tokens.pop() + part);
+                }
+                else {
+                    tokens.push(prevPart + part);
+                }
+            }
+            else {
+                tokens.push(part);
+            }
+            prevPart = part;
+        });
+        return tokens;
+    }
+    join(tokens) {
+        // Tokens being joined here will always have appeared consecutively in the
+        // same text, so we can simply strip off the leading whitespace from all the
+        // tokens except the first (and except any whitespace-only tokens - but such
+        // a token will always be the first and only token anyway) and then join them
+        // and the whitespace around words and punctuation will end up correct.
+        return tokens.map((token, i) => {
+            if (i == 0) {
+                return token;
+            }
+            else {
+                return token.replace((/^\s+/), '');
+            }
+        }).join('');
+    }
+    postProcess(changes, options) {
+        if (!changes || options.oneChangePerToken) {
+            return changes;
+        }
+        let lastKeep = null;
+        // Change objects representing any insertion or deletion since the last
+        // "keep" change object. There can be at most one of each.
+        let insertion = null;
+        let deletion = null;
+        changes.forEach(change => {
+            if (change.added) {
+                insertion = change;
+            }
+            else if (change.removed) {
+                deletion = change;
+            }
+            else {
+                if (insertion || deletion) { // May be false at start of text
+                    dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, change);
+                }
+                lastKeep = change;
+                insertion = null;
+                deletion = null;
+            }
+        });
+        if (insertion || deletion) {
+            dedupeWhitespaceInChangeObjects(lastKeep, deletion, insertion, null);
+        }
+        return changes;
+    }
+}
+export const wordDiff = new WordDiff();
+export function diffWords(oldStr, newStr, options) {
+    // This option has never been documented and never will be (it's clearer to
+    // just call `diffWordsWithSpace` directly if you need that behavior), but
+    // has existed in jsdiff for a long time, so we retain support for it here
+    // for the sake of backwards compatibility.
+    if ((options === null || options === void 0 ? void 0 : options.ignoreWhitespace) != null && !options.ignoreWhitespace) {
+        return diffWordsWithSpace(oldStr, newStr, options);
+    }
+    return wordDiff.diff(oldStr, newStr, options);
+}
+function dedupeWhitespaceInChangeObjects(startKeep, deletion, insertion, endKeep) {
+    // Before returning, we tidy up the leading and trailing whitespace of the
+    // change objects to eliminate cases where trailing whitespace in one object
+    // is repeated as leading whitespace in the next.
+    // Below are examples of the outcomes we want here to explain the code.
+    // I=insert, K=keep, D=delete
+    // 1. diffing 'foo bar baz' vs 'foo baz'
+    //    Prior to cleanup, we have K:'foo ' D:' bar ' K:' baz'
+    //    After cleanup, we want:   K:'foo ' D:'bar ' K:'baz'
+    //
+    // 2. Diffing 'foo bar baz' vs 'foo qux baz'
+    //    Prior to cleanup, we have K:'foo ' D:' bar ' I:' qux ' K:' baz'
+    //    After cleanup, we want K:'foo ' D:'bar' I:'qux' K:' baz'
+    //
+    // 3. Diffing 'foo\nbar baz' vs 'foo baz'
+    //    Prior to cleanup, we have K:'foo ' D:'\nbar ' K:' baz'
+    //    After cleanup, we want K:'foo' D:'\nbar' K:' baz'
+    //
+    // 4. Diffing 'foo baz' vs 'foo\nbar baz'
+    //    Prior to cleanup, we have K:'foo\n' I:'\nbar ' K:' baz'
+    //    After cleanup, we ideally want K:'foo' I:'\nbar' K:' baz'
+    //    but don't actually manage this currently (the pre-cleanup change
+    //    objects don't contain enough information to make it possible).
+    //
+    // 5. Diffing 'foo   bar baz' vs 'foo  baz'
+    //    Prior to cleanup, we have K:'foo  ' D:'   bar ' K:'  baz'
+    //    After cleanup, we want K:'foo  ' D:' bar ' K:'baz'
+    //
+    // Our handling is unavoidably imperfect in the case where there's a single
+    // indel between keeps and the whitespace has changed. For instance, consider
+    // diffing 'foo\tbar\nbaz' vs 'foo baz'. Unless we create an extra change
+    // object to represent the insertion of the space character (which isn't even
+    // a token), we have no way to avoid losing information about the texts'
+    // original whitespace in the result we return. Still, we do our best to
+    // output something that will look sensible if we e.g. print it with
+    // insertions in green and deletions in red.
+    // Between two "keep" change objects (or before the first or after the last
+    // change object), we can have either:
+    // * A "delete" followed by an "insert"
+    // * Just an "insert"
+    // * Just a "delete"
+    // We handle the three cases separately.
+    if (deletion && insertion) {
+        const oldWsPrefix = leadingWs(deletion.value);
+        const oldWsSuffix = trailingWs(deletion.value);
+        const newWsPrefix = leadingWs(insertion.value);
+        const newWsSuffix = trailingWs(insertion.value);
+        if (startKeep) {
+            const commonWsPrefix = longestCommonPrefix(oldWsPrefix, newWsPrefix);
+            startKeep.value = replaceSuffix(startKeep.value, newWsPrefix, commonWsPrefix);
+            deletion.value = removePrefix(deletion.value, commonWsPrefix);
+            insertion.value = removePrefix(insertion.value, commonWsPrefix);
+        }
+        if (endKeep) {
+            const commonWsSuffix = longestCommonSuffix(oldWsSuffix, newWsSuffix);
+            endKeep.value = replacePrefix(endKeep.value, newWsSuffix, commonWsSuffix);
+            deletion.value = removeSuffix(deletion.value, commonWsSuffix);
+            insertion.value = removeSuffix(insertion.value, commonWsSuffix);
+        }
+    }
+    else if (insertion) {
+        // The whitespaces all reflect what was in the new text rather than
+        // the old, so we essentially have no information about whitespace
+        // insertion or deletion. We just want to dedupe the whitespace.
+        // We do that by having each change object keep its trailing
+        // whitespace and deleting duplicate leading whitespace where
+        // present.
+        if (startKeep) {
+            const ws = leadingWs(insertion.value);
+            insertion.value = insertion.value.substring(ws.length);
+        }
+        if (endKeep) {
+            const ws = leadingWs(endKeep.value);
+            endKeep.value = endKeep.value.substring(ws.length);
+        }
+        // otherwise we've got a deletion and no insertion
+    }
+    else if (startKeep && endKeep) {
+        const newWsFull = leadingWs(endKeep.value), delWsStart = leadingWs(deletion.value), delWsEnd = trailingWs(deletion.value);
+        // Any whitespace that comes straight after startKeep in both the old and
+        // new texts, assign to startKeep and remove from the deletion.
+        const newWsStart = longestCommonPrefix(newWsFull, delWsStart);
+        deletion.value = removePrefix(deletion.value, newWsStart);
+        // Any whitespace that comes straight before endKeep in both the old and
+        // new texts, and hasn't already been assigned to startKeep, assign to
+        // endKeep and remove from the deletion.
+        const newWsEnd = longestCommonSuffix(removePrefix(newWsFull, newWsStart), delWsEnd);
+        deletion.value = removeSuffix(deletion.value, newWsEnd);
+        endKeep.value = replacePrefix(endKeep.value, newWsFull, newWsEnd);
+        // If there's any whitespace from the new text that HASN'T already been
+        // assigned, assign it to the start:
+        startKeep.value = replaceSuffix(startKeep.value, newWsFull, newWsFull.slice(0, newWsFull.length - newWsEnd.length));
+    }
+    else if (endKeep) {
+        // We are at the start of the text. Preserve all the whitespace on
+        // endKeep, and just remove whitespace from the end of deletion to the
+        // extent that it overlaps with the start of endKeep.
+        const endKeepWsPrefix = leadingWs(endKeep.value);
+        const deletionWsSuffix = trailingWs(deletion.value);
+        const overlap = maximumOverlap(deletionWsSuffix, endKeepWsPrefix);
+        deletion.value = removeSuffix(deletion.value, overlap);
+    }
+    else if (startKeep) {
+        // We are at the END of the text. Preserve all the whitespace on
+        // startKeep, and just remove whitespace from the start of deletion to
+        // the extent that it overlaps with the end of startKeep.
+        const startKeepWsSuffix = trailingWs(startKeep.value);
+        const deletionWsPrefix = leadingWs(deletion.value);
+        const overlap = maximumOverlap(startKeepWsSuffix, deletionWsPrefix);
+        deletion.value = removePrefix(deletion.value, overlap);
+    }
+}
+class WordsWithSpaceDiff extends Diff {
+    tokenize(value) {
+        // Slightly different to the tokenizeIncludingWhitespace regex used above in
+        // that this one treats each individual newline as a distinct tokens, rather
+        // than merging them into other surrounding whitespace. This was requested
+        // in https://github.com/kpdecker/jsdiff/issues/180 &
+        //    https://github.com/kpdecker/jsdiff/issues/211
+        const regex = new RegExp(`(\\r?\\n)|[${extendedWordChars}]+|[^\\S\\n\\r]+|[^${extendedWordChars}]`, 'ug');
+        return value.match(regex) || [];
+    }
+}
+export const wordsWithSpaceDiff = new WordsWithSpaceDiff();
+export function diffWordsWithSpace(oldStr, newStr, options) {
+    return wordsWithSpaceDiff.diff(oldStr, newStr, options);
+}
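A small sketch contrasting diffWords (whitespace folded into word tokens and ignored by equals, as described above) with diffWordsWithSpace; it assumes the package resolves as 'diff':

import { diffWords, diffWordsWithSpace } from 'diff';

const oldStr = 'beep  boop';
const newStr = 'beep boop';

console.log(diffWords(oldStr, newStr).length);          // 1: a purely-whitespace difference is folded away
console.log(diffWordsWithSpace(oldStr, newStr).length); // 4: 'beep', removed '  ', added ' ', 'boop'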
diff --git a/node_modules/diff/libesm/index.js b/node_modules/diff/libesm/index.js
new file mode 100644
index 0000000000000..48c8a7af6a412
--- /dev/null
+++ b/node_modules/diff/libesm/index.js
@@ -0,0 +1,30 @@
+/* See LICENSE file for terms of use */
+/*
+ * Text diff implementation.
+ *
+ * This library supports the following APIs:
+ * Diff.diffChars: Character by character diff
+ * Diff.diffWords: Word (as defined by \b regex) diff which ignores whitespace
+ * Diff.diffLines: Line based diff
+ *
+ * Diff.diffCss: Diff targeted at CSS content
+ *
+ * These methods are based on the implementation proposed in
+ * "An O(ND) Difference Algorithm and its Variations" (Myers, 1986).
+ * http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.4.6927
+ */
+import Diff from './diff/base.js';
+import { diffChars, characterDiff } from './diff/character.js';
+import { diffWords, diffWordsWithSpace, wordDiff, wordsWithSpaceDiff } from './diff/word.js';
+import { diffLines, diffTrimmedLines, lineDiff } from './diff/line.js';
+import { diffSentences, sentenceDiff } from './diff/sentence.js';
+import { diffCss, cssDiff } from './diff/css.js';
+import { diffJson, canonicalize, jsonDiff } from './diff/json.js';
+import { diffArrays, arrayDiff } from './diff/array.js';
+import { applyPatch, applyPatches } from './patch/apply.js';
+import { parsePatch } from './patch/parse.js';
+import { reversePatch } from './patch/reverse.js';
+import { structuredPatch, createTwoFilesPatch, createPatch, formatPatch } from './patch/create.js';
+import { convertChangesToDMP } from './convert/dmp.js';
+import { convertChangesToXML } from './convert/xml.js';
+export { Diff, diffChars, characterDiff, diffWords, wordDiff, diffWordsWithSpace, wordsWithSpaceDiff, diffLines, lineDiff, diffTrimmedLines, diffSentences, sentenceDiff, diffCss, cssDiff, diffJson, jsonDiff, diffArrays, arrayDiff, structuredPatch, createTwoFilesPatch, createPatch, formatPatch, applyPatch, applyPatches, parsePatch, reversePatch, convertChangesToDMP, convertChangesToXML, canonicalize };
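Since the exports map in package.json (updated at the end of this patch) points `import` at this file, this is the surface an ESM consumer sees; a tiny sketch with illustrative inputs:

import { diffChars, createTwoFilesPatch } from 'diff';

console.log(diffChars('kitten', 'sitting').length);        // number of change objects
console.log(createTwoFilesPatch('a.txt', 'b.txt', 'old\n', 'new\n'));   // unified-diff text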
diff --git a/node_modules/diff/libesm/package.json b/node_modules/diff/libesm/package.json
new file mode 100644
index 0000000000000..2bd6e5099f38c
--- /dev/null
+++ b/node_modules/diff/libesm/package.json
@@ -0,0 +1 @@
+{"type":"module","sideEffects":false}
\ No newline at end of file
diff --git a/node_modules/diff/libesm/patch/apply.js b/node_modules/diff/libesm/patch/apply.js
new file mode 100644
index 0000000000000..fe2e8db5c465d
--- /dev/null
+++ b/node_modules/diff/libesm/patch/apply.js
@@ -0,0 +1,257 @@
+import { hasOnlyWinLineEndings, hasOnlyUnixLineEndings } from '../util/string.js';
+import { isWin, isUnix, unixToWin, winToUnix } from './line-endings.js';
+import { parsePatch } from './parse.js';
+import distanceIterator from '../util/distance-iterator.js';
+/**
+ * attempts to apply a unified diff patch.
+ *
+ * Hunks are applied first to last.
+ * `applyPatch` first tries to apply the first hunk at the line number specified in the hunk header, and with all context lines matching exactly.
+ * If that fails, it tries scanning backwards and forwards, one line at a time, to find a place to apply the hunk where the context lines match exactly.
+ * If that still fails, and `fuzzFactor` is greater than zero, it increments the maximum number of mismatches (missing, extra, or changed context lines) that there can be between the hunk context and a region where we are trying to apply the patch such that the hunk will still be considered to match.
+ * Regardless of `fuzzFactor`, lines to be deleted in the hunk *must* be present for a hunk to match, and the context lines *immediately* before and after an insertion must match exactly.
+ *
+ * Once a hunk is successfully fitted, the process begins again with the next hunk.
+ * Regardless of `fuzzFactor`, later hunks must be applied later in the file than earlier hunks.
+ *
+ * If a hunk cannot be successfully fitted *anywhere* with fewer than `fuzzFactor` mismatches, `applyPatch` fails and returns `false`.
+ *
+ * If a hunk is successfully fitted but not at the line number specified by the hunk header, all subsequent hunks have their target line number adjusted accordingly.
+ * (e.g. if the first hunk is applied 10 lines below where the hunk header said it should fit, `applyPatch` will *start* looking for somewhere to apply the second hunk 10 lines below where its hunk header says it goes.)
+ *
+ * If the patch was applied successfully, returns a string containing the patched text.
+ * If the patch could not be applied (because some hunks in the patch couldn't be fitted to the text in `source`), `applyPatch` returns false.
+ *
+ * @param patch a string diff or the output from the `parsePatch` or `structuredPatch` methods.
+ */
+export function applyPatch(source, patch, options = {}) {
+    let patches;
+    if (typeof patch === 'string') {
+        patches = parsePatch(patch);
+    }
+    else if (Array.isArray(patch)) {
+        patches = patch;
+    }
+    else {
+        patches = [patch];
+    }
+    if (patches.length > 1) {
+        throw new Error('applyPatch only works with a single input.');
+    }
+    return applyStructuredPatch(source, patches[0], options);
+}
+function applyStructuredPatch(source, patch, options = {}) {
+    if (options.autoConvertLineEndings || options.autoConvertLineEndings == null) {
+        if (hasOnlyWinLineEndings(source) && isUnix(patch)) {
+            patch = unixToWin(patch);
+        }
+        else if (hasOnlyUnixLineEndings(source) && isWin(patch)) {
+            patch = winToUnix(patch);
+        }
+    }
+    // Apply the diff to the input
+    const lines = source.split('\n'), hunks = patch.hunks, compareLine = options.compareLine || ((lineNumber, line, operation, patchContent) => line === patchContent), fuzzFactor = options.fuzzFactor || 0;
+    let minLine = 0;
+    if (fuzzFactor < 0 || !Number.isInteger(fuzzFactor)) {
+        throw new Error('fuzzFactor must be a non-negative integer');
+    }
+    // Special case for empty patch.
+    if (!hunks.length) {
+        return source;
+    }
+    // Before anything else, handle EOFNL insertion/removal. If the patch tells us to make a change
+    // to the EOFNL that is redundant/impossible - i.e. to remove a newline that's not there, or add a
+    // newline that already exists - then we either return false and fail to apply the patch (if
+    // fuzzFactor is 0) or simply ignore the problem and do nothing (if fuzzFactor is >0).
+    // If we do need to remove/add a newline at EOF, this will always be in the final hunk:
+    let prevLine = '', removeEOFNL = false, addEOFNL = false;
+    for (let i = 0; i < hunks[hunks.length - 1].lines.length; i++) {
+        const line = hunks[hunks.length - 1].lines[i];
+        if (line[0] == '\\') {
+            if (prevLine[0] == '+') {
+                removeEOFNL = true;
+            }
+            else if (prevLine[0] == '-') {
+                addEOFNL = true;
+            }
+        }
+        prevLine = line;
+    }
+    if (removeEOFNL) {
+        if (addEOFNL) {
+            // This means the final line gets changed but doesn't have a trailing newline in either the
+            // original or patched version. In that case, we do nothing if fuzzFactor > 0, and if
+            // fuzzFactor is 0, we simply validate that the source file has no trailing newline.
+            if (!fuzzFactor && lines[lines.length - 1] == '') {
+                return false;
+            }
+        }
+        else if (lines[lines.length - 1] == '') {
+            lines.pop();
+        }
+        else if (!fuzzFactor) {
+            return false;
+        }
+    }
+    else if (addEOFNL) {
+        if (lines[lines.length - 1] != '') {
+            lines.push('');
+        }
+        else if (!fuzzFactor) {
+            return false;
+        }
+    }
+    /**
+     * Checks if the hunk can be made to fit at the provided location with at most `maxErrors`
+     * insertions, substitutions, or deletions, while ensuring also that:
+     * - lines deleted in the hunk match exactly, and
+     * - wherever an insertion operation or block of insertion operations appears in the hunk, the
+     *   immediately preceding and following lines of context match exactly
+     *
+     * `toPos` should be set such that lines[toPos] is meant to match hunkLines[0].
+     *
+     * If the hunk can be applied, returns an object with properties `oldLineLastI` and
+     * `replacementLines`. Otherwise, returns null.
+     */
+    function applyHunk(hunkLines, toPos, maxErrors, hunkLinesI = 0, lastContextLineMatched = true, patchedLines = [], patchedLinesLength = 0) {
+        let nConsecutiveOldContextLines = 0;
+        let nextContextLineMustMatch = false;
+        for (; hunkLinesI < hunkLines.length; hunkLinesI++) {
+            const hunkLine = hunkLines[hunkLinesI], operation = (hunkLine.length > 0 ? hunkLine[0] : ' '), content = (hunkLine.length > 0 ? hunkLine.substr(1) : hunkLine);
+            if (operation === '-') {
+                if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                    toPos++;
+                    nConsecutiveOldContextLines = 0;
+                }
+                else {
+                    if (!maxErrors || lines[toPos] == null) {
+                        return null;
+                    }
+                    patchedLines[patchedLinesLength] = lines[toPos];
+                    return applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1);
+                }
+            }
+            if (operation === '+') {
+                if (!lastContextLineMatched) {
+                    return null;
+                }
+                patchedLines[patchedLinesLength] = content;
+                patchedLinesLength++;
+                nConsecutiveOldContextLines = 0;
+                nextContextLineMustMatch = true;
+            }
+            if (operation === ' ') {
+                nConsecutiveOldContextLines++;
+                patchedLines[patchedLinesLength] = lines[toPos];
+                if (compareLine(toPos + 1, lines[toPos], operation, content)) {
+                    patchedLinesLength++;
+                    lastContextLineMatched = true;
+                    nextContextLineMustMatch = false;
+                    toPos++;
+                }
+                else {
+                    if (nextContextLineMustMatch || !maxErrors) {
+                        return null;
+                    }
+                    // Consider 3 possibilities in sequence:
+                    // 1. lines contains a *substitution* not included in the patch context, or
+                    // 2. lines contains an *insertion* not included in the patch context, or
+                    // 3. lines contains a *deletion* not included in the patch context
+                    // The first two options are of course only possible if the line from lines is non-null -
+                    // i.e. only option 3 is possible if we've overrun the end of the old file.
+                    return (lines[toPos] && (applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength + 1) || applyHunk(hunkLines, toPos + 1, maxErrors - 1, hunkLinesI, false, patchedLines, patchedLinesLength + 1)) || applyHunk(hunkLines, toPos, maxErrors - 1, hunkLinesI + 1, false, patchedLines, patchedLinesLength));
+                }
+            }
+        }
+        // Before returning, trim any unmodified context lines off the end of patchedLines and reduce
+        // toPos (and thus oldLineLastI) accordingly. This allows later hunks to be applied to a region
+        // that starts in this hunk's trailing context.
+        patchedLinesLength -= nConsecutiveOldContextLines;
+        toPos -= nConsecutiveOldContextLines;
+        patchedLines.length = patchedLinesLength;
+        return {
+            patchedLines,
+            oldLineLastI: toPos - 1
+        };
+    }
+    const resultLines = [];
+    // Search best fit offsets for each hunk based on the previous ones
+    let prevHunkOffset = 0;
+    for (let i = 0; i < hunks.length; i++) {
+        const hunk = hunks[i];
+        let hunkResult;
+        const maxLine = lines.length - hunk.oldLines + fuzzFactor;
+        let toPos;
+        for (let maxErrors = 0; maxErrors <= fuzzFactor; maxErrors++) {
+            toPos = hunk.oldStart + prevHunkOffset - 1;
+            const iterator = distanceIterator(toPos, minLine, maxLine);
+            for (; toPos !== undefined; toPos = iterator()) {
+                hunkResult = applyHunk(hunk.lines, toPos, maxErrors);
+                if (hunkResult) {
+                    break;
+                }
+            }
+            if (hunkResult) {
+                break;
+            }
+        }
+        if (!hunkResult) {
+            return false;
+        }
+        // Copy everything from the end of where we applied the last hunk to the start of this hunk
+        for (let i = minLine; i < toPos; i++) {
+            resultLines.push(lines[i]);
+        }
+        // Add the lines produced by applying the hunk:
+        for (let i = 0; i < hunkResult.patchedLines.length; i++) {
+            const line = hunkResult.patchedLines[i];
+            resultLines.push(line);
+        }
+        // Set lower text limit to end of the current hunk, so next ones don't try
+        // to fit over already patched text
+        minLine = hunkResult.oldLineLastI + 1;
+        // Note the offset between where the patch said the hunk should've applied and where we
+        // applied it, so we can adjust future hunks accordingly:
+        prevHunkOffset = toPos + 1 - hunk.oldStart;
+    }
+    // Copy over the rest of the lines from the old text
+    for (let i = minLine; i < lines.length; i++) {
+        resultLines.push(lines[i]);
+    }
+    return resultLines.join('\n');
+}
+/**
+ * applies one or more patches.
+ *
+ * `patch` may be either an array of structured patch objects, or a string representing a patch in unified diff format (which may patch one or more files).
+ *
+ * This method will iterate over the contents of the patch and apply to data provided through callbacks. The general flow for each patch index is:
+ *
+ * - `options.loadFile(index, callback)` is called. The caller should then load the contents of the file and then pass that to the `callback(err, data)` callback. Passing an `err` will terminate further patch execution.
+ * - `options.patched(index, content, callback)` is called once the patch has been applied. `content` will be the return value from `applyPatch`. When it's ready, the caller should call `callback(err)` callback. Passing an `err` will terminate further patch execution.
+ *
+ * Once all patches have been applied or an error occurs, the `options.complete(err)` callback is made.
+ */
+export function applyPatches(uniDiff, options) {
+    const spDiff = typeof uniDiff === 'string' ? parsePatch(uniDiff) : uniDiff;
+    let currentIndex = 0;
+    function processIndex() {
+        const index = spDiff[currentIndex++];
+        if (!index) {
+            return options.complete();
+        }
+        options.loadFile(index, function (err, data) {
+            if (err) {
+                return options.complete(err);
+            }
+            const updatedContent = applyPatch(data, index, options);
+            options.patched(index, updatedContent, function (err) {
+                if (err) {
+                    return options.complete(err);
+                }
+                processIndex();
+            });
+        });
+    }
+    processIndex();
+}
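A short sketch of the applyPatch behaviour documented above, including fuzzFactor tolerating a drifted context line that is not adjacent to an insertion; it assumes 'diff' resolves to this build and the file name is illustrative:

import { createPatch, applyPatch } from 'diff';

const oldText = 'a\nb\nc\nd\ne\n';
const newText = 'a\nb\nC\nd\ne\n';
const patch = createPatch('example.txt', oldText, newText);

console.log(applyPatch(oldText, patch) === newText);        // true: exact context, hunk applies in place

const drifted = 'A\nb\nc\nd\ne\n';                          // first context line no longer matches
console.log(applyPatch(drifted, patch));                    // false: zero fuzz requires exact context
console.log(applyPatch(drifted, patch, { fuzzFactor: 1 })); // 'A\nb\nC\nd\ne\n': one mismatch tolerated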
diff --git a/node_modules/diff/libesm/patch/create.js b/node_modules/diff/libesm/patch/create.js
new file mode 100644
index 0000000000000..7019c3c5ec46e
--- /dev/null
+++ b/node_modules/diff/libesm/patch/create.js
@@ -0,0 +1,201 @@
+import { diffLines } from '../diff/line.js';
+export function structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
+    let optionsObj;
+    if (!options) {
+        optionsObj = {};
+    }
+    else if (typeof options === 'function') {
+        optionsObj = { callback: options };
+    }
+    else {
+        optionsObj = options;
+    }
+    if (typeof optionsObj.context === 'undefined') {
+        optionsObj.context = 4;
+    }
+    // We copy this into its own variable to placate TypeScript, which thinks
+    // optionsObj.context might be undefined in the callbacks below.
+    const context = optionsObj.context;
+    // @ts-expect-error (runtime check for something that is correctly a static type error)
+    if (optionsObj.newlineIsToken) {
+        throw new Error('newlineIsToken may not be used with patch-generation functions, only with diffing functions');
+    }
+    if (!optionsObj.callback) {
+        return diffLinesResultToPatch(diffLines(oldStr, newStr, optionsObj));
+    }
+    else {
+        const { callback } = optionsObj;
+        diffLines(oldStr, newStr, Object.assign(Object.assign({}, optionsObj), { callback: (diff) => {
+                const patch = diffLinesResultToPatch(diff);
+                // TypeScript is unhappy without the cast because it does not understand that `patch` may
+                // be undefined here only if `callback` is StructuredPatchCallbackAbortable:
+                callback(patch);
+            } }));
+    }
+    function diffLinesResultToPatch(diff) {
+        // STEP 1: Build up the patch with no "\ No newline at end of file" lines and with the arrays
+        //         of lines containing trailing newline characters. We'll tidy up later...
+        if (!diff) {
+            return;
+        }
+        diff.push({ value: '', lines: [] }); // Append an empty value to make cleanup easier
+        function contextLines(lines) {
+            return lines.map(function (entry) { return ' ' + entry; });
+        }
+        const hunks = [];
+        let oldRangeStart = 0, newRangeStart = 0, curRange = [], oldLine = 1, newLine = 1;
+        for (let i = 0; i < diff.length; i++) {
+            const current = diff[i], lines = current.lines || splitLines(current.value);
+            current.lines = lines;
+            if (current.added || current.removed) {
+                // If we have previous context, start with that
+                if (!oldRangeStart) {
+                    const prev = diff[i - 1];
+                    oldRangeStart = oldLine;
+                    newRangeStart = newLine;
+                    if (prev) {
+                        curRange = context > 0 ? contextLines(prev.lines.slice(-context)) : [];
+                        oldRangeStart -= curRange.length;
+                        newRangeStart -= curRange.length;
+                    }
+                }
+                // Output our changes
+                for (const line of lines) {
+                    curRange.push((current.added ? '+' : '-') + line);
+                }
+                // Track the updated file position
+                if (current.added) {
+                    newLine += lines.length;
+                }
+                else {
+                    oldLine += lines.length;
+                }
+            }
+            else {
+                // Identical context lines. Track line changes
+                if (oldRangeStart) {
+                    // Close out any changes that have been output (or join overlapping)
+                    if (lines.length <= context * 2 && i < diff.length - 2) {
+                        // Overlapping
+                        for (const line of contextLines(lines)) {
+                            curRange.push(line);
+                        }
+                    }
+                    else {
+                        // end the range and output
+                        const contextSize = Math.min(lines.length, context);
+                        for (const line of contextLines(lines.slice(0, contextSize))) {
+                            curRange.push(line);
+                        }
+                        const hunk = {
+                            oldStart: oldRangeStart,
+                            oldLines: (oldLine - oldRangeStart + contextSize),
+                            newStart: newRangeStart,
+                            newLines: (newLine - newRangeStart + contextSize),
+                            lines: curRange
+                        };
+                        hunks.push(hunk);
+                        oldRangeStart = 0;
+                        newRangeStart = 0;
+                        curRange = [];
+                    }
+                }
+                oldLine += lines.length;
+                newLine += lines.length;
+            }
+        }
+        // Step 2: eliminate the trailing `\n` from each line of each hunk, and, where needed, add
+        //         "\ No newline at end of file".
+        for (const hunk of hunks) {
+            for (let i = 0; i < hunk.lines.length; i++) {
+                if (hunk.lines[i].endsWith('\n')) {
+                    hunk.lines[i] = hunk.lines[i].slice(0, -1);
+                }
+                else {
+                    hunk.lines.splice(i + 1, 0, '\\ No newline at end of file');
+                    i++; // Skip the line we just added, then continue iterating
+                }
+            }
+        }
+        return {
+            oldFileName: oldFileName, newFileName: newFileName,
+            oldHeader: oldHeader, newHeader: newHeader,
+            hunks: hunks
+        };
+    }
+}
+/**
+ * creates a unified diff patch.
+ * @param patch either a single structured patch object (as returned by `structuredPatch`) or an array of them (as returned by `parsePatch`)
+ */
+export function formatPatch(patch) {
+    if (Array.isArray(patch)) {
+        return patch.map(formatPatch).join('\n');
+    }
+    const ret = [];
+    if (patch.oldFileName == patch.newFileName) {
+        ret.push('Index: ' + patch.oldFileName);
+    }
+    ret.push('===================================================================');
+    ret.push('--- ' + patch.oldFileName + (typeof patch.oldHeader === 'undefined' ? '' : '\t' + patch.oldHeader));
+    ret.push('+++ ' + patch.newFileName + (typeof patch.newHeader === 'undefined' ? '' : '\t' + patch.newHeader));
+    for (let i = 0; i < patch.hunks.length; i++) {
+        const hunk = patch.hunks[i];
+        // Unified Diff Format quirk: If the chunk size is 0,
+        // the first number is one lower than one would expect.
+        // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+        if (hunk.oldLines === 0) {
+            hunk.oldStart -= 1;
+        }
+        if (hunk.newLines === 0) {
+            hunk.newStart -= 1;
+        }
+        ret.push('@@ -' + hunk.oldStart + ',' + hunk.oldLines
+            + ' +' + hunk.newStart + ',' + hunk.newLines
+            + ' @@');
+        for (const line of hunk.lines) {
+            ret.push(line);
+        }
+    }
+    return ret.join('\n') + '\n';
+}
+export function createTwoFilesPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options) {
+    if (typeof options === 'function') {
+        options = { callback: options };
+    }
+    if (!(options === null || options === void 0 ? void 0 : options.callback)) {
+        const patchObj = structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, options);
+        if (!patchObj) {
+            return;
+        }
+        return formatPatch(patchObj);
+    }
+    else {
+        const { callback } = options;
+        structuredPatch(oldFileName, newFileName, oldStr, newStr, oldHeader, newHeader, Object.assign(Object.assign({}, options), { callback: patchObj => {
+                if (!patchObj) {
+                    callback(undefined);
+                }
+                else {
+                    callback(formatPatch(patchObj));
+                }
+            } }));
+    }
+}
+export function createPatch(fileName, oldStr, newStr, oldHeader, newHeader, options) {
+    return createTwoFilesPatch(fileName, fileName, oldStr, newStr, oldHeader, newHeader, options);
+}
+/**
+ * Split `text` into an array of lines, including the trailing newline character (where present)
+ */
+function splitLines(text) {
+    const hasTrailingNl = text.endsWith('\n');
+    const result = text.split('\n').map(line => line + '\n');
+    if (hasTrailingNl) {
+        result.pop();
+    }
+    else {
+        result.push(result.pop().slice(0, -1));
+    }
+    return result;
+}
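A sketch of the structuredPatch → formatPatch pipeline implemented above (hunk shape and the context option); file names are illustrative and the package is assumed to resolve as 'diff':

import { structuredPatch, formatPatch } from 'diff';

const p = structuredPatch('old.txt', 'new.txt', 'a\nb\nc\n', 'a\nX\nc\n',
  undefined, undefined, { context: 1 });

console.log(p.hunks[0]);
// { oldStart: 1, oldLines: 3, newStart: 1, newLines: 3,
//   lines: [ ' a', '-b', '+X', ' c' ] }

console.log(formatPatch(p));   // the same data rendered as unified-diff text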
diff --git a/node_modules/diff/libesm/patch/line-endings.js b/node_modules/diff/libesm/patch/line-endings.js
new file mode 100644
index 0000000000000..ab54b715f0047
--- /dev/null
+++ b/node_modules/diff/libesm/patch/line-endings.js
@@ -0,0 +1,44 @@
+export function unixToWin(patch) {
+    if (Array.isArray(patch)) {
+        // It would be cleaner if instead of the line below we could just write
+        //     return patch.map(unixToWin)
+        // but mysteriously TypeScript (v5.7.3 at the time of writing) does not like this and it will
+        // refuse to compile, thinking that unixToWin could then return StructuredPatch[][] and the
+        // result would be incompatible with the overload signatures.
+        // See bug report at https://github.com/microsoft/TypeScript/issues/61398.
+        return patch.map(p => unixToWin(p));
+    }
+    return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map((line, i) => {
+                var _a;
+                return (line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')))
+                    ? line
+                    : line + '\r';
+            }) }))) });
+}
+export function winToUnix(patch) {
+    if (Array.isArray(patch)) {
+        // (See comment above equivalent line in unixToWin)
+        return patch.map(p => winToUnix(p));
+    }
+    return Object.assign(Object.assign({}, patch), { hunks: patch.hunks.map(hunk => (Object.assign(Object.assign({}, hunk), { lines: hunk.lines.map(line => line.endsWith('\r') ? line.substring(0, line.length - 1) : line) }))) });
+}
+/**
+ * Returns true if the patch consistently uses Unix line endings (or only involves one line and has
+ * no line endings).
+ */
+export function isUnix(patch) {
+    if (!Array.isArray(patch)) {
+        patch = [patch];
+    }
+    return !patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => !line.startsWith('\\') && line.endsWith('\r'))));
+}
+/**
+ * Returns true if the patch uses Windows line endings and only Windows line endings.
+ */
+export function isWin(patch) {
+    if (!Array.isArray(patch)) {
+        patch = [patch];
+    }
+    return patch.some(index => index.hunks.some(hunk => hunk.lines.some(line => line.endsWith('\r'))))
+        && patch.every(index => index.hunks.every(hunk => hunk.lines.every((line, i) => { var _a; return line.startsWith('\\') || line.endsWith('\r') || ((_a = hunk.lines[i + 1]) === null || _a === void 0 ? void 0 : _a.startsWith('\\')); })));
+}
diff --git a/node_modules/diff/libesm/patch/parse.js b/node_modules/diff/libesm/patch/parse.js
new file mode 100644
index 0000000000000..3f9a0d7904f60
--- /dev/null
+++ b/node_modules/diff/libesm/patch/parse.js
@@ -0,0 +1,130 @@
+/**
+ * Parses a patch into structured data, in the same structure returned by `structuredPatch`.
+ *
+ * @return a JSON object representation of the patch, suitable for use with the `applyPatch` method.
+ */
+export function parsePatch(uniDiff) {
+    const diffstr = uniDiff.split(/\n/), list = [];
+    let i = 0;
+    function parseIndex() {
+        const index = {};
+        list.push(index);
+        // Parse diff metadata
+        while (i < diffstr.length) {
+            const line = diffstr[i];
+            // File header found, end parsing diff metadata
+            if ((/^(---|\+\+\+|@@)\s/).test(line)) {
+                break;
+            }
+            // Diff index
+            const header = (/^(?:Index:|diff(?: -r \w+)+)\s+(.+?)\s*$/).exec(line);
+            if (header) {
+                index.index = header[1];
+            }
+            i++;
+        }
+        // Parse file headers if they are defined. Unified diff requires them, but
+        // there's no technical issues to have an isolated hunk without file header
+        parseFileHeader(index);
+        parseFileHeader(index);
+        // Parse hunks
+        index.hunks = [];
+        while (i < diffstr.length) {
+            const line = diffstr[i];
+            if ((/^(Index:\s|diff\s|---\s|\+\+\+\s|===================================================================)/).test(line)) {
+                break;
+            }
+            else if ((/^@@/).test(line)) {
+                index.hunks.push(parseHunk());
+            }
+            else if (line) {
+                throw new Error('Unknown line ' + (i + 1) + ' ' + JSON.stringify(line));
+            }
+            else {
+                i++;
+            }
+        }
+    }
+    // Parses the --- and +++ headers; if none are found, no lines
+    // are consumed.
+    function parseFileHeader(index) {
+        const fileHeader = (/^(---|\+\+\+)\s+(.*)\r?$/).exec(diffstr[i]);
+        if (fileHeader) {
+            const data = fileHeader[2].split('\t', 2), header = (data[1] || '').trim();
+            let fileName = data[0].replace(/\\\\/g, '\\');
+            if ((/^".*"$/).test(fileName)) {
+                fileName = fileName.substr(1, fileName.length - 2);
+            }
+            if (fileHeader[1] === '---') {
+                index.oldFileName = fileName;
+                index.oldHeader = header;
+            }
+            else {
+                index.newFileName = fileName;
+                index.newHeader = header;
+            }
+            i++;
+        }
+    }
+    // Parses a hunk
+    // This assumes that we are at the start of a hunk.
+    function parseHunk() {
+        var _a;
+        const chunkHeaderIndex = i, chunkHeaderLine = diffstr[i++], chunkHeader = chunkHeaderLine.split(/@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@/);
+        const hunk = {
+            oldStart: +chunkHeader[1],
+            oldLines: typeof chunkHeader[2] === 'undefined' ? 1 : +chunkHeader[2],
+            newStart: +chunkHeader[3],
+            newLines: typeof chunkHeader[4] === 'undefined' ? 1 : +chunkHeader[4],
+            lines: []
+        };
+        // Unified Diff Format quirk: If the chunk size is 0,
+        // the first number is one lower than one would expect.
+        // https://www.artima.com/weblogs/viewpost.jsp?thread=164293
+        if (hunk.oldLines === 0) {
+            hunk.oldStart += 1;
+        }
+        if (hunk.newLines === 0) {
+            hunk.newStart += 1;
+        }
+        let addCount = 0, removeCount = 0;
+        for (; i < diffstr.length && (removeCount < hunk.oldLines || addCount < hunk.newLines || ((_a = diffstr[i]) === null || _a === void 0 ? void 0 : _a.startsWith('\\'))); i++) {
+            const operation = (diffstr[i].length == 0 && i != (diffstr.length - 1)) ? ' ' : diffstr[i][0];
+            if (operation === '+' || operation === '-' || operation === ' ' || operation === '\\') {
+                hunk.lines.push(diffstr[i]);
+                if (operation === '+') {
+                    addCount++;
+                }
+                else if (operation === '-') {
+                    removeCount++;
+                }
+                else if (operation === ' ') {
+                    addCount++;
+                    removeCount++;
+                }
+            }
+            else {
+                throw new Error(`Hunk at line ${chunkHeaderIndex + 1} contained invalid line ${diffstr[i]}`);
+            }
+        }
+        // Handle the empty block count case
+        if (!addCount && hunk.newLines === 1) {
+            hunk.newLines = 0;
+        }
+        if (!removeCount && hunk.oldLines === 1) {
+            hunk.oldLines = 0;
+        }
+        // Perform sanity checking
+        if (addCount !== hunk.newLines) {
+            throw new Error('Added line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
+        }
+        if (removeCount !== hunk.oldLines) {
+            throw new Error('Removed line count did not match for hunk at line ' + (chunkHeaderIndex + 1));
+        }
+        return hunk;
+    }
+    while (i < diffstr.length) {
+        parseIndex();
+    }
+    return list;
+}
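A small sketch of parsePatch on a hand-written unified diff, showing the structure described above (one entry per file, each with hunks and raw lines); the file names are illustrative:

import { parsePatch } from 'diff';

const [file] = parsePatch(
  '--- a/hello.txt\n' +
  '+++ b/hello.txt\n' +
  '@@ -1,2 +1,2 @@\n' +
  ' unchanged\n' +
  '-old line\n' +
  '+new line\n'
);

console.log(file.oldFileName, file.newFileName);             // a/hello.txt b/hello.txt
console.log(file.hunks[0].oldStart, file.hunks[0].oldLines); // 1 2
console.log(file.hunks[0].lines);   // [ ' unchanged', '-old line', '+new line' ]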
diff --git a/node_modules/diff/libesm/patch/reverse.js b/node_modules/diff/libesm/patch/reverse.js
new file mode 100644
index 0000000000000..9207b51c63c55
--- /dev/null
+++ b/node_modules/diff/libesm/patch/reverse.js
@@ -0,0 +1,23 @@
+export function reversePatch(structuredPatch) {
+    if (Array.isArray(structuredPatch)) {
+        // (See comment in unixToWin for why we need the pointless-looking anonymous function here)
+        return structuredPatch.map(patch => reversePatch(patch)).reverse();
+    }
+    return Object.assign(Object.assign({}, structuredPatch), { oldFileName: structuredPatch.newFileName, oldHeader: structuredPatch.newHeader, newFileName: structuredPatch.oldFileName, newHeader: structuredPatch.oldHeader, hunks: structuredPatch.hunks.map(hunk => {
+            return {
+                oldLines: hunk.newLines,
+                oldStart: hunk.newStart,
+                newLines: hunk.oldLines,
+                newStart: hunk.oldStart,
+                lines: hunk.lines.map(l => {
+                    if (l.startsWith('-')) {
+                        return `+${l.slice(1)}`;
+                    }
+                    if (l.startsWith('+')) {
+                        return `-${l.slice(1)}`;
+                    }
+                    return l;
+                })
+            };
+        }) });
+}
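reversePatch swaps old/new metadata and flips +/- lines, so a reversed patch undoes the original; a minimal round-trip sketch (file name illustrative):

import { structuredPatch, reversePatch, applyPatch } from 'diff';

const patch = structuredPatch('f.txt', 'f.txt', 'a\nb\n', 'a\nc\n', undefined, undefined);
const undo = reversePatch(patch);

console.log(applyPatch('a\nc\n', undo));   // 'a\nb\n' — the original text is restored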
diff --git a/node_modules/diff/libesm/types.js b/node_modules/diff/libesm/types.js
new file mode 100644
index 0000000000000..cb0ff5c3b541f
--- /dev/null
+++ b/node_modules/diff/libesm/types.js
@@ -0,0 +1 @@
+export {};
diff --git a/node_modules/diff/libesm/util/array.js b/node_modules/diff/libesm/util/array.js
new file mode 100644
index 0000000000000..c3e00f8500390
--- /dev/null
+++ b/node_modules/diff/libesm/util/array.js
@@ -0,0 +1,17 @@
+export function arrayEqual(a, b) {
+    if (a.length !== b.length) {
+        return false;
+    }
+    return arrayStartsWith(a, b);
+}
+export function arrayStartsWith(array, start) {
+    if (start.length > array.length) {
+        return false;
+    }
+    for (let i = 0; i < start.length; i++) {
+        if (start[i] !== array[i]) {
+            return false;
+        }
+    }
+    return true;
+}
diff --git a/node_modules/diff/libesm/util/distance-iterator.js b/node_modules/diff/libesm/util/distance-iterator.js
new file mode 100644
index 0000000000000..afa638143ece1
--- /dev/null
+++ b/node_modules/diff/libesm/util/distance-iterator.js
@@ -0,0 +1,37 @@
+// Iterator that traverses in the range of [min, max], stepping
+// by distance from a given start position. I.e. for [0, 4], with
+// start of 2, this will iterate 2, 3, 1, 4, 0.
+export default function (start, minLine, maxLine) {
+    let wantForward = true, backwardExhausted = false, forwardExhausted = false, localOffset = 1;
+    return function iterator() {
+        if (wantForward && !forwardExhausted) {
+            if (backwardExhausted) {
+                localOffset++;
+            }
+            else {
+                wantForward = false;
+            }
+            // Check if trying to fit beyond text length, and if not, check it fits
+            // after offset location (or desired location on first iteration)
+            if (start + localOffset <= maxLine) {
+                return start + localOffset;
+            }
+            forwardExhausted = true;
+        }
+        if (!backwardExhausted) {
+            if (!forwardExhausted) {
+                wantForward = true;
+            }
+            // Check if trying to fit before text beginning, and if not, check it fits
+            // before offset location
+            if (minLine <= start - localOffset) {
+                return start - localOffset++;
+            }
+            backwardExhausted = true;
+            return iterator();
+        }
+        // We tried to fit hunk before text beginning and beyond text length, then
+        // hunk can't fit on the text. Return undefined
+        return undefined;
+    };
+}
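The comment above gives the traversal order for a symmetric example; since this module is internal (not re-exported from the package entry point shown earlier), here is a standalone sketch that reproduces that documented order for the same [0, 4] range with start 2:

const start = 2, min = 0, max = 4;
const order = [start,
  ...Array.from({ length: max - min }, (_, k) => k + 1)
    .flatMap(d => [start + d, start - d])      // alternate forward/backward by distance
    .filter(p => p >= min && p <= max)];
console.log(order);   // [ 2, 3, 1, 4, 0 ]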
diff --git a/node_modules/diff/libesm/util/params.js b/node_modules/diff/libesm/util/params.js
new file mode 100644
index 0000000000000..c9921a2106257
--- /dev/null
+++ b/node_modules/diff/libesm/util/params.js
@@ -0,0 +1,14 @@
+export function generateOptions(options, defaults) {
+    if (typeof options === 'function') {
+        defaults.callback = options;
+    }
+    else if (options) {
+        for (const name in options) {
+            /* istanbul ignore else */
+            if (Object.prototype.hasOwnProperty.call(options, name)) {
+                defaults[name] = options[name];
+            }
+        }
+    }
+    return defaults;
+}
diff --git a/node_modules/diff/libesm/util/string.js b/node_modules/diff/libesm/util/string.js
new file mode 100644
index 0000000000000..36cfb3aa85ddf
--- /dev/null
+++ b/node_modules/diff/libesm/util/string.js
@@ -0,0 +1,128 @@
+export function longestCommonPrefix(str1, str2) {
+    let i;
+    for (i = 0; i < str1.length && i < str2.length; i++) {
+        if (str1[i] != str2[i]) {
+            return str1.slice(0, i);
+        }
+    }
+    return str1.slice(0, i);
+}
+export function longestCommonSuffix(str1, str2) {
+    let i;
+    // Unlike longestCommonPrefix, we need a special case to handle all scenarios
+    // where we return the empty string since str1.slice(-0) will return the
+    // entire string.
+    if (!str1 || !str2 || str1[str1.length - 1] != str2[str2.length - 1]) {
+        return '';
+    }
+    for (i = 0; i < str1.length && i < str2.length; i++) {
+        if (str1[str1.length - (i + 1)] != str2[str2.length - (i + 1)]) {
+            return str1.slice(-i);
+        }
+    }
+    return str1.slice(-i);
+}
+export function replacePrefix(string, oldPrefix, newPrefix) {
+    if (string.slice(0, oldPrefix.length) != oldPrefix) {
+        throw Error(`string ${JSON.stringify(string)} doesn't start with prefix ${JSON.stringify(oldPrefix)}; this is a bug`);
+    }
+    return newPrefix + string.slice(oldPrefix.length);
+}
+export function replaceSuffix(string, oldSuffix, newSuffix) {
+    if (!oldSuffix) {
+        return string + newSuffix;
+    }
+    if (string.slice(-oldSuffix.length) != oldSuffix) {
+        throw Error(`string ${JSON.stringify(string)} doesn't end with suffix ${JSON.stringify(oldSuffix)}; this is a bug`);
+    }
+    return string.slice(0, -oldSuffix.length) + newSuffix;
+}
+export function removePrefix(string, oldPrefix) {
+    return replacePrefix(string, oldPrefix, '');
+}
+export function removeSuffix(string, oldSuffix) {
+    return replaceSuffix(string, oldSuffix, '');
+}
+export function maximumOverlap(string1, string2) {
+    return string2.slice(0, overlapCount(string1, string2));
+}
+// Nicked from https://stackoverflow.com/a/60422853/1709587
+function overlapCount(a, b) {
+    // Deal with cases where the strings differ in length
+    let startA = 0;
+    if (a.length > b.length) {
+        startA = a.length - b.length;
+    }
+    let endB = b.length;
+    if (a.length < b.length) {
+        endB = a.length;
+    }
+    // Create a back-reference for each index
+    //   that should be followed in case of a mismatch.
+    //   We only need B to make these references:
+    const map = Array(endB);
+    let k = 0; // Index that lags behind j
+    map[0] = 0;
+    for (let j = 1; j < endB; j++) {
+        if (b[j] == b[k]) {
+            map[j] = map[k]; // skip over the same character (optional optimisation)
+        }
+        else {
+            map[j] = k;
+        }
+        while (k > 0 && b[j] != b[k]) {
+            k = map[k];
+        }
+        if (b[j] == b[k]) {
+            k++;
+        }
+    }
+    // Phase 2: use these references while iterating over A
+    k = 0;
+    for (let i = startA; i < a.length; i++) {
+        while (k > 0 && a[i] != b[k]) {
+            k = map[k];
+        }
+        if (a[i] == b[k]) {
+            k++;
+        }
+    }
+    return k;
+}
+/**
+ * Returns true if the string consistently uses Windows line endings.
+ */
+export function hasOnlyWinLineEndings(string) {
+    return string.includes('\r\n') && !string.startsWith('\n') && !string.match(/[^\r]\n/);
+}
+/**
+ * Returns true if the string consistently uses Unix line endings.
+ */
+export function hasOnlyUnixLineEndings(string) {
+    return !string.includes('\r\n') && string.includes('\n');
+}
+export function trailingWs(string) {
+    // Yes, this looks overcomplicated and dumb - why not replace the whole function with
+    //     return string.match(/\s*$/)[0]
+    // you ask? Because:
+    // 1. the trap described at https://markamery.com/blog/quadratic-time-regexes/ would mean doing
+    //    this would cause this function to take O(n²) time in the worst case (specifically when
+    //    there is a massive run of NON-TRAILING whitespace in `string`), and
+    // 2. the fix proposed in the same blog post, of using a negative lookbehind, is incompatible
+    //    with old Safari versions that we'd like to not break if possible (see
+    //    https://github.com/kpdecker/jsdiff/pull/550)
+    // It feels absurd to do this with an explicit loop instead of a regex, but I really can't see a
+    // better way that doesn't result in broken behaviour.
+    let i;
+    for (i = string.length - 1; i >= 0; i--) {
+        if (!string[i].match(/\s/)) {
+            break;
+        }
+    }
+    return string.substring(i + 1);
+}
+export function leadingWs(string) {
+    // Thankfully the annoying considerations described in trailingWs don't apply here:
+    const match = string.match(/^\s*/);
+    return match ? match[0] : '';
+}
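
A short sketch exercising a few of the string helpers above, under the same assumption about the `diff/lib/util/string.js` subpath mapping:

```js
// Hedged sketch; assumes the same `diff/lib/util/string.js` subpath mapping
// as the other lib files.
import {
  longestCommonPrefix,
  longestCommonSuffix,
  maximumOverlap,
  hasOnlyWinLineEndings,
  trailingWs,
} from 'diff/lib/util/string.js';

console.log(longestCommonPrefix('interstellar', 'interstate')); // -> 'interst'
console.log(longestCommonSuffix('walking', 'talking'));         // -> 'alking'

// Longest prefix of the second string that is also a suffix of the first.
console.log(maximumOverlap('foo bar', 'bar baz'));              // -> 'bar'

console.log(hasOnlyWinLineEndings('a\r\nb\r\n'));               // -> true
console.log(JSON.stringify(trailingWs('text  \n')));            // -> "  \n" (escaped)
```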
diff --git a/node_modules/diff/package.json b/node_modules/diff/package.json
index 400c8dd8fe9b3..b941f247c27e4 100644
--- a/node_modules/diff/package.json
+++ b/node_modules/diff/package.json
@@ -1,6 +1,6 @@
 {
   "name": "diff",
-  "version": "7.0.0",
+  "version": "8.0.2",
   "description": "A JavaScript text diff implementation.",
   "keywords": [
     "diff",
@@ -28,61 +28,104 @@
   "engines": {
     "node": ">=0.3.1"
   },
-  "main": "./lib/index.js",
-  "module": "./lib/index.es6.js",
+  "main": "./libcjs/index.js",
+  "module": "./libesm/index.js",
   "browser": "./dist/diff.js",
   "unpkg": "./dist/diff.js",
   "exports": {
     ".": {
-      "import": "./lib/index.mjs",
-      "require": "./lib/index.js"
+      "import": {
+        "types": "./libesm/index.d.ts",
+        "default": "./libesm/index.js"
+      },
+      "require": {
+        "types": "./libcjs/index.d.ts",
+        "default": "./libcjs/index.js"
+      }
     },
     "./package.json": "./package.json",
-    "./": "./",
-    "./*": "./*"
+    "./lib/*.js": {
+      "import": {
+        "types": "./libesm/*.d.ts",
+        "default": "./libesm/*.js"
+      },
+      "require": {
+        "types": "./libcjs/*.d.ts",
+        "default": "./libcjs/*.js"
+      }
+    },
+    "./lib/": {
+      "import": {
+        "types": "./libesm/",
+        "default": "./libesm/"
+      },
+      "require": {
+        "types": "./libcjs/",
+        "default": "./libcjs/"
+      }
+    }
   },
+  "type": "module",
+  "types": "libcjs/index.d.ts",
   "scripts": {
-    "clean": "rm -rf lib/ dist/",
-    "build:node": "yarn babel --out-dir lib  --source-maps=inline src",
-    "test": "grunt"
+    "clean": "rm -rf libcjs/ libesm/ dist/ coverage/ .nyc_output/",
+    "lint": "yarn eslint",
+    "build": "yarn lint && yarn generate-esm && yarn generate-cjs && yarn check-types && yarn run-rollup && yarn run-uglify",
+    "generate-cjs": "yarn tsc --module commonjs --outDir libcjs && node --eval \"fs.writeFileSync('libcjs/package.json', JSON.stringify({type:'commonjs',sideEffects:false}))\"",
+    "generate-esm": "yarn tsc --module nodenext --outDir libesm --target es6 && node --eval \"fs.writeFileSync('libesm/package.json', JSON.stringify({type:'module',sideEffects:false}))\"",
+    "check-types": "yarn run-tsd && yarn run-attw",
+    "test": "nyc yarn _test",
+    "_test": "yarn build && cross-env NODE_ENV=test yarn run-mocha",
+    "run-attw": "yarn attw --pack --entrypoints . && yarn attw --pack --entrypoints lib/diff/word.js --profile node16",
+    "run-tsd": "yarn tsd --typings libesm/ && yarn tsd --files test-d/",
+    "run-rollup": "rollup -c rollup.config.mjs",
+    "run-uglify": "uglifyjs dist/diff.js -c -o dist/diff.min.js",
+    "run-mocha": "mocha --require ./runtime 'test/**/*.js'"
   },
   "devDependencies": {
-    "@babel/cli": "^7.24.1",
-    "@babel/core": "^7.24.1",
-    "@babel/plugin-transform-modules-commonjs": "^7.24.1",
-    "@babel/preset-env": "^7.24.1",
-    "@babel/register": "^7.23.7",
+    "@arethetypeswrong/cli": "^0.17.4",
+    "@babel/core": "^7.26.9",
+    "@babel/preset-env": "^7.26.9",
+    "@babel/register": "^7.25.9",
     "@colors/colors": "^1.6.0",
-    "babel-eslint": "^10.0.1",
-    "babel-loader": "^9.1.3",
-    "chai": "^4.2.0",
-    "eslint": "^5.12.0",
-    "grunt": "^1.6.1",
-    "grunt-babel": "^8.0.0",
-    "grunt-cli": "^1.4.3",
-    "grunt-contrib-clean": "^2.0.1",
-    "grunt-contrib-copy": "^1.0.0",
-    "grunt-contrib-uglify": "^5.2.2",
-    "grunt-contrib-watch": "^1.1.0",
-    "grunt-eslint": "^24.3.0",
-    "grunt-exec": "^3.0.0",
-    "grunt-karma": "^4.0.2",
-    "grunt-mocha-istanbul": "^5.0.2",
-    "grunt-mocha-test": "^0.13.3",
-    "grunt-webpack": "^6.0.0",
-    "istanbul": "github:kpdecker/istanbul",
-    "karma": "^6.4.3",
-    "karma-chrome-launcher": "^3.2.0",
+    "@eslint/js": "^9.25.1",
+    "babel-loader": "^10.0.0",
+    "babel-plugin-istanbul": "^7.0.0",
+    "chai": "^5.2.0",
+    "cross-env": "^7.0.3",
+    "eslint": "^9.25.1",
+    "globals": "^16.0.0",
+    "karma": "^6.4.4",
     "karma-mocha": "^2.0.1",
     "karma-mocha-reporter": "^2.2.5",
     "karma-sourcemap-loader": "^0.4.0",
     "karma-webpack": "^5.0.1",
-    "mocha": "^7.0.0",
-    "rollup": "^4.13.0",
-    "rollup-plugin-babel": "^4.2.0",
-    "semver": "^7.6.0",
-    "webpack": "^5.90.3",
-    "webpack-dev-server": "^5.0.3"
+    "mocha": "^11.1.0",
+    "nyc": "^17.1.0",
+    "rollup": "^4.40.1",
+    "tsd": "^0.32.0",
+    "typescript": "^5.8.3",
+    "typescript-eslint": "^8.31.0",
+    "uglify-js": "^3.19.3",
+    "webpack": "^5.99.7",
+    "webpack-dev-server": "^5.2.1"
   },
-  "optionalDependencies": {}
+  "optionalDependencies": {},
+  "dependencies": {},
+  "nyc": {
+    "require": [
+      "@babel/register"
+    ],
+    "reporter": [
+      "lcov",
+      "text"
+    ],
+    "sourceMap": false,
+    "instrument": false,
+    "check-coverage": true,
+    "branches": 100,
+    "lines": 100,
+    "functions": 100,
+    "statements": 100
+  }
 }
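
The reworked `exports` map splits the package into separate CJS (`libcjs/`) and ESM (`libesm/`) builds, each with its own type declarations, and the `generate-cjs`/`generate-esm` scripts stamp a nested `package.json` with the matching `type` field into each build directory. A rough sketch of how consumers resolve the package after this change:

```js
// ESM consumers hit the "import" condition and load ./libesm/index.js ...
import { diffLines } from 'diff';

// ... while CJS consumers hit the "require" condition and load ./libcjs/index.js:
//   const { diffLines } = require('diff');

// Deep imports now go through the "./lib/*.js" pattern instead of "./*", e.g.
//   import { diffWords } from 'diff/lib/diff/word.js';

const changes = diffLines('a\nb\n', 'a\nc\n');
console.log(changes.length); // -> 3 (one common run, one removal, one addition)
```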
diff --git a/node_modules/diff/release-notes.md b/node_modules/diff/release-notes.md
index 21b5d41d6188b..28219b2b0e5d4 100644
--- a/node_modules/diff/release-notes.md
+++ b/node_modules/diff/release-notes.md
@@ -1,5 +1,41 @@
 # Release Notes
 
+## 8.0.2
+
+- [#616](https://github.com/kpdecker/jsdiff/pull/616) **Restored compatibility of `diffSentences` with old Safari versions.** This was broken in 8.0.0 by the introduction of a regex with a [lookbehind assertion](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Regular_expressions/Lookbehind_assertion); these weren't supported in Safari prior to version 16.4.
+- [#612](https://github.com/kpdecker/jsdiff/pull/612) **Improved tree shakeability** by marking the built CJS and ESM packages with `sideEffects: false`.
+
+## 8.0.1
+
+- [#610](https://github.com/kpdecker/jsdiff/pull/610) **Fixes types for `diffJson` which were broken by 8.0.0**. The new bundled types in 8.0.0 only allowed `diffJson` to be passed string arguments, but it should've been possible to pass either strings or objects (and now is). Thanks to Josh Kelley for the fix.
+
+## 8.0.0
+
+- [#580](https://github.com/kpdecker/jsdiff/pull/580) **Multiple tweaks to `diffSentences`**:
+  * tokenization no longer takes quadratic time on pathological inputs (reported as a ReDOS vulnerability by Snyk); is now linear instead
+  * the final sentence in the string is now handled the same by the tokenizer regardless of whether it has a trailing punctuation mark or not. (Previously, "foo. bar." tokenized to `["foo.", " ", "bar."]` but "foo. bar" tokenized to `["foo.", " bar"]` - i.e. whether the space between sentences was treated as a separate token depended upon whether the final sentence had trailing punctuation or not. This was arbitrary and surprising; it is no longer the case.)
+  * in a string that starts with a sentence end, like "! hello.", the "!" is now treated as a separate sentence
+  * the README now correctly documents the tokenization behaviour (it was wrong before)
+- [#581](https://github.com/kpdecker/jsdiff/pull/581) - **fixed some regex operations used for tokenization in `diffWords` taking O(n^2) time** in pathological cases
+- [#595](https://github.com/kpdecker/jsdiff/pull/595) - **fixed a crash in patch creation functions when handling a single hunk consisting of a very large number (e.g. >130k) of lines**. (This was caused by spreading indefinitely-large arrays to `.push()` using `.apply` or the spread operator and hitting the JS-implementation-specific limit on the maximum number of arguments to a function, as shown at https://stackoverflow.com/a/56809779/1709587; thus the exact threshold to hit the error will depend on the environment in which you were running JsDiff.)
+- [#596](https://github.com/kpdecker/jsdiff/pull/596) - **removed the `merge` function**. Previously JsDiff included an undocumented function called `merge` that was meant to, in some sense, merge patches. It had at least a couple of serious bugs that could lead to it returning unambiguously wrong results, and it was difficult to simply "fix" because it was [unclear precisely what it was meant to do](https://github.com/kpdecker/jsdiff/issues/181#issuecomment-2198319542). For now, the fix is to remove it entirely.
+- [#591](https://github.com/kpdecker/jsdiff/pull/591) - JsDiff's source code has been rewritten in TypeScript. This change entails the following changes for end users:
+  * **the `diff` package on npm now includes its own TypeScript type definitions**. Users who previously used the `@types/diff` npm package from DefinitelyTyped should remove that dependency when upgrading JsDiff to v8.
+
+    Note that the transition from the DefinitelyTyped types to JsDiff's own type definitions includes multiple fixes and also removes many exported types previously used for `options` arguments to diffing and patch-generation functions. (There are now different exported options types for abortable calls - ones with a `timeout` or `maxEditLength` that may give a result of `undefined` - and non-abortable calls.) See the TypeScript section of the README for some usage tips.
+
+  * **The `Diff` object is now a class**. Custom extensions of `Diff`, as described in the "Defining custom diffing behaviors" section of the README, can therefore now be done by writing a `class CustomDiff extends Diff` and overriding methods, instead of the old way based on prototype inheritance. (I *think* code that did things the old way should still work, though!)
+
+  * **`diff/lib/index.es6.js` and `diff/lib/index.mjs` no longer exist, and the ESM version of the library is no longer bundled into a single file.**
+
+  * **The `ignoreWhitespace` option for `diffWords` is no longer included in the type declarations**. The effect of passing `ignoreWhitespace: true` has always been to make `diffWords` just call `diffWordsWithSpace` instead, which was confusing, because that behaviour doesn't seem properly described as "ignoring" whitespace at all. The property remains available to non-TypeScript applications for the sake of backwards compatibility, but TypeScript applications will now see a type error if they try to pass `ignoreWhitespace: true` to `diffWords` and should change their code to call `diffWordsWithSpace` instead.
+
+  * JsDiff no longer purports to support ES3 environments. (I'm pretty sure it never truly did, despite claiming to in its README, since even the 1.0.0 release used `Array.map` which was added in ES5.)
+- [#601](https://github.com/kpdecker/jsdiff/pull/601) - **`diffJson`'s `stringifyReplacer` option behaves more like `JSON.stringify`'s `replacer` argument now.** In particular:
+  * Each key/value pair now gets passed through the replacer once instead of twice
+  * The `key` passed to the replacer when the top-level object is passed in as `value` is now `""` (previously, was `undefined`), and the `key` passed with an array element is the array index as a string, like `"0"` or `"1"` (previously was whatever the key for the entire array was). Both the new behaviours match that of `JSON.stringify`.
+- [#602](https://github.com/kpdecker/jsdiff/pull/602) - **diffing functions now consistently return `undefined` when called in async mode** (i.e. with a callback). Previously, there was an odd quirk where they would return `true` if the strings being diffed were equal and `undefined` otherwise.
+
 ## 7.0.0
 
 Just a single (breaking) bugfix, undoing a behaviour change introduced accidentally in 6.0.0:
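
The 8.0.0 notes above describe the move to a class-based `Diff`. A hedged sketch of that extension style follows; the overridden method and its `(left, right, options)` signature match the notes, but treat the example as illustrative rather than canonical.

```js
// Hedged sketch of extending the class-based Diff; the (left, right, options)
// signature for equals follows the 6.0.0 note further down.
import { Diff } from 'diff';

class CaseInsensitiveDiff extends Diff {
  // Compare tokens without regard to case.
  equals(left, right, options) {
    return left.toLowerCase() === right.toLowerCase();
  }
}

const d = new CaseInsensitiveDiff();
// The base tokenizer works character by character, so a case-only difference
// collapses into a single unchanged run.
console.log(d.diff('Hello World', 'hello world').length); // -> 1
```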
@@ -33,14 +69,14 @@ This is a release containing many, *many* breaking changes. The objective of thi
 - [#490](https://github.com/kpdecker/jsdiff/pull/490) **When calling diffing functions in async mode by passing a `callback` option, the diff result will now be passed as the *first* argument to the callback instead of the second.** (Previously, the first argument was never used at all and would always have value `undefined`.)
 - [#489](github.com/kpdecker/jsdiff/pull/489) **`this.options` no longer exists on `Diff` objects.** Instead, `options` is now passed as an argument to methods that rely on options, like `equals(left, right, options)`. This fixes a race condition in async mode, where diffing behaviour could be changed mid-execution if a concurrent usage of the same `Diff` instances overwrote its `options`.
 - [#518](https://github.com/kpdecker/jsdiff/pull/518) **`linedelimiters` no longer exists** on patch objects; instead, when a patch with Windows-style CRLF line endings is parsed, **the lines in `lines` will end with `\r`**. There is now a **new `autoConvertLineEndings` option, on by default**, which makes it so that when a patch with Windows-style line endings is applied to a source file with Unix style line endings, the patch gets autoconverted to use Unix-style line endings, and when a patch with Unix-style line endings is applied to a source file with Windows-style line endings, it gets autoconverted to use Windows-style line endings.
-- [#521](https://github.com/kpdecker/jsdiff/pull/521) **the `callback` option is now supported by `structuredPatch`, `createPatch
+- [#521](https://github.com/kpdecker/jsdiff/pull/521) **the `callback` option is now supported by `structuredPatch`, `createPatch`, and `createTwoFilesPatch`**
 - [#529](https://github.com/kpdecker/jsdiff/pull/529) **`parsePatch` can now parse patches where lines starting with `--` or `++` are deleted/inserted**; previously, there were edge cases where the parser would choke on valid patches or give wrong results.
-- [#530](https://github.com/kpdecker/jsdiff/pull/530) **Added `ignoreNewlineAtEof` option` to `diffLines`**
+- [#530](https://github.com/kpdecker/jsdiff/pull/530) **Added `ignoreNewlineAtEof` option to `diffLines`**
 - [#533](https://github.com/kpdecker/jsdiff/pull/533) **`applyPatch` uses an entirely new algorithm for fuzzy matching.** Differences between the old and new algorithm are as follows:
   * The `fuzzFactor` now indicates the maximum [*Levenshtein* distance](https://en.wikipedia.org/wiki/Levenshtein_distance) that there can be between the context shown in a hunk and the actual file content at a location where we try to apply the hunk. (Previously, it represented a maximum [*Hamming* distance](https://en.wikipedia.org/wiki/Hamming_distance), meaning that a single insertion or deletion in the source file could stop a hunk from applying even with a high `fuzzFactor`.)
   * A hunk containing a deletion can now only be applied in a context where the line to be deleted actually appears verbatim. (Previously, as long as enough context lines in the hunk matched, `applyPatch` would apply the hunk anyway and delete a completely different line.)
   * The context line immediately before and immediately after an insertion must match exactly between the hunk and the file for a hunk to apply. (Previously this was not required.)
-- [#535](https://github.com/kpdecker/jsdiff/pull/535) **A bug in patch generation functions is now fixed** that would sometimes previously cause `\ No newline at end of file` to appear in the wrong place in the generated patch, resulting in the patch being invalid.
+- [#535](https://github.com/kpdecker/jsdiff/pull/535) **A bug in patch generation functions is now fixed** that would sometimes previously cause `\ No newline at end of file` to appear in the wrong place in the generated patch, resulting in the patch being invalid. **These invalid patches can also no longer be applied successfully with `applyPatch`.** (It was already the case that tools other than jsdiff, like GNU `patch`, would consider them malformed and refuse to apply them; versions of jsdiff with this fix now do the same thing if you ask them to apply a malformed patch emitted by jsdiff v5.)
 - [#535](https://github.com/kpdecker/jsdiff/pull/535) **Passing `newlineIsToken: true` to *patch*-generation functions is no longer allowed.** (Passing it to `diffLines` is still supported - it's only functions like `createPatch` where passing `newlineIsToken` is now an error.) Allowing it to be passed never really made sense, since in cases where the option had any effect on the output at all, the effect tended to be causing a garbled patch to be created that couldn't actually be applied to the source file.
 - [#539](https://github.com/kpdecker/jsdiff/pull/539) **`diffWords` now takes an optional `intlSegmenter` option** which should be an `Intl.Segmenter` with word-level granularity. This provides better tokenization of text into words than the default behaviour, even for English but especially for some other languages for which the default behaviour is poor.
 
@@ -49,7 +85,7 @@ This is a release containing many, *many* breaking changes. The objective of thi
 [Commits](https://github.com/kpdecker/jsdiff/compare/v5.1.0...v5.2.0)
 
 - [#411](https://github.com/kpdecker/jsdiff/pull/411) Big performance improvement. Previously an O(n) array-copying operation inside the innermost loop of jsdiff's base diffing code increased the overall worst-case time complexity of computing a diff from O(n²) to O(n³). This is now fixed, bringing the worst-case time complexity down to what it theoretically should be for a Myers diff implementation.
-- [#448](https://github.com/kpdecker/jsdiff/pull/411) Performance improvement. Diagonals whose furthest-reaching D-path would go off the edge of the edit graph are now skipped, rather than being pointlessly considered as called for by the original Myers diff algorithm. This dramatically speeds up computing diffs where the new text just appends or truncates content at the end of the old text.
+- [#448](https://github.com/kpdecker/jsdiff/pull/448) Performance improvement. Diagonals whose furthest-reaching D-path would go off the edge of the edit graph are now skipped, rather than being pointlessly considered as called for by the original Myers diff algorithm. This dramatically speeds up computing diffs where the new text just appends or truncates content at the end of the old text.
 - [#351](https://github.com/kpdecker/jsdiff/issues/351) Importing from the lib folder - e.g. `require("diff/lib/diff/word.js")` - will work again now. This had been broken for users on the latest version of Node since Node 17.5.0, which changed how Node interprets the `exports` property in jsdiff's `package.json` file.
 - [#344](https://github.com/kpdecker/jsdiff/issues/344) `diffLines`, `createTwoFilesPatch`, and other patch-creation methods now take an optional `stripTrailingCr: true` option which causes Windows-style `\r\n` line endings to be replaced with Unix-style `\n` line endings before calculating the diff, just like GNU `diff`'s `--strip-trailing-cr` flag.
 - [#451](https://github.com/kpdecker/jsdiff/pull/451) Added `diff.formatPatch`.
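
The 5.2.0 entry introducing `stripTrailingCr` can be illustrated with a small `diffLines` call:

```js
import { diffLines } from 'diff';

// stripTrailingCr normalises Windows-style line endings before diffing,
// mirroring GNU diff's --strip-trailing-cr flag.
const changes = diffLines('one\r\ntwo\r\n', 'one\ntwo\n', { stripTrailingCr: true });
console.log(changes.length); // -> 1 (identical after normalisation)
```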
diff --git a/node_modules/diff/runtime.js b/node_modules/diff/runtime.js
deleted file mode 100644
index 82ea7e696aa01..0000000000000
--- a/node_modules/diff/runtime.js
+++ /dev/null
@@ -1,3 +0,0 @@
-require('@babel/register')({
-  ignore: ['lib', 'node_modules']
-});
diff --git a/package-lock.json b/package-lock.json
index 97d0cc81e6fae..bb64e44f0bc78 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -167,7 +167,7 @@
         "ajv-formats": "^2.1.1",
         "ajv-formats-draft2019": "^1.6.1",
         "cli-table3": "^0.6.4",
-        "diff": "^7.0.0",
+        "diff": "^8.0.2",
         "nock": "^13.4.0",
         "npm-packlist": "^10.0.0",
         "remark": "^14.0.2",
@@ -772,8 +772,6 @@
     },
     "node_modules/@conventional-commits/parser": {
       "version": "0.4.1",
-      "resolved": "https://registry.npmjs.org/@conventional-commits/parser/-/parser-0.4.1.tgz",
-      "integrity": "sha512-H2ZmUVt6q+KBccXfMBhbBF14NlANeqHTXL4qCL6QGbMzrc4HDXyzWuxPxPNbz71f/5UkR5DrycP5VO9u7crahg==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -999,8 +997,6 @@
     },
     "node_modules/@google-automations/git-file-utils": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@google-automations/git-file-utils/-/git-file-utils-3.0.0.tgz",
-      "integrity": "sha512-e+WLoKR0TchIhKsSDOnd/su171eXKAAdLpP2tS825UAloTgfYus53kW8uKoVj9MAsMjXGXsJ2s1ASgjq81xVdA==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -1014,8 +1010,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/auth-token": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
-      "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1024,8 +1018,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/core": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz",
-      "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1043,8 +1035,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/endpoint": {
       "version": "9.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
-      "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1057,8 +1047,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/graphql": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1072,15 +1060,11 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/openapi-types": {
       "version": "24.2.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/plugin-paginate-rest": {
       "version": "11.4.4-cjs.2",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz",
-      "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1095,8 +1079,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/plugin-request-log": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz",
-      "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1108,8 +1090,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/plugin-rest-endpoint-methods": {
       "version": "13.3.2-cjs.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz",
-      "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1124,8 +1104,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/request": {
       "version": "8.4.1",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz",
-      "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1140,8 +1118,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/request-error": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz",
-      "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1155,8 +1131,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/rest": {
       "version": "20.1.2",
-      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.1.2.tgz",
-      "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1171,8 +1145,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/@octokit/types": {
       "version": "13.10.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1181,15 +1153,11 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/before-after-hook": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/@google-automations/git-file-utils/node_modules/minimatch": {
       "version": "5.1.6",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz",
-      "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -1201,8 +1169,6 @@
     },
     "node_modules/@google-automations/git-file-utils/node_modules/universal-user-agent": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
-      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -1263,8 +1229,6 @@
     },
     "node_modules/@iarna/toml": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-3.0.0.tgz",
-      "integrity": "sha512-td6ZUkz2oS3VeleBcN+m//Q6HlCFCPrnI0FZhrt/h4XqLEdOyYp2u21nd8MdsR+WJy5r9PTDaHTDDfhf4H4l6Q==",
       "dev": true,
       "license": "ISC"
     },
@@ -1506,8 +1470,6 @@
     },
     "node_modules/@jsep-plugin/assignment": {
       "version": "1.3.0",
-      "resolved": "https://registry.npmjs.org/@jsep-plugin/assignment/-/assignment-1.3.0.tgz",
-      "integrity": "sha512-VVgV+CXrhbMI3aSusQyclHkenWSAm95WaiKrMxRFam3JSUiIaQjoMIw2sEs/OX4XifnqeQUN4DYbJjlA8EfktQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1519,8 +1481,6 @@
     },
     "node_modules/@jsep-plugin/regex": {
       "version": "1.0.4",
-      "resolved": "https://registry.npmjs.org/@jsep-plugin/regex/-/regex-1.0.4.tgz",
-      "integrity": "sha512-q7qL4Mgjs1vByCaTnDFcBnV9HS7GVPJX5vyVoCgZHNSC9rjwIlmbXG5sUuorR5ndfHAIlJ8pVStxvjXHbNvtUg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1776,8 +1736,6 @@
     },
     "node_modules/@npmcli/template-oss": {
       "version": "4.25.1",
-      "resolved": "https://registry.npmjs.org/@npmcli/template-oss/-/template-oss-4.25.1.tgz",
-      "integrity": "sha512-odmdn3CQCUqmT5+Vjiz/UTAORc8xDVU591WLBMotGb35hfIB/zf6RbUB/sEbR1JEjIHQtjhMa6qojoo8f8LmnQ==",
       "dev": true,
       "hasInstallScript": true,
       "license": "ISC",
@@ -1824,8 +1782,6 @@
     },
     "node_modules/@npmcli/template-oss/node_modules/diff": {
       "version": "8.0.2",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-8.0.2.tgz",
-      "integrity": "sha512-sSuxWU5j5SR9QQji/o2qMvqRNYRDOcBTgsJ/DeCf4iSN4gW+gNMXM7wFIP+fdXZxoNiAnHUTGjCr+TSWXdRDKg==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -1834,8 +1790,6 @@
     },
     "node_modules/@octokit/auth-token": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-6.0.0.tgz",
-      "integrity": "sha512-P4YJBPdPSpWTQ1NU4XYdvHvXJJDxM6YwpS0FZHRgP7YFkdVxsWcpWGy/NVqlAA7PcPCnMacXlRm1y2PFZRWL/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1844,8 +1798,6 @@
     },
     "node_modules/@octokit/core": {
       "version": "7.0.4",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-7.0.4.tgz",
-      "integrity": "sha512-jOT8V1Ba5BdC79sKrRWDdMT5l1R+XNHTPR6CPWzUP2EcfAcvIHZWF0eAbmRcpOOP5gVIwnqNg0C4nvh6Abc3OA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1863,8 +1815,6 @@
     },
     "node_modules/@octokit/endpoint": {
       "version": "11.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-11.0.0.tgz",
-      "integrity": "sha512-hoYicJZaqISMAI3JfaDr1qMNi48OctWuOih1m80bkYow/ayPw6Jj52tqWJ6GEoFTk1gBqfanSoI1iY99Z5+ekQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1877,15 +1827,11 @@
     },
     "node_modules/@octokit/endpoint/node_modules/@octokit/openapi-types": {
       "version": "25.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
-      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/endpoint/node_modules/@octokit/types": {
       "version": "14.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
-      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1894,8 +1840,6 @@
     },
     "node_modules/@octokit/graphql": {
       "version": "9.0.1",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-9.0.1.tgz",
-      "integrity": "sha512-j1nQNU1ZxNFx2ZtKmL4sMrs4egy5h65OMDmSbVyuCzjOcwsHq6EaYjOTGXPQxgfiN8dJ4CriYHk6zF050WEULg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1909,15 +1853,11 @@
     },
     "node_modules/@octokit/graphql/node_modules/@octokit/openapi-types": {
       "version": "25.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
-      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/graphql/node_modules/@octokit/types": {
       "version": "14.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
-      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1926,15 +1866,11 @@
     },
     "node_modules/@octokit/openapi-types": {
       "version": "26.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-26.0.0.tgz",
-      "integrity": "sha512-7AtcfKtpo77j7Ts73b4OWhOZHTKo/gGY8bB3bNBQz4H+GRSWqx2yvj8TXRsbdTE0eRmYmXOEY66jM7mJ7LzfsA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/plugin-paginate-rest": {
       "version": "13.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-13.1.1.tgz",
-      "integrity": "sha512-q9iQGlZlxAVNRN2jDNskJW/Cafy7/XE52wjZ5TTvyhyOD904Cvx//DNyoO3J/MXJ0ve3rPoNWKEg5iZrisQSuw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1949,15 +1885,11 @@
     },
     "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/openapi-types": {
       "version": "25.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
-      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/plugin-paginate-rest/node_modules/@octokit/types": {
       "version": "14.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
-      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1966,8 +1898,6 @@
     },
     "node_modules/@octokit/plugin-request-log": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-6.0.0.tgz",
-      "integrity": "sha512-UkOzeEN3W91/eBq9sPZNQ7sUBvYCqYbrrD8gTbBuGtHEuycE4/awMXcYvx6sVYo7LypPhmQwwpUe4Yyu4QZN5Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -1979,8 +1909,6 @@
     },
     "node_modules/@octokit/plugin-rest-endpoint-methods": {
       "version": "16.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-16.1.0.tgz",
-      "integrity": "sha512-nCsyiKoGRnhH5LkH8hJEZb9swpqOcsW+VXv1QoyUNQXJeVODG4+xM6UICEqyqe9XFr6LkL8BIiFCPev8zMDXPw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -1995,8 +1923,6 @@
     },
     "node_modules/@octokit/request": {
       "version": "10.0.3",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-10.0.3.tgz",
-      "integrity": "sha512-V6jhKokg35vk098iBqp2FBKunk3kMTXlmq+PtbV9Gl3TfskWlebSofU9uunVKhUN7xl+0+i5vt0TGTG8/p/7HA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2012,8 +1938,6 @@
     },
     "node_modules/@octokit/request-error": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-7.0.0.tgz",
-      "integrity": "sha512-KRA7VTGdVyJlh0cP5Tf94hTiYVVqmt2f3I6mnimmaVz4UG3gQV/k4mDJlJv3X67iX6rmN7gSHCF8ssqeMnmhZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2025,15 +1949,11 @@
     },
     "node_modules/@octokit/request-error/node_modules/@octokit/openapi-types": {
       "version": "25.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
-      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/request-error/node_modules/@octokit/types": {
       "version": "14.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
-      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2042,15 +1962,11 @@
     },
     "node_modules/@octokit/request/node_modules/@octokit/openapi-types": {
       "version": "25.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-25.1.0.tgz",
-      "integrity": "sha512-idsIggNXUKkk0+BExUn1dQ92sfysJrje03Q0bv0e+KPLrvyqZF8MnBpFz8UNfYDwB3Ie7Z0TByjWfzxt7vseaA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@octokit/request/node_modules/@octokit/types": {
       "version": "14.1.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-14.1.0.tgz",
-      "integrity": "sha512-1y6DgTy8Jomcpu33N+p5w58l6xyt55Ar2I91RPiIA0xCJBXyUAhXCcmZaDWSANiha7R9a6qJJ2CRomGPZ6f46g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2059,8 +1975,6 @@
     },
     "node_modules/@octokit/rest": {
       "version": "22.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-22.0.0.tgz",
-      "integrity": "sha512-z6tmTu9BTnw51jYGulxrlernpsQYXpui1RK21vmXn8yF5bp6iX16yfTtJYGK5Mh1qDkvDOmp2n8sRMcQmR8jiA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2075,8 +1989,6 @@
     },
     "node_modules/@octokit/types": {
       "version": "15.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-15.0.0.tgz",
-      "integrity": "sha512-8o6yDfmoGJUIeR9OfYU0/TUJTnMPG2r68+1yEdUeG2Fdqpj8Qetg0ziKIgcBm0RW/j29H41WP37CYCEhp6GoHQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2252,8 +2164,6 @@
     },
     "node_modules/@types/minimist": {
       "version": "1.2.5",
-      "resolved": "https://registry.npmjs.org/@types/minimist/-/minimist-1.2.5.tgz",
-      "integrity": "sha512-hov8bUuiLiyFPGyFPE1lwWhmzYbirOXQNNo40+y3zow8aFVTeyn3VWL0VFFfdNddA8S4Vf0Tc062rzyNr7Paag==",
       "dev": true,
       "license": "MIT"
     },
@@ -2272,15 +2182,11 @@
     },
     "node_modules/@types/normalize-package-data": {
       "version": "2.4.4",
-      "resolved": "https://registry.npmjs.org/@types/normalize-package-data/-/normalize-package-data-2.4.4.tgz",
-      "integrity": "sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/@types/npm-package-arg": {
       "version": "6.1.4",
-      "resolved": "https://registry.npmjs.org/@types/npm-package-arg/-/npm-package-arg-6.1.4.tgz",
-      "integrity": "sha512-vDgdbMy2QXHnAruzlv68pUtXCjmqUk3WrBAsRboRovsOmxbfn/WiYCjmecyKjGztnMps5dWp4Uq2prp+Ilo17Q==",
       "dev": true,
       "license": "MIT"
     },
@@ -2296,8 +2202,6 @@
     },
     "node_modules/@types/yargs": {
       "version": "16.0.9",
-      "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-16.0.9.tgz",
-      "integrity": "sha512-tHhzvkFXZQeTECenFoRljLBYPZJ7jAVxqqtEI0qTLOmuultnFp4I9yKE17vTuhf7BkhCu7I4XuemPgikDVuYqA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2306,8 +2210,6 @@
     },
     "node_modules/@types/yargs-parser": {
       "version": "21.0.3",
-      "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz",
-      "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==",
       "dev": true,
       "license": "MIT"
     },
@@ -2319,8 +2221,6 @@
     },
     "node_modules/@xmldom/xmldom": {
       "version": "0.8.11",
-      "resolved": "https://registry.npmjs.org/@xmldom/xmldom/-/xmldom-0.8.11.tgz",
-      "integrity": "sha512-cQzWCtO6C8TQiYl1ruKNn2U6Ao4o4WBBcbL61yJl84x+j5sOWWFU9X7DpND8XZG3daDppSsigMdfAIl2upQBRw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2454,8 +2354,6 @@
     },
     "node_modules/anymatch/node_modules/picomatch": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
-      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2707,8 +2605,6 @@
     },
     "node_modules/arrify": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/arrify/-/arrify-1.0.1.tgz",
-      "integrity": "sha512-3CYzex9M9FGQjCGMGyi6/31c8GJbgb0qGyrx5HWxPd0aCwh4cB2YjMb2Xf9UuoogrMrlO9cTqnB5rI5GHZTcUA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2734,8 +2630,6 @@
     },
     "node_modules/async-retry": {
       "version": "1.3.3",
-      "resolved": "https://registry.npmjs.org/async-retry/-/async-retry-1.3.3.tgz",
-      "integrity": "sha512-wfr/jstw9xNi/0teMHrRW7dsz3Lt5ARhYNZ2ewpadnhaIp5mbALhOAP+EAdsC7t4Z6wqsDVv9+W6gm1Dk9mEyw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2744,8 +2638,6 @@
     },
     "node_modules/async-retry/node_modules/retry": {
       "version": "0.13.1",
-      "resolved": "https://registry.npmjs.org/retry/-/retry-0.13.1.tgz",
-      "integrity": "sha512-XQBQ3I8W1Cge0Seh+6gjj03LbmRFWuoszgK9ooCpwYIrhhoO80pfq4cUkU5DkknwfOfFteRwlZ56PYOGYyFWdg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -2819,8 +2711,6 @@
     },
     "node_modules/before-after-hook": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-4.0.0.tgz",
-      "integrity": "sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
@@ -2857,8 +2747,6 @@
     },
     "node_modules/boolbase": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
-      "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
       "dev": true,
       "license": "ISC"
     },
@@ -3038,8 +2926,6 @@
     },
     "node_modules/camelcase-keys": {
       "version": "6.2.2",
-      "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz",
-      "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3273,8 +3159,6 @@
     },
     "node_modules/code-suggester": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/code-suggester/-/code-suggester-5.0.0.tgz",
-      "integrity": "sha512-/xyGfSM/hMYxl12kqoYoOwUm0D1uuVT2nWcMiTq2Fn5MLi+BlWkHq5AUvtniDJwVSdI3jgbK4AOzGws+v/dFPQ==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -3295,8 +3179,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/auth-token": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
-      "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3305,8 +3187,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/core": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz",
-      "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3324,8 +3204,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/endpoint": {
       "version": "9.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
-      "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3338,8 +3216,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/graphql": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3353,15 +3229,11 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/openapi-types": {
       "version": "24.2.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/code-suggester/node_modules/@octokit/plugin-paginate-rest": {
       "version": "11.4.4-cjs.2",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz",
-      "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3376,8 +3248,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/plugin-request-log": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz",
-      "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -3389,8 +3259,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/plugin-rest-endpoint-methods": {
       "version": "13.3.2-cjs.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz",
-      "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3405,8 +3273,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/request": {
       "version": "8.4.1",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz",
-      "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3421,8 +3287,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/request-error": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz",
-      "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3436,8 +3300,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/rest": {
       "version": "20.1.2",
-      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.1.2.tgz",
-      "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3452,8 +3314,6 @@
     },
     "node_modules/code-suggester/node_modules/@octokit/types": {
       "version": "13.10.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3462,8 +3322,6 @@
     },
     "node_modules/code-suggester/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3478,15 +3336,11 @@
     },
     "node_modules/code-suggester/node_modules/before-after-hook": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/code-suggester/node_modules/brace-expansion": {
       "version": "1.1.12",
-      "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
-      "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3496,8 +3350,6 @@
     },
     "node_modules/code-suggester/node_modules/cliui": {
       "version": "7.0.4",
-      "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
-      "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3508,8 +3360,6 @@
     },
     "node_modules/code-suggester/node_modules/diff": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz",
-      "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -3518,9 +3368,6 @@
     },
     "node_modules/code-suggester/node_modules/glob": {
       "version": "7.2.3",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
-      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
-      "deprecated": "Glob versions prior to v9 are no longer supported",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3540,8 +3387,6 @@
     },
     "node_modules/code-suggester/node_modules/minimatch": {
       "version": "3.1.2",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
-      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3553,15 +3398,11 @@
     },
     "node_modules/code-suggester/node_modules/universal-user-agent": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
-      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/code-suggester/node_modules/wrap-ansi": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3578,8 +3419,6 @@
     },
     "node_modules/code-suggester/node_modules/yargs": {
       "version": "16.2.0",
-      "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
-      "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3597,8 +3436,6 @@
     },
     "node_modules/code-suggester/node_modules/yargs-parser": {
       "version": "20.2.9",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
-      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3701,8 +3538,6 @@
     },
     "node_modules/conventional-changelog-writer": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-writer/-/conventional-changelog-writer-6.0.1.tgz",
-      "integrity": "sha512-359t9aHorPw+U+nHzUXHS5ZnPBOizRxfQsWT5ZDHBfvfxQOAik+yfuhKXG66CN5LEWPpMNnIMHUTCKeYNprvHQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3723,8 +3558,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/hosted-git-info": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz",
-      "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3736,8 +3569,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/lru-cache": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
-      "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -3749,8 +3580,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/meow": {
       "version": "8.1.2",
-      "resolved": "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz",
-      "integrity": "sha512-r85E3NdZ+mpYk1C6RjPFEMSE+s1iZMuHtsHAqY0DT3jZczl0diWUZ8g6oU7h0M9cD2EL+PzaYghhCLzR0ZNn5Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3775,8 +3604,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/normalize-package-data": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-3.0.3.tgz",
-      "integrity": "sha512-p2W1sgqij3zMMyRC067Dg16bfzVH+w7hyegmpIvZ4JNjqtGOVAIvLmjBx3yP7YTe9vKJgkoNOPjwQGogDoMXFA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -3791,8 +3618,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/type-fest": {
       "version": "0.18.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz",
-      "integrity": "sha512-OIAYXk8+ISY+qTOwkHtKqzAuxchoMiD9Udx+FSGQDuiRR+PJKJHc2NJAXlbhkGwTt/4/nKZxELY1w3ReWOL8mw==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -3804,8 +3629,6 @@
     },
     "node_modules/conventional-changelog-writer/node_modules/yargs-parser": {
       "version": "20.2.9",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
-      "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
       "dev": true,
       "license": "ISC",
       "engines": {
@@ -3814,8 +3637,6 @@
     },
     "node_modules/conventional-commits-filter": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/conventional-commits-filter/-/conventional-commits-filter-3.0.0.tgz",
-      "integrity": "sha512-1ymej8b5LouPx9Ox0Dw/qAO2dVdfpRFq28e5Y0jJEU8ZrLdy0vOSkkIInwmxErFGhg6SALro60ZrwYFVTUDo4Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3923,8 +3744,6 @@
     },
     "node_modules/css-select": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz",
-      "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -3940,8 +3759,6 @@
     },
     "node_modules/css-what": {
       "version": "6.2.2",
-      "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz",
-      "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -4085,8 +3902,6 @@
     },
     "node_modules/dateformat": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/dateformat/-/dateformat-3.0.3.tgz",
-      "integrity": "sha512-jyCETtSl3VMZMWeRo7iY1FL19ges1t55hMo5yaam4Jrsm5EPL89UQkoQRyiI+Yf4k8r2ZpdngkV8hr1lIdjb3Q==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4119,8 +3934,6 @@
     },
     "node_modules/decamelize-keys": {
       "version": "1.1.1",
-      "resolved": "https://registry.npmjs.org/decamelize-keys/-/decamelize-keys-1.1.1.tgz",
-      "integrity": "sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4136,8 +3949,6 @@
     },
     "node_modules/decamelize-keys/node_modules/map-obj": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-1.0.1.tgz",
-      "integrity": "sha512-7N/q3lyZ+LVCp7PzuxrJr4KMbBE2hW7BT7YNia330OFxIf4d3r5zVpicP2650l7CPN6RM9zOJRl3NGpqSiw3Eg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4238,8 +4049,6 @@
     },
     "node_modules/deprecation": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz",
-      "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -4253,8 +4062,6 @@
     },
     "node_modules/detect-indent": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/detect-indent/-/detect-indent-6.1.0.tgz",
-      "integrity": "sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -4262,7 +4069,7 @@
       }
     },
     "node_modules/diff": {
-      "version": "7.0.0",
+      "version": "8.0.2",
       "license": "BSD-3-Clause",
       "engines": {
         "node": ">=0.3.1"
@@ -4287,8 +4094,6 @@
     },
     "node_modules/dom-serializer": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
-      "integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -4302,8 +4107,6 @@
     },
     "node_modules/domelementtype": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
-      "integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
       "dev": true,
       "funding": [
         {
@@ -4315,8 +4118,6 @@
     },
     "node_modules/domhandler": {
       "version": "5.0.3",
-      "resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
-      "integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4331,8 +4132,6 @@
     },
     "node_modules/domutils": {
       "version": "3.2.2",
-      "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
-      "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4394,8 +4193,6 @@
     },
     "node_modules/entities": {
       "version": "4.5.0",
-      "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
-      "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
       "dev": true,
       "license": "BSD-2-Clause",
       "engines": {
@@ -5169,8 +4966,6 @@
     },
     "node_modules/fast-content-type-parse": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/fast-content-type-parse/-/fast-content-type-parse-3.0.0.tgz",
-      "integrity": "sha512-ZvLdcY8P+N8mGQJahJV5G4U88CSvT1rP8ApL6uETe88MBXrBHAkZlSEySdUlyztF7ccb+Znos3TFqaepHxdhBg==",
       "dev": true,
       "funding": [
         {
@@ -5240,8 +5035,6 @@
     },
     "node_modules/figures": {
       "version": "3.2.0",
-      "resolved": "https://registry.npmjs.org/figures/-/figures-3.2.0.tgz",
-      "integrity": "sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5256,8 +5049,6 @@
     },
     "node_modules/figures/node_modules/escape-string-regexp": {
       "version": "1.0.5",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
-      "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -5773,8 +5564,6 @@
     },
     "node_modules/handlebars": {
       "version": "4.7.8",
-      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.8.tgz",
-      "integrity": "sha512-vafaFqs8MZkRrSX7sFVUdo3ap/eNiLnb4IakshzvP56X5Nr1iGKAIqdX6tMlm6HcNRIkr6AxO5jFEoJzzpT8aQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5795,8 +5584,6 @@
     },
     "node_modules/hard-rejection": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/hard-rejection/-/hard-rejection-2.1.0.tgz",
-      "integrity": "sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -6059,8 +5846,6 @@
     },
     "node_modules/he": {
       "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/he/-/he-1.2.0.tgz",
-      "integrity": "sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw==",
       "dev": true,
       "license": "MIT",
       "bin": {
@@ -7090,8 +6875,6 @@
     },
     "node_modules/jsep": {
       "version": "1.4.0",
-      "resolved": "https://registry.npmjs.org/jsep/-/jsep-1.4.0.tgz",
-      "integrity": "sha512-B7qPcEVE3NVkmSJbaYxvv4cHkVW7DQsZz13pUMrfS8z8Q/BuShN+gcTXrUlPiGqM2/t/EEaI030bpxMqY8gMlw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7167,8 +6950,6 @@
     },
     "node_modules/jsonpath-plus": {
       "version": "10.3.0",
-      "resolved": "https://registry.npmjs.org/jsonpath-plus/-/jsonpath-plus-10.3.0.tgz",
-      "integrity": "sha512-8TNmfeTCk2Le33A3vRRwtuworG/L5RrgMvdjhKZxvyShO+mBu2fP50OWUjRLNtvw344DdDarFh9buFAZs5ujeA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7240,8 +7021,6 @@
     },
     "node_modules/kind-of": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/kind-of/-/kind-of-6.0.3.tgz",
-      "integrity": "sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7403,8 +7182,6 @@
     },
     "node_modules/lodash.ismatch": {
       "version": "4.4.0",
-      "resolved": "https://registry.npmjs.org/lodash.ismatch/-/lodash.ismatch-4.4.0.tgz",
-      "integrity": "sha512-fPMfXjGQEV9Xsq/8MTSgUf255gawYRbjwMyDbcvDhXgV7enSZA0hynz6vMPnpAb5iONEzBHBPsT+0zes5Z301g==",
       "dev": true,
       "license": "MIT"
     },
@@ -7510,8 +7287,6 @@
     },
     "node_modules/map-obj": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz",
-      "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8413,8 +8188,6 @@
     },
     "node_modules/min-indent": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/min-indent/-/min-indent-1.0.1.tgz",
-      "integrity": "sha512-I9jwMn07Sy/IwOj3zVkVik2JTvgpaykDZEigL6Rx6N9LbMywwUSMtxET+7lVoDLLd3O3IXwJwvuuns8UB/HeAg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8453,8 +8226,6 @@
     },
     "node_modules/minimist-options": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz",
-      "integrity": "sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8468,8 +8239,6 @@
     },
     "node_modules/minimist-options/node_modules/is-plain-obj": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz",
-      "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8604,8 +8373,6 @@
     },
     "node_modules/modify-values": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/modify-values/-/modify-values-1.0.1.tgz",
-      "integrity": "sha512-xV2bxeN6F7oYjZWTe/YPAy6MN2M+sL4u/Rlm2AHCIVGfo2p1yGmBHQ6vHehl4bRTZBdHu3TSkWdYgkwpYzAGSw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -8683,8 +8450,6 @@
     },
     "node_modules/neo-async": {
       "version": "2.6.2",
-      "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz",
-      "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==",
       "dev": true,
       "license": "MIT"
     },
@@ -8883,8 +8648,6 @@
     },
     "node_modules/node-html-parser": {
       "version": "6.1.13",
-      "resolved": "https://registry.npmjs.org/node-html-parser/-/node-html-parser-6.1.13.tgz",
-      "integrity": "sha512-qIsTMOY4C/dAa5Q5vsobRpOOvPfC4pB61UVW2uSwZNUp0QU/jCekTal1vMmbO0DgdHeLUJpv/ARmDqErVxA3Sg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -9060,8 +8823,6 @@
     },
     "node_modules/nth-check": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
-      "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -9636,15 +9397,11 @@
     },
     "node_modules/parse-diff": {
       "version": "0.11.1",
-      "resolved": "https://registry.npmjs.org/parse-diff/-/parse-diff-0.11.1.tgz",
-      "integrity": "sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/parse-github-repo-url": {
       "version": "1.4.1",
-      "resolved": "https://registry.npmjs.org/parse-github-repo-url/-/parse-github-repo-url-1.4.1.tgz",
-      "integrity": "sha512-bSWyzBKqcSL4RrncTpGsEKoJ7H8a4L3++ifTAbTFeMHyq2wRV+42DGmQcHIrJIvdcacjIOxEuKH/w4tthF17gg==",
       "dev": true,
       "license": "MIT"
     },
@@ -9991,8 +9748,6 @@
     },
     "node_modules/quick-lru": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz",
-      "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10036,8 +9791,6 @@
     },
     "node_modules/read-pkg": {
       "version": "5.2.0",
-      "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-5.2.0.tgz",
-      "integrity": "sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10052,8 +9805,6 @@
     },
     "node_modules/read-pkg-up": {
       "version": "7.0.1",
-      "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-7.0.1.tgz",
-      "integrity": "sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10070,8 +9821,6 @@
     },
     "node_modules/read-pkg-up/node_modules/find-up": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
-      "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10084,8 +9833,6 @@
     },
     "node_modules/read-pkg-up/node_modules/locate-path": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
-      "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10097,8 +9844,6 @@
     },
     "node_modules/read-pkg-up/node_modules/p-limit": {
       "version": "2.3.0",
-      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz",
-      "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10113,8 +9858,6 @@
     },
     "node_modules/read-pkg-up/node_modules/p-locate": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
-      "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10126,8 +9869,6 @@
     },
     "node_modules/read-pkg-up/node_modules/path-exists": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10136,8 +9877,6 @@
     },
     "node_modules/read-pkg-up/node_modules/type-fest": {
       "version": "0.8.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.8.1.tgz",
-      "integrity": "sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -10146,15 +9885,11 @@
     },
     "node_modules/read-pkg/node_modules/hosted-git-info": {
       "version": "2.8.9",
-      "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-2.8.9.tgz",
-      "integrity": "sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==",
       "dev": true,
       "license": "ISC"
     },
     "node_modules/read-pkg/node_modules/normalize-package-data": {
       "version": "2.5.0",
-      "resolved": "https://registry.npmjs.org/normalize-package-data/-/normalize-package-data-2.5.0.tgz",
-      "integrity": "sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA==",
       "dev": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -10166,8 +9901,6 @@
     },
     "node_modules/read-pkg/node_modules/semver": {
       "version": "5.7.2",
-      "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz",
-      "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==",
       "dev": true,
       "license": "ISC",
       "bin": {
@@ -10176,8 +9909,6 @@
     },
     "node_modules/read-pkg/node_modules/type-fest": {
       "version": "0.6.0",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.6.0.tgz",
-      "integrity": "sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -10197,8 +9928,6 @@
     },
     "node_modules/readdirp/node_modules/picomatch": {
       "version": "2.3.1",
-      "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz",
-      "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10210,8 +9939,6 @@
     },
     "node_modules/redent": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/redent/-/redent-3.0.0.tgz",
-      "integrity": "sha512-6tDA8g98We0zd0GvVeMT9arEOnTw9qM03L9cJXaCjrip1OO764RDBLBfrB4cwzNGDj5OA5ioymC9GkizgWJDUg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10292,8 +10019,6 @@
     },
     "node_modules/release-please": {
       "version": "17.1.2",
-      "resolved": "https://registry.npmjs.org/release-please/-/release-please-17.1.2.tgz",
-      "integrity": "sha512-5p+w8Ex4fcNUr4pLX+Dog5t8fXNLp4UK5tyr//bQ0Vn3g8mnzCErwpRStAimTZdxWNQrC0TeF2gG9gixerS7Hg==",
       "dev": true,
       "license": "Apache-2.0",
       "dependencies": {
@@ -10338,8 +10063,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/auth-token": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz",
-      "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10348,8 +10071,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/core": {
       "version": "5.2.2",
-      "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.2.2.tgz",
-      "integrity": "sha512-/g2d4sW9nUDJOMz3mabVQvOGhVa4e/BN/Um7yca9Bb2XTzPPnfTWHWQg+IsEYO7M3Vx+EXvaM/I2pJWIMun1bg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10367,8 +10088,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/endpoint": {
       "version": "9.0.6",
-      "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.6.tgz",
-      "integrity": "sha512-H1fNTMA57HbkFESSt3Y9+FBICv+0jFceJFPWDePYlR/iMGrwM5ph+Dd4XRQs+8X+PUFURLQgX9ChPfhJ/1uNQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10381,8 +10100,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/graphql": {
       "version": "7.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.1.1.tgz",
-      "integrity": "sha512-3mkDltSfcDUoa176nlGoA32RGjeWjl3K7F/BwHwRMJUW/IteSa4bnSV8p2ThNkcIcZU2umkZWxwETSSCJf2Q7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10396,15 +10113,11 @@
     },
     "node_modules/release-please/node_modules/@octokit/openapi-types": {
       "version": "24.2.0",
-      "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-24.2.0.tgz",
-      "integrity": "sha512-9sIH3nSUttelJSXUrmGzl7QUBFul0/mB8HRYl3fOlgHbIWG+WnYDXU3v/2zMtAvuzZ/ed00Ei6on975FhBfzrg==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/release-please/node_modules/@octokit/plugin-paginate-rest": {
       "version": "11.4.4-cjs.2",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.4.4-cjs.2.tgz",
-      "integrity": "sha512-2dK6z8fhs8lla5PaOTgqfCGBxgAv/le+EhPs27KklPhm1bKObpu6lXzwfUEQ16ajXzqNrKMujsFyo9K2eaoISw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10419,8 +10132,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/plugin-request-log": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-4.0.1.tgz",
-      "integrity": "sha512-GihNqNpGHorUrO7Qa9JbAl0dbLnqJVrV8OXe2Zm5/Y4wFkZQDfTreBzVmiRfJVfE4mClXdihHnbpyyO9FSX4HA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -10432,8 +10143,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/plugin-rest-endpoint-methods": {
       "version": "13.3.2-cjs.1",
-      "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.3.2-cjs.1.tgz",
-      "integrity": "sha512-VUjIjOOvF2oELQmiFpWA1aOPdawpyaCUqcEBc/UOUnj3Xp6DJGrJ1+bjUIIDzdHjnFNO6q57ODMfdEZnoBkCwQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10448,8 +10157,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/request": {
       "version": "8.4.1",
-      "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.4.1.tgz",
-      "integrity": "sha512-qnB2+SY3hkCmBxZsR/MPCybNmbJe4KAlfWErXq+rBKkQJlbjdJeS85VI9r8UqeLYLvnAenU8Q1okM/0MBsAGXw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10464,8 +10171,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/request-error": {
       "version": "5.1.1",
-      "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.1.1.tgz",
-      "integrity": "sha512-v9iyEQJH6ZntoENr9/yXxjuezh4My67CBSu9r6Ve/05Iu5gNgnisNWOsoJHTP6k0Rr0+HQIpnH+kyammu90q/g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10479,8 +10184,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/rest": {
       "version": "20.1.2",
-      "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-20.1.2.tgz",
-      "integrity": "sha512-GmYiltypkHHtihFwPRxlaorG5R9VAHuk/vbszVoRTGXnAsY60wYLkh/E2XiFmdZmqrisw+9FaazS1i5SbdWYgA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10495,8 +10198,6 @@
     },
     "node_modules/release-please/node_modules/@octokit/types": {
       "version": "13.10.0",
-      "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.10.0.tgz",
-      "integrity": "sha512-ifLaO34EbbPj0Xgro4G5lP5asESjwHracYJvVaPIyXMuiuXLlhic3S47cBdTb+jfODkTE5YtGCLt3Ay3+J97sA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10505,8 +10206,6 @@
     },
     "node_modules/release-please/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10521,15 +10220,11 @@
     },
     "node_modules/release-please/node_modules/before-after-hook": {
       "version": "2.2.3",
-      "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz",
-      "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==",
       "dev": true,
       "license": "Apache-2.0"
     },
     "node_modules/release-please/node_modules/chalk": {
       "version": "4.1.2",
-      "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
-      "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10545,8 +10240,6 @@
     },
     "node_modules/release-please/node_modules/conventional-changelog-conventionalcommits": {
       "version": "6.1.0",
-      "resolved": "https://registry.npmjs.org/conventional-changelog-conventionalcommits/-/conventional-changelog-conventionalcommits-6.1.0.tgz",
-      "integrity": "sha512-3cS3GEtR78zTfMzk0AizXKKIdN4OvSh7ibNz6/DPbhWWQu7LqE/8+/GqSodV+sywUR2gpJAdP/1JFf4XtN7Zpw==",
       "dev": true,
       "license": "ISC",
       "dependencies": {
@@ -10556,10 +10249,18 @@
         "node": ">=14"
       }
     },
+    "node_modules/release-please/node_modules/diff": {
+      "version": "7.0.0",
+      "resolved": "https://registry.npmjs.org/diff/-/diff-7.0.0.tgz",
+      "integrity": "sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==",
+      "dev": true,
+      "license": "BSD-3-Clause",
+      "engines": {
+        "node": ">=0.3.1"
+      }
+    },
     "node_modules/release-please/node_modules/supports-color": {
       "version": "7.2.0",
-      "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
-      "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10571,8 +10272,6 @@
     },
     "node_modules/release-please/node_modules/type-fest": {
       "version": "3.13.1",
-      "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-3.13.1.tgz",
-      "integrity": "sha512-tLq3bSNx+xSpwvAJnzrK0Ep5CLNWjvFTOp71URMaAEWBfRb9nnJiBoUe0tF8bI4ZFO3omgBR6NvnbzVUT3Ly4g==",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
       "engines": {
@@ -10584,8 +10283,6 @@
     },
     "node_modules/release-please/node_modules/typescript": {
       "version": "4.9.5",
-      "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.5.tgz",
-      "integrity": "sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==",
       "dev": true,
       "license": "Apache-2.0",
       "bin": {
@@ -10598,8 +10295,6 @@
     },
     "node_modules/release-please/node_modules/universal-user-agent": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz",
-      "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==",
       "dev": true,
       "license": "ISC"
     },
@@ -11403,8 +11098,6 @@
     },
     "node_modules/split": {
       "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/split/-/split-1.0.1.tgz",
-      "integrity": "sha512-mTyOoPbrivtXnwnIxZRFYRrPNtEFKlpB2fvjSnCQUiAA6qAZzqwna5envK4uk6OIeP17CsdF3rSBGYVBsU0Tkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11611,8 +11304,6 @@
     },
     "node_modules/strip-indent": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/strip-indent/-/strip-indent-3.0.0.tgz",
-      "integrity": "sha512-laJTa3Jb+VQpaC6DseHhF7dXVqHTfJPCRDaEbid/drOhgitgYku/letMUqOXFoWV0zIIUbjpdH2t+tYj4bQMRQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12624,8 +12315,6 @@
     },
     "node_modules/tap/node_modules/cliui/node_modules/ansi-styles": {
       "version": "4.3.0",
-      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
-      "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12640,8 +12329,6 @@
     },
     "node_modules/tap/node_modules/cliui/node_modules/color-convert": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
-      "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12653,15 +12340,11 @@
     },
     "node_modules/tap/node_modules/cliui/node_modules/color-name": {
       "version": "1.1.4",
-      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
-      "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/tap/node_modules/cliui/node_modules/wrap-ansi": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
-      "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14152,8 +13835,6 @@
     },
     "node_modules/trim-newlines": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/trim-newlines/-/trim-newlines-3.0.1.tgz",
-      "integrity": "sha512-c1PTsA3tYrIsLGkJkzHF+w9F2EyxfXGo4UyJc4pFL++FMjnq0HJS69T3M7d//gKrFKwy429bouPescbjecU+Zw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -14352,8 +14033,6 @@
     },
     "node_modules/uglify-js": {
       "version": "3.19.3",
-      "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz",
-      "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==",
       "dev": true,
       "license": "BSD-2-Clause",
       "optional": true,
@@ -14493,8 +14172,6 @@
     },
     "node_modules/unist-util-visit": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-2.0.3.tgz",
-      "integrity": "sha512-iJ4/RczbJMkD0712mGktuGpm/U4By4FfDonL7N/9tATGIF4imikjOuagyMY53tnZq3NP6BcmlrHhEKAfGWjh7Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14509,8 +14186,6 @@
     },
     "node_modules/unist-util-visit-parents": {
       "version": "3.1.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-3.1.1.tgz",
-      "integrity": "sha512-1KROIZWo6bcMrZEwiH2UrXDyalAa0uqzWCxCJj6lPOvTve2WkfgCytoDTPaMnodXh1WrXOq0haVYHj99ynJlsg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14524,8 +14199,6 @@
     },
     "node_modules/unist-util-visit-parents/node_modules/unist-util-is": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz",
-      "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -14535,8 +14208,6 @@
     },
     "node_modules/unist-util-visit/node_modules/unist-util-is": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-4.1.0.tgz",
-      "integrity": "sha512-ZOQSsnce92GrxSqlnEEseX0gi7GH9zTJZ0p9dtu87WRb/37mMPO2Ilx1s/t9vBHrFhbgweUwb+t7cIn5dxPhZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -14546,8 +14217,6 @@
     },
     "node_modules/universal-user-agent": {
       "version": "7.0.3",
-      "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.3.tgz",
-      "integrity": "sha512-TmnEAEAsBJVZM/AADELsK76llnwcf9vMKuPz8JflO1frO8Lchitr0fNaN9d+Ap0BjKtqWqd/J17qeDnXh8CL2A==",
       "dev": true,
       "license": "ISC"
     },
@@ -14870,8 +14539,6 @@
     },
     "node_modules/wordwrap": {
       "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
-      "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==",
       "dev": true,
       "license": "MIT"
     },
@@ -15019,8 +14686,6 @@
     },
     "node_modules/xpath": {
       "version": "0.0.34",
-      "resolved": "https://registry.npmjs.org/xpath/-/xpath-0.0.34.tgz",
-      "integrity": "sha512-FxF6+rkr1rNSQrhUNYrAFJpRXNzlDoMxeXN5qI84939ylEv3qqPFKa85Oxr6tDaJKqwW6KKyo2v26TSv3k6LeA==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -15219,7 +14884,7 @@
         "@npmcli/arborist": "^9.1.4",
         "@npmcli/installed-package-contents": "^3.0.0",
         "binary-extensions": "^3.0.0",
-        "diff": "^7.0.0",
+        "diff": "^8.0.2",
         "minimatch": "^10.0.3",
         "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
diff --git a/package.json b/package.json
index b1130b5891c7c..3e6e056b834f2 100644
--- a/package.json
+++ b/package.json
@@ -198,7 +198,7 @@
     "ajv-formats": "^2.1.1",
     "ajv-formats-draft2019": "^1.6.1",
     "cli-table3": "^0.6.4",
-    "diff": "^7.0.0",
+    "diff": "^8.0.2",
     "nock": "^13.4.0",
     "npm-packlist": "^10.0.0",
     "remark": "^14.0.2",
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 4c4901b9c9764..605f1691d95b5 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -50,7 +50,7 @@
     "@npmcli/arborist": "^9.1.4",
     "@npmcli/installed-package-contents": "^3.0.0",
     "binary-extensions": "^3.0.0",
-    "diff": "^7.0.0",
+    "diff": "^8.0.2",
     "minimatch": "^10.0.3",
     "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",

From 6afdda99ed20c7e1fb95ed379fcc9665ef4f340d Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:20:12 -0700
Subject: [PATCH 47/63] chore: ajv-formats@3.0.1

---
 node_modules/.gitignore                       |   2 +
 .../fs-minipass/node_modules/yallist/LICENSE  |  15 +
 .../node_modules/yallist/iterator.js          |   8 +
 .../node_modules/yallist/package.json         |  29 ++
 .../node_modules/yallist/yallist.js           | 426 ++++++++++++++++++
 .../minizlib/node_modules/yallist/LICENSE     |  15 +
 .../minizlib/node_modules/yallist/iterator.js |   8 +
 .../node_modules/yallist/package.json         |  29 ++
 .../minizlib/node_modules/yallist/yallist.js  | 426 ++++++++++++++++++
 package-lock.json                             |  18 +-
 package.json                                  |   2 +-
 11 files changed, 973 insertions(+), 5 deletions(-)
 create mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE
 create mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js
 create mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json
 create mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json
 create mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index f146e9040bbae..3bfc954920036 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -209,11 +209,13 @@
 !/tar/node_modules/fs-minipass/node_modules/
 /tar/node_modules/fs-minipass/node_modules/*
 !/tar/node_modules/fs-minipass/node_modules/minipass
+!/tar/node_modules/fs-minipass/node_modules/yallist
 !/tar/node_modules/minipass
 !/tar/node_modules/minizlib
 !/tar/node_modules/minizlib/node_modules/
 /tar/node_modules/minizlib/node_modules/*
 !/tar/node_modules/minizlib/node_modules/minipass
+!/tar/node_modules/minizlib/node_modules/yallist
 !/tar/node_modules/mkdirp
 !/text-table
 !/tiny-relative-date
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js
new file mode 100644
index 0000000000000..d41c97a19f984
--- /dev/null
+++ b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+module.exports = function (Yallist) {
+  Yallist.prototype[Symbol.iterator] = function* () {
+    for (let walker = this.head; walker; walker = walker.next) {
+      yield walker.value
+    }
+  }
+}
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json
new file mode 100644
index 0000000000000..8a083867d72e0
--- /dev/null
+++ b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json
@@ -0,0 +1,29 @@
+{
+  "name": "yallist",
+  "version": "4.0.0",
+  "description": "Yet Another Linked List",
+  "main": "yallist.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "yallist.js",
+    "iterator.js"
+  ],
+  "dependencies": {},
+  "devDependencies": {
+    "tap": "^12.1.0"
+  },
+  "scripts": {
+    "test": "tap test/*.js --100",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --all; git push origin --tags"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/yallist.git"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC"
+}
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js
new file mode 100644
index 0000000000000..4e83ab1c542a5
--- /dev/null
+++ b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js
@@ -0,0 +1,426 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+  var self = this
+  if (!(self instanceof Yallist)) {
+    self = new Yallist()
+  }
+
+  self.tail = null
+  self.head = null
+  self.length = 0
+
+  if (list && typeof list.forEach === 'function') {
+    list.forEach(function (item) {
+      self.push(item)
+    })
+  } else if (arguments.length > 0) {
+    for (var i = 0, l = arguments.length; i < l; i++) {
+      self.push(arguments[i])
+    }
+  }
+
+  return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+  if (node.list !== this) {
+    throw new Error('removing node which does not belong to this list')
+  }
+
+  var next = node.next
+  var prev = node.prev
+
+  if (next) {
+    next.prev = prev
+  }
+
+  if (prev) {
+    prev.next = next
+  }
+
+  if (node === this.head) {
+    this.head = next
+  }
+  if (node === this.tail) {
+    this.tail = prev
+  }
+
+  node.list.length--
+  node.next = null
+  node.prev = null
+  node.list = null
+
+  return next
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+  if (node === this.head) {
+    return
+  }
+
+  if (node.list) {
+    node.list.removeNode(node)
+  }
+
+  var head = this.head
+  node.list = this
+  node.next = head
+  if (head) {
+    head.prev = node
+  }
+
+  this.head = node
+  if (!this.tail) {
+    this.tail = node
+  }
+  this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+  if (node === this.tail) {
+    return
+  }
+
+  if (node.list) {
+    node.list.removeNode(node)
+  }
+
+  var tail = this.tail
+  node.list = this
+  node.prev = tail
+  if (tail) {
+    tail.next = node
+  }
+
+  this.tail = node
+  if (!this.head) {
+    this.head = node
+  }
+  this.length++
+}
+
+Yallist.prototype.push = function () {
+  for (var i = 0, l = arguments.length; i < l; i++) {
+    push(this, arguments[i])
+  }
+  return this.length
+}
+
+Yallist.prototype.unshift = function () {
+  for (var i = 0, l = arguments.length; i < l; i++) {
+    unshift(this, arguments[i])
+  }
+  return this.length
+}
+
+Yallist.prototype.pop = function () {
+  if (!this.tail) {
+    return undefined
+  }
+
+  var res = this.tail.value
+  this.tail = this.tail.prev
+  if (this.tail) {
+    this.tail.next = null
+  } else {
+    this.head = null
+  }
+  this.length--
+  return res
+}
+
+Yallist.prototype.shift = function () {
+  if (!this.head) {
+    return undefined
+  }
+
+  var res = this.head.value
+  this.head = this.head.next
+  if (this.head) {
+    this.head.prev = null
+  } else {
+    this.tail = null
+  }
+  this.length--
+  return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+  thisp = thisp || this
+  for (var walker = this.head, i = 0; walker !== null; i++) {
+    fn.call(thisp, walker.value, i, this)
+    walker = walker.next
+  }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+  thisp = thisp || this
+  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+    fn.call(thisp, walker.value, i, this)
+    walker = walker.prev
+  }
+}
+
+Yallist.prototype.get = function (n) {
+  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+    // abort out of the list early if we hit a cycle
+    walker = walker.next
+  }
+  if (i === n && walker !== null) {
+    return walker.value
+  }
+}
+
+Yallist.prototype.getReverse = function (n) {
+  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+    // abort out of the list early if we hit a cycle
+    walker = walker.prev
+  }
+  if (i === n && walker !== null) {
+    return walker.value
+  }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+  thisp = thisp || this
+  var res = new Yallist()
+  for (var walker = this.head; walker !== null;) {
+    res.push(fn.call(thisp, walker.value, this))
+    walker = walker.next
+  }
+  return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+  thisp = thisp || this
+  var res = new Yallist()
+  for (var walker = this.tail; walker !== null;) {
+    res.push(fn.call(thisp, walker.value, this))
+    walker = walker.prev
+  }
+  return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+  var acc
+  var walker = this.head
+  if (arguments.length > 1) {
+    acc = initial
+  } else if (this.head) {
+    walker = this.head.next
+    acc = this.head.value
+  } else {
+    throw new TypeError('Reduce of empty list with no initial value')
+  }
+
+  for (var i = 0; walker !== null; i++) {
+    acc = fn(acc, walker.value, i)
+    walker = walker.next
+  }
+
+  return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+  var acc
+  var walker = this.tail
+  if (arguments.length > 1) {
+    acc = initial
+  } else if (this.tail) {
+    walker = this.tail.prev
+    acc = this.tail.value
+  } else {
+    throw new TypeError('Reduce of empty list with no initial value')
+  }
+
+  for (var i = this.length - 1; walker !== null; i--) {
+    acc = fn(acc, walker.value, i)
+    walker = walker.prev
+  }
+
+  return acc
+}
+
+Yallist.prototype.toArray = function () {
+  var arr = new Array(this.length)
+  for (var i = 0, walker = this.head; walker !== null; i++) {
+    arr[i] = walker.value
+    walker = walker.next
+  }
+  return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+  var arr = new Array(this.length)
+  for (var i = 0, walker = this.tail; walker !== null; i++) {
+    arr[i] = walker.value
+    walker = walker.prev
+  }
+  return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+  to = to || this.length
+  if (to < 0) {
+    to += this.length
+  }
+  from = from || 0
+  if (from < 0) {
+    from += this.length
+  }
+  var ret = new Yallist()
+  if (to < from || to < 0) {
+    return ret
+  }
+  if (from < 0) {
+    from = 0
+  }
+  if (to > this.length) {
+    to = this.length
+  }
+  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+    walker = walker.next
+  }
+  for (; walker !== null && i < to; i++, walker = walker.next) {
+    ret.push(walker.value)
+  }
+  return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+  to = to || this.length
+  if (to < 0) {
+    to += this.length
+  }
+  from = from || 0
+  if (from < 0) {
+    from += this.length
+  }
+  var ret = new Yallist()
+  if (to < from || to < 0) {
+    return ret
+  }
+  if (from < 0) {
+    from = 0
+  }
+  if (to > this.length) {
+    to = this.length
+  }
+  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+    walker = walker.prev
+  }
+  for (; walker !== null && i > from; i--, walker = walker.prev) {
+    ret.push(walker.value)
+  }
+  return ret
+}
+
+Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
+  if (start > this.length) {
+    start = this.length - 1
+  }
+  if (start < 0) {
+    start = this.length + start;
+  }
+
+  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
+    walker = walker.next
+  }
+
+  var ret = []
+  for (var i = 0; walker && i < deleteCount; i++) {
+    ret.push(walker.value)
+    walker = this.removeNode(walker)
+  }
+  if (walker === null) {
+    walker = this.tail
+  }
+
+  if (walker !== this.head && walker !== this.tail) {
+    walker = walker.prev
+  }
+
+  for (var i = 0; i < nodes.length; i++) {
+    walker = insert(this, walker, nodes[i])
+  }
+  return ret;
+}
+
+Yallist.prototype.reverse = function () {
+  var head = this.head
+  var tail = this.tail
+  for (var walker = head; walker !== null; walker = walker.prev) {
+    var p = walker.prev
+    walker.prev = walker.next
+    walker.next = p
+  }
+  this.head = tail
+  this.tail = head
+  return this
+}
+
+function insert (self, node, value) {
+  var inserted = node === self.head ?
+    new Node(value, null, node, self) :
+    new Node(value, node, node.next, self)
+
+  if (inserted.next === null) {
+    self.tail = inserted
+  }
+  if (inserted.prev === null) {
+    self.head = inserted
+  }
+
+  self.length++
+
+  return inserted
+}
+
+function push (self, item) {
+  self.tail = new Node(item, self.tail, null, self)
+  if (!self.head) {
+    self.head = self.tail
+  }
+  self.length++
+}
+
+function unshift (self, item) {
+  self.head = new Node(item, null, self.head, self)
+  if (!self.tail) {
+    self.tail = self.head
+  }
+  self.length++
+}
+
+function Node (value, prev, next, list) {
+  if (!(this instanceof Node)) {
+    return new Node(value, prev, next, list)
+  }
+
+  this.list = list
+  this.value = value
+
+  if (prev) {
+    prev.next = this
+    this.prev = prev
+  } else {
+    this.prev = null
+  }
+
+  if (next) {
+    next.prev = this
+    this.next = next
+  } else {
+    this.next = null
+  }
+}
+
+try {
+  // add if support for Symbol.iterator is present
+  require('./iterator.js')(Yallist)
+} catch (er) {}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE b/node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE
new file mode 100644
index 0000000000000..19129e315fe59
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE
@@ -0,0 +1,15 @@
+The ISC License
+
+Copyright (c) Isaac Z. Schlueter and Contributors
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js b/node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js
new file mode 100644
index 0000000000000..d41c97a19f984
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js
@@ -0,0 +1,8 @@
+'use strict'
+module.exports = function (Yallist) {
+  Yallist.prototype[Symbol.iterator] = function* () {
+    for (let walker = this.head; walker; walker = walker.next) {
+      yield walker.value
+    }
+  }
+}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json b/node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json
new file mode 100644
index 0000000000000..8a083867d72e0
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json
@@ -0,0 +1,29 @@
+{
+  "name": "yallist",
+  "version": "4.0.0",
+  "description": "Yet Another Linked List",
+  "main": "yallist.js",
+  "directories": {
+    "test": "test"
+  },
+  "files": [
+    "yallist.js",
+    "iterator.js"
+  ],
+  "dependencies": {},
+  "devDependencies": {
+    "tap": "^12.1.0"
+  },
+  "scripts": {
+    "test": "tap test/*.js --100",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "postpublish": "git push origin --all; git push origin --tags"
+  },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/isaacs/yallist.git"
+  },
+  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
+  "license": "ISC"
+}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js b/node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js
new file mode 100644
index 0000000000000..4e83ab1c542a5
--- /dev/null
+++ b/node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js
@@ -0,0 +1,426 @@
+'use strict'
+module.exports = Yallist
+
+Yallist.Node = Node
+Yallist.create = Yallist
+
+function Yallist (list) {
+  var self = this
+  if (!(self instanceof Yallist)) {
+    self = new Yallist()
+  }
+
+  self.tail = null
+  self.head = null
+  self.length = 0
+
+  if (list && typeof list.forEach === 'function') {
+    list.forEach(function (item) {
+      self.push(item)
+    })
+  } else if (arguments.length > 0) {
+    for (var i = 0, l = arguments.length; i < l; i++) {
+      self.push(arguments[i])
+    }
+  }
+
+  return self
+}
+
+Yallist.prototype.removeNode = function (node) {
+  if (node.list !== this) {
+    throw new Error('removing node which does not belong to this list')
+  }
+
+  var next = node.next
+  var prev = node.prev
+
+  if (next) {
+    next.prev = prev
+  }
+
+  if (prev) {
+    prev.next = next
+  }
+
+  if (node === this.head) {
+    this.head = next
+  }
+  if (node === this.tail) {
+    this.tail = prev
+  }
+
+  node.list.length--
+  node.next = null
+  node.prev = null
+  node.list = null
+
+  return next
+}
+
+Yallist.prototype.unshiftNode = function (node) {
+  if (node === this.head) {
+    return
+  }
+
+  if (node.list) {
+    node.list.removeNode(node)
+  }
+
+  var head = this.head
+  node.list = this
+  node.next = head
+  if (head) {
+    head.prev = node
+  }
+
+  this.head = node
+  if (!this.tail) {
+    this.tail = node
+  }
+  this.length++
+}
+
+Yallist.prototype.pushNode = function (node) {
+  if (node === this.tail) {
+    return
+  }
+
+  if (node.list) {
+    node.list.removeNode(node)
+  }
+
+  var tail = this.tail
+  node.list = this
+  node.prev = tail
+  if (tail) {
+    tail.next = node
+  }
+
+  this.tail = node
+  if (!this.head) {
+    this.head = node
+  }
+  this.length++
+}
+
+Yallist.prototype.push = function () {
+  for (var i = 0, l = arguments.length; i < l; i++) {
+    push(this, arguments[i])
+  }
+  return this.length
+}
+
+Yallist.prototype.unshift = function () {
+  for (var i = 0, l = arguments.length; i < l; i++) {
+    unshift(this, arguments[i])
+  }
+  return this.length
+}
+
+Yallist.prototype.pop = function () {
+  if (!this.tail) {
+    return undefined
+  }
+
+  var res = this.tail.value
+  this.tail = this.tail.prev
+  if (this.tail) {
+    this.tail.next = null
+  } else {
+    this.head = null
+  }
+  this.length--
+  return res
+}
+
+Yallist.prototype.shift = function () {
+  if (!this.head) {
+    return undefined
+  }
+
+  var res = this.head.value
+  this.head = this.head.next
+  if (this.head) {
+    this.head.prev = null
+  } else {
+    this.tail = null
+  }
+  this.length--
+  return res
+}
+
+Yallist.prototype.forEach = function (fn, thisp) {
+  thisp = thisp || this
+  for (var walker = this.head, i = 0; walker !== null; i++) {
+    fn.call(thisp, walker.value, i, this)
+    walker = walker.next
+  }
+}
+
+Yallist.prototype.forEachReverse = function (fn, thisp) {
+  thisp = thisp || this
+  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
+    fn.call(thisp, walker.value, i, this)
+    walker = walker.prev
+  }
+}
+
+Yallist.prototype.get = function (n) {
+  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
+    // abort out of the list early if we hit a cycle
+    walker = walker.next
+  }
+  if (i === n && walker !== null) {
+    return walker.value
+  }
+}
+
+Yallist.prototype.getReverse = function (n) {
+  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
+    // abort out of the list early if we hit a cycle
+    walker = walker.prev
+  }
+  if (i === n && walker !== null) {
+    return walker.value
+  }
+}
+
+Yallist.prototype.map = function (fn, thisp) {
+  thisp = thisp || this
+  var res = new Yallist()
+  for (var walker = this.head; walker !== null;) {
+    res.push(fn.call(thisp, walker.value, this))
+    walker = walker.next
+  }
+  return res
+}
+
+Yallist.prototype.mapReverse = function (fn, thisp) {
+  thisp = thisp || this
+  var res = new Yallist()
+  for (var walker = this.tail; walker !== null;) {
+    res.push(fn.call(thisp, walker.value, this))
+    walker = walker.prev
+  }
+  return res
+}
+
+Yallist.prototype.reduce = function (fn, initial) {
+  var acc
+  var walker = this.head
+  if (arguments.length > 1) {
+    acc = initial
+  } else if (this.head) {
+    walker = this.head.next
+    acc = this.head.value
+  } else {
+    throw new TypeError('Reduce of empty list with no initial value')
+  }
+
+  for (var i = 0; walker !== null; i++) {
+    acc = fn(acc, walker.value, i)
+    walker = walker.next
+  }
+
+  return acc
+}
+
+Yallist.prototype.reduceReverse = function (fn, initial) {
+  var acc
+  var walker = this.tail
+  if (arguments.length > 1) {
+    acc = initial
+  } else if (this.tail) {
+    walker = this.tail.prev
+    acc = this.tail.value
+  } else {
+    throw new TypeError('Reduce of empty list with no initial value')
+  }
+
+  for (var i = this.length - 1; walker !== null; i--) {
+    acc = fn(acc, walker.value, i)
+    walker = walker.prev
+  }
+
+  return acc
+}
+
+Yallist.prototype.toArray = function () {
+  var arr = new Array(this.length)
+  for (var i = 0, walker = this.head; walker !== null; i++) {
+    arr[i] = walker.value
+    walker = walker.next
+  }
+  return arr
+}
+
+Yallist.prototype.toArrayReverse = function () {
+  var arr = new Array(this.length)
+  for (var i = 0, walker = this.tail; walker !== null; i++) {
+    arr[i] = walker.value
+    walker = walker.prev
+  }
+  return arr
+}
+
+Yallist.prototype.slice = function (from, to) {
+  to = to || this.length
+  if (to < 0) {
+    to += this.length
+  }
+  from = from || 0
+  if (from < 0) {
+    from += this.length
+  }
+  var ret = new Yallist()
+  if (to < from || to < 0) {
+    return ret
+  }
+  if (from < 0) {
+    from = 0
+  }
+  if (to > this.length) {
+    to = this.length
+  }
+  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
+    walker = walker.next
+  }
+  for (; walker !== null && i < to; i++, walker = walker.next) {
+    ret.push(walker.value)
+  }
+  return ret
+}
+
+Yallist.prototype.sliceReverse = function (from, to) {
+  to = to || this.length
+  if (to < 0) {
+    to += this.length
+  }
+  from = from || 0
+  if (from < 0) {
+    from += this.length
+  }
+  var ret = new Yallist()
+  if (to < from || to < 0) {
+    return ret
+  }
+  if (from < 0) {
+    from = 0
+  }
+  if (to > this.length) {
+    to = this.length
+  }
+  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
+    walker = walker.prev
+  }
+  for (; walker !== null && i > from; i--, walker = walker.prev) {
+    ret.push(walker.value)
+  }
+  return ret
+}
+
+Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
+  if (start > this.length) {
+    start = this.length - 1
+  }
+  if (start < 0) {
+    start = this.length + start;
+  }
+
+  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
+    walker = walker.next
+  }
+
+  var ret = []
+  for (var i = 0; walker && i < deleteCount; i++) {
+    ret.push(walker.value)
+    walker = this.removeNode(walker)
+  }
+  if (walker === null) {
+    walker = this.tail
+  }
+
+  if (walker !== this.head && walker !== this.tail) {
+    walker = walker.prev
+  }
+
+  for (var i = 0; i < nodes.length; i++) {
+    walker = insert(this, walker, nodes[i])
+  }
+  return ret;
+}
+
+Yallist.prototype.reverse = function () {
+  var head = this.head
+  var tail = this.tail
+  for (var walker = head; walker !== null; walker = walker.prev) {
+    var p = walker.prev
+    walker.prev = walker.next
+    walker.next = p
+  }
+  this.head = tail
+  this.tail = head
+  return this
+}
+
+function insert (self, node, value) {
+  var inserted = node === self.head ?
+    new Node(value, null, node, self) :
+    new Node(value, node, node.next, self)
+
+  if (inserted.next === null) {
+    self.tail = inserted
+  }
+  if (inserted.prev === null) {
+    self.head = inserted
+  }
+
+  self.length++
+
+  return inserted
+}
+
+function push (self, item) {
+  self.tail = new Node(item, self.tail, null, self)
+  if (!self.head) {
+    self.head = self.tail
+  }
+  self.length++
+}
+
+function unshift (self, item) {
+  self.head = new Node(item, null, self.head, self)
+  if (!self.tail) {
+    self.tail = self.head
+  }
+  self.length++
+}
+
+function Node (value, prev, next, list) {
+  if (!(this instanceof Node)) {
+    return new Node(value, prev, next, list)
+  }
+
+  this.list = list
+  this.value = value
+
+  if (prev) {
+    prev.next = this
+    this.prev = prev
+  } else {
+    this.prev = null
+  }
+
+  if (next) {
+    next.prev = this
+    this.next = next
+  } else {
+    this.next = null
+  }
+}
+
+try {
+  // add if support for Symbol.iterator is present
+  require('./iterator.js')(Yallist)
+} catch (er) {}
diff --git a/package-lock.json b/package-lock.json
index bb64e44f0bc78..38db201b7dff0 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -164,7 +164,7 @@
         "@npmcli/template-oss": "4.25.1",
         "@tufjs/repo-mock": "^4.0.0",
         "ajv": "^8.12.0",
-        "ajv-formats": "^2.1.1",
+        "ajv-formats": "^3.0.1",
         "ajv-formats-draft2019": "^1.6.1",
         "cli-table3": "^0.6.4",
         "diff": "^8.0.2",
@@ -2292,7 +2292,9 @@
       }
     },
     "node_modules/ajv-formats": {
-      "version": "2.1.1",
+      "version": "3.0.1",
+      "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
+      "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10251,8 +10253,6 @@
     },
     "node_modules/release-please/node_modules/diff": {
       "version": "7.0.0",
-      "resolved": "https://registry.npmjs.org/diff/-/diff-7.0.0.tgz",
-      "integrity": "sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw==",
       "dev": true,
       "license": "BSD-3-Clause",
       "engines": {
@@ -13597,6 +13597,11 @@
         "node": ">=8"
       }
     },
+    "node_modules/tar/node_modules/fs-minipass/node_modules/yallist": {
+      "version": "4.0.0",
+      "inBundle": true,
+      "license": "ISC"
+    },
     "node_modules/tar/node_modules/minipass": {
       "version": "5.0.0",
       "inBundle": true,
@@ -13628,6 +13633,11 @@
         "node": ">=8"
       }
     },
+    "node_modules/tar/node_modules/minizlib/node_modules/yallist": {
+      "version": "4.0.0",
+      "inBundle": true,
+      "license": "ISC"
+    },
     "node_modules/tar/node_modules/mkdirp": {
       "version": "1.0.4",
       "inBundle": true,
diff --git a/package.json b/package.json
index 3e6e056b834f2..caed68281aec9 100644
--- a/package.json
+++ b/package.json
@@ -195,7 +195,7 @@
     "@npmcli/template-oss": "4.25.1",
     "@tufjs/repo-mock": "^4.0.0",
     "ajv": "^8.12.0",
-    "ajv-formats": "^2.1.1",
+    "ajv-formats": "^3.0.1",
     "ajv-formats-draft2019": "^1.6.1",
     "cli-table3": "^0.6.4",
     "diff": "^8.0.2",

From 07bf5402fbec900f1d69c05b7cb73a987d963d2c Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:22:41 -0700
Subject: [PATCH 48/63] deps: is-cidr@6.0.0

---
 node_modules/cidr-regex/package.json | 25 ++++++++++++-------------
 node_modules/is-cidr/package.json    | 27 +++++++++++++--------------
 package-lock.json                    | 18 ++++++++++++------
 package.json                         |  2 +-
 4 files changed, 38 insertions(+), 34 deletions(-)

diff --git a/node_modules/cidr-regex/package.json b/node_modules/cidr-regex/package.json
index 815837e9a3786..7e8cf3e044a2d 100644
--- a/node_modules/cidr-regex/package.json
+++ b/node_modules/cidr-regex/package.json
@@ -1,6 +1,6 @@
 {
   "name": "cidr-regex",
-  "version": "4.1.3",
+  "version": "5.0.0",
   "description": "Regular expression for matching IP addresses in CIDR notation",
   "author": "silverwind ",
   "contributors": [
@@ -17,23 +17,22 @@
     "dist"
   ],
   "engines": {
-    "node": ">=14"
+    "node": ">=20"
   },
   "dependencies": {
     "ip-regex": "^5.0.0"
   },
   "devDependencies": {
-    "@types/node": "22.13.4",
+    "@types/node": "24.1.0",
     "eslint": "8.57.0",
-    "eslint-config-silverwind": "99.0.0",
-    "eslint-config-silverwind-typescript": "9.2.2",
-    "typescript": "5.7.3",
-    "typescript-config-silverwind": "8.0.0",
-    "updates": "16.4.2",
-    "versions": "12.1.3",
-    "vite": "6.1.0",
-    "vite-config-silverwind": "4.0.0",
-    "vitest": "3.0.5",
-    "vitest-config-silverwind": "10.0.0"
+    "eslint-config-silverwind": "101.4.1",
+    "typescript": "5.8.3",
+    "typescript-config-silverwind": "9.0.8",
+    "updates": "16.5.2",
+    "versions": "13.1.1",
+    "vite": "7.0.6",
+    "vite-config-silverwind": "5.4.0",
+    "vitest": "3.2.4",
+    "vitest-config-silverwind": "10.2.0"
   }
 }
diff --git a/node_modules/is-cidr/package.json b/node_modules/is-cidr/package.json
index 2e512b947e7f1..267af3c20fc5b 100644
--- a/node_modules/is-cidr/package.json
+++ b/node_modules/is-cidr/package.json
@@ -1,6 +1,6 @@
 {
   "name": "is-cidr",
-  "version": "5.1.1",
+  "version": "6.0.0",
   "description": "Check if a string is an IP address in CIDR notation",
   "author": "silverwind ",
   "contributors": [
@@ -17,23 +17,22 @@
     "dist"
   ],
   "engines": {
-    "node": ">=14"
+    "node": ">=20"
   },
   "dependencies": {
-    "cidr-regex": "^4.1.1"
+    "cidr-regex": "^5.0.0"
   },
   "devDependencies": {
-    "@types/node": "22.13.4",
+    "@types/node": "24.1.0",
     "eslint": "8.57.0",
-    "eslint-config-silverwind": "99.0.0",
-    "eslint-config-silverwind-typescript": "9.2.2",
-    "typescript": "5.7.3",
-    "typescript-config-silverwind": "7.0.0",
-    "updates": "16.4.2",
-    "versions": "12.1.3",
-    "vite": "6.1.0",
-    "vite-config-silverwind": "4.0.0",
-    "vitest": "3.0.5",
-    "vitest-config-silverwind": "10.0.0"
+    "eslint-config-silverwind": "101.4.1",
+    "typescript": "5.8.3",
+    "typescript-config-silverwind": "9.0.8",
+    "updates": "16.5.2",
+    "versions": "13.1.1",
+    "vite": "7.0.6",
+    "vite-config-silverwind": "5.4.0",
+    "vitest": "3.2.4",
+    "vitest-config-silverwind": "10.2.0"
   }
 }
diff --git a/package-lock.json b/package-lock.json
index 38db201b7dff0..7fdf426bdecdc 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -107,7 +107,7 @@
         "hosted-git-info": "^9.0.0",
         "ini": "^5.0.0",
         "init-package-json": "^8.2.2",
-        "is-cidr": "^5.1.1",
+        "is-cidr": "^6.0.0",
         "json-parse-even-better-errors": "^4.0.0",
         "libnpmaccess": "^10.0.1",
         "libnpmdiff": "^8.0.7",
@@ -3065,14 +3065,16 @@
       }
     },
     "node_modules/cidr-regex": {
-      "version": "4.1.3",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-5.0.0.tgz",
+      "integrity": "sha512-9FT511D25oLAQYkfKLqWUMzoitgITToOqNThDAM8ujXaeXDulDPffJQflag918J8DN8mUPXRpS9J3U5GlIHGSQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
         "ip-regex": "^5.0.0"
       },
       "engines": {
-        "node": ">=14"
+        "node": ">=20"
       }
     },
     "node_modules/clean-stack": {
@@ -6062,6 +6064,8 @@
     },
     "node_modules/ip-regex": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-5.0.0.tgz",
+      "integrity": "sha512-fOCG6lhoKKakwv+C6KdsOnGvgXnmgfmp0myi3bcNwj3qfwPAxRKWEuFhvEFF7ceYIz6+1jRZ+yguLFAmUNPEfw==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -6200,14 +6204,16 @@
       }
     },
     "node_modules/is-cidr": {
-      "version": "5.1.1",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-6.0.0.tgz",
+      "integrity": "sha512-LM62mX6QmYvLL7c0AZ2rnqGUAHcgkNwre56e8rrAdRLjUmwqrOrqGj6E/iVSrL7xxZfGQUR0gBVx9pW5CLIbig==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
-        "cidr-regex": "^4.1.1"
+        "cidr-regex": "^5.0.0"
       },
       "engines": {
-        "node": ">=14"
+        "node": ">=20"
       }
     },
     "node_modules/is-core-module": {
diff --git a/package.json b/package.json
index caed68281aec9..73470d1da8ea3 100644
--- a/package.json
+++ b/package.json
@@ -74,7 +74,7 @@
     "hosted-git-info": "^9.0.0",
     "ini": "^5.0.0",
     "init-package-json": "^8.2.2",
-    "is-cidr": "^5.1.1",
+    "is-cidr": "^6.0.0",
     "json-parse-even-better-errors": "^4.0.0",
     "libnpmaccess": "^10.0.1",
     "libnpmdiff": "^8.0.7",

From 0f41bace5677d0d624c67ff3fac5e2caeebcb399 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:34:09 -0700
Subject: [PATCH 49/63] deps: tiny-relative-date@2.0.2

---
 .../tiny-relative-date/lib/factory.js         |  32 ++---
 node_modules/tiny-relative-date/package.json  |  26 ++--
 .../tiny-relative-date/src/factory.js         | 113 +++++++++++-------
 .../tiny-relative-date/translations/fa.js     |  31 +++++
 .../tiny-relative-date/translations/ne.js     |  31 +++++
 package-lock.json                             |   6 +-
 package.json                                  |   2 +-
 7 files changed, 164 insertions(+), 77 deletions(-)
 create mode 100644 node_modules/tiny-relative-date/translations/fa.js
 create mode 100644 node_modules/tiny-relative-date/translations/ne.js

diff --git a/node_modules/tiny-relative-date/lib/factory.js b/node_modules/tiny-relative-date/lib/factory.js
index ac901614457c9..bde0b693690f9 100644
--- a/node_modules/tiny-relative-date/lib/factory.js
+++ b/node_modules/tiny-relative-date/lib/factory.js
@@ -32,7 +32,7 @@ function relativeDateFactory(translations) {
       delta = calculateDelta(now, date);
     }
 
-    var translate = function translate(translatePhrase, timeValue) {
+    var translate = function translate(translatePhrase, timeValue, rawValue) {
       var key = void 0;
 
       if (translatePhrase === 'justNow') {
@@ -46,7 +46,7 @@ function relativeDateFactory(translations) {
       var translation = translations[key];
 
       if (typeof translation === 'function') {
-        return translation(timeValue);
+        return translation(timeValue, rawValue);
       }
 
       return translation.replace('{{time}}', timeValue);
@@ -54,46 +54,46 @@ function relativeDateFactory(translations) {
 
     switch (false) {
       case !(delta < 30):
-        return translate('justNow');
+        return translate('justNow', delta, delta);
 
       case !(delta < minute):
-        return translate('seconds', delta);
+        return translate('seconds', delta, delta);
 
       case !(delta < 2 * minute):
-        return translate('aMinute');
+        return translate('aMinute', 1, delta);
 
       case !(delta < hour):
-        return translate('minutes', Math.floor(delta / minute));
+        return translate('minutes', Math.floor(delta / minute), delta);
 
       case Math.floor(delta / hour) !== 1:
-        return translate('anHour');
+        return translate('anHour', Math.floor(delta / minute), delta);
 
       case !(delta < day):
-        return translate('hours', Math.floor(delta / hour));
+        return translate('hours', Math.floor(delta / hour), delta);
 
       case !(delta < day * 2):
-        return translate('aDay');
+        return translate('aDay', 1, delta);
 
       case !(delta < week):
-        return translate('days', Math.floor(delta / day));
+        return translate('days', Math.floor(delta / day), delta);
 
       case Math.floor(delta / week) !== 1:
-        return translate('aWeek');
+        return translate('aWeek', 1, delta);
 
       case !(delta < month):
-        return translate('weeks', Math.floor(delta / week));
+        return translate('weeks', Math.floor(delta / week), delta);
 
       case Math.floor(delta / month) !== 1:
-        return translate('aMonth');
+        return translate('aMonth', 1, delta);
 
       case !(delta < year):
-        return translate('months', Math.floor(delta / month));
+        return translate('months', Math.floor(delta / month), delta);
 
       case Math.floor(delta / year) !== 1:
-        return translate('aYear');
+        return translate('aYear', 1, delta);
 
       default:
-        return translate('overAYear');
+        return translate('overAYear', Math.floor(delta / year), delta);
     }
   };
 }
diff --git a/node_modules/tiny-relative-date/package.json b/node_modules/tiny-relative-date/package.json
index 26c88147f9e69..deb0cea29a4bd 100644
--- a/node_modules/tiny-relative-date/package.json
+++ b/node_modules/tiny-relative-date/package.json
@@ -1,14 +1,14 @@
 {
   "name": "tiny-relative-date",
-  "version": "1.3.0",
+  "version": "2.0.2",
   "description": "Tiny function that provides relative, human-readable dates.",
   "main": "lib/index.js",
   "module": "src/index.js",
   "scripts": {
-    "build": "babel src -d lib",
+    "build": "babel src -d lib && cp src/*.d.ts lib/",
     "test": "npm run eslint && npm run jasmine",
-    "eslint": "eslint --fix src/**/*.js",
-    "jasmine": "jasmine",
+    "eslint": "eslint --fix src/**/*.js spec/*.js",
+    "jasmine": "TZ=UTC jasmine",
     "prepublish": "npm run build"
   },
   "files": [
@@ -23,17 +23,17 @@
     "url": "https://github.com/wildlyinaccurate/relative-date.git"
   },
   "devDependencies": {
-    "babel-cli": "^6.24.1",
+    "babel-cli": "^6.26.0",
     "babel-plugin-add-module-exports": "^0.2.1",
     "babel-preset-es2015": "^6.24.1",
-    "babel-register": "^6.24.1",
-    "eslint": "^4.1.0",
-    "eslint-config-standard": "^10.2.1",
-    "eslint-plugin-import": "^2.6.0",
-    "eslint-plugin-node": "^5.0.0",
-    "eslint-plugin-promise": "^3.5.0",
+    "babel-register": "^6.26.0",
+    "eslint": "^4.19.1",
+    "eslint-config-standard": "^11.0.0",
+    "eslint-plugin-import": "^2.11.0",
+    "eslint-plugin-node": "^6.0.1",
+    "eslint-plugin-promise": "^3.7.0",
     "eslint-plugin-standard": "^3.0.1",
-    "jasmine": "^2.6.0",
-    "jasmine-spec-reporter": "^4.1.1"
+    "jasmine": "^3.1.0",
+    "jasmine-spec-reporter": "^4.2.1"
   }
 }
diff --git a/node_modules/tiny-relative-date/src/factory.js b/node_modules/tiny-relative-date/src/factory.js
index 689359bcf9bc9..65d310c9444a0 100644
--- a/node_modules/tiny-relative-date/src/factory.js
+++ b/node_modules/tiny-relative-date/src/factory.js
@@ -1,89 +1,112 @@
 const calculateDelta = (now, date) => Math.round(Math.abs(now - date) / 1000)
 
+const minute = 60
+const hour = minute * 60
+const day = hour * 24
+const week = day * 7
+const month = day * 30
+const year = day * 365
+
 export default function relativeDateFactory (translations) {
-  return function relativeDate (date, now = new Date()) {
-    if (!(date instanceof Date)) {
-      date = new Date(date)
+  const translate = (date, now, translatePhrase, timeValue, rawValue) => {
+    let key
+
+    if (translatePhrase === 'justNow') {
+      key = translatePhrase
+    } else if (now >= date) {
+      key = `${translatePhrase}Ago`
+    } else {
+      key = `${translatePhrase}FromNow`
     }
 
-    let delta = null
+    const translation = translations[key]
 
-    const minute = 60
-    const hour = minute * 60
-    const day = hour * 24
-    const week = day * 7
-    const month = day * 30
-    const year = day * 365
-
-    delta = calculateDelta(now, date)
-
-    if (delta > day && delta < week) {
-      date = new Date(date.getFullYear(), date.getMonth(), date.getDate(), 0, 0, 0)
-      delta = calculateDelta(now, date)
+    if (typeof translation === 'function') {
+      return translation(timeValue, rawValue)
     }
 
-    const translate = (translatePhrase, timeValue) => {
-      let key
-
-      if (translatePhrase === 'justNow') {
-        key = translatePhrase
-      } else if (now >= date) {
-        key = `${translatePhrase}Ago`
-      } else {
-        key = `${translatePhrase}FromNow`
-      }
+    return translation.replace('{{time}}', timeValue)
+  }
 
-      const translation = translations[key]
+  return function relativeDate (date, now = new Date()) {
+    if (!(date instanceof Date)) {
+      date = new Date(date)
+    }
 
-      if (typeof translation === 'function') {
-        return translation(timeValue)
-      }
+    let delta = calculateDelta(now, date)
 
-      return translation.replace('{{time}}', timeValue)
+    if (delta > day && delta < week) {
+      date = new Date(
+        date.getFullYear(),
+        date.getMonth(),
+        date.getDate(),
+        0,
+        0,
+        0
+      )
+      delta = calculateDelta(now, date)
     }
 
     switch (false) {
       case !(delta < 30):
-        return translate('justNow')
+        return translate(date, now, 'justNow', delta, delta)
 
       case !(delta < minute):
-        return translate('seconds', delta)
+        return translate(date, now, 'seconds', delta, delta)
 
       case !(delta < 2 * minute):
-        return translate('aMinute')
+        return translate(date, now, 'aMinute', 1, delta)
 
       case !(delta < hour):
-        return translate('minutes', Math.floor(delta / minute))
+        return translate(
+          date,
+          now,
+          'minutes',
+          Math.floor(delta / minute),
+          delta
+        )
 
       case Math.floor(delta / hour) !== 1:
-        return translate('anHour')
+        return translate(
+          date,
+          now,
+          'anHour',
+          Math.floor(delta / minute),
+          delta
+        )
 
       case !(delta < day):
-        return translate('hours', Math.floor(delta / hour))
+        return translate(date, now, 'hours', Math.floor(delta / hour), delta)
 
       case !(delta < day * 2):
-        return translate('aDay')
+        return translate(date, now, 'aDay', 1, delta)
 
       case !(delta < week):
-        return translate('days', Math.floor(delta / day))
+        return translate(date, now, 'days', Math.floor(delta / day), delta)
 
       case Math.floor(delta / week) !== 1:
-        return translate('aWeek')
+        return translate(date, now, 'aWeek', 1, delta)
 
       case !(delta < month):
-        return translate('weeks', Math.floor(delta / week))
+        return translate(date, now, 'weeks', Math.floor(delta / week), delta)
 
       case Math.floor(delta / month) !== 1:
-        return translate('aMonth')
+        return translate(date, now, 'aMonth', 1, delta)
 
       case !(delta < year):
-        return translate('months', Math.floor(delta / month))
+        return translate(date, now, 'months', Math.floor(delta / month), delta)
 
       case Math.floor(delta / year) !== 1:
-        return translate('aYear')
+        return translate(date, now, 'aYear', 1, delta)
 
       default:
-        return translate('overAYear')
+        return translate(
+          date,
+          now,
+          'overAYear',
+          Math.floor(delta / year),
+          delta
+        )
     }
   }
 }
diff --git a/node_modules/tiny-relative-date/translations/fa.js b/node_modules/tiny-relative-date/translations/fa.js
new file mode 100644
index 0000000000000..2a92ba19bab95
--- /dev/null
+++ b/node_modules/tiny-relative-date/translations/fa.js
@@ -0,0 +1,31 @@
+module.exports = {
+  justNow: "اکنون",
+  secondsAgo: "{{time}} ثانیه قبل",
+  aMinuteAgo: "یک دقیقه قبل",
+  minutesAgo: "{{time}} دقیقه قبل",
+  anHourAgo: "یک ساعت قبل",
+  hoursAgo: "{{time}} ساعت قبل",
+  aDayAgo: "دیروز",
+  daysAgo: "{{time}} روز قبل",
+  aWeekAgo: "یک هفته قبل",
+  weeksAgo: "{{time}} هفته قبل",
+  aMonthAgo: "یک ماه قبل",
+  monthsAgo: "{{time}} ماه قبل",
+  aYearAgo: "یک سال قبل",
+  yearsAgo: "{{time}} سال قبل",
+  overAYearAgo: "بیش از یک سال قبل",
+  secondsFromNow: "{{time}} ثانیه بعد",
+  aMinuteFromNow: "یک دقیقه بعد",
+  minutesFromNow: "{{time}} دقیقه بعد",
+  anHourFromNow: "an hour from now",
+  hoursFromNow: "{{time}} ساعت بعد",
+  aDayFromNow: "فردا",
+  daysFromNow: "{{time}} روز بعد",
+  aWeekFromNow: "یک هفته بعد",
+  weeksFromNow: "{{time}} هفته بعد",
+  aMonthFromNow: "یک ماه بعد",
+  monthsFromNow: "{{time}} ماه بعد",
+  aYearFromNow: "یک سال بعد",
+  yearsFromNow: "{{time}} سال بعد",
+  overAYearFromNow: "بیش از یک سال بعد"
+}
diff --git a/node_modules/tiny-relative-date/translations/ne.js b/node_modules/tiny-relative-date/translations/ne.js
new file mode 100644
index 0000000000000..331128ced0e9a
--- /dev/null
+++ b/node_modules/tiny-relative-date/translations/ne.js
@@ -0,0 +1,31 @@
+module.exports = {
+  justNow: 'भर्खर',
+  secondsAgo: '{{time}} सेकेण्ड अघि',
+  aMinuteAgo: '१ मिनेट अघि',
+  minutesAgo: '{{time}} मिनेट अघि',
+  anHourAgo: '१ घण्टा अघि',
+  hoursAgo: '{{time}} घण्टा अघि',
+  aDayAgo: 'हिजो',
+  daysAgo: '{{time}} दिन अघि',
+  aWeekAgo: '१ हप्ता अघि',
+  weeksAgo: '{{time}} हप्ता अघि',
+  aMonthAgo: '१ महिना अघि',
+  monthsAgo: '{{time}} महिना अघि',
+  aYearAgo: '१ वर्ष अघि',
+  yearsAgo: '{{time}} वर्ष अघि',
+  overAYearAgo: '१ वर्षभन्दा धेरै',
+  secondsFromNow: 'अहिलेदेखि {{time}} सेकेण्ड',
+  aMinuteFromNow: 'अहिलेदेखि १ मिनेट',
+  minutesFromNow: 'अहिलेदेखि {{time}} मिनेट',
+  anHourFromNow: 'अहिलेदेखि १ घण्टा',
+  hoursFromNow: 'अहिलेदेखि {{time}} घण्टा',
+  aDayFromNow: 'भोलि',
+  daysFromNow: 'अहिलेदेखि {{time}} दिन',
+  aWeekFromNow: 'अहिलेदेखि १ हप्ता',
+  weeksFromNow: 'अहिलेदेखि {{time}} हप्ता',
+  aMonthFromNow: 'अहिलेदेखि १ महिना',
+  monthsFromNow: 'अहिलेदेखि {{time}} महिना',
+  aYearFromNow: 'अहिलेदेखि १ वर्ष',
+  yearsFromNow: 'अहिलेदेखि {{time}} वर्ष',
+  overAYearFromNow: 'अहिलेदेखि १ वर्ष भन्दा धेरै'
+}
diff --git a/package-lock.json b/package-lock.json
index 7fdf426bdecdc..47b8496967384 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -146,7 +146,7 @@
         "supports-color": "^10.2.2",
         "tar": "^6.2.1",
         "text-table": "~0.2.0",
-        "tiny-relative-date": "^1.3.0",
+        "tiny-relative-date": "^2.0.2",
         "treeverse": "^3.0.0",
         "validate-npm-package-name": "^6.0.2",
         "which": "^5.0.0"
@@ -13756,7 +13756,9 @@
       "license": "MIT"
     },
     "node_modules/tiny-relative-date": {
-      "version": "1.3.0",
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/tiny-relative-date/-/tiny-relative-date-2.0.2.tgz",
+      "integrity": "sha512-rGxAbeL9z3J4pI2GtBEoFaavHdO4RKAU54hEuOef5kfx5aPqiQtbhYktMOTL5OA33db8BjsDcLXuNp+/v19PHw==",
       "inBundle": true,
       "license": "MIT"
     },
diff --git a/package.json b/package.json
index 73470d1da8ea3..ea15ca636ec43 100644
--- a/package.json
+++ b/package.json
@@ -113,7 +113,7 @@
     "supports-color": "^10.2.2",
     "tar": "^6.2.1",
     "text-table": "~0.2.0",
-    "tiny-relative-date": "^1.3.0",
+    "tiny-relative-date": "^2.0.2",
     "treeverse": "^3.0.0",
     "validate-npm-package-name": "^6.0.2",
     "which": "^5.0.0"

From 05301a49fb3feed88736722c8b511dde3a1117e6 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:40:49 -0700
Subject: [PATCH 50/63] chore: remark@15.0.1

---
 package-lock.json | 945 +++++++++++++++++++++++++++++++++++++++++++++-
 package.json      |   2 +-
 2 files changed, 926 insertions(+), 21 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 47b8496967384..9bc466812a8f4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -170,7 +170,7 @@
         "diff": "^8.0.2",
         "nock": "^13.4.0",
         "npm-packlist": "^10.0.0",
-        "remark": "^14.0.2",
+        "remark": "^15.0.1",
         "remark-gfm": "^3.0.1",
         "remark-github": "^11.2.4",
         "rimraf": "^6.0.1",
@@ -2293,8 +2293,6 @@
     },
     "node_modules/ajv-formats": {
       "version": "3.0.1",
-      "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz",
-      "integrity": "sha512-8iUql50EUR+uUcdRQ3HDqa6EVyo3docL8g5WJ3FNcWmu62IbkGUue/pEyLBW8VGKKucTPgqeks4fIU1DA4yowQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -3066,8 +3064,6 @@
     },
     "node_modules/cidr-regex": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/cidr-regex/-/cidr-regex-5.0.0.tgz",
-      "integrity": "sha512-9FT511D25oLAQYkfKLqWUMzoitgITToOqNThDAM8ujXaeXDulDPffJQflag918J8DN8mUPXRpS9J3U5GlIHGSQ==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -4072,6 +4068,20 @@
         "node": ">=8"
       }
     },
+    "node_modules/devlop": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz",
+      "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "dequal": "^2.0.0"
+      },
+      "funding": {
+        "type": "github",
+        "url": "https://github.com/sponsors/wooorm"
+      }
+    },
     "node_modules/diff": {
       "version": "8.0.2",
       "license": "BSD-3-Clause",
@@ -6064,8 +6074,6 @@
     },
     "node_modules/ip-regex": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/ip-regex/-/ip-regex-5.0.0.tgz",
-      "integrity": "sha512-fOCG6lhoKKakwv+C6KdsOnGvgXnmgfmp0myi3bcNwj3qfwPAxRKWEuFhvEFF7ceYIz6+1jRZ+yguLFAmUNPEfw==",
       "inBundle": true,
       "license": "MIT",
       "engines": {
@@ -6205,8 +6213,6 @@
     },
     "node_modules/is-cidr": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/is-cidr/-/is-cidr-6.0.0.tgz",
-      "integrity": "sha512-LM62mX6QmYvLL7c0AZ2rnqGUAHcgkNwre56e8rrAdRLjUmwqrOrqGj6E/iVSrL7xxZfGQUR0gBVx9pW5CLIbig==",
       "inBundle": true,
       "license": "BSD-2-Clause",
       "dependencies": {
@@ -10316,14 +10322,16 @@
       }
     },
     "node_modules/remark": {
-      "version": "14.0.3",
+      "version": "15.0.1",
+      "resolved": "https://registry.npmjs.org/remark/-/remark-15.0.1.tgz",
+      "integrity": "sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "remark-parse": "^10.0.0",
-        "remark-stringify": "^10.0.0",
-        "unified": "^10.0.0"
+        "@types/mdast": "^4.0.0",
+        "remark-parse": "^11.0.0",
+        "remark-stringify": "^11.0.0",
+        "unified": "^11.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -10466,13 +10474,912 @@
       }
     },
     "node_modules/remark-stringify": {
-      "version": "10.0.3",
+      "version": "11.0.0",
+      "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz",
+      "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-markdown": "^1.0.0",
-        "unified": "^10.0.0"
+        "@types/mdast": "^4.0.0",
+        "mdast-util-to-markdown": "^2.0.0",
+        "unified": "^11.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/remark-stringify/node_modules/mdast-util-phrasing": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
+      "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/mdast-util-to-markdown": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz",
+      "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "longest-streak": "^3.0.0",
+        "mdast-util-phrasing": "^4.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-decode-string": "^2.0.0",
+        "unist-util-visit": "^5.0.0",
+        "zwitch": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-classify-character": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
+      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-decode-numeric-character-reference": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
+      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-decode-string": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
+      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "decode-named-character-reference": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/remark-stringify/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-visit": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/remark/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/remark/node_modules/mdast-util-from-markdown": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
+      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "micromark": "^4.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-decode-string": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/micromark": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
+      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "@types/debug": "^4.0.0",
+        "debug": "^4.0.0",
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-core-commonmark": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-combine-extensions": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "micromark-util-subtokenize": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-core-commonmark": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
+      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-factory-destination": "^2.0.0",
+        "micromark-factory-label": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-factory-title": "^2.0.0",
+        "micromark-factory-whitespace": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-html-tag-name": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-subtokenize": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-factory-destination": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
+      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-factory-label": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
+      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-factory-space": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
+      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-factory-title": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
+      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-factory-whitespace": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
+      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-chunked": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
+      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-classify-character": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
+      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-combine-extensions": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
+      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-decode-numeric-character-reference": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
+      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-decode-string": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
+      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "decode-named-character-reference": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-encode": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/remark/node_modules/micromark-util-html-tag-name": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
+      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/remark/node_modules/micromark-util-normalize-identifier": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
+      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-resolve-all": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
+      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-sanitize-uri": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-subtokenize": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
+      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/remark/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/remark/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/remark/node_modules/remark-parse": {
+      "version": "11.0.0",
+      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
+      "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
+        "unified": "^11.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -13757,8 +14664,6 @@
     },
     "node_modules/tiny-relative-date": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/tiny-relative-date/-/tiny-relative-date-2.0.2.tgz",
-      "integrity": "sha512-rGxAbeL9z3J4pI2GtBEoFaavHdO4RKAU54hEuOef5kfx5aPqiQtbhYktMOTL5OA33db8BjsDcLXuNp+/v19PHw==",
       "inBundle": true,
       "license": "MIT"
     },
diff --git a/package.json b/package.json
index ea15ca636ec43..f2b325a11333b 100644
--- a/package.json
+++ b/package.json
@@ -201,7 +201,7 @@
     "diff": "^8.0.2",
     "nock": "^13.4.0",
     "npm-packlist": "^10.0.0",
-    "remark": "^14.0.2",
+    "remark": "^15.0.1",
     "remark-gfm": "^3.0.1",
     "remark-github": "^11.2.4",
     "rimraf": "^6.0.1",

From 93d190bcb02342ce4d159168f12b86f071d6fca7 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:43:13 -0700
Subject: [PATCH 51/63] chore: remark-parse@11.0.0

---
 docs/package.json |   2 +-
 package-lock.json | 871 +++++++++++++++++++++++-----------------------
 2 files changed, 443 insertions(+), 430 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 1946a8b6e9664..25b42440e3d38 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -30,7 +30,7 @@
     "rehype-stringify": "^9.0.3",
     "remark-gfm": "^3.0.1",
     "remark-man": "^8.0.1",
-    "remark-parse": "^10.0.1",
+    "remark-parse": "^11.0.0",
     "remark-rehype": "^10.1.0",
     "semver": "^7.3.8",
     "tap": "^16.3.8",
diff --git a/package-lock.json b/package-lock.json
index 9bc466812a8f4..77429d1dc885a 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -195,7 +195,7 @@
         "rehype-stringify": "^9.0.3",
         "remark-gfm": "^3.0.1",
         "remark-man": "^8.0.1",
-        "remark-parse": "^10.0.1",
+        "remark-parse": "^11.0.0",
         "remark-rehype": "^10.1.0",
         "semver": "^7.3.8",
         "tap": "^16.3.8",
@@ -4070,8 +4070,6 @@
     },
     "node_modules/devlop": {
       "version": "1.1.0",
-      "resolved": "https://registry.npmjs.org/devlop/-/devlop-1.1.0.tgz",
-      "integrity": "sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10323,8 +10321,6 @@
     },
     "node_modules/remark": {
       "version": "15.0.1",
-      "resolved": "https://registry.npmjs.org/remark/-/remark-15.0.1.tgz",
-      "integrity": "sha512-Eht5w30ruCXgFmxVUSlNWQ9iiimq07URKeFS3hNc8cUWy1llX4KDWfyEDZRycMc+znsN9Ux5/tJ/BFdgdOwA3A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10445,43 +10441,15 @@
       }
     },
     "node_modules/remark-parse": {
-      "version": "10.0.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-from-markdown": "^1.0.0",
-        "unified": "^10.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-rehype": {
-      "version": "10.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-hast": "^12.1.0",
-        "unified": "^10.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify": {
       "version": "11.0.0",
-      "resolved": "https://registry.npmjs.org/remark-stringify/-/remark-stringify-11.0.0.tgz",
-      "integrity": "sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==",
+      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
+      "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@types/mdast": "^4.0.0",
-        "mdast-util-to-markdown": "^2.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
         "unified": "^11.0.0"
       },
       "funding": {
@@ -10489,7 +10457,7 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/@types/mdast": {
+    "node_modules/remark-parse/node_modules/@types/mdast": {
       "version": "4.0.4",
       "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
       "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
@@ -10499,51 +10467,39 @@
         "@types/unist": "*"
       }
     },
-    "node_modules/remark-stringify/node_modules/@types/unist": {
+    "node_modules/remark-parse/node_modules/@types/unist": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
       "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-stringify/node_modules/mdast-util-phrasing": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
-      "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/mdast-util-to-markdown": {
-      "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz",
-      "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==",
+    "node_modules/remark-parse/node_modules/mdast-util-from-markdown": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
+      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@types/mdast": "^4.0.0",
         "@types/unist": "^3.0.0",
-        "longest-streak": "^3.0.0",
-        "mdast-util-phrasing": "^4.0.0",
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
         "mdast-util-to-string": "^4.0.0",
-        "micromark-util-classify-character": "^2.0.0",
+        "micromark": "^4.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
         "micromark-util-decode-string": "^2.0.0",
-        "unist-util-visit": "^5.0.0",
-        "zwitch": "^2.0.0"
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/mdast-util-to-string": {
+    "node_modules/remark-parse/node_modules/mdast-util-to-string": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
       "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
@@ -10557,10 +10513,10 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+    "node_modules/remark-parse/node_modules/micromark": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
+      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
       "dev": true,
       "funding": [
         {
@@ -10574,14 +10530,29 @@
       ],
       "license": "MIT",
       "dependencies": {
+        "@types/debug": "^4.0.0",
+        "debug": "^4.0.0",
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-core-commonmark": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-combine-extensions": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "micromark-util-subtokenize": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/micromark-util-classify-character": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
-      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+    "node_modules/remark-parse/node_modules/micromark-core-commonmark": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
+      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
       "dev": true,
       "funding": [
         {
@@ -10595,15 +10566,28 @@
       ],
       "license": "MIT",
       "dependencies": {
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-factory-destination": "^2.0.0",
+        "micromark-factory-label": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-factory-title": "^2.0.0",
+        "micromark-factory-whitespace": "^2.0.0",
         "micromark-util-character": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-html-tag-name": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-subtokenize": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/micromark-util-decode-numeric-character-reference": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
-      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+    "node_modules/remark-parse/node_modules/micromark-factory-destination": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
+      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
       "dev": true,
       "funding": [
         {
@@ -10617,13 +10601,15 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/micromark-util-decode-string": {
+    "node_modules/remark-parse/node_modules/micromark-factory-label": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
-      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
+      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
       "dev": true,
       "funding": [
         {
@@ -10637,16 +10623,16 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
         "micromark-util-character": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/micromark-util-symbol": {
+    "node_modules/remark-parse/node_modules/micromark-factory-space": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
+      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
       "dev": true,
       "funding": [
         {
@@ -10658,12 +10644,16 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
     },
-    "node_modules/remark-stringify/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+    "node_modules/remark-parse/node_modules/micromark-factory-title": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
+      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
       "dev": true,
       "funding": [
         {
@@ -10675,177 +10665,82 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
-    },
-    "node_modules/remark-stringify/node_modules/unified": {
-      "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
-      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+    "node_modules/remark-parse/node_modules/micromark-factory-whitespace": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
+      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
       "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+    "node_modules/remark-parse/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/unist-util-visit": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
+    "node_modules/remark-parse/node_modules/micromark-util-chunked": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
+      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
       "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark-stringify/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/vfile": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
-    "node_modules/remark/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark/node_modules/mdast-util-from-markdown": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
-      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "@types/unist": "^3.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "mdast-util-to-string": "^4.0.0",
-        "micromark": "^4.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-decode-string": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/micromark": {
-      "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
-      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
+    "node_modules/remark-parse/node_modules/micromark-util-classify-character": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
+      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
       "dev": true,
       "funding": [
         {
@@ -10859,29 +10754,15 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "@types/debug": "^4.0.0",
-        "debug": "^4.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-core-commonmark": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
         "micromark-util-character": "^2.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-combine-extensions": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
-        "micromark-util-subtokenize": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-core-commonmark": {
-      "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
-      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
+    "node_modules/remark-parse/node_modules/micromark-util-combine-extensions": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
+      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
       "dev": true,
       "funding": [
         {
@@ -10895,28 +10776,14 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-factory-destination": "^2.0.0",
-        "micromark-factory-label": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-factory-title": "^2.0.0",
-        "micromark-factory-whitespace": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
         "micromark-util-chunked": "^2.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-html-tag-name": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-subtokenize": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-factory-destination": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
-      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
+    "node_modules/remark-parse/node_modules/micromark-util-decode-numeric-character-reference": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
+      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
       "dev": true,
       "funding": [
         {
@@ -10930,15 +10797,13 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-factory-label": {
+    "node_modules/remark-parse/node_modules/micromark-util-decode-string": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
-      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
+      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
       "dev": true,
       "funding": [
         {
@@ -10952,16 +10817,16 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
+        "decode-named-character-reference": "^1.0.0",
         "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-factory-space": {
+    "node_modules/remark-parse/node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
-      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -10973,16 +10838,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/remark/node_modules/micromark-factory-title": {
+    "node_modules/remark-parse/node_modules/micromark-util-html-tag-name": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
-      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
+      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
+      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
       "dev": true,
       "funding": [
         {
@@ -10994,18 +10855,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/remark/node_modules/micromark-factory-whitespace": {
+    "node_modules/remark-parse/node_modules/micromark-util-normalize-identifier": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
-      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
+      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
+      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
       "dev": true,
       "funding": [
         {
@@ -11019,16 +10874,13 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+    "node_modules/remark-parse/node_modules/micromark-util-resolve-all": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
+      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
       "dev": true,
       "funding": [
         {
@@ -11042,14 +10894,13 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-chunked": {
+    "node_modules/remark-parse/node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
-      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -11063,13 +10914,15 @@
       ],
       "license": "MIT",
       "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
         "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-classify-character": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
-      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+    "node_modules/remark-parse/node_modules/micromark-util-subtokenize": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
+      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
       "dev": true,
       "funding": [
         {
@@ -11083,15 +10936,16 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
+        "devlop": "^1.0.0",
+        "micromark-util-chunked": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-combine-extensions": {
+    "node_modules/remark-parse/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
-      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -11103,16 +10957,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/remark/node_modules/micromark-util-decode-numeric-character-reference": {
+    "node_modules/remark-parse/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
-      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -11124,72 +10974,161 @@
           "url": "https://opencollective.com/unified"
         }
       ],
+      "license": "MIT"
+    },
+    "node_modules/remark-parse/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
+      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-decode-string": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
-      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+    "node_modules/remark-parse/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-encode": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
+    "node_modules/remark-parse/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
     },
-    "node_modules/remark/node_modules/micromark-util-html-tag-name": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
-      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
+    "node_modules/remark-parse/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-rehype": {
+      "version": "10.1.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/hast": "^2.0.0",
+        "@types/mdast": "^3.0.0",
+        "mdast-util-to-hast": "^12.1.0",
+        "unified": "^10.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify": {
+      "version": "11.0.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "mdast-util-to-markdown": "^2.0.0",
+        "unified": "^11.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/@types/unist": {
+      "version": "3.0.3",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT"
     },
-    "node_modules/remark/node_modules/micromark-util-normalize-identifier": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
-      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
+    "node_modules/remark-stringify/node_modules/mdast-util-phrasing": {
+      "version": "4.1.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/mdast-util-to-markdown": {
+      "version": "2.1.2",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "longest-streak": "^3.0.0",
+        "mdast-util-phrasing": "^4.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-decode-string": "^2.0.0",
+        "unist-util-visit": "^5.0.0",
+        "zwitch": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/micromark-util-character": {
+      "version": "2.1.1",
       "dev": true,
       "funding": [
         {
@@ -11203,13 +11142,12 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-resolve-all": {
+    "node_modules/remark-stringify/node_modules/micromark-util-classify-character": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
-      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
       "dev": true,
       "funding": [
         {
@@ -11223,13 +11161,13 @@
       ],
       "license": "MIT",
       "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-sanitize-uri": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
+    "node_modules/remark-stringify/node_modules/micromark-util-decode-numeric-character-reference": {
+      "version": "2.0.2",
       "dev": true,
       "funding": [
         {
@@ -11243,15 +11181,11 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
         "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-subtokenize": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
-      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
+    "node_modules/remark-stringify/node_modules/micromark-util-decode-string": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -11265,16 +11199,14 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "decode-named-character-reference": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/remark/node_modules/micromark-util-symbol": {
+    "node_modules/remark-stringify/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -11288,10 +11220,8 @@
       ],
       "license": "MIT"
     },
-    "node_modules/remark/node_modules/micromark-util-types": {
+    "node_modules/remark-stringify/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -11305,27 +11235,116 @@
       ],
       "license": "MIT"
     },
-    "node_modules/remark/node_modules/remark-parse": {
-      "version": "11.0.0",
-      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
-      "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==",
+    "node_modules/remark-stringify/node_modules/unified": {
+      "version": "11.0.5",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
-        "unified": "^11.0.0"
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-visit": {
+      "version": "5.0.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/vfile": {
+      "version": "6.0.3",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/remark/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/remark/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/remark/node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11344,8 +11363,6 @@
     },
     "node_modules/remark/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11358,8 +11375,6 @@
     },
     "node_modules/remark/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11373,8 +11388,6 @@
     },
     "node_modules/remark/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {

From 4a46b5aaaeaa68ce718d4d4a95a74b9e49da8129 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:44:38 -0700
Subject: [PATCH 52/63] chore: remark-github@12.0.0

---
 package-lock.json | 219 +++++++++++++++++++++++++++++++++++++++++++---
 package.json      |   2 +-
 2 files changed, 206 insertions(+), 15 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 77429d1dc885a..4bb20711fc69c 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -172,7 +172,7 @@
         "npm-packlist": "^10.0.0",
         "remark": "^15.0.1",
         "remark-gfm": "^3.0.1",
-        "remark-github": "^11.2.4",
+        "remark-github": "^12.0.0",
         "rimraf": "^6.0.1",
         "spawk": "^1.7.1",
         "tap": "^16.3.9"
@@ -10350,15 +10350,107 @@
       }
     },
     "node_modules/remark-github": {
-      "version": "11.2.4",
+      "version": "12.0.0",
+      "resolved": "https://registry.npmjs.org/remark-github/-/remark-github-12.0.0.tgz",
+      "integrity": "sha512-ByefQKFN184LeiGRCabfl7zUJsdlMYWEhiLX1gpmQ11yFg6xSuOTW7LVCv0oc1x+YvUMJW23NU36sJX2RWGgvg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-find-and-replace": "^2.0.0",
-        "mdast-util-to-string": "^3.0.0",
-        "unified": "^10.0.0",
-        "unist-util-visit": "^4.0.0"
+        "@types/mdast": "^4.0.0",
+        "mdast-util-find-and-replace": "^3.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "to-vfile": "^8.0.0",
+        "unist-util-visit": "^5.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-github/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/remark-github/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/remark-github/node_modules/escape-string-regexp": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">=12"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
+    "node_modules/remark-github/node_modules/mdast-util-find-and-replace": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
+      "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "escape-string-regexp": "^5.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-github/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-github/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-github/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -10366,13 +10458,15 @@
       }
     },
     "node_modules/remark-github/node_modules/unist-util-visit": {
-      "version": "4.1.2",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -10380,12 +10474,44 @@
       }
     },
     "node_modules/remark-github/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-github/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-github/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -14738,6 +14864,71 @@
         "node": ">=8.0"
       }
     },
+    "node_modules/to-vfile": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-8.0.0.tgz",
+      "integrity": "sha512-IcmH1xB5576MJc9qcfEC/m/nQCFt3fzMHz45sSlgJyTWjRbKW1HAkJpuf3DgE57YzIlZcwcBZA5ENQbBo4aLkg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/to-vfile/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/to-vfile/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/to-vfile/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/to-vfile/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
     "node_modules/tough-cookie": {
       "version": "4.1.4",
       "dev": true,
diff --git a/package.json b/package.json
index f2b325a11333b..9c4fe309adb76 100644
--- a/package.json
+++ b/package.json
@@ -203,7 +203,7 @@
     "npm-packlist": "^10.0.0",
     "remark": "^15.0.1",
     "remark-gfm": "^3.0.1",
-    "remark-github": "^11.2.4",
+    "remark-github": "^12.0.0",
     "rimraf": "^6.0.1",
     "spawk": "^1.7.1",
     "tap": "^16.3.9"

From 208cb93fabae2b11993497382ceb48dacc41e490 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:47:00 -0700
Subject: [PATCH 53/63] chore: remark-gfm@4.0.1

---
 docs/package.json |    2 +-
 package-lock.json | 3198 +++++++++++++++++++++++++++------------------
 package.json      |    2 +-
 3 files changed, 1940 insertions(+), 1262 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 25b42440e3d38..fbd0b3ca1d936 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -28,7 +28,7 @@
     "ignore-walk": "^8.0.0",
     "jsdom": "^24.0.0",
     "rehype-stringify": "^9.0.3",
-    "remark-gfm": "^3.0.1",
+    "remark-gfm": "^4.0.1",
     "remark-man": "^8.0.1",
     "remark-parse": "^11.0.0",
     "remark-rehype": "^10.1.0",
diff --git a/package-lock.json b/package-lock.json
index 4bb20711fc69c..b4aa9ba548e0e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -171,7 +171,7 @@
         "nock": "^13.4.0",
         "npm-packlist": "^10.0.0",
         "remark": "^15.0.1",
-        "remark-gfm": "^3.0.1",
+        "remark-gfm": "^4.0.1",
         "remark-github": "^12.0.0",
         "rimraf": "^6.0.1",
         "spawk": "^1.7.1",
@@ -193,7 +193,7 @@
         "ignore-walk": "^8.0.0",
         "jsdom": "^24.0.0",
         "rehype-stringify": "^9.0.3",
-        "remark-gfm": "^3.0.1",
+        "remark-gfm": "^4.0.1",
         "remark-man": "^8.0.1",
         "remark-parse": "^11.0.0",
         "remark-rehype": "^10.1.0",
@@ -2134,6 +2134,8 @@
     },
     "node_modules/@types/debug": {
       "version": "4.1.12",
+      "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
+      "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2169,6 +2171,8 @@
     },
     "node_modules/@types/ms": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
+      "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
       "dev": true,
       "license": "MIT"
     },
@@ -7039,14 +7043,6 @@
         "node": ">=0.10.0"
       }
     },
-    "node_modules/kleur": {
-      "version": "4.1.5",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=6"
-      }
-    },
     "node_modules/leven": {
       "version": "2.1.0",
       "dev": true,
@@ -7310,6 +7306,8 @@
     },
     "node_modules/markdown-table": {
       "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
+      "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7367,22 +7365,43 @@
       }
     },
     "node_modules/mdast-util-find-and-replace": {
-      "version": "2.2.2",
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
+      "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
+        "@types/mdast": "^4.0.0",
         "escape-string-regexp": "^5.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.0.0"
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/mdast-util-find-and-replace/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/mdast-util-find-and-replace/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
+      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7392,461 +7411,568 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+    "node_modules/mdast-util-find-and-replace/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-from-markdown": {
-      "version": "1.3.1",
+    "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "mdast-util-to-string": "^3.1.0",
-        "micromark": "^3.0.0",
-        "micromark-util-decode-numeric-character-reference": "^1.0.0",
-        "micromark-util-decode-string": "^1.0.0",
-        "micromark-util-normalize-identifier": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "unist-util-stringify-position": "^3.0.0",
-        "uvu": "^0.5.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm": {
+    "node_modules/mdast-util-from-markdown": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
+      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "mdast-util-from-markdown": "^1.0.0",
-        "mdast-util-gfm-autolink-literal": "^1.0.0",
-        "mdast-util-gfm-footnote": "^1.0.0",
-        "mdast-util-gfm-strikethrough": "^1.0.0",
-        "mdast-util-gfm-table": "^1.0.0",
-        "mdast-util-gfm-task-list-item": "^1.0.0",
-        "mdast-util-to-markdown": "^1.0.0"
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "micromark": "^4.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-decode-string": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-autolink-literal": {
-      "version": "1.0.3",
+    "node_modules/mdast-util-from-markdown/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "ccount": "^2.0.0",
-        "mdast-util-find-and-replace": "^2.0.0",
-        "micromark-util-character": "^1.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "@types/unist": "*"
       }
     },
-    "node_modules/mdast-util-gfm-footnote": {
-      "version": "1.0.2",
+    "node_modules/mdast-util-from-markdown/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-from-markdown/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-markdown": "^1.3.0",
-        "micromark-util-normalize-identifier": "^1.0.0"
+        "@types/mdast": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-strikethrough": {
-      "version": "1.0.3",
+    "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-from-markdown/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-markdown": "^1.3.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-table": {
-      "version": "1.0.7",
+    "node_modules/mdast-util-gfm": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz",
+      "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "markdown-table": "^3.0.0",
-        "mdast-util-from-markdown": "^1.0.0",
-        "mdast-util-to-markdown": "^1.3.0"
+        "mdast-util-from-markdown": "^2.0.0",
+        "mdast-util-gfm-autolink-literal": "^2.0.0",
+        "mdast-util-gfm-footnote": "^2.0.0",
+        "mdast-util-gfm-strikethrough": "^2.0.0",
+        "mdast-util-gfm-table": "^2.0.0",
+        "mdast-util-gfm-task-list-item": "^2.0.0",
+        "mdast-util-to-markdown": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-task-list-item": {
-      "version": "1.0.2",
+    "node_modules/mdast-util-gfm-autolink-literal": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz",
+      "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-markdown": "^1.3.0"
+        "@types/mdast": "^4.0.0",
+        "ccount": "^2.0.0",
+        "devlop": "^1.0.0",
+        "mdast-util-find-and-replace": "^3.0.0",
+        "micromark-util-character": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-phrasing": {
-      "version": "3.0.1",
+    "node_modules/mdast-util-gfm-autolink-literal/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "unist-util-is": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "@types/unist": "*"
       }
     },
-    "node_modules/mdast-util-to-hast": {
-      "version": "12.3.0",
+    "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/mdast": "^3.0.0",
-        "mdast-util-definitions": "^5.0.0",
-        "micromark-util-sanitize-uri": "^1.1.0",
-        "trim-lines": "^3.0.0",
-        "unist-util-generated": "^2.0.0",
-        "unist-util-position": "^4.0.0",
-        "unist-util-visit": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
-      "version": "4.1.2",
+    "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-gfm-footnote": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz",
+      "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
+        "@types/mdast": "^4.0.0",
+        "devlop": "^1.1.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "mdast-util-to-markdown": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+    "node_modules/mdast-util-gfm-footnote/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "@types/unist": "*"
       }
     },
-    "node_modules/mdast-util-to-markdown": {
-      "version": "1.5.0",
+    "node_modules/mdast-util-gfm-strikethrough": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz",
+      "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "longest-streak": "^3.0.0",
-        "mdast-util-phrasing": "^3.0.0",
-        "mdast-util-to-string": "^3.0.0",
-        "micromark-util-decode-string": "^1.0.0",
-        "unist-util-visit": "^4.0.0",
-        "zwitch": "^2.0.0"
+        "@types/mdast": "^4.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "mdast-util-to-markdown": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
-      "version": "4.1.2",
+    "node_modules/mdast-util-gfm-strikethrough/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/mdast-util-gfm-table": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz",
+      "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "devlop": "^1.0.0",
+        "markdown-table": "^3.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "mdast-util-to-markdown": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+    "node_modules/mdast-util-gfm-table/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/mdast-util-gfm-task-list-item": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz",
+      "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "devlop": "^1.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "mdast-util-to-markdown": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-string": {
-      "version": "3.2.0",
+    "node_modules/mdast-util-gfm-task-list-item/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0"
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/mdast-util-phrasing": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
+      "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/meow": {
-      "version": "12.1.1",
+    "node_modules/mdast-util-phrasing/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
-      "engines": {
-        "node": ">=16.10"
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/mdast-util-phrasing/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-phrasing/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
       },
       "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark": {
-      "version": "3.2.0",
+    "node_modules/mdast-util-to-hast": {
+      "version": "12.3.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "@types/debug": "^4.0.0",
-        "debug": "^4.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-core-commonmark": "^1.0.1",
-        "micromark-factory-space": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-chunked": "^1.0.0",
-        "micromark-util-combine-extensions": "^1.0.0",
-        "micromark-util-decode-numeric-character-reference": "^1.0.0",
-        "micromark-util-encode": "^1.0.0",
-        "micromark-util-normalize-identifier": "^1.0.0",
-        "micromark-util-resolve-all": "^1.0.0",
-        "micromark-util-sanitize-uri": "^1.0.0",
-        "micromark-util-subtokenize": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.1",
-        "uvu": "^0.5.0"
+        "@types/hast": "^2.0.0",
+        "@types/mdast": "^3.0.0",
+        "mdast-util-definitions": "^5.0.0",
+        "micromark-util-sanitize-uri": "^1.1.0",
+        "trim-lines": "^3.0.0",
+        "unist-util-generated": "^2.0.0",
+        "unist-util-position": "^4.0.0",
+        "unist-util-visit": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-core-commonmark": {
-      "version": "1.1.0",
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
+      "version": "4.1.2",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-factory-destination": "^1.0.0",
-        "micromark-factory-label": "^1.0.0",
-        "micromark-factory-space": "^1.0.0",
-        "micromark-factory-title": "^1.0.0",
-        "micromark-factory-whitespace": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-chunked": "^1.0.0",
-        "micromark-util-classify-character": "^1.0.0",
-        "micromark-util-html-tag-name": "^1.0.0",
-        "micromark-util-normalize-identifier": "^1.0.0",
-        "micromark-util-resolve-all": "^1.0.0",
-        "micromark-util-subtokenize": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.1",
-        "uvu": "^0.5.0"
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0",
+        "unist-util-visit-parents": "^5.1.1"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm": {
-      "version": "2.0.3",
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
+      "version": "5.1.3",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-extension-gfm-autolink-literal": "^1.0.0",
-        "micromark-extension-gfm-footnote": "^1.0.0",
-        "micromark-extension-gfm-strikethrough": "^1.0.0",
-        "micromark-extension-gfm-table": "^1.0.0",
-        "micromark-extension-gfm-tagfilter": "^1.0.0",
-        "micromark-extension-gfm-task-list-item": "^1.0.0",
-        "micromark-util-combine-extensions": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-autolink-literal": {
-      "version": "1.0.5",
+    "node_modules/mdast-util-to-markdown": {
+      "version": "2.1.2",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz",
+      "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-sanitize-uri": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "longest-streak": "^3.0.0",
+        "mdast-util-phrasing": "^4.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-decode-string": "^2.0.0",
+        "unist-util-visit": "^5.0.0",
+        "zwitch": "^2.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-footnote": {
-      "version": "1.1.2",
+    "node_modules/mdast-util-to-markdown/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-core-commonmark": "^1.0.0",
-        "micromark-factory-space": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-normalize-identifier": "^1.0.0",
-        "micromark-util-sanitize-uri": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "uvu": "^0.5.0"
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/mdast-util-to-markdown/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/mdast-util-to-markdown/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-strikethrough": {
-      "version": "1.0.7",
+    "node_modules/mdast-util-to-markdown/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-chunked": "^1.0.0",
-        "micromark-util-classify-character": "^1.0.0",
-        "micromark-util-resolve-all": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "uvu": "^0.5.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-table": {
-      "version": "1.0.7",
+    "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-factory-space": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "uvu": "^0.5.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-tagfilter": {
-      "version": "1.0.2",
+    "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-types": "^1.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-task-list-item": {
-      "version": "1.0.5",
+    "node_modules/mdast-util-to-string": {
+      "version": "3.2.0",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-factory-space": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "uvu": "^0.5.0"
+        "@types/mdast": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-factory-destination": {
-      "version": "1.1.0",
+    "node_modules/meow": {
+      "version": "12.1.1",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+      "engines": {
+        "node": ">=16.10"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/micromark-factory-label": {
-      "version": "1.1.0",
+    "node_modules/micromark": {
+      "version": "4.0.2",
+      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
+      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
       "dev": true,
       "funding": [
         {
@@ -7860,33 +7986,29 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "uvu": "^0.5.0"
-      }
-    },
-    "node_modules/micromark-factory-space": {
-      "version": "1.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+        "@types/debug": "^4.0.0",
+        "debug": "^4.0.0",
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-core-commonmark": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-combine-extensions": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "micromark-util-subtokenize": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-factory-title": {
-      "version": "1.1.0",
+    "node_modules/micromark-core-commonmark": {
+      "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
+      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
       "dev": true,
       "funding": [
         {
@@ -7900,14 +8022,28 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-factory-space": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+        "decode-named-character-reference": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-factory-destination": "^2.0.0",
+        "micromark-factory-label": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-factory-title": "^2.0.0",
+        "micromark-factory-whitespace": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-html-tag-name": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-subtokenize": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-factory-whitespace": {
-      "version": "1.1.0",
+    "node_modules/micromark-core-commonmark/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -7921,14 +8057,14 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-factory-space": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-util-character": {
-      "version": "1.2.0",
+    "node_modules/micromark-core-commonmark/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -7940,14 +8076,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/micromark-util-chunked": {
-      "version": "1.1.0",
+    "node_modules/micromark-core-commonmark/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -7959,13 +8093,50 @@
           "url": "https://opencollective.com/unified"
         }
       ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz",
+      "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==",
+      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^1.0.0"
+        "micromark-extension-gfm-autolink-literal": "^2.0.0",
+        "micromark-extension-gfm-footnote": "^2.0.0",
+        "micromark-extension-gfm-strikethrough": "^2.0.0",
+        "micromark-extension-gfm-table": "^2.0.0",
+        "micromark-extension-gfm-tagfilter": "^2.0.0",
+        "micromark-extension-gfm-task-list-item": "^2.0.0",
+        "micromark-util-combine-extensions": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-util-classify-character": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-autolink-literal": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz",
+      "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -7979,13 +8150,14 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-util-combine-extensions": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-encode": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -7997,14 +8169,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-chunked": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/micromark-util-decode-numeric-character-reference": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-sanitize-uri": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -8018,11 +8188,15 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^1.0.0"
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/micromark-util-decode-string": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8034,16 +8208,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-decode-numeric-character-reference": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/micromark-util-encode": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8057,8 +8227,31 @@
       ],
       "license": "MIT"
     },
-    "node_modules/micromark-util-html-tag-name": {
-      "version": "1.2.0",
+    "node_modules/micromark-extension-gfm-footnote": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz",
+      "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-core-commonmark": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8070,10 +8263,16 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
     },
-    "node_modules/micromark-util-normalize-identifier": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-encode": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -8085,13 +8284,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^1.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/micromark-util-resolve-all": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-sanitize-uri": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -8105,11 +8303,15 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-types": "^1.0.0"
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/micromark-util-sanitize-uri": {
-      "version": "1.2.0",
+    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8121,15 +8323,12 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-encode": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/micromark-util-subtokenize": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8141,16 +8340,31 @@
           "url": "https://opencollective.com/unified"
         }
       ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm-strikethrough": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz",
+      "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==",
+      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-chunked": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0",
-        "uvu": "^0.5.0"
+        "devlop": "^1.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-util-symbol": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8164,8 +8378,10 @@
       ],
       "license": "MIT"
     },
-    "node_modules/micromark-util-types": {
-      "version": "1.1.0",
+    "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8179,20 +8395,1194 @@
       ],
       "license": "MIT"
     },
-    "node_modules/mime-db": {
-      "version": "1.52.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
-    "node_modules/mime-types": {
-      "version": "2.1.35",
+    "node_modules/micromark-extension-gfm-table": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz",
+      "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "mime-db": "1.52.0"
+        "devlop": "^1.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm-tagfilter": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz",
+      "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/micromark-extension-gfm-tagfilter/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm-task-list-item": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz",
+      "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-extension-gfm/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-destination": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
+      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-destination/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-destination/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-destination/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-label": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
+      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-label/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-label/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-label/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-space": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
+      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-space/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-space/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-space/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-title": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
+      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-title/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-title/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-title/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-whitespace": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
+      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-whitespace/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-factory-whitespace/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-factory-whitespace/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-character": {
+      "version": "1.2.0",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^1.0.0",
+        "micromark-util-types": "^1.0.0"
+      }
+    },
+    "node_modules/micromark-util-chunked": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
+      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-chunked/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-classify-character": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
+      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-classify-character/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-classify-character/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-classify-character/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-combine-extensions": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
+      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-combine-extensions/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-decode-numeric-character-reference": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
+      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-decode-numeric-character-reference/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-decode-string": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
+      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "decode-named-character-reference": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-decode-string/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-decode-string/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-decode-string/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-encode": {
+      "version": "1.1.0",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-html-tag-name": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
+      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-normalize-identifier": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
+      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-normalize-identifier/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-resolve-all": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
+      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-resolve-all/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-sanitize-uri": {
+      "version": "1.2.0",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^1.0.0",
+        "micromark-util-encode": "^1.0.0",
+        "micromark-util-symbol": "^1.0.0"
+      }
+    },
+    "node_modules/micromark-util-subtokenize": {
+      "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
+      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark-util-subtokenize/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-subtokenize/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-symbol": {
+      "version": "1.1.0",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark-util-types": {
+      "version": "1.1.0",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/micromark/node_modules/micromark-util-encode": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark/node_modules/micromark-util-sanitize-uri": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/micromark/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/micromark/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/mime-db": {
+      "version": "1.52.0",
+      "dev": true,
+      "license": "MIT",
+      "engines": {
+        "node": ">= 0.6"
+      }
+    },
+    "node_modules/mime-types": {
+      "version": "2.1.35",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "mime-db": "1.52.0"
       },
       "engines": {
         "node": ">= 0.6"
@@ -8404,14 +9794,6 @@
       "dev": true,
       "license": "BSD-3-Clause"
     },
-    "node_modules/mri": {
-      "version": "1.2.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=4"
-      }
-    },
     "node_modules/ms": {
       "version": "2.1.3",
       "inBundle": true,
@@ -10314,268 +11696,38 @@
       "license": "ISC",
       "dependencies": {
         "es6-error": "^4.0.1"
-      },
-      "engines": {
-        "node": ">=4"
-      }
-    },
-    "node_modules/remark": {
-      "version": "15.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "remark-parse": "^11.0.0",
-        "remark-stringify": "^11.0.0",
-        "unified": "^11.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-gfm": {
-      "version": "3.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "mdast-util-gfm": "^2.0.0",
-        "micromark-extension-gfm": "^2.0.0",
-        "unified": "^10.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github": {
-      "version": "12.0.0",
-      "resolved": "https://registry.npmjs.org/remark-github/-/remark-github-12.0.0.tgz",
-      "integrity": "sha512-ByefQKFN184LeiGRCabfl7zUJsdlMYWEhiLX1gpmQ11yFg6xSuOTW7LVCv0oc1x+YvUMJW23NU36sJX2RWGgvg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-find-and-replace": "^3.0.0",
-        "mdast-util-to-string": "^4.0.0",
-        "to-vfile": "^8.0.0",
-        "unist-util-visit": "^5.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
-    "node_modules/remark-github/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark-github/node_modules/escape-string-regexp": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
-      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=12"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
-    "node_modules/remark-github/node_modules/mdast-util-find-and-replace": {
-      "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
-      "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "escape-string-regexp": "^5.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/unist-util-visit": {
-      "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/vfile": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-man": {
-      "version": "8.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "github-slugger": "^1.0.0",
-        "groff-escape": "^2.0.0",
-        "mdast-util-definitions": "^5.0.0",
-        "mdast-util-to-string": "^3.0.0",
-        "months": "^2.0.0",
-        "unified": "^10.0.0",
-        "unist-util-visit": "^4.0.0",
-        "zwitch": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-man/node_modules/unist-util-visit": {
-      "version": "4.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+      },
+      "engines": {
+        "node": ">=4"
       }
     },
-    "node_modules/remark-man/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+    "node_modules/remark": {
+      "version": "15.0.1",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/mdast": "^4.0.0",
+        "remark-parse": "^11.0.0",
+        "remark-stringify": "^11.0.0",
+        "unified": "^11.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse": {
-      "version": "11.0.0",
-      "resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
-      "integrity": "sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==",
+    "node_modules/remark-gfm": {
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz",
+      "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@types/mdast": "^4.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
+        "mdast-util-gfm": "^3.0.0",
+        "micromark-extension-gfm": "^3.0.0",
+        "remark-parse": "^11.0.0",
+        "remark-stringify": "^11.0.0",
         "unified": "^11.0.0"
       },
       "funding": {
@@ -10583,7 +11735,7 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/@types/mdast": {
+    "node_modules/remark-gfm/node_modules/@types/mdast": {
       "version": "4.0.4",
       "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
       "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
@@ -10593,502 +11745,274 @@
         "@types/unist": "*"
       }
     },
-    "node_modules/remark-parse/node_modules/@types/unist": {
+    "node_modules/remark-gfm/node_modules/@types/unist": {
       "version": "3.0.3",
       "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
       "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-parse/node_modules/mdast-util-from-markdown": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
-      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
+    "node_modules/remark-gfm/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^4.0.0",
         "@types/unist": "^3.0.0",
-        "decode-named-character-reference": "^1.0.0",
+        "bail": "^2.0.0",
         "devlop": "^1.0.0",
-        "mdast-util-to-string": "^4.0.0",
-        "micromark": "^4.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-decode-string": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
-        "unist-util-stringify-position": "^4.0.0"
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/mdast-util-to-string": {
+    "node_modules/remark-gfm/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^4.0.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark": {
-      "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
-      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "@types/debug": "^4.0.0",
-        "debug": "^4.0.0",
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-core-commonmark": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-combine-extensions": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
-        "micromark-util-subtokenize": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/remark-parse/node_modules/micromark-core-commonmark": {
-      "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
-      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-factory-destination": "^2.0.0",
-        "micromark-factory-label": "^2.0.0",
-        "micromark-factory-space": "^2.0.0",
-        "micromark-factory-title": "^2.0.0",
-        "micromark-factory-whitespace": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-html-tag-name": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
-        "micromark-util-subtokenize": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/remark-parse/node_modules/micromark-factory-destination": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
-      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
+    "node_modules/remark-gfm/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-factory-label": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
-      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
+    "node_modules/remark-gfm/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-factory-space": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
-      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
+    "node_modules/remark-github": {
+      "version": "12.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/mdast": "^4.0.0",
+        "mdast-util-find-and-replace": "^3.0.0",
+        "mdast-util-to-string": "^4.0.0",
+        "to-vfile": "^8.0.0",
+        "unist-util-visit": "^5.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-factory-title": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
-      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
+    "node_modules/remark-github/node_modules/@types/mdast": {
+      "version": "4.0.4",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "*"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-factory-whitespace": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
-      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
+    "node_modules/remark-github/node_modules/@types/unist": {
+      "version": "3.0.3",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
+      "license": "MIT"
     },
-    "node_modules/remark-parse/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
+    "node_modules/remark-github/node_modules/mdast-util-to-string": {
+      "version": "4.0.0",
+      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/mdast": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-chunked": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
-      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
+    "node_modules/remark-github/node_modules/unist-util-is": {
+      "version": "6.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-classify-character": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
-      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
+    "node_modules/remark-github/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-combine-extensions": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
-      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
+    "node_modules/remark-github/node_modules/unist-util-visit": {
+      "version": "5.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-decode-numeric-character-reference": {
-      "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
-      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
+    "node_modules/remark-github/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-decode-string": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
-      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
+    "node_modules/remark-github/node_modules/vfile": {
+      "version": "6.0.3",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-encode": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
+    "node_modules/remark-github/node_modules/vfile-message": {
+      "version": "4.0.3",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-html-tag-name": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
-      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
+    "node_modules/remark-man": {
+      "version": "8.0.1",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^3.0.0",
+        "@types/unist": "^2.0.0",
+        "github-slugger": "^1.0.0",
+        "groff-escape": "^2.0.0",
+        "mdast-util-definitions": "^5.0.0",
+        "mdast-util-to-string": "^3.0.0",
+        "months": "^2.0.0",
+        "unified": "^10.0.0",
+        "unist-util-visit": "^4.0.0",
+        "zwitch": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-normalize-identifier": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
-      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
+    "node_modules/remark-man/node_modules/unist-util-visit": {
+      "version": "4.1.2",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0",
+        "unist-util-visit-parents": "^5.1.1"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-resolve-all": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
-      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
+    "node_modules/remark-man/node_modules/unist-util-visit-parents": {
+      "version": "5.1.3",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "^2.0.0",
+        "unist-util-is": "^5.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-sanitize-uri": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
+    "node_modules/remark-parse": {
+      "version": "11.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "@types/mdast": "^4.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
+        "unified": "^11.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-subtokenize": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
-      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
+    "node_modules/remark-parse/node_modules/@types/mdast": {
+      "version": "4.0.4",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+        "@types/unist": "*"
       }
     },
-    "node_modules/remark-parse/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+    "node_modules/remark-parse/node_modules/@types/unist": {
+      "version": "3.0.3",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT"
     },
     "node_modules/remark-parse/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -11104,8 +12028,6 @@
     },
     "node_modules/remark-parse/node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11124,8 +12046,6 @@
     },
     "node_modules/remark-parse/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11138,8 +12058,6 @@
     },
     "node_modules/remark-parse/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11153,8 +12071,6 @@
     },
     "node_modules/remark-parse/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11208,159 +12124,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-stringify/node_modules/mdast-util-phrasing": {
-      "version": "4.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/mdast-util-to-markdown": {
-      "version": "2.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "@types/unist": "^3.0.0",
-        "longest-streak": "^3.0.0",
-        "mdast-util-phrasing": "^4.0.0",
-        "mdast-util-to-string": "^4.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-decode-string": "^2.0.0",
-        "unist-util-visit": "^5.0.0",
-        "zwitch": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/micromark-util-classify-character": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/micromark-util-decode-numeric-character-reference": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/micromark-util-decode-string": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/remark-stringify/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
     "node_modules/remark-stringify/node_modules/unified": {
       "version": "11.0.5",
       "dev": true,
@@ -11379,18 +12142,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-stringify/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
       "dev": true,
@@ -11403,33 +12154,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/unist-util-visit": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-stringify/node_modules/vfile": {
       "version": "6.0.3",
       "dev": true,
@@ -11658,17 +12382,6 @@
         "queue-microtask": "^1.2.2"
       }
     },
-    "node_modules/sade": {
-      "version": "1.8.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "mri": "^1.1.0"
-      },
-      "engines": {
-        "node": ">=6"
-      }
-    },
     "node_modules/safe-array-concat": {
       "version": "1.1.3",
       "dev": true,
@@ -14866,8 +15579,6 @@
     },
     "node_modules/to-vfile": {
       "version": "8.0.0",
-      "resolved": "https://registry.npmjs.org/to-vfile/-/to-vfile-8.0.0.tgz",
-      "integrity": "sha512-IcmH1xB5576MJc9qcfEC/m/nQCFt3fzMHz45sSlgJyTWjRbKW1HAkJpuf3DgE57YzIlZcwcBZA5ENQbBo4aLkg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14880,15 +15591,11 @@
     },
     "node_modules/to-vfile/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/to-vfile/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14901,8 +15608,6 @@
     },
     "node_modules/to-vfile/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14916,8 +15621,6 @@
     },
     "node_modules/to-vfile/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -15413,31 +16116,6 @@
         "uuid": "dist/bin/uuid"
       }
     },
-    "node_modules/uvu": {
-      "version": "0.5.6",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "dequal": "^2.0.0",
-        "diff": "^5.0.0",
-        "kleur": "^4.0.3",
-        "sade": "^1.7.3"
-      },
-      "bin": {
-        "uvu": "bin.js"
-      },
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/uvu/node_modules/diff": {
-      "version": "5.2.0",
-      "dev": true,
-      "license": "BSD-3-Clause",
-      "engines": {
-        "node": ">=0.3.1"
-      }
-    },
     "node_modules/validate-npm-package-license": {
       "version": "3.0.4",
       "inBundle": true,
diff --git a/package.json b/package.json
index 9c4fe309adb76..e6f1a95c142a9 100644
--- a/package.json
+++ b/package.json
@@ -202,7 +202,7 @@
     "nock": "^13.4.0",
     "npm-packlist": "^10.0.0",
     "remark": "^15.0.1",
-    "remark-gfm": "^3.0.1",
+    "remark-gfm": "^4.0.1",
     "remark-github": "^12.0.0",
     "rimraf": "^6.0.1",
     "spawk": "^1.7.1",

From 1c6bb4c54f515fdb7ead06cb05d24e0b9d403f8b Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:48:41 -0700
Subject: [PATCH 54/63] chore: rehype-stringify@10.0.1

---
 docs/package.json |   2 +-
 package-lock.json | 675 ++++++++++++++++++++++------------------------
 2 files changed, 319 insertions(+), 358 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index fbd0b3ca1d936..6144e37955133 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -27,7 +27,7 @@
     "front-matter": "^4.0.2",
     "ignore-walk": "^8.0.0",
     "jsdom": "^24.0.0",
-    "rehype-stringify": "^9.0.3",
+    "rehype-stringify": "^10.0.1",
     "remark-gfm": "^4.0.1",
     "remark-man": "^8.0.1",
     "remark-parse": "^11.0.0",
diff --git a/package-lock.json b/package-lock.json
index b4aa9ba548e0e..25a28c3cc2fc4 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -192,7 +192,7 @@
         "front-matter": "^4.0.2",
         "ignore-walk": "^8.0.0",
         "jsdom": "^24.0.0",
-        "rehype-stringify": "^9.0.3",
+        "rehype-stringify": "^10.0.1",
         "remark-gfm": "^4.0.1",
         "remark-man": "^8.0.1",
         "remark-parse": "^11.0.0",
@@ -2134,8 +2134,6 @@
     },
     "node_modules/@types/debug": {
       "version": "4.1.12",
-      "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz",
-      "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2171,8 +2169,6 @@
     },
     "node_modules/@types/ms": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz",
-      "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==",
       "dev": true,
       "license": "MIT"
     },
@@ -2194,11 +2190,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/@types/parse5": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/@types/unist": {
       "version": "2.0.11",
       "dev": true,
@@ -2220,8 +2211,7 @@
     "node_modules/@ungap/structured-clone": {
       "version": "1.3.0",
       "dev": true,
-      "license": "ISC",
-      "peer": true
+      "license": "ISC"
     },
     "node_modules/@xmldom/xmldom": {
       "version": "0.8.11",
@@ -2994,6 +2984,8 @@
     },
     "node_modules/character-entities-html4": {
       "version": "2.1.0",
+      "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz",
+      "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -3003,6 +2995,8 @@
     },
     "node_modules/character-entities-legacy": {
       "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
+      "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -3483,6 +3477,8 @@
     },
     "node_modules/comma-separated-tokens": {
       "version": "2.0.3",
+      "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz",
+      "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5712,154 +5708,300 @@
         "node": ">= 0.4"
       }
     },
-    "node_modules/hast-util-from-parse5": {
-      "version": "7.1.2",
+    "node_modules/hast-util-to-html": {
+      "version": "9.0.5",
+      "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz",
+      "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/unist": "^2.0.0",
-        "hastscript": "^7.0.0",
-        "property-information": "^6.0.0",
-        "vfile": "^5.0.0",
-        "vfile-location": "^4.0.0",
-        "web-namespaces": "^2.0.0"
+        "@types/hast": "^3.0.0",
+        "@types/unist": "^3.0.0",
+        "ccount": "^2.0.0",
+        "comma-separated-tokens": "^2.0.0",
+        "hast-util-whitespace": "^3.0.0",
+        "html-void-elements": "^3.0.0",
+        "mdast-util-to-hast": "^13.0.0",
+        "property-information": "^7.0.0",
+        "space-separated-tokens": "^2.0.0",
+        "stringify-entities": "^4.0.0",
+        "zwitch": "^2.0.4"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-parse-selector": {
-      "version": "3.1.1",
+    "node_modules/hast-util-to-html/node_modules/@types/hast": {
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/hast-util-to-html/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/hast-util-to-html/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/hast-util-to-html/node_modules/mdast-util-to-hast": {
+      "version": "13.2.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz",
+      "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0"
+        "@types/hast": "^3.0.0",
+        "@types/mdast": "^4.0.0",
+        "@ungap/structured-clone": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "trim-lines": "^3.0.0",
+        "unist-util-position": "^5.0.0",
+        "unist-util-visit": "^5.0.0",
+        "vfile": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-raw": {
-      "version": "7.2.3",
+    "node_modules/hast-util-to-html/node_modules/micromark-util-character": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/parse5": "^6.0.0",
-        "hast-util-from-parse5": "^7.0.0",
-        "hast-util-to-parse5": "^7.0.0",
-        "html-void-elements": "^2.0.0",
-        "parse5": "^6.0.0",
-        "unist-util-position": "^4.0.0",
-        "unist-util-visit": "^4.0.0",
-        "vfile": "^5.0.0",
-        "web-namespaces": "^2.0.0",
-        "zwitch": "^2.0.0"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
+    },
+    "node_modules/hast-util-to-html/node_modules/micromark-util-encode": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/hast-util-to-html/node_modules/micromark-util-sanitize-uri": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
+    },
+    "node_modules/hast-util-to-html/node_modules/micromark-util-symbol": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/hast-util-to-html/node_modules/micromark-util-types": {
+      "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "GitHub Sponsors",
+          "url": "https://github.com/sponsors/unifiedjs"
+        },
+        {
+          "type": "OpenCollective",
+          "url": "https://opencollective.com/unified"
+        }
+      ],
+      "license": "MIT"
+    },
+    "node_modules/hast-util-to-html/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-raw/node_modules/parse5": {
-      "version": "6.0.1",
+    "node_modules/hast-util-to-html/node_modules/unist-util-position": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz",
+      "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==",
       "dev": true,
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
     },
-    "node_modules/hast-util-raw/node_modules/unist-util-visit": {
-      "version": "4.1.2",
+    "node_modules/hast-util-to-html/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-raw/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+    "node_modules/hast-util-to-html/node_modules/unist-util-visit": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-to-html": {
-      "version": "8.0.4",
+    "node_modules/hast-util-to-html/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/unist": "^2.0.0",
-        "ccount": "^2.0.0",
-        "comma-separated-tokens": "^2.0.0",
-        "hast-util-raw": "^7.0.0",
-        "hast-util-whitespace": "^2.0.0",
-        "html-void-elements": "^2.0.0",
-        "property-information": "^6.0.0",
-        "space-separated-tokens": "^2.0.0",
-        "stringify-entities": "^4.0.0",
-        "zwitch": "^2.0.4"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-to-parse5": {
-      "version": "7.1.0",
+    "node_modules/hast-util-to-html/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "comma-separated-tokens": "^2.0.0",
-        "property-information": "^6.0.0",
-        "space-separated-tokens": "^2.0.0",
-        "web-namespaces": "^2.0.0",
-        "zwitch": "^2.0.0"
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-whitespace": {
-      "version": "2.0.1",
+    "node_modules/hast-util-to-html/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hastscript": {
-      "version": "7.2.0",
+    "node_modules/hast-util-whitespace": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz",
+      "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "comma-separated-tokens": "^2.0.0",
-        "hast-util-parse-selector": "^3.0.0",
-        "property-information": "^6.0.0",
-        "space-separated-tokens": "^2.0.0"
+        "@types/hast": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/hast-util-whitespace/node_modules/@types/hast": {
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
     "node_modules/he": {
       "version": "1.2.0",
       "dev": true,
@@ -5896,7 +6038,9 @@
       "license": "MIT"
     },
     "node_modules/html-void-elements": {
-      "version": "2.0.1",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz",
+      "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7306,8 +7450,6 @@
     },
     "node_modules/markdown-table": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/markdown-table/-/markdown-table-3.0.4.tgz",
-      "integrity": "sha512-wiYz4+JrLyb/DqW2hkFJxP7Vd7JuTDm77fvbM8VfEQdmSMqcImWeeRbHwZjBjIFki/VaMK2BhFi7oUUZeM5bqw==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7366,8 +7508,6 @@
     },
     "node_modules/mdast-util-find-and-replace": {
       "version": "3.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-find-and-replace/-/mdast-util-find-and-replace-3.0.2.tgz",
-      "integrity": "sha512-Tmd1Vg/m3Xz43afeNxDIhWRtFZgM2VLyaf4vSTYwudTyeuTneoL3qtWMA5jeLyz/O1vDJmmV4QuScFCA2tBPwg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7383,8 +7523,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7393,15 +7531,11 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/mdast-util-find-and-replace/node_modules/escape-string-regexp": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz",
-      "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==",
       "dev": true,
       "license": "MIT",
       "engines": {
@@ -7413,8 +7547,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/unist-util-is": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7427,8 +7559,6 @@
     },
     "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7442,8 +7572,6 @@
     },
     "node_modules/mdast-util-from-markdown": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-2.0.2.tgz",
-      "integrity": "sha512-uZhTV/8NBuw0WHkPTrCqDOl0zVe1BIng5ZtHoDk49ME1qqcjYmmLmOf0gELgcRMxN4w2iuIeVso5/6QymSrgmA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7467,8 +7595,6 @@
     },
     "node_modules/mdast-util-from-markdown/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7477,15 +7603,11 @@
     },
     "node_modules/mdast-util-from-markdown/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/mdast-util-from-markdown/node_modules/mdast-util-to-string": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7498,8 +7620,6 @@
     },
     "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -7515,8 +7635,6 @@
     },
     "node_modules/mdast-util-from-markdown/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -7532,8 +7650,6 @@
     },
     "node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7546,8 +7662,6 @@
     },
     "node_modules/mdast-util-gfm": {
       "version": "3.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm/-/mdast-util-gfm-3.1.0.tgz",
-      "integrity": "sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7566,8 +7680,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-autolink-literal/-/mdast-util-gfm-autolink-literal-2.0.1.tgz",
-      "integrity": "sha512-5HVP2MKaP6L+G6YaxPNjuL0BPrq9orG3TsrZ9YXbA3vDw/ACI4MEsnoDpn6ZNm7GnZgtAcONJyPhOP8tNJQavQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7584,8 +7696,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7594,8 +7704,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -7615,8 +7723,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -7632,8 +7738,6 @@
     },
     "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -7649,8 +7753,6 @@
     },
     "node_modules/mdast-util-gfm-footnote": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-footnote/-/mdast-util-gfm-footnote-2.1.0.tgz",
-      "integrity": "sha512-sqpDWlsHn7Ac9GNZQMeUzPQSMzR6Wv0WKRNvQRg0KqHh02fpTz69Qc1QSseNX29bhz1ROIyNyxExfawVKTm1GQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7667,8 +7769,6 @@
     },
     "node_modules/mdast-util-gfm-footnote/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7677,8 +7777,6 @@
     },
     "node_modules/mdast-util-gfm-strikethrough": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-strikethrough/-/mdast-util-gfm-strikethrough-2.0.0.tgz",
-      "integrity": "sha512-mKKb915TF+OC5ptj5bJ7WFRPdYtuHv0yTRxK2tJvi+BDqbkiG7h7u/9SI89nRAYcmap2xHQL9D+QG/6wSrTtXg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7693,8 +7791,6 @@
     },
     "node_modules/mdast-util-gfm-strikethrough/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7703,8 +7799,6 @@
     },
     "node_modules/mdast-util-gfm-table": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-table/-/mdast-util-gfm-table-2.0.0.tgz",
-      "integrity": "sha512-78UEvebzz/rJIxLvE7ZtDd/vIQ0RHv+3Mh5DR96p7cS7HsBhYIICDBCu8csTNWNO6tBWfqXPWekRuj2FNOGOZg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7721,8 +7815,6 @@
     },
     "node_modules/mdast-util-gfm-table/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7731,8 +7823,6 @@
     },
     "node_modules/mdast-util-gfm-task-list-item": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-gfm-task-list-item/-/mdast-util-gfm-task-list-item-2.0.0.tgz",
-      "integrity": "sha512-IrtvNvjxC1o06taBAVJznEnkiHxLFTzgonUdy8hzFVeDun0uTjxxrRGVaNFqkU1wJR3RBPEfsxmU6jDWPofrTQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7748,8 +7838,6 @@
     },
     "node_modules/mdast-util-gfm-task-list-item/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7758,8 +7846,6 @@
     },
     "node_modules/mdast-util-phrasing": {
       "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-phrasing/-/mdast-util-phrasing-4.1.0.tgz",
-      "integrity": "sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7773,8 +7859,6 @@
     },
     "node_modules/mdast-util-phrasing/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7783,15 +7867,11 @@
     },
     "node_modules/mdast-util-phrasing/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/mdast-util-phrasing/node_modules/unist-util-is": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7850,8 +7930,6 @@
     },
     "node_modules/mdast-util-to-markdown": {
       "version": "2.1.2",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-markdown/-/mdast-util-to-markdown-2.1.2.tgz",
-      "integrity": "sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7872,8 +7950,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7882,15 +7958,11 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/mdast-util-to-markdown/node_modules/mdast-util-to-string": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7903,8 +7975,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-is": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7917,8 +7987,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7933,8 +8001,6 @@
     },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit-parents": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7971,8 +8037,6 @@
     },
     "node_modules/micromark": {
       "version": "4.0.2",
-      "resolved": "https://registry.npmjs.org/micromark/-/micromark-4.0.2.tgz",
-      "integrity": "sha512-zpe98Q6kvavpCr1NPVSCMebCKfD7CA2NqZ+rykeNhONIJBpc1tFKt9hucLGwha3jNTNI8lHpctWJWoimVF4PfA==",
       "dev": true,
       "funding": [
         {
@@ -8007,8 +8071,6 @@
     },
     "node_modules/micromark-core-commonmark": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-2.0.3.tgz",
-      "integrity": "sha512-RDBrHEMSxVFLg6xvnXmb1Ayr2WzLAWjeSATAoxwKYJV94TeNavgoIdA0a9ytzDSVzBy2YKFK+emCPOEibLeCrg==",
       "dev": true,
       "funding": [
         {
@@ -8042,8 +8104,6 @@
     },
     "node_modules/micromark-core-commonmark/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8063,8 +8123,6 @@
     },
     "node_modules/micromark-core-commonmark/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8080,8 +8138,6 @@
     },
     "node_modules/micromark-core-commonmark/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8097,8 +8153,6 @@
     },
     "node_modules/micromark-extension-gfm": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm/-/micromark-extension-gfm-3.0.0.tgz",
-      "integrity": "sha512-vsKArQsicm7t0z2GugkCKtZehqUm31oeGBV/KVSorWSy8ZlNAv7ytjFhvaryUiCUJYqs+NoE6AFhpQvBTM6Q4w==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8118,8 +8172,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-autolink-literal/-/micromark-extension-gfm-autolink-literal-2.1.0.tgz",
-      "integrity": "sha512-oOg7knzhicgQ3t4QCjCWgTmfNhvQbDDnJeVu9v81r7NltNCVmhPy1fJRX27pISafdjL+SVc4d3l48Gb6pbRypw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8135,8 +8187,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8156,8 +8206,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -8173,8 +8221,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -8195,8 +8241,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8212,8 +8256,6 @@
     },
     "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8229,8 +8271,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-footnote/-/micromark-extension-gfm-footnote-2.1.0.tgz",
-      "integrity": "sha512-/yPhxI1ntnDNsiHtzLKYnE3vf9JZ6cAisqVDauhp4CEHxlb4uoOTxOCJ+9s51bIB8U1N1FJ1RXOKTIlD5B/gqw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8250,8 +8290,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8271,8 +8309,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -8288,8 +8324,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -8310,8 +8344,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8327,8 +8359,6 @@
     },
     "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8344,8 +8374,6 @@
     },
     "node_modules/micromark-extension-gfm-strikethrough": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-strikethrough/-/micromark-extension-gfm-strikethrough-2.1.0.tgz",
-      "integrity": "sha512-ADVjpOOkjz1hhkZLlBiYA9cR2Anf8F4HqZUO6e5eDcPQd0Txw5fxLzzxnEkSkfnD0wziSGiv7sYhk/ktvbf1uw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8363,8 +8391,6 @@
     },
     "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8380,8 +8406,6 @@
     },
     "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8397,8 +8421,6 @@
     },
     "node_modules/micromark-extension-gfm-table": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-table/-/micromark-extension-gfm-table-2.1.1.tgz",
-      "integrity": "sha512-t2OU/dXXioARrC6yWfJ4hqB7rct14e8f7m0cbI5hUmDyyIlwv5vEtooptH8INkbLzOatzKuVbQmAYcbWoyz6Dg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8415,8 +8437,6 @@
     },
     "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8436,8 +8456,6 @@
     },
     "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8453,8 +8471,6 @@
     },
     "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8470,8 +8486,6 @@
     },
     "node_modules/micromark-extension-gfm-tagfilter": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-tagfilter/-/micromark-extension-gfm-tagfilter-2.0.0.tgz",
-      "integrity": "sha512-xHlTOmuCSotIA8TW1mDIM6X2O1SiX5P9IuDtqGonFhEK0qgRI4yeC6vMxEV2dgyr2TiD+2PQ10o+cOhdVAcwfg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8484,8 +8498,6 @@
     },
     "node_modules/micromark-extension-gfm-tagfilter/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8501,8 +8513,6 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-extension-gfm-task-list-item/-/micromark-extension-gfm-task-list-item-2.1.0.tgz",
-      "integrity": "sha512-qIBZhqxqI6fjLDYFTBIa4eivDMnP+OZqsNwmQ3xNLE4Cxwc+zfQEfbs6tzAo2Hjq+bh6q5F+Z8/cksrLFYWQQw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -8519,8 +8529,6 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8540,8 +8548,6 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8557,8 +8563,6 @@
     },
     "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8574,8 +8578,6 @@
     },
     "node_modules/micromark-extension-gfm/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8591,8 +8593,6 @@
     },
     "node_modules/micromark-factory-destination": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.1.tgz",
-      "integrity": "sha512-Xe6rDdJlkmbFRExpTOmRj9N3MaWmbAgdpSrBQvCFqhezUn4AHqJHbaEnfbVYYiexVSs//tqOdY/DxhjdCiJnIA==",
       "dev": true,
       "funding": [
         {
@@ -8613,8 +8613,6 @@
     },
     "node_modules/micromark-factory-destination/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8634,8 +8632,6 @@
     },
     "node_modules/micromark-factory-destination/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8651,8 +8647,6 @@
     },
     "node_modules/micromark-factory-destination/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8668,8 +8662,6 @@
     },
     "node_modules/micromark-factory-label": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-2.0.1.tgz",
-      "integrity": "sha512-VFMekyQExqIW7xIChcXn4ok29YE3rnuyveW3wZQWWqF4Nv9Wk5rgJ99KzPvHjkmPXF93FXIbBp6YdW3t71/7Vg==",
       "dev": true,
       "funding": [
         {
@@ -8691,8 +8683,6 @@
     },
     "node_modules/micromark-factory-label/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8712,8 +8702,6 @@
     },
     "node_modules/micromark-factory-label/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8729,8 +8717,6 @@
     },
     "node_modules/micromark-factory-label/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8746,8 +8732,6 @@
     },
     "node_modules/micromark-factory-space": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-2.0.1.tgz",
-      "integrity": "sha512-zRkxjtBxxLd2Sc0d+fbnEunsTj46SWXgXciZmHq0kDYGnck/ZSGj9/wULTV95uoeYiK5hRXP2mJ98Uo4cq/LQg==",
       "dev": true,
       "funding": [
         {
@@ -8767,8 +8751,6 @@
     },
     "node_modules/micromark-factory-space/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8788,8 +8770,6 @@
     },
     "node_modules/micromark-factory-space/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8805,8 +8785,6 @@
     },
     "node_modules/micromark-factory-space/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8822,8 +8800,6 @@
     },
     "node_modules/micromark-factory-title": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-2.0.1.tgz",
-      "integrity": "sha512-5bZ+3CjhAd9eChYTHsjy6TGxpOFSKgKKJPJxr293jTbfry2KDoWkhBb6TcPVB4NmzaPhMs1Frm9AZH7OD4Cjzw==",
       "dev": true,
       "funding": [
         {
@@ -8845,8 +8821,6 @@
     },
     "node_modules/micromark-factory-title/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8866,8 +8840,6 @@
     },
     "node_modules/micromark-factory-title/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8883,8 +8855,6 @@
     },
     "node_modules/micromark-factory-title/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8900,8 +8870,6 @@
     },
     "node_modules/micromark-factory-whitespace": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-2.0.1.tgz",
-      "integrity": "sha512-Ob0nuZ3PKt/n0hORHyvoD9uZhr+Za8sFoP+OnMcnWK5lngSzALgQYKMr9RJVOWLqQYuyn6ulqGWSXdwf6F80lQ==",
       "dev": true,
       "funding": [
         {
@@ -8923,8 +8891,6 @@
     },
     "node_modules/micromark-factory-whitespace/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8944,8 +8910,6 @@
     },
     "node_modules/micromark-factory-whitespace/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8961,8 +8925,6 @@
     },
     "node_modules/micromark-factory-whitespace/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -8997,8 +8959,6 @@
     },
     "node_modules/micromark-util-chunked": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-2.0.1.tgz",
-      "integrity": "sha512-QUNFEOPELfmvv+4xiNg2sRYeS/P84pTW0TCgP5zc9FpXetHY0ab7SxKyAQCNCc1eK0459uoLI1y5oO5Vc1dbhA==",
       "dev": true,
       "funding": [
         {
@@ -9017,8 +8977,6 @@
     },
     "node_modules/micromark-util-chunked/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9034,8 +8992,6 @@
     },
     "node_modules/micromark-util-classify-character": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-2.0.1.tgz",
-      "integrity": "sha512-K0kHzM6afW/MbeWYWLjoHQv1sgg2Q9EccHEDzSkxiP/EaagNzCm7T/WMKZ3rjMbvIpvBiZgwR3dKMygtA4mG1Q==",
       "dev": true,
       "funding": [
         {
@@ -9056,8 +9012,6 @@
     },
     "node_modules/micromark-util-classify-character/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -9077,8 +9031,6 @@
     },
     "node_modules/micromark-util-classify-character/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9094,8 +9046,6 @@
     },
     "node_modules/micromark-util-classify-character/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -9111,8 +9061,6 @@
     },
     "node_modules/micromark-util-combine-extensions": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-2.0.1.tgz",
-      "integrity": "sha512-OnAnH8Ujmy59JcyZw8JSbK9cGpdVY44NKgSM7E9Eh7DiLS2E9RNQf0dONaGDzEG9yjEl5hcqeIsj4hfRkLH/Bg==",
       "dev": true,
       "funding": [
         {
@@ -9132,8 +9080,6 @@
     },
     "node_modules/micromark-util-combine-extensions/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -9149,8 +9095,6 @@
     },
     "node_modules/micromark-util-decode-numeric-character-reference": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-2.0.2.tgz",
-      "integrity": "sha512-ccUbYk6CwVdkmCQMyr64dXz42EfHGkPQlBj5p7YVGzq8I7CtjXZJrubAYezf7Rp+bjPseiROqe7G6foFd+lEuw==",
       "dev": true,
       "funding": [
         {
@@ -9169,8 +9113,6 @@
     },
     "node_modules/micromark-util-decode-numeric-character-reference/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9186,8 +9128,6 @@
     },
     "node_modules/micromark-util-decode-string": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-2.0.1.tgz",
-      "integrity": "sha512-nDV/77Fj6eH1ynwscYTOsbK7rR//Uj0bZXBwJZRfaLEJ1iGBR6kIfNmlNqaqJf649EP0F3NWNdeJi03elllNUQ==",
       "dev": true,
       "funding": [
         {
@@ -9209,8 +9149,6 @@
     },
     "node_modules/micromark-util-decode-string/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -9230,8 +9168,6 @@
     },
     "node_modules/micromark-util-decode-string/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9247,8 +9183,6 @@
     },
     "node_modules/micromark-util-decode-string/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -9279,8 +9213,6 @@
     },
     "node_modules/micromark-util-html-tag-name": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-2.0.1.tgz",
-      "integrity": "sha512-2cNEiYDhCWKI+Gs9T0Tiysk136SnR13hhO8yW6BGNyhOC4qYFnwF1nKfD3HFAIXA5c45RrIG1ub11GiXeYd1xA==",
       "dev": true,
       "funding": [
         {
@@ -9296,8 +9228,6 @@
     },
     "node_modules/micromark-util-normalize-identifier": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-2.0.1.tgz",
-      "integrity": "sha512-sxPqmo70LyARJs0w2UclACPUUEqltCkJ6PhKdMIDuJ3gSf/Q+/GIe3WKl0Ijb/GyH9lOpUkRAO2wp0GVkLvS9Q==",
       "dev": true,
       "funding": [
         {
@@ -9316,8 +9246,6 @@
     },
     "node_modules/micromark-util-normalize-identifier/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9333,8 +9261,6 @@
     },
     "node_modules/micromark-util-resolve-all": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-2.0.1.tgz",
-      "integrity": "sha512-VdQyxFWFT2/FGJgwQnJYbe1jjQoNTS4RjglmSjTUlpUMa95Htx9NHeYW4rGDJzbjvCsl9eLjMQwGeElsqmzcHg==",
       "dev": true,
       "funding": [
         {
@@ -9353,8 +9279,6 @@
     },
     "node_modules/micromark-util-resolve-all/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -9390,8 +9314,6 @@
     },
     "node_modules/micromark-util-subtokenize": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-2.1.0.tgz",
-      "integrity": "sha512-XQLu552iSctvnEcgXw6+Sx75GflAPNED1qx7eBJ+wydBb2KCbRZe+NwvIEEMM83uml1+2WSXpBAcp9IUCgCYWA==",
       "dev": true,
       "funding": [
         {
@@ -9413,8 +9335,6 @@
     },
     "node_modules/micromark-util-subtokenize/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9430,8 +9350,6 @@
     },
     "node_modules/micromark-util-subtokenize/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -9477,8 +9395,6 @@
     },
     "node_modules/micromark/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -9498,8 +9414,6 @@
     },
     "node_modules/micromark/node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -9515,8 +9429,6 @@
     },
     "node_modules/micromark/node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -9537,8 +9449,6 @@
     },
     "node_modules/micromark/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9554,8 +9464,6 @@
     },
     "node_modules/micromark/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -11065,7 +10973,9 @@
       }
     },
     "node_modules/property-information": {
-      "version": "6.5.0",
+      "version": "7.1.0",
+      "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz",
+      "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -11398,13 +11308,96 @@
       }
     },
     "node_modules/rehype-stringify": {
-      "version": "9.0.4",
+      "version": "10.0.1",
+      "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-10.0.1.tgz",
+      "integrity": "sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "hast-util-to-html": "^8.0.0",
-        "unified": "^10.0.0"
+        "@types/hast": "^3.0.0",
+        "hast-util-to-html": "^9.0.0",
+        "unified": "^11.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/rehype-stringify/node_modules/@types/hast": {
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/rehype-stringify/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/rehype-stringify/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/rehype-stringify/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/rehype-stringify/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/rehype-stringify/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -11718,8 +11711,6 @@
     },
     "node_modules/remark-gfm": {
       "version": "4.0.1",
-      "resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.1.tgz",
-      "integrity": "sha512-1quofZ2RQ9EWdeN34S79+KExV1764+wCUGop5CPL1WGdD0ocPpu91lzPGbwWMECpEpd42kJGQwzRfyov9j4yNg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11737,8 +11728,6 @@
     },
     "node_modules/remark-gfm/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11747,15 +11736,11 @@
     },
     "node_modules/remark-gfm/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/remark-gfm/node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11774,8 +11759,6 @@
     },
     "node_modules/remark-gfm/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11788,8 +11771,6 @@
     },
     "node_modules/remark-gfm/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11803,8 +11784,6 @@
     },
     "node_modules/remark-gfm/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12704,6 +12683,8 @@
     },
     "node_modules/space-separated-tokens": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz",
+      "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -13025,6 +13006,8 @@
     },
     "node_modules/stringify-entities": {
       "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz",
+      "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -16157,19 +16140,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/vfile-location": {
-      "version": "4.1.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "vfile": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/vfile-message": {
       "version": "3.1.4",
       "dev": true,
@@ -16201,15 +16171,6 @@
         "node": "20 || >=22"
       }
     },
-    "node_modules/web-namespaces": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "funding": {
-        "type": "github",
-        "url": "https://github.com/sponsors/wooorm"
-      }
-    },
     "node_modules/whatwg-encoding": {
       "version": "3.1.1",
       "dev": true,

From 30fe3ba2455caa66e0aaf7d1e9343ed9872faba0 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:49:50 -0700
Subject: [PATCH 55/63] chore: remark-man@9.0.0

---
 docs/package.json |   2 +-
 package-lock.json | 271 +++++++++++++++++++++++++---------------------
 2 files changed, 151 insertions(+), 122 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 6144e37955133..213ab615c0d00 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -29,7 +29,7 @@
     "jsdom": "^24.0.0",
     "rehype-stringify": "^10.0.1",
     "remark-gfm": "^4.0.1",
-    "remark-man": "^8.0.1",
+    "remark-man": "^9.0.0",
     "remark-parse": "^11.0.0",
     "remark-rehype": "^10.1.0",
     "semver": "^7.3.8",
diff --git a/package-lock.json b/package-lock.json
index 25a28c3cc2fc4..01cc6a1aba4d7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -194,7 +194,7 @@
         "jsdom": "^24.0.0",
         "rehype-stringify": "^10.0.1",
         "remark-gfm": "^4.0.1",
-        "remark-man": "^8.0.1",
+        "remark-man": "^9.0.0",
         "remark-parse": "^11.0.0",
         "remark-rehype": "^10.1.0",
         "semver": "^7.3.8",
@@ -2984,8 +2984,6 @@
     },
     "node_modules/character-entities-html4": {
       "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/character-entities-html4/-/character-entities-html4-2.1.0.tgz",
-      "integrity": "sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -2995,8 +2993,6 @@
     },
     "node_modules/character-entities-legacy": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/character-entities-legacy/-/character-entities-legacy-3.0.0.tgz",
-      "integrity": "sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -3477,8 +3473,6 @@
     },
     "node_modules/comma-separated-tokens": {
       "version": "2.0.3",
-      "resolved": "https://registry.npmjs.org/comma-separated-tokens/-/comma-separated-tokens-2.0.3.tgz",
-      "integrity": "sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -5452,7 +5446,9 @@
       }
     },
     "node_modules/github-slugger": {
-      "version": "1.5.0",
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz",
+      "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==",
       "dev": true,
       "license": "ISC"
     },
@@ -5710,8 +5706,6 @@
     },
     "node_modules/hast-util-to-html": {
       "version": "9.0.5",
-      "resolved": "https://registry.npmjs.org/hast-util-to-html/-/hast-util-to-html-9.0.5.tgz",
-      "integrity": "sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5734,8 +5728,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/@types/hast": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
-      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5744,8 +5736,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5754,15 +5744,11 @@
     },
     "node_modules/hast-util-to-html/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/hast-util-to-html/node_modules/mdast-util-to-hast": {
       "version": "13.2.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz",
-      "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5783,8 +5769,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -5804,8 +5788,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -5821,8 +5803,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -5843,8 +5823,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -5860,8 +5838,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -5877,8 +5853,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/unist-util-is": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5891,8 +5865,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/unist-util-position": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz",
-      "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5905,8 +5877,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5919,8 +5889,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/unist-util-visit": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5935,8 +5903,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/unist-util-visit-parents": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5950,8 +5916,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5965,8 +5929,6 @@
     },
     "node_modules/hast-util-to-html/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5980,8 +5942,6 @@
     },
     "node_modules/hast-util-whitespace": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz",
-      "integrity": "sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -5994,8 +5954,6 @@
     },
     "node_modules/hast-util-whitespace/node_modules/@types/hast": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
-      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6039,8 +5997,6 @@
     },
     "node_modules/html-void-elements": {
       "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/html-void-elements/-/html-void-elements-3.0.0.tgz",
-      "integrity": "sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -7606,18 +7562,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-from-markdown/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": {
       "version": "2.0.1",
       "dev": true,
@@ -7961,18 +7905,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-to-markdown/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-is": {
       "version": "6.0.0",
       "dev": true,
@@ -8013,17 +7945,29 @@
       }
     },
     "node_modules/mdast-util-to-string": {
-      "version": "3.2.0",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
+      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0"
+        "@types/mdast": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/mdast-util-to-string/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
     "node_modules/meow": {
       "version": "12.1.1",
       "dev": true,
@@ -10974,8 +10918,6 @@
     },
     "node_modules/property-information": {
       "version": "7.1.0",
-      "resolved": "https://registry.npmjs.org/property-information/-/property-information-7.1.0.tgz",
-      "integrity": "sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -11309,8 +11251,6 @@
     },
     "node_modules/rehype-stringify": {
       "version": "10.0.1",
-      "resolved": "https://registry.npmjs.org/rehype-stringify/-/rehype-stringify-10.0.1.tgz",
-      "integrity": "sha512-k9ecfXHmIPuFVI61B9DeLPN0qFHfawM6RsuX48hoqlaKSF61RskNjSm1lI8PhBEM0MRdLxVVm4WmTqJQccH9mA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11325,8 +11265,6 @@
     },
     "node_modules/rehype-stringify/node_modules/@types/hast": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
-      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11335,15 +11273,11 @@
     },
     "node_modules/rehype-stringify/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/rehype-stringify/node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11362,8 +11296,6 @@
     },
     "node_modules/rehype-stringify/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11376,8 +11308,6 @@
     },
     "node_modules/rehype-stringify/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11391,8 +11321,6 @@
     },
     "node_modules/rehype-stringify/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11825,18 +11753,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-github/node_modules/mdast-util-to-string": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-github/node_modules/unist-util-is": {
       "version": "6.0.0",
       "dev": true,
@@ -11915,19 +11831,21 @@
       }
     },
     "node_modules/remark-man": {
-      "version": "8.0.1",
+      "version": "9.0.0",
+      "resolved": "https://registry.npmjs.org/remark-man/-/remark-man-9.0.0.tgz",
+      "integrity": "sha512-aikxsc6tqbYQt17oDQxY0EpwmnXFr8mmLCuQI6hGa1f6I9E/ht20hKnxAcnTScXTnafRQZYWuUpwTJiwbAtpuQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "github-slugger": "^1.0.0",
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "github-slugger": "^2.0.0",
         "groff-escape": "^2.0.0",
-        "mdast-util-definitions": "^5.0.0",
-        "mdast-util-to-string": "^3.0.0",
+        "mdast-util-definitions": "^6.0.0",
+        "mdast-util-to-string": "^4.0.0",
         "months": "^2.0.0",
-        "unified": "^10.0.0",
-        "unist-util-visit": "^4.0.0",
+        "unified": "^11.0.0",
+        "unist-util-visit": "^5.0.0",
         "zwitch": "^2.0.0"
       },
       "funding": {
@@ -11935,14 +11853,97 @@
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/remark-man/node_modules/@types/mdast": {
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "*"
+      }
+    },
+    "node_modules/remark-man/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/remark-man/node_modules/mdast-util-definitions": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-6.0.0.tgz",
+      "integrity": "sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "@types/unist": "^3.0.0",
+        "unist-util-visit": "^5.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/unist-util-is": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
     "node_modules/remark-man/node_modules/unist-util-visit": {
-      "version": "4.1.2",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -11950,12 +11951,44 @@
       }
     },
     "node_modules/remark-man/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-man/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -12683,8 +12716,6 @@
     },
     "node_modules/space-separated-tokens": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-2.0.2.tgz",
-      "integrity": "sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==",
       "dev": true,
       "license": "MIT",
       "funding": {
@@ -13006,8 +13037,6 @@
     },
     "node_modules/stringify-entities": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/stringify-entities/-/stringify-entities-4.0.4.tgz",
-      "integrity": "sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {

From 064deb3b329a953d86c3cbaee26805987ff82d0d Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:50:58 -0700
Subject: [PATCH 56/63] chore: remark-rehype@11.1.2

---
 docs/package.json |    2 +-
 package-lock.json | 1941 +++++++--------------------------------------
 2 files changed, 305 insertions(+), 1638 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 213ab615c0d00..6cf2497cdb888 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -31,7 +31,7 @@
     "remark-gfm": "^4.0.1",
     "remark-man": "^9.0.0",
     "remark-parse": "^11.0.0",
-    "remark-rehype": "^10.1.0",
+    "remark-rehype": "^11.1.2",
     "semver": "^7.3.8",
     "tap": "^16.3.8",
     "unified": "^10.1.2",
diff --git a/package-lock.json b/package-lock.json
index 01cc6a1aba4d7..95cdb21a50a08 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -196,7 +196,7 @@
         "remark-gfm": "^4.0.1",
         "remark-man": "^9.0.0",
         "remark-parse": "^11.0.0",
-        "remark-rehype": "^10.1.0",
+        "remark-rehype": "^11.1.2",
         "semver": "^7.3.8",
         "tap": "^16.3.8",
         "unified": "^10.1.2",
@@ -2141,11 +2141,13 @@
       }
     },
     "node_modules/@types/hast": {
-      "version": "2.3.10",
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
+      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2"
+        "@types/unist": "*"
       }
     },
     "node_modules/@types/json5": {
@@ -2155,11 +2157,13 @@
       "peer": true
     },
     "node_modules/@types/mdast": {
-      "version": "3.0.15",
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
+      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2"
+        "@types/unist": "*"
       }
     },
     "node_modules/@types/minimist": {
@@ -5447,8 +5451,6 @@
     },
     "node_modules/github-slugger": {
       "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/github-slugger/-/github-slugger-2.0.0.tgz",
-      "integrity": "sha512-IaOQ9puYtjrkq7Y0Ygl9KDZnrf/aiUJYUpVf89y8kyaxbRG7Y1SrX/jaumrv81vc61+kiMempujsM3Yw7w5qcw==",
       "dev": true,
       "license": "ISC"
     },
@@ -5726,220 +5728,11 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-to-html/node_modules/@types/hast": {
-      "version": "3.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/hast-util-to-html/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/hast-util-to-html/node_modules/mdast-util-to-hast": {
-      "version": "13.2.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/hast": "^3.0.0",
-        "@types/mdast": "^4.0.0",
-        "@ungap/structured-clone": "^1.0.0",
-        "devlop": "^1.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
-        "trim-lines": "^3.0.0",
-        "unist-util-position": "^5.0.0",
-        "unist-util-visit": "^5.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/micromark-util-encode": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/hast-util-to-html/node_modules/micromark-util-sanitize-uri": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/hast-util-to-html/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/hast-util-to-html/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/unist-util-position": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/unist-util-visit": {
-      "version": "5.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/hast-util-to-html/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/hast-util-whitespace": {
       "version": "3.0.0",
       "dev": true,
@@ -5952,14 +5745,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/hast-util-whitespace/node_modules/@types/hast": {
-      "version": "3.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/he": {
       "version": "1.2.0",
       "dev": true,
@@ -7421,47 +7206,6 @@
         "node": ">= 0.4"
       }
     },
-    "node_modules/mdast-util-definitions": {
-      "version": "5.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^3.0.0",
-        "@types/unist": "^2.0.0",
-        "unist-util-visit": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/mdast-util-definitions/node_modules/unist-util-visit": {
-      "version": "4.1.2",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/mdast-util-definitions/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-find-and-replace": {
       "version": "3.0.2",
       "dev": true,
@@ -7477,14 +7221,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-find-and-replace/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-find-and-replace/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
@@ -7501,33 +7237,21 @@
         "url": "https://github.com/sponsors/sindresorhus"
       }
     },
-    "node_modules/mdast-util-find-and-replace/node_modules/unist-util-is": {
-      "version": "6.0.0",
+    "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-find-and-replace/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/mdast-util-from-markdown": {
-      "version": "2.0.2",
+    "node_modules/mdast-util-from-markdown": {
+      "version": "2.0.2",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7549,49 +7273,11 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-from-markdown/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-from-markdown/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-from-markdown/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/mdast-util-from-markdown/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
     "node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
       "dev": true,
@@ -7638,63 +7324,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-autolink-literal/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
-    "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/mdast-util-gfm-autolink-literal/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
     "node_modules/mdast-util-gfm-footnote": {
       "version": "2.1.0",
       "dev": true,
@@ -7711,14 +7340,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-footnote/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-gfm-strikethrough": {
       "version": "2.0.0",
       "dev": true,
@@ -7733,14 +7354,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-strikethrough/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-gfm-table": {
       "version": "2.0.0",
       "dev": true,
@@ -7757,14 +7370,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-table/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-gfm-task-list-item": {
       "version": "2.0.0",
       "dev": true,
@@ -7780,14 +7385,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-gfm-task-list-item/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-phrasing": {
       "version": "4.1.0",
       "dev": true,
@@ -7801,21 +7398,39 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-phrasing/node_modules/@types/mdast": {
-      "version": "4.0.4",
+    "node_modules/mdast-util-to-hast": {
+      "version": "13.2.0",
+      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz",
+      "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "*"
+        "@types/hast": "^3.0.0",
+        "@types/mdast": "^4.0.0",
+        "@ungap/structured-clone": "^1.0.0",
+        "devlop": "^1.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "trim-lines": "^3.0.0",
+        "unist-util-position": "^5.0.0",
+        "unist-util-visit": "^5.0.0",
+        "vfile": "^6.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-phrasing/node_modules/@types/unist": {
+    "node_modules/mdast-util-to-hast/node_modules/@types/unist": {
       "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-phrasing/node_modules/unist-util-is": {
-      "version": "6.0.0",
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7826,46 +7441,61 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-hast": {
-      "version": "12.3.0",
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
+      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/mdast": "^3.0.0",
-        "mdast-util-definitions": "^5.0.0",
-        "micromark-util-sanitize-uri": "^1.1.0",
-        "trim-lines": "^3.0.0",
-        "unist-util-generated": "^2.0.0",
-        "unist-util-position": "^4.0.0",
-        "unist-util-visit": "^4.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
-      "version": "4.1.2",
+    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
+      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
+      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0",
-        "unist-util-visit-parents": "^5.1.1"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
-      "version": "5.1.3",
+    "node_modules/mdast-util-to-hast/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-is": "^5.0.0"
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/mdast-util-to-hast/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -7892,31 +7522,11 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-markdown/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/mdast-util-to-markdown/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-to-markdown/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-to-markdown/node_modules/unist-util-visit": {
       "version": "5.0.0",
       "dev": true,
@@ -7946,8 +7556,6 @@
     },
     "node_modules/mdast-util-to-string": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-4.0.0.tgz",
-      "integrity": "sha512-0H44vDimn51F0YwvxSJSm0eCDOJTRlmN0R1yBh4HLj9wiV1Dn0QoXGbvFAWj2hSItVTlCmBF1hqKlIyUBVFLPg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7958,16 +7566,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-string/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/meow": {
       "version": "12.1.1",
       "dev": true,
@@ -8046,67 +7644,33 @@
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-core-commonmark/node_modules/micromark-util-character": {
-      "version": "2.1.1",
+    "node_modules/micromark-extension-gfm": {
+      "version": "3.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
+        "micromark-extension-gfm-autolink-literal": "^2.0.0",
+        "micromark-extension-gfm-footnote": "^2.0.0",
+        "micromark-extension-gfm-strikethrough": "^2.0.0",
+        "micromark-extension-gfm-table": "^2.0.0",
+        "micromark-extension-gfm-tagfilter": "^2.0.0",
+        "micromark-extension-gfm-task-list-item": "^2.0.0",
+        "micromark-util-combine-extensions": "^2.0.0",
         "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-core-commonmark/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-core-commonmark/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm": {
-      "version": "3.0.0",
+    "node_modules/micromark-extension-gfm-autolink-literal": {
+      "version": "2.1.0",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-extension-gfm-autolink-literal": "^2.0.0",
-        "micromark-extension-gfm-footnote": "^2.0.0",
-        "micromark-extension-gfm-strikethrough": "^2.0.0",
-        "micromark-extension-gfm-table": "^2.0.0",
-        "micromark-extension-gfm-tagfilter": "^2.0.0",
-        "micromark-extension-gfm-task-list-item": "^2.0.0",
-        "micromark-util-combine-extensions": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-sanitize-uri": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       },
       "funding": {
@@ -8114,12 +7678,16 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-autolink-literal": {
+    "node_modules/micromark-extension-gfm-footnote": {
       "version": "2.1.0",
       "dev": true,
       "license": "MIT",
       "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-core-commonmark": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
         "micromark-util-character": "^2.0.0",
+        "micromark-util-normalize-identifier": "^2.0.0",
         "micromark-util-sanitize-uri": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
@@ -8129,101 +7697,59 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-character": {
-      "version": "2.1.1",
+    "node_modules/micromark-extension-gfm-strikethrough": {
+      "version": "2.1.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-classify-character": "^2.0.0",
+        "micromark-util-resolve-all": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-encode": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-sanitize-uri": {
-      "version": "2.0.1",
+    "node_modules/micromark-extension-gfm-table": {
+      "version": "2.1.1",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
       "license": "MIT",
       "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-factory-space": "^2.0.0",
         "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-autolink-literal/node_modules/micromark-util-types": {
-      "version": "2.0.2",
+    "node_modules/micromark-extension-gfm-tagfilter": {
+      "version": "2.0.0",
       "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-types": "^2.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
     },
-    "node_modules/micromark-extension-gfm-footnote": {
+    "node_modules/micromark-extension-gfm-task-list-item": {
       "version": "2.1.0",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "devlop": "^1.0.0",
-        "micromark-core-commonmark": "^2.0.0",
         "micromark-factory-space": "^2.0.0",
         "micromark-util-character": "^2.0.0",
-        "micromark-util-normalize-identifier": "^2.0.0",
-        "micromark-util-sanitize-uri": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       },
@@ -8232,8 +7758,8 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-character": {
-      "version": "2.1.1",
+    "node_modules/micromark-factory-destination": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -8247,11 +7773,12 @@
       ],
       "license": "MIT",
       "dependencies": {
+        "micromark-util-character": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-encode": {
+    "node_modules/micromark-factory-label": {
       "version": "2.0.1",
       "dev": true,
       "funding": [
@@ -8264,9 +7791,15 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "devlop": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
+      }
     },
-    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-sanitize-uri": {
+    "node_modules/micromark-factory-space": {
       "version": "2.0.1",
       "dev": true,
       "funding": [
@@ -8282,11 +7815,10 @@
       "license": "MIT",
       "dependencies": {
         "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-symbol": {
+    "node_modules/micromark-factory-title": {
       "version": "2.0.1",
       "dev": true,
       "funding": [
@@ -8299,41 +7831,15 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-footnote/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-strikethrough": {
-      "version": "2.1.0",
-      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-classify-character": "^2.0.0",
-        "micromark-util-resolve-all": "^2.0.0",
+        "micromark-factory-space": "^2.0.0",
+        "micromark-util-character": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-symbol": {
+    "node_modules/micromark-factory-whitespace": {
       "version": "2.0.1",
       "dev": true,
       "funding": [
@@ -8346,41 +7852,18 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-strikethrough/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-table": {
-      "version": "2.1.1",
-      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
         "micromark-factory-space": "^2.0.0",
         "micromark-util-character": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-character": {
+    "node_modules/micromark-util-character": {
       "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
+      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -8398,7 +7881,7 @@
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-symbol": {
+    "node_modules/micromark-util-chunked": {
       "version": "2.0.1",
       "dev": true,
       "funding": [
@@ -8411,37 +7894,13 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-table/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-tagfilter": {
-      "version": "2.0.0",
-      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/micromark-extension-gfm-tagfilter/node_modules/micromark-util-types": {
-      "version": "2.0.2",
+    "node_modules/micromark-util-classify-character": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -8453,26 +7912,15 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-task-list-item": {
-      "version": "2.1.0",
-      "dev": true,
       "license": "MIT",
       "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-factory-space": "^2.0.0",
         "micromark-util-character": "^2.0.0",
         "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-character": {
-      "version": "2.1.1",
+    "node_modules/micromark-util-combine-extensions": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -8486,12 +7934,12 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-chunked": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
+    "node_modules/micromark-util-decode-numeric-character-reference": {
+      "version": "2.0.2",
       "dev": true,
       "funding": [
         {
@@ -8502,782 +7950,13 @@
           "type": "OpenCollective",
           "url": "https://opencollective.com/unified"
         }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm-task-list-item/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-extension-gfm/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-destination": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-destination/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-destination/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-destination/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-label": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-label/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-label/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-label/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-space": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-space/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-space/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-space/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-title": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-title/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-title/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-title/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-whitespace": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-factory-space": "^2.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-whitespace/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-factory-whitespace/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-factory-whitespace/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-character": {
-      "version": "1.2.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^1.0.0",
-        "micromark-util-types": "^1.0.0"
-      }
-    },
-    "node_modules/micromark-util-chunked": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-chunked/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-classify-character": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-classify-character/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-classify-character/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-classify-character/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-combine-extensions": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-combine-extensions/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-decode-numeric-character-reference": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-decode-numeric-character-reference/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-decode-string": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "decode-named-character-reference": "^1.0.0",
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-decode-numeric-character-reference": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-decode-string/node_modules/micromark-util-character": {
-      "version": "2.1.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-decode-string/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-decode-string/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-encode": {
-      "version": "1.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-html-tag-name": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-normalize-identifier": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-symbol": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-normalize-identifier/node_modules/micromark-util-symbol": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-resolve-all": {
-      "version": "2.0.1",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-types": "^2.0.0"
-      }
-    },
-    "node_modules/micromark-util-resolve-all/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
-    "node_modules/micromark-util-sanitize-uri": {
-      "version": "1.2.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "micromark-util-character": "^1.0.0",
-        "micromark-util-encode": "^1.0.0",
-        "micromark-util-symbol": "^1.0.0"
-      }
-    },
-    "node_modules/micromark-util-subtokenize": {
-      "version": "2.1.0",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT",
-      "dependencies": {
-        "devlop": "^1.0.0",
-        "micromark-util-chunked": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0",
-        "micromark-util-types": "^2.0.0"
+      ],
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
       }
     },
-    "node_modules/micromark-util-subtokenize/node_modules/micromark-util-symbol": {
+    "node_modules/micromark-util-decode-string": {
       "version": "2.0.1",
       "dev": true,
       "funding": [
@@ -9290,10 +7969,18 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "decode-named-character-reference": "^1.0.0",
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-decode-numeric-character-reference": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
     },
-    "node_modules/micromark-util-subtokenize/node_modules/micromark-util-types": {
-      "version": "2.0.2",
+    "node_modules/micromark-util-encode": {
+      "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
+      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -9307,8 +7994,8 @@
       ],
       "license": "MIT"
     },
-    "node_modules/micromark-util-symbol": {
-      "version": "1.1.0",
+    "node_modules/micromark-util-html-tag-name": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -9322,8 +8009,8 @@
       ],
       "license": "MIT"
     },
-    "node_modules/micromark-util-types": {
-      "version": "1.1.0",
+    "node_modules/micromark-util-normalize-identifier": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -9335,10 +8022,13 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-symbol": "^2.0.0"
+      }
     },
-    "node_modules/micromark/node_modules/micromark-util-character": {
-      "version": "2.1.1",
+    "node_modules/micromark-util-resolve-all": {
+      "version": "2.0.1",
       "dev": true,
       "funding": [
         {
@@ -9352,12 +8042,13 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-symbol": "^2.0.0",
         "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark/node_modules/micromark-util-encode": {
+    "node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
+      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -9369,10 +8060,15 @@
           "url": "https://opencollective.com/unified"
         }
       ],
-      "license": "MIT"
+      "license": "MIT",
+      "dependencies": {
+        "micromark-util-character": "^2.0.0",
+        "micromark-util-encode": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0"
+      }
     },
-    "node_modules/micromark/node_modules/micromark-util-sanitize-uri": {
-      "version": "2.0.1",
+    "node_modules/micromark-util-subtokenize": {
+      "version": "2.1.0",
       "dev": true,
       "funding": [
         {
@@ -9386,13 +8082,16 @@
       ],
       "license": "MIT",
       "dependencies": {
-        "micromark-util-character": "^2.0.0",
-        "micromark-util-encode": "^2.0.0",
-        "micromark-util-symbol": "^2.0.0"
+        "devlop": "^1.0.0",
+        "micromark-util-chunked": "^2.0.0",
+        "micromark-util-symbol": "^2.0.0",
+        "micromark-util-types": "^2.0.0"
       }
     },
-    "node_modules/micromark/node_modules/micromark-util-symbol": {
+    "node_modules/micromark-util-symbol": {
       "version": "2.0.1",
+      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
+      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -9406,8 +8105,10 @@
       ],
       "license": "MIT"
     },
-    "node_modules/micromark/node_modules/micromark-util-types": {
+    "node_modules/micromark-util-types": {
       "version": "2.0.2",
+      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
+      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -11263,14 +9964,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/rehype-stringify/node_modules/@types/hast": {
-      "version": "3.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/rehype-stringify/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
@@ -11654,14 +10347,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-gfm/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/remark-gfm/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
@@ -11740,31 +10425,11 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-github/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/remark-github/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-github/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-github/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
       "dev": true,
@@ -11832,8 +10497,6 @@
     },
     "node_modules/remark-man": {
       "version": "9.0.0",
-      "resolved": "https://registry.npmjs.org/remark-man/-/remark-man-9.0.0.tgz",
-      "integrity": "sha512-aikxsc6tqbYQt17oDQxY0EpwmnXFr8mmLCuQI6hGa1f6I9E/ht20hKnxAcnTScXTnafRQZYWuUpwTJiwbAtpuQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11853,27 +10516,13 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-man/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/remark-man/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/remark-man/node_modules/mdast-util-definitions": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-definitions/-/mdast-util-definitions-6.0.0.tgz",
-      "integrity": "sha512-scTllyX6pnYNZH/AIp/0ePz6s4cZtARxImwoPJ7kS42n+MnVsI4XbnG6d4ibehRIldYMWM2LD7ImQblVhUejVQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11888,8 +10537,6 @@
     },
     "node_modules/remark-man/node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11906,24 +10553,8 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-man/node_modules/unist-util-is": {
-      "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-man/node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11936,8 +10567,6 @@
     },
     "node_modules/remark-man/node_modules/unist-util-visit": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11952,8 +10581,6 @@
     },
     "node_modules/remark-man/node_modules/unist-util-visit-parents": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11967,8 +10594,6 @@
     },
     "node_modules/remark-man/node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -11982,8 +10607,6 @@
     },
     "node_modules/remark-man/node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -12010,34 +10633,11 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-parse/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/remark-parse/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-parse/node_modules/micromark-util-types": {
-      "version": "2.0.2",
-      "dev": true,
-      "funding": [
-        {
-          "type": "GitHub Sponsors",
-          "url": "https://github.com/sponsors/unifiedjs"
-        },
-        {
-          "type": "OpenCollective",
-          "url": "https://opencollective.com/unified"
-        }
-      ],
-      "license": "MIT"
-    },
     "node_modules/remark-parse/node_modules/unified": {
       "version": "11.0.5",
       "dev": true,
@@ -12095,40 +10695,106 @@
       }
     },
     "node_modules/remark-rehype": {
-      "version": "10.1.0",
+      "version": "11.1.2",
+      "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz",
+      "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/hast": "^2.0.0",
-        "@types/mdast": "^3.0.0",
-        "mdast-util-to-hast": "^12.1.0",
-        "unified": "^10.0.0"
+        "@types/hast": "^3.0.0",
+        "@types/mdast": "^4.0.0",
+        "mdast-util-to-hast": "^13.0.0",
+        "unified": "^11.0.0",
+        "vfile": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify": {
-      "version": "11.0.0",
+    "node_modules/remark-rehype/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/remark-rehype/node_modules/unified": {
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-to-markdown": "^2.0.0",
-        "unified": "^11.0.0"
+        "@types/unist": "^3.0.0",
+        "bail": "^2.0.0",
+        "devlop": "^1.0.0",
+        "extend": "^3.0.0",
+        "is-plain-obj": "^4.0.0",
+        "trough": "^2.0.0",
+        "vfile": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/@types/mdast": {
-      "version": "4.0.4",
+    "node_modules/remark-rehype/node_modules/unist-util-stringify-position": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "*"
+        "@types/unist": "^3.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-rehype/node_modules/vfile": {
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-rehype/node_modules/vfile-message": {
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
+      }
+    },
+    "node_modules/remark-stringify": {
+      "version": "11.0.0",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "@types/mdast": "^4.0.0",
+        "mdast-util-to-markdown": "^2.0.0",
+        "unified": "^11.0.0"
+      },
+      "funding": {
+        "type": "opencollective",
+        "url": "https://opencollective.com/unified"
       }
     },
     "node_modules/remark-stringify/node_modules/@types/unist": {
@@ -12192,14 +10858,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark/node_modules/@types/mdast": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "*"
-      }
-    },
     "node_modules/remark/node_modules/@types/unist": {
       "version": "3.0.3",
       "dev": true,
@@ -15967,39 +14625,48 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/unist-util-generated": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT",
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/unist-util-is": {
-      "version": "5.2.1",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
+      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/unist-util-is/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/unist-util-position": {
-      "version": "4.0.4",
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz",
+      "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/unist-util-position/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/unist-util-stringify-position": {
       "version": "3.0.3",
       "dev": true,

From 420a569762e65b50d18338706420a85f24e3e0ee Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:52:22 -0700
Subject: [PATCH 57/63] chore: unified@11.0.5

---
 docs/package.json |   2 +-
 package-lock.json | 719 +++++-----------------------------------------
 2 files changed, 66 insertions(+), 655 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 6cf2497cdb888..0ea96f7597c4f 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -34,7 +34,7 @@
     "remark-rehype": "^11.1.2",
     "semver": "^7.3.8",
     "tap": "^16.3.8",
-    "unified": "^10.1.2",
+    "unified": "^11.0.5",
     "yaml": "^2.2.1"
   },
   "author": "GitHub Inc.",
diff --git a/package-lock.json b/package-lock.json
index 95cdb21a50a08..fcbec14acdf40 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -199,7 +199,7 @@
         "remark-rehype": "^11.1.2",
         "semver": "^7.3.8",
         "tap": "^16.3.8",
-        "unified": "^10.1.2",
+        "unified": "^11.0.5",
         "yaml": "^2.2.1"
       },
       "engines": {
@@ -2142,8 +2142,6 @@
     },
     "node_modules/@types/hast": {
       "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/@types/hast/-/hast-3.0.4.tgz",
-      "integrity": "sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -2158,8 +2156,6 @@
     },
     "node_modules/@types/mdast": {
       "version": "4.0.4",
-      "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-4.0.4.tgz",
-      "integrity": "sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -6064,28 +6060,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/is-buffer": {
-      "version": "2.0.5",
-      "dev": true,
-      "funding": [
-        {
-          "type": "github",
-          "url": "https://github.com/sponsors/feross"
-        },
-        {
-          "type": "patreon",
-          "url": "https://www.patreon.com/feross"
-        },
-        {
-          "type": "consulting",
-          "url": "https://feross.org/support"
-        }
-      ],
-      "license": "MIT",
-      "engines": {
-        "node": ">=4"
-      }
-    },
     "node_modules/is-callable": {
       "version": "1.2.7",
       "dev": true,
@@ -7278,18 +7252,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-from-markdown/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-gfm": {
       "version": "3.1.0",
       "dev": true,
@@ -7400,8 +7362,6 @@
     },
     "node_modules/mdast-util-to-hast": {
       "version": "13.2.0",
-      "resolved": "https://registry.npmjs.org/mdast-util-to-hast/-/mdast-util-to-hast-13.2.0.tgz",
-      "integrity": "sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7422,29 +7382,11 @@
     },
     "node_modules/mdast-util-to-hast/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/mdast-util-to-hast/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-to-hast/node_modules/unist-util-visit": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-visit/-/unist-util-visit-5.0.0.tgz",
-      "integrity": "sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7459,8 +7401,6 @@
     },
     "node_modules/mdast-util-to-hast/node_modules/unist-util-visit-parents": {
       "version": "6.0.1",
-      "resolved": "https://registry.npmjs.org/unist-util-visit-parents/-/unist-util-visit-parents-6.0.1.tgz",
-      "integrity": "sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -7472,36 +7412,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/mdast-util-to-hast/node_modules/vfile": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/mdast-util-to-hast/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/mdast-util-to-markdown": {
       "version": "2.1.2",
       "dev": true,
@@ -7862,8 +7772,6 @@
     },
     "node_modules/micromark-util-character": {
       "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-2.1.1.tgz",
-      "integrity": "sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==",
       "dev": true,
       "funding": [
         {
@@ -7979,8 +7887,6 @@
     },
     "node_modules/micromark-util-encode": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-2.0.1.tgz",
-      "integrity": "sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==",
       "dev": true,
       "funding": [
         {
@@ -8047,8 +7953,6 @@
     },
     "node_modules/micromark-util-sanitize-uri": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-2.0.1.tgz",
-      "integrity": "sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==",
       "dev": true,
       "funding": [
         {
@@ -8090,8 +7994,6 @@
     },
     "node_modules/micromark-util-symbol": {
       "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-2.0.1.tgz",
-      "integrity": "sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==",
       "dev": true,
       "funding": [
         {
@@ -8107,8 +8009,6 @@
     },
     "node_modules/micromark-util-types": {
       "version": "2.0.2",
-      "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-2.0.2.tgz",
-      "integrity": "sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==",
       "dev": true,
       "funding": [
         {
@@ -9964,67 +9864,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/rehype-stringify/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/rehype-stringify/node_modules/unified": {
-      "version": "11.0.5",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/rehype-stringify/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/rehype-stringify/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/rehype-stringify/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/release-please": {
       "version": "17.1.2",
       "dev": true,
@@ -10347,67 +10186,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-gfm/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark-gfm/node_modules/unified": {
-      "version": "11.0.5",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-gfm/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-gfm/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-gfm/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-github": {
       "version": "12.0.0",
       "dev": true,
@@ -10430,18 +10208,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/remark-github/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-github/node_modules/unist-util-visit": {
       "version": "5.0.0",
       "dev": true,
@@ -10469,32 +10235,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-github/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-github/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-man": {
       "version": "9.0.0",
       "dev": true,
@@ -10535,169 +10275,50 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-man/node_modules/unified": {
-      "version": "11.0.5",
+    "node_modules/remark-man/node_modules/unist-util-visit": {
+      "version": "5.0.0",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
+        "unist-util-is": "^6.0.0",
+        "unist-util-visit-parents": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-man/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
+    "node_modules/remark-man/node_modules/unist-util-visit-parents": {
+      "version": "6.0.1",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-is": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-man/node_modules/unist-util-visit": {
-      "version": "5.0.0",
+    "node_modules/remark-parse": {
+      "version": "11.0.0",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0",
-        "unist-util-visit-parents": "^6.0.0"
+        "@types/mdast": "^4.0.0",
+        "mdast-util-from-markdown": "^2.0.0",
+        "micromark-util-types": "^2.0.0",
+        "unified": "^11.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-man/node_modules/unist-util-visit-parents": {
-      "version": "6.0.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-is": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-man/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-man/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-parse": {
-      "version": "11.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/mdast": "^4.0.0",
-        "mdast-util-from-markdown": "^2.0.0",
-        "micromark-util-types": "^2.0.0",
-        "unified": "^11.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-parse/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark-parse/node_modules/unified": {
-      "version": "11.0.5",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-parse/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-parse/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-parse/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-rehype": {
-      "version": "11.1.2",
-      "resolved": "https://registry.npmjs.org/remark-rehype/-/remark-rehype-11.1.2.tgz",
-      "integrity": "sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==",
+    "node_modules/remark-rehype": {
+      "version": "11.1.2",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -10712,77 +10333,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-rehype/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark-rehype/node_modules/unified": {
-      "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-rehype/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-rehype/node_modules/vfile": {
-      "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-rehype/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/remark-stringify": {
       "version": "11.0.0",
       "dev": true,
@@ -10797,128 +10347,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/remark-stringify/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark-stringify/node_modules/unified": {
-      "version": "11.0.5",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark-stringify/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/remark/node_modules/unified": {
-      "version": "11.0.5",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "bail": "^2.0.0",
-        "devlop": "^1.0.0",
-        "extend": "^3.0.0",
-        "is-plain-obj": "^4.0.0",
-        "trough": "^2.0.0",
-        "vfile": "^6.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/remark/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/require-directory": {
       "version": "2.1.1",
       "dev": true,
@@ -14259,49 +13687,6 @@
         "url": "https://opencollective.com/unified"
       }
     },
-    "node_modules/to-vfile/node_modules/@types/unist": {
-      "version": "3.0.3",
-      "dev": true,
-      "license": "MIT"
-    },
-    "node_modules/to-vfile/node_modules/unist-util-stringify-position": {
-      "version": "4.0.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/to-vfile/node_modules/vfile": {
-      "version": "6.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "vfile-message": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
-    "node_modules/to-vfile/node_modules/vfile-message": {
-      "version": "4.0.3",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "@types/unist": "^3.0.0",
-        "unist-util-stringify-position": "^4.0.0"
-      },
-      "funding": {
-        "type": "opencollective",
-        "url": "https://opencollective.com/unified"
-      }
-    },
     "node_modules/tough-cookie": {
       "version": "4.1.4",
       "dev": true,
@@ -14586,23 +13971,32 @@
       }
     },
     "node_modules/unified": {
-      "version": "10.1.2",
+      "version": "11.0.5",
+      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
+      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
+        "@types/unist": "^3.0.0",
         "bail": "^2.0.0",
+        "devlop": "^1.0.0",
         "extend": "^3.0.0",
-        "is-buffer": "^2.0.0",
         "is-plain-obj": "^4.0.0",
         "trough": "^2.0.0",
-        "vfile": "^5.0.0"
+        "vfile": "^6.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/unified/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/unique-filename": {
       "version": "4.0.0",
       "inBundle": true,
@@ -14627,8 +14021,6 @@
     },
     "node_modules/unist-util-is": {
       "version": "6.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
-      "integrity": "sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14641,15 +14033,11 @@
     },
     "node_modules/unist-util-is/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/unist-util-position": {
       "version": "5.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-position/-/unist-util-position-5.0.0.tgz",
-      "integrity": "sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14662,23 +14050,30 @@
     },
     "node_modules/unist-util-position/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/unist-util-stringify-position": {
-      "version": "3.0.3",
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
+      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0"
+        "@types/unist": "^3.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/unist-util-stringify-position/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/unist-util-visit": {
       "version": "2.0.3",
       "dev": true,
@@ -14822,14 +14217,14 @@
       }
     },
     "node_modules/vfile": {
-      "version": "5.3.7",
+      "version": "6.0.3",
+      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
+      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "is-buffer": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0",
-        "vfile-message": "^3.0.0"
+        "@types/unist": "^3.0.0",
+        "vfile-message": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
@@ -14837,18 +14232,34 @@
       }
     },
     "node_modules/vfile-message": {
-      "version": "3.1.4",
+      "version": "4.0.3",
+      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
+      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@types/unist": "^2.0.0",
-        "unist-util-stringify-position": "^3.0.0"
+        "@types/unist": "^3.0.0",
+        "unist-util-stringify-position": "^4.0.0"
       },
       "funding": {
         "type": "opencollective",
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/vfile-message/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
+    "node_modules/vfile/node_modules/@types/unist": {
+      "version": "3.0.3",
+      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
+      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/w3c-xmlserializer": {
       "version": "5.0.0",
       "dev": true,

From 0d00fd862c75d743a38ed4c5336636696129cf3b Mon Sep 17 00:00:00 2001
From: Gar 
Date: Thu, 18 Sep 2025 13:56:44 -0700
Subject: [PATCH 58/63] chore: jsdom@27.0.0
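
Bump `jsdom` in the docs workspace from `^24.0.0` to `^27.0.0`. jsdom 27
requires Node 20+, pulls in `@asamuzakjp/dom-selector` in place of a direct
`nwsapi` dependency, swaps `tough-cookie@4` (psl/url-parse) for `tough-cookie@6`
(tldts), and no longer depends on `form-data`, all of which shows up in the
lockfile churn below.

Presumably applied with something like the following (the exact invocation is
not recorded in this patch):

    # bump jsdom in the docs workspace and refresh package-lock.json
    npm install --save-dev --workspace docs jsdom@^27.0.0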

---
 docs/package.json |   2 +-
 package-lock.json | 481 +++++++++++++++++++++++++---------------------
 2 files changed, 259 insertions(+), 224 deletions(-)

diff --git a/docs/package.json b/docs/package.json
index 0ea96f7597c4f..b581361c10b87 100644
--- a/docs/package.json
+++ b/docs/package.json
@@ -26,7 +26,7 @@
     "@npmcli/template-oss": "4.25.1",
     "front-matter": "^4.0.2",
     "ignore-walk": "^8.0.0",
-    "jsdom": "^24.0.0",
+    "jsdom": "^27.0.0",
     "rehype-stringify": "^10.0.1",
     "remark-gfm": "^4.0.1",
     "remark-man": "^9.0.0",
diff --git a/package-lock.json b/package-lock.json
index fcbec14acdf40..4bdfdeea0464b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -191,7 +191,7 @@
         "@npmcli/template-oss": "4.25.1",
         "front-matter": "^4.0.2",
         "ignore-walk": "^8.0.0",
-        "jsdom": "^24.0.0",
+        "jsdom": "^27.0.0",
         "rehype-stringify": "^10.0.1",
         "remark-gfm": "^4.0.1",
         "remark-man": "^9.0.0",
@@ -280,21 +280,38 @@
       "license": "MIT"
     },
     "node_modules/@asamuzakjp/css-color": {
-      "version": "3.2.0",
+      "version": "4.0.4",
+      "resolved": "https://registry.npmjs.org/@asamuzakjp/css-color/-/css-color-4.0.4.tgz",
+      "integrity": "sha512-cKjSKvWGmAziQWbCouOsFwb14mp1betm8Y7Fn+yglDMUUu3r9DCbJ9iJbeFDenLMqFbIMC0pQP8K+B8LAxX3OQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@csstools/css-calc": "^2.1.3",
-        "@csstools/css-color-parser": "^3.0.9",
-        "@csstools/css-parser-algorithms": "^3.0.4",
-        "@csstools/css-tokenizer": "^3.0.3",
-        "lru-cache": "^10.4.3"
+        "@csstools/css-calc": "^2.1.4",
+        "@csstools/css-color-parser": "^3.0.10",
+        "@csstools/css-parser-algorithms": "^3.0.5",
+        "@csstools/css-tokenizer": "^3.0.4",
+        "lru-cache": "^11.1.0"
       }
     },
-    "node_modules/@asamuzakjp/css-color/node_modules/lru-cache": {
-      "version": "10.4.3",
+    "node_modules/@asamuzakjp/dom-selector": {
+      "version": "6.5.5",
+      "resolved": "https://registry.npmjs.org/@asamuzakjp/dom-selector/-/dom-selector-6.5.5.tgz",
+      "integrity": "sha512-kI2MX9pmImjxWT8nxDZY+MuN6r1jJGe7WxizEbsAEPB/zxfW5wYLIiPG1v3UKgEOOP8EsDkp0ZL99oRFAdPM8g==",
       "dev": true,
-      "license": "ISC"
+      "license": "MIT",
+      "dependencies": {
+        "@asamuzakjp/nwsapi": "^2.3.9",
+        "bidi-js": "^1.0.3",
+        "css-tree": "^3.1.0",
+        "is-potential-custom-element-name": "^1.0.1"
+      }
+    },
+    "node_modules/@asamuzakjp/nwsapi": {
+      "version": "2.3.9",
+      "resolved": "https://registry.npmjs.org/@asamuzakjp/nwsapi/-/nwsapi-2.3.9.tgz",
+      "integrity": "sha512-n8GuYSrI9bF7FFZ/SjhwevlHc8xaVlb/7HmHelnc/PZXBD2ZR49NnN9sMMuDdEGPeeRQ5d0hqlSlEpgCX3Wl0Q==",
+      "dev": true,
+      "license": "MIT"
     },
     "node_modules/@babel/code-frame": {
       "version": "7.27.1",
@@ -781,6 +798,8 @@
     },
     "node_modules/@csstools/color-helpers": {
       "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/@csstools/color-helpers/-/color-helpers-5.1.0.tgz",
+      "integrity": "sha512-S11EXWJyy0Mz5SYvRmY8nJYTFFd1LCNV+7cXyAgQtOOuzb4EsgfqDufL+9esx72/eLhsRdGZwaldu/h+E4t4BA==",
       "dev": true,
       "funding": [
         {
@@ -799,6 +818,8 @@
     },
     "node_modules/@csstools/css-calc": {
       "version": "2.1.4",
+      "resolved": "https://registry.npmjs.org/@csstools/css-calc/-/css-calc-2.1.4.tgz",
+      "integrity": "sha512-3N8oaj+0juUw/1H3YwmDDJXCgTB1gKU6Hc/bB502u9zR0q2vd786XJH9QfrKIEgFlZmhZiq6epXl4rHqhzsIgQ==",
       "dev": true,
       "funding": [
         {
@@ -821,6 +842,8 @@
     },
     "node_modules/@csstools/css-color-parser": {
       "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/@csstools/css-color-parser/-/css-color-parser-3.1.0.tgz",
+      "integrity": "sha512-nbtKwh3a6xNVIp/VRuXV64yTKnb1IjTAEEh3irzS+HkKjAOYLTGNb9pmVNntZ8iVBHcWDA2Dof0QtPgFI1BaTA==",
       "dev": true,
       "funding": [
         {
@@ -847,6 +870,8 @@
     },
     "node_modules/@csstools/css-parser-algorithms": {
       "version": "3.0.5",
+      "resolved": "https://registry.npmjs.org/@csstools/css-parser-algorithms/-/css-parser-algorithms-3.0.5.tgz",
+      "integrity": "sha512-DaDeUkXZKjdGhgYaHNJTV9pV7Y9B3b644jCLs9Upc3VeNGg6LWARAT6O+Q+/COo+2gg/bM5rhpMAtf70WqfBdQ==",
       "dev": true,
       "funding": [
         {
@@ -866,8 +891,33 @@
         "@csstools/css-tokenizer": "^3.0.4"
       }
     },
+    "node_modules/@csstools/css-syntax-patches-for-csstree": {
+      "version": "1.0.14",
+      "resolved": "https://registry.npmjs.org/@csstools/css-syntax-patches-for-csstree/-/css-syntax-patches-for-csstree-1.0.14.tgz",
+      "integrity": "sha512-zSlIxa20WvMojjpCSy8WrNpcZ61RqfTfX3XTaOeVlGJrt/8HF3YbzgFZa01yTbT4GWQLwfTcC3EB8i3XnB647Q==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/csstools"
+        },
+        {
+          "type": "opencollective",
+          "url": "https://opencollective.com/csstools"
+        }
+      ],
+      "license": "MIT-0",
+      "engines": {
+        "node": ">=18"
+      },
+      "peerDependencies": {
+        "postcss": "^8.4"
+      }
+    },
     "node_modules/@csstools/css-tokenizer": {
       "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@csstools/css-tokenizer/-/css-tokenizer-3.0.4.tgz",
+      "integrity": "sha512-Vd/9EVDiu6PPJt9yAh6roZP6El1xHrdvIVGjyBsHR0RYwNHgL7FJPyIIW4fANJNG6FtyZfvlRPpFI4ZM/lubvw==",
       "dev": true,
       "funding": [
         {
@@ -2638,11 +2688,6 @@
         "node": ">= 4"
       }
     },
-    "node_modules/asynckit": {
-      "version": "0.4.0",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/available-typed-arrays": {
       "version": "1.0.7",
       "dev": true,
@@ -2717,6 +2762,16 @@
         "platform": "^1.3.3"
       }
     },
+    "node_modules/bidi-js": {
+      "version": "1.0.3",
+      "resolved": "https://registry.npmjs.org/bidi-js/-/bidi-js-1.0.3.tgz",
+      "integrity": "sha512-RKshQI1R3YQ+n9YJz2QQ147P66ELpa1FQEg20Dk8oW9t2KgLbpDLLp9aGZ7y8WHSshDknG0bknqGw5/tyCs5tw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "require-from-string": "^2.0.2"
+      }
+    },
     "node_modules/bin-links": {
       "version": "5.0.0",
       "license": "ISC",
@@ -2873,6 +2928,7 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "function-bind": "^1.1.2"
@@ -3460,17 +3516,6 @@
         "color-support": "bin.js"
       }
     },
-    "node_modules/combined-stream": {
-      "version": "1.0.8",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "delayed-stream": "~1.0.0"
-      },
-      "engines": {
-        "node": ">= 0.8"
-      }
-    },
     "node_modules/comma-separated-tokens": {
       "version": "2.0.3",
       "dev": true,
@@ -3751,6 +3796,20 @@
         "url": "https://github.com/sponsors/fb55"
       }
     },
+    "node_modules/css-tree": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/css-tree/-/css-tree-3.1.0.tgz",
+      "integrity": "sha512-0eW44TGN5SQXU1mWSkKwFstI/22X2bG1nYzZTYMAWjylYURhse752YgbE4Cx46AC+bAvI+/dYTPRk1LqSUnu6w==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "mdn-data": "2.12.2",
+        "source-map-js": "^1.0.1"
+      },
+      "engines": {
+        "node": "^10 || ^12.20.0 || ^14.13.0 || >=15.0.0"
+      }
+    },
     "node_modules/css-what": {
       "version": "6.2.2",
       "dev": true,
@@ -3773,22 +3832,20 @@
       }
     },
     "node_modules/cssstyle": {
-      "version": "4.6.0",
+      "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-5.3.0.tgz",
+      "integrity": "sha512-RveJPnk3m7aarYQ2bJ6iw+Urh55S6FzUiqtBq+TihnTDP4cI8y/TYDqGOyqgnG1J1a6BxJXZsV9JFSTulm9Z7g==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "@asamuzakjp/css-color": "^3.2.0",
-        "rrweb-cssom": "^0.8.0"
+        "@asamuzakjp/css-color": "^4.0.3",
+        "@csstools/css-syntax-patches-for-csstree": "^1.0.14",
+        "css-tree": "^3.1.0"
       },
       "engines": {
-        "node": ">=18"
+        "node": ">=20"
       }
     },
-    "node_modules/cssstyle/node_modules/rrweb-cssom": {
-      "version": "0.8.0",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/dargs": {
       "version": "8.1.0",
       "dev": true,
@@ -3801,46 +3858,17 @@
       }
     },
     "node_modules/data-urls": {
-      "version": "5.0.0",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-6.0.0.tgz",
+      "integrity": "sha512-BnBS08aLUM+DKamupXs3w2tJJoqU+AkaE/+6vQxi/G/DPmIZFJJp9Dkb1kM03AZx8ADehDUZgsNxju3mPXZYIA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
         "whatwg-mimetype": "^4.0.0",
-        "whatwg-url": "^14.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/data-urls/node_modules/tr46": {
-      "version": "5.1.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "punycode": "^2.3.1"
+        "whatwg-url": "^15.0.0"
       },
       "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/data-urls/node_modules/webidl-conversions": {
-      "version": "7.0.0",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/data-urls/node_modules/whatwg-url": {
-      "version": "14.2.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "tr46": "^5.1.0",
-        "webidl-conversions": "^7.0.0"
-      },
-      "engines": {
-        "node": ">=18"
+        "node": ">=20"
       }
     },
     "node_modules/data-view-buffer": {
@@ -4033,14 +4061,6 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
-    "node_modules/delayed-stream": {
-      "version": "1.0.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">=0.4.0"
-      }
-    },
     "node_modules/deprecation": {
       "version": "2.3.1",
       "dev": true,
@@ -4164,6 +4184,7 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.1",
         "es-errors": "^1.3.0",
@@ -4301,6 +4322,7 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -4309,6 +4331,7 @@
       "version": "1.3.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -4317,6 +4340,7 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0"
       },
@@ -4328,6 +4352,7 @@
       "version": "2.1.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "get-intrinsic": "^1.2.6",
@@ -5228,21 +5253,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/form-data": {
-      "version": "4.0.4",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "asynckit": "^0.4.0",
-        "combined-stream": "^1.0.8",
-        "es-set-tostringtag": "^2.1.0",
-        "hasown": "^2.0.2",
-        "mime-types": "^2.1.12"
-      },
-      "engines": {
-        "node": ">= 6"
-      }
-    },
     "node_modules/fromentries": {
       "version": "1.3.2",
       "dev": true,
@@ -5373,6 +5383,7 @@
       "version": "1.3.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.2",
         "es-define-property": "^1.0.1",
@@ -5404,6 +5415,7 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "dunder-proto": "^1.0.1",
         "es-object-atoms": "^1.0.0"
@@ -5541,6 +5553,7 @@
       "version": "1.2.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -5647,6 +5660,7 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -5658,6 +5672,7 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "has-symbols": "^1.0.3"
       },
@@ -6679,37 +6694,38 @@
       }
     },
     "node_modules/jsdom": {
-      "version": "24.1.3",
+      "version": "27.0.0",
+      "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-27.0.0.tgz",
+      "integrity": "sha512-lIHeR1qlIRrIN5VMccd8tI2Sgw6ieYXSVktcSHaNe3Z5nE/tcPQYQWOq00wxMvYOsz+73eAkNenVvmPC6bba9A==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
-        "cssstyle": "^4.0.1",
-        "data-urls": "^5.0.0",
-        "decimal.js": "^10.4.3",
-        "form-data": "^4.0.0",
+        "@asamuzakjp/dom-selector": "^6.5.4",
+        "cssstyle": "^5.3.0",
+        "data-urls": "^6.0.0",
+        "decimal.js": "^10.5.0",
         "html-encoding-sniffer": "^4.0.0",
         "http-proxy-agent": "^7.0.2",
-        "https-proxy-agent": "^7.0.5",
+        "https-proxy-agent": "^7.0.6",
         "is-potential-custom-element-name": "^1.0.1",
-        "nwsapi": "^2.2.12",
-        "parse5": "^7.1.2",
-        "rrweb-cssom": "^0.7.1",
+        "parse5": "^7.3.0",
+        "rrweb-cssom": "^0.8.0",
         "saxes": "^6.0.0",
         "symbol-tree": "^3.2.4",
-        "tough-cookie": "^4.1.4",
+        "tough-cookie": "^6.0.0",
         "w3c-xmlserializer": "^5.0.0",
-        "webidl-conversions": "^7.0.0",
+        "webidl-conversions": "^8.0.0",
         "whatwg-encoding": "^3.1.1",
         "whatwg-mimetype": "^4.0.0",
-        "whatwg-url": "^14.0.0",
-        "ws": "^8.18.0",
+        "whatwg-url": "^15.0.0",
+        "ws": "^8.18.2",
         "xml-name-validator": "^5.0.0"
       },
       "engines": {
-        "node": ">=18"
+        "node": ">=20"
       },
       "peerDependencies": {
-        "canvas": "^2.11.2"
+        "canvas": "^3.0.0"
       },
       "peerDependenciesMeta": {
         "canvas": {
@@ -6717,37 +6733,6 @@
         }
       }
     },
-    "node_modules/jsdom/node_modules/tr46": {
-      "version": "5.1.1",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "punycode": "^2.3.1"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/jsdom/node_modules/webidl-conversions": {
-      "version": "7.0.0",
-      "dev": true,
-      "license": "BSD-2-Clause",
-      "engines": {
-        "node": ">=12"
-      }
-    },
-    "node_modules/jsdom/node_modules/whatwg-url": {
-      "version": "14.2.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "tr46": "^5.1.0",
-        "webidl-conversions": "^7.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "node_modules/jsep": {
       "version": "1.4.0",
       "dev": true,
@@ -7176,6 +7161,7 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -7476,6 +7462,13 @@
         "url": "https://opencollective.com/unified"
       }
     },
+    "node_modules/mdn-data": {
+      "version": "2.12.2",
+      "resolved": "https://registry.npmjs.org/mdn-data/-/mdn-data-2.12.2.tgz",
+      "integrity": "sha512-IEn+pegP1aManZuckezWCO+XZQDplx1366JoVhTpMpBB1sPey/SbveZQUosKiKiGYjg1wH4pMlNgXbCiYgihQA==",
+      "dev": true,
+      "license": "CC0-1.0"
+    },
     "node_modules/meow": {
       "version": "12.1.1",
       "dev": true,
@@ -8022,25 +8015,6 @@
       ],
       "license": "MIT"
     },
-    "node_modules/mime-db": {
-      "version": "1.52.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
-    "node_modules/mime-types": {
-      "version": "2.1.35",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "mime-db": "1.52.0"
-      },
-      "engines": {
-        "node": ">= 0.6"
-      }
-    },
     "node_modules/min-indent": {
       "version": "1.0.1",
       "dev": true,
@@ -8260,6 +8234,26 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/nanoid": {
+      "version": "3.3.11",
+      "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz",
+      "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/ai"
+        }
+      ],
+      "license": "MIT",
+      "peer": true,
+      "bin": {
+        "nanoid": "bin/nanoid.cjs"
+      },
+      "engines": {
+        "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1"
+      }
+    },
     "node_modules/natural-compare": {
       "version": "1.4.0",
       "dev": true,
@@ -8679,11 +8673,6 @@
         "url": "https://github.com/fb55/nth-check?sponsor=1"
       }
     },
-    "node_modules/nwsapi": {
-      "version": "2.2.22",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/nyc": {
       "version": "15.1.0",
       "dev": true,
@@ -9426,6 +9415,36 @@
         "node": ">= 0.4"
       }
     },
+    "node_modules/postcss": {
+      "version": "8.5.6",
+      "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz",
+      "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==",
+      "dev": true,
+      "funding": [
+        {
+          "type": "opencollective",
+          "url": "https://opencollective.com/postcss/"
+        },
+        {
+          "type": "tidelift",
+          "url": "https://tidelift.com/funding/github/npm/postcss"
+        },
+        {
+          "type": "github",
+          "url": "https://github.com/sponsors/ai"
+        }
+      ],
+      "license": "MIT",
+      "peer": true,
+      "dependencies": {
+        "nanoid": "^3.3.11",
+        "picocolors": "^1.1.1",
+        "source-map-js": "^1.2.1"
+      },
+      "engines": {
+        "node": "^10 || ^12 || >=14"
+      }
+    },
     "node_modules/postcss-selector-parser": {
       "version": "7.1.0",
       "license": "MIT",
@@ -9542,17 +9561,6 @@
         "node": ">= 14"
       }
     },
-    "node_modules/psl": {
-      "version": "1.15.0",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "punycode": "^2.3.1"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/lupomontero"
-      }
-    },
     "node_modules/punycode": {
       "version": "2.3.1",
       "dev": true,
@@ -9568,11 +9576,6 @@
         "qrcode-terminal": "bin/qrcode-terminal.js"
       }
     },
-    "node_modules/querystringify": {
-      "version": "2.2.0",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/queue-microtask": {
       "version": "1.2.3",
       "dev": true,
@@ -10376,11 +10379,6 @@
       "dev": true,
       "license": "ISC"
     },
-    "node_modules/requires-port": {
-      "version": "1.0.0",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/resolve": {
       "version": "1.22.10",
       "dev": true,
@@ -10453,7 +10451,9 @@
       }
     },
     "node_modules/rrweb-cssom": {
-      "version": "0.7.1",
+      "version": "0.8.0",
+      "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.8.0.tgz",
+      "integrity": "sha512-guoltQEx+9aMf2gDZ0s62EcV8lsXR+0w8915TC3ITdn2YueuNjdAYh/levpU9nFaoChh9RUS5ZdQMrKfVEN9tw==",
       "dev": true,
       "license": "MIT"
     },
@@ -10791,6 +10791,16 @@
         "node": ">=0.10.0"
       }
     },
+    "node_modules/source-map-js": {
+      "version": "1.2.1",
+      "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
+      "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==",
+      "dev": true,
+      "license": "BSD-3-Clause",
+      "engines": {
+        "node": ">=0.10.0"
+      }
+    },
     "node_modules/source-map-support": {
       "version": "0.5.21",
       "dev": true,
@@ -13664,6 +13674,26 @@
         "url": "https://github.com/sponsors/jonschlinkert"
       }
     },
+    "node_modules/tldts": {
+      "version": "7.0.14",
+      "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.14.tgz",
+      "integrity": "sha512-lMNHE4aSI3LlkMUMicTmAG3tkkitjOQGDTFboPJwAg2kJXKP1ryWEyqujktg5qhrFZOkk5YFzgkxg3jErE+i5w==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "tldts-core": "^7.0.14"
+      },
+      "bin": {
+        "tldts": "bin/cli.js"
+      }
+    },
+    "node_modules/tldts-core": {
+      "version": "7.0.14",
+      "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.14.tgz",
+      "integrity": "sha512-viZGNK6+NdluOJWwTO9olaugx0bkKhscIdriQQ+lNNhwitIKvb+SvhbYgnCz6j9p7dX3cJntt4agQAKMXLjJ5g==",
+      "dev": true,
+      "license": "MIT"
+    },
     "node_modules/to-regex-range": {
       "version": "5.0.1",
       "dev": true,
@@ -13688,17 +13718,29 @@
       }
     },
     "node_modules/tough-cookie": {
-      "version": "4.1.4",
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-6.0.0.tgz",
+      "integrity": "sha512-kXuRi1mtaKMrsLUxz3sQYvVl37B0Ns6MzfrtV5DvJceE9bPyspOqk9xxv7XbZWcfLWbFmm997vl83qUWVJA64w==",
       "dev": true,
       "license": "BSD-3-Clause",
       "dependencies": {
-        "psl": "^1.1.33",
-        "punycode": "^2.1.1",
-        "universalify": "^0.2.0",
-        "url-parse": "^1.5.3"
+        "tldts": "^7.0.5"
       },
       "engines": {
-        "node": ">=6"
+        "node": ">=16"
+      }
+    },
+    "node_modules/tr46": {
+      "version": "6.0.0",
+      "resolved": "https://registry.npmjs.org/tr46/-/tr46-6.0.0.tgz",
+      "integrity": "sha512-bLVMLPtstlZ4iMQHpFHTR7GAGj2jxi8Dg0s2h2MafAE4uSWF98FC/3MomU51iQAMf8/qDUbKWf5GxuvvVcXEhw==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "punycode": "^2.3.1"
+      },
+      "engines": {
+        "node": ">=20"
       }
     },
     "node_modules/treeverse": {
@@ -13972,8 +14014,6 @@
     },
     "node_modules/unified": {
       "version": "11.0.5",
-      "resolved": "https://registry.npmjs.org/unified/-/unified-11.0.5.tgz",
-      "integrity": "sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -13992,8 +14032,6 @@
     },
     "node_modules/unified/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
@@ -14055,8 +14093,6 @@
     },
     "node_modules/unist-util-stringify-position": {
       "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
-      "integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14069,8 +14105,6 @@
     },
     "node_modules/unist-util-stringify-position/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
@@ -14124,14 +14158,6 @@
       "dev": true,
       "license": "ISC"
     },
-    "node_modules/universalify": {
-      "version": "0.2.0",
-      "dev": true,
-      "license": "MIT",
-      "engines": {
-        "node": ">= 4.0.0"
-      }
-    },
     "node_modules/update-browserslist-db": {
       "version": "1.1.3",
       "dev": true,
@@ -14169,15 +14195,6 @@
         "punycode": "^2.1.0"
       }
     },
-    "node_modules/url-parse": {
-      "version": "1.5.10",
-      "dev": true,
-      "license": "MIT",
-      "dependencies": {
-        "querystringify": "^2.1.1",
-        "requires-port": "^1.0.0"
-      }
-    },
     "node_modules/util-deprecate": {
       "version": "1.0.2",
       "license": "MIT"
@@ -14218,8 +14235,6 @@
     },
     "node_modules/vfile": {
       "version": "6.0.3",
-      "resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
-      "integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14233,8 +14248,6 @@
     },
     "node_modules/vfile-message": {
       "version": "4.0.3",
-      "resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
-      "integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
       "dev": true,
       "license": "MIT",
       "dependencies": {
@@ -14248,15 +14261,11 @@
     },
     "node_modules/vfile-message/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
     "node_modules/vfile/node_modules/@types/unist": {
       "version": "3.0.3",
-      "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
-      "integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
       "dev": true,
       "license": "MIT"
     },
@@ -14278,6 +14287,16 @@
         "node": "20 || >=22"
       }
     },
+    "node_modules/webidl-conversions": {
+      "version": "8.0.0",
+      "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-8.0.0.tgz",
+      "integrity": "sha512-n4W4YFyz5JzOfQeA8oN7dUYpR+MBP3PIUsn2jLjWXwK5ASUzt0Jc/A5sAUZoCYFJRGF0FBKJ+1JjN43rNdsQzA==",
+      "dev": true,
+      "license": "BSD-2-Clause",
+      "engines": {
+        "node": ">=20"
+      }
+    },
     "node_modules/whatwg-encoding": {
       "version": "3.1.1",
       "dev": true,
@@ -14291,12 +14310,28 @@
     },
     "node_modules/whatwg-mimetype": {
       "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
+      "integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
       "dev": true,
       "license": "MIT",
       "engines": {
         "node": ">=18"
       }
     },
+    "node_modules/whatwg-url": {
+      "version": "15.1.0",
+      "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-15.1.0.tgz",
+      "integrity": "sha512-2ytDk0kiEj/yu90JOAp44PVPUkO9+jVhyf+SybKlRHSDlvOOZhdPIrr7xTH64l4WixO2cP+wQIcgujkGBPPz6g==",
+      "dev": true,
+      "license": "MIT",
+      "dependencies": {
+        "tr46": "^6.0.0",
+        "webidl-conversions": "^8.0.0"
+      },
+      "engines": {
+        "node": ">=20"
+      }
+    },
     "node_modules/which": {
       "version": "5.0.0",
       "inBundle": true,

From ea15731e3246ca698ad3f63fadd696479a906633 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Tue, 23 Sep 2025 09:16:37 -0700
Subject: [PATCH 59/63] deps: binary-extensions@3.1.0

re-hoisting binary-extensions to the top-level node_modules (it was previously nested under workspaces/libnpmdiff/node_modules)
---
 node_modules/.gitignore                       |   1 +
 .../binary-extensions/binary-extensions.json  | 264 ++++++++++++++++++
 node_modules/binary-extensions/index.js       |   3 +
 node_modules/binary-extensions/license        |  10 +
 node_modules/binary-extensions/package.json   |  45 +++
 package-lock.json                             |  22 +-
 6 files changed, 335 insertions(+), 10 deletions(-)
 create mode 100644 node_modules/binary-extensions/binary-extensions.json
 create mode 100644 node_modules/binary-extensions/index.js
 create mode 100644 node_modules/binary-extensions/license
 create mode 100644 node_modules/binary-extensions/package.json

diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 3bfc954920036..2f70b335d6fa5 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -57,6 +57,7 @@
 !/archy
 !/balanced-match
 !/bin-links
+!/binary-extensions
 !/brace-expansion
 !/cacache
 !/chalk
diff --git a/node_modules/binary-extensions/binary-extensions.json b/node_modules/binary-extensions/binary-extensions.json
new file mode 100644
index 0000000000000..9a57d80cd08fb
--- /dev/null
+++ b/node_modules/binary-extensions/binary-extensions.json
@@ -0,0 +1,264 @@
+[
+	"3dm",
+	"3ds",
+	"3g2",
+	"3gp",
+	"7z",
+	"a",
+	"aac",
+	"adp",
+	"afdesign",
+	"afphoto",
+	"afpub",
+	"ai",
+	"aif",
+	"aiff",
+	"alz",
+	"ape",
+	"apk",
+	"appimage",
+	"ar",
+	"arj",
+	"asf",
+	"au",
+	"avi",
+	"bak",
+	"baml",
+	"bh",
+	"bin",
+	"bk",
+	"bmp",
+	"btif",
+	"bz2",
+	"bzip2",
+	"cab",
+	"caf",
+	"cgm",
+	"class",
+	"cmx",
+	"cpio",
+	"cr2",
+	"cr3",
+	"cur",
+	"dat",
+	"dcm",
+	"deb",
+	"dex",
+	"djvu",
+	"dll",
+	"dmg",
+	"dng",
+	"doc",
+	"docm",
+	"docx",
+	"dot",
+	"dotm",
+	"dra",
+	"DS_Store",
+	"dsk",
+	"dts",
+	"dtshd",
+	"dvb",
+	"dwg",
+	"dxf",
+	"ecelp4800",
+	"ecelp7470",
+	"ecelp9600",
+	"egg",
+	"eol",
+	"eot",
+	"epub",
+	"exe",
+	"f4v",
+	"fbs",
+	"fh",
+	"fla",
+	"flac",
+	"flatpak",
+	"fli",
+	"flv",
+	"fpx",
+	"fst",
+	"fvt",
+	"g3",
+	"gh",
+	"gif",
+	"graffle",
+	"gz",
+	"gzip",
+	"h261",
+	"h263",
+	"h264",
+	"icns",
+	"ico",
+	"ief",
+	"img",
+	"ipa",
+	"iso",
+	"jar",
+	"jpeg",
+	"jpg",
+	"jpgv",
+	"jpm",
+	"jxr",
+	"key",
+	"ktx",
+	"lha",
+	"lib",
+	"lvp",
+	"lz",
+	"lzh",
+	"lzma",
+	"lzo",
+	"m3u",
+	"m4a",
+	"m4v",
+	"mar",
+	"mdi",
+	"mht",
+	"mid",
+	"midi",
+	"mj2",
+	"mka",
+	"mkv",
+	"mmr",
+	"mng",
+	"mobi",
+	"mov",
+	"movie",
+	"mp3",
+	"mp4",
+	"mp4a",
+	"mpeg",
+	"mpg",
+	"mpga",
+	"mxu",
+	"nef",
+	"npx",
+	"numbers",
+	"nupkg",
+	"o",
+	"odp",
+	"ods",
+	"odt",
+	"oga",
+	"ogg",
+	"ogv",
+	"otf",
+	"ott",
+	"pages",
+	"pbm",
+	"pcx",
+	"pdb",
+	"pdf",
+	"pea",
+	"pgm",
+	"pic",
+	"png",
+	"pnm",
+	"pot",
+	"potm",
+	"potx",
+	"ppa",
+	"ppam",
+	"ppm",
+	"pps",
+	"ppsm",
+	"ppsx",
+	"ppt",
+	"pptm",
+	"pptx",
+	"psd",
+	"pya",
+	"pyc",
+	"pyo",
+	"pyv",
+	"qt",
+	"rar",
+	"ras",
+	"raw",
+	"resources",
+	"rgb",
+	"rip",
+	"rlc",
+	"rmf",
+	"rmvb",
+	"rpm",
+	"rtf",
+	"rz",
+	"s3m",
+	"s7z",
+	"scpt",
+	"sgi",
+	"shar",
+	"snap",
+	"sil",
+	"sketch",
+	"slk",
+	"smv",
+	"snk",
+	"so",
+	"stl",
+	"suo",
+	"sub",
+	"swf",
+	"tar",
+	"tbz",
+	"tbz2",
+	"tga",
+	"tgz",
+	"thmx",
+	"tif",
+	"tiff",
+	"tlz",
+	"ttc",
+	"ttf",
+	"txz",
+	"udf",
+	"uvh",
+	"uvi",
+	"uvm",
+	"uvp",
+	"uvs",
+	"uvu",
+	"viv",
+	"vob",
+	"war",
+	"wav",
+	"wax",
+	"wbmp",
+	"wdp",
+	"weba",
+	"webm",
+	"webp",
+	"whl",
+	"wim",
+	"wm",
+	"wma",
+	"wmv",
+	"wmx",
+	"woff",
+	"woff2",
+	"wrm",
+	"wvx",
+	"xbm",
+	"xif",
+	"xla",
+	"xlam",
+	"xls",
+	"xlsb",
+	"xlsm",
+	"xlsx",
+	"xlt",
+	"xltm",
+	"xltx",
+	"xm",
+	"xmind",
+	"xpi",
+	"xpm",
+	"xwd",
+	"xz",
+	"z",
+	"zip",
+	"zipx"
+]
diff --git a/node_modules/binary-extensions/index.js b/node_modules/binary-extensions/index.js
new file mode 100644
index 0000000000000..6c99c7eb54f17
--- /dev/null
+++ b/node_modules/binary-extensions/index.js
@@ -0,0 +1,3 @@
+import binaryExtensions from './binary-extensions.json' with {type: 'json'};
+
+export default binaryExtensions;
diff --git a/node_modules/binary-extensions/license b/node_modules/binary-extensions/license
new file mode 100644
index 0000000000000..5493a1a6e3f9a
--- /dev/null
+++ b/node_modules/binary-extensions/license
@@ -0,0 +1,10 @@
+MIT License
+
+Copyright (c) Sindre Sorhus  (https://sindresorhus.com)
+Copyright (c) Paul Miller (https://paulmillr.com)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/node_modules/binary-extensions/package.json b/node_modules/binary-extensions/package.json
new file mode 100644
index 0000000000000..abe49c2e9a34a
--- /dev/null
+++ b/node_modules/binary-extensions/package.json
@@ -0,0 +1,45 @@
+{
+	"name": "binary-extensions",
+	"version": "3.1.0",
+	"description": "List of binary file extensions",
+	"license": "MIT",
+	"repository": "sindresorhus/binary-extensions",
+	"funding": "https://github.com/sponsors/sindresorhus",
+	"author": {
+		"name": "Sindre Sorhus",
+		"email": "sindresorhus@gmail.com",
+		"url": "https://sindresorhus.com"
+	},
+	"type": "module",
+	"exports": {
+		"types": "./index.d.ts",
+		"default": "./index.js"
+	},
+	"sideEffects": false,
+	"engines": {
+		"node": ">=18.20"
+	},
+	"scripts": {
+		"//test": "xo && ava && tsd",
+		"test": "ava && tsd"
+	},
+	"files": [
+		"index.js",
+		"index.d.ts",
+		"binary-extensions.json"
+	],
+	"keywords": [
+		"binary",
+		"extensions",
+		"extension",
+		"file",
+		"json",
+		"list",
+		"array"
+	],
+	"devDependencies": {
+		"ava": "^6.1.2",
+		"tsd": "^0.31.0",
+		"xo": "^0.58.0"
+	}
+}
diff --git a/package-lock.json b/package-lock.json
index 4bdfdeea0464b..0f7f9c8873b43 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -2786,6 +2786,18 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
+    "node_modules/binary-extensions": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-3.1.0.tgz",
+      "integrity": "sha512-Jvvd9hy1w+xUad8+ckQsWA/V1AoyubOvqn0aygjMOVM4BfIaRav1NFS3LsTSDaV4n4FtcCtQXvzep1E6MboqwQ==",
+      "license": "MIT",
+      "engines": {
+        "node": ">=18.20"
+      },
+      "funding": {
+        "url": "https://github.com/sponsors/sindresorhus"
+      }
+    },
     "node_modules/bind-obj-methods": {
       "version": "3.0.0",
       "dev": true,
@@ -14807,16 +14819,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "workspaces/libnpmdiff/node_modules/binary-extensions": {
-      "version": "3.1.0",
-      "license": "MIT",
-      "engines": {
-        "node": ">=18.20"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/sindresorhus"
-      }
-    },
     "workspaces/libnpmexec": {
       "version": "10.1.6",
       "license": "ISC",

From 4059dfa47b0afc982703d8d83fce5574fdc6308f Mon Sep 17 00:00:00 2001
From: Michael Smith 
Date: Mon, 22 Sep 2025 14:21:07 -0700
Subject: [PATCH 60/63] chore: properly use arborist and cache in test

---
 workspaces/arborist/test/audit-report.js | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/workspaces/arborist/test/audit-report.js b/workspaces/arborist/test/audit-report.js
index f546793688490..0fc1aac7d1c0d 100644
--- a/workspaces/arborist/test/audit-report.js
+++ b/workspaces/arborist/test/audit-report.js
@@ -380,6 +380,8 @@ t.test('audit supports alias deps', async t => {
   const registry = createRegistry(t)
   registry.audit({ results: require(resolve(path, 'advisory-bulk.json')) })
   registry.mocks({ dir: join(__dirname, 'fixtures') })
+  const cache = t.testdir()
+  const arb = newArb(path, { cache })
   const tree = new Node({
     path,
     pkg: {
@@ -414,7 +416,7 @@ t.test('audit supports alias deps', async t => {
     ],
   })
 
-  const report = await AuditReport.load(tree, { path })
+  const report = await AuditReport.load(tree, arb.options)
   t.matchSnapshot(JSON.stringify(report, 0, 2), 'json version')
   t.equal(report.get('mkdirp').simpleRange, '0.4.1 - 0.5.1')
 })

From 7eb5c09eb4c9d20095fd285a32275743f10cf80b Mon Sep 17 00:00:00 2001
From: Gar 
Date: Tue, 23 Sep 2025 11:17:05 -0700
Subject: [PATCH 61/63] chore: update package-lock with peer flag fixes

---
 package-lock.json | 234 +++++++---------------------------------------
 1 file changed, 32 insertions(+), 202 deletions(-)

diff --git a/package-lock.json b/package-lock.json
index 0f7f9c8873b43..25b4d10c29f37 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -338,6 +338,7 @@
       "version": "7.28.4",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@babel/code-frame": "^7.27.1",
         "@babel/generator": "^7.28.3",
@@ -884,6 +885,7 @@
         }
       ],
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=18"
       },
@@ -930,6 +932,7 @@
         }
       ],
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=18"
       }
@@ -938,7 +941,6 @@
       "version": "4.9.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "eslint-visitor-keys": "^3.4.3"
       },
@@ -956,7 +958,6 @@
       "version": "4.12.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": "^12.0.0 || ^14.0.0 || >=16.0.0"
       }
@@ -965,7 +966,6 @@
       "version": "2.1.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "ajv": "^6.12.4",
         "debug": "^4.3.2",
@@ -988,7 +988,6 @@
       "version": "6.12.6",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "fast-deep-equal": "^3.1.1",
         "fast-json-stable-stringify": "^2.0.0",
@@ -1004,7 +1003,6 @@
       "version": "1.1.12",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -1013,14 +1011,12 @@
     "node_modules/@eslint/eslintrc/node_modules/json-schema-traverse": {
       "version": "0.4.1",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/@eslint/eslintrc/node_modules/minimatch": {
       "version": "3.1.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -1032,7 +1028,6 @@
       "version": "8.57.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
       }
@@ -1070,6 +1065,7 @@
       "version": "5.2.2",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@octokit/auth-token": "^4.0.0",
         "@octokit/graphql": "^7.1.0",
@@ -1226,7 +1222,6 @@
       "version": "0.13.0",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "dependencies": {
         "@humanwhocodes/object-schema": "^2.0.3",
         "debug": "^4.3.1",
@@ -1240,7 +1235,6 @@
       "version": "1.1.12",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -1250,7 +1244,6 @@
       "version": "3.1.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -1262,7 +1255,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "engines": {
         "node": ">=12.22"
       },
@@ -1274,8 +1266,7 @@
     "node_modules/@humanwhocodes/object-schema": {
       "version": "2.0.3",
       "dev": true,
-      "license": "BSD-3-Clause",
-      "peer": true
+      "license": "BSD-3-Clause"
     },
     "node_modules/@iarna/toml": {
       "version": "3.0.0",
@@ -1544,7 +1535,6 @@
       "version": "2.1.5",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@nodelib/fs.stat": "2.0.5",
         "run-parallel": "^1.1.9"
@@ -1557,7 +1547,6 @@
       "version": "2.0.5",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 8"
       }
@@ -1566,7 +1555,6 @@
       "version": "1.2.8",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@nodelib/fs.scandir": "2.1.5",
         "fastq": "^1.6.0"
@@ -1850,6 +1838,7 @@
       "version": "7.0.4",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@octokit/auth-token": "^6.0.0",
         "@octokit/graphql": "^9.0.1",
@@ -2057,8 +2046,7 @@
     "node_modules/@rtsao/scc": {
       "version": "1.1.0",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/@sigstore/bundle": {
       "version": "4.0.0",
@@ -2201,8 +2189,7 @@
     "node_modules/@types/json5": {
       "version": "0.0.29",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/@types/mdast": {
       "version": "4.0.4",
@@ -2226,6 +2213,7 @@
       "version": "24.5.2",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "undici-types": "~7.12.0"
       }
@@ -2295,7 +2283,6 @@
       "version": "5.3.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "peerDependencies": {
         "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0"
       }
@@ -2324,6 +2311,7 @@
       "version": "8.17.1",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "fast-deep-equal": "^3.1.3",
         "fast-uri": "^3.0.1",
@@ -2530,7 +2518,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "is-array-buffer": "^3.0.5"
@@ -2551,7 +2538,6 @@
       "version": "3.1.9",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.4",
@@ -2573,7 +2559,6 @@
       "version": "1.2.6",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.4",
@@ -2594,7 +2579,6 @@
       "version": "1.3.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -2612,7 +2596,6 @@
       "version": "1.3.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -2630,7 +2613,6 @@
       "version": "1.0.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "array-buffer-byte-length": "^1.0.1",
         "call-bind": "^1.0.8",
@@ -2659,7 +2641,6 @@
       "version": "1.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -2692,7 +2673,6 @@
       "version": "1.0.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "possible-typed-array-names": "^1.0.0"
       },
@@ -2848,6 +2828,7 @@
         }
       ],
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "baseline-browser-mapping": "^2.8.3",
         "caniuse-lite": "^1.0.30001741",
@@ -2922,7 +2903,6 @@
       "version": "1.0.8",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.0",
         "es-define-property": "^1.0.0",
@@ -2940,7 +2920,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "function-bind": "^1.1.2"
@@ -2953,7 +2932,6 @@
       "version": "1.0.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.2",
         "get-intrinsic": "^1.3.0"
@@ -3251,6 +3229,7 @@
       "version": "5.2.2",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@octokit/auth-token": "^4.0.0",
         "@octokit/graphql": "^7.1.0",
@@ -3724,6 +3703,7 @@
       "version": "9.0.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "env-paths": "^2.2.1",
         "import-fresh": "^3.3.0",
@@ -3887,7 +3867,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -3904,7 +3883,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -3921,7 +3899,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -4022,8 +3999,7 @@
     "node_modules/deep-is": {
       "version": "0.1.4",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/default-require-extensions": {
       "version": "3.0.1",
@@ -4043,7 +4019,6 @@
       "version": "1.1.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-define-property": "^1.0.0",
         "es-errors": "^1.3.0",
@@ -4060,7 +4035,6 @@
       "version": "1.2.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "define-data-property": "^1.0.1",
         "has-property-descriptors": "^1.0.0",
@@ -4122,7 +4096,6 @@
       "version": "3.0.0",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "dependencies": {
         "esutils": "^2.0.2"
       },
@@ -4196,7 +4169,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.1",
         "es-errors": "^1.3.0",
@@ -4266,7 +4238,6 @@
       "version": "1.24.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "array-buffer-byte-length": "^1.0.2",
         "arraybuffer.prototype.slice": "^1.0.4",
@@ -4334,7 +4305,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -4343,7 +4313,6 @@
       "version": "1.3.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -4352,7 +4321,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0"
       },
@@ -4364,7 +4332,6 @@
       "version": "2.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "get-intrinsic": "^1.2.6",
@@ -4379,7 +4346,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "hasown": "^2.0.2"
       },
@@ -4391,7 +4357,6 @@
       "version": "1.3.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "is-callable": "^1.2.7",
         "is-date-object": "^1.0.5",
@@ -4421,7 +4386,6 @@
       "version": "4.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -4488,7 +4452,6 @@
       "version": "0.3.9",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "debug": "^3.2.7",
         "is-core-module": "^2.13.0",
@@ -4499,7 +4462,6 @@
       "version": "3.2.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "ms": "^2.1.1"
       }
@@ -4508,7 +4470,6 @@
       "version": "2.12.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "debug": "^3.2.7"
       },
@@ -4525,7 +4486,6 @@
       "version": "3.2.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "ms": "^2.1.1"
       }
@@ -4534,7 +4494,6 @@
       "version": "3.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "eslint-utils": "^2.0.0",
         "regexpp": "^3.0.0"
@@ -4553,7 +4512,6 @@
       "version": "2.32.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@rtsao/scc": "^1.1.0",
         "array-includes": "^3.1.9",
@@ -4586,7 +4544,6 @@
       "version": "1.1.12",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -4596,7 +4553,6 @@
       "version": "3.2.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "ms": "^2.1.1"
       }
@@ -4605,7 +4561,6 @@
       "version": "2.1.0",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "dependencies": {
         "esutils": "^2.0.2"
       },
@@ -4617,7 +4572,6 @@
       "version": "3.1.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -4629,7 +4583,6 @@
       "version": "6.3.1",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "bin": {
         "semver": "bin/semver.js"
       }
@@ -4638,7 +4591,6 @@
       "version": "11.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "eslint-plugin-es": "^3.0.0",
         "eslint-utils": "^2.0.0",
@@ -4658,7 +4610,6 @@
       "version": "1.1.12",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -4668,7 +4619,6 @@
       "version": "3.1.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -4680,7 +4630,6 @@
       "version": "6.3.1",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "bin": {
         "semver": "bin/semver.js"
       }
@@ -4704,7 +4653,6 @@
       "version": "7.2.2",
       "dev": true,
       "license": "BSD-2-Clause",
-      "peer": true,
       "dependencies": {
         "esrecurse": "^4.3.0",
         "estraverse": "^5.2.0"
@@ -4720,7 +4668,6 @@
       "version": "2.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "eslint-visitor-keys": "^1.1.0"
       },
@@ -4735,7 +4682,6 @@
       "version": "1.3.0",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "engines": {
         "node": ">=4"
       }
@@ -4744,7 +4690,6 @@
       "version": "3.4.3",
       "dev": true,
       "license": "Apache-2.0",
-      "peer": true,
       "engines": {
         "node": "^12.22.0 || ^14.17.0 || >=16.0.0"
       },
@@ -4756,7 +4701,6 @@
       "version": "6.12.6",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "fast-deep-equal": "^3.1.1",
         "fast-json-stable-stringify": "^2.0.0",
@@ -4772,7 +4716,6 @@
       "version": "4.3.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "color-convert": "^2.0.1"
       },
@@ -4787,7 +4730,6 @@
       "version": "1.1.12",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -4797,7 +4739,6 @@
       "version": "4.1.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "ansi-styles": "^4.1.0",
         "supports-color": "^7.1.0"
@@ -4813,7 +4754,6 @@
       "version": "5.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "locate-path": "^6.0.0",
         "path-exists": "^4.0.0"
@@ -4828,14 +4768,12 @@
     "node_modules/eslint/node_modules/json-schema-traverse": {
       "version": "0.4.1",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/eslint/node_modules/locate-path": {
       "version": "6.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "p-locate": "^5.0.0"
       },
@@ -4850,7 +4788,6 @@
       "version": "3.1.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -4862,7 +4799,6 @@
       "version": "3.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "yocto-queue": "^0.1.0"
       },
@@ -4877,7 +4813,6 @@
       "version": "5.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "p-limit": "^3.0.2"
       },
@@ -4892,7 +4827,6 @@
       "version": "4.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=8"
       }
@@ -4901,7 +4835,6 @@
       "version": "7.2.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "has-flag": "^4.0.0"
       },
@@ -4913,7 +4846,6 @@
       "version": "0.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -4925,7 +4857,6 @@
       "version": "9.6.1",
       "dev": true,
       "license": "BSD-2-Clause",
-      "peer": true,
       "dependencies": {
         "acorn": "^8.9.0",
         "acorn-jsx": "^5.3.2",
@@ -4954,7 +4885,6 @@
       "version": "1.6.0",
       "dev": true,
       "license": "BSD-3-Clause",
-      "peer": true,
       "dependencies": {
         "estraverse": "^5.1.0"
       },
@@ -4966,7 +4896,6 @@
       "version": "4.3.0",
       "dev": true,
       "license": "BSD-2-Clause",
-      "peer": true,
       "dependencies": {
         "estraverse": "^5.2.0"
       },
@@ -4978,7 +4907,6 @@
       "version": "5.3.0",
       "dev": true,
       "license": "BSD-2-Clause",
-      "peer": true,
       "engines": {
         "node": ">=4.0"
       }
@@ -4987,7 +4915,6 @@
       "version": "2.0.3",
       "dev": true,
       "license": "BSD-2-Clause",
-      "peer": true,
       "engines": {
         "node": ">=0.10.0"
       }
@@ -5035,14 +4962,12 @@
     "node_modules/fast-json-stable-stringify": {
       "version": "2.1.0",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/fast-levenshtein": {
       "version": "2.0.6",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/fast-uri": {
       "version": "3.1.0",
@@ -5071,7 +4996,6 @@
       "version": "1.19.1",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "reusify": "^1.0.4"
       }
@@ -5102,7 +5026,6 @@
       "version": "6.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "flat-cache": "^3.0.4"
       },
@@ -5162,7 +5085,6 @@
       "version": "3.2.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "flatted": "^3.2.9",
         "keyv": "^4.5.3",
@@ -5176,7 +5098,6 @@
       "version": "1.1.12",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "balanced-match": "^1.0.0",
         "concat-map": "0.0.1"
@@ -5186,7 +5107,6 @@
       "version": "7.2.3",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "fs.realpath": "^1.0.0",
         "inflight": "^1.0.4",
@@ -5206,7 +5126,6 @@
       "version": "3.1.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "brace-expansion": "^1.1.7"
       },
@@ -5218,7 +5137,6 @@
       "version": "3.0.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "glob": "^7.1.3"
       },
@@ -5232,14 +5150,12 @@
     "node_modules/flatted": {
       "version": "3.3.3",
       "dev": true,
-      "license": "ISC",
-      "peer": true
+      "license": "ISC"
     },
     "node_modules/for-each": {
       "version": "0.3.5",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "is-callable": "^1.2.7"
       },
@@ -5350,7 +5266,6 @@
       "version": "1.1.8",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -5370,7 +5285,6 @@
       "version": "1.2.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "funding": {
         "url": "https://github.com/sponsors/ljharb"
       }
@@ -5395,7 +5309,6 @@
       "version": "1.3.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind-apply-helpers": "^1.0.2",
         "es-define-property": "^1.0.1",
@@ -5427,7 +5340,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "dunder-proto": "^1.0.1",
         "es-object-atoms": "^1.0.0"
@@ -5440,7 +5352,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -5500,7 +5411,6 @@
       "version": "6.0.2",
       "dev": true,
       "license": "ISC",
-      "peer": true,
       "dependencies": {
         "is-glob": "^4.0.3"
       },
@@ -5534,7 +5444,6 @@
       "version": "13.24.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "type-fest": "^0.20.2"
       },
@@ -5549,7 +5458,6 @@
       "version": "1.0.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "define-properties": "^1.2.1",
         "gopd": "^1.0.1"
@@ -5565,7 +5473,6 @@
       "version": "1.2.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -5581,8 +5488,7 @@
     "node_modules/graphemer": {
       "version": "1.4.0",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/groff-escape": {
       "version": "2.0.1",
@@ -5625,7 +5531,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -5645,7 +5550,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-define-property": "^1.0.0"
       },
@@ -5657,7 +5561,6 @@
       "version": "1.2.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "dunder-proto": "^1.0.0"
       },
@@ -5672,7 +5575,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -5684,7 +5586,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "has-symbols": "^1.0.3"
       },
@@ -5857,7 +5758,6 @@
       "version": "5.3.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 4"
       }
@@ -5964,7 +5864,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "hasown": "^2.0.2",
@@ -5997,7 +5896,6 @@
       "version": "3.0.5",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -6019,7 +5917,6 @@
       "version": "2.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "async-function": "^1.0.0",
         "call-bound": "^1.0.3",
@@ -6038,7 +5935,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "has-bigints": "^1.0.2"
       },
@@ -6075,7 +5971,6 @@
       "version": "1.2.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-tostringtag": "^1.0.2"
@@ -6091,7 +5986,6 @@
       "version": "1.2.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -6128,7 +6022,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "get-intrinsic": "^1.2.6",
@@ -6145,7 +6038,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "has-tostringtag": "^1.0.2"
@@ -6169,7 +6061,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3"
       },
@@ -6192,7 +6083,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "get-proto": "^1.0.0",
@@ -6221,7 +6111,6 @@
       "version": "2.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -6233,7 +6122,6 @@
       "version": "2.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -6253,7 +6141,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-tostringtag": "^1.0.2"
@@ -6277,7 +6164,6 @@
       "version": "3.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=8"
       }
@@ -6302,7 +6188,6 @@
       "version": "1.2.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "gopd": "^1.2.0",
@@ -6320,7 +6205,6 @@
       "version": "2.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -6332,7 +6216,6 @@
       "version": "1.0.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3"
       },
@@ -6358,7 +6241,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-tostringtag": "^1.0.2"
@@ -6374,7 +6256,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "has-symbols": "^1.1.0",
@@ -6402,7 +6283,6 @@
       "version": "1.1.15",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "which-typed-array": "^1.1.16"
       },
@@ -6422,7 +6302,6 @@
       "version": "2.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -6434,7 +6313,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3"
       },
@@ -6449,7 +6327,6 @@
       "version": "2.0.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "get-intrinsic": "^1.2.6"
@@ -6472,8 +6349,7 @@
     "node_modules/isarray": {
       "version": "2.0.5",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/isexe": {
       "version": "3.1.1",
@@ -6749,6 +6625,7 @@
       "version": "1.4.0",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">= 10.16.0"
       }
@@ -6767,8 +6644,7 @@
     "node_modules/json-buffer": {
       "version": "3.0.1",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/json-parse-even-better-errors": {
       "version": "4.0.0",
@@ -6786,8 +6662,7 @@
     "node_modules/json-stable-stringify-without-jsonify": {
       "version": "1.0.1",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/json-stringify-nice": {
       "version": "1.1.4",
@@ -6886,7 +6761,6 @@
       "version": "4.5.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "json-buffer": "3.0.1"
       }
@@ -6911,7 +6785,6 @@
       "version": "0.4.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "prelude-ls": "^1.2.1",
         "type-check": "~0.4.0"
@@ -7173,7 +7046,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -8258,7 +8130,6 @@
         }
       ],
       "license": "MIT",
-      "peer": true,
       "bin": {
         "nanoid": "bin/nanoid.cjs"
       },
@@ -8269,8 +8140,7 @@
     "node_modules/natural-compare": {
       "version": "1.4.0",
       "dev": true,
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/nearley": {
       "version": "2.20.1",
@@ -8941,7 +8811,6 @@
       "version": "1.13.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       },
@@ -8953,7 +8822,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -8962,7 +8830,6 @@
       "version": "4.1.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -8982,7 +8849,6 @@
       "version": "2.0.8",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
@@ -9000,7 +8866,6 @@
       "version": "1.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
@@ -9014,7 +8879,6 @@
       "version": "1.2.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.3",
@@ -9048,7 +8912,6 @@
       "version": "0.9.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "deep-is": "^0.1.3",
         "fast-levenshtein": "^2.0.6",
@@ -9065,7 +8928,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "get-intrinsic": "^1.2.6",
         "object-keys": "^1.1.1",
@@ -9422,7 +9284,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.4"
       }
@@ -9472,7 +9333,6 @@
       "version": "1.2.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">= 0.8.0"
       }
@@ -9605,8 +9465,7 @@
           "url": "https://feross.org/support"
         }
       ],
-      "license": "MIT",
-      "peer": true
+      "license": "MIT"
     },
     "node_modules/quick-lru": {
       "version": "4.0.1",
@@ -9815,7 +9674,6 @@
       "version": "1.0.10",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -9837,7 +9695,6 @@
       "version": "1.5.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "define-properties": "^1.2.1",
@@ -9857,7 +9714,6 @@
       "version": "3.2.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=8"
       },
@@ -9935,6 +9791,7 @@
       "version": "5.2.2",
       "dev": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@octokit/auth-token": "^4.0.0",
         "@octokit/graphql": "^7.1.0",
@@ -10438,7 +10295,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "iojs": ">=1.0.0",
         "node": ">=0.10.0"
@@ -10487,7 +10343,6 @@
         }
       ],
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "queue-microtask": "^1.2.2"
       }
@@ -10496,7 +10351,6 @@
       "version": "1.1.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.2",
@@ -10515,7 +10369,6 @@
       "version": "1.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "isarray": "^2.0.5"
@@ -10531,7 +10384,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -10589,7 +10441,6 @@
       "version": "1.2.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "define-data-property": "^1.1.4",
         "es-errors": "^1.3.0",
@@ -10606,7 +10457,6 @@
       "version": "2.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "define-data-property": "^1.1.4",
         "es-errors": "^1.3.0",
@@ -10621,7 +10471,6 @@
       "version": "1.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "dunder-proto": "^1.0.1",
         "es-errors": "^1.3.0",
@@ -10654,7 +10503,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "object-inspect": "^1.13.3",
@@ -10673,7 +10521,6 @@
       "version": "1.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "object-inspect": "^1.13.3"
@@ -10689,7 +10536,6 @@
       "version": "1.0.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -10707,7 +10553,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "es-errors": "^1.3.0",
@@ -11039,7 +10884,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "es-errors": "^1.3.0",
         "internal-slot": "^1.1.0"
@@ -11091,7 +10935,6 @@
       "version": "1.2.10",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.2",
@@ -11112,7 +10955,6 @@
       "version": "1.0.9",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "call-bound": "^1.0.2",
@@ -11130,7 +10972,6 @@
       "version": "1.0.8",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "define-properties": "^1.2.1",
@@ -11202,7 +11043,6 @@
       "version": "3.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=8"
       },
@@ -11465,6 +11305,7 @@
       "dev": true,
       "inBundle": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@ampproject/remapping": "^2.2.0",
         "@babel/code-frame": "^7.23.5",
@@ -11921,6 +11762,7 @@
       "dev": true,
       "inBundle": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "@types/prop-types": "*",
         "@types/scheduler": "*",
@@ -12049,6 +11891,7 @@
       ],
       "inBundle": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "caniuse-lite": "^1.0.30001565",
         "electron-to-chromium": "^1.4.601",
@@ -12914,6 +12757,7 @@
       "dev": true,
       "inBundle": true,
       "license": "MIT",
+      "peer": true,
       "dependencies": {
         "loose-envify": "^1.1.0",
         "object-assign": "^4.1.1"
@@ -13679,6 +13523,7 @@
       "version": "4.0.3",
       "inBundle": true,
       "license": "MIT",
+      "peer": true,
       "engines": {
         "node": ">=12"
       },
@@ -13801,7 +13646,6 @@
       "version": "3.15.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "@types/json5": "^0.0.29",
         "json5": "^1.0.2",
@@ -13813,7 +13657,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "minimist": "^1.2.0"
       },
@@ -13825,7 +13668,6 @@
       "version": "3.0.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=4"
       }
@@ -13855,7 +13697,6 @@
       "version": "0.4.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "prelude-ls": "^1.2.1"
       },
@@ -13867,7 +13708,6 @@
       "version": "0.20.2",
       "dev": true,
       "license": "(MIT OR CC0-1.0)",
-      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -13879,7 +13719,6 @@
       "version": "1.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "es-errors": "^1.3.0",
@@ -13893,7 +13732,6 @@
       "version": "1.0.3",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.8",
         "for-each": "^0.3.3",
@@ -13912,7 +13750,6 @@
       "version": "1.0.4",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "available-typed-arrays": "^1.0.7",
         "call-bind": "^1.0.8",
@@ -13933,7 +13770,6 @@
       "version": "1.0.7",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bind": "^1.0.7",
         "for-each": "^0.3.3",
@@ -13986,7 +13822,6 @@
       "version": "1.1.0",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.3",
         "has-bigints": "^1.0.2",
@@ -14362,7 +14197,6 @@
       "version": "1.1.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "is-bigint": "^1.1.0",
         "is-boolean-object": "^1.2.1",
@@ -14381,7 +14215,6 @@
       "version": "1.2.1",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "call-bound": "^1.0.2",
         "function.prototype.name": "^1.1.6",
@@ -14408,7 +14241,6 @@
       "version": "1.0.2",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "is-map": "^2.0.3",
         "is-set": "^2.0.3",
@@ -14431,7 +14263,6 @@
       "version": "1.1.19",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "dependencies": {
         "available-typed-arrays": "^1.0.7",
         "call-bind": "^1.0.8",
@@ -14452,7 +14283,6 @@
       "version": "1.2.5",
       "dev": true,
       "license": "MIT",
-      "peer": true,
       "engines": {
         "node": ">=0.10.0"
       }

From 60aa94b0379b2f4491c5d6857c1cff3036d9a3a9 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Tue, 23 Sep 2025 13:44:01 -0700
Subject: [PATCH 62/63] fix: attach path to json parse error

Includes a TODO noting that @npmcli/package-json should eventually attach the path itself.
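
A minimal sketch of the enrichment this change performs, based only on the diff below; the standalone helper name is hypothetical, and `real` stands for the package directory being loaded in load-actual.js:

```js
const { join } = require('node:path')

// When parsing a package.json fails with EJSONPARSE, attach the path of the
// offending file so downstream error reporting can point at it directly.
function attachJsonParseErrorPath (err, real) {
  if (err.code === 'EJSONPARSE') {
    // TODO (from this patch): @npmcli/package-json should be doing this itself
    err.path = join(real, 'package.json')
  }
  return err
}
```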
---
 workspaces/arborist/lib/arborist/load-actual.js | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/workspaces/arborist/lib/arborist/load-actual.js b/workspaces/arborist/lib/arborist/load-actual.js
index 75836d2fbe4a5..02914a8861bc5 100644
--- a/workspaces/arborist/lib/arborist/load-actual.js
+++ b/workspaces/arborist/lib/arborist/load-actual.js
@@ -1,6 +1,6 @@
 // mix-in implementing the loadActual method
 
-const { relative, dirname, resolve, normalize } = require('node:path')
+const { dirname, join, normalize, relative, resolve } = require('node:path')
 
 const PackageJson = require('@npmcli/package-json')
 const { readdirScoped } = require('@npmcli/fs')
@@ -285,6 +285,10 @@ module.exports = cls => class ActualLoader extends cls {
           params.overrides = root.overrides.getNodeRule({ name: pkg.name, version: pkg.version })
         }
       } catch (err) {
+        if (err.code === 'EJSONPARSE') {
+          // TODO @npmcli/package-json should be doing this
+          err.path = join(real, 'package.json')
+        }
         params.error = err
       }
 

From 849dcb6dc22a16f01869ba9c6bf9146143000b25 Mon Sep 17 00:00:00 2001
From: Gar 
Date: Tue, 23 Sep 2025 14:49:52 -0700
Subject: [PATCH 63/63] deps: tar@7.5.1 (#8589)

---
 DEPENDENCIES.md                               |   2 -
 node_modules/.gitignore                       |  22 +-
 node_modules/chownr/LICENSE                   |  15 -
 .../node_modules => }/chownr/LICENSE.md       |   0
 node_modules/chownr/chownr.js                 | 167 ----
 .../chownr/dist/commonjs/index.js             |   0
 .../chownr/dist/commonjs/package.json         |   0
 .../chownr/dist/esm/index.js                  |   0
 .../dist/mjs => chownr/dist/esm}/package.json |   0
 node_modules/chownr/package.json              |  61 +-
 node_modules/minizlib/dist/commonjs/index.js  |  40 +-
 node_modules/minizlib/dist/esm/index.js       |  37 +-
 node_modules/minizlib/package.json            |   4 +-
 node_modules/mkdirp/LICENSE                   |  21 -
 node_modules/mkdirp/dist/cjs/package.json     |  91 --
 node_modules/mkdirp/dist/cjs/src/bin.js       |  80 --
 node_modules/mkdirp/dist/cjs/src/find-made.js |  35 -
 node_modules/mkdirp/dist/cjs/src/index.js     |  53 -
 .../mkdirp/dist/cjs/src/mkdirp-manual.js      |  79 --
 .../mkdirp/dist/cjs/src/mkdirp-native.js      |  50 -
 node_modules/mkdirp/dist/cjs/src/opts-arg.js  |  38 -
 node_modules/mkdirp/dist/cjs/src/path-arg.js  |  28 -
 .../mkdirp/dist/cjs/src/use-native.js         |  17 -
 node_modules/mkdirp/dist/mjs/find-made.js     |  30 -
 node_modules/mkdirp/dist/mjs/index.js         |  43 -
 node_modules/mkdirp/dist/mjs/mkdirp-manual.js |  75 --
 node_modules/mkdirp/dist/mjs/mkdirp-native.js |  46 -
 node_modules/mkdirp/dist/mjs/opts-arg.js      |  34 -
 node_modules/mkdirp/dist/mjs/path-arg.js      |  24 -
 node_modules/mkdirp/dist/mjs/use-native.js    |  14 -
 node_modules/mkdirp/package.json              |  91 --
 .../node-gyp/node_modules/chownr/package.json |  69 --
 .../node-gyp/node_modules/tar/LICENSE         |  15 -
 .../node_modules/tar/dist/commonjs/list.js    | 136 ---
 .../node_modules/tar/dist/commonjs/mkdir.js   | 209 ----
 .../tar/dist/commonjs/normalize-unicode.js    |  17 -
 .../node_modules/tar/dist/commonjs/parse.js   | 599 ------------
 .../node_modules/tar/dist/commonjs/replace.js | 231 -----
 .../node_modules/tar/dist/commonjs/unpack.js  | 919 -----------------
 .../node_modules/tar/dist/esm/list.js         | 106 --
 .../node_modules/tar/dist/esm/mkdir.js        | 201 ----
 .../tar/dist/esm/normalize-unicode.js         |  13 -
 .../node_modules/tar/dist/esm/unpack.js       | 888 -----------------
 .../node-gyp/node_modules/tar/package.json    | 325 ------
 .../yallist/dist/esm/package.json             |   3 -
 .../pacote/node_modules/chownr/LICENSE.md     |  63 --
 .../chownr/dist/commonjs/index.js             |  93 --
 .../chownr/dist/commonjs/package.json         |   3 -
 .../node_modules/chownr/dist/esm/index.js     |  85 --
 .../node_modules/chownr/dist/esm/package.json |   3 -
 .../pacote/node_modules/chownr/package.json   |  69 --
 node_modules/pacote/node_modules/tar/LICENSE  |  15 -
 .../node_modules/tar/dist/commonjs/create.js  |  83 --
 .../tar/dist/commonjs/cwd-error.js            |  18 -
 .../node_modules/tar/dist/commonjs/extract.js |  78 --
 .../tar/dist/commonjs/get-write-flag.js       |  29 -
 .../node_modules/tar/dist/commonjs/header.js  | 306 ------
 .../node_modules/tar/dist/commonjs/index.js   |  54 -
 .../tar/dist/commonjs/large-numbers.js        |  99 --
 .../tar/dist/commonjs/make-command.js         |  61 --
 .../tar/dist/commonjs/mode-fix.js             |  29 -
 .../dist/commonjs/normalize-windows-path.js   |  12 -
 .../node_modules/tar/dist/commonjs/options.js |  66 --
 .../node_modules/tar/dist/commonjs/pack.js    | 477 ---------
 .../tar/dist/commonjs/package.json            |   3 -
 .../tar/dist/commonjs/path-reservations.js    | 170 ----
 .../node_modules/tar/dist/commonjs/pax.js     | 158 ---
 .../tar/dist/commonjs/read-entry.js           | 140 ---
 .../tar/dist/commonjs/strip-absolute-path.js  |  29 -
 .../dist/commonjs/strip-trailing-slashes.js   |  18 -
 .../tar/dist/commonjs/symlink-error.js        |  19 -
 .../node_modules/tar/dist/commonjs/types.js   |  50 -
 .../node_modules/tar/dist/commonjs/update.js  |  33 -
 .../tar/dist/commonjs/warn-method.js          |  31 -
 .../tar/dist/commonjs/winchars.js             |  14 -
 .../tar/dist/commonjs/write-entry.js          | 689 -------------
 .../node_modules/tar/dist/esm/create.js       |  77 --
 .../node_modules/tar/dist/esm/cwd-error.js    |  14 -
 .../node_modules/tar/dist/esm/extract.js      |  49 -
 .../tar/dist/esm/get-write-flag.js            |  23 -
 .../node_modules/tar/dist/esm/header.js       | 279 ------
 .../pacote/node_modules/tar/dist/esm/index.js |  20 -
 .../tar/dist/esm/large-numbers.js             |  94 --
 .../node_modules/tar/dist/esm/make-command.js |  57 --
 .../node_modules/tar/dist/esm/mode-fix.js     |  25 -
 .../tar/dist/esm/normalize-unicode.js         |  13 -
 .../tar/dist/esm/normalize-windows-path.js    |   9 -
 .../node_modules/tar/dist/esm/options.js      |  54 -
 .../pacote/node_modules/tar/dist/esm/pack.js  | 445 ---------
 .../node_modules/tar/dist/esm/package.json    |   3 -
 .../pacote/node_modules/tar/dist/esm/parse.js | 595 -----------
 .../tar/dist/esm/path-reservations.js         | 166 ----
 .../pacote/node_modules/tar/dist/esm/pax.js   | 154 ---
 .../node_modules/tar/dist/esm/read-entry.js   | 136 ---
 .../node_modules/tar/dist/esm/replace.js      | 225 -----
 .../tar/dist/esm/strip-absolute-path.js       |  25 -
 .../tar/dist/esm/strip-trailing-slashes.js    |  14 -
 .../tar/dist/esm/symlink-error.js             |  15 -
 .../pacote/node_modules/tar/dist/esm/types.js |  45 -
 .../node_modules/tar/dist/esm/update.js       |  30 -
 .../node_modules/tar/dist/esm/warn-method.js  |  27 -
 .../node_modules/tar/dist/esm/winchars.js     |   9 -
 .../node_modules/tar/dist/esm/write-entry.js  | 657 -------------
 .../pacote/node_modules/tar/package.json      | 325 ------
 .../pacote/node_modules/yallist/LICENSE.md    |  63 --
 .../yallist/dist/commonjs/index.js            | 384 --------
 .../yallist/dist/commonjs/package.json        |   3 -
 .../node_modules/yallist/dist/esm/index.js    | 379 -------
 .../yallist/dist/esm/package.json             |   3 -
 .../pacote/node_modules/yallist/package.json  |  68 --
 .../tar/dist/commonjs/create.js               |   0
 .../tar/dist/commonjs/cwd-error.js            |   0
 .../tar/dist/commonjs/extract.js              |   0
 .../tar/dist/commonjs/get-write-flag.js       |   0
 .../tar/dist/commonjs/header.js               |   0
 .../tar/dist/commonjs/index.js                |   0
 .../tar/dist/commonjs/large-numbers.js        |   0
 .../tar/dist/commonjs/list.js                 |   8 +-
 .../tar/dist/commonjs/make-command.js         |   0
 .../tar/dist/commonjs/mkdir.js                |  65 +-
 .../tar/dist/commonjs/mode-fix.js             |   0
 .../tar/dist/commonjs/normalize-unicode.js    |  23 +-
 .../dist/commonjs/normalize-windows-path.js   |   0
 .../tar/dist/commonjs/options.js              |   0
 .../tar/dist/commonjs/pack.js                 |  20 +-
 .../tar/dist/commonjs/package.json            |   0
 .../tar/dist/commonjs/parse.js                |  43 +-
 .../tar/dist/commonjs/path-reservations.js    |   0
 .../tar/dist/commonjs/pax.js                  |   0
 .../tar/dist/commonjs/read-entry.js           |   0
 .../tar/dist/commonjs/replace.js              |   1 +
 .../tar/dist/commonjs/strip-absolute-path.js  |   0
 .../dist/commonjs/strip-trailing-slashes.js   |   0
 .../tar/dist/commonjs/symlink-error.js        |   0
 .../tar/dist/commonjs/types.js                |   0
 .../tar/dist/commonjs/unpack.js               |  58 +-
 .../tar/dist/commonjs/update.js               |   0
 .../tar/dist/commonjs/warn-method.js          |   0
 .../tar/dist/commonjs/winchars.js             |   0
 .../tar/dist/commonjs/write-entry.js          |   0
 .../node_modules => }/tar/dist/esm/create.js  |   0
 .../tar/dist/esm/cwd-error.js                 |   0
 .../node_modules => }/tar/dist/esm/extract.js |   0
 .../tar/dist/esm/get-write-flag.js            |   0
 .../node_modules => }/tar/dist/esm/header.js  |   0
 .../node_modules => }/tar/dist/esm/index.js   |   0
 .../tar/dist/esm/large-numbers.js             |   0
 .../node_modules => }/tar/dist/esm/list.js    |   8 +-
 .../tar/dist/esm/make-command.js              |   0
 .../node_modules => }/tar/dist/esm/mkdir.js   |  45 +-
 .../tar/dist/esm/mode-fix.js                  |   0
 .../tar/dist/esm/normalize-unicode.js         |  30 +
 .../tar/dist/esm/normalize-windows-path.js    |   0
 .../node_modules => }/tar/dist/esm/options.js |   0
 .../node_modules => }/tar/dist/esm/pack.js    |  20 +-
 .../chownr => tar}/dist/esm/package.json      |   0
 .../node_modules => }/tar/dist/esm/parse.js   |  45 +-
 .../tar/dist/esm/path-reservations.js         |   0
 .../node_modules => }/tar/dist/esm/pax.js     |   0
 .../tar/dist/esm/read-entry.js                |   0
 .../node_modules => }/tar/dist/esm/replace.js |   1 +
 .../tar/dist/esm/strip-absolute-path.js       |   0
 .../tar/dist/esm/strip-trailing-slashes.js    |   0
 .../tar/dist/esm/symlink-error.js             |   0
 .../node_modules => }/tar/dist/esm/types.js   |   0
 .../node_modules => }/tar/dist/esm/unpack.js  |  58 +-
 .../node_modules => }/tar/dist/esm/update.js  |   0
 .../tar/dist/esm/warn-method.js               |   0
 .../tar/dist/esm/winchars.js                  |   0
 .../tar/dist/esm/write-entry.js               |   0
 node_modules/tar/index.js                     |  18 -
 node_modules/tar/lib/create.js                | 111 ---
 node_modules/tar/lib/extract.js               | 113 ---
 node_modules/tar/lib/get-write-flag.js        |  20 -
 node_modules/tar/lib/header.js                | 304 ------
 node_modules/tar/lib/high-level-opt.js        |  29 -
 node_modules/tar/lib/large-numbers.js         | 104 --
 node_modules/tar/lib/list.js                  | 139 ---
 node_modules/tar/lib/mkdir.js                 | 229 -----
 node_modules/tar/lib/mode-fix.js              |  27 -
 node_modules/tar/lib/normalize-unicode.js     |  12 -
 .../tar/lib/normalize-windows-path.js         |   8 -
 node_modules/tar/lib/pack.js                  | 432 --------
 node_modules/tar/lib/parse.js                 | 552 -----------
 node_modules/tar/lib/path-reservations.js     | 156 ---
 node_modules/tar/lib/pax.js                   | 150 ---
 node_modules/tar/lib/read-entry.js            | 107 --
 node_modules/tar/lib/replace.js               | 246 -----
 node_modules/tar/lib/strip-absolute-path.js   |  24 -
 .../tar/lib/strip-trailing-slashes.js         |  13 -
 node_modules/tar/lib/types.js                 |  44 -
 node_modules/tar/lib/unpack.js                | 923 ------------------
 node_modules/tar/lib/update.js                |  40 -
 node_modules/tar/lib/warn-mixin.js            |  24 -
 node_modules/tar/lib/winchars.js              |  23 -
 node_modules/tar/lib/write-entry.js           | 546 -----------
 .../tar/node_modules/fs-minipass/LICENSE      |  15 -
 .../tar/node_modules/fs-minipass/index.js     | 422 --------
 .../fs-minipass/node_modules/minipass/LICENSE |  15 -
 .../node_modules/minipass/index.js            | 649 ------------
 .../node_modules/minipass/package.json        |  56 --
 .../fs-minipass/node_modules/yallist/LICENSE  |  15 -
 .../node_modules/yallist/iterator.js          |   8 -
 .../node_modules/yallist/package.json         |  29 -
 .../node_modules/yallist/yallist.js           | 426 --------
 .../tar/node_modules/fs-minipass/package.json |  39 -
 .../tar/node_modules/minipass/LICENSE         |  15 -
 .../tar/node_modules/minipass/index.js        | 702 -------------
 .../tar/node_modules/minipass/index.mjs       | 702 -------------
 .../tar/node_modules/minipass/package.json    |  76 --
 .../tar/node_modules/minizlib/LICENSE         |  26 -
 .../tar/node_modules/minizlib/constants.js    | 115 ---
 .../tar/node_modules/minizlib/index.js        | 348 -------
 .../minizlib/node_modules/minipass/LICENSE    |  15 -
 .../minizlib/node_modules/minipass/index.js   | 649 ------------
 .../node_modules/minipass/package.json        |  56 --
 .../minizlib/node_modules/yallist/LICENSE     |  15 -
 .../minizlib/node_modules/yallist/iterator.js |   8 -
 .../node_modules/yallist/package.json         |  29 -
 .../minizlib/node_modules/yallist/yallist.js  | 426 --------
 .../tar/node_modules/minizlib/package.json    |  42 -
 node_modules/tar/node_modules/mkdirp/LICENSE  |  21 -
 .../tar/node_modules/mkdirp/bin/cmd.js        |  68 --
 node_modules/tar/node_modules/mkdirp/index.js |  31 -
 .../tar/node_modules/mkdirp/lib/find-made.js  |  29 -
 .../node_modules/mkdirp/lib/mkdirp-manual.js  |  64 --
 .../node_modules/mkdirp/lib/mkdirp-native.js  |  39 -
 .../tar/node_modules/mkdirp/lib/opts-arg.js   |  23 -
 .../tar/node_modules/mkdirp/lib/path-arg.js   |  29 -
 .../tar/node_modules/mkdirp/lib/use-native.js |  10 -
 .../tar/node_modules/mkdirp/package.json      |  44 -
 .../node_modules/yallist/LICENSE.md           |   0
 .../yallist/dist/commonjs/index.js            |   0
 .../yallist/dist/commonjs/package.json        |   0
 .../node_modules/yallist/dist/esm/index.js    |   0
 .../yallist}/dist/esm/package.json            |   0
 .../node_modules/yallist/package.json         |   0
 node_modules/tar/package.json                 | 338 ++++++-
 package-lock.json                             | 185 +---
 package.json                                  |   2 +-
 workspaces/libnpmdiff/lib/untar.js            |   4 +-
 workspaces/libnpmdiff/package.json            |   2 +-
 242 files changed, 646 insertions(+), 22370 deletions(-)
 delete mode 100644 node_modules/chownr/LICENSE
 rename node_modules/{node-gyp/node_modules => }/chownr/LICENSE.md (100%)
 delete mode 100644 node_modules/chownr/chownr.js
 rename node_modules/{node-gyp/node_modules => }/chownr/dist/commonjs/index.js (100%)
 rename node_modules/{node-gyp/node_modules => }/chownr/dist/commonjs/package.json (100%)
 rename node_modules/{node-gyp/node_modules => }/chownr/dist/esm/index.js (100%)
 rename node_modules/{mkdirp/dist/mjs => chownr/dist/esm}/package.json (100%)
 delete mode 100644 node_modules/mkdirp/LICENSE
 delete mode 100644 node_modules/mkdirp/dist/cjs/package.json
 delete mode 100755 node_modules/mkdirp/dist/cjs/src/bin.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/find-made.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/index.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/opts-arg.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/path-arg.js
 delete mode 100644 node_modules/mkdirp/dist/cjs/src/use-native.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/find-made.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/index.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/mkdirp-manual.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/mkdirp-native.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/opts-arg.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/path-arg.js
 delete mode 100644 node_modules/mkdirp/dist/mjs/use-native.js
 delete mode 100644 node_modules/mkdirp/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/chownr/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/tar/LICENSE
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/commonjs/list.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/commonjs/parse.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/esm/list.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/esm/mkdir.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js
 delete mode 100644 node_modules/node-gyp/node_modules/tar/package.json
 delete mode 100644 node_modules/node-gyp/node_modules/yallist/dist/esm/package.json
 delete mode 100644 node_modules/pacote/node_modules/chownr/LICENSE.md
 delete mode 100644 node_modules/pacote/node_modules/chownr/dist/commonjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/chownr/dist/commonjs/package.json
 delete mode 100644 node_modules/pacote/node_modules/chownr/dist/esm/index.js
 delete mode 100644 node_modules/pacote/node_modules/chownr/dist/esm/package.json
 delete mode 100644 node_modules/pacote/node_modules/chownr/package.json
 delete mode 100644 node_modules/pacote/node_modules/tar/LICENSE
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/create.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/extract.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/header.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/options.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/pack.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/package.json
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/pax.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/types.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/update.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/create.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/extract.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/header.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/index.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/make-command.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/options.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/pack.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/package.json
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/parse.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/pax.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/read-entry.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/replace.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/types.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/update.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/warn-method.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/winchars.js
 delete mode 100644 node_modules/pacote/node_modules/tar/dist/esm/write-entry.js
 delete mode 100644 node_modules/pacote/node_modules/tar/package.json
 delete mode 100644 node_modules/pacote/node_modules/yallist/LICENSE.md
 delete mode 100644 node_modules/pacote/node_modules/yallist/dist/commonjs/index.js
 delete mode 100644 node_modules/pacote/node_modules/yallist/dist/commonjs/package.json
 delete mode 100644 node_modules/pacote/node_modules/yallist/dist/esm/index.js
 delete mode 100644 node_modules/pacote/node_modules/yallist/dist/esm/package.json
 delete mode 100644 node_modules/pacote/node_modules/yallist/package.json
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/create.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/cwd-error.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/extract.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/get-write-flag.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/header.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/index.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/large-numbers.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/commonjs/list.js (94%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/make-command.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/commonjs/mkdir.js (71%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/mode-fix.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/commonjs/normalize-unicode.js (50%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/normalize-windows-path.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/options.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/pack.js (93%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/package.json (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/commonjs/parse.js (93%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/path-reservations.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/pax.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/read-entry.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/commonjs/replace.js (99%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/strip-absolute-path.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/strip-trailing-slashes.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/symlink-error.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/types.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/commonjs/unpack.js (92%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/update.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/warn-method.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/winchars.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/commonjs/write-entry.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/create.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/cwd-error.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/extract.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/get-write-flag.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/header.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/index.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/large-numbers.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/esm/list.js (93%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/make-command.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/esm/mkdir.js (77%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/mode-fix.js (100%)
 create mode 100644 node_modules/tar/dist/esm/normalize-unicode.js
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/normalize-windows-path.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/options.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/pack.js (92%)
 rename node_modules/{node-gyp/node_modules/chownr => tar}/dist/esm/package.json (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/parse.js (92%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/path-reservations.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/pax.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/read-entry.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/replace.js (99%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/strip-absolute-path.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/strip-trailing-slashes.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/symlink-error.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/types.js (100%)
 rename node_modules/{pacote/node_modules => }/tar/dist/esm/unpack.js (92%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/update.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/warn-method.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/winchars.js (100%)
 rename node_modules/{node-gyp/node_modules => }/tar/dist/esm/write-entry.js (100%)
 delete mode 100644 node_modules/tar/index.js
 delete mode 100644 node_modules/tar/lib/create.js
 delete mode 100644 node_modules/tar/lib/extract.js
 delete mode 100644 node_modules/tar/lib/get-write-flag.js
 delete mode 100644 node_modules/tar/lib/header.js
 delete mode 100644 node_modules/tar/lib/high-level-opt.js
 delete mode 100644 node_modules/tar/lib/large-numbers.js
 delete mode 100644 node_modules/tar/lib/list.js
 delete mode 100644 node_modules/tar/lib/mkdir.js
 delete mode 100644 node_modules/tar/lib/mode-fix.js
 delete mode 100644 node_modules/tar/lib/normalize-unicode.js
 delete mode 100644 node_modules/tar/lib/normalize-windows-path.js
 delete mode 100644 node_modules/tar/lib/pack.js
 delete mode 100644 node_modules/tar/lib/parse.js
 delete mode 100644 node_modules/tar/lib/path-reservations.js
 delete mode 100644 node_modules/tar/lib/pax.js
 delete mode 100644 node_modules/tar/lib/read-entry.js
 delete mode 100644 node_modules/tar/lib/replace.js
 delete mode 100644 node_modules/tar/lib/strip-absolute-path.js
 delete mode 100644 node_modules/tar/lib/strip-trailing-slashes.js
 delete mode 100644 node_modules/tar/lib/types.js
 delete mode 100644 node_modules/tar/lib/unpack.js
 delete mode 100644 node_modules/tar/lib/update.js
 delete mode 100644 node_modules/tar/lib/warn-mixin.js
 delete mode 100644 node_modules/tar/lib/winchars.js
 delete mode 100644 node_modules/tar/lib/write-entry.js
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/LICENSE
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/index.js
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.js
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/minipass/package.json
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js
 delete mode 100644 node_modules/tar/node_modules/fs-minipass/package.json
 delete mode 100644 node_modules/tar/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/tar/node_modules/minipass/index.js
 delete mode 100644 node_modules/tar/node_modules/minipass/index.mjs
 delete mode 100644 node_modules/tar/node_modules/minipass/package.json
 delete mode 100644 node_modules/tar/node_modules/minizlib/LICENSE
 delete mode 100644 node_modules/tar/node_modules/minizlib/constants.js
 delete mode 100644 node_modules/tar/node_modules/minizlib/index.js
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json
 delete mode 100644 node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js
 delete mode 100644 node_modules/tar/node_modules/minizlib/package.json
 delete mode 100644 node_modules/tar/node_modules/mkdirp/LICENSE
 delete mode 100755 node_modules/tar/node_modules/mkdirp/bin/cmd.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/index.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/lib/find-made.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/lib/path-arg.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/lib/use-native.js
 delete mode 100644 node_modules/tar/node_modules/mkdirp/package.json
 rename node_modules/{node-gyp => tar}/node_modules/yallist/LICENSE.md (100%)
 rename node_modules/{node-gyp => tar}/node_modules/yallist/dist/commonjs/index.js (100%)
 rename node_modules/{node-gyp => tar}/node_modules/yallist/dist/commonjs/package.json (100%)
 rename node_modules/{node-gyp => tar}/node_modules/yallist/dist/esm/index.js (100%)
 rename node_modules/{node-gyp/node_modules/tar => tar/node_modules/yallist}/dist/esm/package.json (100%)
 rename node_modules/{node-gyp => tar}/node_modules/yallist/package.json (100%)

diff --git a/DEPENDENCIES.md b/DEPENDENCIES.md
index e7b7e57f50615..fe2088c69d843 100644
--- a/DEPENDENCIES.md
+++ b/DEPENDENCIES.md
@@ -738,11 +738,9 @@ graph LR;
   string-width-->strip-ansi;
   strip-ansi-->ansi-regex;
   tar-->chownr;
-  tar-->fs-minipass;
   tar-->isaacs-fs-minipass["@isaacs/fs-minipass"];
   tar-->minipass;
   tar-->minizlib;
-  tar-->mkdirp;
   tar-->yallist;
   tinyglobby-->fdir;
   tinyglobby-->picomatch;
diff --git a/node_modules/.gitignore b/node_modules/.gitignore
index 2f70b335d6fa5..42ee4e89b73fa 100644
--- a/node_modules/.gitignore
+++ b/node_modules/.gitignore
@@ -127,7 +127,6 @@
 !/minipass-sized/node_modules/minipass
 !/minipass
 !/minizlib
-!/mkdirp
 !/ms
 !/mute-stream
 !/negotiator
@@ -138,15 +137,12 @@
 /node-gyp/node_modules/@npmcli/*
 !/node-gyp/node_modules/@npmcli/agent
 !/node-gyp/node_modules/cacache
-!/node-gyp/node_modules/chownr
 !/node-gyp/node_modules/glob
 !/node-gyp/node_modules/jackspeak
 !/node-gyp/node_modules/lru-cache
 !/node-gyp/node_modules/make-fetch-happen
 !/node-gyp/node_modules/minimatch
 !/node-gyp/node_modules/path-scurry
-!/node-gyp/node_modules/tar
-!/node-gyp/node_modules/yallist
 !/nopt
 !/normalize-package-data
 !/npm-audit-report
@@ -162,11 +158,6 @@
 !/p-map
 !/package-json-from-dist
 !/pacote
-!/pacote/node_modules/
-/pacote/node_modules/*
-!/pacote/node_modules/chownr
-!/pacote/node_modules/tar
-!/pacote/node_modules/yallist
 !/parse-conflict-json
 !/path-key
 !/path-scurry
@@ -206,18 +197,7 @@
 !/tar
 !/tar/node_modules/
 /tar/node_modules/*
-!/tar/node_modules/fs-minipass
-!/tar/node_modules/fs-minipass/node_modules/
-/tar/node_modules/fs-minipass/node_modules/*
-!/tar/node_modules/fs-minipass/node_modules/minipass
-!/tar/node_modules/fs-minipass/node_modules/yallist
-!/tar/node_modules/minipass
-!/tar/node_modules/minizlib
-!/tar/node_modules/minizlib/node_modules/
-/tar/node_modules/minizlib/node_modules/*
-!/tar/node_modules/minizlib/node_modules/minipass
-!/tar/node_modules/minizlib/node_modules/yallist
-!/tar/node_modules/mkdirp
+!/tar/node_modules/yallist
 !/text-table
 !/tiny-relative-date
 !/tinyglobby
diff --git a/node_modules/chownr/LICENSE b/node_modules/chownr/LICENSE
deleted file mode 100644
index c925dbe826b67..0000000000000
--- a/node_modules/chownr/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2016-2022 Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/chownr/LICENSE.md b/node_modules/chownr/LICENSE.md
similarity index 100%
rename from node_modules/node-gyp/node_modules/chownr/LICENSE.md
rename to node_modules/chownr/LICENSE.md
diff --git a/node_modules/chownr/chownr.js b/node_modules/chownr/chownr.js
deleted file mode 100644
index 0d40932169654..0000000000000
--- a/node_modules/chownr/chownr.js
+++ /dev/null
@@ -1,167 +0,0 @@
-'use strict'
-const fs = require('fs')
-const path = require('path')
-
-/* istanbul ignore next */
-const LCHOWN = fs.lchown ? 'lchown' : 'chown'
-/* istanbul ignore next */
-const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
-
-/* istanbul ignore next */
-const needEISDIRHandled = fs.lchown &&
-  !process.version.match(/v1[1-9]+\./) &&
-  !process.version.match(/v10\.[6-9]/)
-
-const lchownSync = (path, uid, gid) => {
-  try {
-    return fs[LCHOWNSYNC](path, uid, gid)
-  } catch (er) {
-    if (er.code !== 'ENOENT')
-      throw er
-  }
-}
-
-/* istanbul ignore next */
-const chownSync = (path, uid, gid) => {
-  try {
-    return fs.chownSync(path, uid, gid)
-  } catch (er) {
-    if (er.code !== 'ENOENT')
-      throw er
-  }
-}
-
-/* istanbul ignore next */
-const handleEISDIR =
-  needEISDIRHandled ? (path, uid, gid, cb) => er => {
-    // Node prior to v10 had a very questionable implementation of
-    // fs.lchown, which would always try to call fs.open on a directory
-    // Fall back to fs.chown in those cases.
-    if (!er || er.code !== 'EISDIR')
-      cb(er)
-    else
-      fs.chown(path, uid, gid, cb)
-  }
-  : (_, __, ___, cb) => cb
-
-/* istanbul ignore next */
-const handleEISDirSync =
-  needEISDIRHandled ? (path, uid, gid) => {
-    try {
-      return lchownSync(path, uid, gid)
-    } catch (er) {
-      if (er.code !== 'EISDIR')
-        throw er
-      chownSync(path, uid, gid)
-    }
-  }
-  : (path, uid, gid) => lchownSync(path, uid, gid)
-
-// fs.readdir could only accept an options object as of node v6
-const nodeVersion = process.version
-let readdir = (path, options, cb) => fs.readdir(path, options, cb)
-let readdirSync = (path, options) => fs.readdirSync(path, options)
-/* istanbul ignore next */
-if (/^v4\./.test(nodeVersion))
-  readdir = (path, options, cb) => fs.readdir(path, cb)
-
-const chown = (cpath, uid, gid, cb) => {
-  fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
-    // Skip ENOENT error
-    cb(er && er.code !== 'ENOENT' ? er : null)
-  }))
-}
-
-const chownrKid = (p, child, uid, gid, cb) => {
-  if (typeof child === 'string')
-    return fs.lstat(path.resolve(p, child), (er, stats) => {
-      // Skip ENOENT error
-      if (er)
-        return cb(er.code !== 'ENOENT' ? er : null)
-      stats.name = child
-      chownrKid(p, stats, uid, gid, cb)
-    })
-
-  if (child.isDirectory()) {
-    chownr(path.resolve(p, child.name), uid, gid, er => {
-      if (er)
-        return cb(er)
-      const cpath = path.resolve(p, child.name)
-      chown(cpath, uid, gid, cb)
-    })
-  } else {
-    const cpath = path.resolve(p, child.name)
-    chown(cpath, uid, gid, cb)
-  }
-}
-
-
-const chownr = (p, uid, gid, cb) => {
-  readdir(p, { withFileTypes: true }, (er, children) => {
-    // any error other than ENOTDIR or ENOTSUP means it's not readable,
-    // or doesn't exist.  give up.
-    if (er) {
-      if (er.code === 'ENOENT')
-        return cb()
-      else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-        return cb(er)
-    }
-    if (er || !children.length)
-      return chown(p, uid, gid, cb)
-
-    let len = children.length
-    let errState = null
-    const then = er => {
-      if (errState)
-        return
-      if (er)
-        return cb(errState = er)
-      if (-- len === 0)
-        return chown(p, uid, gid, cb)
-    }
-
-    children.forEach(child => chownrKid(p, child, uid, gid, then))
-  })
-}
-
-const chownrKidSync = (p, child, uid, gid) => {
-  if (typeof child === 'string') {
-    try {
-      const stats = fs.lstatSync(path.resolve(p, child))
-      stats.name = child
-      child = stats
-    } catch (er) {
-      if (er.code === 'ENOENT')
-        return
-      else
-        throw er
-    }
-  }
-
-  if (child.isDirectory())
-    chownrSync(path.resolve(p, child.name), uid, gid)
-
-  handleEISDirSync(path.resolve(p, child.name), uid, gid)
-}
-
-const chownrSync = (p, uid, gid) => {
-  let children
-  try {
-    children = readdirSync(p, { withFileTypes: true })
-  } catch (er) {
-    if (er.code === 'ENOENT')
-      return
-    else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
-      return handleEISDirSync(p, uid, gid)
-    else
-      throw er
-  }
-
-  if (children && children.length)
-    children.forEach(child => chownrKidSync(p, child, uid, gid))
-
-  return handleEISDirSync(p, uid, gid)
-}
-
-module.exports = chownr
-chownr.sync = chownrSync
diff --git a/node_modules/node-gyp/node_modules/chownr/dist/commonjs/index.js b/node_modules/chownr/dist/commonjs/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/chownr/dist/commonjs/index.js
rename to node_modules/chownr/dist/commonjs/index.js
diff --git a/node_modules/node-gyp/node_modules/chownr/dist/commonjs/package.json b/node_modules/chownr/dist/commonjs/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/chownr/dist/commonjs/package.json
rename to node_modules/chownr/dist/commonjs/package.json
diff --git a/node_modules/node-gyp/node_modules/chownr/dist/esm/index.js b/node_modules/chownr/dist/esm/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/chownr/dist/esm/index.js
rename to node_modules/chownr/dist/esm/index.js
diff --git a/node_modules/mkdirp/dist/mjs/package.json b/node_modules/chownr/dist/esm/package.json
similarity index 100%
rename from node_modules/mkdirp/dist/mjs/package.json
rename to node_modules/chownr/dist/esm/package.json
diff --git a/node_modules/chownr/package.json b/node_modules/chownr/package.json
index 5b0214ca12e3f..09aa6b2e2e576 100644
--- a/node_modules/chownr/package.json
+++ b/node_modules/chownr/package.json
@@ -2,31 +2,68 @@
   "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
   "name": "chownr",
   "description": "like `chown -R`",
-  "version": "2.0.0",
+  "version": "3.0.0",
   "repository": {
     "type": "git",
     "url": "git://github.com/isaacs/chownr.git"
   },
-  "main": "chownr.js",
   "files": [
-    "chownr.js"
+    "dist"
   ],
   "devDependencies": {
-    "mkdirp": "0.3",
-    "rimraf": "^2.7.1",
-    "tap": "^14.10.6"
-  },
-  "tap": {
-    "check-coverage": true
+    "@types/node": "^20.12.5",
+    "mkdirp": "^3.0.1",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.12"
   },
   "scripts": {
+    "prepare": "tshy",
+    "pretest": "npm run prepare",
     "test": "tap",
     "preversion": "npm test",
     "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags"
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --loglevel warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
   },
-  "license": "ISC",
+  "license": "BlueOak-1.0.0",
   "engines": {
-    "node": ">=10"
+    "node": ">=18"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    }
+  },
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "type": "module",
+  "prettier": {
+    "semi": false,
+    "printWidth": 75,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
   }
 }
diff --git a/node_modules/minizlib/dist/commonjs/index.js b/node_modules/minizlib/dist/commonjs/index.js
index b4906d2783372..78c6536baf6be 100644
--- a/node_modules/minizlib/dist/commonjs/index.js
+++ b/node_modules/minizlib/dist/commonjs/index.js
@@ -36,7 +36,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
+exports.ZstdDecompress = exports.ZstdCompress = exports.BrotliDecompress = exports.BrotliCompress = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0;
 const assert_1 = __importDefault(require("assert"));
 const buffer_1 = require("buffer");
 const minipass_1 = require("minipass");
@@ -56,15 +56,15 @@ const _superWrite = Symbol('_superWrite');
 class ZlibError extends Error {
     code;
     errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
+    constructor(err, origin) {
+        super('zlib: ' + err.message, { cause: err });
         this.code = err.code;
         this.errno = err.errno;
         /* c8 ignore next */
         if (!this.code)
             this.code = 'ZLIB_ERROR';
         this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
+        Error.captureStackTrace(this, origin ?? this.constructor);
     }
     get name() {
         return 'ZlibError';
@@ -105,6 +105,10 @@ class ZlibBase extends minipass_1.Minipass {
         this.#finishFlushFlag = opts.finishFlush ?? 0;
         this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
         /* c8 ignore stop */
+        //@ts-ignore
+        if (typeof realZlib[mode] !== 'function') {
+            throw new TypeError('Compression method not supported: ' + mode);
+        }
         // this will throw if any options are invalid for the class selected
         try {
             // @types/node doesn't know that it exports the classes, but they're there
@@ -113,7 +117,7 @@ class ZlibBase extends minipass_1.Minipass {
         }
         catch (er) {
             // make sure that all errors get decorated properly
-            throw new ZlibError(er);
+            throw new ZlibError(er, this.constructor);
         }
         this.#onError = err => {
             // no sense raising multiple errors, since we abort on the first one.
@@ -213,7 +217,7 @@ class ZlibBase extends minipass_1.Minipass {
             // or if we do, put Buffer.concat() back before we emit error
             // Error events call into user code, which may call Buffer.concat()
             passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
+            this.#onError(new ZlibError(err, this.write));
         }
         finally {
             if (this.#handle) {
@@ -232,7 +236,7 @@ class ZlibBase extends minipass_1.Minipass {
             }
         }
         if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+            this.#handle.on('error', er => this.#onError(new ZlibError(er, this.write)));
         let writeReturn;
         if (result) {
             if (Array.isArray(result) && result.length > 0) {
@@ -376,7 +380,6 @@ class Brotli extends ZlibBase {
         super(opts, mode);
     }
 }
-exports.Brotli = Brotli;
 class BrotliCompress extends Brotli {
     constructor(opts) {
         super(opts, 'BrotliCompress');
@@ -389,4 +392,25 @@ class BrotliDecompress extends Brotli {
     }
 }
 exports.BrotliDecompress = BrotliDecompress;
+class Zstd extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants_js_1.constants.ZSTD_e_continue;
+        opts.finishFlush = opts.finishFlush || constants_js_1.constants.ZSTD_e_end;
+        opts.fullFlushFlag = constants_js_1.constants.ZSTD_e_flush;
+        super(opts, mode);
+    }
+}
+class ZstdCompress extends Zstd {
+    constructor(opts) {
+        super(opts, 'ZstdCompress');
+    }
+}
+exports.ZstdCompress = ZstdCompress;
+class ZstdDecompress extends Zstd {
+    constructor(opts) {
+        super(opts, 'ZstdDecompress');
+    }
+}
+exports.ZstdDecompress = ZstdDecompress;
 //# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/minizlib/dist/esm/index.js b/node_modules/minizlib/dist/esm/index.js
index f33586a8ab0ec..b70ba1f2cd84f 100644
--- a/node_modules/minizlib/dist/esm/index.js
+++ b/node_modules/minizlib/dist/esm/index.js
@@ -16,15 +16,15 @@ const _superWrite = Symbol('_superWrite');
 export class ZlibError extends Error {
     code;
     errno;
-    constructor(err) {
-        super('zlib: ' + err.message);
+    constructor(err, origin) {
+        super('zlib: ' + err.message, { cause: err });
         this.code = err.code;
         this.errno = err.errno;
         /* c8 ignore next */
         if (!this.code)
             this.code = 'ZLIB_ERROR';
         this.message = 'zlib: ' + err.message;
-        Error.captureStackTrace(this, this.constructor);
+        Error.captureStackTrace(this, origin ?? this.constructor);
     }
     get name() {
         return 'ZlibError';
@@ -64,6 +64,10 @@ class ZlibBase extends Minipass {
         this.#finishFlushFlag = opts.finishFlush ?? 0;
         this.#fullFlushFlag = opts.fullFlushFlag ?? 0;
         /* c8 ignore stop */
+        //@ts-ignore
+        if (typeof realZlib[mode] !== 'function') {
+            throw new TypeError('Compression method not supported: ' + mode);
+        }
         // this will throw if any options are invalid for the class selected
         try {
             // @types/node doesn't know that it exports the classes, but they're there
@@ -72,7 +76,7 @@ class ZlibBase extends Minipass {
         }
         catch (er) {
             // make sure that all errors get decorated properly
-            throw new ZlibError(er);
+            throw new ZlibError(er, this.constructor);
         }
         this.#onError = err => {
             // no sense raising multiple errors, since we abort on the first one.
@@ -172,7 +176,7 @@ class ZlibBase extends Minipass {
             // or if we do, put Buffer.concat() back before we emit error
             // Error events call into user code, which may call Buffer.concat()
             passthroughBufferConcat(false);
-            this.#onError(new ZlibError(err));
+            this.#onError(new ZlibError(err, this.write));
         }
         finally {
             if (this.#handle) {
@@ -191,7 +195,7 @@ class ZlibBase extends Minipass {
             }
         }
         if (this.#handle)
-            this.#handle.on('error', er => this.#onError(new ZlibError(er)));
+            this.#handle.on('error', er => this.#onError(new ZlibError(er, this.write)));
         let writeReturn;
         if (result) {
             if (Array.isArray(result) && result.length > 0) {
@@ -317,7 +321,7 @@ export class Unzip extends Zlib {
         super(opts, 'Unzip');
     }
 }
-export class Brotli extends ZlibBase {
+class Brotli extends ZlibBase {
     constructor(opts, mode) {
         opts = opts || {};
         opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS;
@@ -337,4 +341,23 @@ export class BrotliDecompress extends Brotli {
         super(opts, 'BrotliDecompress');
     }
 }
+class Zstd extends ZlibBase {
+    constructor(opts, mode) {
+        opts = opts || {};
+        opts.flush = opts.flush || constants.ZSTD_e_continue;
+        opts.finishFlush = opts.finishFlush || constants.ZSTD_e_end;
+        opts.fullFlushFlag = constants.ZSTD_e_flush;
+        super(opts, mode);
+    }
+}
+export class ZstdCompress extends Zstd {
+    constructor(opts) {
+        super(opts, 'ZstdCompress');
+    }
+}
+export class ZstdDecompress extends Zstd {
+    constructor(opts) {
+        super(opts, 'ZstdDecompress');
+    }
+}
 //# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/minizlib/package.json b/node_modules/minizlib/package.json
index 43cb855e15a5d..dceaed923d3db 100644
--- a/node_modules/minizlib/package.json
+++ b/node_modules/minizlib/package.json
@@ -1,6 +1,6 @@
 {
   "name": "minizlib",
-  "version": "3.0.2",
+  "version": "3.1.0",
   "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
   "main": "./dist/commonjs/index.js",
   "dependencies": {
@@ -33,7 +33,7 @@
   "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
   "license": "MIT",
   "devDependencies": {
-    "@types/node": "^22.13.14",
+    "@types/node": "^24.5.2",
     "tap": "^21.1.0",
     "tshy": "^3.0.2",
     "typedoc": "^0.28.1"
diff --git a/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 0a034db7a73b5..0000000000000
--- a/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/mkdirp/dist/cjs/package.json b/node_modules/mkdirp/dist/cjs/package.json
deleted file mode 100644
index 9d04a66e16cd9..0000000000000
--- a/node_modules/mkdirp/dist/cjs/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-    "name": "mkdirp",
-    "description": "Recursively mkdir, like `mkdir -p`",
-    "version": "3.0.1",
-    "keywords": [
-        "mkdir",
-        "directory",
-        "make dir",
-        "make",
-        "dir",
-        "recursive",
-        "native"
-    ],
-    "bin": "./dist/cjs/src/bin.js",
-    "main": "./dist/cjs/src/index.js",
-    "module": "./dist/mjs/index.js",
-    "types": "./dist/mjs/index.d.ts",
-    "exports": {
-        ".": {
-            "import": {
-                "types": "./dist/mjs/index.d.ts",
-                "default": "./dist/mjs/index.js"
-            },
-            "require": {
-                "types": "./dist/cjs/src/index.d.ts",
-                "default": "./dist/cjs/src/index.js"
-            }
-        }
-    },
-    "files": [
-        "dist"
-    ],
-    "scripts": {
-        "preversion": "npm test",
-        "postversion": "npm publish",
-        "prepublishOnly": "git push origin --follow-tags",
-        "preprepare": "rm -rf dist",
-        "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-        "postprepare": "bash fixup.sh",
-        "pretest": "npm run prepare",
-        "presnap": "npm run prepare",
-        "test": "c8 tap",
-        "snap": "c8 tap",
-        "format": "prettier --write . --loglevel warn",
-        "benchmark": "node benchmark/index.js",
-        "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-    },
-    "prettier": {
-        "semi": false,
-        "printWidth": 80,
-        "tabWidth": 2,
-        "useTabs": false,
-        "singleQuote": true,
-        "jsxSingleQuote": false,
-        "bracketSameLine": true,
-        "arrowParens": "avoid",
-        "endOfLine": "lf"
-    },
-    "devDependencies": {
-        "@types/brace-expansion": "^1.1.0",
-        "@types/node": "^18.11.9",
-        "@types/tap": "^15.0.7",
-        "c8": "^7.12.0",
-        "eslint-config-prettier": "^8.6.0",
-        "prettier": "^2.8.2",
-        "tap": "^16.3.3",
-        "ts-node": "^10.9.1",
-        "typedoc": "^0.23.21",
-        "typescript": "^4.9.3"
-    },
-    "tap": {
-        "coverage": false,
-        "node-arg": [
-            "--no-warnings",
-            "--loader",
-            "ts-node/esm"
-        ],
-        "ts": false
-    },
-    "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-    },
-    "repository": {
-        "type": "git",
-        "url": "https://github.com/isaacs/node-mkdirp.git"
-    },
-    "license": "MIT",
-    "engines": {
-        "node": ">=10"
-    }
-}
diff --git a/node_modules/mkdirp/dist/cjs/src/bin.js b/node_modules/mkdirp/dist/cjs/src/bin.js
deleted file mode 100755
index 757aae1fd96cb..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/bin.js
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env node
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const package_json_1 = require("../package.json");
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
-  Create each supplied directory including any necessary parent directories
-  that don't yet exist.
-
-  If the directory already exists, do nothing.
-
-OPTIONS are:
-
-  -m       If a directory needs to be created, set the mode as an octal
-  --mode=  permission string.
-
-  -v --version   Print the mkdirp version number
-
-  -h --help      Print this helpful banner
-
-  -p --print     Print the first directories created for each path provided
-
-  --manual       Use manual implementation, even if native is available
-`;
-const dirs = [];
-const opts = {};
-let doPrint = false;
-let dashdash = false;
-let manual = false;
-for (const arg of process.argv.slice(2)) {
-    if (dashdash)
-        dirs.push(arg);
-    else if (arg === '--')
-        dashdash = true;
-    else if (arg === '--manual')
-        manual = true;
-    else if (/^-h/.test(arg) || /^--help/.test(arg)) {
-        console.log(usage());
-        process.exit(0);
-    }
-    else if (arg === '-v' || arg === '--version') {
-        console.log(package_json_1.version);
-        process.exit(0);
-    }
-    else if (arg === '-p' || arg === '--print') {
-        doPrint = true;
-    }
-    else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
-        // these don't get covered in CI, but work locally
-        // weird because the tests below show as passing in the output.
-        /* c8 ignore start */
-        const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8);
-        if (isNaN(mode)) {
-            console.error(`invalid mode argument: ${arg}\nMust be an octal number.`);
-            process.exit(1);
-        }
-        /* c8 ignore stop */
-        opts.mode = mode;
-    }
-    else
-        dirs.push(arg);
-}
-const index_js_1 = require("./index.js");
-const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp;
-if (dirs.length === 0) {
-    console.error(usage());
-}
-// these don't get covered in CI, but work locally
-/* c8 ignore start */
-Promise.all(dirs.map(dir => impl(dir, opts)))
-    .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null))
-    .catch(er => {
-    console.error(er.message);
-    if (er.code)
-        console.error('  code: ' + er.code);
-    process.exit(1);
-});
-/* c8 ignore stop */
-//# sourceMappingURL=bin.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/find-made.js b/node_modules/mkdirp/dist/cjs/src/find-made.js
deleted file mode 100644
index e831ef27cadc1..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/find-made.js
+++ /dev/null
@@ -1,35 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.findMadeSync = exports.findMade = void 0;
-const path_1 = require("path");
-const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMade)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    });
-};
-exports.findMade = findMade;
-const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? (0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent)
-            : undefined;
-    }
-};
-exports.findMadeSync = findMadeSync;
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/index.js b/node_modules/mkdirp/dist/cjs/src/index.js
deleted file mode 100644
index ab9dc62cddda3..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/index.js
+++ /dev/null
@@ -1,53 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0;
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const mkdirp_native_js_1 = require("./mkdirp-native.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const path_arg_js_1 = require("./path-arg.js");
-const use_native_js_1 = require("./use-native.js");
-/* c8 ignore start */
-var mkdirp_manual_js_2 = require("./mkdirp-manual.js");
-Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } });
-Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } });
-var mkdirp_native_js_2 = require("./mkdirp-native.js");
-Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } });
-Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } });
-var use_native_js_2 = require("./use-native.js");
-Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } });
-Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } });
-/* c8 ignore stop */
-const mkdirpSync = (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNativeSync)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved);
-};
-exports.mkdirpSync = mkdirpSync;
-exports.sync = exports.mkdirpSync;
-exports.manual = mkdirp_manual_js_1.mkdirpManual;
-exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync;
-exports.native = mkdirp_native_js_1.mkdirpNative;
-exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync;
-exports.mkdirp = Object.assign(async (path, opts) => {
-    path = (0, path_arg_js_1.pathArg)(path);
-    const resolved = (0, opts_arg_js_1.optsArg)(opts);
-    return (0, use_native_js_1.useNative)(resolved)
-        ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved)
-        : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved);
-}, {
-    mkdirpSync: exports.mkdirpSync,
-    mkdirpNative: mkdirp_native_js_1.mkdirpNative,
-    mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    mkdirpManual: mkdirp_manual_js_1.mkdirpManual,
-    mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    sync: exports.mkdirpSync,
-    native: mkdirp_native_js_1.mkdirpNative,
-    nativeSync: mkdirp_native_js_1.mkdirpNativeSync,
-    manual: mkdirp_manual_js_1.mkdirpManual,
-    manualSync: mkdirp_manual_js_1.mkdirpManualSync,
-    useNative: use_native_js_1.useNative,
-    useNativeSync: use_native_js_1.useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
deleted file mode 100644
index d9bd1d8bb5a49..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js
+++ /dev/null
@@ -1,79 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpManual = exports.mkdirpManualSync = void 0;
-const path_1 = require("path");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpManualSync = (path, options, made) => {
-    const parent = (0, path_1.dirname)(path);
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-exports.mkdirpManualSync = mkdirpManualSync;
-exports.mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = false;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: exports.mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
deleted file mode 100644
index 9f00567d7cc20..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirpNative = exports.mkdirpNativeSync = void 0;
-const path_1 = require("path");
-const find_made_js_1 = require("./find-made.js");
-const mkdirp_manual_js_1 = require("./mkdirp-manual.js");
-const opts_arg_js_1 = require("./opts-arg.js");
-const mkdirpNativeSync = (path, options) => {
-    const opts = (0, opts_arg_js_1.optsArg)(options);
-    opts.recursive = true;
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = (0, find_made_js_1.findMadeSync)(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-exports.mkdirpNativeSync = mkdirpNativeSync;
-exports.mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true };
-    const parent = (0, path_1.dirname)(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return (0, find_made_js_1.findMade)(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: exports.mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/node_modules/mkdirp/dist/cjs/src/opts-arg.js
deleted file mode 100644
index e8f486c090595..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/opts-arg.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.optsArg = void 0;
-const fs_1 = require("fs");
-const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || fs_1.stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync;
-    return resolved;
-};
-exports.optsArg = optsArg;
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/path-arg.js b/node_modules/mkdirp/dist/cjs/src/path-arg.js
deleted file mode 100644
index a6b457f6e23d5..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/path-arg.js
+++ /dev/null
@@ -1,28 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.pathArg = void 0;
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-const path_1 = require("path");
-const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = (0, path_1.resolve)(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = (0, path_1.parse)(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-exports.pathArg = pathArg;
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/cjs/src/use-native.js b/node_modules/mkdirp/dist/cjs/src/use-native.js
deleted file mode 100644
index 550b3452688ee..0000000000000
--- a/node_modules/mkdirp/dist/cjs/src/use-native.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.useNative = exports.useNativeSync = void 0;
-const fs_1 = require("fs");
-const opts_arg_js_1 = require("./opts-arg.js");
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-exports.useNativeSync = !hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync;
-exports.useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, {
-    sync: exports.useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/find-made.js b/node_modules/mkdirp/dist/mjs/find-made.js
deleted file mode 100644
index 3e72fd59a2c1f..0000000000000
--- a/node_modules/mkdirp/dist/mjs/find-made.js
+++ /dev/null
@@ -1,30 +0,0 @@
-import { dirname } from 'path';
-export const findMade = async (opts, parent, path) => {
-    // we never want the 'made' return value to be a root directory
-    if (path === parent) {
-        return;
-    }
-    return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later
-    // will fail later
-    er => {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMade(opts, dirname(parent), parent)
-            : undefined;
-    });
-};
-export const findMadeSync = (opts, parent, path) => {
-    if (path === parent) {
-        return undefined;
-    }
-    try {
-        return opts.statSync(parent).isDirectory() ? path : undefined;
-    }
-    catch (er) {
-        const fer = er;
-        return fer && fer.code === 'ENOENT'
-            ? findMadeSync(opts, dirname(parent), parent)
-            : undefined;
-    }
-};
-//# sourceMappingURL=find-made.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/index.js b/node_modules/mkdirp/dist/mjs/index.js
deleted file mode 100644
index 0217ecc8cdd83..0000000000000
--- a/node_modules/mkdirp/dist/mjs/index.js
+++ /dev/null
@@ -1,43 +0,0 @@
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-import { optsArg } from './opts-arg.js';
-import { pathArg } from './path-arg.js';
-import { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore start */
-export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js';
-export { useNative, useNativeSync } from './use-native.js';
-/* c8 ignore stop */
-export const mkdirpSync = (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNativeSync(resolved)
-        ? mkdirpNativeSync(path, resolved)
-        : mkdirpManualSync(path, resolved);
-};
-export const sync = mkdirpSync;
-export const manual = mkdirpManual;
-export const manualSync = mkdirpManualSync;
-export const native = mkdirpNative;
-export const nativeSync = mkdirpNativeSync;
-export const mkdirp = Object.assign(async (path, opts) => {
-    path = pathArg(path);
-    const resolved = optsArg(opts);
-    return useNative(resolved)
-        ? mkdirpNative(path, resolved)
-        : mkdirpManual(path, resolved);
-}, {
-    mkdirpSync,
-    mkdirpNative,
-    mkdirpNativeSync,
-    mkdirpManual,
-    mkdirpManualSync,
-    sync: mkdirpSync,
-    native: mkdirpNative,
-    nativeSync: mkdirpNativeSync,
-    manual: mkdirpManual,
-    manualSync: mkdirpManualSync,
-    useNative,
-    useNativeSync,
-});
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
deleted file mode 100644
index a4d044e02d3bf..0000000000000
--- a/node_modules/mkdirp/dist/mjs/mkdirp-manual.js
+++ /dev/null
@@ -1,75 +0,0 @@
-import { dirname } from 'path';
-import { optsArg } from './opts-arg.js';
-export const mkdirpManualSync = (path, options, made) => {
-    const parent = dirname(path);
-    const opts = { ...optsArg(options), recursive: false };
-    if (parent === path) {
-        try {
-            return opts.mkdirSync(path, opts);
-        }
-        catch (er) {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-            return;
-        }
-    }
-    try {
-        opts.mkdirSync(path, opts);
-        return made || path;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
-            throw er;
-        }
-        try {
-            if (!opts.statSync(path).isDirectory())
-                throw er;
-        }
-        catch (_) {
-            throw er;
-        }
-    }
-};
-export const mkdirpManual = Object.assign(async (path, options, made) => {
-    const opts = optsArg(options);
-    opts.recursive = false;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirAsync(path, opts).catch(er => {
-            // swallowed by recursive implementation on posix systems
-            // any other error is a failure
-            const fer = er;
-            if (fer && fer.code !== 'EISDIR') {
-                throw er;
-            }
-        });
-    }
-    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made));
-        }
-        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
-            throw er;
-        }
-        return opts.statAsync(path).then(st => {
-            if (st.isDirectory()) {
-                return made;
-            }
-            else {
-                throw er;
-            }
-        }, () => {
-            throw er;
-        });
-    });
-}, { sync: mkdirpManualSync });
-//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/node_modules/mkdirp/dist/mjs/mkdirp-native.js
deleted file mode 100644
index 99d10a5425dad..0000000000000
--- a/node_modules/mkdirp/dist/mjs/mkdirp-native.js
+++ /dev/null
@@ -1,46 +0,0 @@
-import { dirname } from 'path';
-import { findMade, findMadeSync } from './find-made.js';
-import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js';
-import { optsArg } from './opts-arg.js';
-export const mkdirpNativeSync = (path, options) => {
-    const opts = optsArg(options);
-    opts.recursive = true;
-    const parent = dirname(path);
-    if (parent === path) {
-        return opts.mkdirSync(path, opts);
-    }
-    const made = findMadeSync(opts, path);
-    try {
-        opts.mkdirSync(path, opts);
-        return made;
-    }
-    catch (er) {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManualSync(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }
-};
-export const mkdirpNative = Object.assign(async (path, options) => {
-    const opts = { ...optsArg(options), recursive: true };
-    const parent = dirname(path);
-    if (parent === path) {
-        return await opts.mkdirAsync(path, opts);
-    }
-    return findMade(opts, path).then((made) => opts
-        .mkdirAsync(path, opts)
-        .then(m => made || m)
-        .catch(er => {
-        const fer = er;
-        if (fer && fer.code === 'ENOENT') {
-            return mkdirpManual(path, opts);
-        }
-        else {
-            throw er;
-        }
-    }));
-}, { sync: mkdirpNativeSync });
-//# sourceMappingURL=mkdirp-native.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/opts-arg.js b/node_modules/mkdirp/dist/mjs/opts-arg.js
deleted file mode 100644
index d47e2927fee4c..0000000000000
--- a/node_modules/mkdirp/dist/mjs/opts-arg.js
+++ /dev/null
@@ -1,34 +0,0 @@
-import { mkdir, mkdirSync, stat, statSync, } from 'fs';
-export const optsArg = (opts) => {
-    if (!opts) {
-        opts = { mode: 0o777 };
-    }
-    else if (typeof opts === 'object') {
-        opts = { mode: 0o777, ...opts };
-    }
-    else if (typeof opts === 'number') {
-        opts = { mode: opts };
-    }
-    else if (typeof opts === 'string') {
-        opts = { mode: parseInt(opts, 8) };
-    }
-    else {
-        throw new TypeError('invalid options argument');
-    }
-    const resolved = opts;
-    const optsFs = opts.fs || {};
-    opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir;
-    opts.mkdirAsync = opts.mkdirAsync
-        ? opts.mkdirAsync
-        : async (path, options) => {
-            return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made)));
-        };
-    opts.stat = opts.stat || optsFs.stat || stat;
-    opts.statAsync = opts.statAsync
-        ? opts.statAsync
-        : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats))));
-    opts.statSync = opts.statSync || optsFs.statSync || statSync;
-    opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync;
-    return resolved;
-};
-//# sourceMappingURL=opts-arg.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/path-arg.js b/node_modules/mkdirp/dist/mjs/path-arg.js
deleted file mode 100644
index 03539cc5a94f9..0000000000000
--- a/node_modules/mkdirp/dist/mjs/path-arg.js
+++ /dev/null
@@ -1,24 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform;
-import { parse, resolve } from 'path';
-export const pathArg = (path) => {
-    if (/\0/.test(path)) {
-        // simulate same failure that node raises
-        throw Object.assign(new TypeError('path must be a string without null bytes'), {
-            path,
-            code: 'ERR_INVALID_ARG_VALUE',
-        });
-    }
-    path = resolve(path);
-    if (platform === 'win32') {
-        const badWinChars = /[*|"<>?:]/;
-        const { root } = parse(path);
-        if (badWinChars.test(path.substring(root.length))) {
-            throw Object.assign(new Error('Illegal characters in path.'), {
-                path,
-                code: 'EINVAL',
-            });
-        }
-    }
-    return path;
-};
-//# sourceMappingURL=path-arg.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/dist/mjs/use-native.js b/node_modules/mkdirp/dist/mjs/use-native.js
deleted file mode 100644
index ad2093867eb74..0000000000000
--- a/node_modules/mkdirp/dist/mjs/use-native.js
+++ /dev/null
@@ -1,14 +0,0 @@
-import { mkdir, mkdirSync } from 'fs';
-import { optsArg } from './opts-arg.js';
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version;
-const versArr = version.replace(/^v/, '').split('.');
-const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12);
-export const useNativeSync = !hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdirSync === mkdirSync;
-export const useNative = Object.assign(!hasNative
-    ? () => false
-    : (opts) => optsArg(opts).mkdir === mkdir, {
-    sync: useNativeSync,
-});
-//# sourceMappingURL=use-native.js.map
\ No newline at end of file
diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json
deleted file mode 100644
index f31ac3314d6f6..0000000000000
--- a/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,91 +0,0 @@
-{
-  "name": "mkdirp",
-  "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "3.0.1",
-  "keywords": [
-    "mkdir",
-    "directory",
-    "make dir",
-    "make",
-    "dir",
-    "recursive",
-    "native"
-  ],
-  "bin": "./dist/cjs/src/bin.js",
-  "main": "./dist/cjs/src/index.js",
-  "module": "./dist/mjs/index.js",
-  "types": "./dist/mjs/index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./dist/mjs/index.d.ts",
-        "default": "./dist/mjs/index.js"
-      },
-      "require": {
-        "types": "./dist/cjs/src/index.d.ts",
-        "default": "./dist/cjs/src/index.js"
-      }
-    }
-  },
-  "files": [
-    "dist"
-  ],
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "preprepare": "rm -rf dist",
-    "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json",
-    "postprepare": "bash fixup.sh",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "c8 tap",
-    "snap": "c8 tap",
-    "format": "prettier --write . --loglevel warn",
-    "benchmark": "node benchmark/index.js",
-    "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "devDependencies": {
-    "@types/brace-expansion": "^1.1.0",
-    "@types/node": "^18.11.9",
-    "@types/tap": "^15.0.7",
-    "c8": "^7.12.0",
-    "eslint-config-prettier": "^8.6.0",
-    "prettier": "^2.8.2",
-    "tap": "^16.3.3",
-    "ts-node": "^10.9.1",
-    "typedoc": "^0.23.21",
-    "typescript": "^4.9.3"
-  },
-  "tap": {
-    "coverage": false,
-    "node-arg": [
-      "--no-warnings",
-      "--loader",
-      "ts-node/esm"
-    ],
-    "ts": false
-  },
-  "funding": {
-    "url": "https://github.com/sponsors/isaacs"
-  },
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
-  },
-  "license": "MIT",
-  "engines": {
-    "node": ">=10"
-  }
-}
diff --git a/node_modules/node-gyp/node_modules/chownr/package.json b/node_modules/node-gyp/node_modules/chownr/package.json
deleted file mode 100644
index 09aa6b2e2e576..0000000000000
--- a/node_modules/node-gyp/node_modules/chownr/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "name": "chownr",
-  "description": "like `chown -R`",
-  "version": "3.0.0",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/chownr.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "@types/node": "^20.12.5",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.12"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "license": "BlueOak-1.0.0",
-  "engines": {
-    "node": ">=18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/node-gyp/node_modules/tar/LICENSE b/node_modules/node-gyp/node_modules/tar/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/list.js b/node_modules/node-gyp/node_modules/tar/dist/commonjs/list.js
deleted file mode 100644
index 3cd34bb4bad48..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/list.js
+++ /dev/null
@@ -1,136 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.list = exports.filesFilter = void 0;
-// tar -t
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const path_1 = require("path");
-const make_command_js_1 = require("./make-command.js");
-const parse_js_1 = require("./parse.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const onReadEntryFunction = (opt) => {
-    const onReadEntry = opt.onReadEntry;
-    opt.onReadEntry =
-        onReadEntry ?
-            e => {
-                onReadEntry(e);
-                e.resume();
-            }
-            : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
-    const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true]));
-    const filter = opt.filter;
-    const mapHas = (file, r = '') => {
-        const root = r || (0, path_1.parse)(file).root || '.';
-        let ret;
-        if (file === root)
-            ret = false;
-        else {
-            const m = map.get(file);
-            if (m !== undefined) {
-                ret = m;
-            }
-            else {
-                ret = mapHas((0, path_1.dirname)(file), root);
-            }
-        }
-        map.set(file, ret);
-        return ret;
-    };
-    opt.filter =
-        filter ?
-            (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file))
-            : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file));
-};
-exports.filesFilter = filesFilter;
-const listFileSync = (opt) => {
-    const p = new parse_js_1.Parser(opt);
-    const file = opt.file;
-    let fd;
-    try {
-        const stat = node_fs_1.default.statSync(file);
-        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-        if (stat.size < readSize) {
-            p.end(node_fs_1.default.readFileSync(file));
-        }
-        else {
-            let pos = 0;
-            const buf = Buffer.allocUnsafe(readSize);
-            fd = node_fs_1.default.openSync(file, 'r');
-            while (pos < stat.size) {
-                const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
-                pos += bytesRead;
-                p.write(buf.subarray(0, bytesRead));
-            }
-            p.end();
-        }
-    }
-    finally {
-        if (typeof fd === 'number') {
-            try {
-                node_fs_1.default.closeSync(fd);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-    }
-};
-const listFile = (opt, _files) => {
-    const parse = new parse_js_1.Parser(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        parse.on('error', reject);
-        parse.on('end', resolve);
-        node_fs_1.default.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(parse);
-            }
-        });
-    });
-    return p;
-};
-exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => {
-    if (files?.length)
-        (0, exports.filesFilter)(opt, files);
-    if (!opt.noResume)
-        onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js
deleted file mode 100644
index 2b13ecbab6723..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/mkdir.js
+++ /dev/null
@@ -1,209 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.mkdirSync = exports.mkdir = void 0;
-const chownr_1 = require("chownr");
-const fs_1 = __importDefault(require("fs"));
-const mkdirp_1 = require("mkdirp");
-const node_path_1 = __importDefault(require("node:path"));
-const cwd_error_js_1 = require("./cwd-error.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const symlink_error_js_1 = require("./symlink-error.js");
-const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
-const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
-const checkCwd = (dir, cb) => {
-    fs_1.default.stat(dir, (er, st) => {
-        if (er || !st.isDirectory()) {
-            er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
-        }
-        cb(er);
-    });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-const mkdir = (dir, opt, cb) => {
-    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o0700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
-    const done = (er, created) => {
-        if (er) {
-            cb(er);
-        }
-        else {
-            cSet(cache, dir, true);
-            if (created && doChown) {
-                (0, chownr_1.chownr)(created, uid, gid, er => done(er));
-            }
-            else if (needChmod) {
-                fs_1.default.chmod(dir, mode, cb);
-            }
-            else {
-                cb();
-            }
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        return checkCwd(dir, done);
-    }
-    if (preserve) {
-        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
-        done);
-    }
-    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
-    const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-exports.mkdir = mkdir;
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-    if (!parts.length) {
-        return cb(null, created);
-    }
-    const p = parts.shift();
-    const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
-    if (er) {
-        fs_1.default.lstat(part, (statEr, st) => {
-            if (statEr) {
-                statEr.path =
-                    statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
-                cb(statEr);
-            }
-            else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-            }
-            else if (unlink) {
-                fs_1.default.unlink(part, er => {
-                    if (er) {
-                        return cb(er);
-                    }
-                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-                });
-            }
-            else if (st.isSymbolicLink()) {
-                return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')));
-            }
-            else {
-                cb(er);
-            }
-        });
-    }
-    else {
-        created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-};
-const checkCwdSync = (dir) => {
-    let ok = false;
-    let code = undefined;
-    try {
-        ok = fs_1.default.statSync(dir).isDirectory();
-    }
-    catch (er) {
-        code = er?.code;
-    }
-    finally {
-        if (!ok) {
-            throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR');
-        }
-    }
-};
-const mkdirSync = (dir, opt) => {
-    dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
-    const done = (created) => {
-        cSet(cache, dir, true);
-        if (created && doChown) {
-            (0, chownr_1.chownrSync)(created, uid, gid);
-        }
-        if (needChmod) {
-            fs_1.default.chmodSync(dir, mode);
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        checkCwdSync(cwd);
-        return done();
-    }
-    if (preserve) {
-        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
-    }
-    const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
-    const parts = sub.split('/');
-    let created = undefined;
-    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
-        part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
-        try {
-            fs_1.default.mkdirSync(part, mode);
-            created = created || part;
-            cSet(cache, part, true);
-        }
-        catch (er) {
-            const st = fs_1.default.lstatSync(part);
-            if (st.isDirectory()) {
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (unlink) {
-                fs_1.default.unlinkSync(part);
-                fs_1.default.mkdirSync(part, mode);
-                created = created || part;
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (st.isSymbolicLink()) {
-                return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'));
-            }
-        }
-    }
-    return done(created);
-};
-exports.mkdirSync = mkdirSync;
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js
deleted file mode 100644
index 2f08ce46d98c4..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-unicode.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeUnicode = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-exports.normalizeUnicode = normalizeUnicode;
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/parse.js b/node_modules/node-gyp/node_modules/tar/dist/commonjs/parse.js
deleted file mode 100644
index 9746a25899e6e..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/parse.js
+++ /dev/null
@@ -1,599 +0,0 @@
-"use strict";
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Parser = void 0;
-const events_1 = require("events");
-const minizlib_1 = require("minizlib");
-const yallist_1 = require("yallist");
-const header_js_1 = require("./header.js");
-const pax_js_1 = require("./pax.js");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-class Parser extends events_1.EventEmitter {
-    file;
-    strict;
-    maxMetaEntrySize;
-    filter;
-    brotli;
-    writable = true;
-    readable = false;
-    [QUEUE] = new yallist_1.Yallist();
-    [BUFFER];
-    [READENTRY];
-    [WRITEENTRY];
-    [STATE] = 'begin';
-    [META] = '';
-    [EX];
-    [GEX];
-    [ENDED] = false;
-    [UNZIP];
-    [ABORTED] = false;
-    [SAW_VALID_ENTRY];
-    [SAW_NULL_BLOCK] = false;
-    [SAW_EOF] = false;
-    [WRITING] = false;
-    [CONSUMING] = false;
-    [EMITTEDEND] = false;
-    constructor(opt = {}) {
-        super();
-        this.file = opt.file || '';
-        // these BADARCHIVE errors can't be detected early. listen on DONE.
-        this.on(DONE, () => {
-            if (this[STATE] === 'begin' ||
-                this[SAW_VALID_ENTRY] === false) {
-                // either less than 1 block of data, or all entries were invalid.
-                // Either way, probably not even a tarball.
-                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
-            }
-        });
-        if (opt.ondone) {
-            this.on(DONE, opt.ondone);
-        }
-        else {
-            this.on(DONE, () => {
-                this.emit('prefinish');
-                this.emit('finish');
-                this.emit('end');
-            });
-        }
-        this.strict = !!opt.strict;
-        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
-        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
-        // Unlike gzip, brotli doesn't have any magic bytes to identify it
-        // Users need to explicitly tell us they're extracting a brotli file
-        // Or we infer from the file extension
-        const isTBR = opt.file &&
-            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
-        // if it's a tbr file it MIGHT be brotli, but we don't know until
-        // we look at it and verify it's not a valid tar file.
-        this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
-                : isTBR ? undefined
-                    : false;
-        // have to set this so that streams are ok piping into it
-        this.on('end', () => this[CLOSESTREAM]());
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        if (typeof opt.onReadEntry === 'function') {
-            this.on('entry', opt.onReadEntry);
-        }
-    }
-    warn(code, message, data = {}) {
-        (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    [CONSUMEHEADER](chunk, position) {
-        if (this[SAW_VALID_ENTRY] === undefined) {
-            this[SAW_VALID_ENTRY] = false;
-        }
-        let header;
-        try {
-            header = new header_js_1.Header(chunk, position, this[EX], this[GEX]);
-        }
-        catch (er) {
-            return this.warn('TAR_ENTRY_INVALID', er);
-        }
-        if (header.nullBlock) {
-            if (this[SAW_NULL_BLOCK]) {
-                this[SAW_EOF] = true;
-                // ending an archive with no entries.  pointless, but legal.
-                if (this[STATE] === 'begin') {
-                    this[STATE] = 'header';
-                }
-                this[EMIT]('eof');
-            }
-            else {
-                this[SAW_NULL_BLOCK] = true;
-                this[EMIT]('nullBlock');
-            }
-        }
-        else {
-            this[SAW_NULL_BLOCK] = false;
-            if (!header.cksumValid) {
-                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
-            }
-            else if (!header.path) {
-                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
-            }
-            else {
-                const type = header.type;
-                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
-                        header,
-                    });
-                }
-                else if (!/^(Symbolic)?Link$/.test(type) &&
-                    !/^(Global)?ExtendedHeader$/.test(type) &&
-                    header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
-                        header,
-                    });
-                }
-                else {
-                    const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX]));
-                    // we do this for meta & ignored entries as well, because they
-                    // are still valid tar, or else we wouldn't know to ignore them
-                    if (!this[SAW_VALID_ENTRY]) {
-                        if (entry.remain) {
-                            // this might be the one!
-                            const onend = () => {
-                                if (!entry.invalid) {
-                                    this[SAW_VALID_ENTRY] = true;
-                                }
-                            };
-                            entry.on('end', onend);
-                        }
-                        else {
-                            this[SAW_VALID_ENTRY] = true;
-                        }
-                    }
-                    if (entry.meta) {
-                        if (entry.size > this.maxMetaEntrySize) {
-                            entry.ignore = true;
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = 'ignore';
-                            entry.resume();
-                        }
-                        else if (entry.size > 0) {
-                            this[META] = '';
-                            entry.on('data', c => (this[META] += c));
-                            this[STATE] = 'meta';
-                        }
-                    }
-                    else {
-                        this[EX] = undefined;
-                        entry.ignore =
-                            entry.ignore || !this.filter(entry.path, entry);
-                        if (entry.ignore) {
-                            // probably valid, just not something we care about
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = entry.remain ? 'ignore' : 'header';
-                            entry.resume();
-                        }
-                        else {
-                            if (entry.remain) {
-                                this[STATE] = 'body';
-                            }
-                            else {
-                                this[STATE] = 'header';
-                                entry.end();
-                            }
-                            if (!this[READENTRY]) {
-                                this[QUEUE].push(entry);
-                                this[NEXTENTRY]();
-                            }
-                            else {
-                                this[QUEUE].push(entry);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    [CLOSESTREAM]() {
-        queueMicrotask(() => this.emit('close'));
-    }
-    [PROCESSENTRY](entry) {
-        let go = true;
-        if (!entry) {
-            this[READENTRY] = undefined;
-            go = false;
-        }
-        else if (Array.isArray(entry)) {
-            const [ev, ...args] = entry;
-            this.emit(ev, ...args);
-        }
-        else {
-            this[READENTRY] = entry;
-            this.emit('entry', entry);
-            if (!entry.emittedEnd) {
-                entry.on('end', () => this[NEXTENTRY]());
-                go = false;
-            }
-        }
-        return go;
-    }
-    [NEXTENTRY]() {
-        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
-        if (!this[QUEUE].length) {
-            // At this point, there's nothing in the queue, but we may have an
-            // entry which is being consumed (readEntry).
-            // If we don't, then we definitely can handle more data.
-            // If we do, and either it's flowing, or it has never had any data
-            // written to it, then it needs more.
-            // The only other possibility is that it has returned false from a
-            // write() call, so we wait for the next drain to continue.
-            const re = this[READENTRY];
-            const drainNow = !re || re.flowing || re.size === re.remain;
-            if (drainNow) {
-                if (!this[WRITING]) {
-                    this.emit('drain');
-                }
-            }
-            else {
-                re.once('drain', () => this.emit('drain'));
-            }
-        }
-    }
-    [CONSUMEBODY](chunk, position) {
-        // write up to but no  more than writeEntry.blockRemain
-        const entry = this[WRITEENTRY];
-        /* c8 ignore start */
-        if (!entry) {
-            throw new Error('attempt to consume body without entry??');
-        }
-        const br = entry.blockRemain ?? 0;
-        /* c8 ignore stop */
-        const c = br >= chunk.length && position === 0 ?
-            chunk
-            : chunk.subarray(position, position + br);
-        entry.write(c);
-        if (!entry.blockRemain) {
-            this[STATE] = 'header';
-            this[WRITEENTRY] = undefined;
-            entry.end();
-        }
-        return c.length;
-    }
-    [CONSUMEMETA](chunk, position) {
-        const entry = this[WRITEENTRY];
-        const ret = this[CONSUMEBODY](chunk, position);
-        // if we finished, then the entry is reset
-        if (!this[WRITEENTRY] && entry) {
-            this[EMITMETA](entry);
-        }
-        return ret;
-    }
-    [EMIT](ev, data, extra) {
-        if (!this[QUEUE].length && !this[READENTRY]) {
-            this.emit(ev, data, extra);
-        }
-        else {
-            this[QUEUE].push([ev, data, extra]);
-        }
-    }
-    [EMITMETA](entry) {
-        this[EMIT]('meta', this[META]);
-        switch (entry.type) {
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false);
-                break;
-            case 'GlobalExtendedHeader':
-                this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true);
-                break;
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath': {
-                const ex = this[EX] ?? Object.create(null);
-                this[EX] = ex;
-                ex.path = this[META].replace(/\0.*/, '');
-                break;
-            }
-            case 'NextFileHasLongLinkpath': {
-                const ex = this[EX] || Object.create(null);
-                this[EX] = ex;
-                ex.linkpath = this[META].replace(/\0.*/, '');
-                break;
-            }
-            /* c8 ignore start */
-            default:
-                throw new Error('unknown meta: ' + entry.type);
-            /* c8 ignore stop */
-        }
-    }
-    abort(error) {
-        this[ABORTED] = true;
-        this.emit('abort', error);
-        // always throws, even in non-strict mode
-        this.warn('TAR_ABORT', error, { recoverable: false });
-    }
-    write(chunk, encoding, cb) {
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, 
-            /* c8 ignore next */
-            typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        if (this[ABORTED]) {
-            /* c8 ignore next */
-            cb?.();
-            return false;
-        }
-        // first write, might be gzipped
-        const needSniff = this[UNZIP] === undefined ||
-            (this.brotli === undefined && this[UNZIP] === false);
-        if (needSniff && chunk) {
-            if (this[BUFFER]) {
-                chunk = Buffer.concat([this[BUFFER], chunk]);
-                this[BUFFER] = undefined;
-            }
-            if (chunk.length < gzipHeader.length) {
-                this[BUFFER] = chunk;
-                /* c8 ignore next */
-                cb?.();
-                return true;
-            }
-            // look for gzip header
-            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
-                if (chunk[i] !== gzipHeader[i]) {
-                    this[UNZIP] = false;
-                }
-            }
-            const maybeBrotli = this.brotli === undefined;
-            if (this[UNZIP] === false && maybeBrotli) {
-                // read the first header to see if it's a valid tar file. If so,
-                // we can safely assume that it's not actually brotli, despite the
-                // .tbr or .tar.br file extension.
-                // if we ended before getting a full chunk, yes, def brotli
-                if (chunk.length < 512) {
-                    if (this[ENDED]) {
-                        this.brotli = true;
-                    }
-                    else {
-                        this[BUFFER] = chunk;
-                        /* c8 ignore next */
-                        cb?.();
-                        return true;
-                    }
-                }
-                else {
-                    // if it's tar, it's pretty reliably not brotli, chances of
-                    // that happening are astronomical.
-                    try {
-                        new header_js_1.Header(chunk.subarray(0, 512));
-                        this.brotli = false;
-                    }
-                    catch (_) {
-                        this.brotli = true;
-                    }
-                }
-            }
-            if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
-                const ended = this[ENDED];
-                this[ENDED] = false;
-                this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new minizlib_1.Unzip({})
-                        : new minizlib_1.BrotliDecompress({});
-                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
-                this[UNZIP].on('error', er => this.abort(er));
-                this[UNZIP].on('end', () => {
-                    this[ENDED] = true;
-                    this[CONSUMECHUNK]();
-                });
-                this[WRITING] = true;
-                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
-                this[WRITING] = false;
-                cb?.();
-                return ret;
-            }
-        }
-        this[WRITING] = true;
-        if (this[UNZIP]) {
-            this[UNZIP].write(chunk);
-        }
-        else {
-            this[CONSUMECHUNK](chunk);
-        }
-        this[WRITING] = false;
-        // return false if there's a queue, or if the current entry isn't flowing
-        const ret = this[QUEUE].length ? false
-            : this[READENTRY] ? this[READENTRY].flowing
-                : true;
-        // if we have no queue, then that means a clogged READENTRY
-        if (!ret && !this[QUEUE].length) {
-            this[READENTRY]?.once('drain', () => this.emit('drain'));
-        }
-        /* c8 ignore next */
-        cb?.();
-        return ret;
-    }
-    [BUFFERCONCAT](c) {
-        if (c && !this[ABORTED]) {
-            this[BUFFER] =
-                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
-        }
-    }
-    [MAYBEEND]() {
-        if (this[ENDED] &&
-            !this[EMITTEDEND] &&
-            !this[ABORTED] &&
-            !this[CONSUMING]) {
-            this[EMITTEDEND] = true;
-            const entry = this[WRITEENTRY];
-            if (entry && entry.blockRemain) {
-                // truncated, likely a damaged file
-                const have = this[BUFFER] ? this[BUFFER].length : 0;
-                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
-                if (this[BUFFER]) {
-                    entry.write(this[BUFFER]);
-                }
-                entry.end();
-            }
-            this[EMIT](DONE);
-        }
-    }
-    [CONSUMECHUNK](chunk) {
-        if (this[CONSUMING] && chunk) {
-            this[BUFFERCONCAT](chunk);
-        }
-        else if (!chunk && !this[BUFFER]) {
-            this[MAYBEEND]();
-        }
-        else if (chunk) {
-            this[CONSUMING] = true;
-            if (this[BUFFER]) {
-                this[BUFFERCONCAT](chunk);
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            else {
-                this[CONSUMECHUNKSUB](chunk);
-            }
-            while (this[BUFFER] &&
-                this[BUFFER]?.length >= 512 &&
-                !this[ABORTED] &&
-                !this[SAW_EOF]) {
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            this[CONSUMING] = false;
-        }
-        if (!this[BUFFER] || this[ENDED]) {
-            this[MAYBEEND]();
-        }
-    }
-    [CONSUMECHUNKSUB](chunk) {
-        // we know that we are in CONSUMING mode, so anything written goes into
-        // the buffer.  Advance the position and put any remainder in the buffer.
-        let position = 0;
-        const length = chunk.length;
-        while (position + 512 <= length &&
-            !this[ABORTED] &&
-            !this[SAW_EOF]) {
-            switch (this[STATE]) {
-                case 'begin':
-                case 'header':
-                    this[CONSUMEHEADER](chunk, position);
-                    position += 512;
-                    break;
-                case 'ignore':
-                case 'body':
-                    position += this[CONSUMEBODY](chunk, position);
-                    break;
-                case 'meta':
-                    position += this[CONSUMEMETA](chunk, position);
-                    break;
-                /* c8 ignore start */
-                default:
-                    throw new Error('invalid state: ' + this[STATE]);
-                /* c8 ignore stop */
-            }
-        }
-        if (position < length) {
-            if (this[BUFFER]) {
-                this[BUFFER] = Buffer.concat([
-                    chunk.subarray(position),
-                    this[BUFFER],
-                ]);
-            }
-            else {
-                this[BUFFER] = chunk.subarray(position);
-            }
-        }
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (cb)
-            this.once('finish', cb);
-        if (!this[ABORTED]) {
-            if (this[UNZIP]) {
-                /* c8 ignore start */
-                if (chunk)
-                    this[UNZIP].write(chunk);
-                /* c8 ignore stop */
-                this[UNZIP].end();
-            }
-            else {
-                this[ENDED] = true;
-                if (this.brotli === undefined)
-                    chunk = chunk || Buffer.alloc(0);
-                if (chunk)
-                    this.write(chunk);
-                this[MAYBEEND]();
-            }
-        }
-        return this;
-    }
-}
-exports.Parser = Parser;
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js b/node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js
deleted file mode 100644
index 262deecd12f9f..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/replace.js
+++ /dev/null
@@ -1,231 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.replace = void 0;
-// tar -r
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const header_js_1 = require("./header.js");
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const options_js_1 = require("./options.js");
-const pack_js_1 = require("./pack.js");
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    let threw = true;
-    let fd;
-    let position;
-    try {
-        try {
-            fd = node_fs_1.default.openSync(opt.file, 'r+');
-        }
-        catch (er) {
-            if (er?.code === 'ENOENT') {
-                fd = node_fs_1.default.openSync(opt.file, 'w+');
-            }
-            else {
-                throw er;
-            }
-        }
-        const st = node_fs_1.default.fstatSync(fd);
-        const headBuf = Buffer.alloc(512);
-        POSITION: for (position = 0; position < st.size; position += 512) {
-            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-                bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
-                if (position === 0 &&
-                    headBuf[0] === 0x1f &&
-                    headBuf[1] === 0x8b) {
-                    throw new Error('cannot append to compressed archives');
-                }
-                if (!bytes) {
-                    break POSITION;
-                }
-            }
-            const h = new header_js_1.Header(headBuf);
-            if (!h.cksumValid) {
-                break;
-            }
-            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
-            if (position + entryBlockSize + 512 > st.size) {
-                break;
-            }
-            // the 512 for the header we just parsed will be added as well
-            // also jump ahead all the blocks for the body
-            position += entryBlockSize;
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-        }
-        threw = false;
-        streamSync(opt, p, position, fd, files);
-    }
-    finally {
-        if (threw) {
-            try {
-                node_fs_1.default.closeSync(fd);
-            }
-            catch (er) { }
-        }
-    }
-};
-const streamSync = (opt, p, position, fd, files) => {
-    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
-        fd: fd,
-        start: position,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
-    files = Array.from(files);
-    const p = new pack_js_1.Pack(opt);
-    const getPos = (fd, size, cb_) => {
-        const cb = (er, pos) => {
-            if (er) {
-                node_fs_1.default.close(fd, _ => cb_(er));
-            }
-            else {
-                cb_(null, pos);
-            }
-        };
-        let position = 0;
-        if (size === 0) {
-            return cb(null, 0);
-        }
-        let bufPos = 0;
-        const headBuf = Buffer.alloc(512);
-        const onread = (er, bytes) => {
-            if (er || typeof bytes === 'undefined') {
-                return cb(er);
-            }
-            bufPos += bytes;
-            if (bufPos < 512 && bytes) {
-                return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
-            }
-            if (position === 0 &&
-                headBuf[0] === 0x1f &&
-                headBuf[1] === 0x8b) {
-                return cb(new Error('cannot append to compressed archives'));
-            }
-            // truncated header
-            if (bufPos < 512) {
-                return cb(null, position);
-            }
-            const h = new header_js_1.Header(headBuf);
-            if (!h.cksumValid) {
-                return cb(null, position);
-            }
-            /* c8 ignore next */
-            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
-            if (position + entryBlockSize + 512 > size) {
-                return cb(null, position);
-            }
-            position += entryBlockSize + 512;
-            if (position >= size) {
-                return cb(null, position);
-            }
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-            bufPos = 0;
-            node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
-        };
-        node_fs_1.default.read(fd, headBuf, 0, 512, position, onread);
-    };
-    const promise = new Promise((resolve, reject) => {
-        p.on('error', reject);
-        let flag = 'r+';
-        const onopen = (er, fd) => {
-            if (er && er.code === 'ENOENT' && flag === 'r+') {
-                flag = 'w+';
-                return node_fs_1.default.open(opt.file, flag, onopen);
-            }
-            if (er || !fd) {
-                return reject(er);
-            }
-            node_fs_1.default.fstat(fd, (er, st) => {
-                if (er) {
-                    return node_fs_1.default.close(fd, () => reject(er));
-                }
-                getPos(fd, st.size, (er, position) => {
-                    if (er) {
-                        return reject(er);
-                    }
-                    const stream = new fs_minipass_1.WriteStream(opt.file, {
-                        fd: fd,
-                        start: position,
-                    });
-                    p.pipe(stream);
-                    stream.on('error', reject);
-                    stream.on('close', resolve);
-                    addFilesAsync(p, files);
-                });
-            });
-        };
-        node_fs_1.default.open(opt.file, flag, onopen);
-    });
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            (0, list_js_1.list)({
-                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await (0, list_js_1.list)({
-                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, 
-/* c8 ignore start */
-() => {
-    throw new TypeError('file is required');
-}, () => {
-    throw new TypeError('file is required');
-}, 
-/* c8 ignore stop */
-(opt, entries) => {
-    if (!(0, options_js_1.isFile)(opt)) {
-        throw new TypeError('file is required');
-    }
-    if (opt.gzip ||
-        opt.brotli ||
-        opt.file.endsWith('.br') ||
-        opt.file.endsWith('.tbr')) {
-        throw new TypeError('cannot append to compressed archives');
-    }
-    if (!entries?.length) {
-        throw new TypeError('no paths specified to add/replace');
-    }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js b/node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js
deleted file mode 100644
index edf8acbb18c40..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/unpack.js
+++ /dev/null
@@ -1,919 +0,0 @@
-"use strict";
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.UnpackSync = exports.Unpack = void 0;
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_assert_1 = __importDefault(require("node:assert"));
-const node_crypto_1 = require("node:crypto");
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const get_write_flag_js_1 = require("./get-write-flag.js");
-const mkdir_js_1 = require("./mkdir.js");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const parse_js_1 = require("./parse.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const wc = __importStar(require("./winchars.js"));
-const path_reservations_js_1 = require("./path-reservations.js");
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
-    if (!isWindows) {
-        return node_fs_1.default.unlink(path, cb);
-    }
-    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
-    node_fs_1.default.rename(path, name, er => {
-        if (er) {
-            return cb(er);
-        }
-        node_fs_1.default.unlink(name, cb);
-    });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
-    if (!isWindows) {
-        return node_fs_1.default.unlinkSync(path);
-    }
-    const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex');
-    node_fs_1.default.renameSync(path, name);
-    node_fs_1.default.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
-    : b !== undefined && b === b >>> 0 ? b
-        : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
-class Unpack extends parse_js_1.Parser {
-    [ENDED] = false;
-    [CHECKED_CWD] = false;
-    [PENDING] = 0;
-    reservations = new path_reservations_js_1.PathReservations();
-    transform;
-    writable = true;
-    readable = false;
-    dirCache;
-    uid;
-    gid;
-    setOwner;
-    preserveOwner;
-    processGid;
-    processUid;
-    maxDepth;
-    forceChown;
-    win32;
-    newer;
-    keep;
-    noMtime;
-    preservePaths;
-    unlink;
-    cwd;
-    strip;
-    processUmask;
-    umask;
-    dmode;
-    fmode;
-    chmod;
-    constructor(opt = {}) {
-        opt.ondone = () => {
-            this[ENDED] = true;
-            this[MAYBECLOSE]();
-        };
-        super(opt);
-        this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
-        this.chmod = !!opt.chmod;
-        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-            // need both or neither
-            if (typeof opt.uid !== 'number' ||
-                typeof opt.gid !== 'number') {
-                throw new TypeError('cannot set owner without number uid and gid');
-            }
-            if (opt.preserveOwner) {
-                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
-            }
-            this.uid = opt.uid;
-            this.gid = opt.gid;
-            this.setOwner = true;
-        }
-        else {
-            this.uid = undefined;
-            this.gid = undefined;
-            this.setOwner = false;
-        }
-        // default true for root
-        if (opt.preserveOwner === undefined &&
-            typeof opt.uid !== 'number') {
-            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
-        }
-        else {
-            this.preserveOwner = !!opt.preserveOwner;
-        }
-        this.processUid =
-            (this.preserveOwner || this.setOwner) && process.getuid ?
-                process.getuid()
-                : undefined;
-        this.processGid =
-            (this.preserveOwner || this.setOwner) && process.getgid ?
-                process.getgid()
-                : undefined;
-        // prevent excessively deep nesting of subfolders
-        // set to `Infinity` to remove this restriction
-        this.maxDepth =
-            typeof opt.maxDepth === 'number' ?
-                opt.maxDepth
-                : DEFAULT_MAX_DEPTH;
-        // mostly just for testing, but useful in some cases.
-        // Forcibly trigger a chown on every entry, no matter what
-        this.forceChown = opt.forceChown === true;
-        // turn > this[ONENTRY](entry));
-    }
-    // a bad or damaged archive is a warning for Parser, but an error
-    // when extracting.  Mark those errors as unrecoverable, because
-    // the Unpack contract cannot be met.
-    warn(code, msg, data = {}) {
-        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-            data.recoverable = false;
-        }
-        return super.warn(code, msg, data);
-    }
-    [MAYBECLOSE]() {
-        if (this[ENDED] && this[PENDING] === 0) {
-            this.emit('prefinish');
-            this.emit('finish');
-            this.emit('end');
-        }
-    }
-    [CHECKPATH](entry) {
-        const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path);
-        const parts = p.split('/');
-        if (this.strip) {
-            if (parts.length < this.strip) {
-                return false;
-            }
-            if (entry.type === 'Link') {
-                const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/');
-                if (linkparts.length >= this.strip) {
-                    entry.linkpath = linkparts.slice(this.strip).join('/');
-                }
-                else {
-                    return false;
-                }
-            }
-            parts.splice(0, this.strip);
-            entry.path = parts.join('/');
-        }
-        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-                entry,
-                path: p,
-                depth: parts.length,
-                maxDepth: this.maxDepth,
-            });
-            return false;
-        }
-        if (!this.preservePaths) {
-            if (parts.includes('..') ||
-                /* c8 ignore next */
-                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
-                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-                    entry,
-                    path: p,
-                });
-                return false;
-            }
-            // strip off the root
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p);
-            if (root) {
-                entry.path = String(stripped);
-                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-                    entry,
-                    path: p,
-                });
-            }
-        }
-        if (node_path_1.default.isAbsolute(entry.path)) {
-            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path));
-        }
-        else {
-            entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path));
-        }
-        // if we somehow ended up with a path that escapes the cwd, and we are
-        // not in preservePaths mode, then something is fishy!  This should have
-        // been prevented above, so ignore this for coverage.
-        /* c8 ignore start - defense in depth */
-        if (!this.preservePaths &&
-            typeof entry.absolute === 'string' &&
-            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-            entry.absolute !== this.cwd) {
-            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-                entry,
-                path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path),
-                resolvedPath: entry.absolute,
-                cwd: this.cwd,
-            });
-            return false;
-        }
-        /* c8 ignore stop */
-        // an archive can set properties on the extraction directory, but it
-        // may not replace the cwd with a different kind of thing entirely.
-        if (entry.absolute === this.cwd &&
-            entry.type !== 'Directory' &&
-            entry.type !== 'GNUDumpDir') {
-            return false;
-        }
-        // only encode : chars that aren't drive letter indicators
-        if (this.win32) {
-            const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute));
-            entry.absolute =
-                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
-            const { root: pRoot } = node_path_1.default.win32.parse(entry.path);
-            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
-        }
-        return true;
-    }
-    [ONENTRY](entry) {
-        if (!this[CHECKPATH](entry)) {
-            return entry.resume();
-        }
-        node_assert_1.default.equal(typeof entry.absolute, 'string');
-        switch (entry.type) {
-            case 'Directory':
-            case 'GNUDumpDir':
-                if (entry.mode) {
-                    entry.mode = entry.mode | 0o700;
-                }
-            // eslint-disable-next-line no-fallthrough
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-            case 'Link':
-            case 'SymbolicLink':
-                return this[CHECKFS](entry);
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'FIFO':
-            default:
-                return this[UNSUPPORTED](entry);
-        }
-    }
-    [ONERROR](er, entry) {
-        // Cwd has to exist, or else nothing works. That's serious.
-        // Other errors are warnings, which raise the error in strict
-        // mode, but otherwise continue on.
-        if (er.name === 'CwdError') {
-            this.emit('error', er);
-        }
-        else {
-            this.warn('TAR_ENTRY_ERROR', er, { entry });
-            this[UNPEND]();
-            entry.resume();
-        }
-    }
-    [MKDIR](dir, mode, cb) {
-        (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
-            uid: this.uid,
-            gid: this.gid,
-            processUid: this.processUid,
-            processGid: this.processGid,
-            umask: this.processUmask,
-            preserve: this.preservePaths,
-            unlink: this.unlink,
-            cache: this.dirCache,
-            cwd: this.cwd,
-            mode: mode,
-        }, cb);
-    }
-    [DOCHOWN](entry) {
-        // in preserve owner mode, chown if the entry doesn't match process
-        // in set owner mode, chown if setting doesn't match process
-        return (this.forceChown ||
-            (this.preserveOwner &&
-                ((typeof entry.uid === 'number' &&
-                    entry.uid !== this.processUid) ||
-                    (typeof entry.gid === 'number' &&
-                        entry.gid !== this.processGid))) ||
-            (typeof this.uid === 'number' &&
-                this.uid !== this.processUid) ||
-            (typeof this.gid === 'number' && this.gid !== this.processGid));
-    }
-    [UID](entry) {
-        return uint32(this.uid, entry.uid, this.processUid);
-    }
-    [GID](entry) {
-        return uint32(this.gid, entry.gid, this.processGid);
-    }
-    [FILE](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const stream = new fsm.WriteStream(String(entry.absolute), {
-            // slight lie, but it can be numeric flags
-            flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size),
-            mode: mode,
-            autoClose: false,
-        });
-        stream.on('error', (er) => {
-            if (stream.fd) {
-                node_fs_1.default.close(stream.fd, () => { });
-            }
-            // flush all the data out so that we aren't left hanging
-            // if the error wasn't actually fatal.  otherwise the parse
-            // is blocked, and we never proceed.
-            stream.write = () => true;
-            this[ONERROR](er, entry);
-            fullyDone();
-        });
-        let actions = 1;
-        const done = (er) => {
-            if (er) {
-                /* c8 ignore start - we should always have a fd by now */
-                if (stream.fd) {
-                    node_fs_1.default.close(stream.fd, () => { });
-                }
-                /* c8 ignore stop */
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            if (--actions === 0) {
-                if (stream.fd !== undefined) {
-                    node_fs_1.default.close(stream.fd, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                        }
-                        else {
-                            this[UNPEND]();
-                        }
-                        fullyDone();
-                    });
-                }
-            }
-        };
-        stream.on('finish', () => {
-            // if futimes fails, try utimes
-            // if utimes fails, fail with the original error
-            // same for fchown/chown
-            const abs = String(entry.absolute);
-            const fd = stream.fd;
-            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
-                actions++;
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                node_fs_1.default.futimes(fd, atime, mtime, er => er ?
-                    node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er))
-                    : done());
-            }
-            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
-                actions++;
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                if (typeof uid === 'number' && typeof gid === 'number') {
-                    node_fs_1.default.fchown(fd, uid, gid, er => er ?
-                        node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er))
-                        : done());
-                }
-            }
-            done();
-        });
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => {
-                this[ONERROR](er, entry);
-                fullyDone();
-            });
-            entry.pipe(tx);
-        }
-        tx.pipe(stream);
-    }
-    [DIRECTORY](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        this[MKDIR](String(entry.absolute), mode, er => {
-            if (er) {
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            let actions = 1;
-            const done = () => {
-                if (--actions === 0) {
-                    fullyDone();
-                    this[UNPEND]();
-                    entry.resume();
-                }
-            };
-            if (entry.mtime && !this.noMtime) {
-                actions++;
-                node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
-            }
-            if (this[DOCHOWN](entry)) {
-                actions++;
-                node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
-            }
-            done();
-        });
-    }
-    [UNSUPPORTED](entry) {
-        entry.unsupported = true;
-        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
-        entry.resume();
-    }
-    [SYMLINK](entry, done) {
-        this[LINK](entry, String(entry.linkpath), 'symlink', done);
-    }
-    [HARDLINK](entry, done) {
-        const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath)));
-        this[LINK](entry, linkpath, 'link', done);
-    }
-    [PEND]() {
-        this[PENDING]++;
-    }
-    [UNPEND]() {
-        this[PENDING]--;
-        this[MAYBECLOSE]();
-    }
-    [SKIP](entry) {
-        this[UNPEND]();
-        entry.resume();
-    }
-    // Check if we can reuse an existing filesystem entry safely and
-    // overwrite it, rather than unlinking and recreating
-    // Windows doesn't report a useful nlink, so we just never reuse entries
-    [ISREUSABLE](entry, st) {
-        return (entry.type === 'File' &&
-            !this.unlink &&
-            st.isFile() &&
-            st.nlink <= 1 &&
-            !isWindows);
-    }
-    // check if a thing is there, and if so, try to clobber it
-    [CHECKFS](entry) {
-        this[PEND]();
-        const paths = [entry.path];
-        if (entry.linkpath) {
-            paths.push(entry.linkpath);
-        }
-        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
-    }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
-    [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
-        const done = (er) => {
-            this[PRUNECACHE](entry);
-            fullyDone(er);
-        };
-        const checkCwd = () => {
-            this[MKDIR](this.cwd, this.dmode, er => {
-                if (er) {
-                    this[ONERROR](er, entry);
-                    done();
-                    return;
-                }
-                this[CHECKED_CWD] = true;
-                start();
-            });
-        };
-        const start = () => {
-            if (entry.absolute !== this.cwd) {
-                const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
-                if (parent !== this.cwd) {
-                    return this[MKDIR](parent, this.dmode, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                            done();
-                            return;
-                        }
-                        afterMakeParent();
-                    });
-                }
-            }
-            afterMakeParent();
-        };
-        const afterMakeParent = () => {
-            node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => {
-                if (st &&
-                    (this.keep ||
-                        /* c8 ignore next */
-                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-                    this[SKIP](entry);
-                    done();
-                    return;
-                }
-                if (lstatEr || this[ISREUSABLE](entry, st)) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                if (st.isDirectory()) {
-                    if (entry.type === 'Directory') {
-                        const needChmod = this.chmod &&
-                            entry.mode &&
-                            (st.mode & 0o7777) !== entry.mode;
-                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
-                        if (!needChmod) {
-                            return afterChmod();
-                        }
-                        return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
-                    }
-                    // Not a dir entry, have to remove it.
-                    // NB: the only way to end up with an entry that is the cwd
-                    // itself, in such a way that == does not detect, is a
-                    // tricky windows absolute path with UNC or 8.3 parts (and
-                    // preservePaths:true, or else it will have been stripped).
-                    // In that case, the user has opted out of path protections
-                    // explicitly, so if they blow away the cwd, c'est la vie.
-                    if (entry.absolute !== this.cwd) {
-                        return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
-                    }
-                }
-                // not a dir, and not reusable
-                // don't remove if the cwd, we want that error
-                if (entry.absolute === this.cwd) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
-            });
-        };
-        if (this[CHECKED_CWD]) {
-            start();
-        }
-        else {
-            checkCwd();
-        }
-    }
-    [MAKEFS](er, entry, done) {
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        switch (entry.type) {
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-                return this[FILE](entry, done);
-            case 'Link':
-                return this[HARDLINK](entry, done);
-            case 'SymbolicLink':
-                return this[SYMLINK](entry, done);
-            case 'Directory':
-            case 'GNUDumpDir':
-                return this[DIRECTORY](entry, done);
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        // XXX: get the type ('symlink' or 'junction') for windows
-        node_fs_1.default[link](linkpath, String(entry.absolute), er => {
-            if (er) {
-                this[ONERROR](er, entry);
-            }
-            else {
-                this[UNPEND]();
-                entry.resume();
-            }
-            done();
-        });
-    }
-}
-exports.Unpack = Unpack;
-const callSync = (fn) => {
-    try {
-        return [null, fn()];
-    }
-    catch (er) {
-        return [er, null];
-    }
-};
-class UnpackSync extends Unpack {
-    sync = true;
-    [MAKEFS](er, entry) {
-        return super[MAKEFS](er, entry, () => { });
-    }
-    [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
-        if (!this[CHECKED_CWD]) {
-            const er = this[MKDIR](this.cwd, this.dmode);
-            if (er) {
-                return this[ONERROR](er, entry);
-            }
-            this[CHECKED_CWD] = true;
-        }
-        // don't bother to make the parent if the current entry is the cwd,
-        // we've already checked it.
-        if (entry.absolute !== this.cwd) {
-            const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute)));
-            if (parent !== this.cwd) {
-                const mkParent = this[MKDIR](parent, this.dmode);
-                if (mkParent) {
-                    return this[ONERROR](mkParent, entry);
-                }
-            }
-        }
-        const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute)));
-        if (st &&
-            (this.keep ||
-                /* c8 ignore next */
-                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-            return this[SKIP](entry);
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-            return this[MAKEFS](null, entry);
-        }
-        if (st.isDirectory()) {
-            if (entry.type === 'Directory') {
-                const needChmod = this.chmod &&
-                    entry.mode &&
-                    (st.mode & 0o7777) !== entry.mode;
-                const [er] = needChmod ?
-                    callSync(() => {
-                        node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode));
-                    })
-                    : [];
-                return this[MAKEFS](er, entry);
-            }
-            // not a dir entry, have to remove it
-            const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute)));
-            this[MAKEFS](er, entry);
-        }
-        // not a dir, and not reusable.
-        // don't remove if it's the cwd, since we want that error.
-        const [er] = entry.absolute === this.cwd ?
-            []
-            : callSync(() => unlinkFileSync(String(entry.absolute)));
-        this[MAKEFS](er, entry);
-    }
-    [FILE](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const oner = (er) => {
-            let closeError;
-            try {
-                node_fs_1.default.closeSync(fd);
-            }
-            catch (e) {
-                closeError = e;
-            }
-            if (er || closeError) {
-                this[ONERROR](er || closeError, entry);
-            }
-            done();
-        };
-        let fd;
-        try {
-            fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
-        }
-        catch (er) {
-            return oner(er);
-        }
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => this[ONERROR](er, entry));
-            entry.pipe(tx);
-        }
-        tx.on('data', (chunk) => {
-            try {
-                node_fs_1.default.writeSync(fd, chunk, 0, chunk.length);
-            }
-            catch (er) {
-                oner(er);
-            }
-        });
-        tx.on('end', () => {
-            let er = null;
-            // try both, falling futimes back to utimes
-            // if either fails, handle the first error
-            if (entry.mtime && !this.noMtime) {
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                try {
-                    node_fs_1.default.futimesSync(fd, atime, mtime);
-                }
-                catch (futimeser) {
-                    try {
-                        node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime);
-                    }
-                    catch (utimeser) {
-                        er = futimeser;
-                    }
-                }
-            }
-            if (this[DOCHOWN](entry)) {
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                try {
-                    node_fs_1.default.fchownSync(fd, Number(uid), Number(gid));
-                }
-                catch (fchowner) {
-                    try {
-                        node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid));
-                    }
-                    catch (chowner) {
-                        er = er || fchowner;
-                    }
-                }
-            }
-            oner(er);
-        });
-    }
-    [DIRECTORY](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        const er = this[MKDIR](String(entry.absolute), mode);
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        if (entry.mtime && !this.noMtime) {
-            try {
-                node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-        if (this[DOCHOWN](entry)) {
-            try {
-                node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
-            }
-            catch (er) { }
-        }
-        done();
-        entry.resume();
-    }
-    [MKDIR](dir, mode) {
-        try {
-            return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), {
-                uid: this.uid,
-                gid: this.gid,
-                processUid: this.processUid,
-                processGid: this.processGid,
-                umask: this.processUmask,
-                preserve: this.preservePaths,
-                unlink: this.unlink,
-                cache: this.dirCache,
-                cwd: this.cwd,
-                mode: mode,
-            });
-        }
-        catch (er) {
-            return er;
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        const ls = `${link}Sync`;
-        try {
-            node_fs_1.default[ls](linkpath, String(entry.absolute));
-            done();
-            entry.resume();
-        }
-        catch (er) {
-            return this[ONERROR](er, entry);
-        }
-    }
-}
-exports.UnpackSync = UnpackSync;
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/list.js b/node_modules/node-gyp/node_modules/tar/dist/esm/list.js
deleted file mode 100644
index f49068400b6c9..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/list.js
+++ /dev/null
@@ -1,106 +0,0 @@
-// tar -t
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { dirname, parse } from 'path';
-import { makeCommand } from './make-command.js';
-import { Parser } from './parse.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const onReadEntryFunction = (opt) => {
-    const onReadEntry = opt.onReadEntry;
-    opt.onReadEntry =
-        onReadEntry ?
-            e => {
-                onReadEntry(e);
-                e.resume();
-            }
-            : e => e.resume();
-};
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-export const filesFilter = (opt, files) => {
-    const map = new Map(files.map(f => [stripTrailingSlashes(f), true]));
-    const filter = opt.filter;
-    const mapHas = (file, r = '') => {
-        const root = r || parse(file).root || '.';
-        let ret;
-        if (file === root)
-            ret = false;
-        else {
-            const m = map.get(file);
-            if (m !== undefined) {
-                ret = m;
-            }
-            else {
-                ret = mapHas(dirname(file), root);
-            }
-        }
-        map.set(file, ret);
-        return ret;
-    };
-    opt.filter =
-        filter ?
-            (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file))
-            : file => mapHas(stripTrailingSlashes(file));
-};
-const listFileSync = (opt) => {
-    const p = new Parser(opt);
-    const file = opt.file;
-    let fd;
-    try {
-        const stat = fs.statSync(file);
-        const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-        if (stat.size < readSize) {
-            p.end(fs.readFileSync(file));
-        }
-        else {
-            let pos = 0;
-            const buf = Buffer.allocUnsafe(readSize);
-            fd = fs.openSync(file, 'r');
-            while (pos < stat.size) {
-                const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
-                pos += bytesRead;
-                p.write(buf.subarray(0, bytesRead));
-            }
-            p.end();
-        }
-    }
-    finally {
-        if (typeof fd === 'number') {
-            try {
-                fs.closeSync(fd);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-    }
-};
-const listFile = (opt, _files) => {
-    const parse = new Parser(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        parse.on('error', reject);
-        parse.on('end', resolve);
-        fs.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(parse);
-            }
-        });
-    });
-    return p;
-};
-export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => {
-    if (files?.length)
-        filesFilter(opt, files);
-    if (!opt.noResume)
-        onReadEntryFunction(opt);
-});
-//# sourceMappingURL=list.js.map
\ No newline at end of file
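
The `filesFilter` helper in the deleted list.js walks each entry path up through its parent directories, memoizing answers in a Map, so that requesting a directory also lists everything inside it. A standalone sketch of that memoized parent-walk (not tar's exported API; the helper and sample paths are illustrative):

    // sketch: is an entry path covered by the requested file list?
    // a path is covered if it, or any of its ancestors, was requested.
    const { dirname, parse } = require('node:path');

    const makeFilter = (files) => {
        const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true]));
        const covered = (file, root = parse(file).root || '.') => {
            if (file === root) {
                return false;                 // walked past every requested path
            }
            if (map.has(file)) {
                return map.get(file);         // requested directly, or memoized
            }
            const ret = covered(dirname(file), root); // inherit the parent's answer
            map.set(file, ret);               // memoize for sibling entries
            return ret;
        };
        return (entryPath) => covered(entryPath.replace(/\/+$/, ''));
    };

    const filter = makeFilter(['docs']);
    console.log(filter('docs/guide/intro.md')); // true  (inside a requested dir)
    console.log(filter('src/index.js'));        // false (not requested)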
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/mkdir.js b/node_modules/node-gyp/node_modules/tar/dist/esm/mkdir.js
deleted file mode 100644
index 13498ef0082f0..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/mkdir.js
+++ /dev/null
@@ -1,201 +0,0 @@
-import { chownr, chownrSync } from 'chownr';
-import fs from 'fs';
-import { mkdirp, mkdirpSync } from 'mkdirp';
-import path from 'node:path';
-import { CwdError } from './cwd-error.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { SymlinkError } from './symlink-error.js';
-const cGet = (cache, key) => cache.get(normalizeWindowsPath(key));
-const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val);
-const checkCwd = (dir, cb) => {
-    fs.stat(dir, (er, st) => {
-        if (er || !st.isDirectory()) {
-            er = new CwdError(dir, er?.code || 'ENOTDIR');
-        }
-        cb(er);
-    });
-};
-/**
- * Wrapper around mkdirp for tar's needs.
- *
- * The main purpose is to avoid creating directories if we know that
- * they already exist (and track which ones exist for this purpose),
- * and prevent entries from being extracted into symlinked folders,
- * if `preservePaths` is not set.
- */
-export const mkdir = (dir, opt, cb) => {
-    dir = normalizeWindowsPath(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o0700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = normalizeWindowsPath(opt.cwd);
-    const done = (er, created) => {
-        if (er) {
-            cb(er);
-        }
-        else {
-            cSet(cache, dir, true);
-            if (created && doChown) {
-                chownr(created, uid, gid, er => done(er));
-            }
-            else if (needChmod) {
-                fs.chmod(dir, mode, cb);
-            }
-            else {
-                cb();
-            }
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        return checkCwd(dir, done);
-    }
-    if (preserve) {
-        return mkdirp(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
-        done);
-    }
-    const sub = normalizeWindowsPath(path.relative(cwd, dir));
-    const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
-};
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-    if (!parts.length) {
-        return cb(null, created);
-    }
-    const p = parts.shift();
-    const part = normalizeWindowsPath(path.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-};
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
-    if (er) {
-        fs.lstat(part, (statEr, st) => {
-            if (statEr) {
-                statEr.path =
-                    statEr.path && normalizeWindowsPath(statEr.path);
-                cb(statEr);
-            }
-            else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-            }
-            else if (unlink) {
-                fs.unlink(part, er => {
-                    if (er) {
-                        return cb(er);
-                    }
-                    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
-                });
-            }
-            else if (st.isSymbolicLink()) {
-                return cb(new SymlinkError(part, part + '/' + parts.join('/')));
-            }
-            else {
-                cb(er);
-            }
-        });
-    }
-    else {
-        created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-};
-const checkCwdSync = (dir) => {
-    let ok = false;
-    let code = undefined;
-    try {
-        ok = fs.statSync(dir).isDirectory();
-    }
-    catch (er) {
-        code = er?.code;
-    }
-    finally {
-        if (!ok) {
-            throw new CwdError(dir, code ?? 'ENOTDIR');
-        }
-    }
-};
-export const mkdirSync = (dir, opt) => {
-    dir = normalizeWindowsPath(dir);
-    // if there's any overlap between mask and mode,
-    // then we'll need an explicit chmod
-    /* c8 ignore next */
-    const umask = opt.umask ?? 0o22;
-    const mode = opt.mode | 0o700;
-    const needChmod = (mode & umask) !== 0;
-    const uid = opt.uid;
-    const gid = opt.gid;
-    const doChown = typeof uid === 'number' &&
-        typeof gid === 'number' &&
-        (uid !== opt.processUid || gid !== opt.processGid);
-    const preserve = opt.preserve;
-    const unlink = opt.unlink;
-    const cache = opt.cache;
-    const cwd = normalizeWindowsPath(opt.cwd);
-    const done = (created) => {
-        cSet(cache, dir, true);
-        if (created && doChown) {
-            chownrSync(created, uid, gid);
-        }
-        if (needChmod) {
-            fs.chmodSync(dir, mode);
-        }
-    };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
-    if (dir === cwd) {
-        checkCwdSync(cwd);
-        return done();
-    }
-    if (preserve) {
-        return done(mkdirpSync(dir, mode) ?? undefined);
-    }
-    const sub = normalizeWindowsPath(path.relative(cwd, dir));
-    const parts = sub.split('/');
-    let created = undefined;
-    for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
-        part = normalizeWindowsPath(path.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
-        try {
-            fs.mkdirSync(part, mode);
-            created = created || part;
-            cSet(cache, part, true);
-        }
-        catch (er) {
-            const st = fs.lstatSync(part);
-            if (st.isDirectory()) {
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (unlink) {
-                fs.unlinkSync(part);
-                fs.mkdirSync(part, mode);
-                created = created || part;
-                cSet(cache, part, true);
-                continue;
-            }
-            else if (st.isSymbolicLink()) {
-                return new SymlinkError(part, part + '/' + parts.join('/'));
-            }
-        }
-    }
-    return done(created);
-};
-//# sourceMappingURL=mkdir.js.map
\ No newline at end of file
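
As the doc comment in the deleted mkdir.js says, the wrapper mostly exists to avoid re-creating directories the unpacker has already verified, and to refuse to extract through symlinked parents unless `preservePaths` is set. A rough sketch of just the caching half of that idea, with hypothetical names and using `recursive: true` for brevity (the real code creates one path segment at a time so it can detect symlinks and handle unlink collisions):

    // sketch: remember which directories were already created/verified so
    // repeated entries under the same folder skip the filesystem calls
    const fs = require('node:fs');
    const path = require('node:path');

    const dirCache = new Map();

    const ensureDir = (dir) => {
        dir = path.resolve(dir);
        if (dirCache.get(dir) === true) {
            return; // already created or verified during this extraction
        }
        fs.mkdirSync(dir, { recursive: true });
        dirCache.set(dir, true);
    };

    ensureDir('/tmp/unpack-demo/a/b');
    ensureDir('/tmp/unpack-demo/a/b'); // cache hit: no syscalls the second time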
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js
deleted file mode 100644
index 94e5095476d6e..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-unicode.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-export const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
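
The deleted normalize-unicode.js memoizes `String.prototype.normalize('NFD')` because the unpacker normalizes the same path strings repeatedly while comparing dirCache keys. A small sketch of the same idea and of why NFD comparison matters (the sample strings are illustrative):

    // sketch: cache NFD normalization so repeated lookups of the same path
    // string do no unicode work; precomposed and decomposed forms compare equal
    const cache = Object.create(null); // null prototype: no inherited keys
    const normalizeUnicode = (s) => {
        if (cache[s] === undefined) {
            cache[s] = s.normalize('NFD');
        }
        return cache[s];
    };

    const precomposed = 'caf\u00e9/readme.md';  // "café" as a single code point
    const decomposed  = 'cafe\u0301/readme.md'; // "café" as e + combining accent
    console.log(normalizeUnicode(precomposed) === normalizeUnicode(decomposed)); // true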
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js b/node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js
deleted file mode 100644
index 6e744cfc1a6f9..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/unpack.js
+++ /dev/null
@@ -1,888 +0,0 @@
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-import * as fsm from '@isaacs/fs-minipass';
-import assert from 'node:assert';
-import { randomBytes } from 'node:crypto';
-import fs from 'node:fs';
-import path from 'node:path';
-import { getWriteFlag } from './get-write-flag.js';
-import { mkdir, mkdirSync } from './mkdir.js';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { Parser } from './parse.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import * as wc from './winchars.js';
-import { PathReservations } from './path-reservations.js';
-const ONENTRY = Symbol('onEntry');
-const CHECKFS = Symbol('checkFs');
-const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
-const ISREUSABLE = Symbol('isReusable');
-const MAKEFS = Symbol('makeFs');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const LINK = Symbol('link');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const UNSUPPORTED = Symbol('unsupported');
-const CHECKPATH = Symbol('checkPath');
-const MKDIR = Symbol('mkdir');
-const ONERROR = Symbol('onError');
-const PENDING = Symbol('pending');
-const PEND = Symbol('pend');
-const UNPEND = Symbol('unpend');
-const ENDED = Symbol('ended');
-const MAYBECLOSE = Symbol('maybeClose');
-const SKIP = Symbol('skip');
-const DOCHOWN = Symbol('doChown');
-const UID = Symbol('uid');
-const GID = Symbol('gid');
-const CHECKED_CWD = Symbol('checkedCwd');
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-const DEFAULT_MAX_DEPTH = 1024;
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* c8 ignore start */
-const unlinkFile = (path, cb) => {
-    if (!isWindows) {
-        return fs.unlink(path, cb);
-    }
-    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
-    fs.rename(path, name, er => {
-        if (er) {
-            return cb(er);
-        }
-        fs.unlink(name, cb);
-    });
-};
-/* c8 ignore stop */
-/* c8 ignore start */
-const unlinkFileSync = (path) => {
-    if (!isWindows) {
-        return fs.unlinkSync(path);
-    }
-    const name = path + '.DELETE.' + randomBytes(16).toString('hex');
-    fs.renameSync(path, name);
-    fs.unlinkSync(name);
-};
-/* c8 ignore stop */
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
-    : b !== undefined && b === b >>> 0 ? b
-        : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
-export class Unpack extends Parser {
-    [ENDED] = false;
-    [CHECKED_CWD] = false;
-    [PENDING] = 0;
-    reservations = new PathReservations();
-    transform;
-    writable = true;
-    readable = false;
-    dirCache;
-    uid;
-    gid;
-    setOwner;
-    preserveOwner;
-    processGid;
-    processUid;
-    maxDepth;
-    forceChown;
-    win32;
-    newer;
-    keep;
-    noMtime;
-    preservePaths;
-    unlink;
-    cwd;
-    strip;
-    processUmask;
-    umask;
-    dmode;
-    fmode;
-    chmod;
-    constructor(opt = {}) {
-        opt.ondone = () => {
-            this[ENDED] = true;
-            this[MAYBECLOSE]();
-        };
-        super(opt);
-        this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
-        this.chmod = !!opt.chmod;
-        if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-            // need both or neither
-            if (typeof opt.uid !== 'number' ||
-                typeof opt.gid !== 'number') {
-                throw new TypeError('cannot set owner without number uid and gid');
-            }
-            if (opt.preserveOwner) {
-                throw new TypeError('cannot preserve owner in archive and also set owner explicitly');
-            }
-            this.uid = opt.uid;
-            this.gid = opt.gid;
-            this.setOwner = true;
-        }
-        else {
-            this.uid = undefined;
-            this.gid = undefined;
-            this.setOwner = false;
-        }
-        // default true for root
-        if (opt.preserveOwner === undefined &&
-            typeof opt.uid !== 'number') {
-            this.preserveOwner = !!(process.getuid && process.getuid() === 0);
-        }
-        else {
-            this.preserveOwner = !!opt.preserveOwner;
-        }
-        this.processUid =
-            (this.preserveOwner || this.setOwner) && process.getuid ?
-                process.getuid()
-                : undefined;
-        this.processGid =
-            (this.preserveOwner || this.setOwner) && process.getgid ?
-                process.getgid()
-                : undefined;
-        // prevent excessively deep nesting of subfolders
-        // set to `Infinity` to remove this restriction
-        this.maxDepth =
-            typeof opt.maxDepth === 'number' ?
-                opt.maxDepth
-                : DEFAULT_MAX_DEPTH;
-        // mostly just for testing, but useful in some cases.
-        // Forcibly trigger a chown on every entry, no matter what
-        this.forceChown = opt.forceChown === true;
-        this.on('entry', entry => this[ONENTRY](entry));
-    }
-    // a bad or damaged archive is a warning for Parser, but an error
-    // when extracting.  Mark those errors as unrecoverable, because
-    // the Unpack contract cannot be met.
-    warn(code, msg, data = {}) {
-        if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-            data.recoverable = false;
-        }
-        return super.warn(code, msg, data);
-    }
-    [MAYBECLOSE]() {
-        if (this[ENDED] && this[PENDING] === 0) {
-            this.emit('prefinish');
-            this.emit('finish');
-            this.emit('end');
-        }
-    }
-    [CHECKPATH](entry) {
-        const p = normalizeWindowsPath(entry.path);
-        const parts = p.split('/');
-        if (this.strip) {
-            if (parts.length < this.strip) {
-                return false;
-            }
-            if (entry.type === 'Link') {
-                const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/');
-                if (linkparts.length >= this.strip) {
-                    entry.linkpath = linkparts.slice(this.strip).join('/');
-                }
-                else {
-                    return false;
-                }
-            }
-            parts.splice(0, this.strip);
-            entry.path = parts.join('/');
-        }
-        if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-            this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-                entry,
-                path: p,
-                depth: parts.length,
-                maxDepth: this.maxDepth,
-            });
-            return false;
-        }
-        if (!this.preservePaths) {
-            if (parts.includes('..') ||
-                /* c8 ignore next */
-                (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) {
-                this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-                    entry,
-                    path: p,
-                });
-                return false;
-            }
-            // strip off the root
-            const [root, stripped] = stripAbsolutePath(p);
-            if (root) {
-                entry.path = String(stripped);
-                this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-                    entry,
-                    path: p,
-                });
-            }
-        }
-        if (path.isAbsolute(entry.path)) {
-            entry.absolute = normalizeWindowsPath(path.resolve(entry.path));
-        }
-        else {
-            entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path));
-        }
-        // if we somehow ended up with a path that escapes the cwd, and we are
-        // not in preservePaths mode, then something is fishy!  This should have
-        // been prevented above, so ignore this for coverage.
-        /* c8 ignore start - defense in depth */
-        if (!this.preservePaths &&
-            typeof entry.absolute === 'string' &&
-            entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-            entry.absolute !== this.cwd) {
-            this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-                entry,
-                path: normalizeWindowsPath(entry.path),
-                resolvedPath: entry.absolute,
-                cwd: this.cwd,
-            });
-            return false;
-        }
-        /* c8 ignore stop */
-        // an archive can set properties on the extraction directory, but it
-        // may not replace the cwd with a different kind of thing entirely.
-        if (entry.absolute === this.cwd &&
-            entry.type !== 'Directory' &&
-            entry.type !== 'GNUDumpDir') {
-            return false;
-        }
-        // only encode : chars that aren't drive letter indicators
-        if (this.win32) {
-            const { root: aRoot } = path.win32.parse(String(entry.absolute));
-            entry.absolute =
-                aRoot + wc.encode(String(entry.absolute).slice(aRoot.length));
-            const { root: pRoot } = path.win32.parse(entry.path);
-            entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length));
-        }
-        return true;
-    }
-    [ONENTRY](entry) {
-        if (!this[CHECKPATH](entry)) {
-            return entry.resume();
-        }
-        assert.equal(typeof entry.absolute, 'string');
-        switch (entry.type) {
-            case 'Directory':
-            case 'GNUDumpDir':
-                if (entry.mode) {
-                    entry.mode = entry.mode | 0o700;
-                }
-            // eslint-disable-next-line no-fallthrough
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-            case 'Link':
-            case 'SymbolicLink':
-                return this[CHECKFS](entry);
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'FIFO':
-            default:
-                return this[UNSUPPORTED](entry);
-        }
-    }
-    [ONERROR](er, entry) {
-        // Cwd has to exist, or else nothing works. That's serious.
-        // Other errors are warnings, which raise the error in strict
-        // mode, but otherwise continue on.
-        if (er.name === 'CwdError') {
-            this.emit('error', er);
-        }
-        else {
-            this.warn('TAR_ENTRY_ERROR', er, { entry });
-            this[UNPEND]();
-            entry.resume();
-        }
-    }
-    [MKDIR](dir, mode, cb) {
-        mkdir(normalizeWindowsPath(dir), {
-            uid: this.uid,
-            gid: this.gid,
-            processUid: this.processUid,
-            processGid: this.processGid,
-            umask: this.processUmask,
-            preserve: this.preservePaths,
-            unlink: this.unlink,
-            cache: this.dirCache,
-            cwd: this.cwd,
-            mode: mode,
-        }, cb);
-    }
-    [DOCHOWN](entry) {
-        // in preserve owner mode, chown if the entry doesn't match process
-        // in set owner mode, chown if setting doesn't match process
-        return (this.forceChown ||
-            (this.preserveOwner &&
-                ((typeof entry.uid === 'number' &&
-                    entry.uid !== this.processUid) ||
-                    (typeof entry.gid === 'number' &&
-                        entry.gid !== this.processGid))) ||
-            (typeof this.uid === 'number' &&
-                this.uid !== this.processUid) ||
-            (typeof this.gid === 'number' && this.gid !== this.processGid));
-    }
-    [UID](entry) {
-        return uint32(this.uid, entry.uid, this.processUid);
-    }
-    [GID](entry) {
-        return uint32(this.gid, entry.gid, this.processGid);
-    }
-    [FILE](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const stream = new fsm.WriteStream(String(entry.absolute), {
-            // slight lie, but it can be numeric flags
-            flags: getWriteFlag(entry.size),
-            mode: mode,
-            autoClose: false,
-        });
-        stream.on('error', (er) => {
-            if (stream.fd) {
-                fs.close(stream.fd, () => { });
-            }
-            // flush all the data out so that we aren't left hanging
-            // if the error wasn't actually fatal.  otherwise the parse
-            // is blocked, and we never proceed.
-            stream.write = () => true;
-            this[ONERROR](er, entry);
-            fullyDone();
-        });
-        let actions = 1;
-        const done = (er) => {
-            if (er) {
-                /* c8 ignore start - we should always have a fd by now */
-                if (stream.fd) {
-                    fs.close(stream.fd, () => { });
-                }
-                /* c8 ignore stop */
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            if (--actions === 0) {
-                if (stream.fd !== undefined) {
-                    fs.close(stream.fd, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                        }
-                        else {
-                            this[UNPEND]();
-                        }
-                        fullyDone();
-                    });
-                }
-            }
-        };
-        stream.on('finish', () => {
-            // if futimes fails, try utimes
-            // if utimes fails, fail with the original error
-            // same for fchown/chown
-            const abs = String(entry.absolute);
-            const fd = stream.fd;
-            if (typeof fd === 'number' && entry.mtime && !this.noMtime) {
-                actions++;
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                fs.futimes(fd, atime, mtime, er => er ?
-                    fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
-                    : done());
-            }
-            if (typeof fd === 'number' && this[DOCHOWN](entry)) {
-                actions++;
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                if (typeof uid === 'number' && typeof gid === 'number') {
-                    fs.fchown(fd, uid, gid, er => er ?
-                        fs.chown(abs, uid, gid, er2 => done(er2 && er))
-                        : done());
-                }
-            }
-            done();
-        });
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => {
-                this[ONERROR](er, entry);
-                fullyDone();
-            });
-            entry.pipe(tx);
-        }
-        tx.pipe(stream);
-    }
-    [DIRECTORY](entry, fullyDone) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        this[MKDIR](String(entry.absolute), mode, er => {
-            if (er) {
-                this[ONERROR](er, entry);
-                fullyDone();
-                return;
-            }
-            let actions = 1;
-            const done = () => {
-                if (--actions === 0) {
-                    fullyDone();
-                    this[UNPEND]();
-                    entry.resume();
-                }
-            };
-            if (entry.mtime && !this.noMtime) {
-                actions++;
-                fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done);
-            }
-            if (this[DOCHOWN](entry)) {
-                actions++;
-                fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done);
-            }
-            done();
-        });
-    }
-    [UNSUPPORTED](entry) {
-        entry.unsupported = true;
-        this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry });
-        entry.resume();
-    }
-    [SYMLINK](entry, done) {
-        this[LINK](entry, String(entry.linkpath), 'symlink', done);
-    }
-    [HARDLINK](entry, done) {
-        const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath)));
-        this[LINK](entry, linkpath, 'link', done);
-    }
-    [PEND]() {
-        this[PENDING]++;
-    }
-    [UNPEND]() {
-        this[PENDING]--;
-        this[MAYBECLOSE]();
-    }
-    [SKIP](entry) {
-        this[UNPEND]();
-        entry.resume();
-    }
-    // Check if we can reuse an existing filesystem entry safely and
-    // overwrite it, rather than unlinking and recreating
-    // Windows doesn't report a useful nlink, so we just never reuse entries
-    [ISREUSABLE](entry, st) {
-        return (entry.type === 'File' &&
-            !this.unlink &&
-            st.isFile() &&
-            st.nlink <= 1 &&
-            !isWindows);
-    }
-    // check if a thing is there, and if so, try to clobber it
-    [CHECKFS](entry) {
-        this[PEND]();
-        const paths = [entry.path];
-        if (entry.linkpath) {
-            paths.push(entry.linkpath);
-        }
-        this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
-    }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
-    [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
-        const done = (er) => {
-            this[PRUNECACHE](entry);
-            fullyDone(er);
-        };
-        const checkCwd = () => {
-            this[MKDIR](this.cwd, this.dmode, er => {
-                if (er) {
-                    this[ONERROR](er, entry);
-                    done();
-                    return;
-                }
-                this[CHECKED_CWD] = true;
-                start();
-            });
-        };
-        const start = () => {
-            if (entry.absolute !== this.cwd) {
-                const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
-                if (parent !== this.cwd) {
-                    return this[MKDIR](parent, this.dmode, er => {
-                        if (er) {
-                            this[ONERROR](er, entry);
-                            done();
-                            return;
-                        }
-                        afterMakeParent();
-                    });
-                }
-            }
-            afterMakeParent();
-        };
-        const afterMakeParent = () => {
-            fs.lstat(String(entry.absolute), (lstatEr, st) => {
-                if (st &&
-                    (this.keep ||
-                        /* c8 ignore next */
-                        (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-                    this[SKIP](entry);
-                    done();
-                    return;
-                }
-                if (lstatEr || this[ISREUSABLE](entry, st)) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                if (st.isDirectory()) {
-                    if (entry.type === 'Directory') {
-                        const needChmod = this.chmod &&
-                            entry.mode &&
-                            (st.mode & 0o7777) !== entry.mode;
-                        const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done);
-                        if (!needChmod) {
-                            return afterChmod();
-                        }
-                        return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod);
-                    }
-                    // Not a dir entry, have to remove it.
-                    // NB: the only way to end up with an entry that is the cwd
-                    // itself, in such a way that == does not detect, is a
-                    // tricky windows absolute path with UNC or 8.3 parts (and
-                    // preservePaths:true, or else it will have been stripped).
-                    // In that case, the user has opted out of path protections
-                    // explicitly, so if they blow away the cwd, c'est la vie.
-                    if (entry.absolute !== this.cwd) {
-                        return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done));
-                    }
-                }
-                // not a dir, and not reusable
-                // don't remove if the cwd, we want that error
-                if (entry.absolute === this.cwd) {
-                    return this[MAKEFS](null, entry, done);
-                }
-                unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? null, entry, done));
-            });
-        };
-        if (this[CHECKED_CWD]) {
-            start();
-        }
-        else {
-            checkCwd();
-        }
-    }
-    [MAKEFS](er, entry, done) {
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        switch (entry.type) {
-            case 'File':
-            case 'OldFile':
-            case 'ContiguousFile':
-                return this[FILE](entry, done);
-            case 'Link':
-                return this[HARDLINK](entry, done);
-            case 'SymbolicLink':
-                return this[SYMLINK](entry, done);
-            case 'Directory':
-            case 'GNUDumpDir':
-                return this[DIRECTORY](entry, done);
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        // XXX: get the type ('symlink' or 'junction') for windows
-        fs[link](linkpath, String(entry.absolute), er => {
-            if (er) {
-                this[ONERROR](er, entry);
-            }
-            else {
-                this[UNPEND]();
-                entry.resume();
-            }
-            done();
-        });
-    }
-}
-const callSync = (fn) => {
-    try {
-        return [null, fn()];
-    }
-    catch (er) {
-        return [er, null];
-    }
-};
-export class UnpackSync extends Unpack {
-    sync = true;
-    [MAKEFS](er, entry) {
-        return super[MAKEFS](er, entry, () => { });
-    }
-    [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
-        if (!this[CHECKED_CWD]) {
-            const er = this[MKDIR](this.cwd, this.dmode);
-            if (er) {
-                return this[ONERROR](er, entry);
-            }
-            this[CHECKED_CWD] = true;
-        }
-        // don't bother to make the parent if the current entry is the cwd,
-        // we've already checked it.
-        if (entry.absolute !== this.cwd) {
-            const parent = normalizeWindowsPath(path.dirname(String(entry.absolute)));
-            if (parent !== this.cwd) {
-                const mkParent = this[MKDIR](parent, this.dmode);
-                if (mkParent) {
-                    return this[ONERROR](mkParent, entry);
-                }
-            }
-        }
-        const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute)));
-        if (st &&
-            (this.keep ||
-                /* c8 ignore next */
-                (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) {
-            return this[SKIP](entry);
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-            return this[MAKEFS](null, entry);
-        }
-        if (st.isDirectory()) {
-            if (entry.type === 'Directory') {
-                const needChmod = this.chmod &&
-                    entry.mode &&
-                    (st.mode & 0o7777) !== entry.mode;
-                const [er] = needChmod ?
-                    callSync(() => {
-                        fs.chmodSync(String(entry.absolute), Number(entry.mode));
-                    })
-                    : [];
-                return this[MAKEFS](er, entry);
-            }
-            // not a dir entry, have to remove it
-            const [er] = callSync(() => fs.rmdirSync(String(entry.absolute)));
-            this[MAKEFS](er, entry);
-        }
-        // not a dir, and not reusable.
-        // don't remove if it's the cwd, since we want that error.
-        const [er] = entry.absolute === this.cwd ?
-            []
-            : callSync(() => unlinkFileSync(String(entry.absolute)));
-        this[MAKEFS](er, entry);
-    }
-    [FILE](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.fmode;
-        const oner = (er) => {
-            let closeError;
-            try {
-                fs.closeSync(fd);
-            }
-            catch (e) {
-                closeError = e;
-            }
-            if (er || closeError) {
-                this[ONERROR](er || closeError, entry);
-            }
-            done();
-        };
-        let fd;
-        try {
-            fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
-        }
-        catch (er) {
-            return oner(er);
-        }
-        const tx = this.transform ? this.transform(entry) || entry : entry;
-        if (tx !== entry) {
-            tx.on('error', (er) => this[ONERROR](er, entry));
-            entry.pipe(tx);
-        }
-        tx.on('data', (chunk) => {
-            try {
-                fs.writeSync(fd, chunk, 0, chunk.length);
-            }
-            catch (er) {
-                oner(er);
-            }
-        });
-        tx.on('end', () => {
-            let er = null;
-            // try both, falling futimes back to utimes
-            // if either fails, handle the first error
-            if (entry.mtime && !this.noMtime) {
-                const atime = entry.atime || new Date();
-                const mtime = entry.mtime;
-                try {
-                    fs.futimesSync(fd, atime, mtime);
-                }
-                catch (futimeser) {
-                    try {
-                        fs.utimesSync(String(entry.absolute), atime, mtime);
-                    }
-                    catch (utimeser) {
-                        er = futimeser;
-                    }
-                }
-            }
-            if (this[DOCHOWN](entry)) {
-                const uid = this[UID](entry);
-                const gid = this[GID](entry);
-                try {
-                    fs.fchownSync(fd, Number(uid), Number(gid));
-                }
-                catch (fchowner) {
-                    try {
-                        fs.chownSync(String(entry.absolute), Number(uid), Number(gid));
-                    }
-                    catch (chowner) {
-                        er = er || fchowner;
-                    }
-                }
-            }
-            oner(er);
-        });
-    }
-    [DIRECTORY](entry, done) {
-        const mode = typeof entry.mode === 'number' ?
-            entry.mode & 0o7777
-            : this.dmode;
-        const er = this[MKDIR](String(entry.absolute), mode);
-        if (er) {
-            this[ONERROR](er, entry);
-            done();
-            return;
-        }
-        if (entry.mtime && !this.noMtime) {
-            try {
-                fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime);
-                /* c8 ignore next */
-            }
-            catch (er) { }
-        }
-        if (this[DOCHOWN](entry)) {
-            try {
-                fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)));
-            }
-            catch (er) { }
-        }
-        done();
-        entry.resume();
-    }
-    [MKDIR](dir, mode) {
-        try {
-            return mkdirSync(normalizeWindowsPath(dir), {
-                uid: this.uid,
-                gid: this.gid,
-                processUid: this.processUid,
-                processGid: this.processGid,
-                umask: this.processUmask,
-                preserve: this.preservePaths,
-                unlink: this.unlink,
-                cache: this.dirCache,
-                cwd: this.cwd,
-                mode: mode,
-            });
-        }
-        catch (er) {
-            return er;
-        }
-    }
-    [LINK](entry, linkpath, link, done) {
-        const ls = `${link}Sync`;
-        try {
-            fs[ls](linkpath, String(entry.absolute));
-            done();
-            entry.resume();
-        }
-        catch (er) {
-            return this[ONERROR](er, entry);
-        }
-    }
-}
-//# sourceMappingURL=unpack.js.map
\ No newline at end of file
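
The long comment near the top of the deleted unpack.js explains the Windows workaround: unlinks there are not atomic, so deleting a path that was just overwritten can clobber the new file; tar instead renames the old file to a throwaway name and unlinks that. A minimal standalone sketch of the rename-then-delete trick:

    // sketch: on Windows, free the original path immediately by renaming,
    // then delete the renamed copy, so a freshly written replacement survives
    const fs = require('node:fs');
    const { randomBytes } = require('node:crypto');

    const unlinkFileSync = (p) => {
        if (process.platform !== 'win32') {
            return fs.unlinkSync(p); // elsewhere, plain unlink is fine
        }
        const tmp = p + '.DELETE.' + randomBytes(16).toString('hex');
        fs.renameSync(p, tmp); // the original name is free right away
        fs.unlinkSync(tmp);    // any delayed delete only affects the copy
    };

    fs.writeFileSync('demo.txt', 'old contents');
    unlinkFileSync('demo.txt');
    console.log(fs.existsSync('demo.txt')); // false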
diff --git a/node_modules/node-gyp/node_modules/tar/package.json b/node_modules/node-gyp/node_modules/tar/package.json
deleted file mode 100644
index 0283103ee9eaf..0000000000000
--- a/node_modules/node-gyp/node_modules/tar/package.json
+++ /dev/null
@@ -1,325 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter",
-  "name": "tar",
-  "description": "tar for node",
-  "version": "7.4.3",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-tar.git"
-  },
-  "scripts": {
-    "genparse": "node scripts/generate-parse-fixtures.js",
-    "snap": "tap",
-    "test": "tap",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "tshy",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "dependencies": {
-    "@isaacs/fs-minipass": "^4.0.0",
-    "chownr": "^3.0.0",
-    "minipass": "^7.1.2",
-    "minizlib": "^3.0.1",
-    "mkdirp": "^3.0.1",
-    "yallist": "^5.0.0"
-  },
-  "devDependencies": {
-    "chmodr": "^1.2.0",
-    "end-of-stream": "^1.4.3",
-    "events-to-array": "^2.0.3",
-    "mutate-fs": "^2.1.1",
-    "nock": "^13.5.4",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "license": "ISC",
-  "engines": {
-    "node": ">=18"
-  },
-  "files": [
-    "dist"
-  ],
-  "tap": {
-    "coverage-map": "map.js",
-    "timeout": 0,
-    "typecheck": true
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts",
-      "./c": "./src/create.ts",
-      "./create": "./src/create.ts",
-      "./replace": "./src/create.ts",
-      "./r": "./src/create.ts",
-      "./list": "./src/list.ts",
-      "./t": "./src/list.ts",
-      "./update": "./src/update.ts",
-      "./u": "./src/update.ts",
-      "./extract": "./src/extract.ts",
-      "./x": "./src/extract.ts",
-      "./pack": "./src/pack.ts",
-      "./unpack": "./src/unpack.ts",
-      "./parse": "./src/parse.ts",
-      "./read-entry": "./src/read-entry.ts",
-      "./write-entry": "./src/write-entry.ts",
-      "./header": "./src/header.ts",
-      "./pax": "./src/pax.ts",
-      "./types": "./src/types.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "source": "./src/index.ts",
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "source": "./src/index.ts",
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./c": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./create": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./replace": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./r": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./list": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./t": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./update": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./u": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./extract": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./x": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./pack": {
-      "import": {
-        "source": "./src/pack.ts",
-        "types": "./dist/esm/pack.d.ts",
-        "default": "./dist/esm/pack.js"
-      },
-      "require": {
-        "source": "./src/pack.ts",
-        "types": "./dist/commonjs/pack.d.ts",
-        "default": "./dist/commonjs/pack.js"
-      }
-    },
-    "./unpack": {
-      "import": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/esm/unpack.d.ts",
-        "default": "./dist/esm/unpack.js"
-      },
-      "require": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/commonjs/unpack.d.ts",
-        "default": "./dist/commonjs/unpack.js"
-      }
-    },
-    "./parse": {
-      "import": {
-        "source": "./src/parse.ts",
-        "types": "./dist/esm/parse.d.ts",
-        "default": "./dist/esm/parse.js"
-      },
-      "require": {
-        "source": "./src/parse.ts",
-        "types": "./dist/commonjs/parse.d.ts",
-        "default": "./dist/commonjs/parse.js"
-      }
-    },
-    "./read-entry": {
-      "import": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/esm/read-entry.d.ts",
-        "default": "./dist/esm/read-entry.js"
-      },
-      "require": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/commonjs/read-entry.d.ts",
-        "default": "./dist/commonjs/read-entry.js"
-      }
-    },
-    "./write-entry": {
-      "import": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/esm/write-entry.d.ts",
-        "default": "./dist/esm/write-entry.js"
-      },
-      "require": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/commonjs/write-entry.d.ts",
-        "default": "./dist/commonjs/write-entry.js"
-      }
-    },
-    "./header": {
-      "import": {
-        "source": "./src/header.ts",
-        "types": "./dist/esm/header.d.ts",
-        "default": "./dist/esm/header.js"
-      },
-      "require": {
-        "source": "./src/header.ts",
-        "types": "./dist/commonjs/header.d.ts",
-        "default": "./dist/commonjs/header.js"
-      }
-    },
-    "./pax": {
-      "import": {
-        "source": "./src/pax.ts",
-        "types": "./dist/esm/pax.d.ts",
-        "default": "./dist/esm/pax.js"
-      },
-      "require": {
-        "source": "./src/pax.ts",
-        "types": "./dist/commonjs/pax.d.ts",
-        "default": "./dist/commonjs/pax.js"
-      }
-    },
-    "./types": {
-      "import": {
-        "source": "./src/types.ts",
-        "types": "./dist/esm/types.d.ts",
-        "default": "./dist/esm/types.js"
-      },
-      "require": {
-        "source": "./src/types.ts",
-        "types": "./dist/commonjs/types.d.ts",
-        "default": "./dist/commonjs/types.js"
-      }
-    }
-  },
-  "type": "module",
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts"
-}
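
The tshy-generated `exports` map above gives every entry point parallel ESM and CommonJS builds, so the same subpath resolves to dist/esm or dist/commonjs depending on how it is loaded. A quick illustration of consuming one of the subpaths listed there (the archive filename is a placeholder, and the promise-returning form is an assumption based on the list.js shown earlier):

    // CommonJS: the "require" condition resolves tar/list to dist/commonjs/list.js
    const { list } = require('tar/list');
    // ESM would hit the "import" condition instead:
    //   import { list } from 'tar/list'

    // list the entries of an archive (path is a placeholder)
    list({
        file: 'archive.tgz',
        onReadEntry: entry => console.log(entry.path),
    }).then(() => console.log('done'));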
diff --git a/node_modules/node-gyp/node_modules/yallist/dist/esm/package.json b/node_modules/node-gyp/node_modules/yallist/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/node-gyp/node_modules/yallist/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/chownr/LICENSE.md b/node_modules/pacote/node_modules/chownr/LICENSE.md
deleted file mode 100644
index 881248b6d7f0c..0000000000000
--- a/node_modules/pacote/node_modules/chownr/LICENSE.md
+++ /dev/null
@@ -1,63 +0,0 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/pacote/node_modules/chownr/dist/commonjs/index.js b/node_modules/pacote/node_modules/chownr/dist/commonjs/index.js
deleted file mode 100644
index 6a7b68d5eac26..0000000000000
--- a/node_modules/pacote/node_modules/chownr/dist/commonjs/index.js
+++ /dev/null
@@ -1,93 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.chownrSync = exports.chownr = void 0;
-const node_fs_1 = __importDefault(require("node:fs"));
-const node_path_1 = __importDefault(require("node:path"));
-const lchownSync = (path, uid, gid) => {
-    try {
-        return node_fs_1.default.lchownSync(path, uid, gid);
-    }
-    catch (er) {
-        if (er?.code !== 'ENOENT')
-            throw er;
-    }
-};
-const chown = (cpath, uid, gid, cb) => {
-    node_fs_1.default.lchown(cpath, uid, gid, er => {
-        // Skip ENOENT error
-        cb(er && er?.code !== 'ENOENT' ? er : null);
-    });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
-    if (child.isDirectory()) {
-        (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => {
-            if (er)
-                return cb(er);
-            const cpath = node_path_1.default.resolve(p, child.name);
-            chown(cpath, uid, gid, cb);
-        });
-    }
-    else {
-        const cpath = node_path_1.default.resolve(p, child.name);
-        chown(cpath, uid, gid, cb);
-    }
-};
-const chownr = (p, uid, gid, cb) => {
-    node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => {
-        // any error other than ENOTDIR or ENOTSUP means it's not readable,
-        // or doesn't exist.  give up.
-        if (er) {
-            if (er.code === 'ENOENT')
-                return cb();
-            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-                return cb(er);
-        }
-        if (er || !children.length)
-            return chown(p, uid, gid, cb);
-        let len = children.length;
-        let errState = null;
-        const then = (er) => {
-            /* c8 ignore start */
-            if (errState)
-                return;
-            /* c8 ignore stop */
-            if (er)
-                return cb((errState = er));
-            if (--len === 0)
-                return chown(p, uid, gid, cb);
-        };
-        for (const child of children) {
-            chownrKid(p, child, uid, gid, then);
-        }
-    });
-};
-exports.chownr = chownr;
-const chownrKidSync = (p, child, uid, gid) => {
-    if (child.isDirectory())
-        (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid);
-    lchownSync(node_path_1.default.resolve(p, child.name), uid, gid);
-};
-const chownrSync = (p, uid, gid) => {
-    let children;
-    try {
-        children = node_fs_1.default.readdirSync(p, { withFileTypes: true });
-    }
-    catch (er) {
-        const e = er;
-        if (e?.code === 'ENOENT')
-            return;
-        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
-            return lchownSync(p, uid, gid);
-        else
-            throw e;
-    }
-    for (const child of children) {
-        chownrKidSync(p, child, uid, gid);
-    }
-    return lchownSync(p, uid, gid);
-};
-exports.chownrSync = chownrSync;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
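
The deleted chownr build recursively applies `lchown` to a tree, chowning children before the directory itself and treating ENOENT as success so files removed mid-walk don't fail the whole operation. A brief usage sketch (path and ids are placeholders; changing ownership generally requires root):

    // sketch: recursively hand ./build over to uid 1000 / gid 1000
    const { chownr, chownrSync } = require('chownr');

    chownr('./build', 1000, 1000, (er) => {
        if (er) {
            throw er;
        }
        console.log('ownership updated');
    });

    // or synchronously:
    // chownrSync('./build', 1000, 1000);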
diff --git a/node_modules/pacote/node_modules/chownr/dist/commonjs/package.json b/node_modules/pacote/node_modules/chownr/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/pacote/node_modules/chownr/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/pacote/node_modules/chownr/dist/esm/index.js b/node_modules/pacote/node_modules/chownr/dist/esm/index.js
deleted file mode 100644
index 5c2815297a67c..0000000000000
--- a/node_modules/pacote/node_modules/chownr/dist/esm/index.js
+++ /dev/null
@@ -1,85 +0,0 @@
-import fs from 'node:fs';
-import path from 'node:path';
-const lchownSync = (path, uid, gid) => {
-    try {
-        return fs.lchownSync(path, uid, gid);
-    }
-    catch (er) {
-        if (er?.code !== 'ENOENT')
-            throw er;
-    }
-};
-const chown = (cpath, uid, gid, cb) => {
-    fs.lchown(cpath, uid, gid, er => {
-        // Skip ENOENT error
-        cb(er && er?.code !== 'ENOENT' ? er : null);
-    });
-};
-const chownrKid = (p, child, uid, gid, cb) => {
-    if (child.isDirectory()) {
-        chownr(path.resolve(p, child.name), uid, gid, (er) => {
-            if (er)
-                return cb(er);
-            const cpath = path.resolve(p, child.name);
-            chown(cpath, uid, gid, cb);
-        });
-    }
-    else {
-        const cpath = path.resolve(p, child.name);
-        chown(cpath, uid, gid, cb);
-    }
-};
-export const chownr = (p, uid, gid, cb) => {
-    fs.readdir(p, { withFileTypes: true }, (er, children) => {
-        // any error other than ENOTDIR or ENOTSUP means it's not readable,
-        // or doesn't exist.  give up.
-        if (er) {
-            if (er.code === 'ENOENT')
-                return cb();
-            else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
-                return cb(er);
-        }
-        if (er || !children.length)
-            return chown(p, uid, gid, cb);
-        let len = children.length;
-        let errState = null;
-        const then = (er) => {
-            /* c8 ignore start */
-            if (errState)
-                return;
-            /* c8 ignore stop */
-            if (er)
-                return cb((errState = er));
-            if (--len === 0)
-                return chown(p, uid, gid, cb);
-        };
-        for (const child of children) {
-            chownrKid(p, child, uid, gid, then);
-        }
-    });
-};
-const chownrKidSync = (p, child, uid, gid) => {
-    if (child.isDirectory())
-        chownrSync(path.resolve(p, child.name), uid, gid);
-    lchownSync(path.resolve(p, child.name), uid, gid);
-};
-export const chownrSync = (p, uid, gid) => {
-    let children;
-    try {
-        children = fs.readdirSync(p, { withFileTypes: true });
-    }
-    catch (er) {
-        const e = er;
-        if (e?.code === 'ENOENT')
-            return;
-        else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP')
-            return lchownSync(p, uid, gid);
-        else
-            throw e;
-    }
-    for (const child of children) {
-        chownrKidSync(p, child, uid, gid);
-    }
-    return lchownSync(p, uid, gid);
-};
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/chownr/dist/esm/package.json b/node_modules/pacote/node_modules/chownr/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/pacote/node_modules/chownr/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/chownr/package.json b/node_modules/pacote/node_modules/chownr/package.json
deleted file mode 100644
index 09aa6b2e2e576..0000000000000
--- a/node_modules/pacote/node_modules/chownr/package.json
+++ /dev/null
@@ -1,69 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "name": "chownr",
-  "description": "like `chown -R`",
-  "version": "3.0.0",
-  "repository": {
-    "type": "git",
-    "url": "git://github.com/isaacs/chownr.git"
-  },
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "@types/node": "^20.12.5",
-    "mkdirp": "^3.0.1",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.12"
-  },
-  "scripts": {
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --loglevel warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "license": "BlueOak-1.0.0",
-  "engines": {
-    "node": ">=18"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 75,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/pacote/node_modules/tar/LICENSE b/node_modules/pacote/node_modules/tar/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/pacote/node_modules/tar/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/create.js b/node_modules/pacote/node_modules/tar/dist/commonjs/create.js
deleted file mode 100644
index 3190afc48318f..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/create.js
+++ /dev/null
@@ -1,83 +0,0 @@
-"use strict";
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.create = void 0;
-const fs_minipass_1 = require("@isaacs/fs-minipass");
-const node_path_1 = __importDefault(require("node:path"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const pack_js_1 = require("./pack.js");
-const createFileSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    const stream = new fs_minipass_1.WriteStreamSync(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
-    const p = new pack_js_1.Pack(opt);
-    const stream = new fs_minipass_1.WriteStream(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    const promise = new Promise((res, rej) => {
-        stream.on('error', rej);
-        stream.on('close', res);
-        p.on('error', rej);
-    });
-    addFilesAsync(p, files);
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            (0, list_js_1.list)({
-                file: node_path_1.default.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await (0, list_js_1.list)({
-                file: node_path_1.default.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => {
-                    p.add(entry);
-                },
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-const createSync = (opt, files) => {
-    const p = new pack_js_1.PackSync(opt);
-    addFilesSync(p, files);
-    return p;
-};
-const createAsync = (opt, files) => {
-    const p = new pack_js_1.Pack(opt);
-    addFilesAsync(p, files);
-    return p;
-};
-exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
-    if (!files?.length) {
-        throw new TypeError('no paths specified to add to archive');
-    }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js
deleted file mode 100644
index d703a7772be3a..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/cwd-error.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.CwdError = void 0;
-class CwdError extends Error {
-    path;
-    code;
-    syscall = 'chdir';
-    constructor(path, code) {
-        super(`${code}: Cannot cd into '${path}'`);
-        this.path = path;
-        this.code = code;
-    }
-    get name() {
-        return 'CwdError';
-    }
-}
-exports.CwdError = CwdError;
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/extract.js b/node_modules/pacote/node_modules/tar/dist/commonjs/extract.js
deleted file mode 100644
index f848cbcbf779e..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/extract.js
+++ /dev/null
@@ -1,78 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.extract = void 0;
-// tar -x
-const fsm = __importStar(require("@isaacs/fs-minipass"));
-const node_fs_1 = __importDefault(require("node:fs"));
-const list_js_1 = require("./list.js");
-const make_command_js_1 = require("./make-command.js");
-const unpack_js_1 = require("./unpack.js");
-const extractFileSync = (opt) => {
-    const u = new unpack_js_1.UnpackSync(opt);
-    const file = opt.file;
-    const stat = node_fs_1.default.statSync(file);
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const stream = new fsm.ReadStreamSync(file, {
-        readSize: readSize,
-        size: stat.size,
-    });
-    stream.pipe(u);
-};
-const extractFile = (opt, _) => {
-    const u = new unpack_js_1.Unpack(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        u.on('error', reject);
-        u.on('close', resolve);
-        // This trades a zero-byte read() syscall for a stat
-        // However, it will usually result in less memory allocation
-        node_fs_1.default.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(u);
-            }
-        });
-    });
-    return p;
-};
-exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => {
-    if (files?.length)
-        (0, list_js_1.filesFilter)(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js
deleted file mode 100644
index 94add8f6b2231..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/get-write-flag.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.getWriteFlag = void 0;
-const fs_1 = __importDefault(require("fs"));
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
-    fs_1.default.constants.UV_FS_O_FILEMAP ||
-    0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-exports.getWriteFlag = !fMapEnabled ?
-    () => 'w'
-    : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/header.js b/node_modules/pacote/node_modules/tar/dist/commonjs/header.js
deleted file mode 100644
index b3a48037b849a..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/header.js
+++ /dev/null
@@ -1,306 +0,0 @@
-"use strict";
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Header = void 0;
-const node_path_1 = require("node:path");
-const large = __importStar(require("./large-numbers.js"));
-const types = __importStar(require("./types.js"));
-class Header {
-    cksumValid = false;
-    needPax = false;
-    nullBlock = false;
-    block;
-    path;
-    mode;
-    uid;
-    gid;
-    size;
-    cksum;
-    #type = 'Unsupported';
-    linkpath;
-    uname;
-    gname;
-    devmaj = 0;
-    devmin = 0;
-    atime;
-    ctime;
-    mtime;
-    charset;
-    comment;
-    constructor(data, off = 0, ex, gex) {
-        if (Buffer.isBuffer(data)) {
-            this.decode(data, off || 0, ex, gex);
-        }
-        else if (data) {
-            this.#slurp(data);
-        }
-    }
-    decode(buf, off, ex, gex) {
-        if (!off) {
-            off = 0;
-        }
-        if (!buf || !(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
-        this.cksum = decNumber(buf, off + 148, 12);
-        // if we have extended or global extended headers, apply them now
-        // See https://github.com/npm/node-tar/pull/187
-        // Apply global before local, so it overrides
-        if (gex)
-            this.#slurp(gex, true);
-        if (ex)
-            this.#slurp(ex);
-        // old tar versions marked dirs as a file with a trailing /
-        const t = decString(buf, off + 156, 1);
-        if (types.isCode(t)) {
-            this.#type = t || '0';
-        }
-        if (this.#type === '0' && this.path.slice(-1) === '/') {
-            this.#type = '5';
-        }
-        // tar implementations sometimes incorrectly put the stat(dir).size
-        // as the size in the tarball, even though Directory entries are
-        // not able to have any body at all.  In the very rare chance that
-        // it actually DOES have a body, we weren't going to do anything with
-        // it anyway, and it'll just be a warning about an invalid header.
-        if (this.#type === '5') {
-            this.size = 0;
-        }
-        this.linkpath = decString(buf, off + 157, 100);
-        if (buf.subarray(off + 257, off + 265).toString() ===
-            'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
-            /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
-            /* c8 ignore stop */
-            if (buf[off + 475] !== 0) {
-                // definitely a prefix, definitely >130 chars.
-                const prefix = decString(buf, off + 345, 155);
-                this.path = prefix + '/' + this.path;
-            }
-            else {
-                const prefix = decString(buf, off + 345, 130);
-                if (prefix) {
-                    this.path = prefix + '/' + this.path;
-                }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
-            }
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksumValid = sum === this.cksum;
-        if (this.cksum === undefined && sum === 8 * 0x20) {
-            this.nullBlock = true;
-        }
-    }
-    #slurp(ex, gex = false) {
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex) ||
-                (k === 'linkpath' && gex) ||
-                k === 'global');
-        })));
-    }
-    encode(buf, off = 0) {
-        if (!buf) {
-            buf = this.block = Buffer.alloc(512);
-        }
-        if (this.#type === 'Unsupported') {
-            this.#type = '0';
-        }
-        if (!(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        const prefixSize = this.ctime || this.atime ? 130 : 155;
-        const split = splitPrefix(this.path || '', prefixSize);
-        const path = split[0];
-        const prefix = split[1];
-        this.needPax = !!split[2];
-        this.needPax = encString(buf, off, 100, path) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 124, 12, this.size) || this.needPax;
-        this.needPax =
-            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
-        buf[off + 156] = this.#type.charCodeAt(0);
-        this.needPax =
-            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
-        buf.write('ustar\u000000', off + 257, 8);
-        this.needPax =
-            encString(buf, off + 265, 32, this.uname) || this.needPax;
-        this.needPax =
-            encString(buf, off + 297, 32, this.gname) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
-        this.needPax =
-            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
-        if (buf[off + 475] !== 0) {
-            this.needPax =
-                encString(buf, off + 345, 155, prefix) || this.needPax;
-        }
-        else {
-            this.needPax =
-                encString(buf, off + 345, 130, prefix) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 476, 12, this.atime) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksum = sum;
-        encNumber(buf, off + 148, 8, this.cksum);
-        this.cksumValid = true;
-        return this.needPax;
-    }
-    get type() {
-        return (this.#type === 'Unsupported' ?
-            this.#type
-            : types.name.get(this.#type));
-    }
-    get typeKey() {
-        return this.#type;
-    }
-    set type(type) {
-        const c = String(types.code.get(type));
-        if (types.isCode(c) || c === 'Unsupported') {
-            this.#type = c;
-        }
-        else if (types.isCode(type)) {
-            this.#type = type;
-        }
-        else {
-            throw new TypeError('invalid entry type: ' + type);
-        }
-    }
-}
-exports.Header = Header;
-const splitPrefix = (p, prefixSize) => {
-    const pathSize = 100;
-    let pp = p;
-    let prefix = '';
-    let ret = undefined;
-    const root = node_path_1.posix.parse(p).root || '.';
-    if (Buffer.byteLength(pp) < pathSize) {
-        ret = [pp, prefix, false];
-    }
-    else {
-        // first set prefix to the dir, and path to the base
-        prefix = node_path_1.posix.dirname(pp);
-        pp = node_path_1.posix.basename(pp);
-        do {
-            if (Buffer.byteLength(pp) <= pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // both fit!
-                ret = [pp, prefix, false];
-            }
-            else if (Buffer.byteLength(pp) > pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // prefix fits in prefix, but path doesn't fit in path
-                ret = [pp.slice(0, pathSize - 1), prefix, true];
-            }
-            else {
-                // make path take a bit from prefix
-                pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp);
-                prefix = node_path_1.posix.dirname(prefix);
-            }
-        } while (prefix !== root && ret === undefined);
-        // at this point, found no resolution, just truncate
-        if (!ret) {
-            ret = [p.slice(0, pathSize - 1), '', true];
-        }
-    }
-    return ret;
-};
-const decString = (buf, off, size) => buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
-    large.parse(buf.subarray(off, off + size))
-    : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*$/, '')
-    .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-    12: 0o77777777777,
-    8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
-    : num > MAXNUM[size] || num < 0 ?
-        (large.encode(num, buf.subarray(off, off + size)), true)
-        : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
-    str
-    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
-    str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/index.js b/node_modules/pacote/node_modules/tar/dist/commonjs/index.js
deleted file mode 100644
index e93ed5ad54aa6..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/index.js
+++ /dev/null
@@ -1,54 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __exportStar = (this && this.__exportStar) || function(m, exports) {
-    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
-};
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0;
-__exportStar(require("./create.js"), exports);
-var create_js_1 = require("./create.js");
-Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } });
-__exportStar(require("./extract.js"), exports);
-var extract_js_1 = require("./extract.js");
-Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } });
-__exportStar(require("./header.js"), exports);
-__exportStar(require("./list.js"), exports);
-var list_js_1 = require("./list.js");
-Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } });
-// classes
-__exportStar(require("./pack.js"), exports);
-__exportStar(require("./parse.js"), exports);
-__exportStar(require("./pax.js"), exports);
-__exportStar(require("./read-entry.js"), exports);
-__exportStar(require("./replace.js"), exports);
-var replace_js_1 = require("./replace.js");
-Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } });
-exports.types = __importStar(require("./types.js"));
-__exportStar(require("./unpack.js"), exports);
-__exportStar(require("./update.js"), exports);
-var update_js_1 = require("./update.js");
-Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } });
-__exportStar(require("./write-entry.js"), exports);
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js
deleted file mode 100644
index 5b07aa7f71b48..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/large-numbers.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.parse = exports.encode = void 0;
-const encode = (num, buf) => {
-    if (!Number.isSafeInteger(num)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('cannot encode number outside of javascript safe integer range');
-    }
-    else if (num < 0) {
-        encodeNegative(num, buf);
-    }
-    else {
-        encodePositive(num, buf);
-    }
-    return buf;
-};
-exports.encode = encode;
-const encodePositive = (num, buf) => {
-    buf[0] = 0x80;
-    for (var i = buf.length; i > 1; i--) {
-        buf[i - 1] = num & 0xff;
-        num = Math.floor(num / 0x100);
-    }
-};
-const encodeNegative = (num, buf) => {
-    buf[0] = 0xff;
-    var flipped = false;
-    num = num * -1;
-    for (var i = buf.length; i > 1; i--) {
-        var byte = num & 0xff;
-        num = Math.floor(num / 0x100);
-        if (flipped) {
-            buf[i - 1] = onesComp(byte);
-        }
-        else if (byte === 0) {
-            buf[i - 1] = 0;
-        }
-        else {
-            flipped = true;
-            buf[i - 1] = twosComp(byte);
-        }
-    }
-};
-const parse = (buf) => {
-    const pre = buf[0];
-    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
-        : pre === 0xff ? twos(buf)
-            : null;
-    if (value === null) {
-        throw Error('invalid base256 encoding');
-    }
-    if (!Number.isSafeInteger(value)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('parsed number outside of javascript safe integer range');
-    }
-    return value;
-};
-exports.parse = parse;
-const twos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    var flipped = false;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        var f;
-        if (flipped) {
-            f = onesComp(byte);
-        }
-        else if (byte === 0) {
-            f = byte;
-        }
-        else {
-            flipped = true;
-            f = twosComp(byte);
-        }
-        if (f !== 0) {
-            sum -= f * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const pos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        if (byte !== 0) {
-            sum += byte * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js b/node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js
deleted file mode 100644
index 1814319e78bc6..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/make-command.js
+++ /dev/null
@@ -1,61 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.makeCommand = void 0;
-const options_js_1 = require("./options.js");
-const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
-    return Object.assign((opt_ = [], entries, cb) => {
-        if (Array.isArray(opt_)) {
-            entries = opt_;
-            opt_ = {};
-        }
-        if (typeof entries === 'function') {
-            cb = entries;
-            entries = undefined;
-        }
-        if (!entries) {
-            entries = [];
-        }
-        else {
-            entries = Array.from(entries);
-        }
-        const opt = (0, options_js_1.dealias)(opt_);
-        validate?.(opt, entries);
-        if ((0, options_js_1.isSyncFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncFile(opt, entries);
-        }
-        else if ((0, options_js_1.isAsyncFile)(opt)) {
-            const p = asyncFile(opt, entries);
-            // weirdness to make TS happy
-            const c = cb ? cb : undefined;
-            return c ? p.then(() => c(), c) : p;
-        }
-        else if ((0, options_js_1.isSyncNoFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncNoFile(opt, entries);
-        }
-        else if ((0, options_js_1.isAsyncNoFile)(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback only supported with file option');
-            }
-            return asyncNoFile(opt, entries);
-            /* c8 ignore start */
-        }
-        else {
-            throw new Error('impossible options??');
-        }
-        /* c8 ignore stop */
-    }, {
-        syncFile,
-        asyncFile,
-        syncNoFile,
-        asyncNoFile,
-        validate,
-    });
-};
-exports.makeCommand = makeCommand;
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js
deleted file mode 100644
index 49dd727961d29..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/mode-fix.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.modeFix = void 0;
-const modeFix = (mode, isDir, portable) => {
-    mode &= 0o7777;
-    // in portable mode, use the minimum reasonable umask
-    // if this system creates files with 0o664 by default
-    // (as some linux distros do), then we'll write the
-    // archive with 0o644 instead.  Also, don't ever create
-    // a file that is not readable/writable by the owner.
-    if (portable) {
-        mode = (mode | 0o600) & ~0o22;
-    }
-    // if dirs are readable, then they should be listable
-    if (isDir) {
-        if (mode & 0o400) {
-            mode |= 0o100;
-        }
-        if (mode & 0o40) {
-            mode |= 0o10;
-        }
-        if (mode & 0o4) {
-            mode |= 0o1;
-        }
-    }
-    return mode;
-};
-exports.modeFix = modeFix;
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js
deleted file mode 100644
index b0c7aaa9f2d17..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-windows-path.js
+++ /dev/null
@@ -1,12 +0,0 @@
-"use strict";
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.normalizeWindowsPath = void 0;
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-exports.normalizeWindowsPath = platform !== 'win32' ?
-    (p) => p
-    : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/options.js b/node_modules/pacote/node_modules/tar/dist/commonjs/options.js
deleted file mode 100644
index 4cd06505bc72b..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/options.js
+++ /dev/null
@@ -1,66 +0,0 @@
-"use strict";
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0;
-const argmap = new Map([
-    ['C', 'cwd'],
-    ['f', 'file'],
-    ['z', 'gzip'],
-    ['P', 'preservePaths'],
-    ['U', 'unlink'],
-    ['strip-components', 'strip'],
-    ['stripComponents', 'strip'],
-    ['keep-newer', 'newer'],
-    ['keepNewer', 'newer'],
-    ['keep-newer-files', 'newer'],
-    ['keepNewerFiles', 'newer'],
-    ['k', 'keep'],
-    ['keep-existing', 'keep'],
-    ['keepExisting', 'keep'],
-    ['m', 'noMtime'],
-    ['no-mtime', 'noMtime'],
-    ['p', 'preserveOwner'],
-    ['L', 'follow'],
-    ['h', 'follow'],
-    ['onentry', 'onReadEntry'],
-]);
-const isSyncFile = (o) => !!o.sync && !!o.file;
-exports.isSyncFile = isSyncFile;
-const isAsyncFile = (o) => !o.sync && !!o.file;
-exports.isAsyncFile = isAsyncFile;
-const isSyncNoFile = (o) => !!o.sync && !o.file;
-exports.isSyncNoFile = isSyncNoFile;
-const isAsyncNoFile = (o) => !o.sync && !o.file;
-exports.isAsyncNoFile = isAsyncNoFile;
-const isSync = (o) => !!o.sync;
-exports.isSync = isSync;
-const isAsync = (o) => !o.sync;
-exports.isAsync = isAsync;
-const isFile = (o) => !!o.file;
-exports.isFile = isFile;
-const isNoFile = (o) => !o.file;
-exports.isNoFile = isNoFile;
-const dealiasKey = (k) => {
-    const d = argmap.get(k);
-    if (d)
-        return d;
-    return k;
-};
-const dealias = (opt = {}) => {
-    if (!opt)
-        return {};
-    const result = {};
-    for (const [key, v] of Object.entries(opt)) {
-        // TS doesn't know that aliases are going to always be the same type
-        const k = dealiasKey(key);
-        result[k] = v;
-    }
-    // affordance for deprecated noChmod -> chmod
-    if (result.chmod === undefined && result.noChmod === false) {
-        result.chmod = true;
-    }
-    delete result.noChmod;
-    return result;
-};
-exports.dealias = dealias;
-//# sourceMappingURL=options.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/pack.js b/node_modules/pacote/node_modules/tar/dist/commonjs/pack.js
deleted file mode 100644
index 303e93063c2db..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/pack.js
+++ /dev/null
@@ -1,477 +0,0 @@
-"use strict";
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PackSync = exports.Pack = exports.PackJob = void 0;
-const fs_1 = __importDefault(require("fs"));
-const write_entry_js_1 = require("./write-entry.js");
-class PackJob {
-    path;
-    absolute;
-    entry;
-    stat;
-    readdir;
-    pending = false;
-    ignore = false;
-    piped = false;
-    constructor(path, absolute) {
-        this.path = path || './';
-        this.absolute = absolute;
-    }
-}
-exports.PackJob = PackJob;
-const minipass_1 = require("minipass");
-const zlib = __importStar(require("minizlib"));
-const yallist_1 = require("yallist");
-const read_entry_js_1 = require("./read-entry.js");
-const warn_method_js_1 = require("./warn-method.js");
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-const path_1 = __importDefault(require("path"));
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class Pack extends minipass_1.Minipass {
-    opt;
-    cwd;
-    maxReadSize;
-    preservePaths;
-    strict;
-    noPax;
-    prefix;
-    linkCache;
-    statCache;
-    file;
-    portable;
-    zip;
-    readdirCache;
-    noDirRecurse;
-    follow;
-    noMtime;
-    mtime;
-    filter;
-    jobs;
-    [WRITEENTRYCLASS];
-    onWriteEntry;
-    [QUEUE];
-    [JOBS] = 0;
-    [PROCESSING] = false;
-    [ENDED] = false;
-    constructor(opt = {}) {
-        //@ts-ignore
-        super();
-        this.opt = opt;
-        this.file = opt.file || '';
-        this.cwd = opt.cwd || process.cwd();
-        this.maxReadSize = opt.maxReadSize;
-        this.preservePaths = !!opt.preservePaths;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || '');
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.readdirCache = opt.readdirCache || new Map();
-        this.onWriteEntry = opt.onWriteEntry;
-        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
-            }
-            if (opt.gzip) {
-                if (typeof opt.gzip !== 'object') {
-                    opt.gzip = {};
-                }
-                if (this.portable) {
-                    opt.gzip.portable = true;
-                }
-                this.zip = new zlib.Gzip(opt.gzip);
-            }
-            if (opt.brotli) {
-                if (typeof opt.brotli !== 'object') {
-                    opt.brotli = {};
-                }
-                this.zip = new zlib.BrotliCompress(opt.brotli);
-            }
-            /* c8 ignore next */
-            if (!this.zip)
-                throw new Error('impossible');
-            const zip = this.zip;
-            zip.on('data', chunk => super.write(chunk));
-            zip.on('end', () => super.end());
-            zip.on('drain', () => this[ONDRAIN]());
-            this.on('resume', () => zip.resume());
-        }
-        else {
-            this.on('drain', this[ONDRAIN]);
-        }
-        this.noDirRecurse = !!opt.noDirRecurse;
-        this.follow = !!opt.follow;
-        this.noMtime = !!opt.noMtime;
-        if (opt.mtime)
-            this.mtime = opt.mtime;
-        this.filter =
-            typeof opt.filter === 'function' ? opt.filter : () => true;
-        this[QUEUE] = new yallist_1.Yallist();
-        this[JOBS] = 0;
-        this.jobs = Number(opt.jobs) || 4;
-        this[PROCESSING] = false;
-        this[ENDED] = false;
-    }
-    [WRITE](chunk) {
-        return super.write(chunk);
-    }
-    add(path) {
-        this.write(path);
-        return this;
-    }
-    end(path, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof path === 'function') {
-            cb = path;
-            path = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (path) {
-            this.add(path);
-        }
-        this[ENDED] = true;
-        this[PROCESS]();
-        /* c8 ignore next */
-        if (cb)
-            cb();
-        return this;
-    }
-    write(path) {
-        if (this[ENDED]) {
-            throw new Error('write after end');
-        }
-        if (path instanceof read_entry_js_1.ReadEntry) {
-            this[ADDTARENTRY](path);
-        }
-        else {
-            this[ADDFSENTRY](path);
-        }
-        return this.flowing;
-    }
-    [ADDTARENTRY](p) {
-        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path));
-        // in this case, we don't have to wait for the stat
-        if (!this.filter(p.path, p)) {
-            p.resume();
-        }
-        else {
-            const job = new PackJob(p.path, absolute);
-            job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job));
-            job.entry.on('end', () => this[JOBDONE](job));
-            this[JOBS] += 1;
-            this[QUEUE].push(job);
-        }
-        this[PROCESS]();
-    }
-    [ADDFSENTRY](p) {
-        const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p));
-        this[QUEUE].push(new PackJob(p, absolute));
-        this[PROCESS]();
-    }
-    [STAT](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        const stat = this.follow ? 'stat' : 'lstat';
-        fs_1.default[stat](job.absolute, (er, stat) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                this.emit('error', er);
-            }
-            else {
-                this[ONSTAT](job, stat);
-            }
-        });
-    }
-    [ONSTAT](job, stat) {
-        this.statCache.set(job.absolute, stat);
-        job.stat = stat;
-        // now we have the stat, we can filter it.
-        if (!this.filter(job.path, stat)) {
-            job.ignore = true;
-        }
-        this[PROCESS]();
-    }
-    [READDIR](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        fs_1.default.readdir(job.absolute, (er, entries) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADDIR](job, entries);
-        });
-    }
-    [ONREADDIR](job, entries) {
-        this.readdirCache.set(job.absolute, entries);
-        job.readdir = entries;
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        if (this[PROCESSING]) {
-            return;
-        }
-        this[PROCESSING] = true;
-        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
-            this[PROCESSJOB](w.value);
-            if (w.value.ignore) {
-                const p = w.next;
-                this[QUEUE].removeNode(w);
-                w.next = p;
-            }
-        }
-        this[PROCESSING] = false;
-        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-            if (this.zip) {
-                this.zip.end(EOF);
-            }
-            else {
-                super.write(EOF);
-                super.end();
-            }
-        }
-    }
-    get [CURRENT]() {
-        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
-    }
-    [JOBDONE](_job) {
-        this[QUEUE].shift();
-        this[JOBS] -= 1;
-        this[PROCESS]();
-    }
-    [PROCESSJOB](job) {
-        if (job.pending) {
-            return;
-        }
-        if (job.entry) {
-            if (job === this[CURRENT] && !job.piped) {
-                this[PIPE](job);
-            }
-            return;
-        }
-        if (!job.stat) {
-            const sc = this.statCache.get(job.absolute);
-            if (sc) {
-                this[ONSTAT](job, sc);
-            }
-            else {
-                this[STAT](job);
-            }
-        }
-        if (!job.stat) {
-            return;
-        }
-        // filtered out!
-        if (job.ignore) {
-            return;
-        }
-        if (!this.noDirRecurse &&
-            job.stat.isDirectory() &&
-            !job.readdir) {
-            const rc = this.readdirCache.get(job.absolute);
-            if (rc) {
-                this[ONREADDIR](job, rc);
-            }
-            else {
-                this[READDIR](job);
-            }
-            if (!job.readdir) {
-                return;
-            }
-        }
-        // we know it doesn't have an entry, because that got checked above
-        job.entry = this[ENTRY](job);
-        if (!job.entry) {
-            job.ignore = true;
-            return;
-        }
-        if (job === this[CURRENT] && !job.piped) {
-            this[PIPE](job);
-        }
-    }
-    [ENTRYOPT](job) {
-        return {
-            onwarn: (code, msg, data) => this.warn(code, msg, data),
-            noPax: this.noPax,
-            cwd: this.cwd,
-            absolute: job.absolute,
-            preservePaths: this.preservePaths,
-            maxReadSize: this.maxReadSize,
-            strict: this.strict,
-            portable: this.portable,
-            linkCache: this.linkCache,
-            statCache: this.statCache,
-            noMtime: this.noMtime,
-            mtime: this.mtime,
-            prefix: this.prefix,
-            onWriteEntry: this.onWriteEntry,
-        };
-    }
-    [ENTRY](job) {
-        this[JOBS] += 1;
-        try {
-            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
-            return e
-                .on('end', () => this[JOBDONE](job))
-                .on('error', er => this.emit('error', er));
-        }
-        catch (er) {
-            this.emit('error', er);
-        }
-    }
-    [ONDRAIN]() {
-        if (this[CURRENT] && this[CURRENT].entry) {
-            this[CURRENT].entry.resume();
-        }
-    }
-    // like .pipe() but using super, because our write() is special
-    [PIPE](job) {
-        job.piped = true;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        const source = job.entry;
-        const zip = this.zip;
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                if (!zip.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                if (!super.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-    }
-    pause() {
-        if (this.zip) {
-            this.zip.pause();
-        }
-        return super.pause();
-    }
-    warn(code, message, data = {}) {
-        (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-}
-exports.Pack = Pack;
-class PackSync extends Pack {
-    sync = true;
-    constructor(opt) {
-        super(opt);
-        this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync;
-    }
-    // pause/resume are no-ops in sync streams.
-    pause() { }
-    resume() { }
-    [STAT](job) {
-        const stat = this.follow ? 'statSync' : 'lstatSync';
-        this[ONSTAT](job, fs_1.default[stat](job.absolute));
-    }
-    [READDIR](job) {
-        this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute));
-    }
-    // gotta get it all in this tick
-    [PIPE](job) {
-        const source = job.entry;
-        const zip = this.zip;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('Cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                zip.write(chunk);
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                super[WRITE](chunk);
-            });
-        }
-    }
-}
-exports.PackSync = PackSync;
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/package.json b/node_modules/pacote/node_modules/tar/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js
deleted file mode 100644
index 9ff391c44092c..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/path-reservations.js
+++ /dev/null
@@ -1,170 +0,0 @@
-"use strict";
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PathReservations = void 0;
-const node_path_1 = require("node:path");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-const getDirs = (path) => {
-    const dirs = path
-        .split('/')
-        .slice(0, -1)
-        .reduce((set, path) => {
-        const s = set[set.length - 1];
-        if (s !== undefined) {
-            path = (0, node_path_1.join)(s, path);
-        }
-        set.push(path || '/');
-        return set;
-    }, []);
-    return dirs;
-};
-class PathReservations {
-    // path => [function or Set]
-    // A Set object means a directory reservation
-    // A fn is a direct reservation on that path
-    #queues = new Map();
-    // fn => {paths:[path,...], dirs:[path, ...]}
-    #reservations = new Map();
-    // functions currently running
-    #running = new Set();
-    reserve(paths, fn) {
-        paths =
-            isWindows ?
-                ['win32 parallelization disabled']
-                : paths.map(p => {
-                    // don't need normPath, because we skip this entirely for windows
-                    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase();
-                });
-        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
-        this.#reservations.set(fn, { dirs, paths });
-        for (const p of paths) {
-            const q = this.#queues.get(p);
-            if (!q) {
-                this.#queues.set(p, [fn]);
-            }
-            else {
-                q.push(fn);
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            if (!q) {
-                this.#queues.set(dir, [new Set([fn])]);
-            }
-            else {
-                const l = q[q.length - 1];
-                if (l instanceof Set) {
-                    l.add(fn);
-                }
-                else {
-                    q.push(new Set([fn]));
-                }
-            }
-        }
-        return this.#run(fn);
-    }
-    // return the queues for each path the function cares about
-    // fn => {paths, dirs}
-    #getQueues(fn) {
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('function does not have any path reservations');
-        }
-        /* c8 ignore stop */
-        return {
-            paths: res.paths.map((path) => this.#queues.get(path)),
-            dirs: [...res.dirs].map(path => this.#queues.get(path)),
-        };
-    }
-    // check if fn is first in line for all its paths, and is
-    // included in the first set for all its dir queues
-    check(fn) {
-        const { paths, dirs } = this.#getQueues(fn);
-        return (paths.every(q => q && q[0] === fn) &&
-            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
-    }
-    // run the function if it's first in line and not already running
-    #run(fn) {
-        if (this.#running.has(fn) || !this.check(fn)) {
-            return false;
-        }
-        this.#running.add(fn);
-        fn(() => this.#clear(fn));
-        return true;
-    }
-    #clear(fn) {
-        if (!this.#running.has(fn)) {
-            return false;
-        }
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('invalid reservation');
-        }
-        /* c8 ignore stop */
-        const { paths, dirs } = res;
-        const next = new Set();
-        for (const path of paths) {
-            const q = this.#queues.get(path);
-            /* c8 ignore start */
-            if (!q || q?.[0] !== fn) {
-                continue;
-            }
-            /* c8 ignore stop */
-            const q0 = q[1];
-            if (!q0) {
-                this.#queues.delete(path);
-                continue;
-            }
-            q.shift();
-            if (typeof q0 === 'function') {
-                next.add(q0);
-            }
-            else {
-                for (const f of q0) {
-                    next.add(f);
-                }
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            const q0 = q?.[0];
-            /* c8 ignore next - type safety only */
-            if (!q || !(q0 instanceof Set))
-                continue;
-            if (q0.size === 1 && q.length === 1) {
-                this.#queues.delete(dir);
-                continue;
-            }
-            else if (q0.size === 1) {
-                q.shift();
-                // next one must be a function,
-                // or else the Set would've been reused
-                const n = q[0];
-                if (typeof n === 'function') {
-                    next.add(n);
-                }
-            }
-            else {
-                q0.delete(fn);
-            }
-        }
-        this.#running.delete(fn);
-        next.forEach(fn => this.#run(fn));
-        return true;
-    }
-}
-exports.PathReservations = PathReservations;
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/pax.js b/node_modules/pacote/node_modules/tar/dist/commonjs/pax.js
deleted file mode 100644
index d30c0f3efbe9e..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/pax.js
+++ /dev/null
@@ -1,158 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Pax = void 0;
-const node_path_1 = require("node:path");
-const header_js_1 = require("./header.js");
-class Pax {
-    atime;
-    mtime;
-    ctime;
-    charset;
-    comment;
-    gid;
-    uid;
-    gname;
-    uname;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    path;
-    size;
-    mode;
-    global;
-    constructor(obj, global = false) {
-        this.atime = obj.atime;
-        this.charset = obj.charset;
-        this.comment = obj.comment;
-        this.ctime = obj.ctime;
-        this.dev = obj.dev;
-        this.gid = obj.gid;
-        this.global = global;
-        this.gname = obj.gname;
-        this.ino = obj.ino;
-        this.linkpath = obj.linkpath;
-        this.mtime = obj.mtime;
-        this.nlink = obj.nlink;
-        this.path = obj.path;
-        this.size = obj.size;
-        this.uid = obj.uid;
-        this.uname = obj.uname;
-    }
-    encode() {
-        const body = this.encodeBody();
-        if (body === '') {
-            return Buffer.allocUnsafe(0);
-        }
-        const bodyLen = Buffer.byteLength(body);
-        // round up to 512 bytes
-        // add 512 for header
-        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
-        const buf = Buffer.allocUnsafe(bufLen);
-        // 0-fill the header section, it might not hit every field
-        for (let i = 0; i < 512; i++) {
-            buf[i] = 0;
-        }
-        new header_js_1.Header({
-            // XXX split the path
-            // then the path should be PaxHeader + basename, but less than 99,
-            // prepend with the dirname
-            /* c8 ignore start */
-            path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99),
-            /* c8 ignore stop */
-            mode: this.mode || 0o644,
-            uid: this.uid,
-            gid: this.gid,
-            size: bodyLen,
-            mtime: this.mtime,
-            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-            linkpath: '',
-            uname: this.uname || '',
-            gname: this.gname || '',
-            devmaj: 0,
-            devmin: 0,
-            atime: this.atime,
-            ctime: this.ctime,
-        }).encode(buf);
-        buf.write(body, 512, bodyLen, 'utf8');
-        // null pad after the body
-        for (let i = bodyLen + 512; i < buf.length; i++) {
-            buf[i] = 0;
-        }
-        return buf;
-    }
-    encodeBody() {
-        return (this.encodeField('path') +
-            this.encodeField('ctime') +
-            this.encodeField('atime') +
-            this.encodeField('dev') +
-            this.encodeField('ino') +
-            this.encodeField('nlink') +
-            this.encodeField('charset') +
-            this.encodeField('comment') +
-            this.encodeField('gid') +
-            this.encodeField('gname') +
-            this.encodeField('linkpath') +
-            this.encodeField('mtime') +
-            this.encodeField('size') +
-            this.encodeField('uid') +
-            this.encodeField('uname'));
-    }
-    encodeField(field) {
-        if (this[field] === undefined) {
-            return '';
-        }
-        const r = this[field];
-        const v = r instanceof Date ? r.getTime() / 1000 : r;
-        const s = ' ' +
-            (field === 'dev' || field === 'ino' || field === 'nlink' ?
-                'SCHILY.'
-                : '') +
-            field +
-            '=' +
-            v +
-            '\n';
-        const byteLen = Buffer.byteLength(s);
-        // the digits includes the length of the digits in ascii base-10
-        // so if it's 9 characters, then adding 1 for the 9 makes it 10
-        // which makes it 11 chars.
-        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
-        if (byteLen + digits >= Math.pow(10, digits)) {
-            digits += 1;
-        }
-        const len = digits + byteLen;
-        return len + s;
-    }
-    static parse(str, ex, g = false) {
-        return new Pax(merge(parseKV(str), ex), g);
-    }
-}
-exports.Pax = Pax;
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
-    const n = parseInt(line, 10);
-    // XXX Values with \n in them will fail this.
-    // Refactor to not be a naive line-by-line parse.
-    if (n !== Buffer.byteLength(line) + 1) {
-        return set;
-    }
-    line = line.slice((n + ' ').length);
-    const kv = line.split('=');
-    const r = kv.shift();
-    if (!r) {
-        return set;
-    }
-    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
-    const v = kv.join('=');
-    set[k] =
-        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
-            new Date(Number(v) * 1000)
-            : /^[0-9]+$/.test(v) ? +v
-                : v;
-    return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js
deleted file mode 100644
index 15e2d55c938a4..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/read-entry.js
+++ /dev/null
@@ -1,140 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.ReadEntry = void 0;
-const minipass_1 = require("minipass");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-class ReadEntry extends minipass_1.Minipass {
-    extended;
-    globalExtended;
-    header;
-    startBlockSize;
-    blockRemain;
-    remain;
-    type;
-    meta = false;
-    ignore = false;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    size = 0;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    invalid = false;
-    absolute;
-    unsupported = false;
-    constructor(header, ex, gex) {
-        super({});
-        // read entries always start life paused.  this is to avoid the
-        // situation where Minipass's auto-ending empty streams results
-        // in an entry ending before we're ready for it.
-        this.pause();
-        this.extended = ex;
-        this.globalExtended = gex;
-        this.header = header;
-        /* c8 ignore start */
-        this.remain = header.size ?? 0;
-        /* c8 ignore stop */
-        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
-        this.blockRemain = this.startBlockSize;
-        this.type = header.type;
-        switch (this.type) {
-            case 'File':
-            case 'OldFile':
-            case 'Link':
-            case 'SymbolicLink':
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'Directory':
-            case 'FIFO':
-            case 'ContiguousFile':
-            case 'GNUDumpDir':
-                break;
-            case 'NextFileHasLongLinkpath':
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath':
-            case 'GlobalExtendedHeader':
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this.meta = true;
-                break;
-            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-            // it may be worth doing the same, but with a warning.
-            default:
-                this.ignore = true;
-        }
-        /* c8 ignore start */
-        if (!header.path) {
-            throw new Error('no path provided for tar.ReadEntry');
-        }
-        /* c8 ignore stop */
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path);
-        this.mode = header.mode;
-        if (this.mode) {
-            this.mode = this.mode & 0o7777;
-        }
-        this.uid = header.uid;
-        this.gid = header.gid;
-        this.uname = header.uname;
-        this.gname = header.gname;
-        this.size = this.remain;
-        this.mtime = header.mtime;
-        this.atime = header.atime;
-        this.ctime = header.ctime;
-        /* c8 ignore start */
-        this.linkpath =
-            header.linkpath ?
-                (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath)
-                : undefined;
-        /* c8 ignore stop */
-        this.uname = header.uname;
-        this.gname = header.gname;
-        if (ex) {
-            this.#slurp(ex);
-        }
-        if (gex) {
-            this.#slurp(gex, true);
-        }
-    }
-    write(data) {
-        const writeLen = data.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        const r = this.remain;
-        const br = this.blockRemain;
-        this.remain = Math.max(0, r - writeLen);
-        this.blockRemain = Math.max(0, br - writeLen);
-        if (this.ignore) {
-            return true;
-        }
-        if (r >= writeLen) {
-            return super.write(data);
-        }
-        // r < writeLen
-        return super.write(data.subarray(0, r));
-    }
-    #slurp(ex, gex = false) {
-        if (ex.path)
-            ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path);
-        if (ex.linkpath)
-            ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath);
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex));
-        })));
-    }
-}
-exports.ReadEntry = ReadEntry;
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js
deleted file mode 100644
index bb7639c35a110..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/strip-absolute-path.js
+++ /dev/null
@@ -1,29 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripAbsolutePath = void 0;
-// unix absolute paths are also absolute on win32, so we use this for both
-const node_path_1 = require("node:path");
-const { isAbsolute, parse } = node_path_1.win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-const stripAbsolutePath = (path) => {
-    let r = '';
-    let parsed = parse(path);
-    while (isAbsolute(path) || parsed.root) {
-        // windows will think that //x/y/z has a "root" of //x/y/
-        // but strip the //?/C:/ off of //?/C:/path
-        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
-            '/'
-            : parsed.root;
-        path = path.slice(root.length);
-        r += root;
-        parsed = parse(path);
-    }
-    return [r, path];
-};
-exports.stripAbsolutePath = stripAbsolutePath;
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
deleted file mode 100644
index 6fa74ad6a4ac9..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
+++ /dev/null
@@ -1,18 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stripTrailingSlashes = void 0;
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const stripTrailingSlashes = (str) => {
-    let i = str.length - 1;
-    let slashesStart = -1;
-    while (i > -1 && str.charAt(i) === '/') {
-        slashesStart = i;
-        i--;
-    }
-    return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-exports.stripTrailingSlashes = stripTrailingSlashes;
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js
deleted file mode 100644
index cc19ac1a2e3c6..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/symlink-error.js
+++ /dev/null
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.SymlinkError = void 0;
-class SymlinkError extends Error {
-    path;
-    symlink;
-    syscall = 'symlink';
-    code = 'TAR_SYMLINK_ERROR';
-    constructor(symlink, path) {
-        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
-        this.symlink = symlink;
-        this.path = path;
-    }
-    get name() {
-        return 'SymlinkError';
-    }
-}
-exports.SymlinkError = SymlinkError;
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/types.js b/node_modules/pacote/node_modules/tar/dist/commonjs/types.js
deleted file mode 100644
index cb9b684e843b7..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/types.js
+++ /dev/null
@@ -1,50 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.code = exports.name = exports.isName = exports.isCode = void 0;
-const isCode = (c) => exports.name.has(c);
-exports.isCode = isCode;
-const isName = (c) => exports.code.has(c);
-exports.isName = isName;
-// map types from key to human-friendly name
-exports.name = new Map([
-    ['0', 'File'],
-    // same as File
-    ['', 'OldFile'],
-    ['1', 'Link'],
-    ['2', 'SymbolicLink'],
-    // Devices and FIFOs aren't fully supported
-    // they are parsed, but skipped when unpacking
-    ['3', 'CharacterDevice'],
-    ['4', 'BlockDevice'],
-    ['5', 'Directory'],
-    ['6', 'FIFO'],
-    // same as File
-    ['7', 'ContiguousFile'],
-    // pax headers
-    ['g', 'GlobalExtendedHeader'],
-    ['x', 'ExtendedHeader'],
-    // vendor-specific stuff
-    // skip
-    ['A', 'SolarisACL'],
-    // like 5, but with data, which should be skipped
-    ['D', 'GNUDumpDir'],
-    // metadata only, skip
-    ['I', 'Inode'],
-    // data = link path of next file
-    ['K', 'NextFileHasLongLinkpath'],
-    // data = path of next file
-    ['L', 'NextFileHasLongPath'],
-    // skip
-    ['M', 'ContinuationFile'],
-    // like L
-    ['N', 'OldGnuLongPath'],
-    // skip
-    ['S', 'SparseFile'],
-    // skip
-    ['V', 'TapeVolumeHeader'],
-    // like x
-    ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/update.js b/node_modules/pacote/node_modules/tar/dist/commonjs/update.js
deleted file mode 100644
index 7687896f4bfee..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/update.js
+++ /dev/null
@@ -1,33 +0,0 @@
-"use strict";
-// tar -u
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.update = void 0;
-const make_command_js_1 = require("./make-command.js");
-const replace_js_1 = require("./replace.js");
-// just call tar.r with the filter and mtimeCache
-exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => {
-    replace_js_1.replace.validate?.(opt, entries);
-    mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
-    const filter = opt.filter;
-    if (!opt.mtimeCache) {
-        opt.mtimeCache = new Map();
-    }
-    opt.filter =
-        filter ?
-            (path, stat) => filter(path, stat) &&
-                !(
-                /* c8 ignore start */
-                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                    (stat.mtime ?? 0))
-                /* c8 ignore stop */
-                )
-            : (path, stat) => !(
-            /* c8 ignore start */
-            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                (stat.mtime ?? 0))
-            /* c8 ignore stop */
-            );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js
deleted file mode 100644
index f25502776e36a..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/warn-method.js
+++ /dev/null
@@ -1,31 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.warnMethod = void 0;
-const warnMethod = (self, code, message, data = {}) => {
-    if (self.file) {
-        data.file = self.file;
-    }
-    if (self.cwd) {
-        data.cwd = self.cwd;
-    }
-    data.code =
-        (message instanceof Error &&
-            message.code) ||
-            code;
-    data.tarCode = code;
-    if (!self.strict && data.recoverable !== false) {
-        if (message instanceof Error) {
-            data = Object.assign(message, data);
-            message = message.message;
-        }
-        self.emit('warn', code, message, data);
-    }
-    else if (message instanceof Error) {
-        self.emit('error', Object.assign(message, data));
-    }
-    else {
-        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
-    }
-};
-exports.warnMethod = warnMethod;
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js b/node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js
deleted file mode 100644
index c0a4405812929..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/winchars.js
+++ /dev/null
@@ -1,14 +0,0 @@
-"use strict";
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.decode = exports.encode = void 0;
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-exports.encode = encode;
-const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-exports.decode = decode;
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js
deleted file mode 100644
index 45b7efeb79502..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/write-entry.js
+++ /dev/null
@@ -1,689 +0,0 @@
-"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    var desc = Object.getOwnPropertyDescriptor(m, k);
-    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
-      desc = { enumerable: true, get: function() { return m[k]; } };
-    }
-    Object.defineProperty(o, k2, desc);
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
-var __importDefault = (this && this.__importDefault) || function (mod) {
-    return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0;
-const fs_1 = __importDefault(require("fs"));
-const minipass_1 = require("minipass");
-const path_1 = __importDefault(require("path"));
-const header_js_1 = require("./header.js");
-const mode_fix_js_1 = require("./mode-fix.js");
-const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
-const options_js_1 = require("./options.js");
-const pax_js_1 = require("./pax.js");
-const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
-const warn_method_js_1 = require("./warn-method.js");
-const winchars = __importStar(require("./winchars.js"));
-const prefixPath = (path, prefix) => {
-    if (!prefix) {
-        return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path);
-    }
-    path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, '');
-    return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-class WriteEntry extends minipass_1.Minipass {
-    path;
-    portable;
-    myuid = (process.getuid && process.getuid()) || 0;
-    // until node has builtin pwnam functions, this'll have to do
-    myuser = process.env.USER || '';
-    maxReadSize;
-    linkCache;
-    statCache;
-    preservePaths;
-    cwd;
-    strict;
-    mtime;
-    noPax;
-    noMtime;
-    prefix;
-    fd;
-    blockLen = 0;
-    blockRemain = 0;
-    buf;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    offset = 0;
-    win32;
-    absolute;
-    header;
-    type;
-    linkpath;
-    stat;
-    onWriteEntry;
-    #hadError = false;
-    constructor(p, opt_ = {}) {
-        const opt = (0, options_js_1.dealias)(opt_);
-        super();
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p);
-        // suppress atime, ctime, uid, gid, uname, gname
-        this.portable = !!opt.portable;
-        this.maxReadSize = opt.maxReadSize || maxReadSize;
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.preservePaths = !!opt.preservePaths;
-        this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd());
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.mtime = opt.mtime;
-        this.prefix =
-            opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined;
-        this.onWriteEntry = opt.onWriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.win32 = !!opt.win32 || process.platform === 'win32';
-        if (this.win32) {
-            // force the \ to / normalization, since we might not *actually*
-            // be on windows, but want \ to be considered a path separator.
-            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
-            p = p.replace(/\\/g, '/');
-        }
-        this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p));
-        if (this.path === '') {
-            this.path = './';
-        }
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        const cs = this.statCache.get(this.absolute);
-        if (cs) {
-            this[ONLSTAT](cs);
-        }
-        else {
-            this[LSTAT]();
-        }
-    }
-    warn(code, message, data = {}) {
-        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    emit(ev, ...data) {
-        if (ev === 'error') {
-            this.#hadError = true;
-        }
-        return super.emit(ev, ...data);
-    }
-    [LSTAT]() {
-        fs_1.default.lstat(this.absolute, (er, stat) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONLSTAT](stat);
-        });
-    }
-    [ONLSTAT](stat) {
-        this.statCache.set(this.absolute, stat);
-        this.stat = stat;
-        if (!stat.isFile()) {
-            stat.size = 0;
-        }
-        this.type = getType(stat);
-        this.emit('stat', stat);
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        switch (this.type) {
-            case 'File':
-                return this[FILE]();
-            case 'Directory':
-                return this[DIRECTORY]();
-            case 'SymbolicLink':
-                return this[SYMLINK]();
-            // unsupported types are ignored.
-            default:
-                return this.end();
-        }
-    }
-    [MODE](mode) {
-        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [HEADER]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot write header before stat');
-        }
-        /* c8 ignore stop */
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.onWriteEntry?.(this);
-        this.header = new header_js_1.Header({
-            path: this[PREFIX](this.path),
-            // only apply the prefix to hard links.
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this[MODE](this.stat.mode),
-            uid: this.portable ? undefined : this.stat.uid,
-            gid: this.portable ? undefined : this.stat.gid,
-            size: this.stat.size,
-            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
-            /* c8 ignore next */
-            type: this.type === 'Unsupported' ? undefined : this.type,
-            uname: this.portable ? undefined
-                : this.stat.uid === this.myuid ? this.myuser
-                    : '',
-            atime: this.portable ? undefined : this.stat.atime,
-            ctime: this.portable ? undefined : this.stat.ctime,
-        });
-        if (this.header.encode() && !this.noPax) {
-            super.write(new pax_js_1.Pax({
-                atime: this.portable ? undefined : this.header.atime,
-                ctime: this.portable ? undefined : this.header.ctime,
-                gid: this.portable ? undefined : this.header.gid,
-                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.header.size,
-                uid: this.portable ? undefined : this.header.uid,
-                uname: this.portable ? undefined : this.header.uname,
-                dev: this.portable ? undefined : this.stat.dev,
-                ino: this.portable ? undefined : this.stat.ino,
-                nlink: this.portable ? undefined : this.stat.nlink,
-            }).encode());
-        }
-        const block = this.header?.block;
-        /* c8 ignore start */
-        if (!block) {
-            throw new Error('failed to encode header');
-        }
-        /* c8 ignore stop */
-        super.write(block);
-    }
-    [DIRECTORY]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create directory entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.path.slice(-1) !== '/') {
-            this.path += '/';
-        }
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [SYMLINK]() {
-        fs_1.default.readlink(this.absolute, (er, linkpath) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADLINK](linkpath);
-        });
-    }
-    [ONREADLINK](linkpath) {
-        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath);
-        this[HEADER]();
-        this.end();
-    }
-    [HARDLINK](linkpath) {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create link entry without stat');
-        }
-        /* c8 ignore stop */
-        this.type = 'Link';
-        this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath));
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [FILE]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create file entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.stat.nlink > 1) {
-            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
-            const linkpath = this.linkCache.get(linkKey);
-            if (linkpath?.indexOf(this.cwd) === 0) {
-                return this[HARDLINK](linkpath);
-            }
-            this.linkCache.set(linkKey, this.absolute);
-        }
-        this[HEADER]();
-        if (this.stat.size === 0) {
-            return this.end();
-        }
-        this[OPENFILE]();
-    }
-    [OPENFILE]() {
-        fs_1.default.open(this.absolute, 'r', (er, fd) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONOPENFILE](fd);
-        });
-    }
-    [ONOPENFILE](fd) {
-        this.fd = fd;
-        if (this.#hadError) {
-            return this[CLOSE]();
-        }
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('should stat before calling onopenfile');
-        }
-        /* c8 ignore start */
-        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
-        this.blockRemain = this.blockLen;
-        const bufLen = Math.min(this.blockLen, this.maxReadSize);
-        this.buf = Buffer.allocUnsafe(bufLen);
-        this.offset = 0;
-        this.pos = 0;
-        this.remain = this.stat.size;
-        this.length = this.buf.length;
-        this[READ]();
-    }
-    [READ]() {
-        const { fd, buf, offset, length, pos } = this;
-        if (fd === undefined || buf === undefined) {
-            throw new Error('cannot read file without first opening');
-        }
-        fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-            if (er) {
-                // ignoring the error from close(2) is a bad practice, but at
-                // this point we already have an error, don't need another one
-                return this[CLOSE](() => this.emit('error', er));
-            }
-            this[ONREAD](bytesRead);
-        });
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs_1.default.close(this.fd, cb);
-    }
-    [ONREAD](bytesRead) {
-        if (bytesRead <= 0 && this.remain > 0) {
-            const er = Object.assign(new Error('encountered unexpected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        if (bytesRead > this.remain) {
-            const er = Object.assign(new Error('did not encounter expected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('should have created buffer prior to reading');
-        }
-        /* c8 ignore stop */
-        // null out the rest of the buffer, if we could fit the block padding
-        // at the end of this loop, we've incremented bytesRead and this.remain
-        // to be incremented up to the blockRemain level, as if we had expected
-        // to get a null-padded file, and read it until the end.  then we will
-        // decrement both remain and blockRemain by bytesRead, and know that we
-        // reached the expected EOF, without any null buffer to append.
-        if (bytesRead === this.remain) {
-            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-                this.buf[i + this.offset] = 0;
-                bytesRead++;
-                this.remain++;
-            }
-        }
-        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
-            this.buf
-            : this.buf.subarray(this.offset, this.offset + bytesRead);
-        const flushed = this.write(chunk);
-        if (!flushed) {
-            this[AWAITDRAIN](() => this[ONDRAIN]());
-        }
-        else {
-            this[ONDRAIN]();
-        }
-    }
-    [AWAITDRAIN](cb) {
-        this.once('drain', cb);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        if (this.blockRemain < chunk.length) {
-            const er = Object.assign(new Error('writing more data than expected'), {
-                path: this.absolute,
-            });
-            return this.emit('error', er);
-        }
-        this.remain -= chunk.length;
-        this.blockRemain -= chunk.length;
-        this.pos += chunk.length;
-        this.offset += chunk.length;
-        return super.write(chunk, null, cb);
-    }
-    [ONDRAIN]() {
-        if (!this.remain) {
-            if (this.blockRemain) {
-                super.write(Buffer.alloc(this.blockRemain));
-            }
-            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('buffer lost somehow in ONDRAIN');
-        }
-        /* c8 ignore stop */
-        if (this.offset >= this.length) {
-            // if we only have a smaller bit left to read, alloc a smaller buffer
-            // otherwise, keep it the same length it was before.
-            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
-            this.offset = 0;
-        }
-        this.length = this.buf.length - this.offset;
-        this[READ]();
-    }
-}
-exports.WriteEntry = WriteEntry;
-class WriteEntrySync extends WriteEntry {
-    sync = true;
-    [LSTAT]() {
-        this[ONLSTAT](fs_1.default.lstatSync(this.absolute));
-    }
-    [SYMLINK]() {
-        this[ONREADLINK](fs_1.default.readlinkSync(this.absolute));
-    }
-    [OPENFILE]() {
-        this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r'));
-    }
-    [READ]() {
-        let threw = true;
-        try {
-            const { fd, buf, offset, length, pos } = this;
-            /* c8 ignore start */
-            if (fd === undefined || buf === undefined) {
-                throw new Error('fd and buf must be set in READ method');
-            }
-            /* c8 ignore stop */
-            const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos);
-            this[ONREAD](bytesRead);
-            threw = false;
-        }
-        finally {
-            // ignoring the error from close(2) is a bad practice, but at
-            // this point we already have an error, don't need another one
-            if (threw) {
-                try {
-                    this[CLOSE](() => { });
-                }
-                catch (er) { }
-            }
-        }
-    }
-    [AWAITDRAIN](cb) {
-        cb();
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs_1.default.closeSync(this.fd);
-        cb();
-    }
-}
-exports.WriteEntrySync = WriteEntrySync;
-class WriteEntryTar extends minipass_1.Minipass {
-    blockLen = 0;
-    blockRemain = 0;
-    buf = 0;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    preservePaths;
-    portable;
-    strict;
-    noPax;
-    noMtime;
-    readEntry;
-    type;
-    prefix;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    header;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    size;
-    onWriteEntry;
-    warn(code, message, data = {}) {
-        return (0, warn_method_js_1.warnMethod)(this, code, message, data);
-    }
-    constructor(readEntry, opt_ = {}) {
-        const opt = (0, options_js_1.dealias)(opt_);
-        super();
-        this.preservePaths = !!opt.preservePaths;
-        this.portable = !!opt.portable;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.onWriteEntry = opt.onWriteEntry;
-        this.readEntry = readEntry;
-        const { type } = readEntry;
-        /* c8 ignore start */
-        if (type === 'Unsupported') {
-            throw new Error('writing entry that should be ignored');
-        }
-        /* c8 ignore stop */
-        this.type = type;
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.prefix = opt.prefix;
-        this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path);
-        this.mode =
-            readEntry.mode !== undefined ?
-                this[MODE](readEntry.mode)
-                : undefined;
-        this.uid = this.portable ? undefined : readEntry.uid;
-        this.gid = this.portable ? undefined : readEntry.gid;
-        this.uname = this.portable ? undefined : readEntry.uname;
-        this.gname = this.portable ? undefined : readEntry.gname;
-        this.size = readEntry.size;
-        this.mtime =
-            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
-        this.atime = this.portable ? undefined : readEntry.atime;
-        this.ctime = this.portable ? undefined : readEntry.ctime;
-        this.linkpath =
-            readEntry.linkpath !== undefined ?
-                (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath)
-                : undefined;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.remain = readEntry.size;
-        this.blockRemain = readEntry.startBlockSize;
-        this.onWriteEntry?.(this);
-        this.header = new header_js_1.Header({
-            path: this[PREFIX](this.path),
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this.mode,
-            uid: this.portable ? undefined : this.uid,
-            gid: this.portable ? undefined : this.gid,
-            size: this.size,
-            mtime: this.noMtime ? undefined : this.mtime,
-            type: this.type,
-            uname: this.portable ? undefined : this.uname,
-            atime: this.portable ? undefined : this.atime,
-            ctime: this.portable ? undefined : this.ctime,
-        });
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        if (this.header.encode() && !this.noPax) {
-            super.write(new pax_js_1.Pax({
-                atime: this.portable ? undefined : this.atime,
-                ctime: this.portable ? undefined : this.ctime,
-                gid: this.portable ? undefined : this.gid,
-                mtime: this.noMtime ? undefined : this.mtime,
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.size,
-                uid: this.portable ? undefined : this.uid,
-                uname: this.portable ? undefined : this.uname,
-                dev: this.portable ? undefined : this.readEntry.dev,
-                ino: this.portable ? undefined : this.readEntry.ino,
-                nlink: this.portable ? undefined : this.readEntry.nlink,
-            }).encode());
-        }
-        const b = this.header?.block;
-        /* c8 ignore start */
-        if (!b)
-            throw new Error('failed to encode header');
-        /* c8 ignore stop */
-        super.write(b);
-        readEntry.pipe(this);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [MODE](mode) {
-        return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        const writeLen = chunk.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        this.blockRemain -= writeLen;
-        return super.write(chunk, cb);
-    }
-    end(chunk, encoding, cb) {
-        if (this.blockRemain) {
-            super.write(Buffer.alloc(this.blockRemain));
-        }
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding ?? 'utf8');
-        }
-        if (cb)
-            this.once('finish', cb);
-        chunk ? super.end(chunk, cb) : super.end(cb);
-        /* c8 ignore stop */
-        return this;
-    }
-}
-exports.WriteEntryTar = WriteEntryTar;
-const getType = (stat) => stat.isFile() ? 'File'
-    : stat.isDirectory() ? 'Directory'
-        : stat.isSymbolicLink() ? 'SymbolicLink'
-            : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/create.js b/node_modules/pacote/node_modules/tar/dist/esm/create.js
deleted file mode 100644
index 512a9911d70d5..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/create.js
+++ /dev/null
@@ -1,77 +0,0 @@
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import path from 'node:path';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Pack, PackSync } from './pack.js';
-const createFileSync = (opt, files) => {
-    const p = new PackSync(opt);
-    const stream = new WriteStreamSync(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const createFile = (opt, files) => {
-    const p = new Pack(opt);
-    const stream = new WriteStream(opt.file, {
-        mode: opt.mode || 0o666,
-    });
-    p.pipe(stream);
-    const promise = new Promise((res, rej) => {
-        stream.on('error', rej);
-        stream.on('close', res);
-        p.on('error', rej);
-    });
-    addFilesAsync(p, files);
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            list({
-                file: path.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await list({
-                file: path.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => {
-                    p.add(entry);
-                },
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-const createSync = (opt, files) => {
-    const p = new PackSync(opt);
-    addFilesSync(p, files);
-    return p;
-};
-const createAsync = (opt, files) => {
-    const p = new Pack(opt);
-    addFilesAsync(p, files);
-    return p;
-};
-export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => {
-    if (!files?.length) {
-        throw new TypeError('no paths specified to add to archive');
-    }
-});
-//# sourceMappingURL=create.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js b/node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js
deleted file mode 100644
index 289a066b8e031..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/cwd-error.js
+++ /dev/null
@@ -1,14 +0,0 @@
-export class CwdError extends Error {
-    path;
-    code;
-    syscall = 'chdir';
-    constructor(path, code) {
-        super(`${code}: Cannot cd into '${path}'`);
-        this.path = path;
-        this.code = code;
-    }
-    get name() {
-        return 'CwdError';
-    }
-}
-//# sourceMappingURL=cwd-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/extract.js b/node_modules/pacote/node_modules/tar/dist/esm/extract.js
deleted file mode 100644
index 2274feef26e78..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/extract.js
+++ /dev/null
@@ -1,49 +0,0 @@
-// tar -x
-import * as fsm from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import { filesFilter } from './list.js';
-import { makeCommand } from './make-command.js';
-import { Unpack, UnpackSync } from './unpack.js';
-const extractFileSync = (opt) => {
-    const u = new UnpackSync(opt);
-    const file = opt.file;
-    const stat = fs.statSync(file);
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const stream = new fsm.ReadStreamSync(file, {
-        readSize: readSize,
-        size: stat.size,
-    });
-    stream.pipe(u);
-};
-const extractFile = (opt, _) => {
-    const u = new Unpack(opt);
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024;
-    const file = opt.file;
-    const p = new Promise((resolve, reject) => {
-        u.on('error', reject);
-        u.on('close', resolve);
-        // This trades a zero-byte read() syscall for a stat
-        // However, it will usually result in less memory allocation
-        fs.stat(file, (er, stat) => {
-            if (er) {
-                reject(er);
-            }
-            else {
-                const stream = new fsm.ReadStream(file, {
-                    readSize: readSize,
-                    size: stat.size,
-                });
-                stream.on('error', reject);
-                stream.pipe(u);
-            }
-        });
-    });
-    return p;
-};
-export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => {
-    if (files?.length)
-        filesFilter(opt, files);
-});
-//# sourceMappingURL=extract.js.map
\ No newline at end of file
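
Both code paths above stat the archive before reading so the read stream can be sized exactly, trading one stat for a zero-byte read() and smaller buffer allocations. A sketch of the two ways extract() is meant to be called (the file name and destination directory are hypothetical, and the destination is assumed to exist):

import { createReadStream } from 'node:fs'
import { extract } from 'tar'

// file mode: extract() stats and streams the archive itself
await extract({ file: 'pkg.tgz', cwd: 'dest' })

// no-file mode: extract() returns an Unpack stream to pipe archive bytes into
createReadStream('pkg.tgz').pipe(extract({ cwd: 'dest' }))
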
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js
deleted file mode 100644
index 2c7f3e8b28fda..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/get-write-flag.js
+++ /dev/null
@@ -1,23 +0,0 @@
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-import fs from 'fs';
-const platform = process.env.__FAKE_PLATFORM__ || process.platform;
-const isWindows = platform === 'win32';
-/* c8 ignore start */
-const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants;
-const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) ||
-    fs.constants.UV_FS_O_FILEMAP ||
-    0;
-/* c8 ignore stop */
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP;
-const fMapLimit = 512 * 1024;
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY;
-export const getWriteFlag = !fMapEnabled ?
-    () => 'w'
-    : (size) => (size < fMapLimit ? fMapFlag : 'w');
-//# sourceMappingURL=get-write-flag.js.map
\ No newline at end of file
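
The flag above only deviates from a plain 'w' on Windows, and only for files under 512 KiB, where UV_FS_O_FILEMAP makes many small writes noticeably faster. A standalone restatement of the same decision (assuming fs.constants provides the flags; on non-Windows platforms UV_FS_O_FILEMAP is absent and the result is always 'w'):

import { constants } from 'node:fs'

const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = constants
const FMAP_LIMIT = 512 * 1024

// pass the entry size; small files on win32 get the file-mapped flag
const writeFlag = size =>
  process.platform === 'win32' && UV_FS_O_FILEMAP && size < FMAP_LIMIT
    ? UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY
    : 'w'
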
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/header.js b/node_modules/pacote/node_modules/tar/dist/esm/header.js
deleted file mode 100644
index e15192b14b16e..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/header.js
+++ /dev/null
@@ -1,279 +0,0 @@
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-import { posix as pathModule } from 'node:path';
-import * as large from './large-numbers.js';
-import * as types from './types.js';
-export class Header {
-    cksumValid = false;
-    needPax = false;
-    nullBlock = false;
-    block;
-    path;
-    mode;
-    uid;
-    gid;
-    size;
-    cksum;
-    #type = 'Unsupported';
-    linkpath;
-    uname;
-    gname;
-    devmaj = 0;
-    devmin = 0;
-    atime;
-    ctime;
-    mtime;
-    charset;
-    comment;
-    constructor(data, off = 0, ex, gex) {
-        if (Buffer.isBuffer(data)) {
-            this.decode(data, off || 0, ex, gex);
-        }
-        else if (data) {
-            this.#slurp(data);
-        }
-    }
-    decode(buf, off, ex, gex) {
-        if (!off) {
-            off = 0;
-        }
-        if (!buf || !(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        this.path = decString(buf, off, 100);
-        this.mode = decNumber(buf, off + 100, 8);
-        this.uid = decNumber(buf, off + 108, 8);
-        this.gid = decNumber(buf, off + 116, 8);
-        this.size = decNumber(buf, off + 124, 12);
-        this.mtime = decDate(buf, off + 136, 12);
-        this.cksum = decNumber(buf, off + 148, 12);
-        // if we have extended or global extended headers, apply them now
-        // See https://github.com/npm/node-tar/pull/187
-        // Apply global before local, so it overrides
-        if (gex)
-            this.#slurp(gex, true);
-        if (ex)
-            this.#slurp(ex);
-        // old tar versions marked dirs as a file with a trailing /
-        const t = decString(buf, off + 156, 1);
-        if (types.isCode(t)) {
-            this.#type = t || '0';
-        }
-        if (this.#type === '0' && this.path.slice(-1) === '/') {
-            this.#type = '5';
-        }
-        // tar implementations sometimes incorrectly put the stat(dir).size
-        // as the size in the tarball, even though Directory entries are
-        // not able to have any body at all.  In the very rare chance that
-        // it actually DOES have a body, we weren't going to do anything with
-        // it anyway, and it'll just be a warning about an invalid header.
-        if (this.#type === '5') {
-            this.size = 0;
-        }
-        this.linkpath = decString(buf, off + 157, 100);
-        if (buf.subarray(off + 257, off + 265).toString() ===
-            'ustar\u000000') {
-            this.uname = decString(buf, off + 265, 32);
-            this.gname = decString(buf, off + 297, 32);
-            /* c8 ignore start */
-            this.devmaj = decNumber(buf, off + 329, 8) ?? 0;
-            this.devmin = decNumber(buf, off + 337, 8) ?? 0;
-            /* c8 ignore stop */
-            if (buf[off + 475] !== 0) {
-                // definitely a prefix, definitely >130 chars.
-                const prefix = decString(buf, off + 345, 155);
-                this.path = prefix + '/' + this.path;
-            }
-            else {
-                const prefix = decString(buf, off + 345, 130);
-                if (prefix) {
-                    this.path = prefix + '/' + this.path;
-                }
-                this.atime = decDate(buf, off + 476, 12);
-                this.ctime = decDate(buf, off + 488, 12);
-            }
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksumValid = sum === this.cksum;
-        if (this.cksum === undefined && sum === 8 * 0x20) {
-            this.nullBlock = true;
-        }
-    }
-    #slurp(ex, gex = false) {
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex) ||
-                (k === 'linkpath' && gex) ||
-                k === 'global');
-        })));
-    }
-    encode(buf, off = 0) {
-        if (!buf) {
-            buf = this.block = Buffer.alloc(512);
-        }
-        if (this.#type === 'Unsupported') {
-            this.#type = '0';
-        }
-        if (!(buf.length >= off + 512)) {
-            throw new Error('need 512 bytes for header');
-        }
-        const prefixSize = this.ctime || this.atime ? 130 : 155;
-        const split = splitPrefix(this.path || '', prefixSize);
-        const path = split[0];
-        const prefix = split[1];
-        this.needPax = !!split[2];
-        this.needPax = encString(buf, off, 100, path) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 100, 8, this.mode) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 108, 8, this.uid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 116, 8, this.gid) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 124, 12, this.size) || this.needPax;
-        this.needPax =
-            encDate(buf, off + 136, 12, this.mtime) || this.needPax;
-        buf[off + 156] = this.#type.charCodeAt(0);
-        this.needPax =
-            encString(buf, off + 157, 100, this.linkpath) || this.needPax;
-        buf.write('ustar\u000000', off + 257, 8);
-        this.needPax =
-            encString(buf, off + 265, 32, this.uname) || this.needPax;
-        this.needPax =
-            encString(buf, off + 297, 32, this.gname) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 329, 8, this.devmaj) || this.needPax;
-        this.needPax =
-            encNumber(buf, off + 337, 8, this.devmin) || this.needPax;
-        this.needPax =
-            encString(buf, off + 345, prefixSize, prefix) || this.needPax;
-        if (buf[off + 475] !== 0) {
-            this.needPax =
-                encString(buf, off + 345, 155, prefix) || this.needPax;
-        }
-        else {
-            this.needPax =
-                encString(buf, off + 345, 130, prefix) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 476, 12, this.atime) || this.needPax;
-            this.needPax =
-                encDate(buf, off + 488, 12, this.ctime) || this.needPax;
-        }
-        let sum = 8 * 0x20;
-        for (let i = off; i < off + 148; i++) {
-            sum += buf[i];
-        }
-        for (let i = off + 156; i < off + 512; i++) {
-            sum += buf[i];
-        }
-        this.cksum = sum;
-        encNumber(buf, off + 148, 8, this.cksum);
-        this.cksumValid = true;
-        return this.needPax;
-    }
-    get type() {
-        return (this.#type === 'Unsupported' ?
-            this.#type
-            : types.name.get(this.#type));
-    }
-    get typeKey() {
-        return this.#type;
-    }
-    set type(type) {
-        const c = String(types.code.get(type));
-        if (types.isCode(c) || c === 'Unsupported') {
-            this.#type = c;
-        }
-        else if (types.isCode(type)) {
-            this.#type = type;
-        }
-        else {
-            throw new TypeError('invalid entry type: ' + type);
-        }
-    }
-}
-const splitPrefix = (p, prefixSize) => {
-    const pathSize = 100;
-    let pp = p;
-    let prefix = '';
-    let ret = undefined;
-    const root = pathModule.parse(p).root || '.';
-    if (Buffer.byteLength(pp) < pathSize) {
-        ret = [pp, prefix, false];
-    }
-    else {
-        // first set prefix to the dir, and path to the base
-        prefix = pathModule.dirname(pp);
-        pp = pathModule.basename(pp);
-        do {
-            if (Buffer.byteLength(pp) <= pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // both fit!
-                ret = [pp, prefix, false];
-            }
-            else if (Buffer.byteLength(pp) > pathSize &&
-                Buffer.byteLength(prefix) <= prefixSize) {
-                // prefix fits in prefix, but path doesn't fit in path
-                ret = [pp.slice(0, pathSize - 1), prefix, true];
-            }
-            else {
-                // make path take a bit from prefix
-                pp = pathModule.join(pathModule.basename(prefix), pp);
-                prefix = pathModule.dirname(prefix);
-            }
-        } while (prefix !== root && ret === undefined);
-        // at this point, found no resolution, just truncate
-        if (!ret) {
-            ret = [p.slice(0, pathSize - 1), '', true];
-        }
-    }
-    return ret;
-};
-const decString = (buf, off, size) => buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*/, '');
-const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size));
-const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000);
-const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ?
-    large.parse(buf.subarray(off, off + size))
-    : decSmallNumber(buf, off, size);
-const nanUndef = (value) => (isNaN(value) ? undefined : value);
-const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf
-    .subarray(off, off + size)
-    .toString('utf8')
-    .replace(/\0.*$/, '')
-    .trim(), 8));
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-    12: 0o77777777777,
-    8: 0o7777777,
-};
-const encNumber = (buf, off, size, num) => num === undefined ? false
-    : num > MAXNUM[size] || num < 0 ?
-        (large.encode(num, buf.subarray(off, off + size)), true)
-        : (encSmallNumber(buf, off, size, num), false);
-const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii');
-const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size);
-const padOctal = (str, size) => (str.length === size - 1 ?
-    str
-    : new Array(size - str.length - 1).join('0') + str + ' ') + '\0';
-const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000));
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0');
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, str) => str === undefined ? false : ((buf.write(str + NULLS, off, size, 'utf8'),
-    str.length !== Buffer.byteLength(str) || str.length > size));
-//# sourceMappingURL=header.js.map
\ No newline at end of file
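
The checksum loops in decode() and encode() above implement the ustar rule: sum every byte of the 512-byte block while treating the 8-byte cksum field (offsets 148-155) as ASCII spaces. A standalone sketch:

// sum of a header block with the checksum field counted as 8 spaces (0x20)
const headerChecksum = (block, off = 0) => {
  let sum = 8 * 0x20
  for (let i = off; i < off + 148; i++) sum += block[i]
  for (let i = off + 156; i < off + 512; i++) sum += block[i]
  return sum
}

// an all-zero block sums to 256, which is how decode() detects a null block
console.log(headerChecksum(Buffer.alloc(512))) // 256
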
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/index.js b/node_modules/pacote/node_modules/tar/dist/esm/index.js
deleted file mode 100644
index 1bac6415c8d73..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/index.js
+++ /dev/null
@@ -1,20 +0,0 @@
-export * from './create.js';
-export { create as c } from './create.js';
-export * from './extract.js';
-export { extract as x } from './extract.js';
-export * from './header.js';
-export * from './list.js';
-export { list as t } from './list.js';
-// classes
-export * from './pack.js';
-export * from './parse.js';
-export * from './pax.js';
-export * from './read-entry.js';
-export * from './replace.js';
-export { replace as r } from './replace.js';
-export * as types from './types.js';
-export * from './unpack.js';
-export * from './update.js';
-export { update as u } from './update.js';
-export * from './write-entry.js';
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js b/node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js
deleted file mode 100644
index 4f2f7e5f14fc1..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/large-numbers.js
+++ /dev/null
@@ -1,94 +0,0 @@
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-export const encode = (num, buf) => {
-    if (!Number.isSafeInteger(num)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('cannot encode number outside of javascript safe integer range');
-    }
-    else if (num < 0) {
-        encodeNegative(num, buf);
-    }
-    else {
-        encodePositive(num, buf);
-    }
-    return buf;
-};
-const encodePositive = (num, buf) => {
-    buf[0] = 0x80;
-    for (var i = buf.length; i > 1; i--) {
-        buf[i - 1] = num & 0xff;
-        num = Math.floor(num / 0x100);
-    }
-};
-const encodeNegative = (num, buf) => {
-    buf[0] = 0xff;
-    var flipped = false;
-    num = num * -1;
-    for (var i = buf.length; i > 1; i--) {
-        var byte = num & 0xff;
-        num = Math.floor(num / 0x100);
-        if (flipped) {
-            buf[i - 1] = onesComp(byte);
-        }
-        else if (byte === 0) {
-            buf[i - 1] = 0;
-        }
-        else {
-            flipped = true;
-            buf[i - 1] = twosComp(byte);
-        }
-    }
-};
-export const parse = (buf) => {
-    const pre = buf[0];
-    const value = pre === 0x80 ? pos(buf.subarray(1, buf.length))
-        : pre === 0xff ? twos(buf)
-            : null;
-    if (value === null) {
-        throw Error('invalid base256 encoding');
-    }
-    if (!Number.isSafeInteger(value)) {
-        // The number is so large that javascript cannot represent it with integer
-        // precision.
-        throw Error('parsed number outside of javascript safe integer range');
-    }
-    return value;
-};
-const twos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    var flipped = false;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        var f;
-        if (flipped) {
-            f = onesComp(byte);
-        }
-        else if (byte === 0) {
-            f = byte;
-        }
-        else {
-            flipped = true;
-            f = twosComp(byte);
-        }
-        if (f !== 0) {
-            sum -= f * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const pos = (buf) => {
-    var len = buf.length;
-    var sum = 0;
-    for (var i = len - 1; i > -1; i--) {
-        var byte = Number(buf[i]);
-        if (byte !== 0) {
-            sum += byte * Math.pow(256, len - i - 1);
-        }
-    }
-    return sum;
-};
-const onesComp = (byte) => (0xff ^ byte) & 0xff;
-const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff;
-//# sourceMappingURL=large-numbers.js.map
\ No newline at end of file
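
This base-256 form is what header.js falls back to when a value does not fit its octal field; a 12-byte size field tops out at 0o77777777777 (one byte under 8 GiB), so anything larger is written as a 0x80 lead byte followed by the big-endian value. A worked example of the positive case:

// same positive encoding as encodePositive() above, standalone
const encodeBase256 = (num, buf) => {
  buf[0] = 0x80
  for (let i = buf.length - 1; i > 0; i--) {
    buf[i] = num & 0xff
    num = Math.floor(num / 0x100)
  }
  return buf
}

// a 9 GiB size cannot be stored as null-terminated octal, so it becomes base-256
console.log(encodeBase256(9 * 1024 ** 3, Buffer.alloc(12)).toString('hex'))
// 800000000000000240000000
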
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/make-command.js b/node_modules/pacote/node_modules/tar/dist/esm/make-command.js
deleted file mode 100644
index f2f737bca78fd..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/make-command.js
+++ /dev/null
@@ -1,57 +0,0 @@
-import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js';
-export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => {
-    return Object.assign((opt_ = [], entries, cb) => {
-        if (Array.isArray(opt_)) {
-            entries = opt_;
-            opt_ = {};
-        }
-        if (typeof entries === 'function') {
-            cb = entries;
-            entries = undefined;
-        }
-        if (!entries) {
-            entries = [];
-        }
-        else {
-            entries = Array.from(entries);
-        }
-        const opt = dealias(opt_);
-        validate?.(opt, entries);
-        if (isSyncFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncFile(opt, entries);
-        }
-        else if (isAsyncFile(opt)) {
-            const p = asyncFile(opt, entries);
-            // weirdness to make TS happy
-            const c = cb ? cb : undefined;
-            return c ? p.then(() => c(), c) : p;
-        }
-        else if (isSyncNoFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback not supported for sync tar functions');
-            }
-            return syncNoFile(opt, entries);
-        }
-        else if (isAsyncNoFile(opt)) {
-            if (typeof cb === 'function') {
-                throw new TypeError('callback only supported with file option');
-            }
-            return asyncNoFile(opt, entries);
-            /* c8 ignore start */
-        }
-        else {
-            throw new Error('impossible options??');
-        }
-        /* c8 ignore stop */
-    }, {
-        syncFile,
-        asyncFile,
-        syncNoFile,
-        asyncNoFile,
-        validate,
-    });
-};
-//# sourceMappingURL=make-command.js.map
\ No newline at end of file
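
makeCommand() gives every top-level command the same four call shapes, chosen from the sync and file options after dealiasing. Sketched here with list(), which index.js re-exports (the archive name is hypothetical):

import { createReadStream } from 'node:fs'
import { list } from 'tar'

// sync + file: parses the whole archive before returning
list({ file: 'archive.tar', sync: true, onReadEntry: e => console.log(e.path) })

// async + file: returns a promise (or invokes the optional callback)
await list({ file: 'archive.tar', onReadEntry: e => console.log(e.path) })

// no file: returns a parser stream that raw archive bytes are piped into
createReadStream('archive.tar').pipe(list({ onReadEntry: e => console.log(e.path) }))
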
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js b/node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js
deleted file mode 100644
index 5fd3bb88c1cb2..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/mode-fix.js
+++ /dev/null
@@ -1,25 +0,0 @@
-export const modeFix = (mode, isDir, portable) => {
-    mode &= 0o7777;
-    // in portable mode, use the minimum reasonable umask
-    // if this system creates files with 0o664 by default
-    // (as some linux distros do), then we'll write the
-    // archive with 0o644 instead.  Also, don't ever create
-    // a file that is not readable/writable by the owner.
-    if (portable) {
-        mode = (mode | 0o600) & ~0o22;
-    }
-    // if dirs are readable, then they should be listable
-    if (isDir) {
-        if (mode & 0o400) {
-            mode |= 0o100;
-        }
-        if (mode & 0o40) {
-            mode |= 0o10;
-        }
-        if (mode & 0o4) {
-            mode |= 0o1;
-        }
-    }
-    return mode;
-};
-//# sourceMappingURL=mode-fix.js.map
\ No newline at end of file
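
Worked examples of the rules above, with the same logic restated standalone:

const modeFix = (mode, isDir, portable) => {
  mode &= 0o7777
  // portable archives: owner always gets rw, group/other never get write
  if (portable) mode = (mode | 0o600) & ~0o22
  // readable directories must also be searchable
  if (isDir) {
    if (mode & 0o400) mode |= 0o100
    if (mode & 0o40) mode |= 0o10
    if (mode & 0o4) mode |= 0o1
  }
  return mode
}

console.log(modeFix(0o664, false, true).toString(8)) // '644'
console.log(modeFix(0o640, true, false).toString(8)) // '750'
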
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js
deleted file mode 100644
index 94e5095476d6e..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/normalize-unicode.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
-export const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
-        normalizeCache[s] = s.normalize('NFD');
-    }
-    return normalizeCache[s];
-};
-//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
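
The point of the cache above is that a composed and a decomposed spelling of the same path must collide when paths are compared or reserved; NFD normalization makes them identical. For example:

const composed = 'caf\u00e9'      // precomposed 'é'
const decomposed = 'cafe\u0301'   // 'e' + combining acute accent

console.log(composed === decomposed)                                    // false
console.log(composed.normalize('NFD') === decomposed.normalize('NFD'))  // true
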
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js
deleted file mode 100644
index 2d97d2b884e62..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/normalize-windows-path.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-export const normalizeWindowsPath = platform !== 'win32' ?
-    (p) => p
-    : (p) => p && p.replace(/\\/g, '/');
-//# sourceMappingURL=normalize-windows-path.js.map
\ No newline at end of file
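
Behavior of the helper above: a no-op everywhere except win32, where every backslash becomes a forward slash so the rest of the code only ever deals with '/'. For example (the path is hypothetical):

const normalizeWindowsPath = process.platform !== 'win32'
  ? p => p
  : p => p && p.replace(/\\/g, '/')

// prints 'C:/Users/me/pkg.tgz' on win32, the untouched string elsewhere
console.log(normalizeWindowsPath('C:\\Users\\me\\pkg.tgz'))
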
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/options.js b/node_modules/pacote/node_modules/tar/dist/esm/options.js
deleted file mode 100644
index a006d36c23c92..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/options.js
+++ /dev/null
@@ -1,54 +0,0 @@
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-const argmap = new Map([
-    ['C', 'cwd'],
-    ['f', 'file'],
-    ['z', 'gzip'],
-    ['P', 'preservePaths'],
-    ['U', 'unlink'],
-    ['strip-components', 'strip'],
-    ['stripComponents', 'strip'],
-    ['keep-newer', 'newer'],
-    ['keepNewer', 'newer'],
-    ['keep-newer-files', 'newer'],
-    ['keepNewerFiles', 'newer'],
-    ['k', 'keep'],
-    ['keep-existing', 'keep'],
-    ['keepExisting', 'keep'],
-    ['m', 'noMtime'],
-    ['no-mtime', 'noMtime'],
-    ['p', 'preserveOwner'],
-    ['L', 'follow'],
-    ['h', 'follow'],
-    ['onentry', 'onReadEntry'],
-]);
-export const isSyncFile = (o) => !!o.sync && !!o.file;
-export const isAsyncFile = (o) => !o.sync && !!o.file;
-export const isSyncNoFile = (o) => !!o.sync && !o.file;
-export const isAsyncNoFile = (o) => !o.sync && !o.file;
-export const isSync = (o) => !!o.sync;
-export const isAsync = (o) => !o.sync;
-export const isFile = (o) => !!o.file;
-export const isNoFile = (o) => !o.file;
-const dealiasKey = (k) => {
-    const d = argmap.get(k);
-    if (d)
-        return d;
-    return k;
-};
-export const dealias = (opt = {}) => {
-    if (!opt)
-        return {};
-    const result = {};
-    for (const [key, v] of Object.entries(opt)) {
-        // TS doesn't know that aliases are going to always be the same type
-        const k = dealiasKey(key);
-        result[k] = v;
-    }
-    // affordance for deprecated noChmod -> chmod
-    if (result.chmod === undefined && result.noChmod === false) {
-        result.chmod = true;
-    }
-    delete result.noChmod;
-    return result;
-};
-//# sourceMappingURL=options.js.map
\ No newline at end of file
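
Because every command runs its options through dealias() first, the tar(1)-style short flags in argmap work anywhere the long names do. These two calls are equivalent (the file and directory names are hypothetical):

import { create } from 'tar'

await create({ f: 'out.tgz', z: true, C: 'src' }, ['.'])
await create({ file: 'out.tgz', gzip: true, cwd: 'src' }, ['.'])
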
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/pack.js b/node_modules/pacote/node_modules/tar/dist/esm/pack.js
deleted file mode 100644
index f59f32f94201f..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/pack.js
+++ /dev/null
@@ -1,445 +0,0 @@
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-import fs from 'fs';
-import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js';
-export class PackJob {
-    path;
-    absolute;
-    entry;
-    stat;
-    readdir;
-    pending = false;
-    ignore = false;
-    piped = false;
-    constructor(path, absolute) {
-        this.path = path || './';
-        this.absolute = absolute;
-    }
-}
-import { Minipass } from 'minipass';
-import * as zlib from 'minizlib';
-import { Yallist } from 'yallist';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const EOF = Buffer.alloc(1024);
-const ONSTAT = Symbol('onStat');
-const ENDED = Symbol('ended');
-const QUEUE = Symbol('queue');
-const CURRENT = Symbol('current');
-const PROCESS = Symbol('process');
-const PROCESSING = Symbol('processing');
-const PROCESSJOB = Symbol('processJob');
-const JOBS = Symbol('jobs');
-const JOBDONE = Symbol('jobDone');
-const ADDFSENTRY = Symbol('addFSEntry');
-const ADDTARENTRY = Symbol('addTarEntry');
-const STAT = Symbol('stat');
-const READDIR = Symbol('readdir');
-const ONREADDIR = Symbol('onreaddir');
-const PIPE = Symbol('pipe');
-const ENTRY = Symbol('entry');
-const ENTRYOPT = Symbol('entryOpt');
-const WRITEENTRYCLASS = Symbol('writeEntryClass');
-const WRITE = Symbol('write');
-const ONDRAIN = Symbol('ondrain');
-import path from 'path';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class Pack extends Minipass {
-    opt;
-    cwd;
-    maxReadSize;
-    preservePaths;
-    strict;
-    noPax;
-    prefix;
-    linkCache;
-    statCache;
-    file;
-    portable;
-    zip;
-    readdirCache;
-    noDirRecurse;
-    follow;
-    noMtime;
-    mtime;
-    filter;
-    jobs;
-    [WRITEENTRYCLASS];
-    onWriteEntry;
-    [QUEUE];
-    [JOBS] = 0;
-    [PROCESSING] = false;
-    [ENDED] = false;
-    constructor(opt = {}) {
-        //@ts-ignore
-        super();
-        this.opt = opt;
-        this.file = opt.file || '';
-        this.cwd = opt.cwd || process.cwd();
-        this.maxReadSize = opt.maxReadSize;
-        this.preservePaths = !!opt.preservePaths;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.prefix = normalizeWindowsPath(opt.prefix || '');
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.readdirCache = opt.readdirCache || new Map();
-        this.onWriteEntry = opt.onWriteEntry;
-        this[WRITEENTRYCLASS] = WriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
-            }
-            if (opt.gzip) {
-                if (typeof opt.gzip !== 'object') {
-                    opt.gzip = {};
-                }
-                if (this.portable) {
-                    opt.gzip.portable = true;
-                }
-                this.zip = new zlib.Gzip(opt.gzip);
-            }
-            if (opt.brotli) {
-                if (typeof opt.brotli !== 'object') {
-                    opt.brotli = {};
-                }
-                this.zip = new zlib.BrotliCompress(opt.brotli);
-            }
-            /* c8 ignore next */
-            if (!this.zip)
-                throw new Error('impossible');
-            const zip = this.zip;
-            zip.on('data', chunk => super.write(chunk));
-            zip.on('end', () => super.end());
-            zip.on('drain', () => this[ONDRAIN]());
-            this.on('resume', () => zip.resume());
-        }
-        else {
-            this.on('drain', this[ONDRAIN]);
-        }
-        this.noDirRecurse = !!opt.noDirRecurse;
-        this.follow = !!opt.follow;
-        this.noMtime = !!opt.noMtime;
-        if (opt.mtime)
-            this.mtime = opt.mtime;
-        this.filter =
-            typeof opt.filter === 'function' ? opt.filter : () => true;
-        this[QUEUE] = new Yallist();
-        this[JOBS] = 0;
-        this.jobs = Number(opt.jobs) || 4;
-        this[PROCESSING] = false;
-        this[ENDED] = false;
-    }
-    [WRITE](chunk) {
-        return super.write(chunk);
-    }
-    add(path) {
-        this.write(path);
-        return this;
-    }
-    end(path, encoding, cb) {
-        /* c8 ignore start */
-        if (typeof path === 'function') {
-            cb = path;
-            path = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        /* c8 ignore stop */
-        if (path) {
-            this.add(path);
-        }
-        this[ENDED] = true;
-        this[PROCESS]();
-        /* c8 ignore next */
-        if (cb)
-            cb();
-        return this;
-    }
-    write(path) {
-        if (this[ENDED]) {
-            throw new Error('write after end');
-        }
-        if (path instanceof ReadEntry) {
-            this[ADDTARENTRY](path);
-        }
-        else {
-            this[ADDFSENTRY](path);
-        }
-        return this.flowing;
-    }
-    [ADDTARENTRY](p) {
-        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path));
-        // in this case, we don't have to wait for the stat
-        if (!this.filter(p.path, p)) {
-            p.resume();
-        }
-        else {
-            const job = new PackJob(p.path, absolute);
-            job.entry = new WriteEntryTar(p, this[ENTRYOPT](job));
-            job.entry.on('end', () => this[JOBDONE](job));
-            this[JOBS] += 1;
-            this[QUEUE].push(job);
-        }
-        this[PROCESS]();
-    }
-    [ADDFSENTRY](p) {
-        const absolute = normalizeWindowsPath(path.resolve(this.cwd, p));
-        this[QUEUE].push(new PackJob(p, absolute));
-        this[PROCESS]();
-    }
-    [STAT](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        const stat = this.follow ? 'stat' : 'lstat';
-        fs[stat](job.absolute, (er, stat) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                this.emit('error', er);
-            }
-            else {
-                this[ONSTAT](job, stat);
-            }
-        });
-    }
-    [ONSTAT](job, stat) {
-        this.statCache.set(job.absolute, stat);
-        job.stat = stat;
-        // now we have the stat, we can filter it.
-        if (!this.filter(job.path, stat)) {
-            job.ignore = true;
-        }
-        this[PROCESS]();
-    }
-    [READDIR](job) {
-        job.pending = true;
-        this[JOBS] += 1;
-        fs.readdir(job.absolute, (er, entries) => {
-            job.pending = false;
-            this[JOBS] -= 1;
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADDIR](job, entries);
-        });
-    }
-    [ONREADDIR](job, entries) {
-        this.readdirCache.set(job.absolute, entries);
-        job.readdir = entries;
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        if (this[PROCESSING]) {
-            return;
-        }
-        this[PROCESSING] = true;
-        for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) {
-            this[PROCESSJOB](w.value);
-            if (w.value.ignore) {
-                const p = w.next;
-                this[QUEUE].removeNode(w);
-                w.next = p;
-            }
-        }
-        this[PROCESSING] = false;
-        if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-            if (this.zip) {
-                this.zip.end(EOF);
-            }
-            else {
-                super.write(EOF);
-                super.end();
-            }
-        }
-    }
-    get [CURRENT]() {
-        return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value;
-    }
-    [JOBDONE](_job) {
-        this[QUEUE].shift();
-        this[JOBS] -= 1;
-        this[PROCESS]();
-    }
-    [PROCESSJOB](job) {
-        if (job.pending) {
-            return;
-        }
-        if (job.entry) {
-            if (job === this[CURRENT] && !job.piped) {
-                this[PIPE](job);
-            }
-            return;
-        }
-        if (!job.stat) {
-            const sc = this.statCache.get(job.absolute);
-            if (sc) {
-                this[ONSTAT](job, sc);
-            }
-            else {
-                this[STAT](job);
-            }
-        }
-        if (!job.stat) {
-            return;
-        }
-        // filtered out!
-        if (job.ignore) {
-            return;
-        }
-        if (!this.noDirRecurse &&
-            job.stat.isDirectory() &&
-            !job.readdir) {
-            const rc = this.readdirCache.get(job.absolute);
-            if (rc) {
-                this[ONREADDIR](job, rc);
-            }
-            else {
-                this[READDIR](job);
-            }
-            if (!job.readdir) {
-                return;
-            }
-        }
-        // we know it doesn't have an entry, because that got checked above
-        job.entry = this[ENTRY](job);
-        if (!job.entry) {
-            job.ignore = true;
-            return;
-        }
-        if (job === this[CURRENT] && !job.piped) {
-            this[PIPE](job);
-        }
-    }
-    [ENTRYOPT](job) {
-        return {
-            onwarn: (code, msg, data) => this.warn(code, msg, data),
-            noPax: this.noPax,
-            cwd: this.cwd,
-            absolute: job.absolute,
-            preservePaths: this.preservePaths,
-            maxReadSize: this.maxReadSize,
-            strict: this.strict,
-            portable: this.portable,
-            linkCache: this.linkCache,
-            statCache: this.statCache,
-            noMtime: this.noMtime,
-            mtime: this.mtime,
-            prefix: this.prefix,
-            onWriteEntry: this.onWriteEntry,
-        };
-    }
-    [ENTRY](job) {
-        this[JOBS] += 1;
-        try {
-            const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job));
-            return e
-                .on('end', () => this[JOBDONE](job))
-                .on('error', er => this.emit('error', er));
-        }
-        catch (er) {
-            this.emit('error', er);
-        }
-    }
-    [ONDRAIN]() {
-        if (this[CURRENT] && this[CURRENT].entry) {
-            this[CURRENT].entry.resume();
-        }
-    }
-    // like .pipe() but using super, because our write() is special
-    [PIPE](job) {
-        job.piped = true;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        const source = job.entry;
-        const zip = this.zip;
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                if (!zip.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                if (!super.write(chunk)) {
-                    source.pause();
-                }
-            });
-        }
-    }
-    pause() {
-        if (this.zip) {
-            this.zip.pause();
-        }
-        return super.pause();
-    }
-    warn(code, message, data = {}) {
-        warnMethod(this, code, message, data);
-    }
-}
-export class PackSync extends Pack {
-    sync = true;
-    constructor(opt) {
-        super(opt);
-        this[WRITEENTRYCLASS] = WriteEntrySync;
-    }
-    // pause/resume are no-ops in sync streams.
-    pause() { }
-    resume() { }
-    [STAT](job) {
-        const stat = this.follow ? 'statSync' : 'lstatSync';
-        this[ONSTAT](job, fs[stat](job.absolute));
-    }
-    [READDIR](job) {
-        this[ONREADDIR](job, fs.readdirSync(job.absolute));
-    }
-    // gotta get it all in this tick
-    [PIPE](job) {
-        const source = job.entry;
-        const zip = this.zip;
-        if (job.readdir) {
-            job.readdir.forEach(entry => {
-                const p = job.path;
-                const base = p === './' ? '' : p.replace(/\/*$/, '/');
-                this[ADDFSENTRY](base + entry);
-            });
-        }
-        /* c8 ignore start */
-        if (!source)
-            throw new Error('Cannot pipe without source');
-        /* c8 ignore stop */
-        if (zip) {
-            source.on('data', chunk => {
-                zip.write(chunk);
-            });
-        }
-        else {
-            source.on('data', chunk => {
-                super[WRITE](chunk);
-            });
-        }
-    }
-}
-//# sourceMappingURL=pack.js.map
\ No newline at end of file
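
In use, the Pack stream above is exactly what its header comment describes: paths are written in, tar bytes flow out. A minimal sketch (the output file and added paths are hypothetical):

import { createWriteStream } from 'node:fs'
import { Pack } from 'tar'

new Pack({ gzip: true })
  .add('package.json')
  .add('lib')
  .end()
  .pipe(createWriteStream('out.tgz'))
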
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/package.json b/node_modules/pacote/node_modules/tar/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/parse.js b/node_modules/pacote/node_modules/tar/dist/esm/parse.js
deleted file mode 100644
index cce430479cd0c..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/parse.js
+++ /dev/null
@@ -1,595 +0,0 @@
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-import { EventEmitter as EE } from 'events';
-import { BrotliDecompress, Unzip } from 'minizlib';
-import { Yallist } from 'yallist';
-import { Header } from './header.js';
-import { Pax } from './pax.js';
-import { ReadEntry } from './read-entry.js';
-import { warnMethod, } from './warn-method.js';
-const maxMetaEntrySize = 1024 * 1024;
-const gzipHeader = Buffer.from([0x1f, 0x8b]);
-const STATE = Symbol('state');
-const WRITEENTRY = Symbol('writeEntry');
-const READENTRY = Symbol('readEntry');
-const NEXTENTRY = Symbol('nextEntry');
-const PROCESSENTRY = Symbol('processEntry');
-const EX = Symbol('extendedHeader');
-const GEX = Symbol('globalExtendedHeader');
-const META = Symbol('meta');
-const EMITMETA = Symbol('emitMeta');
-const BUFFER = Symbol('buffer');
-const QUEUE = Symbol('queue');
-const ENDED = Symbol('ended');
-const EMITTEDEND = Symbol('emittedEnd');
-const EMIT = Symbol('emit');
-const UNZIP = Symbol('unzip');
-const CONSUMECHUNK = Symbol('consumeChunk');
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub');
-const CONSUMEBODY = Symbol('consumeBody');
-const CONSUMEMETA = Symbol('consumeMeta');
-const CONSUMEHEADER = Symbol('consumeHeader');
-const CONSUMING = Symbol('consuming');
-const BUFFERCONCAT = Symbol('bufferConcat');
-const MAYBEEND = Symbol('maybeEnd');
-const WRITING = Symbol('writing');
-const ABORTED = Symbol('aborted');
-const DONE = Symbol('onDone');
-const SAW_VALID_ENTRY = Symbol('sawValidEntry');
-const SAW_NULL_BLOCK = Symbol('sawNullBlock');
-const SAW_EOF = Symbol('sawEOF');
-const CLOSESTREAM = Symbol('closeStream');
-const noop = () => true;
-export class Parser extends EE {
-    file;
-    strict;
-    maxMetaEntrySize;
-    filter;
-    brotli;
-    writable = true;
-    readable = false;
-    [QUEUE] = new Yallist();
-    [BUFFER];
-    [READENTRY];
-    [WRITEENTRY];
-    [STATE] = 'begin';
-    [META] = '';
-    [EX];
-    [GEX];
-    [ENDED] = false;
-    [UNZIP];
-    [ABORTED] = false;
-    [SAW_VALID_ENTRY];
-    [SAW_NULL_BLOCK] = false;
-    [SAW_EOF] = false;
-    [WRITING] = false;
-    [CONSUMING] = false;
-    [EMITTEDEND] = false;
-    constructor(opt = {}) {
-        super();
-        this.file = opt.file || '';
-        // these BADARCHIVE errors can't be detected early. listen on DONE.
-        this.on(DONE, () => {
-            if (this[STATE] === 'begin' ||
-                this[SAW_VALID_ENTRY] === false) {
-                // either less than 1 block of data, or all entries were invalid.
-                // Either way, probably not even a tarball.
-                this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format');
-            }
-        });
-        if (opt.ondone) {
-            this.on(DONE, opt.ondone);
-        }
-        else {
-            this.on(DONE, () => {
-                this.emit('prefinish');
-                this.emit('finish');
-                this.emit('end');
-            });
-        }
-        this.strict = !!opt.strict;
-        this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize;
-        this.filter = typeof opt.filter === 'function' ? opt.filter : noop;
-        // Unlike gzip, brotli doesn't have any magic bytes to identify it
-        // Users need to explicitly tell us they're extracting a brotli file
-        // Or we infer from the file extension
-        const isTBR = opt.file &&
-            (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr'));
-        // if it's a tbr file it MIGHT be brotli, but we don't know until
-        // we look at it and verify it's not a valid tar file.
-        this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
-                : isTBR ? undefined
-                    : false;
-        // have to set this so that streams are ok piping into it
-        this.on('end', () => this[CLOSESTREAM]());
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        if (typeof opt.onReadEntry === 'function') {
-            this.on('entry', opt.onReadEntry);
-        }
-    }
-    warn(code, message, data = {}) {
-        warnMethod(this, code, message, data);
-    }
-    [CONSUMEHEADER](chunk, position) {
-        if (this[SAW_VALID_ENTRY] === undefined) {
-            this[SAW_VALID_ENTRY] = false;
-        }
-        let header;
-        try {
-            header = new Header(chunk, position, this[EX], this[GEX]);
-        }
-        catch (er) {
-            return this.warn('TAR_ENTRY_INVALID', er);
-        }
-        if (header.nullBlock) {
-            if (this[SAW_NULL_BLOCK]) {
-                this[SAW_EOF] = true;
-                // ending an archive with no entries.  pointless, but legal.
-                if (this[STATE] === 'begin') {
-                    this[STATE] = 'header';
-                }
-                this[EMIT]('eof');
-            }
-            else {
-                this[SAW_NULL_BLOCK] = true;
-                this[EMIT]('nullBlock');
-            }
-        }
-        else {
-            this[SAW_NULL_BLOCK] = false;
-            if (!header.cksumValid) {
-                this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header });
-            }
-            else if (!header.path) {
-                this.warn('TAR_ENTRY_INVALID', 'path is required', { header });
-            }
-            else {
-                const type = header.type;
-                if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath required', {
-                        header,
-                    });
-                }
-                else if (!/^(Symbolic)?Link$/.test(type) &&
-                    !/^(Global)?ExtendedHeader$/.test(type) &&
-                    header.linkpath) {
-                    this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {
-                        header,
-                    });
-                }
-                else {
-                    const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX]));
-                    // we do this for meta & ignored entries as well, because they
-                    // are still valid tar, or else we wouldn't know to ignore them
-                    if (!this[SAW_VALID_ENTRY]) {
-                        if (entry.remain) {
-                            // this might be the one!
-                            const onend = () => {
-                                if (!entry.invalid) {
-                                    this[SAW_VALID_ENTRY] = true;
-                                }
-                            };
-                            entry.on('end', onend);
-                        }
-                        else {
-                            this[SAW_VALID_ENTRY] = true;
-                        }
-                    }
-                    if (entry.meta) {
-                        if (entry.size > this.maxMetaEntrySize) {
-                            entry.ignore = true;
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = 'ignore';
-                            entry.resume();
-                        }
-                        else if (entry.size > 0) {
-                            this[META] = '';
-                            entry.on('data', c => (this[META] += c));
-                            this[STATE] = 'meta';
-                        }
-                    }
-                    else {
-                        this[EX] = undefined;
-                        entry.ignore =
-                            entry.ignore || !this.filter(entry.path, entry);
-                        if (entry.ignore) {
-                            // probably valid, just not something we care about
-                            this[EMIT]('ignoredEntry', entry);
-                            this[STATE] = entry.remain ? 'ignore' : 'header';
-                            entry.resume();
-                        }
-                        else {
-                            if (entry.remain) {
-                                this[STATE] = 'body';
-                            }
-                            else {
-                                this[STATE] = 'header';
-                                entry.end();
-                            }
-                            if (!this[READENTRY]) {
-                                this[QUEUE].push(entry);
-                                this[NEXTENTRY]();
-                            }
-                            else {
-                                this[QUEUE].push(entry);
-                            }
-                        }
-                    }
-                }
-            }
-        }
-    }
-    [CLOSESTREAM]() {
-        queueMicrotask(() => this.emit('close'));
-    }
-    [PROCESSENTRY](entry) {
-        let go = true;
-        if (!entry) {
-            this[READENTRY] = undefined;
-            go = false;
-        }
-        else if (Array.isArray(entry)) {
-            const [ev, ...args] = entry;
-            this.emit(ev, ...args);
-        }
-        else {
-            this[READENTRY] = entry;
-            this.emit('entry', entry);
-            if (!entry.emittedEnd) {
-                entry.on('end', () => this[NEXTENTRY]());
-                go = false;
-            }
-        }
-        return go;
-    }
-    [NEXTENTRY]() {
-        do { } while (this[PROCESSENTRY](this[QUEUE].shift()));
-        if (!this[QUEUE].length) {
-            // At this point, there's nothing in the queue, but we may have an
-            // entry which is being consumed (readEntry).
-            // If we don't, then we definitely can handle more data.
-            // If we do, and either it's flowing, or it has never had any data
-            // written to it, then it needs more.
-            // The only other possibility is that it has returned false from a
-            // write() call, so we wait for the next drain to continue.
-            const re = this[READENTRY];
-            const drainNow = !re || re.flowing || re.size === re.remain;
-            if (drainNow) {
-                if (!this[WRITING]) {
-                    this.emit('drain');
-                }
-            }
-            else {
-                re.once('drain', () => this.emit('drain'));
-            }
-        }
-    }
-    [CONSUMEBODY](chunk, position) {
-        // write up to but no  more than writeEntry.blockRemain
-        const entry = this[WRITEENTRY];
-        /* c8 ignore start */
-        if (!entry) {
-            throw new Error('attempt to consume body without entry??');
-        }
-        const br = entry.blockRemain ?? 0;
-        /* c8 ignore stop */
-        const c = br >= chunk.length && position === 0 ?
-            chunk
-            : chunk.subarray(position, position + br);
-        entry.write(c);
-        if (!entry.blockRemain) {
-            this[STATE] = 'header';
-            this[WRITEENTRY] = undefined;
-            entry.end();
-        }
-        return c.length;
-    }
-    [CONSUMEMETA](chunk, position) {
-        const entry = this[WRITEENTRY];
-        const ret = this[CONSUMEBODY](chunk, position);
-        // if we finished, then the entry is reset
-        if (!this[WRITEENTRY] && entry) {
-            this[EMITMETA](entry);
-        }
-        return ret;
-    }
-    [EMIT](ev, data, extra) {
-        if (!this[QUEUE].length && !this[READENTRY]) {
-            this.emit(ev, data, extra);
-        }
-        else {
-            this[QUEUE].push([ev, data, extra]);
-        }
-    }
-    [EMITMETA](entry) {
-        this[EMIT]('meta', this[META]);
-        switch (entry.type) {
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this[EX] = Pax.parse(this[META], this[EX], false);
-                break;
-            case 'GlobalExtendedHeader':
-                this[GEX] = Pax.parse(this[META], this[GEX], true);
-                break;
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath': {
-                const ex = this[EX] ?? Object.create(null);
-                this[EX] = ex;
-                ex.path = this[META].replace(/\0.*/, '');
-                break;
-            }
-            case 'NextFileHasLongLinkpath': {
-                const ex = this[EX] || Object.create(null);
-                this[EX] = ex;
-                ex.linkpath = this[META].replace(/\0.*/, '');
-                break;
-            }
-            /* c8 ignore start */
-            default:
-                throw new Error('unknown meta: ' + entry.type);
-            /* c8 ignore stop */
-        }
-    }
-    abort(error) {
-        this[ABORTED] = true;
-        this.emit('abort', error);
-        // always throws, even in non-strict mode
-        this.warn('TAR_ABORT', error, { recoverable: false });
-    }
-    write(chunk, encoding, cb) {
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, 
-            /* c8 ignore next */
-            typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        if (this[ABORTED]) {
-            /* c8 ignore next */
-            cb?.();
-            return false;
-        }
-        // first write, might be gzipped
-        const needSniff = this[UNZIP] === undefined ||
-            (this.brotli === undefined && this[UNZIP] === false);
-        if (needSniff && chunk) {
-            if (this[BUFFER]) {
-                chunk = Buffer.concat([this[BUFFER], chunk]);
-                this[BUFFER] = undefined;
-            }
-            if (chunk.length < gzipHeader.length) {
-                this[BUFFER] = chunk;
-                /* c8 ignore next */
-                cb?.();
-                return true;
-            }
-            // look for gzip header
-            for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) {
-                if (chunk[i] !== gzipHeader[i]) {
-                    this[UNZIP] = false;
-                }
-            }
-            const maybeBrotli = this.brotli === undefined;
-            if (this[UNZIP] === false && maybeBrotli) {
-                // read the first header to see if it's a valid tar file. If so,
-                // we can safely assume that it's not actually brotli, despite the
-                // .tbr or .tar.br file extension.
-                // if we ended before getting a full chunk, yes, def brotli
-                if (chunk.length < 512) {
-                    if (this[ENDED]) {
-                        this.brotli = true;
-                    }
-                    else {
-                        this[BUFFER] = chunk;
-                        /* c8 ignore next */
-                        cb?.();
-                        return true;
-                    }
-                }
-                else {
-                    // if it's tar, it's pretty reliably not brotli, chances of
-                    // that happening are astronomical.
-                    try {
-                        new Header(chunk.subarray(0, 512));
-                        this.brotli = false;
-                    }
-                    catch (_) {
-                        this.brotli = true;
-                    }
-                }
-            }
-            if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
-                const ended = this[ENDED];
-                this[ENDED] = false;
-                this[UNZIP] =
-                    this[UNZIP] === undefined ?
-                        new Unzip({})
-                        : new BrotliDecompress({});
-                this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
-                this[UNZIP].on('error', er => this.abort(er));
-                this[UNZIP].on('end', () => {
-                    this[ENDED] = true;
-                    this[CONSUMECHUNK]();
-                });
-                this[WRITING] = true;
-                const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk);
-                this[WRITING] = false;
-                cb?.();
-                return ret;
-            }
-        }
-        this[WRITING] = true;
-        if (this[UNZIP]) {
-            this[UNZIP].write(chunk);
-        }
-        else {
-            this[CONSUMECHUNK](chunk);
-        }
-        this[WRITING] = false;
-        // return false if there's a queue, or if the current entry isn't flowing
-        const ret = this[QUEUE].length ? false
-            : this[READENTRY] ? this[READENTRY].flowing
-                : true;
-        // if we have no queue, then that means a clogged READENTRY
-        if (!ret && !this[QUEUE].length) {
-            this[READENTRY]?.once('drain', () => this.emit('drain'));
-        }
-        /* c8 ignore next */
-        cb?.();
-        return ret;
-    }
-    [BUFFERCONCAT](c) {
-        if (c && !this[ABORTED]) {
-            this[BUFFER] =
-                this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c;
-        }
-    }
-    [MAYBEEND]() {
-        if (this[ENDED] &&
-            !this[EMITTEDEND] &&
-            !this[ABORTED] &&
-            !this[CONSUMING]) {
-            this[EMITTEDEND] = true;
-            const entry = this[WRITEENTRY];
-            if (entry && entry.blockRemain) {
-                // truncated, likely a damaged file
-                const have = this[BUFFER] ? this[BUFFER].length : 0;
-                this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry });
-                if (this[BUFFER]) {
-                    entry.write(this[BUFFER]);
-                }
-                entry.end();
-            }
-            this[EMIT](DONE);
-        }
-    }
-    [CONSUMECHUNK](chunk) {
-        if (this[CONSUMING] && chunk) {
-            this[BUFFERCONCAT](chunk);
-        }
-        else if (!chunk && !this[BUFFER]) {
-            this[MAYBEEND]();
-        }
-        else if (chunk) {
-            this[CONSUMING] = true;
-            if (this[BUFFER]) {
-                this[BUFFERCONCAT](chunk);
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            else {
-                this[CONSUMECHUNKSUB](chunk);
-            }
-            while (this[BUFFER] &&
-                this[BUFFER]?.length >= 512 &&
-                !this[ABORTED] &&
-                !this[SAW_EOF]) {
-                const c = this[BUFFER];
-                this[BUFFER] = undefined;
-                this[CONSUMECHUNKSUB](c);
-            }
-            this[CONSUMING] = false;
-        }
-        if (!this[BUFFER] || this[ENDED]) {
-            this[MAYBEEND]();
-        }
-    }
-    [CONSUMECHUNKSUB](chunk) {
-        // we know that we are in CONSUMING mode, so anything written goes into
-        // the buffer.  Advance the position and put any remainder in the buffer.
-        let position = 0;
-        const length = chunk.length;
-        while (position + 512 <= length &&
-            !this[ABORTED] &&
-            !this[SAW_EOF]) {
-            switch (this[STATE]) {
-                case 'begin':
-                case 'header':
-                    this[CONSUMEHEADER](chunk, position);
-                    position += 512;
-                    break;
-                case 'ignore':
-                case 'body':
-                    position += this[CONSUMEBODY](chunk, position);
-                    break;
-                case 'meta':
-                    position += this[CONSUMEMETA](chunk, position);
-                    break;
-                /* c8 ignore start */
-                default:
-                    throw new Error('invalid state: ' + this[STATE]);
-                /* c8 ignore stop */
-            }
-        }
-        if (position < length) {
-            if (this[BUFFER]) {
-                this[BUFFER] = Buffer.concat([
-                    chunk.subarray(position),
-                    this[BUFFER],
-                ]);
-            }
-            else {
-                this[BUFFER] = chunk.subarray(position);
-            }
-        }
-    }
-    end(chunk, encoding, cb) {
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding);
-        }
-        if (cb)
-            this.once('finish', cb);
-        if (!this[ABORTED]) {
-            if (this[UNZIP]) {
-                /* c8 ignore start */
-                if (chunk)
-                    this[UNZIP].write(chunk);
-                /* c8 ignore stop */
-                this[UNZIP].end();
-            }
-            else {
-                this[ENDED] = true;
-                if (this.brotli === undefined)
-                    chunk = chunk || Buffer.alloc(0);
-                if (chunk)
-                    this.write(chunk);
-                this[MAYBEEND]();
-            }
-        }
-        return this;
-    }
-}
-//# sourceMappingURL=parse.js.map
\ No newline at end of file
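
The parser above never looks at more than 512 bytes at a time: `CONSUMECHUNKSUB` parses a header block, then consumes (or ignores) `ceil(size / 512)` null-padded body blocks before expecting the next header, and an all-zero block ends the archive. A minimal sketch of that block walk, not the npm/tar implementation, assuming a plain uncompressed `archive.tar` and ignoring pax/long-name meta entries and base-256 size encoding:

```js
import { readFileSync } from 'node:fs';

// Walk a tar buffer 512 bytes at a time, using the same block layout that
// CONSUMECHUNKSUB relies on: one header block, then null-padded body blocks.
const listTarEntries = (file) => {
  const buf = readFileSync(file);
  const entries = [];
  let pos = 0;
  while (pos + 512 <= buf.length) {
    const block = buf.subarray(pos, pos + 512);
    if (block.every(b => b === 0)) break;                    // end-of-archive marker
    const name = block.toString('utf8', 0, 100).replace(/\0.*$/, '');
    const size = parseInt(block.toString('utf8', 124, 136).trim(), 8) || 0;
    entries.push({ name, size });
    pos += 512 + 512 * Math.ceil(size / 512);                // header + padded body
  }
  return entries;
};

console.log(listTarEntries('archive.tar'));
```
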
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js b/node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js
deleted file mode 100644
index e63b9c91e9a80..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/path-reservations.js
+++ /dev/null
@@ -1,166 +0,0 @@
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-import { join } from 'node:path';
-import { normalizeUnicode } from './normalize-unicode.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform;
-const isWindows = platform === 'win32';
-// return a set of parent dirs for a given path
-// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-const getDirs = (path) => {
-    const dirs = path
-        .split('/')
-        .slice(0, -1)
-        .reduce((set, path) => {
-        const s = set[set.length - 1];
-        if (s !== undefined) {
-            path = join(s, path);
-        }
-        set.push(path || '/');
-        return set;
-    }, []);
-    return dirs;
-};
-export class PathReservations {
-    // path => [function or Set]
-    // A Set object means a directory reservation
-    // A fn is a direct reservation on that path
-    #queues = new Map();
-    // fn => {paths:[path,...], dirs:[path, ...]}
-    #reservations = new Map();
-    // functions currently running
-    #running = new Set();
-    reserve(paths, fn) {
-        paths =
-            isWindows ?
-                ['win32 parallelization disabled']
-                : paths.map(p => {
-                    // don't need normPath, because we skip this entirely for windows
-                    return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase();
-                });
-        const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)));
-        this.#reservations.set(fn, { dirs, paths });
-        for (const p of paths) {
-            const q = this.#queues.get(p);
-            if (!q) {
-                this.#queues.set(p, [fn]);
-            }
-            else {
-                q.push(fn);
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            if (!q) {
-                this.#queues.set(dir, [new Set([fn])]);
-            }
-            else {
-                const l = q[q.length - 1];
-                if (l instanceof Set) {
-                    l.add(fn);
-                }
-                else {
-                    q.push(new Set([fn]));
-                }
-            }
-        }
-        return this.#run(fn);
-    }
-    // return the queues for each path the function cares about
-    // fn => {paths, dirs}
-    #getQueues(fn) {
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('function does not have any path reservations');
-        }
-        /* c8 ignore stop */
-        return {
-            paths: res.paths.map((path) => this.#queues.get(path)),
-            dirs: [...res.dirs].map(path => this.#queues.get(path)),
-        };
-    }
-    // check if fn is first in line for all its paths, and is
-    // included in the first set for all its dir queues
-    check(fn) {
-        const { paths, dirs } = this.#getQueues(fn);
-        return (paths.every(q => q && q[0] === fn) &&
-            dirs.every(q => q && q[0] instanceof Set && q[0].has(fn)));
-    }
-    // run the function if it's first in line and not already running
-    #run(fn) {
-        if (this.#running.has(fn) || !this.check(fn)) {
-            return false;
-        }
-        this.#running.add(fn);
-        fn(() => this.#clear(fn));
-        return true;
-    }
-    #clear(fn) {
-        if (!this.#running.has(fn)) {
-            return false;
-        }
-        const res = this.#reservations.get(fn);
-        /* c8 ignore start */
-        if (!res) {
-            throw new Error('invalid reservation');
-        }
-        /* c8 ignore stop */
-        const { paths, dirs } = res;
-        const next = new Set();
-        for (const path of paths) {
-            const q = this.#queues.get(path);
-            /* c8 ignore start */
-            if (!q || q?.[0] !== fn) {
-                continue;
-            }
-            /* c8 ignore stop */
-            const q0 = q[1];
-            if (!q0) {
-                this.#queues.delete(path);
-                continue;
-            }
-            q.shift();
-            if (typeof q0 === 'function') {
-                next.add(q0);
-            }
-            else {
-                for (const f of q0) {
-                    next.add(f);
-                }
-            }
-        }
-        for (const dir of dirs) {
-            const q = this.#queues.get(dir);
-            const q0 = q?.[0];
-            /* c8 ignore next - type safety only */
-            if (!q || !(q0 instanceof Set))
-                continue;
-            if (q0.size === 1 && q.length === 1) {
-                this.#queues.delete(dir);
-                continue;
-            }
-            else if (q0.size === 1) {
-                q.shift();
-                // next one must be a function,
-                // or else the Set would've been reused
-                const n = q[0];
-                if (typeof n === 'function') {
-                    next.add(n);
-                }
-            }
-            else {
-                q0.delete(fn);
-            }
-        }
-        this.#running.delete(fn);
-        next.forEach(fn => this.#run(fn));
-        return true;
-    }
-}
-//# sourceMappingURL=path-reservations.js.map
\ No newline at end of file
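
A usage sketch of the reservation queueing above; the import path is illustrative, since this module is internal to the package. Two reservations on the same path are serialized through the per-path queue, while a reservation on a different path, even inside the same directory, joins the same directory `Set` and may run concurrently:

```js
import { PathReservations } from 'tar/dist/esm/path-reservations.js';

const res = new PathReservations();

res.reserve(['a/b/file1'], done => {
  console.log('job 1 start');
  setTimeout(() => { console.log('job 1 done'); done(); }, 50);
});

// Same path as job 1: queued behind it, runs only after job 1 calls done().
res.reserve(['a/b/file1'], done => {
  console.log('job 2 start');
  done();
});

// Different file in the same directory: shares the dir Set, starts right away.
res.reserve(['a/b/file2'], done => {
  console.log('job 3 start');
  done();
});
```
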
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/pax.js b/node_modules/pacote/node_modules/tar/dist/esm/pax.js
deleted file mode 100644
index 832808f344da5..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/pax.js
+++ /dev/null
@@ -1,154 +0,0 @@
-import { basename } from 'node:path';
-import { Header } from './header.js';
-export class Pax {
-    atime;
-    mtime;
-    ctime;
-    charset;
-    comment;
-    gid;
-    uid;
-    gname;
-    uname;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    path;
-    size;
-    mode;
-    global;
-    constructor(obj, global = false) {
-        this.atime = obj.atime;
-        this.charset = obj.charset;
-        this.comment = obj.comment;
-        this.ctime = obj.ctime;
-        this.dev = obj.dev;
-        this.gid = obj.gid;
-        this.global = global;
-        this.gname = obj.gname;
-        this.ino = obj.ino;
-        this.linkpath = obj.linkpath;
-        this.mtime = obj.mtime;
-        this.nlink = obj.nlink;
-        this.path = obj.path;
-        this.size = obj.size;
-        this.uid = obj.uid;
-        this.uname = obj.uname;
-    }
-    encode() {
-        const body = this.encodeBody();
-        if (body === '') {
-            return Buffer.allocUnsafe(0);
-        }
-        const bodyLen = Buffer.byteLength(body);
-        // round up to 512 bytes
-        // add 512 for header
-        const bufLen = 512 * Math.ceil(1 + bodyLen / 512);
-        const buf = Buffer.allocUnsafe(bufLen);
-        // 0-fill the header section, it might not hit every field
-        for (let i = 0; i < 512; i++) {
-            buf[i] = 0;
-        }
-        new Header({
-            // XXX split the path
-            // then the path should be PaxHeader + basename, but less than 99,
-            // prepend with the dirname
-            /* c8 ignore start */
-            path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99),
-            /* c8 ignore stop */
-            mode: this.mode || 0o644,
-            uid: this.uid,
-            gid: this.gid,
-            size: bodyLen,
-            mtime: this.mtime,
-            type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-            linkpath: '',
-            uname: this.uname || '',
-            gname: this.gname || '',
-            devmaj: 0,
-            devmin: 0,
-            atime: this.atime,
-            ctime: this.ctime,
-        }).encode(buf);
-        buf.write(body, 512, bodyLen, 'utf8');
-        // null pad after the body
-        for (let i = bodyLen + 512; i < buf.length; i++) {
-            buf[i] = 0;
-        }
-        return buf;
-    }
-    encodeBody() {
-        return (this.encodeField('path') +
-            this.encodeField('ctime') +
-            this.encodeField('atime') +
-            this.encodeField('dev') +
-            this.encodeField('ino') +
-            this.encodeField('nlink') +
-            this.encodeField('charset') +
-            this.encodeField('comment') +
-            this.encodeField('gid') +
-            this.encodeField('gname') +
-            this.encodeField('linkpath') +
-            this.encodeField('mtime') +
-            this.encodeField('size') +
-            this.encodeField('uid') +
-            this.encodeField('uname'));
-    }
-    encodeField(field) {
-        if (this[field] === undefined) {
-            return '';
-        }
-        const r = this[field];
-        const v = r instanceof Date ? r.getTime() / 1000 : r;
-        const s = ' ' +
-            (field === 'dev' || field === 'ino' || field === 'nlink' ?
-                'SCHILY.'
-                : '') +
-            field +
-            '=' +
-            v +
-            '\n';
-        const byteLen = Buffer.byteLength(s);
-        // the digits includes the length of the digits in ascii base-10
-        // so if it's 9 characters, then adding 1 for the 9 makes it 10
-        // which makes it 11 chars.
-        let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1;
-        if (byteLen + digits >= Math.pow(10, digits)) {
-            digits += 1;
-        }
-        const len = digits + byteLen;
-        return len + s;
-    }
-    static parse(str, ex, g = false) {
-        return new Pax(merge(parseKV(str), ex), g);
-    }
-}
-const merge = (a, b) => b ? Object.assign({}, b, a) : a;
-const parseKV = (str) => str
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null));
-const parseKVLine = (set, line) => {
-    const n = parseInt(line, 10);
-    // XXX Values with \n in them will fail this.
-    // Refactor to not be a naive line-by-line parse.
-    if (n !== Buffer.byteLength(line) + 1) {
-        return set;
-    }
-    line = line.slice((n + ' ').length);
-    const kv = line.split('=');
-    const r = kv.shift();
-    if (!r) {
-        return set;
-    }
-    const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1');
-    const v = kv.join('=');
-    set[k] =
-        /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ?
-            new Date(Number(v) * 1000)
-            : /^[0-9]+$/.test(v) ? +v
-                : v;
-    return set;
-};
-//# sourceMappingURL=pax.js.map
\ No newline at end of file
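
The length prefix computed in `encodeField` above is slightly self-referential: each pax record is `<len> <key>=<value>\n`, where `<len>` is the byte length of the whole record including its own decimal digits, so the digit count may need a second pass. A minimal sketch of just that arithmetic:

```js
// Build a single pax record the same way encodeField does: the length field
// counts itself, so adding the digits can push the total into one more digit.
const paxRecord = (key, value) => {
  const s = ` ${key}=${value}\n`;
  const byteLen = Buffer.byteLength(s);
  let digits = Math.floor(Math.log10(byteLen)) + 1;
  if (byteLen + digits >= 10 ** digits) digits += 1;
  return (digits + byteLen) + s;
};

console.log(JSON.stringify(paxRecord('path', 'some/long/file/name.txt')));
// => "32 path=some/long/file/name.txt\n"  (record length 32, counting the "32")
```
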
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/read-entry.js b/node_modules/pacote/node_modules/tar/dist/esm/read-entry.js
deleted file mode 100644
index 23cc673e61087..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/read-entry.js
+++ /dev/null
@@ -1,136 +0,0 @@
-import { Minipass } from 'minipass';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-export class ReadEntry extends Minipass {
-    extended;
-    globalExtended;
-    header;
-    startBlockSize;
-    blockRemain;
-    remain;
-    type;
-    meta = false;
-    ignore = false;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    size = 0;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    dev;
-    ino;
-    nlink;
-    invalid = false;
-    absolute;
-    unsupported = false;
-    constructor(header, ex, gex) {
-        super({});
-        // read entries always start life paused.  this is to avoid the
-        // situation where Minipass's auto-ending empty streams results
-        // in an entry ending before we're ready for it.
-        this.pause();
-        this.extended = ex;
-        this.globalExtended = gex;
-        this.header = header;
-        /* c8 ignore start */
-        this.remain = header.size ?? 0;
-        /* c8 ignore stop */
-        this.startBlockSize = 512 * Math.ceil(this.remain / 512);
-        this.blockRemain = this.startBlockSize;
-        this.type = header.type;
-        switch (this.type) {
-            case 'File':
-            case 'OldFile':
-            case 'Link':
-            case 'SymbolicLink':
-            case 'CharacterDevice':
-            case 'BlockDevice':
-            case 'Directory':
-            case 'FIFO':
-            case 'ContiguousFile':
-            case 'GNUDumpDir':
-                break;
-            case 'NextFileHasLongLinkpath':
-            case 'NextFileHasLongPath':
-            case 'OldGnuLongPath':
-            case 'GlobalExtendedHeader':
-            case 'ExtendedHeader':
-            case 'OldExtendedHeader':
-                this.meta = true;
-                break;
-            // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-            // it may be worth doing the same, but with a warning.
-            default:
-                this.ignore = true;
-        }
-        /* c8 ignore start */
-        if (!header.path) {
-            throw new Error('no path provided for tar.ReadEntry');
-        }
-        /* c8 ignore stop */
-        this.path = normalizeWindowsPath(header.path);
-        this.mode = header.mode;
-        if (this.mode) {
-            this.mode = this.mode & 0o7777;
-        }
-        this.uid = header.uid;
-        this.gid = header.gid;
-        this.uname = header.uname;
-        this.gname = header.gname;
-        this.size = this.remain;
-        this.mtime = header.mtime;
-        this.atime = header.atime;
-        this.ctime = header.ctime;
-        /* c8 ignore start */
-        this.linkpath =
-            header.linkpath ?
-                normalizeWindowsPath(header.linkpath)
-                : undefined;
-        /* c8 ignore stop */
-        this.uname = header.uname;
-        this.gname = header.gname;
-        if (ex) {
-            this.#slurp(ex);
-        }
-        if (gex) {
-            this.#slurp(gex, true);
-        }
-    }
-    write(data) {
-        const writeLen = data.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        const r = this.remain;
-        const br = this.blockRemain;
-        this.remain = Math.max(0, r - writeLen);
-        this.blockRemain = Math.max(0, br - writeLen);
-        if (this.ignore) {
-            return true;
-        }
-        if (r >= writeLen) {
-            return super.write(data);
-        }
-        // r < writeLen
-        return super.write(data.subarray(0, r));
-    }
-    #slurp(ex, gex = false) {
-        if (ex.path)
-            ex.path = normalizeWindowsPath(ex.path);
-        if (ex.linkpath)
-            ex.linkpath = normalizeWindowsPath(ex.linkpath);
-        Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => {
-            // we slurp in everything except for the path attribute in
-            // a global extended header, because that's weird. Also, any
-            // null/undefined values are ignored.
-            return !(v === null ||
-                v === undefined ||
-                (k === 'path' && gex));
-        })));
-    }
-}
-//# sourceMappingURL=read-entry.js.map
\ No newline at end of file
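
A `ReadEntry` starts life paused so Minipass cannot auto-end an empty entry before a consumer is attached; something must read its body or call `resume()` for parsing to continue. A sketch of consuming entries through the package's `list()` command, using the same `noResume`/`onReadEntry` options the replace code below uses; the archive name is hypothetical:

```js
import { list } from 'tar';

await list({
  file: 'archive.tar',
  noResume: true,               // keep each entry paused until we decide
  onReadEntry: entry => {
    console.log(entry.type, entry.path, entry.size);
    entry.resume();             // discard the body so the parser can move on
  },
});
```
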
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/replace.js b/node_modules/pacote/node_modules/tar/dist/esm/replace.js
deleted file mode 100644
index bab622bfdf1f1..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/replace.js
+++ /dev/null
@@ -1,225 +0,0 @@
-// tar -r
-import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass';
-import fs from 'node:fs';
-import path from 'node:path';
-import { Header } from './header.js';
-import { list } from './list.js';
-import { makeCommand } from './make-command.js';
-import { isFile, } from './options.js';
-import { Pack, PackSync } from './pack.js';
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-const replaceSync = (opt, files) => {
-    const p = new PackSync(opt);
-    let threw = true;
-    let fd;
-    let position;
-    try {
-        try {
-            fd = fs.openSync(opt.file, 'r+');
-        }
-        catch (er) {
-            if (er?.code === 'ENOENT') {
-                fd = fs.openSync(opt.file, 'w+');
-            }
-            else {
-                throw er;
-            }
-        }
-        const st = fs.fstatSync(fd);
-        const headBuf = Buffer.alloc(512);
-        POSITION: for (position = 0; position < st.size; position += 512) {
-            for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-                bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos);
-                if (position === 0 &&
-                    headBuf[0] === 0x1f &&
-                    headBuf[1] === 0x8b) {
-                    throw new Error('cannot append to compressed archives');
-                }
-                if (!bytes) {
-                    break POSITION;
-                }
-            }
-            const h = new Header(headBuf);
-            if (!h.cksumValid) {
-                break;
-            }
-            const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512);
-            if (position + entryBlockSize + 512 > st.size) {
-                break;
-            }
-            // the 512 for the header we just parsed will be added as well
-            // also jump ahead all the blocks for the body
-            position += entryBlockSize;
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-        }
-        threw = false;
-        streamSync(opt, p, position, fd, files);
-    }
-    finally {
-        if (threw) {
-            try {
-                fs.closeSync(fd);
-            }
-            catch (er) { }
-        }
-    }
-};
-const streamSync = (opt, p, position, fd, files) => {
-    const stream = new WriteStreamSync(opt.file, {
-        fd: fd,
-        start: position,
-    });
-    p.pipe(stream);
-    addFilesSync(p, files);
-};
-const replaceAsync = (opt, files) => {
-    files = Array.from(files);
-    const p = new Pack(opt);
-    const getPos = (fd, size, cb_) => {
-        const cb = (er, pos) => {
-            if (er) {
-                fs.close(fd, _ => cb_(er));
-            }
-            else {
-                cb_(null, pos);
-            }
-        };
-        let position = 0;
-        if (size === 0) {
-            return cb(null, 0);
-        }
-        let bufPos = 0;
-        const headBuf = Buffer.alloc(512);
-        const onread = (er, bytes) => {
-            if (er || typeof bytes === 'undefined') {
-                return cb(er);
-            }
-            bufPos += bytes;
-            if (bufPos < 512 && bytes) {
-                return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread);
-            }
-            if (position === 0 &&
-                headBuf[0] === 0x1f &&
-                headBuf[1] === 0x8b) {
-                return cb(new Error('cannot append to compressed archives'));
-            }
-            // truncated header
-            if (bufPos < 512) {
-                return cb(null, position);
-            }
-            const h = new Header(headBuf);
-            if (!h.cksumValid) {
-                return cb(null, position);
-            }
-            /* c8 ignore next */
-            const entryBlockSize = 512 * Math.ceil((h.size ?? 0) / 512);
-            if (position + entryBlockSize + 512 > size) {
-                return cb(null, position);
-            }
-            position += entryBlockSize + 512;
-            if (position >= size) {
-                return cb(null, position);
-            }
-            if (opt.mtimeCache && h.mtime) {
-                opt.mtimeCache.set(String(h.path), h.mtime);
-            }
-            bufPos = 0;
-            fs.read(fd, headBuf, 0, 512, position, onread);
-        };
-        fs.read(fd, headBuf, 0, 512, position, onread);
-    };
-    const promise = new Promise((resolve, reject) => {
-        p.on('error', reject);
-        let flag = 'r+';
-        const onopen = (er, fd) => {
-            if (er && er.code === 'ENOENT' && flag === 'r+') {
-                flag = 'w+';
-                return fs.open(opt.file, flag, onopen);
-            }
-            if (er || !fd) {
-                return reject(er);
-            }
-            fs.fstat(fd, (er, st) => {
-                if (er) {
-                    return fs.close(fd, () => reject(er));
-                }
-                getPos(fd, st.size, (er, position) => {
-                    if (er) {
-                        return reject(er);
-                    }
-                    const stream = new WriteStream(opt.file, {
-                        fd: fd,
-                        start: position,
-                    });
-                    p.pipe(stream);
-                    stream.on('error', reject);
-                    stream.on('close', resolve);
-                    addFilesAsync(p, files);
-                });
-            });
-        };
-        fs.open(opt.file, flag, onopen);
-    });
-    return promise;
-};
-const addFilesSync = (p, files) => {
-    files.forEach(file => {
-        if (file.charAt(0) === '@') {
-            list({
-                file: path.resolve(p.cwd, file.slice(1)),
-                sync: true,
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    });
-    p.end();
-};
-const addFilesAsync = async (p, files) => {
-    for (let i = 0; i < files.length; i++) {
-        const file = String(files[i]);
-        if (file.charAt(0) === '@') {
-            await list({
-                file: path.resolve(String(p.cwd), file.slice(1)),
-                noResume: true,
-                onReadEntry: entry => p.add(entry),
-            });
-        }
-        else {
-            p.add(file);
-        }
-    }
-    p.end();
-};
-export const replace = makeCommand(replaceSync, replaceAsync, 
-/* c8 ignore start */
-() => {
-    throw new TypeError('file is required');
-}, () => {
-    throw new TypeError('file is required');
-}, 
-/* c8 ignore stop */
-(opt, entries) => {
-    if (!isFile(opt)) {
-        throw new TypeError('file is required');
-    }
-    if (opt.gzip ||
-        opt.brotli ||
-        opt.file.endsWith('.br') ||
-        opt.file.endsWith('.tbr')) {
-        throw new TypeError('cannot append to compressed archives');
-    }
-    if (!entries?.length) {
-        throw new TypeError('no paths specified to add/replace');
-    }
-});
-//# sourceMappingURL=replace.js.map
\ No newline at end of file
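
A usage sketch of the `replace` command defined above (also exported as `tar.r`): it scans headers from the start of the file to find the first position where a new Pack stream can safely begin, and refuses gzip/brotli archives. File names are hypothetical:

```js
import { replace } from 'tar';

// Append two paths to an existing, uncompressed archive. An '@other.tar'
// argument would instead copy entries out of another archive (see addFilesSync).
await replace({ file: 'archive.tar' }, ['extra/file.txt', 'docs/readme.md']);
```
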
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js
deleted file mode 100644
index cce5ff80b00db..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/strip-absolute-path.js
+++ /dev/null
@@ -1,25 +0,0 @@
-// unix absolute paths are also absolute on win32, so we use this for both
-import { win32 } from 'node:path';
-const { isAbsolute, parse } = win32;
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-export const stripAbsolutePath = (path) => {
-    let r = '';
-    let parsed = parse(path);
-    while (isAbsolute(path) || parsed.root) {
-        // windows will think that //x/y/z has a "root" of //x/y/
-        // but strip the //?/C:/ off of //?/C:/path
-        const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ?
-            '/'
-            : parsed.root;
-        path = path.slice(root.length);
-        r += root;
-        parsed = parse(path);
-    }
-    return [r, path];
-};
-//# sourceMappingURL=strip-absolute-path.js.map
\ No newline at end of file
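
Because the win32 parser is used on every platform, the edge cases called out in the comments above apply everywhere. A quick check, assuming a direct import of this internal module (path illustrative):

```js
import { stripAbsolutePath } from 'tar/dist/esm/strip-absolute-path.js';

console.log(stripAbsolutePath('/etc/passwd')); // [ '/', 'etc/passwd' ]
console.log(stripAbsolutePath('//x/y/z/a'));   // [ '//', 'x/y/z/a' ]  (leading slashes peeled one at a time)
console.log(stripAbsolutePath('c:../foo'));    // [ 'c:', '../foo' ]   (drive-relative root stripped too)
```
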
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js
deleted file mode 100644
index ace4218a7547b..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/strip-trailing-slashes.js
+++ /dev/null
@@ -1,14 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-export const stripTrailingSlashes = (str) => {
-    let i = str.length - 1;
-    let slashesStart = -1;
-    while (i > -1 && str.charAt(i) === '/') {
-        slashesStart = i;
-        i--;
-    }
-    return slashesStart === -1 ? str : str.slice(0, slashesStart);
-};
-//# sourceMappingURL=strip-trailing-slashes.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js b/node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js
deleted file mode 100644
index d31766e2e0afa..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/symlink-error.js
+++ /dev/null
@@ -1,15 +0,0 @@
-export class SymlinkError extends Error {
-    path;
-    symlink;
-    syscall = 'symlink';
-    code = 'TAR_SYMLINK_ERROR';
-    constructor(symlink, path) {
-        super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link');
-        this.symlink = symlink;
-        this.path = path;
-    }
-    get name() {
-        return 'SymlinkError';
-    }
-}
-//# sourceMappingURL=symlink-error.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/types.js b/node_modules/pacote/node_modules/tar/dist/esm/types.js
deleted file mode 100644
index 27b982ae1e092..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/types.js
+++ /dev/null
@@ -1,45 +0,0 @@
-export const isCode = (c) => name.has(c);
-export const isName = (c) => code.has(c);
-// map types from key to human-friendly name
-export const name = new Map([
-    ['0', 'File'],
-    // same as File
-    ['', 'OldFile'],
-    ['1', 'Link'],
-    ['2', 'SymbolicLink'],
-    // Devices and FIFOs aren't fully supported
-    // they are parsed, but skipped when unpacking
-    ['3', 'CharacterDevice'],
-    ['4', 'BlockDevice'],
-    ['5', 'Directory'],
-    ['6', 'FIFO'],
-    // same as File
-    ['7', 'ContiguousFile'],
-    // pax headers
-    ['g', 'GlobalExtendedHeader'],
-    ['x', 'ExtendedHeader'],
-    // vendor-specific stuff
-    // skip
-    ['A', 'SolarisACL'],
-    // like 5, but with data, which should be skipped
-    ['D', 'GNUDumpDir'],
-    // metadata only, skip
-    ['I', 'Inode'],
-    // data = link path of next file
-    ['K', 'NextFileHasLongLinkpath'],
-    // data = path of next file
-    ['L', 'NextFileHasLongPath'],
-    // skip
-    ['M', 'ContinuationFile'],
-    // like L
-    ['N', 'OldGnuLongPath'],
-    // skip
-    ['S', 'SparseFile'],
-    // skip
-    ['V', 'TapeVolumeHeader'],
-    // like x
-    ['X', 'OldExtendedHeader'],
-]);
-// map the other direction
-export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]]));
-//# sourceMappingURL=types.js.map
\ No newline at end of file
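
The two maps are inverses of each other: `name` goes from the one-character type flag stored in the header to a readable name, `code` goes back, and `isCode`/`isName` test membership. For example (import path illustrative):

```js
import { name, code, isCode, isName } from 'tar/dist/esm/types.js';

console.log(name.get('5'));            // 'Directory'
console.log(code.get('SymbolicLink')); // '2'
console.log(isCode('x'), isName('x')); // true false  ('x' is a flag, not a name)
```
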
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/update.js b/node_modules/pacote/node_modules/tar/dist/esm/update.js
deleted file mode 100644
index 21398e9766663..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/update.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// tar -u
-import { makeCommand } from './make-command.js';
-import { replace as r } from './replace.js';
-// just call tar.r with the filter and mtimeCache
-export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => {
-    r.validate?.(opt, entries);
-    mtimeFilter(opt);
-});
-const mtimeFilter = (opt) => {
-    const filter = opt.filter;
-    if (!opt.mtimeCache) {
-        opt.mtimeCache = new Map();
-    }
-    opt.filter =
-        filter ?
-            (path, stat) => filter(path, stat) &&
-                !(
-                /* c8 ignore start */
-                ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                    (stat.mtime ?? 0))
-                /* c8 ignore stop */
-                )
-            : (path, stat) => !(
-            /* c8 ignore start */
-            ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) >
-                (stat.mtime ?? 0))
-            /* c8 ignore stop */
-            );
-};
-//# sourceMappingURL=update.js.map
\ No newline at end of file
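
`update` is `replace` plus a filter: before adding a path, the wrapped filter skips any file whose mtime recorded in `mtimeCache` is newer than the file currently on disk, which is the classic `tar -u` behaviour. A usage sketch with hypothetical file names:

```js
import { update } from 'tar';

// Paths whose archived copy is already newer than the file on disk are skipped
// by the wrapped filter above; the rest are re-added to the archive.
await update({ file: 'archive.tar' }, ['src/index.js', 'README.md']);
```
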
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/warn-method.js b/node_modules/pacote/node_modules/tar/dist/esm/warn-method.js
deleted file mode 100644
index 13e798afefc85..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/warn-method.js
+++ /dev/null
@@ -1,27 +0,0 @@
-export const warnMethod = (self, code, message, data = {}) => {
-    if (self.file) {
-        data.file = self.file;
-    }
-    if (self.cwd) {
-        data.cwd = self.cwd;
-    }
-    data.code =
-        (message instanceof Error &&
-            message.code) ||
-            code;
-    data.tarCode = code;
-    if (!self.strict && data.recoverable !== false) {
-        if (message instanceof Error) {
-            data = Object.assign(message, data);
-            message = message.message;
-        }
-        self.emit('warn', code, message, data);
-    }
-    else if (message instanceof Error) {
-        self.emit('error', Object.assign(message, data));
-    }
-    else {
-        self.emit('error', Object.assign(new Error(`${code}: ${message}`), data));
-    }
-};
-//# sourceMappingURL=warn-method.js.map
\ No newline at end of file
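
`warnMethod` is the single funnel for recoverable problems: in non-strict mode they are emitted as `'warn'` events (which the stream classes wire to an `onwarn` option), while strict mode or `data.recoverable === false` turns them into `'error'` events. A sketch of listening for warnings such as `TAR_BAD_ARCHIVE` while listing a possibly damaged archive (file name hypothetical):

```js
import { list } from 'tar';

await list({
  file: 'maybe-damaged.tar',
  onwarn: (code, message, data) => {
    // e.g. code === 'TAR_BAD_ARCHIVE' with data.tarCode set, per warnMethod above
    console.warn(`[${code}] ${message}`);
  },
  onReadEntry: entry => console.log(entry.path),
});
```
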
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/winchars.js b/node_modules/pacote/node_modules/tar/dist/esm/winchars.js
deleted file mode 100644
index c41eb86d69a4b..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/winchars.js
+++ /dev/null
@@ -1,9 +0,0 @@
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-const raw = ['|', '<', '>', '?', ':'];
-const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0)));
-const toWin = new Map(raw.map((char, i) => [char, win[i]]));
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]));
-export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s);
-export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s);
-//# sourceMappingURL=winchars.js.map
\ No newline at end of file
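
The translation maps the five characters that are illegal in Windows file names onto their 0xf000-offset private-use counterparts and back again, so encoding and decoding round-trip cleanly (import path illustrative):

```js
import * as winchars from 'tar/dist/esm/winchars.js';

const encoded = winchars.encode('a<b>:c|d?.txt');
console.log(encoded);                  // same name with '<' mapped to U+F03C, '>' to U+F03E, etc.
console.log(winchars.decode(encoded)); // 'a<b>:c|d?.txt'
```
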
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/write-entry.js b/node_modules/pacote/node_modules/tar/dist/esm/write-entry.js
deleted file mode 100644
index 9028cd676b4cd..0000000000000
--- a/node_modules/pacote/node_modules/tar/dist/esm/write-entry.js
+++ /dev/null
@@ -1,657 +0,0 @@
-import fs from 'fs';
-import { Minipass } from 'minipass';
-import path from 'path';
-import { Header } from './header.js';
-import { modeFix } from './mode-fix.js';
-import { normalizeWindowsPath } from './normalize-windows-path.js';
-import { dealias, } from './options.js';
-import { Pax } from './pax.js';
-import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
-import { warnMethod, } from './warn-method.js';
-import * as winchars from './winchars.js';
-const prefixPath = (path, prefix) => {
-    if (!prefix) {
-        return normalizeWindowsPath(path);
-    }
-    path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, '');
-    return stripTrailingSlashes(prefix) + '/' + path;
-};
-const maxReadSize = 16 * 1024 * 1024;
-const PROCESS = Symbol('process');
-const FILE = Symbol('file');
-const DIRECTORY = Symbol('directory');
-const SYMLINK = Symbol('symlink');
-const HARDLINK = Symbol('hardlink');
-const HEADER = Symbol('header');
-const READ = Symbol('read');
-const LSTAT = Symbol('lstat');
-const ONLSTAT = Symbol('onlstat');
-const ONREAD = Symbol('onread');
-const ONREADLINK = Symbol('onreadlink');
-const OPENFILE = Symbol('openfile');
-const ONOPENFILE = Symbol('onopenfile');
-const CLOSE = Symbol('close');
-const MODE = Symbol('mode');
-const AWAITDRAIN = Symbol('awaitDrain');
-const ONDRAIN = Symbol('ondrain');
-const PREFIX = Symbol('prefix');
-export class WriteEntry extends Minipass {
-    path;
-    portable;
-    myuid = (process.getuid && process.getuid()) || 0;
-    // until node has builtin pwnam functions, this'll have to do
-    myuser = process.env.USER || '';
-    maxReadSize;
-    linkCache;
-    statCache;
-    preservePaths;
-    cwd;
-    strict;
-    mtime;
-    noPax;
-    noMtime;
-    prefix;
-    fd;
-    blockLen = 0;
-    blockRemain = 0;
-    buf;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    offset = 0;
-    win32;
-    absolute;
-    header;
-    type;
-    linkpath;
-    stat;
-    onWriteEntry;
-    #hadError = false;
-    constructor(p, opt_ = {}) {
-        const opt = dealias(opt_);
-        super();
-        this.path = normalizeWindowsPath(p);
-        // suppress atime, ctime, uid, gid, uname, gname
-        this.portable = !!opt.portable;
-        this.maxReadSize = opt.maxReadSize || maxReadSize;
-        this.linkCache = opt.linkCache || new Map();
-        this.statCache = opt.statCache || new Map();
-        this.preservePaths = !!opt.preservePaths;
-        this.cwd = normalizeWindowsPath(opt.cwd || process.cwd());
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.mtime = opt.mtime;
-        this.prefix =
-            opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined;
-        this.onWriteEntry = opt.onWriteEntry;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = stripAbsolutePath(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.win32 = !!opt.win32 || process.platform === 'win32';
-        if (this.win32) {
-            // force the \ to / normalization, since we might not *actually*
-            // be on windows, but want \ to be considered a path separator.
-            this.path = winchars.decode(this.path.replace(/\\/g, '/'));
-            p = p.replace(/\\/g, '/');
-        }
-        this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p));
-        if (this.path === '') {
-            this.path = './';
-        }
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        const cs = this.statCache.get(this.absolute);
-        if (cs) {
-            this[ONLSTAT](cs);
-        }
-        else {
-            this[LSTAT]();
-        }
-    }
-    warn(code, message, data = {}) {
-        return warnMethod(this, code, message, data);
-    }
-    emit(ev, ...data) {
-        if (ev === 'error') {
-            this.#hadError = true;
-        }
-        return super.emit(ev, ...data);
-    }
-    [LSTAT]() {
-        fs.lstat(this.absolute, (er, stat) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONLSTAT](stat);
-        });
-    }
-    [ONLSTAT](stat) {
-        this.statCache.set(this.absolute, stat);
-        this.stat = stat;
-        if (!stat.isFile()) {
-            stat.size = 0;
-        }
-        this.type = getType(stat);
-        this.emit('stat', stat);
-        this[PROCESS]();
-    }
-    [PROCESS]() {
-        switch (this.type) {
-            case 'File':
-                return this[FILE]();
-            case 'Directory':
-                return this[DIRECTORY]();
-            case 'SymbolicLink':
-                return this[SYMLINK]();
-            // unsupported types are ignored.
-            default:
-                return this.end();
-        }
-    }
-    [MODE](mode) {
-        return modeFix(mode, this.type === 'Directory', this.portable);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [HEADER]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot write header before stat');
-        }
-        /* c8 ignore stop */
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.onWriteEntry?.(this);
-        this.header = new Header({
-            path: this[PREFIX](this.path),
-            // only apply the prefix to hard links.
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this[MODE](this.stat.mode),
-            uid: this.portable ? undefined : this.stat.uid,
-            gid: this.portable ? undefined : this.stat.gid,
-            size: this.stat.size,
-            mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime,
-            /* c8 ignore next */
-            type: this.type === 'Unsupported' ? undefined : this.type,
-            uname: this.portable ? undefined
-                : this.stat.uid === this.myuid ? this.myuser
-                    : '',
-            atime: this.portable ? undefined : this.stat.atime,
-            ctime: this.portable ? undefined : this.stat.ctime,
-        });
-        if (this.header.encode() && !this.noPax) {
-            super.write(new Pax({
-                atime: this.portable ? undefined : this.header.atime,
-                ctime: this.portable ? undefined : this.header.ctime,
-                gid: this.portable ? undefined : this.header.gid,
-                mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime),
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.header.size,
-                uid: this.portable ? undefined : this.header.uid,
-                uname: this.portable ? undefined : this.header.uname,
-                dev: this.portable ? undefined : this.stat.dev,
-                ino: this.portable ? undefined : this.stat.ino,
-                nlink: this.portable ? undefined : this.stat.nlink,
-            }).encode());
-        }
-        const block = this.header?.block;
-        /* c8 ignore start */
-        if (!block) {
-            throw new Error('failed to encode header');
-        }
-        /* c8 ignore stop */
-        super.write(block);
-    }
-    [DIRECTORY]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create directory entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.path.slice(-1) !== '/') {
-            this.path += '/';
-        }
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [SYMLINK]() {
-        fs.readlink(this.absolute, (er, linkpath) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONREADLINK](linkpath);
-        });
-    }
-    [ONREADLINK](linkpath) {
-        this.linkpath = normalizeWindowsPath(linkpath);
-        this[HEADER]();
-        this.end();
-    }
-    [HARDLINK](linkpath) {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create link entry without stat');
-        }
-        /* c8 ignore stop */
-        this.type = 'Link';
-        this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath));
-        this.stat.size = 0;
-        this[HEADER]();
-        this.end();
-    }
-    [FILE]() {
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('cannot create file entry without stat');
-        }
-        /* c8 ignore stop */
-        if (this.stat.nlink > 1) {
-            const linkKey = `${this.stat.dev}:${this.stat.ino}`;
-            const linkpath = this.linkCache.get(linkKey);
-            if (linkpath?.indexOf(this.cwd) === 0) {
-                return this[HARDLINK](linkpath);
-            }
-            this.linkCache.set(linkKey, this.absolute);
-        }
-        this[HEADER]();
-        if (this.stat.size === 0) {
-            return this.end();
-        }
-        this[OPENFILE]();
-    }
-    [OPENFILE]() {
-        fs.open(this.absolute, 'r', (er, fd) => {
-            if (er) {
-                return this.emit('error', er);
-            }
-            this[ONOPENFILE](fd);
-        });
-    }
-    [ONOPENFILE](fd) {
-        this.fd = fd;
-        if (this.#hadError) {
-            return this[CLOSE]();
-        }
-        /* c8 ignore start */
-        if (!this.stat) {
-            throw new Error('should stat before calling onopenfile');
-        }
-        /* c8 ignore stop */
-        this.blockLen = 512 * Math.ceil(this.stat.size / 512);
-        this.blockRemain = this.blockLen;
-        const bufLen = Math.min(this.blockLen, this.maxReadSize);
-        this.buf = Buffer.allocUnsafe(bufLen);
-        this.offset = 0;
-        this.pos = 0;
-        this.remain = this.stat.size;
-        this.length = this.buf.length;
-        this[READ]();
-    }
-    [READ]() {
-        const { fd, buf, offset, length, pos } = this;
-        if (fd === undefined || buf === undefined) {
-            throw new Error('cannot read file without first opening');
-        }
-        fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-            if (er) {
-                // ignoring the error from close(2) is a bad practice, but at
-                // this point we already have an error, don't need another one
-                return this[CLOSE](() => this.emit('error', er));
-            }
-            this[ONREAD](bytesRead);
-        });
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs.close(this.fd, cb);
-    }
-    [ONREAD](bytesRead) {
-        if (bytesRead <= 0 && this.remain > 0) {
-            const er = Object.assign(new Error('encountered unexpected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        if (bytesRead > this.remain) {
-            const er = Object.assign(new Error('did not encounter expected EOF'), {
-                path: this.absolute,
-                syscall: 'read',
-                code: 'EOF',
-            });
-            return this[CLOSE](() => this.emit('error', er));
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('should have created buffer prior to reading');
-        }
-        /* c8 ignore stop */
-        // null out the rest of the buffer, if we could fit the block padding
-        // at the end of this loop, we've incremented bytesRead and this.remain
-        // to be incremented up to the blockRemain level, as if we had expected
-        // to get a null-padded file, and read it until the end.  then we will
-        // decrement both remain and blockRemain by bytesRead, and know that we
-        // reached the expected EOF, without any null buffer to append.
-        if (bytesRead === this.remain) {
-            for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-                this.buf[i + this.offset] = 0;
-                bytesRead++;
-                this.remain++;
-            }
-        }
-        const chunk = this.offset === 0 && bytesRead === this.buf.length ?
-            this.buf
-            : this.buf.subarray(this.offset, this.offset + bytesRead);
-        const flushed = this.write(chunk);
-        if (!flushed) {
-            this[AWAITDRAIN](() => this[ONDRAIN]());
-        }
-        else {
-            this[ONDRAIN]();
-        }
-    }
-    [AWAITDRAIN](cb) {
-        this.once('drain', cb);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        if (this.blockRemain < chunk.length) {
-            const er = Object.assign(new Error('writing more data than expected'), {
-                path: this.absolute,
-            });
-            return this.emit('error', er);
-        }
-        this.remain -= chunk.length;
-        this.blockRemain -= chunk.length;
-        this.pos += chunk.length;
-        this.offset += chunk.length;
-        return super.write(chunk, null, cb);
-    }
-    [ONDRAIN]() {
-        if (!this.remain) {
-            if (this.blockRemain) {
-                super.write(Buffer.alloc(this.blockRemain));
-            }
-            return this[CLOSE](er => er ? this.emit('error', er) : this.end());
-        }
-        /* c8 ignore start */
-        if (!this.buf) {
-            throw new Error('buffer lost somehow in ONDRAIN');
-        }
-        /* c8 ignore stop */
-        if (this.offset >= this.length) {
-            // if we only have a smaller bit left to read, alloc a smaller buffer
-            // otherwise, keep it the same length it was before.
-            this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length));
-            this.offset = 0;
-        }
-        this.length = this.buf.length - this.offset;
-        this[READ]();
-    }
-}
-export class WriteEntrySync extends WriteEntry {
-    sync = true;
-    [LSTAT]() {
-        this[ONLSTAT](fs.lstatSync(this.absolute));
-    }
-    [SYMLINK]() {
-        this[ONREADLINK](fs.readlinkSync(this.absolute));
-    }
-    [OPENFILE]() {
-        this[ONOPENFILE](fs.openSync(this.absolute, 'r'));
-    }
-    [READ]() {
-        let threw = true;
-        try {
-            const { fd, buf, offset, length, pos } = this;
-            /* c8 ignore start */
-            if (fd === undefined || buf === undefined) {
-                throw new Error('fd and buf must be set in READ method');
-            }
-            /* c8 ignore stop */
-            const bytesRead = fs.readSync(fd, buf, offset, length, pos);
-            this[ONREAD](bytesRead);
-            threw = false;
-        }
-        finally {
-            // ignoring the error from close(2) is a bad practice, but at
-            // this point we already have an error, don't need another one
-            if (threw) {
-                try {
-                    this[CLOSE](() => { });
-                }
-                catch (er) { }
-            }
-        }
-    }
-    [AWAITDRAIN](cb) {
-        cb();
-    }
-    /* c8 ignore start */
-    [CLOSE](cb = () => { }) {
-        /* c8 ignore stop */
-        if (this.fd !== undefined)
-            fs.closeSync(this.fd);
-        cb();
-    }
-}
-export class WriteEntryTar extends Minipass {
-    blockLen = 0;
-    blockRemain = 0;
-    buf = 0;
-    pos = 0;
-    remain = 0;
-    length = 0;
-    preservePaths;
-    portable;
-    strict;
-    noPax;
-    noMtime;
-    readEntry;
-    type;
-    prefix;
-    path;
-    mode;
-    uid;
-    gid;
-    uname;
-    gname;
-    header;
-    mtime;
-    atime;
-    ctime;
-    linkpath;
-    size;
-    onWriteEntry;
-    warn(code, message, data = {}) {
-        return warnMethod(this, code, message, data);
-    }
-    constructor(readEntry, opt_ = {}) {
-        const opt = dealias(opt_);
-        super();
-        this.preservePaths = !!opt.preservePaths;
-        this.portable = !!opt.portable;
-        this.strict = !!opt.strict;
-        this.noPax = !!opt.noPax;
-        this.noMtime = !!opt.noMtime;
-        this.onWriteEntry = opt.onWriteEntry;
-        this.readEntry = readEntry;
-        const { type } = readEntry;
-        /* c8 ignore start */
-        if (type === 'Unsupported') {
-            throw new Error('writing entry that should be ignored');
-        }
-        /* c8 ignore stop */
-        this.type = type;
-        if (this.type === 'Directory' && this.portable) {
-            this.noMtime = true;
-        }
-        this.prefix = opt.prefix;
-        this.path = normalizeWindowsPath(readEntry.path);
-        this.mode =
-            readEntry.mode !== undefined ?
-                this[MODE](readEntry.mode)
-                : undefined;
-        this.uid = this.portable ? undefined : readEntry.uid;
-        this.gid = this.portable ? undefined : readEntry.gid;
-        this.uname = this.portable ? undefined : readEntry.uname;
-        this.gname = this.portable ? undefined : readEntry.gname;
-        this.size = readEntry.size;
-        this.mtime =
-            this.noMtime ? undefined : opt.mtime || readEntry.mtime;
-        this.atime = this.portable ? undefined : readEntry.atime;
-        this.ctime = this.portable ? undefined : readEntry.ctime;
-        this.linkpath =
-            readEntry.linkpath !== undefined ?
-                normalizeWindowsPath(readEntry.linkpath)
-                : undefined;
-        if (typeof opt.onwarn === 'function') {
-            this.on('warn', opt.onwarn);
-        }
-        let pathWarn = false;
-        if (!this.preservePaths) {
-            const [root, stripped] = stripAbsolutePath(this.path);
-            if (root && typeof stripped === 'string') {
-                this.path = stripped;
-                pathWarn = root;
-            }
-        }
-        this.remain = readEntry.size;
-        this.blockRemain = readEntry.startBlockSize;
-        this.onWriteEntry?.(this);
-        this.header = new Header({
-            path: this[PREFIX](this.path),
-            linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                this[PREFIX](this.linkpath)
-                : this.linkpath,
-            // only the permissions and setuid/setgid/sticky bitflags
-            // not the higher-order bits that specify file type
-            mode: this.mode,
-            uid: this.portable ? undefined : this.uid,
-            gid: this.portable ? undefined : this.gid,
-            size: this.size,
-            mtime: this.noMtime ? undefined : this.mtime,
-            type: this.type,
-            uname: this.portable ? undefined : this.uname,
-            atime: this.portable ? undefined : this.atime,
-            ctime: this.portable ? undefined : this.ctime,
-        });
-        if (pathWarn) {
-            this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-                entry: this,
-                path: pathWarn + this.path,
-            });
-        }
-        if (this.header.encode() && !this.noPax) {
-            super.write(new Pax({
-                atime: this.portable ? undefined : this.atime,
-                ctime: this.portable ? undefined : this.ctime,
-                gid: this.portable ? undefined : this.gid,
-                mtime: this.noMtime ? undefined : this.mtime,
-                path: this[PREFIX](this.path),
-                linkpath: this.type === 'Link' && this.linkpath !== undefined ?
-                    this[PREFIX](this.linkpath)
-                    : this.linkpath,
-                size: this.size,
-                uid: this.portable ? undefined : this.uid,
-                uname: this.portable ? undefined : this.uname,
-                dev: this.portable ? undefined : this.readEntry.dev,
-                ino: this.portable ? undefined : this.readEntry.ino,
-                nlink: this.portable ? undefined : this.readEntry.nlink,
-            }).encode());
-        }
-        const b = this.header?.block;
-        /* c8 ignore start */
-        if (!b)
-            throw new Error('failed to encode header');
-        /* c8 ignore stop */
-        super.write(b);
-        readEntry.pipe(this);
-    }
-    [PREFIX](path) {
-        return prefixPath(path, this.prefix);
-    }
-    [MODE](mode) {
-        return modeFix(mode, this.type === 'Directory', this.portable);
-    }
-    write(chunk, encoding, cb) {
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8');
-        }
-        /* c8 ignore stop */
-        const writeLen = chunk.length;
-        if (writeLen > this.blockRemain) {
-            throw new Error('writing more to entry than is appropriate');
-        }
-        this.blockRemain -= writeLen;
-        return super.write(chunk, cb);
-    }
-    end(chunk, encoding, cb) {
-        if (this.blockRemain) {
-            super.write(Buffer.alloc(this.blockRemain));
-        }
-        /* c8 ignore start - just junk to comply with NodeJS.WritableStream */
-        if (typeof chunk === 'function') {
-            cb = chunk;
-            encoding = undefined;
-            chunk = undefined;
-        }
-        if (typeof encoding === 'function') {
-            cb = encoding;
-            encoding = undefined;
-        }
-        if (typeof chunk === 'string') {
-            chunk = Buffer.from(chunk, encoding ?? 'utf8');
-        }
-        if (cb)
-            this.once('finish', cb);
-        chunk ? super.end(chunk, cb) : super.end(cb);
-        /* c8 ignore stop */
-        return this;
-    }
-}
-const getType = (stat) => stat.isFile() ? 'File'
-    : stat.isDirectory() ? 'Directory'
-        : stat.isSymbolicLink() ? 'SymbolicLink'
-            : 'Unsupported';
-//# sourceMappingURL=write-entry.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/tar/package.json b/node_modules/pacote/node_modules/tar/package.json
deleted file mode 100644
index 0283103ee9eaf..0000000000000
--- a/node_modules/pacote/node_modules/tar/package.json
+++ /dev/null
@@ -1,325 +0,0 @@
-{
-  "author": "Isaac Z. Schlueter",
-  "name": "tar",
-  "description": "tar for node",
-  "version": "7.4.3",
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-tar.git"
-  },
-  "scripts": {
-    "genparse": "node scripts/generate-parse-fixtures.js",
-    "snap": "tap",
-    "test": "tap",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "tshy",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "format": "prettier --write . --log-level warn",
-    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
-  },
-  "dependencies": {
-    "@isaacs/fs-minipass": "^4.0.0",
-    "chownr": "^3.0.0",
-    "minipass": "^7.1.2",
-    "minizlib": "^3.0.1",
-    "mkdirp": "^3.0.1",
-    "yallist": "^5.0.0"
-  },
-  "devDependencies": {
-    "chmodr": "^1.2.0",
-    "end-of-stream": "^1.4.3",
-    "events-to-array": "^2.0.3",
-    "mutate-fs": "^2.1.1",
-    "nock": "^13.5.4",
-    "prettier": "^3.2.5",
-    "rimraf": "^5.0.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "license": "ISC",
-  "engines": {
-    "node": ">=18"
-  },
-  "files": [
-    "dist"
-  ],
-  "tap": {
-    "coverage-map": "map.js",
-    "timeout": 0,
-    "typecheck": true
-  },
-  "prettier": {
-    "experimentalTernaries": true,
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts",
-      "./c": "./src/create.ts",
-      "./create": "./src/create.ts",
-      "./replace": "./src/create.ts",
-      "./r": "./src/create.ts",
-      "./list": "./src/list.ts",
-      "./t": "./src/list.ts",
-      "./update": "./src/update.ts",
-      "./u": "./src/update.ts",
-      "./extract": "./src/extract.ts",
-      "./x": "./src/extract.ts",
-      "./pack": "./src/pack.ts",
-      "./unpack": "./src/unpack.ts",
-      "./parse": "./src/parse.ts",
-      "./read-entry": "./src/read-entry.ts",
-      "./write-entry": "./src/write-entry.ts",
-      "./header": "./src/header.ts",
-      "./pax": "./src/pax.ts",
-      "./types": "./src/types.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "source": "./src/index.ts",
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "source": "./src/index.ts",
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    },
-    "./c": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./create": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./replace": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./r": {
-      "import": {
-        "source": "./src/create.ts",
-        "types": "./dist/esm/create.d.ts",
-        "default": "./dist/esm/create.js"
-      },
-      "require": {
-        "source": "./src/create.ts",
-        "types": "./dist/commonjs/create.d.ts",
-        "default": "./dist/commonjs/create.js"
-      }
-    },
-    "./list": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./t": {
-      "import": {
-        "source": "./src/list.ts",
-        "types": "./dist/esm/list.d.ts",
-        "default": "./dist/esm/list.js"
-      },
-      "require": {
-        "source": "./src/list.ts",
-        "types": "./dist/commonjs/list.d.ts",
-        "default": "./dist/commonjs/list.js"
-      }
-    },
-    "./update": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./u": {
-      "import": {
-        "source": "./src/update.ts",
-        "types": "./dist/esm/update.d.ts",
-        "default": "./dist/esm/update.js"
-      },
-      "require": {
-        "source": "./src/update.ts",
-        "types": "./dist/commonjs/update.d.ts",
-        "default": "./dist/commonjs/update.js"
-      }
-    },
-    "./extract": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./x": {
-      "import": {
-        "source": "./src/extract.ts",
-        "types": "./dist/esm/extract.d.ts",
-        "default": "./dist/esm/extract.js"
-      },
-      "require": {
-        "source": "./src/extract.ts",
-        "types": "./dist/commonjs/extract.d.ts",
-        "default": "./dist/commonjs/extract.js"
-      }
-    },
-    "./pack": {
-      "import": {
-        "source": "./src/pack.ts",
-        "types": "./dist/esm/pack.d.ts",
-        "default": "./dist/esm/pack.js"
-      },
-      "require": {
-        "source": "./src/pack.ts",
-        "types": "./dist/commonjs/pack.d.ts",
-        "default": "./dist/commonjs/pack.js"
-      }
-    },
-    "./unpack": {
-      "import": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/esm/unpack.d.ts",
-        "default": "./dist/esm/unpack.js"
-      },
-      "require": {
-        "source": "./src/unpack.ts",
-        "types": "./dist/commonjs/unpack.d.ts",
-        "default": "./dist/commonjs/unpack.js"
-      }
-    },
-    "./parse": {
-      "import": {
-        "source": "./src/parse.ts",
-        "types": "./dist/esm/parse.d.ts",
-        "default": "./dist/esm/parse.js"
-      },
-      "require": {
-        "source": "./src/parse.ts",
-        "types": "./dist/commonjs/parse.d.ts",
-        "default": "./dist/commonjs/parse.js"
-      }
-    },
-    "./read-entry": {
-      "import": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/esm/read-entry.d.ts",
-        "default": "./dist/esm/read-entry.js"
-      },
-      "require": {
-        "source": "./src/read-entry.ts",
-        "types": "./dist/commonjs/read-entry.d.ts",
-        "default": "./dist/commonjs/read-entry.js"
-      }
-    },
-    "./write-entry": {
-      "import": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/esm/write-entry.d.ts",
-        "default": "./dist/esm/write-entry.js"
-      },
-      "require": {
-        "source": "./src/write-entry.ts",
-        "types": "./dist/commonjs/write-entry.d.ts",
-        "default": "./dist/commonjs/write-entry.js"
-      }
-    },
-    "./header": {
-      "import": {
-        "source": "./src/header.ts",
-        "types": "./dist/esm/header.d.ts",
-        "default": "./dist/esm/header.js"
-      },
-      "require": {
-        "source": "./src/header.ts",
-        "types": "./dist/commonjs/header.d.ts",
-        "default": "./dist/commonjs/header.js"
-      }
-    },
-    "./pax": {
-      "import": {
-        "source": "./src/pax.ts",
-        "types": "./dist/esm/pax.d.ts",
-        "default": "./dist/esm/pax.js"
-      },
-      "require": {
-        "source": "./src/pax.ts",
-        "types": "./dist/commonjs/pax.d.ts",
-        "default": "./dist/commonjs/pax.js"
-      }
-    },
-    "./types": {
-      "import": {
-        "source": "./src/types.ts",
-        "types": "./dist/esm/types.d.ts",
-        "default": "./dist/esm/types.js"
-      },
-      "require": {
-        "source": "./src/types.ts",
-        "types": "./dist/commonjs/types.d.ts",
-        "default": "./dist/commonjs/types.js"
-      }
-    }
-  },
-  "type": "module",
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts"
-}
diff --git a/node_modules/pacote/node_modules/yallist/LICENSE.md b/node_modules/pacote/node_modules/yallist/LICENSE.md
deleted file mode 100644
index 881248b6d7f0c..0000000000000
--- a/node_modules/pacote/node_modules/yallist/LICENSE.md
+++ /dev/null
@@ -1,63 +0,0 @@
-All packages under `src/` are licensed according to the terms in
-their respective `LICENSE` or `LICENSE.md` files.
-
-The remainder of this project is licensed under the Blue Oak
-Model License, as follows:
-
------
-
-# Blue Oak Model License
-
-Version 1.0.0
-
-## Purpose
-
-This license gives everyone as much permission to work with
-this software as possible, while protecting contributors
-from liability.
-
-## Acceptance
-
-In order to receive this license, you must agree to its
-rules.  The rules of this license are both obligations
-under that agreement and conditions to your license.
-You must not do anything with this software that triggers
-a rule that you cannot or will not follow.
-
-## Copyright
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe that contributor's
-copyright in it.
-
-## Notices
-
-You must ensure that everyone who gets a copy of
-any part of this software from you, with or without
-changes, also gets the text of this license or a link to
-<https://blueoakcouncil.org/license/1.0.0>.
-
-## Excuse
-
-If anyone notifies you in writing that you have not
-complied with [Notices](#notices), you can keep your
-license by taking all practical steps to comply within 30
-days after the notice.  If you do not do so, your license
-ends immediately.
-
-## Patent
-
-Each contributor licenses you to do everything with this
-software that would otherwise infringe any patent claims
-they can license or become able to license.
-
-## Reliability
-
-No contributor can revoke this license.
-
-## No Liability
-
-***As far as the law allows, this software comes as is,
-without any warranty or condition, and no contributor
-will be liable to anyone for any damages related to this
-software or this license, under any kind of legal claim.***
diff --git a/node_modules/pacote/node_modules/yallist/dist/commonjs/index.js b/node_modules/pacote/node_modules/yallist/dist/commonjs/index.js
deleted file mode 100644
index c1e1e4741689d..0000000000000
--- a/node_modules/pacote/node_modules/yallist/dist/commonjs/index.js
+++ /dev/null
@@ -1,384 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.Node = exports.Yallist = void 0;
-class Yallist {
-    tail;
-    head;
-    length = 0;
-    static create(list = []) {
-        return new Yallist(list);
-    }
-    constructor(list = []) {
-        for (const item of list) {
-            this.push(item);
-        }
-    }
-    *[Symbol.iterator]() {
-        for (let walker = this.head; walker; walker = walker.next) {
-            yield walker.value;
-        }
-    }
-    removeNode(node) {
-        if (node.list !== this) {
-            throw new Error('removing node which does not belong to this list');
-        }
-        const next = node.next;
-        const prev = node.prev;
-        if (next) {
-            next.prev = prev;
-        }
-        if (prev) {
-            prev.next = next;
-        }
-        if (node === this.head) {
-            this.head = next;
-        }
-        if (node === this.tail) {
-            this.tail = prev;
-        }
-        this.length--;
-        node.next = undefined;
-        node.prev = undefined;
-        node.list = undefined;
-        return next;
-    }
-    unshiftNode(node) {
-        if (node === this.head) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const head = this.head;
-        node.list = this;
-        node.next = head;
-        if (head) {
-            head.prev = node;
-        }
-        this.head = node;
-        if (!this.tail) {
-            this.tail = node;
-        }
-        this.length++;
-    }
-    pushNode(node) {
-        if (node === this.tail) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const tail = this.tail;
-        node.list = this;
-        node.prev = tail;
-        if (tail) {
-            tail.next = node;
-        }
-        this.tail = node;
-        if (!this.head) {
-            this.head = node;
-        }
-        this.length++;
-    }
-    push(...args) {
-        for (let i = 0, l = args.length; i < l; i++) {
-            push(this, args[i]);
-        }
-        return this.length;
-    }
-    unshift(...args) {
-        for (var i = 0, l = args.length; i < l; i++) {
-            unshift(this, args[i]);
-        }
-        return this.length;
-    }
-    pop() {
-        if (!this.tail) {
-            return undefined;
-        }
-        const res = this.tail.value;
-        const t = this.tail;
-        this.tail = this.tail.prev;
-        if (this.tail) {
-            this.tail.next = undefined;
-        }
-        else {
-            this.head = undefined;
-        }
-        t.list = undefined;
-        this.length--;
-        return res;
-    }
-    shift() {
-        if (!this.head) {
-            return undefined;
-        }
-        const res = this.head.value;
-        const h = this.head;
-        this.head = this.head.next;
-        if (this.head) {
-            this.head.prev = undefined;
-        }
-        else {
-            this.tail = undefined;
-        }
-        h.list = undefined;
-        this.length--;
-        return res;
-    }
-    forEach(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.head, i = 0; !!walker; i++) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.next;
-        }
-    }
-    forEachReverse(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.prev;
-        }
-    }
-    get(n) {
-        let i = 0;
-        let walker = this.head;
-        for (; !!walker && i < n; i++) {
-            walker = walker.next;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    getReverse(n) {
-        let i = 0;
-        let walker = this.tail;
-        for (; !!walker && i < n; i++) {
-            // abort out of the list early if we hit a cycle
-            walker = walker.prev;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    map(fn, thisp) {
-        thisp = thisp || this;
-        const res = new Yallist();
-        for (let walker = this.head; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.next;
-        }
-        return res;
-    }
-    mapReverse(fn, thisp) {
-        thisp = thisp || this;
-        var res = new Yallist();
-        for (let walker = this.tail; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.prev;
-        }
-        return res;
-    }
-    reduce(fn, initial) {
-        let acc;
-        let walker = this.head;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.head) {
-            walker = this.head.next;
-            acc = this.head.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (var i = 0; !!walker; i++) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.next;
-        }
-        return acc;
-    }
-    reduceReverse(fn, initial) {
-        let acc;
-        let walker = this.tail;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.tail) {
-            walker = this.tail.prev;
-            acc = this.tail.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (let i = this.length - 1; !!walker; i--) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.prev;
-        }
-        return acc;
-    }
-    toArray() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.head; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.next;
-        }
-        return arr;
-    }
-    toArrayReverse() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.tail; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.prev;
-        }
-        return arr;
-    }
-    slice(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let walker = this.head;
-        let i = 0;
-        for (i = 0; !!walker && i < from; i++) {
-            walker = walker.next;
-        }
-        for (; !!walker && i < to; i++, walker = walker.next) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    sliceReverse(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let i = this.length;
-        let walker = this.tail;
-        for (; !!walker && i > to; i--) {
-            walker = walker.prev;
-        }
-        for (; !!walker && i > from; i--, walker = walker.prev) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    splice(start, deleteCount = 0, ...nodes) {
-        if (start > this.length) {
-            start = this.length - 1;
-        }
-        if (start < 0) {
-            start = this.length + start;
-        }
-        let walker = this.head;
-        for (let i = 0; !!walker && i < start; i++) {
-            walker = walker.next;
-        }
-        const ret = [];
-        for (let i = 0; !!walker && i < deleteCount; i++) {
-            ret.push(walker.value);
-            walker = this.removeNode(walker);
-        }
-        if (!walker) {
-            walker = this.tail;
-        }
-        else if (walker !== this.tail) {
-            walker = walker.prev;
-        }
-        for (const v of nodes) {
-            walker = insertAfter(this, walker, v);
-        }
-        return ret;
-    }
-    reverse() {
-        const head = this.head;
-        const tail = this.tail;
-        for (let walker = head; !!walker; walker = walker.prev) {
-            const p = walker.prev;
-            walker.prev = walker.next;
-            walker.next = p;
-        }
-        this.head = tail;
-        this.tail = head;
-        return this;
-    }
-}
-exports.Yallist = Yallist;
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
-    const prev = node;
-    const next = node ? node.next : self.head;
-    const inserted = new Node(value, prev, next, self);
-    if (inserted.next === undefined) {
-        self.tail = inserted;
-    }
-    if (inserted.prev === undefined) {
-        self.head = inserted;
-    }
-    self.length++;
-    return inserted;
-}
-function push(self, item) {
-    self.tail = new Node(item, self.tail, undefined, self);
-    if (!self.head) {
-        self.head = self.tail;
-    }
-    self.length++;
-}
-function unshift(self, item) {
-    self.head = new Node(item, undefined, self.head, self);
-    if (!self.tail) {
-        self.tail = self.head;
-    }
-    self.length++;
-}
-class Node {
-    list;
-    next;
-    prev;
-    value;
-    constructor(value, prev, next, list) {
-        this.list = list;
-        this.value = value;
-        if (prev) {
-            prev.next = this;
-            this.prev = prev;
-        }
-        else {
-            this.prev = undefined;
-        }
-        if (next) {
-            next.prev = this;
-            this.next = next;
-        }
-        else {
-            this.next = undefined;
-        }
-    }
-}
-exports.Node = Node;
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/yallist/dist/commonjs/package.json b/node_modules/pacote/node_modules/yallist/dist/commonjs/package.json
deleted file mode 100644
index 5bbefffbabee3..0000000000000
--- a/node_modules/pacote/node_modules/yallist/dist/commonjs/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
diff --git a/node_modules/pacote/node_modules/yallist/dist/esm/index.js b/node_modules/pacote/node_modules/yallist/dist/esm/index.js
deleted file mode 100644
index 3d81c5113b93a..0000000000000
--- a/node_modules/pacote/node_modules/yallist/dist/esm/index.js
+++ /dev/null
@@ -1,379 +0,0 @@
-export class Yallist {
-    tail;
-    head;
-    length = 0;
-    static create(list = []) {
-        return new Yallist(list);
-    }
-    constructor(list = []) {
-        for (const item of list) {
-            this.push(item);
-        }
-    }
-    *[Symbol.iterator]() {
-        for (let walker = this.head; walker; walker = walker.next) {
-            yield walker.value;
-        }
-    }
-    removeNode(node) {
-        if (node.list !== this) {
-            throw new Error('removing node which does not belong to this list');
-        }
-        const next = node.next;
-        const prev = node.prev;
-        if (next) {
-            next.prev = prev;
-        }
-        if (prev) {
-            prev.next = next;
-        }
-        if (node === this.head) {
-            this.head = next;
-        }
-        if (node === this.tail) {
-            this.tail = prev;
-        }
-        this.length--;
-        node.next = undefined;
-        node.prev = undefined;
-        node.list = undefined;
-        return next;
-    }
-    unshiftNode(node) {
-        if (node === this.head) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const head = this.head;
-        node.list = this;
-        node.next = head;
-        if (head) {
-            head.prev = node;
-        }
-        this.head = node;
-        if (!this.tail) {
-            this.tail = node;
-        }
-        this.length++;
-    }
-    pushNode(node) {
-        if (node === this.tail) {
-            return;
-        }
-        if (node.list) {
-            node.list.removeNode(node);
-        }
-        const tail = this.tail;
-        node.list = this;
-        node.prev = tail;
-        if (tail) {
-            tail.next = node;
-        }
-        this.tail = node;
-        if (!this.head) {
-            this.head = node;
-        }
-        this.length++;
-    }
-    push(...args) {
-        for (let i = 0, l = args.length; i < l; i++) {
-            push(this, args[i]);
-        }
-        return this.length;
-    }
-    unshift(...args) {
-        for (var i = 0, l = args.length; i < l; i++) {
-            unshift(this, args[i]);
-        }
-        return this.length;
-    }
-    pop() {
-        if (!this.tail) {
-            return undefined;
-        }
-        const res = this.tail.value;
-        const t = this.tail;
-        this.tail = this.tail.prev;
-        if (this.tail) {
-            this.tail.next = undefined;
-        }
-        else {
-            this.head = undefined;
-        }
-        t.list = undefined;
-        this.length--;
-        return res;
-    }
-    shift() {
-        if (!this.head) {
-            return undefined;
-        }
-        const res = this.head.value;
-        const h = this.head;
-        this.head = this.head.next;
-        if (this.head) {
-            this.head.prev = undefined;
-        }
-        else {
-            this.tail = undefined;
-        }
-        h.list = undefined;
-        this.length--;
-        return res;
-    }
-    forEach(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.head, i = 0; !!walker; i++) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.next;
-        }
-    }
-    forEachReverse(fn, thisp) {
-        thisp = thisp || this;
-        for (let walker = this.tail, i = this.length - 1; !!walker; i--) {
-            fn.call(thisp, walker.value, i, this);
-            walker = walker.prev;
-        }
-    }
-    get(n) {
-        let i = 0;
-        let walker = this.head;
-        for (; !!walker && i < n; i++) {
-            walker = walker.next;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    getReverse(n) {
-        let i = 0;
-        let walker = this.tail;
-        for (; !!walker && i < n; i++) {
-            // abort out of the list early if we hit a cycle
-            walker = walker.prev;
-        }
-        if (i === n && !!walker) {
-            return walker.value;
-        }
-    }
-    map(fn, thisp) {
-        thisp = thisp || this;
-        const res = new Yallist();
-        for (let walker = this.head; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.next;
-        }
-        return res;
-    }
-    mapReverse(fn, thisp) {
-        thisp = thisp || this;
-        var res = new Yallist();
-        for (let walker = this.tail; !!walker;) {
-            res.push(fn.call(thisp, walker.value, this));
-            walker = walker.prev;
-        }
-        return res;
-    }
-    reduce(fn, initial) {
-        let acc;
-        let walker = this.head;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.head) {
-            walker = this.head.next;
-            acc = this.head.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (var i = 0; !!walker; i++) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.next;
-        }
-        return acc;
-    }
-    reduceReverse(fn, initial) {
-        let acc;
-        let walker = this.tail;
-        if (arguments.length > 1) {
-            acc = initial;
-        }
-        else if (this.tail) {
-            walker = this.tail.prev;
-            acc = this.tail.value;
-        }
-        else {
-            throw new TypeError('Reduce of empty list with no initial value');
-        }
-        for (let i = this.length - 1; !!walker; i--) {
-            acc = fn(acc, walker.value, i);
-            walker = walker.prev;
-        }
-        return acc;
-    }
-    toArray() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.head; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.next;
-        }
-        return arr;
-    }
-    toArrayReverse() {
-        const arr = new Array(this.length);
-        for (let i = 0, walker = this.tail; !!walker; i++) {
-            arr[i] = walker.value;
-            walker = walker.prev;
-        }
-        return arr;
-    }
-    slice(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let walker = this.head;
-        let i = 0;
-        for (i = 0; !!walker && i < from; i++) {
-            walker = walker.next;
-        }
-        for (; !!walker && i < to; i++, walker = walker.next) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    sliceReverse(from = 0, to = this.length) {
-        if (to < 0) {
-            to += this.length;
-        }
-        if (from < 0) {
-            from += this.length;
-        }
-        const ret = new Yallist();
-        if (to < from || to < 0) {
-            return ret;
-        }
-        if (from < 0) {
-            from = 0;
-        }
-        if (to > this.length) {
-            to = this.length;
-        }
-        let i = this.length;
-        let walker = this.tail;
-        for (; !!walker && i > to; i--) {
-            walker = walker.prev;
-        }
-        for (; !!walker && i > from; i--, walker = walker.prev) {
-            ret.push(walker.value);
-        }
-        return ret;
-    }
-    splice(start, deleteCount = 0, ...nodes) {
-        if (start > this.length) {
-            start = this.length - 1;
-        }
-        if (start < 0) {
-            start = this.length + start;
-        }
-        let walker = this.head;
-        for (let i = 0; !!walker && i < start; i++) {
-            walker = walker.next;
-        }
-        const ret = [];
-        for (let i = 0; !!walker && i < deleteCount; i++) {
-            ret.push(walker.value);
-            walker = this.removeNode(walker);
-        }
-        if (!walker) {
-            walker = this.tail;
-        }
-        else if (walker !== this.tail) {
-            walker = walker.prev;
-        }
-        for (const v of nodes) {
-            walker = insertAfter(this, walker, v);
-        }
-        return ret;
-    }
-    reverse() {
-        const head = this.head;
-        const tail = this.tail;
-        for (let walker = head; !!walker; walker = walker.prev) {
-            const p = walker.prev;
-            walker.prev = walker.next;
-            walker.next = p;
-        }
-        this.head = tail;
-        this.tail = head;
-        return this;
-    }
-}
-// insertAfter undefined means "make the node the new head of list"
-function insertAfter(self, node, value) {
-    const prev = node;
-    const next = node ? node.next : self.head;
-    const inserted = new Node(value, prev, next, self);
-    if (inserted.next === undefined) {
-        self.tail = inserted;
-    }
-    if (inserted.prev === undefined) {
-        self.head = inserted;
-    }
-    self.length++;
-    return inserted;
-}
-function push(self, item) {
-    self.tail = new Node(item, self.tail, undefined, self);
-    if (!self.head) {
-        self.head = self.tail;
-    }
-    self.length++;
-}
-function unshift(self, item) {
-    self.head = new Node(item, undefined, self.head, self);
-    if (!self.tail) {
-        self.tail = self.head;
-    }
-    self.length++;
-}
-export class Node {
-    list;
-    next;
-    prev;
-    value;
-    constructor(value, prev, next, list) {
-        this.list = list;
-        this.value = value;
-        if (prev) {
-            prev.next = this;
-            this.prev = prev;
-        }
-        else {
-            this.prev = undefined;
-        }
-        if (next) {
-            next.prev = this;
-            this.next = next;
-        }
-        else {
-            this.next = undefined;
-        }
-    }
-}
-//# sourceMappingURL=index.js.map
\ No newline at end of file
diff --git a/node_modules/pacote/node_modules/yallist/dist/esm/package.json b/node_modules/pacote/node_modules/yallist/dist/esm/package.json
deleted file mode 100644
index 3dbc1ca591c05..0000000000000
--- a/node_modules/pacote/node_modules/yallist/dist/esm/package.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
diff --git a/node_modules/pacote/node_modules/yallist/package.json b/node_modules/pacote/node_modules/yallist/package.json
deleted file mode 100644
index 2f5247808bbea..0000000000000
--- a/node_modules/pacote/node_modules/yallist/package.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
-  "name": "yallist",
-  "version": "5.0.0",
-  "description": "Yet Another Linked List",
-  "files": [
-    "dist"
-  ],
-  "devDependencies": {
-    "prettier": "^3.2.5",
-    "tap": "^18.7.2",
-    "tshy": "^1.13.1",
-    "typedoc": "^0.25.13"
-  },
-  "scripts": {
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "prepublishOnly": "git push origin --follow-tags",
-    "prepare": "tshy",
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "test": "tap",
-    "snap": "tap",
-    "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache",
-    "typedoc": "typedoc"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/yallist.git"
-  },
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "BlueOak-1.0.0",
-  "tshy": {
-    "exports": {
-      "./package.json": "./package.json",
-      ".": "./src/index.ts"
-    }
-  },
-  "exports": {
-    "./package.json": "./package.json",
-    ".": {
-      "import": {
-        "types": "./dist/esm/index.d.ts",
-        "default": "./dist/esm/index.js"
-      },
-      "require": {
-        "types": "./dist/commonjs/index.d.ts",
-        "default": "./dist/commonjs/index.js"
-      }
-    }
-  },
-  "main": "./dist/commonjs/index.js",
-  "types": "./dist/commonjs/index.d.ts",
-  "type": "module",
-  "prettier": {
-    "semi": false,
-    "printWidth": 70,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  },
-  "engines": {
-    "node": ">=18"
-  }
-}
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/create.js b/node_modules/tar/dist/commonjs/create.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/create.js
rename to node_modules/tar/dist/commonjs/create.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/cwd-error.js b/node_modules/tar/dist/commonjs/cwd-error.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/cwd-error.js
rename to node_modules/tar/dist/commonjs/cwd-error.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/extract.js b/node_modules/tar/dist/commonjs/extract.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/extract.js
rename to node_modules/tar/dist/commonjs/extract.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/get-write-flag.js b/node_modules/tar/dist/commonjs/get-write-flag.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/get-write-flag.js
rename to node_modules/tar/dist/commonjs/get-write-flag.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/header.js b/node_modules/tar/dist/commonjs/header.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/header.js
rename to node_modules/tar/dist/commonjs/header.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/index.js b/node_modules/tar/dist/commonjs/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/index.js
rename to node_modules/tar/dist/commonjs/index.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/large-numbers.js b/node_modules/tar/dist/commonjs/large-numbers.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/large-numbers.js
rename to node_modules/tar/dist/commonjs/large-numbers.js
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/list.js b/node_modules/tar/dist/commonjs/list.js
similarity index 94%
rename from node_modules/pacote/node_modules/tar/dist/commonjs/list.js
rename to node_modules/tar/dist/commonjs/list.js
index 3cd34bb4bad48..3bc56453f5ed6 100644
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/list.js
+++ b/node_modules/tar/dist/commonjs/list.js
@@ -77,15 +77,17 @@ const listFileSync = (opt) => {
     const file = opt.file;
     let fd;
     try {
-        const stat = node_fs_1.default.statSync(file);
+        fd = node_fs_1.default.openSync(file, 'r');
+        const stat = node_fs_1.default.fstatSync(fd);
         const readSize = opt.maxReadSize || 16 * 1024 * 1024;
         if (stat.size < readSize) {
-            p.end(node_fs_1.default.readFileSync(file));
+            const buf = Buffer.allocUnsafe(stat.size);
+            node_fs_1.default.readSync(fd, buf, 0, stat.size, 0);
+            p.end(buf);
         }
         else {
             let pos = 0;
             const buf = Buffer.allocUnsafe(readSize);
-            fd = node_fs_1.default.openSync(file, 'r');
             while (pos < stat.size) {
                 const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos);
                 pos += bytesRead;
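The list.js hunk above opens the archive once and then uses fstatSync/readSync on that descriptor instead of statSync plus readFileSync on the path, so the size check and the reads cannot race against the file being replaced underneath. A minimal standalone sketch of the same open-then-fstat pattern (the helper name and the zero-read guard are illustrative, not from the patch):

    const fs = require('node:fs')

    // Open once, then stat and read through the same descriptor so the
    // size we checked is the size of the file we actually read.
    function readWholeFileSync (path) {
      const fd = fs.openSync(path, 'r')
      try {
        const stat = fs.fstatSync(fd)
        const buf = Buffer.allocUnsafe(stat.size)
        let pos = 0
        while (pos < stat.size) {
          // readSync returns the number of bytes actually read
          const n = fs.readSync(fd, buf, pos, stat.size - pos, pos)
          if (n === 0) break // file shrank since fstat; stop rather than spin
          pos += n
        }
        return buf
      } finally {
        fs.closeSync(fd)
      }
    }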
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/make-command.js b/node_modules/tar/dist/commonjs/make-command.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/make-command.js
rename to node_modules/tar/dist/commonjs/make-command.js
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js b/node_modules/tar/dist/commonjs/mkdir.js
similarity index 71%
rename from node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js
rename to node_modules/tar/dist/commonjs/mkdir.js
index 2b13ecbab6723..606619efbcde3 100644
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/mkdir.js
+++ b/node_modules/tar/dist/commonjs/mkdir.js
@@ -5,16 +5,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.mkdirSync = exports.mkdir = void 0;
 const chownr_1 = require("chownr");
-const fs_1 = __importDefault(require("fs"));
-const mkdirp_1 = require("mkdirp");
+const node_fs_1 = __importDefault(require("node:fs"));
+const promises_1 = __importDefault(require("node:fs/promises"));
 const node_path_1 = __importDefault(require("node:path"));
 const cwd_error_js_1 = require("./cwd-error.js");
 const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
 const symlink_error_js_1 = require("./symlink-error.js");
-const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key));
-const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val);
 const checkCwd = (dir, cb) => {
-    fs_1.default.stat(dir, (er, st) => {
+    node_fs_1.default.stat(dir, (er, st) => {
         if (er || !st.isDirectory()) {
             er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR');
         }
@@ -22,7 +20,7 @@ const checkCwd = (dir, cb) => {
     });
 };
 /**
- * Wrapper around mkdirp for tar's needs.
+ * Wrapper around fs/promises.mkdir for tar's needs.
  *
  * The main purpose is to avoid creating directories if we know that
  * they already exist (and track which ones exist for this purpose),
@@ -44,68 +42,60 @@ const mkdir = (dir, opt, cb) => {
         (uid !== opt.processUid || gid !== opt.processGid);
     const preserve = opt.preserve;
     const unlink = opt.unlink;
-    const cache = opt.cache;
     const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
     const done = (er, created) => {
         if (er) {
             cb(er);
         }
         else {
-            cSet(cache, dir, true);
             if (created && doChown) {
                 (0, chownr_1.chownr)(created, uid, gid, er => done(er));
             }
             else if (needChmod) {
-                fs_1.default.chmod(dir, mode, cb);
+                node_fs_1.default.chmod(dir, mode, cb);
             }
             else {
                 cb();
             }
         }
     };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
     if (dir === cwd) {
         return checkCwd(dir, done);
     }
     if (preserve) {
-        return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
+        return promises_1.default.mkdir(dir, { mode, recursive: true }).then(made => done(null, made ?? undefined), // oh, ts
         done);
     }
     const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
     const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
+    mkdir_(cwd, parts, mode, unlink, cwd, undefined, done);
 };
 exports.mkdir = mkdir;
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+const mkdir_ = (base, parts, mode, unlink, cwd, created, cb) => {
     if (!parts.length) {
         return cb(null, created);
     }
     const p = parts.shift();
     const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+    node_fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, unlink, cwd, created, cb));
 };
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
+const onmkdir = (part, parts, mode, unlink, cwd, created, cb) => (er) => {
     if (er) {
-        fs_1.default.lstat(part, (statEr, st) => {
+        node_fs_1.default.lstat(part, (statEr, st) => {
             if (statEr) {
                 statEr.path =
                     statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path);
                 cb(statEr);
             }
             else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+                mkdir_(part, parts, mode, unlink, cwd, created, cb);
             }
             else if (unlink) {
-                fs_1.default.unlink(part, er => {
+                node_fs_1.default.unlink(part, er => {
                     if (er) {
                         return cb(er);
                     }
-                    fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+                    node_fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, unlink, cwd, created, cb));
                 });
             }
             else if (st.isSymbolicLink()) {
@@ -118,14 +108,14 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) =>
     }
     else {
         created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+        mkdir_(part, parts, mode, unlink, cwd, created, cb);
     }
 };
 const checkCwdSync = (dir) => {
     let ok = false;
     let code = undefined;
     try {
-        ok = fs_1.default.statSync(dir).isDirectory();
+        ok = node_fs_1.default.statSync(dir).isDirectory();
     }
     catch (er) {
         code = er?.code;
@@ -151,51 +141,40 @@ const mkdirSync = (dir, opt) => {
         (uid !== opt.processUid || gid !== opt.processGid);
     const preserve = opt.preserve;
     const unlink = opt.unlink;
-    const cache = opt.cache;
     const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd);
     const done = (created) => {
-        cSet(cache, dir, true);
         if (created && doChown) {
             (0, chownr_1.chownrSync)(created, uid, gid);
         }
         if (needChmod) {
-            fs_1.default.chmodSync(dir, mode);
+            node_fs_1.default.chmodSync(dir, mode);
         }
     };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
     if (dir === cwd) {
         checkCwdSync(cwd);
         return done();
     }
     if (preserve) {
-        return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? undefined);
+        return done(node_fs_1.default.mkdirSync(dir, { mode, recursive: true }) ?? undefined);
     }
     const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir));
     const parts = sub.split('/');
     let created = undefined;
     for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
         part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
         try {
-            fs_1.default.mkdirSync(part, mode);
+            node_fs_1.default.mkdirSync(part, mode);
             created = created || part;
-            cSet(cache, part, true);
         }
         catch (er) {
-            const st = fs_1.default.lstatSync(part);
+            const st = node_fs_1.default.lstatSync(part);
             if (st.isDirectory()) {
-                cSet(cache, part, true);
                 continue;
             }
             else if (unlink) {
-                fs_1.default.unlinkSync(part);
-                fs_1.default.mkdirSync(part, mode);
+                node_fs_1.default.unlinkSync(part);
+                node_fs_1.default.mkdirSync(part, mode);
                 created = created || part;
-                cSet(cache, part, true);
                 continue;
             }
             else if (st.isSymbolicLink()) {
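The mkdir.js hunks above drop both the mkdirp dependency and the per-path directory cache; `preserve` mode now relies on fs.mkdir with `recursive: true`, which already tolerates existing directories and reports the first directory it actually created, the value the done() callbacks feed to chownr/chmod. A small sketch of that behavior, assuming Node 18+ as declared in the package.json above (helper names are illustrative):

    const fs = require('node:fs')
    const fsp = require('node:fs/promises')

    async function ensureDir (dir, mode) {
      // With recursive: true, mkdir resolves to the first directory it
      // created, or undefined if everything already existed -- the same
      // "created" value used above to decide whether to chown/chmod.
      return fsp.mkdir(dir, { mode, recursive: true })
    }

    // Synchronous variant matching the mkdirSync path in the hunk above.
    function ensureDirSync (dir, mode) {
      return fs.mkdirSync(dir, { mode, recursive: true })
    }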
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/mode-fix.js b/node_modules/tar/dist/commonjs/mode-fix.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/mode-fix.js
rename to node_modules/tar/dist/commonjs/mode-fix.js
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js b/node_modules/tar/dist/commonjs/normalize-unicode.js
similarity index 50%
rename from node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js
rename to node_modules/tar/dist/commonjs/normalize-unicode.js
index 2f08ce46d98c4..6ce3342d43bcf 100644
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/normalize-unicode.js
+++ b/node_modules/tar/dist/commonjs/normalize-unicode.js
@@ -6,12 +6,29 @@ exports.normalizeUnicode = void 0;
 // within npm install on large package trees.
 // Do not edit without careful benchmarking.
 const normalizeCache = Object.create(null);
-const { hasOwnProperty } = Object.prototype;
+// Limit the size of this. Very low-sophistication LRU cache
+const MAX = 10000;
+const cache = new Set();
 const normalizeUnicode = (s) => {
-    if (!hasOwnProperty.call(normalizeCache, s)) {
+    if (!cache.has(s)) {
         normalizeCache[s] = s.normalize('NFD');
     }
-    return normalizeCache[s];
+    else {
+        cache.delete(s);
+    }
+    cache.add(s);
+    const ret = normalizeCache[s];
+    let i = cache.size - MAX;
+    // only prune when we're 10% over the max
+    if (i > MAX / 10) {
+        for (const s of cache) {
+            cache.delete(s);
+            delete normalizeCache[s];
+            if (--i <= 0)
+                break;
+        }
+    }
+    return ret;
 };
 exports.normalizeUnicode = normalizeUnicode;
 //# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
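The normalize-unicode.js hunk above bounds the previously unbounded NFD memoization: a plain object still stores the normalized strings, a Set tracks recency through its insertion order (a hit is deleted and re-added so it moves to the newest end), and the oldest entries are pruned only once the cache is 10% over MAX. A standalone sketch of that low-sophistication LRU, with MAX shrunk for illustration (the patched file uses 10000):

    const MAX = 4 // illustrative; the patched file uses 10000
    const values = Object.create(null)
    const recency = new Set()

    const memoNormalize = (s) => {
      if (!recency.has(s)) {
        values[s] = s.normalize('NFD')
      } else {
        recency.delete(s) // refresh: move s to the "newest" end of the Set
      }
      recency.add(s)
      const ret = values[s]
      let overBy = recency.size - MAX
      // only prune when we're 10% over the max
      if (overBy > MAX / 10) {
        for (const old of recency) {
          recency.delete(old)
          delete values[old]
          if (--overBy <= 0) break
        }
      }
      return ret
    }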
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-windows-path.js b/node_modules/tar/dist/commonjs/normalize-windows-path.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/normalize-windows-path.js
rename to node_modules/tar/dist/commonjs/normalize-windows-path.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/options.js b/node_modules/tar/dist/commonjs/options.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/options.js
rename to node_modules/tar/dist/commonjs/options.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/pack.js b/node_modules/tar/dist/commonjs/pack.js
similarity index 93%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/pack.js
rename to node_modules/tar/dist/commonjs/pack.js
index 303e93063c2db..07e921ca959bf 100644
--- a/node_modules/node-gyp/node_modules/tar/dist/commonjs/pack.js
+++ b/node_modules/tar/dist/commonjs/pack.js
@@ -102,6 +102,14 @@ class Pack extends minipass_1.Minipass {
     jobs;
     [WRITEENTRYCLASS];
     onWriteEntry;
+    // Note: we actually DO need a linked list here, because we
+    // shift() to update the head of the list where we start, but still
+    // while that happens, need to know what the next item in the queue
+    // will be. Since we do multiple jobs in parallel, it's not as simple
+    // as just an Array.shift(), since that would lose the information about
+    // the next job in the list. We could add a .next field on the PackJob
+    // class, but then we'd have to be tracking the tail of the queue the
+    // whole time, and Yallist just does that for us anyway.
     [QUEUE];
     [JOBS] = 0;
     [PROCESSING] = false;
@@ -126,9 +134,9 @@ class Pack extends minipass_1.Minipass {
             this.on('warn', opt.onwarn);
         }
         this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
+        if (opt.gzip || opt.brotli || opt.zstd) {
+            if ((opt.gzip ? 1 : 0) + (opt.brotli ? 1 : 0) + (opt.zstd ? 1 : 0) > 1) {
+                throw new TypeError('gzip, brotli, zstd are mutually exclusive');
             }
             if (opt.gzip) {
                 if (typeof opt.gzip !== 'object') {
@@ -145,6 +153,12 @@ class Pack extends minipass_1.Minipass {
                 }
                 this.zip = new zlib.BrotliCompress(opt.brotli);
             }
+            if (opt.zstd) {
+                if (typeof opt.zstd !== 'object') {
+                    opt.zstd = {};
+                }
+                this.zip = new zlib.ZstdCompress(opt.zstd);
+            }
             /* c8 ignore next */
             if (!this.zip)
                 throw new Error('impossible');
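The pack.js hunks above widen the compressor choice from gzip/brotli to gzip/brotli/zstd: at most one of the three options may be truthy, and the matching minizlib stream is constructed. A hedged sketch of that selection, assuming a minizlib version that exports ZstdCompress as the hunk does (the helper name is illustrative, not part of the patch):

    const zlib = require('minizlib')

    function makeCompressor (opt) {
      const picked = ['gzip', 'brotli', 'zstd'].filter(k => opt[k])
      if (picked.length > 1) {
        throw new TypeError(`${picked.join(', ')} are mutually exclusive`)
      }
      // Normalize a `true` flag into an empty options object, as the hunk does.
      const opts = k => (typeof opt[k] === 'object' ? opt[k] : {})
      if (opt.gzip) return new zlib.Gzip(opts('gzip'))
      if (opt.brotli) return new zlib.BrotliCompress(opts('brotli'))
      if (opt.zstd) return new zlib.ZstdCompress(opts('zstd')) // assumes minizlib zstd support
      return undefined // no compression requested
    }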
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/package.json b/node_modules/tar/dist/commonjs/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/package.json
rename to node_modules/tar/dist/commonjs/package.json
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/parse.js b/node_modules/tar/dist/commonjs/parse.js
similarity index 93%
rename from node_modules/pacote/node_modules/tar/dist/commonjs/parse.js
rename to node_modules/tar/dist/commonjs/parse.js
index 9746a25899e6e..0222b5547439f 100644
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/parse.js
+++ b/node_modules/tar/dist/commonjs/parse.js
@@ -3,7 +3,7 @@
 // the full 512 bytes of a header to come in.  We will Buffer.concat()
 // it to the next write(), which is a mem copy, but a small one.
 //
-// this[QUEUE] is a Yallist of entries that haven't been emitted
+// this[QUEUE] is a list of entries that haven't been emitted
 // yet this can only get filled up if the user keeps write()ing after
 // a write() returns false, or does a write() with more than one entry
 //
@@ -22,13 +22,14 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.Parser = void 0;
 const events_1 = require("events");
 const minizlib_1 = require("minizlib");
-const yallist_1 = require("yallist");
 const header_js_1 = require("./header.js");
 const pax_js_1 = require("./pax.js");
 const read_entry_js_1 = require("./read-entry.js");
 const warn_method_js_1 = require("./warn-method.js");
 const maxMetaEntrySize = 1024 * 1024;
 const gzipHeader = Buffer.from([0x1f, 0x8b]);
+const zstdHeader = Buffer.from([0x28, 0xb5, 0x2f, 0xfd]);
+const ZIP_HEADER_LEN = Math.max(gzipHeader.length, zstdHeader.length);
 const STATE = Symbol('state');
 const WRITEENTRY = Symbol('writeEntry');
 const READENTRY = Symbol('readEntry');
@@ -66,9 +67,10 @@ class Parser extends events_1.EventEmitter {
     maxMetaEntrySize;
     filter;
     brotli;
+    zstd;
     writable = true;
     readable = false;
-    [QUEUE] = new yallist_1.Yallist();
+    [QUEUE] = [];
     [BUFFER];
     [READENTRY];
     [WRITEENTRY];
@@ -118,9 +120,17 @@ class Parser extends events_1.EventEmitter {
         // if it's a tbr file it MIGHT be brotli, but we don't know until
         // we look at it and verify it's not a valid tar file.
         this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
+            !(opt.gzip || opt.zstd) && opt.brotli !== undefined ? opt.brotli
                 : isTBR ? undefined
                     : false;
+        // zstd has magic bytes to identify it, but we also support explicit options
+        // and file extension detection
+        const isTZST = opt.file &&
+            (opt.file.endsWith('.tar.zst') || opt.file.endsWith('.tzst'));
+        this.zstd =
+            !(opt.gzip || opt.brotli) && opt.zstd !== undefined ? opt.zstd
+                : isTZST ? true
+                    : undefined;
         // have to set this so that streams are ok piping into it
         this.on('end', () => this[CLOSESTREAM]());
         if (typeof opt.onwarn === 'function') {
@@ -374,7 +384,7 @@ class Parser extends events_1.EventEmitter {
             cb?.();
             return false;
         }
-        // first write, might be gzipped
+        // first write, might be gzipped, zstd, or brotli compressed
         const needSniff = this[UNZIP] === undefined ||
             (this.brotli === undefined && this[UNZIP] === false);
         if (needSniff && chunk) {
@@ -382,7 +392,7 @@ class Parser extends events_1.EventEmitter {
                 chunk = Buffer.concat([this[BUFFER], chunk]);
                 this[BUFFER] = undefined;
             }
-            if (chunk.length < gzipHeader.length) {
+            if (chunk.length < ZIP_HEADER_LEN) {
                 this[BUFFER] = chunk;
                 /* c8 ignore next */
                 cb?.();
@@ -394,7 +404,18 @@ class Parser extends events_1.EventEmitter {
                     this[UNZIP] = false;
                 }
             }
-            const maybeBrotli = this.brotli === undefined;
+            // look for zstd header if gzip header not found
+            let isZstd = false;
+            if (this[UNZIP] === false && this.zstd !== false) {
+                isZstd = true;
+                for (let i = 0; i < zstdHeader.length; i++) {
+                    if (chunk[i] !== zstdHeader[i]) {
+                        isZstd = false;
+                        break;
+                    }
+                }
+            }
+            const maybeBrotli = this.brotli === undefined && !isZstd;
             if (this[UNZIP] === false && maybeBrotli) {
                 // read the first header to see if it's a valid tar file. If so,
                 // we can safely assume that it's not actually brotli, despite the
@@ -424,13 +445,15 @@ class Parser extends events_1.EventEmitter {
                 }
             }
             if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
+                (this[UNZIP] === false && (this.brotli || isZstd))) {
                 const ended = this[ENDED];
                 this[ENDED] = false;
                 this[UNZIP] =
                     this[UNZIP] === undefined ?
                         new minizlib_1.Unzip({})
-                        : new minizlib_1.BrotliDecompress({});
+                        : isZstd ?
+                            new minizlib_1.ZstdDecompress({})
+                            : new minizlib_1.BrotliDecompress({});
                 this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
                 this[UNZIP].on('error', er => this.abort(er));
                 this[UNZIP].on('end', () => {
@@ -585,7 +608,7 @@ class Parser extends events_1.EventEmitter {
             }
             else {
                 this[ENDED] = true;
-                if (this.brotli === undefined)
+                if (this.brotli === undefined || this.zstd === undefined)
                     chunk = chunk || Buffer.alloc(0);
                 if (chunk)
                     this.write(chunk);
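
The sniffing the patch adds above keys off the 4-byte zstd frame magic (0x28 0xb5 0x2f 0xfd), alongside the existing 2-byte gzip magic; brotli has no magic bytes, so the parser instead probes whether the first 512 bytes form a valid tar header. A minimal standalone sketch of the same check, under the assumption it lives outside the Parser class (sniffCompression is an illustrative name, not part of the tar API):

    // Magic-byte sniff, mirroring the constants used in parse.js above.
    const gzipMagic = Buffer.from([0x1f, 0x8b]);
    const zstdMagic = Buffer.from([0x28, 0xb5, 0x2f, 0xfd]);

    const sniffCompression = (chunk) => {
        // wait until we have at least the longest magic before deciding
        if (chunk.length < Math.max(gzipMagic.length, zstdMagic.length)) {
            return undefined;
        }
        if (chunk[0] === gzipMagic[0] && chunk[1] === gzipMagic[1]) {
            return 'gzip';
        }
        if (zstdMagic.every((b, i) => chunk[i] === b)) {
            return 'zstd';
        }
        // brotli has no magic; the parser falls back to a tar-header probe
        return null;
    };
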
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/path-reservations.js b/node_modules/tar/dist/commonjs/path-reservations.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/path-reservations.js
rename to node_modules/tar/dist/commonjs/path-reservations.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/pax.js b/node_modules/tar/dist/commonjs/pax.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/pax.js
rename to node_modules/tar/dist/commonjs/pax.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/read-entry.js b/node_modules/tar/dist/commonjs/read-entry.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/read-entry.js
rename to node_modules/tar/dist/commonjs/read-entry.js
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/replace.js b/node_modules/tar/dist/commonjs/replace.js
similarity index 99%
rename from node_modules/pacote/node_modules/tar/dist/commonjs/replace.js
rename to node_modules/tar/dist/commonjs/replace.js
index 262deecd12f9f..5442c2a5bde5e 100644
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/replace.js
+++ b/node_modules/tar/dist/commonjs/replace.js
@@ -220,6 +220,7 @@ exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync,
     }
     if (opt.gzip ||
         opt.brotli ||
+        opt.zstd ||
         opt.file.endsWith('.br') ||
         opt.file.endsWith('.tbr')) {
         throw new TypeError('cannot append to compressed archives');
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-absolute-path.js b/node_modules/tar/dist/commonjs/strip-absolute-path.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-absolute-path.js
rename to node_modules/tar/dist/commonjs/strip-absolute-path.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/strip-trailing-slashes.js
rename to node_modules/tar/dist/commonjs/strip-trailing-slashes.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/symlink-error.js b/node_modules/tar/dist/commonjs/symlink-error.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/symlink-error.js
rename to node_modules/tar/dist/commonjs/symlink-error.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/types.js b/node_modules/tar/dist/commonjs/types.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/types.js
rename to node_modules/tar/dist/commonjs/types.js
diff --git a/node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js b/node_modules/tar/dist/commonjs/unpack.js
similarity index 92%
rename from node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js
rename to node_modules/tar/dist/commonjs/unpack.js
index edf8acbb18c40..23b1f81156dbd 100644
--- a/node_modules/pacote/node_modules/tar/dist/commonjs/unpack.js
+++ b/node_modules/tar/dist/commonjs/unpack.js
@@ -39,17 +39,14 @@ const node_fs_1 = __importDefault(require("node:fs"));
 const node_path_1 = __importDefault(require("node:path"));
 const get_write_flag_js_1 = require("./get-write-flag.js");
 const mkdir_js_1 = require("./mkdir.js");
-const normalize_unicode_js_1 = require("./normalize-unicode.js");
 const normalize_windows_path_js_1 = require("./normalize-windows-path.js");
 const parse_js_1 = require("./parse.js");
 const strip_absolute_path_js_1 = require("./strip-absolute-path.js");
-const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js");
 const wc = __importStar(require("./winchars.js"));
 const path_reservations_js_1 = require("./path-reservations.js");
 const ONENTRY = Symbol('onEntry');
 const CHECKFS = Symbol('checkFs');
 const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
 const ISREUSABLE = Symbol('isReusable');
 const MAKEFS = Symbol('makeFs');
 const FILE = Symbol('file');
@@ -117,31 +114,6 @@ const unlinkFileSync = (path) => {
 const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
     : b !== undefined && b === b >>> 0 ? b
         : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
 class Unpack extends parse_js_1.Parser {
     [ENDED] = false;
     [CHECKED_CWD] = false;
@@ -150,7 +122,6 @@ class Unpack extends parse_js_1.Parser {
     transform;
     writable = true;
     readable = false;
-    dirCache;
     uid;
     gid;
     setOwner;
@@ -179,7 +150,6 @@ class Unpack extends parse_js_1.Parser {
         };
         super(opt);
         this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
         this.chmod = !!opt.chmod;
         if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
             // need both or neither
@@ -404,7 +374,6 @@ class Unpack extends parse_js_1.Parser {
             umask: this.processUmask,
             preserve: this.preservePaths,
             unlink: this.unlink,
-            cache: this.dirCache,
             cwd: this.cwd,
             mode: mode,
         }, cb);
@@ -582,28 +551,8 @@ class Unpack extends parse_js_1.Parser {
         }
         this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
     }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
     [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
         const done = (er) => {
-            this[PRUNECACHE](entry);
             fullyDone(er);
         };
         const checkCwd = () => {
@@ -732,7 +681,6 @@ class UnpackSync extends Unpack {
         return super[MAKEFS](er, entry, () => { });
     }
     [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
         if (!this[CHECKED_CWD]) {
             const er = this[MKDIR](this.cwd, this.dmode);
             if (er) {
@@ -804,10 +752,15 @@ class UnpackSync extends Unpack {
         let fd;
         try {
             fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode);
+            /* c8 ignore start - This is only a problem if the file was successfully
+             * statted, BUT failed to open. Testing this is annoying, and we
+             * already have ample testing for other uses of oner() methods.
+             */
         }
         catch (er) {
             return oner(er);
         }
+        /* c8 ignore stop */
         const tx = this.transform ? this.transform(entry) || entry : entry;
         if (tx !== entry) {
             tx.on('error', (er) => this[ONERROR](er, entry));
@@ -894,7 +847,6 @@ class UnpackSync extends Unpack {
                 umask: this.processUmask,
                 preserve: this.preservePaths,
                 unlink: this.unlink,
-                cache: this.dirCache,
                 cwd: this.cwd,
                 mode: mode,
             });
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/update.js b/node_modules/tar/dist/commonjs/update.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/update.js
rename to node_modules/tar/dist/commonjs/update.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/warn-method.js b/node_modules/tar/dist/commonjs/warn-method.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/warn-method.js
rename to node_modules/tar/dist/commonjs/warn-method.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/winchars.js b/node_modules/tar/dist/commonjs/winchars.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/winchars.js
rename to node_modules/tar/dist/commonjs/winchars.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/commonjs/write-entry.js b/node_modules/tar/dist/commonjs/write-entry.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/commonjs/write-entry.js
rename to node_modules/tar/dist/commonjs/write-entry.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/create.js b/node_modules/tar/dist/esm/create.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/create.js
rename to node_modules/tar/dist/esm/create.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/cwd-error.js b/node_modules/tar/dist/esm/cwd-error.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/cwd-error.js
rename to node_modules/tar/dist/esm/cwd-error.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/extract.js b/node_modules/tar/dist/esm/extract.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/extract.js
rename to node_modules/tar/dist/esm/extract.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/get-write-flag.js b/node_modules/tar/dist/esm/get-write-flag.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/get-write-flag.js
rename to node_modules/tar/dist/esm/get-write-flag.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/header.js b/node_modules/tar/dist/esm/header.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/header.js
rename to node_modules/tar/dist/esm/header.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/index.js b/node_modules/tar/dist/esm/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/index.js
rename to node_modules/tar/dist/esm/index.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/large-numbers.js b/node_modules/tar/dist/esm/large-numbers.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/large-numbers.js
rename to node_modules/tar/dist/esm/large-numbers.js
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/list.js b/node_modules/tar/dist/esm/list.js
similarity index 93%
rename from node_modules/pacote/node_modules/tar/dist/esm/list.js
rename to node_modules/tar/dist/esm/list.js
index f49068400b6c9..489ece51b9fa3 100644
--- a/node_modules/pacote/node_modules/tar/dist/esm/list.js
+++ b/node_modules/tar/dist/esm/list.js
@@ -47,15 +47,17 @@ const listFileSync = (opt) => {
     const file = opt.file;
     let fd;
     try {
-        const stat = fs.statSync(file);
+        fd = fs.openSync(file, 'r');
+        const stat = fs.fstatSync(fd);
         const readSize = opt.maxReadSize || 16 * 1024 * 1024;
         if (stat.size < readSize) {
-            p.end(fs.readFileSync(file));
+            const buf = Buffer.allocUnsafe(stat.size);
+            fs.readSync(fd, buf, 0, stat.size, 0);
+            p.end(buf);
         }
         else {
             let pos = 0;
             const buf = Buffer.allocUnsafe(readSize);
-            fd = fs.openSync(file, 'r');
             while (pos < stat.size) {
                 const bytesRead = fs.readSync(fd, buf, 0, readSize, pos);
                 pos += bytesRead;
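
The listFileSync rewrite above opens the file once and fstat()s the descriptor, so the size used for the read always matches the fd being read and no separate stat()/readFileSync() pair is needed. The same pattern in isolation (readWhole is an illustrative helper, not tar API):

    import fs from 'node:fs';

    // Read a whole file through a single descriptor: fstat() on the open fd
    // guarantees the size agrees with what readSync() will see.
    const readWhole = (file) => {
        const fd = fs.openSync(file, 'r');
        try {
            const { size } = fs.fstatSync(fd);
            const buf = Buffer.allocUnsafe(size);
            let pos = 0;
            while (pos < size) {
                pos += fs.readSync(fd, buf, pos, size - pos, pos);
            }
            return buf;
        } finally {
            fs.closeSync(fd);
        }
    };
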
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/make-command.js b/node_modules/tar/dist/esm/make-command.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/make-command.js
rename to node_modules/tar/dist/esm/make-command.js
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/mkdir.js b/node_modules/tar/dist/esm/mkdir.js
similarity index 77%
rename from node_modules/pacote/node_modules/tar/dist/esm/mkdir.js
rename to node_modules/tar/dist/esm/mkdir.js
index 13498ef0082f0..9dba701f2973f 100644
--- a/node_modules/pacote/node_modules/tar/dist/esm/mkdir.js
+++ b/node_modules/tar/dist/esm/mkdir.js
@@ -1,12 +1,10 @@
 import { chownr, chownrSync } from 'chownr';
-import fs from 'fs';
-import { mkdirp, mkdirpSync } from 'mkdirp';
+import fs from 'node:fs';
+import fsp from 'node:fs/promises';
 import path from 'node:path';
 import { CwdError } from './cwd-error.js';
 import { normalizeWindowsPath } from './normalize-windows-path.js';
 import { SymlinkError } from './symlink-error.js';
-const cGet = (cache, key) => cache.get(normalizeWindowsPath(key));
-const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val);
 const checkCwd = (dir, cb) => {
     fs.stat(dir, (er, st) => {
         if (er || !st.isDirectory()) {
@@ -16,7 +14,7 @@ const checkCwd = (dir, cb) => {
     });
 };
 /**
- * Wrapper around mkdirp for tar's needs.
+ * Wrapper around fs/promises.mkdir for tar's needs.
  *
  * The main purpose is to avoid creating directories if we know that
  * they already exist (and track which ones exist for this purpose),
@@ -38,14 +36,12 @@ export const mkdir = (dir, opt, cb) => {
         (uid !== opt.processUid || gid !== opt.processGid);
     const preserve = opt.preserve;
     const unlink = opt.unlink;
-    const cache = opt.cache;
     const cwd = normalizeWindowsPath(opt.cwd);
     const done = (er, created) => {
         if (er) {
             cb(er);
         }
         else {
-            cSet(cache, dir, true);
             if (created && doChown) {
                 chownr(created, uid, gid, er => done(er));
             }
@@ -57,32 +53,26 @@ export const mkdir = (dir, opt, cb) => {
             }
         }
     };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
     if (dir === cwd) {
         return checkCwd(dir, done);
     }
     if (preserve) {
-        return mkdirp(dir, { mode }).then(made => done(null, made ?? undefined), // oh, ts
+        return fsp.mkdir(dir, { mode, recursive: true }).then(made => done(null, made ?? undefined), // oh, ts
         done);
     }
     const sub = normalizeWindowsPath(path.relative(cwd, dir));
     const parts = sub.split('/');
-    mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done);
+    mkdir_(cwd, parts, mode, unlink, cwd, undefined, done);
 };
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
+const mkdir_ = (base, parts, mode, unlink, cwd, created, cb) => {
     if (!parts.length) {
         return cb(null, created);
     }
     const p = parts.shift();
     const part = normalizeWindowsPath(path.resolve(base + '/' + p));
-    if (cGet(cache, part)) {
-        return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
-    }
-    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+    fs.mkdir(part, mode, onmkdir(part, parts, mode, unlink, cwd, created, cb));
 };
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => {
+const onmkdir = (part, parts, mode, unlink, cwd, created, cb) => (er) => {
     if (er) {
         fs.lstat(part, (statEr, st) => {
             if (statEr) {
@@ -91,14 +81,14 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) =>
                 cb(statEr);
             }
             else if (st.isDirectory()) {
-                mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+                mkdir_(part, parts, mode, unlink, cwd, created, cb);
             }
             else if (unlink) {
                 fs.unlink(part, er => {
                     if (er) {
                         return cb(er);
                     }
-                    fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb));
+                    fs.mkdir(part, mode, onmkdir(part, parts, mode, unlink, cwd, created, cb));
                 });
             }
             else if (st.isSymbolicLink()) {
@@ -111,7 +101,7 @@ const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) =>
     }
     else {
         created = created || part;
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb);
+        mkdir_(part, parts, mode, unlink, cwd, created, cb);
     }
 };
 const checkCwdSync = (dir) => {
@@ -144,10 +134,8 @@ export const mkdirSync = (dir, opt) => {
         (uid !== opt.processUid || gid !== opt.processGid);
     const preserve = opt.preserve;
     const unlink = opt.unlink;
-    const cache = opt.cache;
     const cwd = normalizeWindowsPath(opt.cwd);
     const done = (created) => {
-        cSet(cache, dir, true);
         if (created && doChown) {
             chownrSync(created, uid, gid);
         }
@@ -155,40 +143,31 @@ export const mkdirSync = (dir, opt) => {
             fs.chmodSync(dir, mode);
         }
     };
-    if (cache && cGet(cache, dir) === true) {
-        return done();
-    }
     if (dir === cwd) {
         checkCwdSync(cwd);
         return done();
     }
     if (preserve) {
-        return done(mkdirpSync(dir, mode) ?? undefined);
+        return done(fs.mkdirSync(dir, { mode, recursive: true }) ?? undefined);
     }
     const sub = normalizeWindowsPath(path.relative(cwd, dir));
     const parts = sub.split('/');
     let created = undefined;
     for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) {
         part = normalizeWindowsPath(path.resolve(part));
-        if (cGet(cache, part)) {
-            continue;
-        }
         try {
             fs.mkdirSync(part, mode);
             created = created || part;
-            cSet(cache, part, true);
         }
         catch (er) {
             const st = fs.lstatSync(part);
             if (st.isDirectory()) {
-                cSet(cache, part, true);
                 continue;
             }
             else if (unlink) {
                 fs.unlinkSync(part);
                 fs.mkdirSync(part, mode);
                 created = created || part;
-                cSet(cache, part, true);
                 continue;
             }
             else if (st.isSymbolicLink()) {
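
The mkdirp dependency is dropped above in favor of the built-in recursive mkdir. Both report the first directory actually created (or undefined when the whole path already existed), which is what the chownr/chownrSync calls need. A small sketch of that return contract, assuming an ESM context with top-level await and illustrative paths:

    import fs from 'node:fs';
    import fsp from 'node:fs/promises';

    // recursive mkdir resolves with the first directory it had to create,
    // or undefined when everything already existed
    const made = await fsp.mkdir('/tmp/example/a/b/c', { mode: 0o755, recursive: true });
    // e.g. made === '/tmp/example' on a clean system, undefined on a re-run

    // the sync form has the same contract
    const madeSync = fs.mkdirSync('/tmp/example/x/y', { mode: 0o755, recursive: true });
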
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/mode-fix.js b/node_modules/tar/dist/esm/mode-fix.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/mode-fix.js
rename to node_modules/tar/dist/esm/mode-fix.js
diff --git a/node_modules/tar/dist/esm/normalize-unicode.js b/node_modules/tar/dist/esm/normalize-unicode.js
new file mode 100644
index 0000000000000..e9b8f14b01347
--- /dev/null
+++ b/node_modules/tar/dist/esm/normalize-unicode.js
@@ -0,0 +1,30 @@
+// warning: extremely hot code path.
+// This has been meticulously optimized for use
+// within npm install on large package trees.
+// Do not edit without careful benchmarking.
+const normalizeCache = Object.create(null);
+// Limit the size of this. Very low-sophistication LRU cache
+const MAX = 10000;
+const cache = new Set();
+export const normalizeUnicode = (s) => {
+    if (!cache.has(s)) {
+        normalizeCache[s] = s.normalize('NFD');
+    }
+    else {
+        cache.delete(s);
+    }
+    cache.add(s);
+    const ret = normalizeCache[s];
+    let i = cache.size - MAX;
+    // only prune when we're 10% over the max
+    if (i > MAX / 10) {
+        for (const s of cache) {
+            cache.delete(s);
+            delete normalizeCache[s];
+            if (--i <= 0)
+                break;
+        }
+    }
+    return ret;
+};
+//# sourceMappingURL=normalize-unicode.js.map
\ No newline at end of file
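
The new module above memoizes NFD normalization with a Set used as an insertion-ordered LRU: a cache hit deletes and re-adds the key to move it to the back, and pruning from the front only kicks in once the size is 10% past MAX. A rough usage sketch, assuming the module can be reached directly (the import specifier is illustrative, not a documented entry point):

    import { normalizeUnicode } from 'tar/dist/esm/normalize-unicode.js'; // illustrative specifier

    // composed (U+00E9) and decomposed (e + U+0301) spellings normalize to the
    // same NFD string, and repeat calls are served from the cache
    const a = normalizeUnicode('Caf\u00e9');
    const b = normalizeUnicode('Cafe\u0301');
    console.log(a === b); // true
    console.log(a === 'Caf\u00e9'.normalize('NFD')); // true
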
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/normalize-windows-path.js b/node_modules/tar/dist/esm/normalize-windows-path.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/normalize-windows-path.js
rename to node_modules/tar/dist/esm/normalize-windows-path.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/options.js b/node_modules/tar/dist/esm/options.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/options.js
rename to node_modules/tar/dist/esm/options.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/pack.js b/node_modules/tar/dist/esm/pack.js
similarity index 92%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/pack.js
rename to node_modules/tar/dist/esm/pack.js
index f59f32f94201f..14661783455d5 100644
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/pack.js
+++ b/node_modules/tar/dist/esm/pack.js
@@ -72,6 +72,14 @@ export class Pack extends Minipass {
     jobs;
     [WRITEENTRYCLASS];
     onWriteEntry;
+    // Note: we actually DO need a linked list here, because we
+    // shift() to update the head of the list where we start, but still
+    // while that happens, need to know what the next item in the queue
+    // will be. Since we do multiple jobs in parallel, it's not as simple
+    // as just an Array.shift(), since that would lose the information about
+    // the next job in the list. We could add a .next field on the PackJob
+    // class, but then we'd have to be tracking the tail of the queue the
+    // whole time, and Yallist just does that for us anyway.
     [QUEUE];
     [JOBS] = 0;
     [PROCESSING] = false;
@@ -96,9 +104,9 @@ export class Pack extends Minipass {
             this.on('warn', opt.onwarn);
         }
         this.portable = !!opt.portable;
-        if (opt.gzip || opt.brotli) {
-            if (opt.gzip && opt.brotli) {
-                throw new TypeError('gzip and brotli are mutually exclusive');
+        if (opt.gzip || opt.brotli || opt.zstd) {
+            if ((opt.gzip ? 1 : 0) + (opt.brotli ? 1 : 0) + (opt.zstd ? 1 : 0) > 1) {
+                throw new TypeError('gzip, brotli, zstd are mutually exclusive');
             }
             if (opt.gzip) {
                 if (typeof opt.gzip !== 'object') {
@@ -115,6 +123,12 @@ export class Pack extends Minipass {
                 }
                 this.zip = new zlib.BrotliCompress(opt.brotli);
             }
+            if (opt.zstd) {
+                if (typeof opt.zstd !== 'object') {
+                    opt.zstd = {};
+                }
+                this.zip = new zlib.ZstdCompress(opt.zstd);
+            }
             /* c8 ignore next */
             if (!this.zip)
                 throw new Error('impossible');
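
Pack gains a third compressor above: gzip, brotli, and zstd are mutually exclusive, and exactly one of Gzip, BrotliCompress, or ZstdCompress from minizlib ends up as this.zip. A condensed sketch of that selection (pickCompressor is an illustrative name; ZstdCompress is assumed to exist in the bundled minizlib, as the code above relies on):

    import * as zlib from 'minizlib';

    // Choose at most one compressor; mirrors the branching in pack.js above.
    const pickCompressor = (opt) => {
        const wanted = ['gzip', 'brotli', 'zstd'].filter(k => opt[k]);
        if (wanted.length > 1) {
            throw new TypeError('gzip, brotli, zstd are mutually exclusive');
        }
        if (opt.gzip) return new zlib.Gzip(typeof opt.gzip === 'object' ? opt.gzip : {});
        if (opt.brotli) return new zlib.BrotliCompress(typeof opt.brotli === 'object' ? opt.brotli : {});
        if (opt.zstd) return new zlib.ZstdCompress(typeof opt.zstd === 'object' ? opt.zstd : {});
        return undefined;
    };
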
diff --git a/node_modules/node-gyp/node_modules/chownr/dist/esm/package.json b/node_modules/tar/dist/esm/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/chownr/dist/esm/package.json
rename to node_modules/tar/dist/esm/package.json
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/parse.js b/node_modules/tar/dist/esm/parse.js
similarity index 92%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/parse.js
rename to node_modules/tar/dist/esm/parse.js
index cce430479cd0c..5b6bfe4bc4f15 100644
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/parse.js
+++ b/node_modules/tar/dist/esm/parse.js
@@ -2,7 +2,7 @@
 // the full 512 bytes of a header to come in.  We will Buffer.concat()
 // it to the next write(), which is a mem copy, but a small one.
 //
-// this[QUEUE] is a Yallist of entries that haven't been emitted
+// this[QUEUE] is a list of entries that haven't been emitted
 // yet this can only get filled up if the user keeps write()ing after
 // a write() returns false, or does a write() with more than one entry
 //
@@ -18,14 +18,15 @@
 //
 // ignored entries get .resume() called on them straight away
 import { EventEmitter as EE } from 'events';
-import { BrotliDecompress, Unzip } from 'minizlib';
-import { Yallist } from 'yallist';
+import { BrotliDecompress, Unzip, ZstdDecompress } from 'minizlib';
 import { Header } from './header.js';
 import { Pax } from './pax.js';
 import { ReadEntry } from './read-entry.js';
 import { warnMethod, } from './warn-method.js';
 const maxMetaEntrySize = 1024 * 1024;
 const gzipHeader = Buffer.from([0x1f, 0x8b]);
+const zstdHeader = Buffer.from([0x28, 0xb5, 0x2f, 0xfd]);
+const ZIP_HEADER_LEN = Math.max(gzipHeader.length, zstdHeader.length);
 const STATE = Symbol('state');
 const WRITEENTRY = Symbol('writeEntry');
 const READENTRY = Symbol('readEntry');
@@ -63,9 +64,10 @@ export class Parser extends EE {
     maxMetaEntrySize;
     filter;
     brotli;
+    zstd;
     writable = true;
     readable = false;
-    [QUEUE] = new Yallist();
+    [QUEUE] = [];
     [BUFFER];
     [READENTRY];
     [WRITEENTRY];
@@ -115,9 +117,17 @@ export class Parser extends EE {
         // if it's a tbr file it MIGHT be brotli, but we don't know until
         // we look at it and verify it's not a valid tar file.
         this.brotli =
-            !opt.gzip && opt.brotli !== undefined ? opt.brotli
+            !(opt.gzip || opt.zstd) && opt.brotli !== undefined ? opt.brotli
                 : isTBR ? undefined
                     : false;
+        // zstd has magic bytes to identify it, but we also support explicit options
+        // and file extension detection
+        const isTZST = opt.file &&
+            (opt.file.endsWith('.tar.zst') || opt.file.endsWith('.tzst'));
+        this.zstd =
+            !(opt.gzip || opt.brotli) && opt.zstd !== undefined ? opt.zstd
+                : isTZST ? true
+                    : undefined;
         // have to set this so that streams are ok piping into it
         this.on('end', () => this[CLOSESTREAM]());
         if (typeof opt.onwarn === 'function') {
@@ -371,7 +381,7 @@ export class Parser extends EE {
             cb?.();
             return false;
         }
-        // first write, might be gzipped
+        // first write, might be gzipped, zstd, or brotli compressed
         const needSniff = this[UNZIP] === undefined ||
             (this.brotli === undefined && this[UNZIP] === false);
         if (needSniff && chunk) {
@@ -379,7 +389,7 @@ export class Parser extends EE {
                 chunk = Buffer.concat([this[BUFFER], chunk]);
                 this[BUFFER] = undefined;
             }
-            if (chunk.length < gzipHeader.length) {
+            if (chunk.length < ZIP_HEADER_LEN) {
                 this[BUFFER] = chunk;
                 /* c8 ignore next */
                 cb?.();
@@ -391,7 +401,18 @@ export class Parser extends EE {
                     this[UNZIP] = false;
                 }
             }
-            const maybeBrotli = this.brotli === undefined;
+            // look for zstd header if gzip header not found
+            let isZstd = false;
+            if (this[UNZIP] === false && this.zstd !== false) {
+                isZstd = true;
+                for (let i = 0; i < zstdHeader.length; i++) {
+                    if (chunk[i] !== zstdHeader[i]) {
+                        isZstd = false;
+                        break;
+                    }
+                }
+            }
+            const maybeBrotli = this.brotli === undefined && !isZstd;
             if (this[UNZIP] === false && maybeBrotli) {
                 // read the first header to see if it's a valid tar file. If so,
                 // we can safely assume that it's not actually brotli, despite the
@@ -421,13 +442,15 @@ export class Parser extends EE {
                 }
             }
             if (this[UNZIP] === undefined ||
-                (this[UNZIP] === false && this.brotli)) {
+                (this[UNZIP] === false && (this.brotli || isZstd))) {
                 const ended = this[ENDED];
                 this[ENDED] = false;
                 this[UNZIP] =
                     this[UNZIP] === undefined ?
                         new Unzip({})
-                        : new BrotliDecompress({});
+                        : isZstd ?
+                            new ZstdDecompress({})
+                            : new BrotliDecompress({});
                 this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk));
                 this[UNZIP].on('error', er => this.abort(er));
                 this[UNZIP].on('end', () => {
@@ -582,7 +605,7 @@ export class Parser extends EE {
             }
             else {
                 this[ENDED] = true;
-                if (this.brotli === undefined)
+                if (this.brotli === undefined || this.zstd === undefined)
                     chunk = chunk || Buffer.alloc(0);
                 if (chunk)
                     this.write(chunk);
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/path-reservations.js b/node_modules/tar/dist/esm/path-reservations.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/path-reservations.js
rename to node_modules/tar/dist/esm/path-reservations.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/pax.js b/node_modules/tar/dist/esm/pax.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/pax.js
rename to node_modules/tar/dist/esm/pax.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/read-entry.js b/node_modules/tar/dist/esm/read-entry.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/read-entry.js
rename to node_modules/tar/dist/esm/read-entry.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/replace.js b/node_modules/tar/dist/esm/replace.js
similarity index 99%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/replace.js
rename to node_modules/tar/dist/esm/replace.js
index bab622bfdf1f1..214aa92446cc6 100644
--- a/node_modules/node-gyp/node_modules/tar/dist/esm/replace.js
+++ b/node_modules/tar/dist/esm/replace.js
@@ -214,6 +214,7 @@ export const replace = makeCommand(replaceSync, replaceAsync,
     }
     if (opt.gzip ||
         opt.brotli ||
+        opt.zstd ||
         opt.file.endsWith('.br') ||
         opt.file.endsWith('.tbr')) {
         throw new TypeError('cannot append to compressed archives');
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/strip-absolute-path.js b/node_modules/tar/dist/esm/strip-absolute-path.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/strip-absolute-path.js
rename to node_modules/tar/dist/esm/strip-absolute-path.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/strip-trailing-slashes.js b/node_modules/tar/dist/esm/strip-trailing-slashes.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/strip-trailing-slashes.js
rename to node_modules/tar/dist/esm/strip-trailing-slashes.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/symlink-error.js b/node_modules/tar/dist/esm/symlink-error.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/symlink-error.js
rename to node_modules/tar/dist/esm/symlink-error.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/types.js b/node_modules/tar/dist/esm/types.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/types.js
rename to node_modules/tar/dist/esm/types.js
diff --git a/node_modules/pacote/node_modules/tar/dist/esm/unpack.js b/node_modules/tar/dist/esm/unpack.js
similarity index 92%
rename from node_modules/pacote/node_modules/tar/dist/esm/unpack.js
rename to node_modules/tar/dist/esm/unpack.js
index 6e744cfc1a6f9..4e8fc5c117a05 100644
--- a/node_modules/pacote/node_modules/tar/dist/esm/unpack.js
+++ b/node_modules/tar/dist/esm/unpack.js
@@ -10,17 +10,14 @@ import fs from 'node:fs';
 import path from 'node:path';
 import { getWriteFlag } from './get-write-flag.js';
 import { mkdir, mkdirSync } from './mkdir.js';
-import { normalizeUnicode } from './normalize-unicode.js';
 import { normalizeWindowsPath } from './normalize-windows-path.js';
 import { Parser } from './parse.js';
 import { stripAbsolutePath } from './strip-absolute-path.js';
-import { stripTrailingSlashes } from './strip-trailing-slashes.js';
 import * as wc from './winchars.js';
 import { PathReservations } from './path-reservations.js';
 const ONENTRY = Symbol('onEntry');
 const CHECKFS = Symbol('checkFs');
 const CHECKFS2 = Symbol('checkFs2');
-const PRUNECACHE = Symbol('pruneCache');
 const ISREUSABLE = Symbol('isReusable');
 const MAKEFS = Symbol('makeFs');
 const FILE = Symbol('file');
@@ -88,31 +85,6 @@ const unlinkFileSync = (path) => {
 const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a
     : b !== undefined && b === b >>> 0 ? b
         : c;
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase();
-// remove all cache entries matching ${abs}/**
-const pruneCache = (cache, abs) => {
-    abs = cacheKeyNormalize(abs);
-    for (const path of cache.keys()) {
-        const pnorm = cacheKeyNormalize(path);
-        if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-            cache.delete(path);
-        }
-    }
-};
-const dropCache = (cache) => {
-    for (const key of cache.keys()) {
-        cache.delete(key);
-    }
-};
 export class Unpack extends Parser {
     [ENDED] = false;
     [CHECKED_CWD] = false;
@@ -121,7 +93,6 @@ export class Unpack extends Parser {
     transform;
     writable = true;
     readable = false;
-    dirCache;
     uid;
     gid;
     setOwner;
@@ -150,7 +121,6 @@ export class Unpack extends Parser {
         };
         super(opt);
         this.transform = opt.transform;
-        this.dirCache = opt.dirCache || new Map();
         this.chmod = !!opt.chmod;
         if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
             // need both or neither
@@ -375,7 +345,6 @@ export class Unpack extends Parser {
             umask: this.processUmask,
             preserve: this.preservePaths,
             unlink: this.unlink,
-            cache: this.dirCache,
             cwd: this.cwd,
             mode: mode,
         }, cb);
@@ -553,28 +522,8 @@ export class Unpack extends Parser {
         }
         this.reservations.reserve(paths, done => this[CHECKFS2](entry, done));
     }
-    [PRUNECACHE](entry) {
-        // if we are not creating a directory, and the path is in the dirCache,
-        // then that means we are about to delete the directory we created
-        // previously, and it is no longer going to be a directory, and neither
-        // is any of its children.
-        // If a symbolic link is encountered, all bets are off.  There is no
-        // reasonable way to sanitize the cache in such a way we will be able to
-        // avoid having filesystem collisions.  If this happens with a non-symlink
-        // entry, it'll just fail to unpack, but a symlink to a directory, using an
-        // 8.3 shortname or certain unicode attacks, can evade detection and lead
-        // to arbitrary writes to anywhere on the system.
-        if (entry.type === 'SymbolicLink') {
-            dropCache(this.dirCache);
-        }
-        else if (entry.type !== 'Directory') {
-            pruneCache(this.dirCache, String(entry.absolute));
-        }
-    }
     [CHECKFS2](entry, fullyDone) {
-        this[PRUNECACHE](entry);
         const done = (er) => {
-            this[PRUNECACHE](entry);
             fullyDone(er);
         };
         const checkCwd = () => {
@@ -702,7 +651,6 @@ export class UnpackSync extends Unpack {
         return super[MAKEFS](er, entry, () => { });
     }
     [CHECKFS](entry) {
-        this[PRUNECACHE](entry);
         if (!this[CHECKED_CWD]) {
             const er = this[MKDIR](this.cwd, this.dmode);
             if (er) {
@@ -774,10 +722,15 @@ export class UnpackSync extends Unpack {
         let fd;
         try {
             fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode);
+            /* c8 ignore start - This is only a problem if the file was successfully
+             * statted, BUT failed to open. Testing this is annoying, and we
+             * already have ample testing for other uses of oner() methods.
+             */
         }
         catch (er) {
             return oner(er);
         }
+        /* c8 ignore stop */
         const tx = this.transform ? this.transform(entry) || entry : entry;
         if (tx !== entry) {
             tx.on('error', (er) => this[ONERROR](er, entry));
@@ -864,7 +817,6 @@ export class UnpackSync extends Unpack {
                 umask: this.processUmask,
                 preserve: this.preservePaths,
                 unlink: this.unlink,
-                cache: this.dirCache,
                 cwd: this.cwd,
                 mode: mode,
             });
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/update.js b/node_modules/tar/dist/esm/update.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/update.js
rename to node_modules/tar/dist/esm/update.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/warn-method.js b/node_modules/tar/dist/esm/warn-method.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/warn-method.js
rename to node_modules/tar/dist/esm/warn-method.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/winchars.js b/node_modules/tar/dist/esm/winchars.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/winchars.js
rename to node_modules/tar/dist/esm/winchars.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/write-entry.js b/node_modules/tar/dist/esm/write-entry.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/write-entry.js
rename to node_modules/tar/dist/esm/write-entry.js
diff --git a/node_modules/tar/index.js b/node_modules/tar/index.js
deleted file mode 100644
index c9ae06e7906c4..0000000000000
--- a/node_modules/tar/index.js
+++ /dev/null
@@ -1,18 +0,0 @@
-'use strict'
-
-// high-level commands
-exports.c = exports.create = require('./lib/create.js')
-exports.r = exports.replace = require('./lib/replace.js')
-exports.t = exports.list = require('./lib/list.js')
-exports.u = exports.update = require('./lib/update.js')
-exports.x = exports.extract = require('./lib/extract.js')
-
-// classes
-exports.Pack = require('./lib/pack.js')
-exports.Unpack = require('./lib/unpack.js')
-exports.Parse = require('./lib/parse.js')
-exports.ReadEntry = require('./lib/read-entry.js')
-exports.WriteEntry = require('./lib/write-entry.js')
-exports.Header = require('./lib/header.js')
-exports.Pax = require('./lib/pax.js')
-exports.types = require('./lib/types.js')
diff --git a/node_modules/tar/lib/create.js b/node_modules/tar/lib/create.js
deleted file mode 100644
index 9c860d4e4a764..0000000000000
--- a/node_modules/tar/lib/create.js
+++ /dev/null
@@ -1,111 +0,0 @@
-'use strict'
-
-// tar -c
-const hlo = require('./high-level-opt.js')
-
-const Pack = require('./pack.js')
-const fsm = require('fs-minipass')
-const t = require('./list.js')
-const path = require('path')
-
-module.exports = (opt_, files, cb) => {
-  if (typeof files === 'function') {
-    cb = files
-  }
-
-  if (Array.isArray(opt_)) {
-    files = opt_, opt_ = {}
-  }
-
-  if (!files || !Array.isArray(files) || !files.length) {
-    throw new TypeError('no files or directories specified')
-  }
-
-  files = Array.from(files)
-
-  const opt = hlo(opt_)
-
-  if (opt.sync && typeof cb === 'function') {
-    throw new TypeError('callback not supported for sync tar functions')
-  }
-
-  if (!opt.file && typeof cb === 'function') {
-    throw new TypeError('callback only supported with file option')
-  }
-
-  return opt.file && opt.sync ? createFileSync(opt, files)
-    : opt.file ? createFile(opt, files, cb)
-    : opt.sync ? createSync(opt, files)
-    : create(opt, files)
-}
-
-const createFileSync = (opt, files) => {
-  const p = new Pack.Sync(opt)
-  const stream = new fsm.WriteStreamSync(opt.file, {
-    mode: opt.mode || 0o666,
-  })
-  p.pipe(stream)
-  addFilesSync(p, files)
-}
-
-const createFile = (opt, files, cb) => {
-  const p = new Pack(opt)
-  const stream = new fsm.WriteStream(opt.file, {
-    mode: opt.mode || 0o666,
-  })
-  p.pipe(stream)
-
-  const promise = new Promise((res, rej) => {
-    stream.on('error', rej)
-    stream.on('close', res)
-    p.on('error', rej)
-  })
-
-  addFilesAsync(p, files)
-
-  return cb ? promise.then(cb, cb) : promise
-}
-
-const addFilesSync = (p, files) => {
-  files.forEach(file => {
-    if (file.charAt(0) === '@') {
-      t({
-        file: path.resolve(p.cwd, file.slice(1)),
-        sync: true,
-        noResume: true,
-        onentry: entry => p.add(entry),
-      })
-    } else {
-      p.add(file)
-    }
-  })
-  p.end()
-}
-
-const addFilesAsync = (p, files) => {
-  while (files.length) {
-    const file = files.shift()
-    if (file.charAt(0) === '@') {
-      return t({
-        file: path.resolve(p.cwd, file.slice(1)),
-        noResume: true,
-        onentry: entry => p.add(entry),
-      }).then(_ => addFilesAsync(p, files))
-    } else {
-      p.add(file)
-    }
-  }
-  p.end()
-}
-
-const createSync = (opt, files) => {
-  const p = new Pack.Sync(opt)
-  addFilesSync(p, files)
-  return p
-}
-
-const create = (opt, files) => {
-  const p = new Pack(opt)
-  addFilesAsync(p, files)
-  return p
-}
diff --git a/node_modules/tar/lib/extract.js b/node_modules/tar/lib/extract.js
deleted file mode 100644
index 54767982583f2..0000000000000
--- a/node_modules/tar/lib/extract.js
+++ /dev/null
@@ -1,113 +0,0 @@
-'use strict'
-
-// tar -x
-const hlo = require('./high-level-opt.js')
-const Unpack = require('./unpack.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const path = require('path')
-const stripSlash = require('./strip-trailing-slashes.js')
-
-module.exports = (opt_, files, cb) => {
-  if (typeof opt_ === 'function') {
-    cb = opt_, files = null, opt_ = {}
-  } else if (Array.isArray(opt_)) {
-    files = opt_, opt_ = {}
-  }
-
-  if (typeof files === 'function') {
-    cb = files, files = null
-  }
-
-  if (!files) {
-    files = []
-  } else {
-    files = Array.from(files)
-  }
-
-  const opt = hlo(opt_)
-
-  if (opt.sync && typeof cb === 'function') {
-    throw new TypeError('callback not supported for sync tar functions')
-  }
-
-  if (!opt.file && typeof cb === 'function') {
-    throw new TypeError('callback only supported with file option')
-  }
-
-  if (files.length) {
-    filesFilter(opt, files)
-  }
-
-  return opt.file && opt.sync ? extractFileSync(opt)
-    : opt.file ? extractFile(opt, cb)
-    : opt.sync ? extractSync(opt)
-    : extract(opt)
-}
-
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
-  const map = new Map(files.map(f => [stripSlash(f), true]))
-  const filter = opt.filter
-
-  const mapHas = (file, r) => {
-    const root = r || path.parse(file).root || '.'
-    const ret = file === root ? false
-      : map.has(file) ? map.get(file)
-      : mapHas(path.dirname(file), root)
-
-    map.set(file, ret)
-    return ret
-  }
-
-  opt.filter = filter
-    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
-    : file => mapHas(stripSlash(file))
-}
-
-const extractFileSync = opt => {
-  const u = new Unpack.Sync(opt)
-
-  const file = opt.file
-  const stat = fs.statSync(file)
-  // This trades a zero-byte read() syscall for a stat
-  // However, it will usually result in less memory allocation
-  const readSize = opt.maxReadSize || 16 * 1024 * 1024
-  const stream = new fsm.ReadStreamSync(file, {
-    readSize: readSize,
-    size: stat.size,
-  })
-  stream.pipe(u)
-}
-
-const extractFile = (opt, cb) => {
-  const u = new Unpack(opt)
-  const readSize = opt.maxReadSize || 16 * 1024 * 1024
-
-  const file = opt.file
-  const p = new Promise((resolve, reject) => {
-    u.on('error', reject)
-    u.on('close', resolve)
-
-    // This trades a zero-byte read() syscall for a stat
-    // However, it will usually result in less memory allocation
-    fs.stat(file, (er, stat) => {
-      if (er) {
-        reject(er)
-      } else {
-        const stream = new fsm.ReadStream(file, {
-          readSize: readSize,
-          size: stat.size,
-        })
-        stream.on('error', reject)
-        stream.pipe(u)
-      }
-    })
-  })
-  return cb ? p.then(cb, cb) : p
-}
-
-const extractSync = opt => new Unpack.Sync(opt)
-
-const extract = opt => new Unpack(opt)
diff --git a/node_modules/tar/lib/get-write-flag.js b/node_modules/tar/lib/get-write-flag.js
deleted file mode 100644
index e86959996623c..0000000000000
--- a/node_modules/tar/lib/get-write-flag.js
+++ /dev/null
@@ -1,20 +0,0 @@
-// Get the appropriate flag to use for creating files
-// We use fmap on Windows platforms for files less than
-// 512kb.  This is a fairly low limit, but avoids making
-// things slower in some cases.  Since most of what this
-// library is used for is extracting tarballs of many
-// relatively small files in npm packages and the like,
-// it can be a big boost on Windows platforms.
-// Only supported in Node v12.9.0 and above.
-const platform = process.env.__FAKE_PLATFORM__ || process.platform
-const isWindows = platform === 'win32'
-const fs = global.__FAKE_TESTING_FS__ || require('fs')
-
-/* istanbul ignore next */
-const { O_CREAT, O_TRUNC, O_WRONLY, UV_FS_O_FILEMAP = 0 } = fs.constants
-
-const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP
-const fMapLimit = 512 * 1024
-const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY
-module.exports = !fMapEnabled ? () => 'w'
-  : size => size < fMapLimit ? fMapFlag : 'w'
diff --git a/node_modules/tar/lib/header.js b/node_modules/tar/lib/header.js
deleted file mode 100644
index 411d5e45e879a..0000000000000
--- a/node_modules/tar/lib/header.js
+++ /dev/null
@@ -1,304 +0,0 @@
-'use strict'
-// parse a 512-byte header block to a data object, or vice-versa
-// encode returns `true` if a pax extended header is needed, because
-// the data could not be faithfully encoded in a simple header.
-// (Also, check header.needPax to see if it needs a pax header.)
-
-const types = require('./types.js')
-const pathModule = require('path').posix
-const large = require('./large-numbers.js')
-
-const SLURP = Symbol('slurp')
-const TYPE = Symbol('type')
-
-class Header {
-  constructor (data, off, ex, gex) {
-    this.cksumValid = false
-    this.needPax = false
-    this.nullBlock = false
-
-    this.block = null
-    this.path = null
-    this.mode = null
-    this.uid = null
-    this.gid = null
-    this.size = null
-    this.mtime = null
-    this.cksum = null
-    this[TYPE] = '0'
-    this.linkpath = null
-    this.uname = null
-    this.gname = null
-    this.devmaj = 0
-    this.devmin = 0
-    this.atime = null
-    this.ctime = null
-
-    if (Buffer.isBuffer(data)) {
-      this.decode(data, off || 0, ex, gex)
-    } else if (data) {
-      this.set(data)
-    }
-  }
-
-  decode (buf, off, ex, gex) {
-    if (!off) {
-      off = 0
-    }
-
-    if (!buf || !(buf.length >= off + 512)) {
-      throw new Error('need 512 bytes for header')
-    }
-
-    this.path = decString(buf, off, 100)
-    this.mode = decNumber(buf, off + 100, 8)
-    this.uid = decNumber(buf, off + 108, 8)
-    this.gid = decNumber(buf, off + 116, 8)
-    this.size = decNumber(buf, off + 124, 12)
-    this.mtime = decDate(buf, off + 136, 12)
-    this.cksum = decNumber(buf, off + 148, 12)
-
-    // if we have extended or global extended headers, apply them now
-    // See https://github.com/npm/node-tar/pull/187
-    this[SLURP](ex)
-    this[SLURP](gex, true)
-
-    // old tar versions marked dirs as a file with a trailing /
-    this[TYPE] = decString(buf, off + 156, 1)
-    if (this[TYPE] === '') {
-      this[TYPE] = '0'
-    }
-    if (this[TYPE] === '0' && this.path.slice(-1) === '/') {
-      this[TYPE] = '5'
-    }
-
-    // tar implementations sometimes incorrectly put the stat(dir).size
-    // as the size in the tarball, even though Directory entries are
-    // not able to have any body at all.  In the very rare chance that
-    // it actually DOES have a body, we weren't going to do anything with
-    // it anyway, and it'll just be a warning about an invalid header.
-    if (this[TYPE] === '5') {
-      this.size = 0
-    }
-
-    this.linkpath = decString(buf, off + 157, 100)
-    if (buf.slice(off + 257, off + 265).toString() === 'ustar\u000000') {
-      this.uname = decString(buf, off + 265, 32)
-      this.gname = decString(buf, off + 297, 32)
-      this.devmaj = decNumber(buf, off + 329, 8)
-      this.devmin = decNumber(buf, off + 337, 8)
-      if (buf[off + 475] !== 0) {
-        // definitely a prefix, definitely >130 chars.
-        const prefix = decString(buf, off + 345, 155)
-        this.path = prefix + '/' + this.path
-      } else {
-        const prefix = decString(buf, off + 345, 130)
-        if (prefix) {
-          this.path = prefix + '/' + this.path
-        }
-        this.atime = decDate(buf, off + 476, 12)
-        this.ctime = decDate(buf, off + 488, 12)
-      }
-    }
-
-    let sum = 8 * 0x20
-    for (let i = off; i < off + 148; i++) {
-      sum += buf[i]
-    }
-
-    for (let i = off + 156; i < off + 512; i++) {
-      sum += buf[i]
-    }
-
-    this.cksumValid = sum === this.cksum
-    if (this.cksum === null && sum === 8 * 0x20) {
-      this.nullBlock = true
-    }
-  }
-
-  [SLURP] (ex, global) {
-    for (const k in ex) {
-      // we slurp in everything except for the path attribute in
-      // a global extended header, because that's weird.
-      if (ex[k] !== null && ex[k] !== undefined &&
-          !(global && k === 'path')) {
-        this[k] = ex[k]
-      }
-    }
-  }
-
-  encode (buf, off) {
-    if (!buf) {
-      buf = this.block = Buffer.alloc(512)
-      off = 0
-    }
-
-    if (!off) {
-      off = 0
-    }
-
-    if (!(buf.length >= off + 512)) {
-      throw new Error('need 512 bytes for header')
-    }
-
-    const prefixSize = this.ctime || this.atime ? 130 : 155
-    const split = splitPrefix(this.path || '', prefixSize)
-    const path = split[0]
-    const prefix = split[1]
-    this.needPax = split[2]
-
-    this.needPax = encString(buf, off, 100, path) || this.needPax
-    this.needPax = encNumber(buf, off + 100, 8, this.mode) || this.needPax
-    this.needPax = encNumber(buf, off + 108, 8, this.uid) || this.needPax
-    this.needPax = encNumber(buf, off + 116, 8, this.gid) || this.needPax
-    this.needPax = encNumber(buf, off + 124, 12, this.size) || this.needPax
-    this.needPax = encDate(buf, off + 136, 12, this.mtime) || this.needPax
-    buf[off + 156] = this[TYPE].charCodeAt(0)
-    this.needPax = encString(buf, off + 157, 100, this.linkpath) || this.needPax
-    buf.write('ustar\u000000', off + 257, 8)
-    this.needPax = encString(buf, off + 265, 32, this.uname) || this.needPax
-    this.needPax = encString(buf, off + 297, 32, this.gname) || this.needPax
-    this.needPax = encNumber(buf, off + 329, 8, this.devmaj) || this.needPax
-    this.needPax = encNumber(buf, off + 337, 8, this.devmin) || this.needPax
-    this.needPax = encString(buf, off + 345, prefixSize, prefix) || this.needPax
-    if (buf[off + 475] !== 0) {
-      this.needPax = encString(buf, off + 345, 155, prefix) || this.needPax
-    } else {
-      this.needPax = encString(buf, off + 345, 130, prefix) || this.needPax
-      this.needPax = encDate(buf, off + 476, 12, this.atime) || this.needPax
-      this.needPax = encDate(buf, off + 488, 12, this.ctime) || this.needPax
-    }
-
-    let sum = 8 * 0x20
-    for (let i = off; i < off + 148; i++) {
-      sum += buf[i]
-    }
-
-    for (let i = off + 156; i < off + 512; i++) {
-      sum += buf[i]
-    }
-
-    this.cksum = sum
-    encNumber(buf, off + 148, 8, this.cksum)
-    this.cksumValid = true
-
-    return this.needPax
-  }
-
-  set (data) {
-    for (const i in data) {
-      if (data[i] !== null && data[i] !== undefined) {
-        this[i] = data[i]
-      }
-    }
-  }
-
-  get type () {
-    return types.name.get(this[TYPE]) || this[TYPE]
-  }
-
-  get typeKey () {
-    return this[TYPE]
-  }
-
-  set type (type) {
-    if (types.code.has(type)) {
-      this[TYPE] = types.code.get(type)
-    } else {
-      this[TYPE] = type
-    }
-  }
-}
-
-const splitPrefix = (p, prefixSize) => {
-  const pathSize = 100
-  let pp = p
-  let prefix = ''
-  let ret
-  const root = pathModule.parse(p).root || '.'
-
-  if (Buffer.byteLength(pp) < pathSize) {
-    ret = [pp, prefix, false]
-  } else {
-    // first set prefix to the dir, and path to the base
-    prefix = pathModule.dirname(pp)
-    pp = pathModule.basename(pp)
-
-    do {
-      if (Buffer.byteLength(pp) <= pathSize &&
-          Buffer.byteLength(prefix) <= prefixSize) {
-        // both fit!
-        ret = [pp, prefix, false]
-      } else if (Buffer.byteLength(pp) > pathSize &&
-          Buffer.byteLength(prefix) <= prefixSize) {
-        // prefix fits in prefix, but path doesn't fit in path
-        ret = [pp.slice(0, pathSize - 1), prefix, true]
-      } else {
-        // make path take a bit from prefix
-        pp = pathModule.join(pathModule.basename(prefix), pp)
-        prefix = pathModule.dirname(prefix)
-      }
-    } while (prefix !== root && !ret)
-
-    // at this point, found no resolution, just truncate
-    if (!ret) {
-      ret = [p.slice(0, pathSize - 1), '', true]
-    }
-  }
-  return ret
-}
-
-const decString = (buf, off, size) =>
-  buf.slice(off, off + size).toString('utf8').replace(/\0.*/, '')
-
-const decDate = (buf, off, size) =>
-  numToDate(decNumber(buf, off, size))
-
-const numToDate = num => num === null ? null : new Date(num * 1000)
-
-const decNumber = (buf, off, size) =>
-  buf[off] & 0x80 ? large.parse(buf.slice(off, off + size))
-  : decSmallNumber(buf, off, size)
-
-const nanNull = value => isNaN(value) ? null : value
-
-const decSmallNumber = (buf, off, size) =>
-  nanNull(parseInt(
-    buf.slice(off, off + size)
-      .toString('utf8').replace(/\0.*$/, '').trim(), 8))
-
-// the maximum encodable as a null-terminated octal, by field size
-const MAXNUM = {
-  12: 0o77777777777,
-  8: 0o7777777,
-}
-
-const encNumber = (buf, off, size, number) =>
-  number === null ? false :
-  number > MAXNUM[size] || number < 0
-    ? (large.encode(number, buf.slice(off, off + size)), true)
-    : (encSmallNumber(buf, off, size, number), false)
-
-const encSmallNumber = (buf, off, size, number) =>
-  buf.write(octalString(number, size), off, size, 'ascii')
-
-const octalString = (number, size) =>
-  padOctal(Math.floor(number).toString(8), size)
-
-const padOctal = (string, size) =>
-  (string.length === size - 1 ? string
-  : new Array(size - string.length - 1).join('0') + string + ' ') + '\0'
-
-const encDate = (buf, off, size, date) =>
-  date === null ? false :
-  encNumber(buf, off, size, date.getTime() / 1000)
-
-// enough to fill the longest string we've got
-const NULLS = new Array(156).join('\0')
-// pad with nulls, return true if it's longer or non-ascii
-const encString = (buf, off, size, string) =>
-  string === null ? false :
-  (buf.write(string + NULLS, off, size, 'utf8'),
-  string.length !== Buffer.byteLength(string) || string.length > size)
-
-module.exports = Header
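
For context on the two checksum loops above: a ustar checksum is the byte sum of the 512-byte block with the 8-byte cksum field (offsets 148-155) treated as ASCII spaces, which is why both the decode and encode paths seed the sum with 8 * 0x20 and skip that range. A minimal self-contained sketch of the same rule follows; the block/off names are illustrative, not taken from the file above.

const checksum = (block, off = 0) => {
  // eight spaces stand in for the cksum field itself
  let sum = 8 * 0x20
  for (let i = off; i < off + 148; i++) {
    sum += block[i]
  }
  // skip bytes 148-155 (the cksum field), then sum the rest of the block
  for (let i = off + 156; i < off + 512; i++) {
    sum += block[i]
  }
  return sum
}
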
diff --git a/node_modules/tar/lib/high-level-opt.js b/node_modules/tar/lib/high-level-opt.js
deleted file mode 100644
index 40e44180e1669..0000000000000
--- a/node_modules/tar/lib/high-level-opt.js
+++ /dev/null
@@ -1,29 +0,0 @@
-'use strict'
-
-// turn tar(1) style args like `C` into the more verbose things like `cwd`
-
-const argmap = new Map([
-  ['C', 'cwd'],
-  ['f', 'file'],
-  ['z', 'gzip'],
-  ['P', 'preservePaths'],
-  ['U', 'unlink'],
-  ['strip-components', 'strip'],
-  ['stripComponents', 'strip'],
-  ['keep-newer', 'newer'],
-  ['keepNewer', 'newer'],
-  ['keep-newer-files', 'newer'],
-  ['keepNewerFiles', 'newer'],
-  ['k', 'keep'],
-  ['keep-existing', 'keep'],
-  ['keepExisting', 'keep'],
-  ['m', 'noMtime'],
-  ['no-mtime', 'noMtime'],
-  ['p', 'preserveOwner'],
-  ['L', 'follow'],
-  ['h', 'follow'],
-])
-
-module.exports = opt => opt ? Object.keys(opt).map(k => [
-  argmap.has(k) ? argmap.get(k) : k, opt[k],
-]).reduce((set, kv) => (set[kv[0]] = kv[1], set), Object.create(null)) : {}
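
A hedged usage sketch of the mapping above: short tar(1)-style keys expand to their verbose names, while unrecognized keys pass through unchanged (the option values here are illustrative).

const hlo = require('./high-level-opt.js')

hlo({ C: '/some/dir', z: true, strict: true })
// => { cwd: '/some/dir', gzip: true, strict: true }
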
diff --git a/node_modules/tar/lib/large-numbers.js b/node_modules/tar/lib/large-numbers.js
deleted file mode 100644
index b11e72d996fde..0000000000000
--- a/node_modules/tar/lib/large-numbers.js
+++ /dev/null
@@ -1,104 +0,0 @@
-'use strict'
-// Tar can encode large and negative numbers using a leading byte of
-// 0xff for negative, and 0x80 for positive.
-
-const encode = (num, buf) => {
-  if (!Number.isSafeInteger(num)) {
-  // The number is so large that javascript cannot represent it with integer
-  // precision.
-    throw Error('cannot encode number outside of javascript safe integer range')
-  } else if (num < 0) {
-    encodeNegative(num, buf)
-  } else {
-    encodePositive(num, buf)
-  }
-  return buf
-}
-
-const encodePositive = (num, buf) => {
-  buf[0] = 0x80
-
-  for (var i = buf.length; i > 1; i--) {
-    buf[i - 1] = num & 0xff
-    num = Math.floor(num / 0x100)
-  }
-}
-
-const encodeNegative = (num, buf) => {
-  buf[0] = 0xff
-  var flipped = false
-  num = num * -1
-  for (var i = buf.length; i > 1; i--) {
-    var byte = num & 0xff
-    num = Math.floor(num / 0x100)
-    if (flipped) {
-      buf[i - 1] = onesComp(byte)
-    } else if (byte === 0) {
-      buf[i - 1] = 0
-    } else {
-      flipped = true
-      buf[i - 1] = twosComp(byte)
-    }
-  }
-}
-
-const parse = (buf) => {
-  const pre = buf[0]
-  const value = pre === 0x80 ? pos(buf.slice(1, buf.length))
-    : pre === 0xff ? twos(buf)
-    : null
-  if (value === null) {
-    throw Error('invalid base256 encoding')
-  }
-
-  if (!Number.isSafeInteger(value)) {
-  // The number is so large that javascript cannot represent it with integer
-  // precision.
-    throw Error('parsed number outside of javascript safe integer range')
-  }
-
-  return value
-}
-
-const twos = (buf) => {
-  var len = buf.length
-  var sum = 0
-  var flipped = false
-  for (var i = len - 1; i > -1; i--) {
-    var byte = buf[i]
-    var f
-    if (flipped) {
-      f = onesComp(byte)
-    } else if (byte === 0) {
-      f = byte
-    } else {
-      flipped = true
-      f = twosComp(byte)
-    }
-    if (f !== 0) {
-      sum -= f * Math.pow(256, len - i - 1)
-    }
-  }
-  return sum
-}
-
-const pos = (buf) => {
-  var len = buf.length
-  var sum = 0
-  for (var i = len - 1; i > -1; i--) {
-    var byte = buf[i]
-    if (byte !== 0) {
-      sum += byte * Math.pow(256, len - i - 1)
-    }
-  }
-  return sum
-}
-
-const onesComp = byte => (0xff ^ byte) & 0xff
-
-const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
-
-module.exports = {
-  encode,
-  parse,
-}
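
To make the encoding above concrete: values that overflow an 11-digit octal size field (anything above 0o77777777777) round-trip through the 0x80-prefixed base-256 form. A small sketch, assuming the module is loaded from the path above:

const { encode, parse } = require('./large-numbers.js')

const buf = Buffer.alloc(12)
encode(8589934592, buf)   // 8 GiB, one past the 12-byte octal field's maximum
console.log(buf[0])       // 0x80, the marker byte for a positive base-256 value
console.log(parse(buf))   // 8589934592
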
diff --git a/node_modules/tar/lib/list.js b/node_modules/tar/lib/list.js
deleted file mode 100644
index f2358c25410b5..0000000000000
--- a/node_modules/tar/lib/list.js
+++ /dev/null
@@ -1,139 +0,0 @@
-'use strict'
-
-// XXX: This shares a lot in common with extract.js
-// maybe some DRY opportunity here?
-
-// tar -t
-const hlo = require('./high-level-opt.js')
-const Parser = require('./parse.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const path = require('path')
-const stripSlash = require('./strip-trailing-slashes.js')
-
-module.exports = (opt_, files, cb) => {
-  if (typeof opt_ === 'function') {
-    cb = opt_, files = null, opt_ = {}
-  } else if (Array.isArray(opt_)) {
-    files = opt_, opt_ = {}
-  }
-
-  if (typeof files === 'function') {
-    cb = files, files = null
-  }
-
-  if (!files) {
-    files = []
-  } else {
-    files = Array.from(files)
-  }
-
-  const opt = hlo(opt_)
-
-  if (opt.sync && typeof cb === 'function') {
-    throw new TypeError('callback not supported for sync tar functions')
-  }
-
-  if (!opt.file && typeof cb === 'function') {
-    throw new TypeError('callback only supported with file option')
-  }
-
-  if (files.length) {
-    filesFilter(opt, files)
-  }
-
-  if (!opt.noResume) {
-    onentryFunction(opt)
-  }
-
-  return opt.file && opt.sync ? listFileSync(opt)
-    : opt.file ? listFile(opt, cb)
-    : list(opt)
-}
-
-const onentryFunction = opt => {
-  const onentry = opt.onentry
-  opt.onentry = onentry ? e => {
-    onentry(e)
-    e.resume()
-  } : e => e.resume()
-}
-
-// construct a filter that limits the file entries listed
-// include child entries if a dir is included
-const filesFilter = (opt, files) => {
-  const map = new Map(files.map(f => [stripSlash(f), true]))
-  const filter = opt.filter
-
-  const mapHas = (file, r) => {
-    const root = r || path.parse(file).root || '.'
-    const ret = file === root ? false
-      : map.has(file) ? map.get(file)
-      : mapHas(path.dirname(file), root)
-
-    map.set(file, ret)
-    return ret
-  }
-
-  opt.filter = filter
-    ? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
-    : file => mapHas(stripSlash(file))
-}
-
-const listFileSync = opt => {
-  const p = list(opt)
-  const file = opt.file
-  let threw = true
-  let fd
-  try {
-    const stat = fs.statSync(file)
-    const readSize = opt.maxReadSize || 16 * 1024 * 1024
-    if (stat.size < readSize) {
-      p.end(fs.readFileSync(file))
-    } else {
-      let pos = 0
-      const buf = Buffer.allocUnsafe(readSize)
-      fd = fs.openSync(file, 'r')
-      while (pos < stat.size) {
-        const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
-        pos += bytesRead
-        p.write(buf.slice(0, bytesRead))
-      }
-      p.end()
-    }
-    threw = false
-  } finally {
-    if (threw && fd) {
-      try {
-        fs.closeSync(fd)
-      } catch (er) {}
-    }
-  }
-}
-
-const listFile = (opt, cb) => {
-  const parse = new Parser(opt)
-  const readSize = opt.maxReadSize || 16 * 1024 * 1024
-
-  const file = opt.file
-  const p = new Promise((resolve, reject) => {
-    parse.on('error', reject)
-    parse.on('end', resolve)
-
-    fs.stat(file, (er, stat) => {
-      if (er) {
-        reject(er)
-      } else {
-        const stream = new fsm.ReadStream(file, {
-          readSize: readSize,
-          size: stat.size,
-        })
-        stream.on('error', reject)
-        stream.pipe(parse)
-      }
-    })
-  })
-  return cb ? p.then(cb, cb) : p
-}
-
-const list = opt => new Parser(opt)
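
A hedged usage sketch of the listing entry point above, assuming an archive named archive.tar: with a file option, no sync flag, and no callback it returns a promise, and the default onentry wrapper resumes each entry after the handler runs.

const t = require('./list.js')

t({ file: 'archive.tar', onentry: entry => console.log(entry.path) })
  .then(() => console.log('listing complete'))
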
diff --git a/node_modules/tar/lib/mkdir.js b/node_modules/tar/lib/mkdir.js
deleted file mode 100644
index 8ee8de7852d12..0000000000000
--- a/node_modules/tar/lib/mkdir.js
+++ /dev/null
@@ -1,229 +0,0 @@
-'use strict'
-// wrapper around mkdirp for tar's needs.
-
-// TODO: This should probably be a class, not functionally
-// passing around state in a gazillion args.
-
-const mkdirp = require('mkdirp')
-const fs = require('fs')
-const path = require('path')
-const chownr = require('chownr')
-const normPath = require('./normalize-windows-path.js')
-
-class SymlinkError extends Error {
-  constructor (symlink, path) {
-    super('Cannot extract through symbolic link')
-    this.path = path
-    this.symlink = symlink
-  }
-
-  get name () {
-    return 'SymlinkError'
-  }
-}
-
-class CwdError extends Error {
-  constructor (path, code) {
-    super(code + ': Cannot cd into \'' + path + '\'')
-    this.path = path
-    this.code = code
-  }
-
-  get name () {
-    return 'CwdError'
-  }
-}
-
-const cGet = (cache, key) => cache.get(normPath(key))
-const cSet = (cache, key, val) => cache.set(normPath(key), val)
-
-const checkCwd = (dir, cb) => {
-  fs.stat(dir, (er, st) => {
-    if (er || !st.isDirectory()) {
-      er = new CwdError(dir, er && er.code || 'ENOTDIR')
-    }
-    cb(er)
-  })
-}
-
-module.exports = (dir, opt, cb) => {
-  dir = normPath(dir)
-
-  // if there's any overlap between mask and mode,
-  // then we'll need an explicit chmod
-  const umask = opt.umask
-  const mode = opt.mode | 0o0700
-  const needChmod = (mode & umask) !== 0
-
-  const uid = opt.uid
-  const gid = opt.gid
-  const doChown = typeof uid === 'number' &&
-    typeof gid === 'number' &&
-    (uid !== opt.processUid || gid !== opt.processGid)
-
-  const preserve = opt.preserve
-  const unlink = opt.unlink
-  const cache = opt.cache
-  const cwd = normPath(opt.cwd)
-
-  const done = (er, created) => {
-    if (er) {
-      cb(er)
-    } else {
-      cSet(cache, dir, true)
-      if (created && doChown) {
-        chownr(created, uid, gid, er => done(er))
-      } else if (needChmod) {
-        fs.chmod(dir, mode, cb)
-      } else {
-        cb()
-      }
-    }
-  }
-
-  if (cache && cGet(cache, dir) === true) {
-    return done()
-  }
-
-  if (dir === cwd) {
-    return checkCwd(dir, done)
-  }
-
-  if (preserve) {
-    return mkdirp(dir, { mode }).then(made => done(null, made), done)
-  }
-
-  const sub = normPath(path.relative(cwd, dir))
-  const parts = sub.split('/')
-  mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
-}
-
-const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
-  if (!parts.length) {
-    return cb(null, created)
-  }
-  const p = parts.shift()
-  const part = normPath(path.resolve(base + '/' + p))
-  if (cGet(cache, part)) {
-    return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
-  }
-  fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
-}
-
-const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
-  if (er) {
-    fs.lstat(part, (statEr, st) => {
-      if (statEr) {
-        statEr.path = statEr.path && normPath(statEr.path)
-        cb(statEr)
-      } else if (st.isDirectory()) {
-        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
-      } else if (unlink) {
-        fs.unlink(part, er => {
-          if (er) {
-            return cb(er)
-          }
-          fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
-        })
-      } else if (st.isSymbolicLink()) {
-        return cb(new SymlinkError(part, part + '/' + parts.join('/')))
-      } else {
-        cb(er)
-      }
-    })
-  } else {
-    created = created || part
-    mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
-  }
-}
-
-const checkCwdSync = dir => {
-  let ok = false
-  let code = 'ENOTDIR'
-  try {
-    ok = fs.statSync(dir).isDirectory()
-  } catch (er) {
-    code = er.code
-  } finally {
-    if (!ok) {
-      throw new CwdError(dir, code)
-    }
-  }
-}
-
-module.exports.sync = (dir, opt) => {
-  dir = normPath(dir)
-  // if there's any overlap between mask and mode,
-  // then we'll need an explicit chmod
-  const umask = opt.umask
-  const mode = opt.mode | 0o0700
-  const needChmod = (mode & umask) !== 0
-
-  const uid = opt.uid
-  const gid = opt.gid
-  const doChown = typeof uid === 'number' &&
-    typeof gid === 'number' &&
-    (uid !== opt.processUid || gid !== opt.processGid)
-
-  const preserve = opt.preserve
-  const unlink = opt.unlink
-  const cache = opt.cache
-  const cwd = normPath(opt.cwd)
-
-  const done = (created) => {
-    cSet(cache, dir, true)
-    if (created && doChown) {
-      chownr.sync(created, uid, gid)
-    }
-    if (needChmod) {
-      fs.chmodSync(dir, mode)
-    }
-  }
-
-  if (cache && cGet(cache, dir) === true) {
-    return done()
-  }
-
-  if (dir === cwd) {
-    checkCwdSync(cwd)
-    return done()
-  }
-
-  if (preserve) {
-    return done(mkdirp.sync(dir, mode))
-  }
-
-  const sub = normPath(path.relative(cwd, dir))
-  const parts = sub.split('/')
-  let created = null
-  for (let p = parts.shift(), part = cwd;
-    p && (part += '/' + p);
-    p = parts.shift()) {
-    part = normPath(path.resolve(part))
-    if (cGet(cache, part)) {
-      continue
-    }
-
-    try {
-      fs.mkdirSync(part, mode)
-      created = created || part
-      cSet(cache, part, true)
-    } catch (er) {
-      const st = fs.lstatSync(part)
-      if (st.isDirectory()) {
-        cSet(cache, part, true)
-        continue
-      } else if (unlink) {
-        fs.unlinkSync(part)
-        fs.mkdirSync(part, mode)
-        created = created || part
-        cSet(cache, part, true)
-        continue
-      } else if (st.isSymbolicLink()) {
-        return new SymlinkError(part, part + '/' + parts.join('/'))
-      }
-    }
-  }
-
-  return done(created)
-}
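
A hedged sketch of calling the async wrapper above during an extract; the paths and mode/umask values are illustrative, and the cache Map is required because completed directories are memoized per extraction.

const mkdir = require('./mkdir.js')

mkdir('/tmp/extract/a/b/c', {
  cwd: '/tmp/extract',   // directories are created relative to the extraction cwd
  mode: 0o755,
  umask: 0o022,
  cache: new Map(),      // shared across one extraction so each dir is made once
}, er => {
  if (er) {
    throw er             // CwdError / SymlinkError surface here
  }
})
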
diff --git a/node_modules/tar/lib/mode-fix.js b/node_modules/tar/lib/mode-fix.js
deleted file mode 100644
index 42f1d6e657b1a..0000000000000
--- a/node_modules/tar/lib/mode-fix.js
+++ /dev/null
@@ -1,27 +0,0 @@
-'use strict'
-module.exports = (mode, isDir, portable) => {
-  mode &= 0o7777
-
-  // in portable mode, use the minimum reasonable umask
-  // if this system creates files with 0o664 by default
-  // (as some linux distros do), then we'll write the
-  // archive with 0o644 instead.  Also, don't ever create
-  // a file that is not readable/writable by the owner.
-  if (portable) {
-    mode = (mode | 0o600) & ~0o22
-  }
-
-  // if dirs are readable, then they should be listable
-  if (isDir) {
-    if (mode & 0o400) {
-      mode |= 0o100
-    }
-    if (mode & 0o40) {
-      mode |= 0o10
-    }
-    if (mode & 0o4) {
-      mode |= 0o1
-    }
-  }
-  return mode
-}
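
Two concrete data points for the rules above (modes shown in octal):

const modeFix = require('./mode-fix.js')

modeFix(0o664, false, true).toString(8)   // '644': portable strips group/other write
modeFix(0o600, true, false).toString(8)   // '700': a readable dir gains the execute bit
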
diff --git a/node_modules/tar/lib/normalize-unicode.js b/node_modules/tar/lib/normalize-unicode.js
deleted file mode 100644
index 79e285ab30d57..0000000000000
--- a/node_modules/tar/lib/normalize-unicode.js
+++ /dev/null
@@ -1,12 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-const normalizeCache = Object.create(null)
-const { hasOwnProperty } = Object.prototype
-module.exports = s => {
-  if (!hasOwnProperty.call(normalizeCache, s)) {
-    normalizeCache[s] = s.normalize('NFD')
-  }
-  return normalizeCache[s]
-}
diff --git a/node_modules/tar/lib/normalize-windows-path.js b/node_modules/tar/lib/normalize-windows-path.js
deleted file mode 100644
index eb13ba01b7b04..0000000000000
--- a/node_modules/tar/lib/normalize-windows-path.js
+++ /dev/null
@@ -1,8 +0,0 @@
-// on windows, either \ or / are valid directory separators.
-// on unix, \ is a valid character in filenames.
-// so, on windows, and only on windows, we replace all \ chars with /,
-// so that we can use / as our one and only directory separator char.
-
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
-module.exports = platform !== 'win32' ? p => p
-  : p => p && p.replace(/\\/g, '/')
diff --git a/node_modules/tar/lib/pack.js b/node_modules/tar/lib/pack.js
deleted file mode 100644
index d533a068f579f..0000000000000
--- a/node_modules/tar/lib/pack.js
+++ /dev/null
@@ -1,432 +0,0 @@
-'use strict'
-
-// A readable tar stream creator
-// Technically, this is a transform stream that you write paths into,
-// and tar format comes out of.
-// The `add()` method is like `write()` but returns this,
-// and end() return `this` as well, so you can
-// do `new Pack(opt).add('files').add('dir').end().pipe(output)
-// You could also do something like:
-// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
-
-class PackJob {
-  constructor (path, absolute) {
-    this.path = path || './'
-    this.absolute = absolute
-    this.entry = null
-    this.stat = null
-    this.readdir = null
-    this.pending = false
-    this.ignore = false
-    this.piped = false
-  }
-}
-
-const { Minipass } = require('minipass')
-const zlib = require('minizlib')
-const ReadEntry = require('./read-entry.js')
-const WriteEntry = require('./write-entry.js')
-const WriteEntrySync = WriteEntry.Sync
-const WriteEntryTar = WriteEntry.Tar
-const Yallist = require('yallist')
-const EOF = Buffer.alloc(1024)
-const ONSTAT = Symbol('onStat')
-const ENDED = Symbol('ended')
-const QUEUE = Symbol('queue')
-const CURRENT = Symbol('current')
-const PROCESS = Symbol('process')
-const PROCESSING = Symbol('processing')
-const PROCESSJOB = Symbol('processJob')
-const JOBS = Symbol('jobs')
-const JOBDONE = Symbol('jobDone')
-const ADDFSENTRY = Symbol('addFSEntry')
-const ADDTARENTRY = Symbol('addTarEntry')
-const STAT = Symbol('stat')
-const READDIR = Symbol('readdir')
-const ONREADDIR = Symbol('onreaddir')
-const PIPE = Symbol('pipe')
-const ENTRY = Symbol('entry')
-const ENTRYOPT = Symbol('entryOpt')
-const WRITEENTRYCLASS = Symbol('writeEntryClass')
-const WRITE = Symbol('write')
-const ONDRAIN = Symbol('ondrain')
-
-const fs = require('fs')
-const path = require('path')
-const warner = require('./warn-mixin.js')
-const normPath = require('./normalize-windows-path.js')
-
-const Pack = warner(class Pack extends Minipass {
-  constructor (opt) {
-    super(opt)
-    opt = opt || Object.create(null)
-    this.opt = opt
-    this.file = opt.file || ''
-    this.cwd = opt.cwd || process.cwd()
-    this.maxReadSize = opt.maxReadSize
-    this.preservePaths = !!opt.preservePaths
-    this.strict = !!opt.strict
-    this.noPax = !!opt.noPax
-    this.prefix = normPath(opt.prefix || '')
-    this.linkCache = opt.linkCache || new Map()
-    this.statCache = opt.statCache || new Map()
-    this.readdirCache = opt.readdirCache || new Map()
-
-    this[WRITEENTRYCLASS] = WriteEntry
-    if (typeof opt.onwarn === 'function') {
-      this.on('warn', opt.onwarn)
-    }
-
-    this.portable = !!opt.portable
-    this.zip = null
-
-    if (opt.gzip || opt.brotli) {
-      if (opt.gzip && opt.brotli) {
-        throw new TypeError('gzip and brotli are mutually exclusive')
-      }
-      if (opt.gzip) {
-        if (typeof opt.gzip !== 'object') {
-          opt.gzip = {}
-        }
-        if (this.portable) {
-          opt.gzip.portable = true
-        }
-        this.zip = new zlib.Gzip(opt.gzip)
-      }
-      if (opt.brotli) {
-        if (typeof opt.brotli !== 'object') {
-          opt.brotli = {}
-        }
-        this.zip = new zlib.BrotliCompress(opt.brotli)
-      }
-      this.zip.on('data', chunk => super.write(chunk))
-      this.zip.on('end', _ => super.end())
-      this.zip.on('drain', _ => this[ONDRAIN]())
-      this.on('resume', _ => this.zip.resume())
-    } else {
-      this.on('drain', this[ONDRAIN])
-    }
-
-    this.noDirRecurse = !!opt.noDirRecurse
-    this.follow = !!opt.follow
-    this.noMtime = !!opt.noMtime
-    this.mtime = opt.mtime || null
-
-    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
-
-    this[QUEUE] = new Yallist()
-    this[JOBS] = 0
-    this.jobs = +opt.jobs || 4
-    this[PROCESSING] = false
-    this[ENDED] = false
-  }
-
-  [WRITE] (chunk) {
-    return super.write(chunk)
-  }
-
-  add (path) {
-    this.write(path)
-    return this
-  }
-
-  end (path) {
-    if (path) {
-      this.write(path)
-    }
-    this[ENDED] = true
-    this[PROCESS]()
-    return this
-  }
-
-  write (path) {
-    if (this[ENDED]) {
-      throw new Error('write after end')
-    }
-
-    if (path instanceof ReadEntry) {
-      this[ADDTARENTRY](path)
-    } else {
-      this[ADDFSENTRY](path)
-    }
-    return this.flowing
-  }
-
-  [ADDTARENTRY] (p) {
-    const absolute = normPath(path.resolve(this.cwd, p.path))
-    // in this case, we don't have to wait for the stat
-    if (!this.filter(p.path, p)) {
-      p.resume()
-    } else {
-      const job = new PackJob(p.path, absolute, false)
-      job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
-      job.entry.on('end', _ => this[JOBDONE](job))
-      this[JOBS] += 1
-      this[QUEUE].push(job)
-    }
-
-    this[PROCESS]()
-  }
-
-  [ADDFSENTRY] (p) {
-    const absolute = normPath(path.resolve(this.cwd, p))
-    this[QUEUE].push(new PackJob(p, absolute))
-    this[PROCESS]()
-  }
-
-  [STAT] (job) {
-    job.pending = true
-    this[JOBS] += 1
-    const stat = this.follow ? 'stat' : 'lstat'
-    fs[stat](job.absolute, (er, stat) => {
-      job.pending = false
-      this[JOBS] -= 1
-      if (er) {
-        this.emit('error', er)
-      } else {
-        this[ONSTAT](job, stat)
-      }
-    })
-  }
-
-  [ONSTAT] (job, stat) {
-    this.statCache.set(job.absolute, stat)
-    job.stat = stat
-
-    // now we have the stat, we can filter it.
-    if (!this.filter(job.path, stat)) {
-      job.ignore = true
-    }
-
-    this[PROCESS]()
-  }
-
-  [READDIR] (job) {
-    job.pending = true
-    this[JOBS] += 1
-    fs.readdir(job.absolute, (er, entries) => {
-      job.pending = false
-      this[JOBS] -= 1
-      if (er) {
-        return this.emit('error', er)
-      }
-      this[ONREADDIR](job, entries)
-    })
-  }
-
-  [ONREADDIR] (job, entries) {
-    this.readdirCache.set(job.absolute, entries)
-    job.readdir = entries
-    this[PROCESS]()
-  }
-
-  [PROCESS] () {
-    if (this[PROCESSING]) {
-      return
-    }
-
-    this[PROCESSING] = true
-    for (let w = this[QUEUE].head;
-      w !== null && this[JOBS] < this.jobs;
-      w = w.next) {
-      this[PROCESSJOB](w.value)
-      if (w.value.ignore) {
-        const p = w.next
-        this[QUEUE].removeNode(w)
-        w.next = p
-      }
-    }
-
-    this[PROCESSING] = false
-
-    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
-      if (this.zip) {
-        this.zip.end(EOF)
-      } else {
-        super.write(EOF)
-        super.end()
-      }
-    }
-  }
-
-  get [CURRENT] () {
-    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
-  }
-
-  [JOBDONE] (job) {
-    this[QUEUE].shift()
-    this[JOBS] -= 1
-    this[PROCESS]()
-  }
-
-  [PROCESSJOB] (job) {
-    if (job.pending) {
-      return
-    }
-
-    if (job.entry) {
-      if (job === this[CURRENT] && !job.piped) {
-        this[PIPE](job)
-      }
-      return
-    }
-
-    if (!job.stat) {
-      if (this.statCache.has(job.absolute)) {
-        this[ONSTAT](job, this.statCache.get(job.absolute))
-      } else {
-        this[STAT](job)
-      }
-    }
-    if (!job.stat) {
-      return
-    }
-
-    // filtered out!
-    if (job.ignore) {
-      return
-    }
-
-    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
-      if (this.readdirCache.has(job.absolute)) {
-        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
-      } else {
-        this[READDIR](job)
-      }
-      if (!job.readdir) {
-        return
-      }
-    }
-
-    // we know it doesn't have an entry, because that got checked above
-    job.entry = this[ENTRY](job)
-    if (!job.entry) {
-      job.ignore = true
-      return
-    }
-
-    if (job === this[CURRENT] && !job.piped) {
-      this[PIPE](job)
-    }
-  }
-
-  [ENTRYOPT] (job) {
-    return {
-      onwarn: (code, msg, data) => this.warn(code, msg, data),
-      noPax: this.noPax,
-      cwd: this.cwd,
-      absolute: job.absolute,
-      preservePaths: this.preservePaths,
-      maxReadSize: this.maxReadSize,
-      strict: this.strict,
-      portable: this.portable,
-      linkCache: this.linkCache,
-      statCache: this.statCache,
-      noMtime: this.noMtime,
-      mtime: this.mtime,
-      prefix: this.prefix,
-    }
-  }
-
-  [ENTRY] (job) {
-    this[JOBS] += 1
-    try {
-      return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
-        .on('end', () => this[JOBDONE](job))
-        .on('error', er => this.emit('error', er))
-    } catch (er) {
-      this.emit('error', er)
-    }
-  }
-
-  [ONDRAIN] () {
-    if (this[CURRENT] && this[CURRENT].entry) {
-      this[CURRENT].entry.resume()
-    }
-  }
-
-  // like .pipe() but using super, because our write() is special
-  [PIPE] (job) {
-    job.piped = true
-
-    if (job.readdir) {
-      job.readdir.forEach(entry => {
-        const p = job.path
-        const base = p === './' ? '' : p.replace(/\/*$/, '/')
-        this[ADDFSENTRY](base + entry)
-      })
-    }
-
-    const source = job.entry
-    const zip = this.zip
-
-    if (zip) {
-      source.on('data', chunk => {
-        if (!zip.write(chunk)) {
-          source.pause()
-        }
-      })
-    } else {
-      source.on('data', chunk => {
-        if (!super.write(chunk)) {
-          source.pause()
-        }
-      })
-    }
-  }
-
-  pause () {
-    if (this.zip) {
-      this.zip.pause()
-    }
-    return super.pause()
-  }
-})
-
-class PackSync extends Pack {
-  constructor (opt) {
-    super(opt)
-    this[WRITEENTRYCLASS] = WriteEntrySync
-  }
-
-  // pause/resume are no-ops in sync streams.
-  pause () {}
-  resume () {}
-
-  [STAT] (job) {
-    const stat = this.follow ? 'statSync' : 'lstatSync'
-    this[ONSTAT](job, fs[stat](job.absolute))
-  }
-
-  [READDIR] (job, stat) {
-    this[ONREADDIR](job, fs.readdirSync(job.absolute))
-  }
-
-  // gotta get it all in this tick
-  [PIPE] (job) {
-    const source = job.entry
-    const zip = this.zip
-
-    if (job.readdir) {
-      job.readdir.forEach(entry => {
-        const p = job.path
-        const base = p === './' ? '' : p.replace(/\/*$/, '/')
-        this[ADDFSENTRY](base + entry)
-      })
-    }
-
-    if (zip) {
-      source.on('data', chunk => {
-        zip.write(chunk)
-      })
-    } else {
-      source.on('data', chunk => {
-        super[WRITE](chunk)
-      })
-    }
-  }
-}
-
-Pack.Sync = PackSync
-
-module.exports = Pack
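
A hedged usage sketch matching the docstring at the top of the class: paths written into the Pack stream come out as tar bytes, so the result can be piped straight to a file (the file names here are illustrative).

const Pack = require('./pack.js')
const fs = require('fs')

new Pack({ cwd: process.cwd() })
  .add('package.json')
  .add('lib')
  .end()
  .pipe(fs.createWriteStream('out.tar'))
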
diff --git a/node_modules/tar/lib/parse.js b/node_modules/tar/lib/parse.js
deleted file mode 100644
index 94e53042fad56..0000000000000
--- a/node_modules/tar/lib/parse.js
+++ /dev/null
@@ -1,552 +0,0 @@
-'use strict'
-
-// this[BUFFER] is the remainder of a chunk if we're waiting for
-// the full 512 bytes of a header to come in.  We will Buffer.concat()
-// it to the next write(), which is a mem copy, but a small one.
-//
-// this[QUEUE] is a Yallist of entries that haven't been emitted
-// yet this can only get filled up if the user keeps write()ing after
-// a write() returns false, or does a write() with more than one entry
-//
-// We don't buffer chunks, we always parse them and either create an
-// entry, or push it into the active entry.  The ReadEntry class knows
-// to throw data away if .ignore=true
-//
-// Shift entry off the buffer when it emits 'end', and emit 'entry' for
-// the next one in the list.
-//
-// At any time, we're pushing body chunks into the entry at WRITEENTRY,
-// and waiting for 'end' on the entry at READENTRY
-//
-// ignored entries get .resume() called on them straight away
-
-const warner = require('./warn-mixin.js')
-const Header = require('./header.js')
-const EE = require('events')
-const Yallist = require('yallist')
-const maxMetaEntrySize = 1024 * 1024
-const Entry = require('./read-entry.js')
-const Pax = require('./pax.js')
-const zlib = require('minizlib')
-const { nextTick } = require('process')
-
-const gzipHeader = Buffer.from([0x1f, 0x8b])
-const STATE = Symbol('state')
-const WRITEENTRY = Symbol('writeEntry')
-const READENTRY = Symbol('readEntry')
-const NEXTENTRY = Symbol('nextEntry')
-const PROCESSENTRY = Symbol('processEntry')
-const EX = Symbol('extendedHeader')
-const GEX = Symbol('globalExtendedHeader')
-const META = Symbol('meta')
-const EMITMETA = Symbol('emitMeta')
-const BUFFER = Symbol('buffer')
-const QUEUE = Symbol('queue')
-const ENDED = Symbol('ended')
-const EMITTEDEND = Symbol('emittedEnd')
-const EMIT = Symbol('emit')
-const UNZIP = Symbol('unzip')
-const CONSUMECHUNK = Symbol('consumeChunk')
-const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
-const CONSUMEBODY = Symbol('consumeBody')
-const CONSUMEMETA = Symbol('consumeMeta')
-const CONSUMEHEADER = Symbol('consumeHeader')
-const CONSUMING = Symbol('consuming')
-const BUFFERCONCAT = Symbol('bufferConcat')
-const MAYBEEND = Symbol('maybeEnd')
-const WRITING = Symbol('writing')
-const ABORTED = Symbol('aborted')
-const DONE = Symbol('onDone')
-const SAW_VALID_ENTRY = Symbol('sawValidEntry')
-const SAW_NULL_BLOCK = Symbol('sawNullBlock')
-const SAW_EOF = Symbol('sawEOF')
-const CLOSESTREAM = Symbol('closeStream')
-
-const noop = _ => true
-
-module.exports = warner(class Parser extends EE {
-  constructor (opt) {
-    opt = opt || {}
-    super(opt)
-
-    this.file = opt.file || ''
-
-    // set to boolean false when an entry starts.  1024 bytes of \0
-    // is technically a valid tarball, albeit a boring one.
-    this[SAW_VALID_ENTRY] = null
-
-    // these BADARCHIVE errors can't be detected early. listen on DONE.
-    this.on(DONE, _ => {
-      if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
-        // either less than 1 block of data, or all entries were invalid.
-        // Either way, probably not even a tarball.
-        this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
-      }
-    })
-
-    if (opt.ondone) {
-      this.on(DONE, opt.ondone)
-    } else {
-      this.on(DONE, _ => {
-        this.emit('prefinish')
-        this.emit('finish')
-        this.emit('end')
-      })
-    }
-
-    this.strict = !!opt.strict
-    this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
-    this.filter = typeof opt.filter === 'function' ? opt.filter : noop
-    // Unlike gzip, brotli doesn't have any magic bytes to identify it
-    // Users need to explicitly tell us they're extracting a brotli file
-    // Or we infer from the file extension
-    const isTBR = (opt.file && (
-        opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')))
-    // if it's a tbr file it MIGHT be brotli, but we don't know until
-    // we look at it and verify it's not a valid tar file.
-    this.brotli = !opt.gzip && opt.brotli !== undefined ? opt.brotli
-      : isTBR ? undefined
-      : false
-
-    // have to set this so that streams are ok piping into it
-    this.writable = true
-    this.readable = false
-
-    this[QUEUE] = new Yallist()
-    this[BUFFER] = null
-    this[READENTRY] = null
-    this[WRITEENTRY] = null
-    this[STATE] = 'begin'
-    this[META] = ''
-    this[EX] = null
-    this[GEX] = null
-    this[ENDED] = false
-    this[UNZIP] = null
-    this[ABORTED] = false
-    this[SAW_NULL_BLOCK] = false
-    this[SAW_EOF] = false
-
-    this.on('end', () => this[CLOSESTREAM]())
-
-    if (typeof opt.onwarn === 'function') {
-      this.on('warn', opt.onwarn)
-    }
-    if (typeof opt.onentry === 'function') {
-      this.on('entry', opt.onentry)
-    }
-  }
-
-  [CONSUMEHEADER] (chunk, position) {
-    if (this[SAW_VALID_ENTRY] === null) {
-      this[SAW_VALID_ENTRY] = false
-    }
-    let header
-    try {
-      header = new Header(chunk, position, this[EX], this[GEX])
-    } catch (er) {
-      return this.warn('TAR_ENTRY_INVALID', er)
-    }
-
-    if (header.nullBlock) {
-      if (this[SAW_NULL_BLOCK]) {
-        this[SAW_EOF] = true
-        // ending an archive with no entries.  pointless, but legal.
-        if (this[STATE] === 'begin') {
-          this[STATE] = 'header'
-        }
-        this[EMIT]('eof')
-      } else {
-        this[SAW_NULL_BLOCK] = true
-        this[EMIT]('nullBlock')
-      }
-    } else {
-      this[SAW_NULL_BLOCK] = false
-      if (!header.cksumValid) {
-        this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header })
-      } else if (!header.path) {
-        this.warn('TAR_ENTRY_INVALID', 'path is required', { header })
-      } else {
-        const type = header.type
-        if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) {
-          this.warn('TAR_ENTRY_INVALID', 'linkpath required', { header })
-        } else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath) {
-          this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { header })
-        } else {
-          const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
-
-          // we do this for meta & ignored entries as well, because they
-          // are still valid tar, or else we wouldn't know to ignore them
-          if (!this[SAW_VALID_ENTRY]) {
-            if (entry.remain) {
-              // this might be the one!
-              const onend = () => {
-                if (!entry.invalid) {
-                  this[SAW_VALID_ENTRY] = true
-                }
-              }
-              entry.on('end', onend)
-            } else {
-              this[SAW_VALID_ENTRY] = true
-            }
-          }
-
-          if (entry.meta) {
-            if (entry.size > this.maxMetaEntrySize) {
-              entry.ignore = true
-              this[EMIT]('ignoredEntry', entry)
-              this[STATE] = 'ignore'
-              entry.resume()
-            } else if (entry.size > 0) {
-              this[META] = ''
-              entry.on('data', c => this[META] += c)
-              this[STATE] = 'meta'
-            }
-          } else {
-            this[EX] = null
-            entry.ignore = entry.ignore || !this.filter(entry.path, entry)
-
-            if (entry.ignore) {
-              // probably valid, just not something we care about
-              this[EMIT]('ignoredEntry', entry)
-              this[STATE] = entry.remain ? 'ignore' : 'header'
-              entry.resume()
-            } else {
-              if (entry.remain) {
-                this[STATE] = 'body'
-              } else {
-                this[STATE] = 'header'
-                entry.end()
-              }
-
-              if (!this[READENTRY]) {
-                this[QUEUE].push(entry)
-                this[NEXTENTRY]()
-              } else {
-                this[QUEUE].push(entry)
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-
-  [CLOSESTREAM] () {
-    nextTick(() => this.emit('close'))
-  }
-
-  [PROCESSENTRY] (entry) {
-    let go = true
-
-    if (!entry) {
-      this[READENTRY] = null
-      go = false
-    } else if (Array.isArray(entry)) {
-      this.emit.apply(this, entry)
-    } else {
-      this[READENTRY] = entry
-      this.emit('entry', entry)
-      if (!entry.emittedEnd) {
-        entry.on('end', _ => this[NEXTENTRY]())
-        go = false
-      }
-    }
-
-    return go
-  }
-
-  [NEXTENTRY] () {
-    do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
-
-    if (!this[QUEUE].length) {
-      // At this point, there's nothing in the queue, but we may have an
-      // entry which is being consumed (readEntry).
-      // If we don't, then we definitely can handle more data.
-      // If we do, and either it's flowing, or it has never had any data
-      // written to it, then it needs more.
-      // The only other possibility is that it has returned false from a
-      // write() call, so we wait for the next drain to continue.
-      const re = this[READENTRY]
-      const drainNow = !re || re.flowing || re.size === re.remain
-      if (drainNow) {
-        if (!this[WRITING]) {
-          this.emit('drain')
-        }
-      } else {
-        re.once('drain', _ => this.emit('drain'))
-      }
-    }
-  }
-
-  [CONSUMEBODY] (chunk, position) {
-    // write up to but no more than writeEntry.blockRemain
-    const entry = this[WRITEENTRY]
-    const br = entry.blockRemain
-    const c = (br >= chunk.length && position === 0) ? chunk
-      : chunk.slice(position, position + br)
-
-    entry.write(c)
-
-    if (!entry.blockRemain) {
-      this[STATE] = 'header'
-      this[WRITEENTRY] = null
-      entry.end()
-    }
-
-    return c.length
-  }
-
-  [CONSUMEMETA] (chunk, position) {
-    const entry = this[WRITEENTRY]
-    const ret = this[CONSUMEBODY](chunk, position)
-
-    // if we finished, then the entry is reset
-    if (!this[WRITEENTRY]) {
-      this[EMITMETA](entry)
-    }
-
-    return ret
-  }
-
-  [EMIT] (ev, data, extra) {
-    if (!this[QUEUE].length && !this[READENTRY]) {
-      this.emit(ev, data, extra)
-    } else {
-      this[QUEUE].push([ev, data, extra])
-    }
-  }
-
-  [EMITMETA] (entry) {
-    this[EMIT]('meta', this[META])
-    switch (entry.type) {
-      case 'ExtendedHeader':
-      case 'OldExtendedHeader':
-        this[EX] = Pax.parse(this[META], this[EX], false)
-        break
-
-      case 'GlobalExtendedHeader':
-        this[GEX] = Pax.parse(this[META], this[GEX], true)
-        break
-
-      case 'NextFileHasLongPath':
-      case 'OldGnuLongPath':
-        this[EX] = this[EX] || Object.create(null)
-        this[EX].path = this[META].replace(/\0.*/, '')
-        break
-
-      case 'NextFileHasLongLinkpath':
-        this[EX] = this[EX] || Object.create(null)
-        this[EX].linkpath = this[META].replace(/\0.*/, '')
-        break
-
-      /* istanbul ignore next */
-      default: throw new Error('unknown meta: ' + entry.type)
-    }
-  }
-
-  abort (error) {
-    this[ABORTED] = true
-    this.emit('abort', error)
-    // always throws, even in non-strict mode
-    this.warn('TAR_ABORT', error, { recoverable: false })
-  }
-
-  write (chunk) {
-    if (this[ABORTED]) {
-      return
-    }
-
-    // first write, might be gzipped
-    const needSniff = this[UNZIP] === null ||
-      this.brotli === undefined && this[UNZIP] === false
-    if (needSniff && chunk) {
-      if (this[BUFFER]) {
-        chunk = Buffer.concat([this[BUFFER], chunk])
-        this[BUFFER] = null
-      }
-      if (chunk.length < gzipHeader.length) {
-        this[BUFFER] = chunk
-        return true
-      }
-
-      // look for gzip header
-      for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
-        if (chunk[i] !== gzipHeader[i]) {
-          this[UNZIP] = false
-        }
-      }
-
-      const maybeBrotli = this.brotli === undefined
-      if (this[UNZIP] === false && maybeBrotli) {
-        // read the first header to see if it's a valid tar file. If so,
-        // we can safely assume that it's not actually brotli, despite the
-        // .tbr or .tar.br file extension.
-        // if we ended before getting a full chunk, yes, def brotli
-        if (chunk.length < 512) {
-          if (this[ENDED]) {
-            this.brotli = true
-          } else {
-            this[BUFFER] = chunk
-            return true
-          }
-        } else {
-          // if it's tar, it's pretty reliably not brotli, chances of
-          // that happening are astronomical.
-          try {
-            new Header(chunk.slice(0, 512))
-            this.brotli = false
-          } catch (_) {
-            this.brotli = true
-          }
-        }
-      }
-
-      if (this[UNZIP] === null || (this[UNZIP] === false && this.brotli)) {
-        const ended = this[ENDED]
-        this[ENDED] = false
-        this[UNZIP] = this[UNZIP] === null
-          ? new zlib.Unzip()
-          : new zlib.BrotliDecompress()
-        this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
-        this[UNZIP].on('error', er => this.abort(er))
-        this[UNZIP].on('end', _ => {
-          this[ENDED] = true
-          this[CONSUMECHUNK]()
-        })
-        this[WRITING] = true
-        const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
-        this[WRITING] = false
-        return ret
-      }
-    }
-
-    this[WRITING] = true
-    if (this[UNZIP]) {
-      this[UNZIP].write(chunk)
-    } else {
-      this[CONSUMECHUNK](chunk)
-    }
-    this[WRITING] = false
-
-    // return false if there's a queue, or if the current entry isn't flowing
-    const ret =
-      this[QUEUE].length ? false :
-      this[READENTRY] ? this[READENTRY].flowing :
-      true
-
-    // if we have no queue, then that means a clogged READENTRY
-    if (!ret && !this[QUEUE].length) {
-      this[READENTRY].once('drain', _ => this.emit('drain'))
-    }
-
-    return ret
-  }
-
-  [BUFFERCONCAT] (c) {
-    if (c && !this[ABORTED]) {
-      this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
-    }
-  }
-
-  [MAYBEEND] () {
-    if (this[ENDED] &&
-        !this[EMITTEDEND] &&
-        !this[ABORTED] &&
-        !this[CONSUMING]) {
-      this[EMITTEDEND] = true
-      const entry = this[WRITEENTRY]
-      if (entry && entry.blockRemain) {
-        // truncated, likely a damaged file
-        const have = this[BUFFER] ? this[BUFFER].length : 0
-        this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
-          entry.blockRemain} more bytes, only ${have} available)`, { entry })
-        if (this[BUFFER]) {
-          entry.write(this[BUFFER])
-        }
-        entry.end()
-      }
-      this[EMIT](DONE)
-    }
-  }
-
-  [CONSUMECHUNK] (chunk) {
-    if (this[CONSUMING]) {
-      this[BUFFERCONCAT](chunk)
-    } else if (!chunk && !this[BUFFER]) {
-      this[MAYBEEND]()
-    } else {
-      this[CONSUMING] = true
-      if (this[BUFFER]) {
-        this[BUFFERCONCAT](chunk)
-        const c = this[BUFFER]
-        this[BUFFER] = null
-        this[CONSUMECHUNKSUB](c)
-      } else {
-        this[CONSUMECHUNKSUB](chunk)
-      }
-
-      while (this[BUFFER] &&
-          this[BUFFER].length >= 512 &&
-          !this[ABORTED] &&
-          !this[SAW_EOF]) {
-        const c = this[BUFFER]
-        this[BUFFER] = null
-        this[CONSUMECHUNKSUB](c)
-      }
-      this[CONSUMING] = false
-    }
-
-    if (!this[BUFFER] || this[ENDED]) {
-      this[MAYBEEND]()
-    }
-  }
-
-  [CONSUMECHUNKSUB] (chunk) {
-    // we know that we are in CONSUMING mode, so anything written goes into
-    // the buffer.  Advance the position and put any remainder in the buffer.
-    let position = 0
-    const length = chunk.length
-    while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
-      switch (this[STATE]) {
-        case 'begin':
-        case 'header':
-          this[CONSUMEHEADER](chunk, position)
-          position += 512
-          break
-
-        case 'ignore':
-        case 'body':
-          position += this[CONSUMEBODY](chunk, position)
-          break
-
-        case 'meta':
-          position += this[CONSUMEMETA](chunk, position)
-          break
-
-        /* istanbul ignore next */
-        default:
-          throw new Error('invalid state: ' + this[STATE])
-      }
-    }
-
-    if (position < length) {
-      if (this[BUFFER]) {
-        this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
-      } else {
-        this[BUFFER] = chunk.slice(position)
-      }
-    }
-  }
-
-  end (chunk) {
-    if (!this[ABORTED]) {
-      if (this[UNZIP]) {
-        this[UNZIP].end(chunk)
-      } else {
-        this[ENDED] = true
-        if (this.brotli === undefined) chunk = chunk || Buffer.alloc(0)
-        this.write(chunk)
-      }
-    }
-  }
-})
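
A hedged sketch of driving the parser directly, assuming an uncompressed archive.tar on disk; entries start life paused, so a consumer that only wants metadata has to resume each one.

const Parser = require('./parse.js')
const fs = require('fs')

fs.createReadStream('archive.tar').pipe(new Parser({
  onentry: entry => {
    console.log(entry.type, entry.path)
    entry.resume()   // discard the body so the next header can be consumed
  },
}))
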
diff --git a/node_modules/tar/lib/path-reservations.js b/node_modules/tar/lib/path-reservations.js
deleted file mode 100644
index 8d349d584513f..0000000000000
--- a/node_modules/tar/lib/path-reservations.js
+++ /dev/null
@@ -1,156 +0,0 @@
-// A path exclusive reservation system
-// reserve([list, of, paths], fn)
-// When the fn is first in line for all its paths, it
-// is called with a cb that clears the reservation.
-//
-// Used by async unpack to avoid clobbering paths in use,
-// while still allowing maximal safe parallelization.
-
-const assert = require('assert')
-const normalize = require('./normalize-unicode.js')
-const stripSlashes = require('./strip-trailing-slashes.js')
-const { join } = require('path')
-
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
-const isWindows = platform === 'win32'
-
-module.exports = () => {
-  // path => [function or Set]
-  // A Set object means a directory reservation
-  // A fn is a direct reservation on that path
-  const queues = new Map()
-
-  // fn => {paths:[path,...], dirs:[path, ...]}
-  const reservations = new Map()
-
-  // return a set of parent dirs for a given path
-  // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
-  const getDirs = path => {
-    const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
-      if (set.length) {
-        path = join(set[set.length - 1], path)
-      }
-      set.push(path || '/')
-      return set
-    }, [])
-    return dirs
-  }
-
-  // functions currently running
-  const running = new Set()
-
-  // return the queues for each path the function cares about
-  // fn => {paths, dirs}
-  const getQueues = fn => {
-    const res = reservations.get(fn)
-    /* istanbul ignore if - unpossible */
-    if (!res) {
-      throw new Error('function does not have any path reservations')
-    }
-    return {
-      paths: res.paths.map(path => queues.get(path)),
-      dirs: [...res.dirs].map(path => queues.get(path)),
-    }
-  }
-
-  // check if fn is first in line for all its paths, and is
-  // included in the first set for all its dir queues
-  const check = fn => {
-    const { paths, dirs } = getQueues(fn)
-    return paths.every(q => q[0] === fn) &&
-      dirs.every(q => q[0] instanceof Set && q[0].has(fn))
-  }
-
-  // run the function if it's first in line and not already running
-  const run = fn => {
-    if (running.has(fn) || !check(fn)) {
-      return false
-    }
-    running.add(fn)
-    fn(() => clear(fn))
-    return true
-  }
-
-  const clear = fn => {
-    if (!running.has(fn)) {
-      return false
-    }
-
-    const { paths, dirs } = reservations.get(fn)
-    const next = new Set()
-
-    paths.forEach(path => {
-      const q = queues.get(path)
-      assert.equal(q[0], fn)
-      if (q.length === 1) {
-        queues.delete(path)
-      } else {
-        q.shift()
-        if (typeof q[0] === 'function') {
-          next.add(q[0])
-        } else {
-          q[0].forEach(fn => next.add(fn))
-        }
-      }
-    })
-
-    dirs.forEach(dir => {
-      const q = queues.get(dir)
-      assert(q[0] instanceof Set)
-      if (q[0].size === 1 && q.length === 1) {
-        queues.delete(dir)
-      } else if (q[0].size === 1) {
-        q.shift()
-
-        // must be a function or else the Set would've been reused
-        next.add(q[0])
-      } else {
-        q[0].delete(fn)
-      }
-    })
-    running.delete(fn)
-
-    next.forEach(fn => run(fn))
-    return true
-  }
-
-  const reserve = (paths, fn) => {
-    // collide on matches across case and unicode normalization
-    // On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
-    // impossible to determine whether two paths refer to the same thing on
-    // disk, without asking the kernel for a shortname.
-    // So, we just pretend that every path matches every other path here,
-    // effectively removing all parallelization on windows.
-    paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
-      // don't need normPath, because we skip this entirely for windows
-      return stripSlashes(join(normalize(p))).toLowerCase()
-    })
-
-    const dirs = new Set(
-      paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
-    )
-    reservations.set(fn, { dirs, paths })
-    paths.forEach(path => {
-      const q = queues.get(path)
-      if (!q) {
-        queues.set(path, [fn])
-      } else {
-        q.push(fn)
-      }
-    })
-    dirs.forEach(dir => {
-      const q = queues.get(dir)
-      if (!q) {
-        queues.set(dir, [new Set([fn])])
-      } else if (q[q.length - 1] instanceof Set) {
-        q[q.length - 1].add(fn)
-      } else {
-        q.push(new Set([fn]))
-      }
-    })
-
-    return run(fn)
-  }
-
-  return { check, reserve }
-}
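
A hedged sketch of the reservation flow described in the header comment: a function runs only once it is first in line for every path it reserved, and calling the cb it receives releases those paths for the next waiter (the path is illustrative).

const pathReservations = require('./path-reservations.js')
const reservations = pathReservations()

reservations.reserve(['a/b/file.txt'], done => {
  // exclusive access to a/b/file.txt, plus a shared hold on its parent dirs
  done()   // clear the reservation so queued waiters can run
})
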
diff --git a/node_modules/tar/lib/pax.js b/node_modules/tar/lib/pax.js
deleted file mode 100644
index 4a7ca85386e83..0000000000000
--- a/node_modules/tar/lib/pax.js
+++ /dev/null
@@ -1,150 +0,0 @@
-'use strict'
-const Header = require('./header.js')
-const path = require('path')
-
-class Pax {
-  constructor (obj, global) {
-    this.atime = obj.atime || null
-    this.charset = obj.charset || null
-    this.comment = obj.comment || null
-    this.ctime = obj.ctime || null
-    this.gid = obj.gid || null
-    this.gname = obj.gname || null
-    this.linkpath = obj.linkpath || null
-    this.mtime = obj.mtime || null
-    this.path = obj.path || null
-    this.size = obj.size || null
-    this.uid = obj.uid || null
-    this.uname = obj.uname || null
-    this.dev = obj.dev || null
-    this.ino = obj.ino || null
-    this.nlink = obj.nlink || null
-    this.global = global || false
-  }
-
-  encode () {
-    const body = this.encodeBody()
-    if (body === '') {
-      return null
-    }
-
-    const bodyLen = Buffer.byteLength(body)
-    // round up to 512 bytes
-    // add 512 for header
-    const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
-    const buf = Buffer.allocUnsafe(bufLen)
-
-    // 0-fill the header section, it might not hit every field
-    for (let i = 0; i < 512; i++) {
-      buf[i] = 0
-    }
-
-    new Header({
-      // XXX split the path
-      // then the path should be PaxHeader + basename, but less than 99,
-      // prepend with the dirname
-      path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
-      mode: this.mode || 0o644,
-      uid: this.uid || null,
-      gid: this.gid || null,
-      size: bodyLen,
-      mtime: this.mtime || null,
-      type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
-      linkpath: '',
-      uname: this.uname || '',
-      gname: this.gname || '',
-      devmaj: 0,
-      devmin: 0,
-      atime: this.atime || null,
-      ctime: this.ctime || null,
-    }).encode(buf)
-
-    buf.write(body, 512, bodyLen, 'utf8')
-
-    // null pad after the body
-    for (let i = bodyLen + 512; i < buf.length; i++) {
-      buf[i] = 0
-    }
-
-    return buf
-  }
-
-  encodeBody () {
-    return (
-      this.encodeField('path') +
-      this.encodeField('ctime') +
-      this.encodeField('atime') +
-      this.encodeField('dev') +
-      this.encodeField('ino') +
-      this.encodeField('nlink') +
-      this.encodeField('charset') +
-      this.encodeField('comment') +
-      this.encodeField('gid') +
-      this.encodeField('gname') +
-      this.encodeField('linkpath') +
-      this.encodeField('mtime') +
-      this.encodeField('size') +
-      this.encodeField('uid') +
-      this.encodeField('uname')
-    )
-  }
-
-  encodeField (field) {
-    if (this[field] === null || this[field] === undefined) {
-      return ''
-    }
-    const v = this[field] instanceof Date ? this[field].getTime() / 1000
-      : this[field]
-    const s = ' ' +
-      (field === 'dev' || field === 'ino' || field === 'nlink'
-        ? 'SCHILY.' : '') +
-      field + '=' + v + '\n'
-    const byteLen = Buffer.byteLength(s)
-    // the digits includes the length of the digits in ascii base-10
-    // so if it's 9 characters, then adding 1 for the 9 makes it 10
-    // which makes it 11 chars.
-    let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
-    if (byteLen + digits >= Math.pow(10, digits)) {
-      digits += 1
-    }
-    const len = digits + byteLen
-    return len + s
-  }
-}
-
-Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
-
-const merge = (a, b) =>
-  b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
-
-const parseKV = string =>
-  string
-    .replace(/\n$/, '')
-    .split('\n')
-    .reduce(parseKVLine, Object.create(null))
-
-const parseKVLine = (set, line) => {
-  const n = parseInt(line, 10)
-
-  // XXX Values with \n in them will fail this.
-  // Refactor to not be a naive line-by-line parse.
-  if (n !== Buffer.byteLength(line) + 1) {
-    return set
-  }
-
-  line = line.slice((n + ' ').length)
-  const kv = line.split('=')
-  const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
-  if (!k) {
-    return set
-  }
-
-  const v = kv.join('=')
-  set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
-    ? new Date(v * 1000)
-    : /^[0-9]+$/.test(v) ? +v
-    : v
-  return set
-}
-
-module.exports = Pax
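
To make encodeField's length arithmetic concrete: each record is "<len> key=value\n", where len is the total byte length of the record including its own decimal digits, so a three-character path encodes as below (the path value is illustrative).

const Pax = require('./pax.js')

new Pax({ path: 'foo' }).encodeBody()
// => '12 path=foo\n'  (" path=foo\n" is 10 bytes, plus 2 for the "12" prefix)
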
diff --git a/node_modules/tar/lib/read-entry.js b/node_modules/tar/lib/read-entry.js
deleted file mode 100644
index 6186266e89c0a..0000000000000
--- a/node_modules/tar/lib/read-entry.js
+++ /dev/null
@@ -1,107 +0,0 @@
-'use strict'
-const { Minipass } = require('minipass')
-const normPath = require('./normalize-windows-path.js')
-
-const SLURP = Symbol('slurp')
-module.exports = class ReadEntry extends Minipass {
-  constructor (header, ex, gex) {
-    super()
-    // read entries always start life paused.  this is to avoid the
-    // situation where Minipass's auto-ending empty streams results
-    // in an entry ending before we're ready for it.
-    this.pause()
-    this.extended = ex
-    this.globalExtended = gex
-    this.header = header
-    this.startBlockSize = 512 * Math.ceil(header.size / 512)
-    this.blockRemain = this.startBlockSize
-    this.remain = header.size
-    this.type = header.type
-    this.meta = false
-    this.ignore = false
-    switch (this.type) {
-      case 'File':
-      case 'OldFile':
-      case 'Link':
-      case 'SymbolicLink':
-      case 'CharacterDevice':
-      case 'BlockDevice':
-      case 'Directory':
-      case 'FIFO':
-      case 'ContiguousFile':
-      case 'GNUDumpDir':
-        break
-
-      case 'NextFileHasLongLinkpath':
-      case 'NextFileHasLongPath':
-      case 'OldGnuLongPath':
-      case 'GlobalExtendedHeader':
-      case 'ExtendedHeader':
-      case 'OldExtendedHeader':
-        this.meta = true
-        break
-
-      // NOTE: gnutar and bsdtar treat unrecognized types as 'File'
-      // it may be worth doing the same, but with a warning.
-      default:
-        this.ignore = true
-    }
-
-    this.path = normPath(header.path)
-    this.mode = header.mode
-    if (this.mode) {
-      this.mode = this.mode & 0o7777
-    }
-    this.uid = header.uid
-    this.gid = header.gid
-    this.uname = header.uname
-    this.gname = header.gname
-    this.size = header.size
-    this.mtime = header.mtime
-    this.atime = header.atime
-    this.ctime = header.ctime
-    this.linkpath = normPath(header.linkpath)
-    this.uname = header.uname
-    this.gname = header.gname
-
-    if (ex) {
-      this[SLURP](ex)
-    }
-    if (gex) {
-      this[SLURP](gex, true)
-    }
-  }
-
-  write (data) {
-    const writeLen = data.length
-    if (writeLen > this.blockRemain) {
-      throw new Error('writing more to entry than is appropriate')
-    }
-
-    const r = this.remain
-    const br = this.blockRemain
-    this.remain = Math.max(0, r - writeLen)
-    this.blockRemain = Math.max(0, br - writeLen)
-    if (this.ignore) {
-      return true
-    }
-
-    if (r >= writeLen) {
-      return super.write(data)
-    }
-
-    // r < writeLen
-    return super.write(data.slice(0, r))
-  }
-
-  [SLURP] (ex, global) {
-    for (const k in ex) {
-      // we slurp in everything except for the path attribute in
-      // a global extended header, because that's weird.
-      if (ex[k] !== null && ex[k] !== undefined &&
-          !(global && k === 'path')) {
-        this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
-      }
-    }
-  }
-}
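
One detail of the class above that is easy to miss: `startBlockSize` is the entry's archive footprint, not its file size, because tar stores data in 512-byte blocks and `write()` never emits the zero padding at the end of the final block. A tiny sketch of that arithmetic:

```js
// 512-byte block accounting as used by ReadEntry above.
const size = 700                                    // header.size
const startBlockSize = 512 * Math.ceil(size / 512)
console.log(startBlockSize)                         // 1024 bytes of archive body
console.log(startBlockSize - size)                  // 324 bytes of padding write() drops
```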
diff --git a/node_modules/tar/lib/replace.js b/node_modules/tar/lib/replace.js
deleted file mode 100644
index 8db6800bdf464..0000000000000
--- a/node_modules/tar/lib/replace.js
+++ /dev/null
@@ -1,246 +0,0 @@
-'use strict'
-
-// tar -r
-const hlo = require('./high-level-opt.js')
-const Pack = require('./pack.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const t = require('./list.js')
-const path = require('path')
-
-// starting at the head of the file, read a Header
-// If the checksum is invalid, that's our position to start writing
-// If it is, jump forward by the specified size (round up to 512)
-// and try again.
-// Write the new Pack stream starting there.
-
-const Header = require('./header.js')
-
-module.exports = (opt_, files, cb) => {
-  const opt = hlo(opt_)
-
-  if (!opt.file) {
-    throw new TypeError('file is required')
-  }
-
-  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
-    throw new TypeError('cannot append to compressed archives')
-  }
-
-  if (!files || !Array.isArray(files) || !files.length) {
-    throw new TypeError('no files or directories specified')
-  }
-
-  files = Array.from(files)
-
-  return opt.sync ? replaceSync(opt, files)
-    : replace(opt, files, cb)
-}
-
-const replaceSync = (opt, files) => {
-  const p = new Pack.Sync(opt)
-
-  let threw = true
-  let fd
-  let position
-
-  try {
-    try {
-      fd = fs.openSync(opt.file, 'r+')
-    } catch (er) {
-      if (er.code === 'ENOENT') {
-        fd = fs.openSync(opt.file, 'w+')
-      } else {
-        throw er
-      }
-    }
-
-    const st = fs.fstatSync(fd)
-    const headBuf = Buffer.alloc(512)
-
-    POSITION: for (position = 0; position < st.size; position += 512) {
-      for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
-        bytes = fs.readSync(
-          fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
-        )
-
-        if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
-          throw new Error('cannot append to compressed archives')
-        }
-
-        if (!bytes) {
-          break POSITION
-        }
-      }
-
-      const h = new Header(headBuf)
-      if (!h.cksumValid) {
-        break
-      }
-      const entryBlockSize = 512 * Math.ceil(h.size / 512)
-      if (position + entryBlockSize + 512 > st.size) {
-        break
-      }
-      // the 512 for the header we just parsed will be added as well
-      // also jump ahead all the blocks for the body
-      position += entryBlockSize
-      if (opt.mtimeCache) {
-        opt.mtimeCache.set(h.path, h.mtime)
-      }
-    }
-    threw = false
-
-    streamSync(opt, p, position, fd, files)
-  } finally {
-    if (threw) {
-      try {
-        fs.closeSync(fd)
-      } catch (er) {}
-    }
-  }
-}
-
-const streamSync = (opt, p, position, fd, files) => {
-  const stream = new fsm.WriteStreamSync(opt.file, {
-    fd: fd,
-    start: position,
-  })
-  p.pipe(stream)
-  addFilesSync(p, files)
-}
-
-const replace = (opt, files, cb) => {
-  files = Array.from(files)
-  const p = new Pack(opt)
-
-  const getPos = (fd, size, cb_) => {
-    const cb = (er, pos) => {
-      if (er) {
-        fs.close(fd, _ => cb_(er))
-      } else {
-        cb_(null, pos)
-      }
-    }
-
-    let position = 0
-    if (size === 0) {
-      return cb(null, 0)
-    }
-
-    let bufPos = 0
-    const headBuf = Buffer.alloc(512)
-    const onread = (er, bytes) => {
-      if (er) {
-        return cb(er)
-      }
-      bufPos += bytes
-      if (bufPos < 512 && bytes) {
-        return fs.read(
-          fd, headBuf, bufPos, headBuf.length - bufPos,
-          position + bufPos, onread
-        )
-      }
-
-      if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) {
-        return cb(new Error('cannot append to compressed archives'))
-      }
-
-      // truncated header
-      if (bufPos < 512) {
-        return cb(null, position)
-      }
-
-      const h = new Header(headBuf)
-      if (!h.cksumValid) {
-        return cb(null, position)
-      }
-
-      const entryBlockSize = 512 * Math.ceil(h.size / 512)
-      if (position + entryBlockSize + 512 > size) {
-        return cb(null, position)
-      }
-
-      position += entryBlockSize + 512
-      if (position >= size) {
-        return cb(null, position)
-      }
-
-      if (opt.mtimeCache) {
-        opt.mtimeCache.set(h.path, h.mtime)
-      }
-      bufPos = 0
-      fs.read(fd, headBuf, 0, 512, position, onread)
-    }
-    fs.read(fd, headBuf, 0, 512, position, onread)
-  }
-
-  const promise = new Promise((resolve, reject) => {
-    p.on('error', reject)
-    let flag = 'r+'
-    const onopen = (er, fd) => {
-      if (er && er.code === 'ENOENT' && flag === 'r+') {
-        flag = 'w+'
-        return fs.open(opt.file, flag, onopen)
-      }
-
-      if (er) {
-        return reject(er)
-      }
-
-      fs.fstat(fd, (er, st) => {
-        if (er) {
-          return fs.close(fd, () => reject(er))
-        }
-
-        getPos(fd, st.size, (er, position) => {
-          if (er) {
-            return reject(er)
-          }
-          const stream = new fsm.WriteStream(opt.file, {
-            fd: fd,
-            start: position,
-          })
-          p.pipe(stream)
-          stream.on('error', reject)
-          stream.on('close', resolve)
-          addFilesAsync(p, files)
-        })
-      })
-    }
-    fs.open(opt.file, flag, onopen)
-  })
-
-  return cb ? promise.then(cb, cb) : promise
-}
-
-const addFilesSync = (p, files) => {
-  files.forEach(file => {
-    if (file.charAt(0) === '@') {
-      t({
-        file: path.resolve(p.cwd, file.slice(1)),
-        sync: true,
-        noResume: true,
-        onentry: entry => p.add(entry),
-      })
-    } else {
-      p.add(file)
-    }
-  })
-  p.end()
-}
-
-const addFilesAsync = (p, files) => {
-  while (files.length) {
-    const file = files.shift()
-    if (file.charAt(0) === '@') {
-      return t({
-        file: path.resolve(p.cwd, file.slice(1)),
-        noResume: true,
-        onentry: entry => p.add(entry),
-      }).then(_ => addFilesAsync(p, files))
-    } else {
-      p.add(file)
-    }
-  }
-  p.end()
-}
diff --git a/node_modules/tar/lib/strip-absolute-path.js b/node_modules/tar/lib/strip-absolute-path.js
deleted file mode 100644
index 185e2dead3929..0000000000000
--- a/node_modules/tar/lib/strip-absolute-path.js
+++ /dev/null
@@ -1,24 +0,0 @@
-// unix absolute paths are also absolute on win32, so we use this for both
-const { isAbsolute, parse } = require('path').win32
-
-// returns [root, stripped]
-// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
-// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
-// explicitly if it's the first character.
-// drive-specific relative paths on Windows get their root stripped off even
-// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
-module.exports = path => {
-  let r = ''
-
-  let parsed = parse(path)
-  while (isAbsolute(path) || parsed.root) {
-    // windows will think that //x/y/z has a "root" of //x/y/
-    // but strip the //?/C:/ off of //?/C:/path
-    const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/'
-      : parsed.root
-    path = path.slice(root.length)
-    r += root
-    parsed = parse(path)
-  }
-  return [r, path]
-}
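
A quick illustration of the splits described in the comments above. The `require` path is hypothetical — this patch removes the file — so treat it as a local copy kept only for the example:

```js
// Hypothetical local copy of the module shown above, for illustration only.
const stripAbsolutePath = require('./strip-absolute-path.js')

console.log(stripAbsolutePath('/foo/bar'))   // [ '/', 'foo/bar' ]
console.log(stripAbsolutePath('c:../foo'))   // [ 'c:', '../foo' ]  drive-relative root stripped
console.log(stripAbsolutePath('//x/y/z/a'))  // [ '//', 'x/y/z/a' ] UNC-style root reduced to slashes
console.log(stripAbsolutePath('C:/x/y'))     // [ 'C:/', 'x/y' ]
```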
diff --git a/node_modules/tar/lib/strip-trailing-slashes.js b/node_modules/tar/lib/strip-trailing-slashes.js
deleted file mode 100644
index 3e3ecec5a402b..0000000000000
--- a/node_modules/tar/lib/strip-trailing-slashes.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// warning: extremely hot code path.
-// This has been meticulously optimized for use
-// within npm install on large package trees.
-// Do not edit without careful benchmarking.
-module.exports = str => {
-  let i = str.length - 1
-  let slashesStart = -1
-  while (i > -1 && str.charAt(i) === '/') {
-    slashesStart = i
-    i--
-  }
-  return slashesStart === -1 ? str : str.slice(0, slashesStart)
-}
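
For context, the loop above is an allocation-free stand-in for a regex such as `str.replace(/\/+$/, '')`; a self-contained check that the two agree (the regex is an illustration, not from the patch):

```js
// Self-contained copy of the helper above, compared against the regex it
// is assumed to replace.
const stripTrailingSlashes = str => {
  let i = str.length - 1
  let slashesStart = -1
  while (i > -1 && str.charAt(i) === '/') {
    slashesStart = i
    i--
  }
  return slashesStart === -1 ? str : str.slice(0, slashesStart)
}

for (const s of ['a/b///', 'a/b', '///', '']) {
  console.log([stripTrailingSlashes(s), s.replace(/\/+$/, '')])
}
// [ 'a/b', 'a/b' ]  [ 'a/b', 'a/b' ]  [ '', '' ]  [ '', '' ]
```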
diff --git a/node_modules/tar/lib/types.js b/node_modules/tar/lib/types.js
deleted file mode 100644
index 7bfc254658f4e..0000000000000
--- a/node_modules/tar/lib/types.js
+++ /dev/null
@@ -1,44 +0,0 @@
-'use strict'
-// map types from key to human-friendly name
-exports.name = new Map([
-  ['0', 'File'],
-  // same as File
-  ['', 'OldFile'],
-  ['1', 'Link'],
-  ['2', 'SymbolicLink'],
-  // Devices and FIFOs aren't fully supported
-  // they are parsed, but skipped when unpacking
-  ['3', 'CharacterDevice'],
-  ['4', 'BlockDevice'],
-  ['5', 'Directory'],
-  ['6', 'FIFO'],
-  // same as File
-  ['7', 'ContiguousFile'],
-  // pax headers
-  ['g', 'GlobalExtendedHeader'],
-  ['x', 'ExtendedHeader'],
-  // vendor-specific stuff
-  // skip
-  ['A', 'SolarisACL'],
-  // like 5, but with data, which should be skipped
-  ['D', 'GNUDumpDir'],
-  // metadata only, skip
-  ['I', 'Inode'],
-  // data = link path of next file
-  ['K', 'NextFileHasLongLinkpath'],
-  // data = path of next file
-  ['L', 'NextFileHasLongPath'],
-  // skip
-  ['M', 'ContinuationFile'],
-  // like L
-  ['N', 'OldGnuLongPath'],
-  // skip
-  ['S', 'SparseFile'],
-  // skip
-  ['V', 'TapeVolumeHeader'],
-  // like x
-  ['X', 'OldExtendedHeader'],
-])
-
-// map the other direction
-exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
diff --git a/node_modules/tar/lib/unpack.js b/node_modules/tar/lib/unpack.js
deleted file mode 100644
index 03172e2c95d97..0000000000000
--- a/node_modules/tar/lib/unpack.js
+++ /dev/null
@@ -1,923 +0,0 @@
-'use strict'
-
-// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
-// but the path reservations are required to avoid race conditions where
-// parallelized unpack ops may mess with one another, due to dependencies
-// (like a Link depending on its target) or destructive operations (like
-// clobbering an fs object to create one of a different type.)
-
-const assert = require('assert')
-const Parser = require('./parse.js')
-const fs = require('fs')
-const fsm = require('fs-minipass')
-const path = require('path')
-const mkdir = require('./mkdir.js')
-const wc = require('./winchars.js')
-const pathReservations = require('./path-reservations.js')
-const stripAbsolutePath = require('./strip-absolute-path.js')
-const normPath = require('./normalize-windows-path.js')
-const stripSlash = require('./strip-trailing-slashes.js')
-const normalize = require('./normalize-unicode.js')
-
-const ONENTRY = Symbol('onEntry')
-const CHECKFS = Symbol('checkFs')
-const CHECKFS2 = Symbol('checkFs2')
-const PRUNECACHE = Symbol('pruneCache')
-const ISREUSABLE = Symbol('isReusable')
-const MAKEFS = Symbol('makeFs')
-const FILE = Symbol('file')
-const DIRECTORY = Symbol('directory')
-const LINK = Symbol('link')
-const SYMLINK = Symbol('symlink')
-const HARDLINK = Symbol('hardlink')
-const UNSUPPORTED = Symbol('unsupported')
-const CHECKPATH = Symbol('checkPath')
-const MKDIR = Symbol('mkdir')
-const ONERROR = Symbol('onError')
-const PENDING = Symbol('pending')
-const PEND = Symbol('pend')
-const UNPEND = Symbol('unpend')
-const ENDED = Symbol('ended')
-const MAYBECLOSE = Symbol('maybeClose')
-const SKIP = Symbol('skip')
-const DOCHOWN = Symbol('doChown')
-const UID = Symbol('uid')
-const GID = Symbol('gid')
-const CHECKED_CWD = Symbol('checkedCwd')
-const crypto = require('crypto')
-const getFlag = require('./get-write-flag.js')
-const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
-const isWindows = platform === 'win32'
-const DEFAULT_MAX_DEPTH = 1024
-
-// Unlinks on Windows are not atomic.
-//
-// This means that if you have a file entry, followed by another
-// file entry with an identical name, and you cannot re-use the file
-// (because it's a hardlink, or because unlink:true is set, or it's
-// Windows, which does not have useful nlink values), then the unlink
-// will be committed to the disk AFTER the new file has been written
-// over the old one, deleting the new file.
-//
-// To work around this, on Windows systems, we rename the file and then
-// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
-// know of a better way to do this, given windows' non-atomic unlink
-// semantics.
-//
-// See: https://github.com/npm/node-tar/issues/183
-/* istanbul ignore next */
-const unlinkFile = (path, cb) => {
-  if (!isWindows) {
-    return fs.unlink(path, cb)
-  }
-
-  const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
-  fs.rename(path, name, er => {
-    if (er) {
-      return cb(er)
-    }
-    fs.unlink(name, cb)
-  })
-}
-
-/* istanbul ignore next */
-const unlinkFileSync = path => {
-  if (!isWindows) {
-    return fs.unlinkSync(path)
-  }
-
-  const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
-  fs.renameSync(path, name)
-  fs.unlinkSync(name)
-}
-
-// this.gid, entry.gid, this.processUid
-const uint32 = (a, b, c) =>
-  a === a >>> 0 ? a
-  : b === b >>> 0 ? b
-  : c
-
-// clear the cache if it's a case-insensitive unicode-squashing match.
-// we can't know if the current file system is case-sensitive or supports
-// unicode fully, so we check for similarity on the maximally compatible
-// representation.  Err on the side of pruning, since all it's doing is
-// preventing lstats, and it's not the end of the world if we get a false
-// positive.
-// Note that on windows, we always drop the entire cache whenever a
-// symbolic link is encountered, because 8.3 filenames are impossible
-// to reason about, and collisions are hazards rather than just failures.
-const cacheKeyNormalize = path => stripSlash(normPath(normalize(path)))
-  .toLowerCase()
-
-const pruneCache = (cache, abs) => {
-  abs = cacheKeyNormalize(abs)
-  for (const path of cache.keys()) {
-    const pnorm = cacheKeyNormalize(path)
-    if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) {
-      cache.delete(path)
-    }
-  }
-}
-
-const dropCache = cache => {
-  for (const key of cache.keys()) {
-    cache.delete(key)
-  }
-}
-
-class Unpack extends Parser {
-  constructor (opt) {
-    if (!opt) {
-      opt = {}
-    }
-
-    opt.ondone = _ => {
-      this[ENDED] = true
-      this[MAYBECLOSE]()
-    }
-
-    super(opt)
-
-    this[CHECKED_CWD] = false
-
-    this.reservations = pathReservations()
-
-    this.transform = typeof opt.transform === 'function' ? opt.transform : null
-
-    this.writable = true
-    this.readable = false
-
-    this[PENDING] = 0
-    this[ENDED] = false
-
-    this.dirCache = opt.dirCache || new Map()
-
-    if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
-      // need both or neither
-      if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') {
-        throw new TypeError('cannot set owner without number uid and gid')
-      }
-      if (opt.preserveOwner) {
-        throw new TypeError(
-          'cannot preserve owner in archive and also set owner explicitly')
-      }
-      this.uid = opt.uid
-      this.gid = opt.gid
-      this.setOwner = true
-    } else {
-      this.uid = null
-      this.gid = null
-      this.setOwner = false
-    }
-
-    // default true for root
-    if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') {
-      this.preserveOwner = process.getuid && process.getuid() === 0
-    } else {
-      this.preserveOwner = !!opt.preserveOwner
-    }
-
-    this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
-      process.getuid() : null
-    this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
-      process.getgid() : null
-
-    // prevent excessively deep nesting of subfolders
-    // set to `Infinity` to remove this restriction
-    this.maxDepth = typeof opt.maxDepth === 'number'
-      ? opt.maxDepth
-      : DEFAULT_MAX_DEPTH
-
-    // mostly just for testing, but useful in some cases.
-    // Forcibly trigger a chown on every entry, no matter what
-    this.forceChown = opt.forceChown === true
-
-    // turn ><?| in filenames into 0xf000-higher encoded forms
-    this.win32 = !!opt.win32 || isWindows
-
-    // do not unpack over files that are newer than what's in the archive
-    this.newer = !!opt.newer
-
-    // do not unpack over ANY files
-    this.keep = !!opt.keep
-
-    // do not set mtime/atime of extracted entries
-    this.noMtime = !!opt.noMtime
-
-    // do not chmod extracted entries
-    this.noChmod = !!opt.noChmod
-
-    // allow ../ paths and absolute paths to be preserved as-is
-    this.preservePaths = !!opt.preservePaths
-
-    // unlink files and links before writing. This breaks existing hard
-    // links, and removes symlink directories rather than erroring
-    this.unlink = !!opt.unlink
-
-    this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
-    this.strip = +opt.strip || 0
-
-    // if we're not chmodding, then we don't need the process umask
-    this.processUmask = opt.noChmod ? 0 : process.umask()
-    this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
-
-    // default mode for dirs created as parents
-    this.dmode = opt.dmode || (0o0777 & (~this.umask))
-    this.fmode = opt.fmode || (0o0666 & (~this.umask))
-
-    this.on('entry', entry => this[ONENTRY](entry))
-  }
-
-  // a bad or damaged archive is a warning for Parser, but an error
-  // when extracting.  Mark those errors as unrecoverable, because
-  // the Unpack contract cannot be met.
-  warn (code, msg, data = {}) {
-    if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') {
-      data.recoverable = false
-    }
-    return super.warn(code, msg, data)
-  }
-
-  [MAYBECLOSE] () {
-    if (this[ENDED] && this[PENDING] === 0) {
-      this.emit('prefinish')
-      this.emit('finish')
-      this.emit('end')
-    }
-  }
-
-  [CHECKPATH] (entry) {
-    const p = normPath(entry.path)
-    const parts = p.split('/')
-
-    if (this.strip) {
-      if (parts.length < this.strip) {
-        return false
-      }
-      if (entry.type === 'Link') {
-        const linkparts = normPath(entry.linkpath).split('/')
-        if (linkparts.length >= this.strip) {
-          entry.linkpath = linkparts.slice(this.strip).join('/')
-        } else {
-          return false
-        }
-      }
-      parts.splice(0, this.strip)
-      entry.path = parts.join('/')
-    }
-
-    if (isFinite(this.maxDepth) && parts.length > this.maxDepth) {
-      this.warn('TAR_ENTRY_ERROR', 'path excessively deep', {
-        entry,
-        path: p,
-        depth: parts.length,
-        maxDepth: this.maxDepth,
-      })
-      return false
-    }
-
-    if (!this.preservePaths) {
-      if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
-        this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
-          entry,
-          path: p,
-        })
-        return false
-      }
-
-      // strip off the root
-      const [root, stripped] = stripAbsolutePath(p)
-      if (root) {
-        entry.path = stripped
-        this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
-          entry,
-          path: p,
-        })
-      }
-    }
-
-    if (path.isAbsolute(entry.path)) {
-      entry.absolute = normPath(path.resolve(entry.path))
-    } else {
-      entry.absolute = normPath(path.resolve(this.cwd, entry.path))
-    }
-
-    // if we somehow ended up with a path that escapes the cwd, and we are
-    // not in preservePaths mode, then something is fishy!  This should have
-    // been prevented above, so ignore this for coverage.
-    /* istanbul ignore if - defense in depth */
-    if (!this.preservePaths &&
-        entry.absolute.indexOf(this.cwd + '/') !== 0 &&
-        entry.absolute !== this.cwd) {
-      this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
-        entry,
-        path: normPath(entry.path),
-        resolvedPath: entry.absolute,
-        cwd: this.cwd,
-      })
-      return false
-    }
-
-    // an archive can set properties on the extraction directory, but it
-    // may not replace the cwd with a different kind of thing entirely.
-    if (entry.absolute === this.cwd &&
-        entry.type !== 'Directory' &&
-        entry.type !== 'GNUDumpDir') {
-      return false
-    }
-
-    // only encode : chars that aren't drive letter indicators
-    if (this.win32) {
-      const { root: aRoot } = path.win32.parse(entry.absolute)
-      entry.absolute = aRoot + wc.encode(entry.absolute.slice(aRoot.length))
-      const { root: pRoot } = path.win32.parse(entry.path)
-      entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length))
-    }
-
-    return true
-  }
-
-  [ONENTRY] (entry) {
-    if (!this[CHECKPATH](entry)) {
-      return entry.resume()
-    }
-
-    assert.equal(typeof entry.absolute, 'string')
-
-    switch (entry.type) {
-      case 'Directory':
-      case 'GNUDumpDir':
-        if (entry.mode) {
-          entry.mode = entry.mode | 0o700
-        }
-
-      // eslint-disable-next-line no-fallthrough
-      case 'File':
-      case 'OldFile':
-      case 'ContiguousFile':
-      case 'Link':
-      case 'SymbolicLink':
-        return this[CHECKFS](entry)
-
-      case 'CharacterDevice':
-      case 'BlockDevice':
-      case 'FIFO':
-      default:
-        return this[UNSUPPORTED](entry)
-    }
-  }
-
-  [ONERROR] (er, entry) {
-    // Cwd has to exist, or else nothing works. That's serious.
-    // Other errors are warnings, which raise the error in strict
-    // mode, but otherwise continue on.
-    if (er.name === 'CwdError') {
-      this.emit('error', er)
-    } else {
-      this.warn('TAR_ENTRY_ERROR', er, { entry })
-      this[UNPEND]()
-      entry.resume()
-    }
-  }
-
-  [MKDIR] (dir, mode, cb) {
-    mkdir(normPath(dir), {
-      uid: this.uid,
-      gid: this.gid,
-      processUid: this.processUid,
-      processGid: this.processGid,
-      umask: this.processUmask,
-      preserve: this.preservePaths,
-      unlink: this.unlink,
-      cache: this.dirCache,
-      cwd: this.cwd,
-      mode: mode,
-      noChmod: this.noChmod,
-    }, cb)
-  }
-
-  [DOCHOWN] (entry) {
-    // in preserve owner mode, chown if the entry doesn't match process
-    // in set owner mode, chown if setting doesn't match process
-    return this.forceChown ||
-      this.preserveOwner &&
-      (typeof entry.uid === 'number' && entry.uid !== this.processUid ||
-        typeof entry.gid === 'number' && entry.gid !== this.processGid)
-      ||
-      (typeof this.uid === 'number' && this.uid !== this.processUid ||
-        typeof this.gid === 'number' && this.gid !== this.processGid)
-  }
-
-  [UID] (entry) {
-    return uint32(this.uid, entry.uid, this.processUid)
-  }
-
-  [GID] (entry) {
-    return uint32(this.gid, entry.gid, this.processGid)
-  }
-
-  [FILE] (entry, fullyDone) {
-    const mode = entry.mode & 0o7777 || this.fmode
-    const stream = new fsm.WriteStream(entry.absolute, {
-      flags: getFlag(entry.size),
-      mode: mode,
-      autoClose: false,
-    })
-    stream.on('error', er => {
-      if (stream.fd) {
-        fs.close(stream.fd, () => {})
-      }
-
-      // flush all the data out so that we aren't left hanging
-      // if the error wasn't actually fatal.  otherwise the parse
-      // is blocked, and we never proceed.
-      stream.write = () => true
-      this[ONERROR](er, entry)
-      fullyDone()
-    })
-
-    let actions = 1
-    const done = er => {
-      if (er) {
-        /* istanbul ignore else - we should always have a fd by now */
-        if (stream.fd) {
-          fs.close(stream.fd, () => {})
-        }
-
-        this[ONERROR](er, entry)
-        fullyDone()
-        return
-      }
-
-      if (--actions === 0) {
-        fs.close(stream.fd, er => {
-          if (er) {
-            this[ONERROR](er, entry)
-          } else {
-            this[UNPEND]()
-          }
-          fullyDone()
-        })
-      }
-    }
-
-    stream.on('finish', _ => {
-      // if futimes fails, try utimes
-      // if utimes fails, fail with the original error
-      // same for fchown/chown
-      const abs = entry.absolute
-      const fd = stream.fd
-
-      if (entry.mtime && !this.noMtime) {
-        actions++
-        const atime = entry.atime || new Date()
-        const mtime = entry.mtime
-        fs.futimes(fd, atime, mtime, er =>
-          er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
-          : done())
-      }
-
-      if (this[DOCHOWN](entry)) {
-        actions++
-        const uid = this[UID](entry)
-        const gid = this[GID](entry)
-        fs.fchown(fd, uid, gid, er =>
-          er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
-          : done())
-      }
-
-      done()
-    })
-
-    const tx = this.transform ? this.transform(entry) || entry : entry
-    if (tx !== entry) {
-      tx.on('error', er => {
-        this[ONERROR](er, entry)
-        fullyDone()
-      })
-      entry.pipe(tx)
-    }
-    tx.pipe(stream)
-  }
-
-  [DIRECTORY] (entry, fullyDone) {
-    const mode = entry.mode & 0o7777 || this.dmode
-    this[MKDIR](entry.absolute, mode, er => {
-      if (er) {
-        this[ONERROR](er, entry)
-        fullyDone()
-        return
-      }
-
-      let actions = 1
-      const done = _ => {
-        if (--actions === 0) {
-          fullyDone()
-          this[UNPEND]()
-          entry.resume()
-        }
-      }
-
-      if (entry.mtime && !this.noMtime) {
-        actions++
-        fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
-      }
-
-      if (this[DOCHOWN](entry)) {
-        actions++
-        fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
-      }
-
-      done()
-    })
-  }
-
-  [UNSUPPORTED] (entry) {
-    entry.unsupported = true
-    this.warn('TAR_ENTRY_UNSUPPORTED',
-      `unsupported entry type: ${entry.type}`, { entry })
-    entry.resume()
-  }
-
-  [SYMLINK] (entry, done) {
-    this[LINK](entry, entry.linkpath, 'symlink', done)
-  }
-
-  [HARDLINK] (entry, done) {
-    const linkpath = normPath(path.resolve(this.cwd, entry.linkpath))
-    this[LINK](entry, linkpath, 'link', done)
-  }
-
-  [PEND] () {
-    this[PENDING]++
-  }
-
-  [UNPEND] () {
-    this[PENDING]--
-    this[MAYBECLOSE]()
-  }
-
-  [SKIP] (entry) {
-    this[UNPEND]()
-    entry.resume()
-  }
-
-  // Check if we can reuse an existing filesystem entry safely and
-  // overwrite it, rather than unlinking and recreating
-  // Windows doesn't report a useful nlink, so we just never reuse entries
-  [ISREUSABLE] (entry, st) {
-    return entry.type === 'File' &&
-      !this.unlink &&
-      st.isFile() &&
-      st.nlink <= 1 &&
-      !isWindows
-  }
-
-  // check if a thing is there, and if so, try to clobber it
-  [CHECKFS] (entry) {
-    this[PEND]()
-    const paths = [entry.path]
-    if (entry.linkpath) {
-      paths.push(entry.linkpath)
-    }
-    this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
-  }
-
-  [PRUNECACHE] (entry) {
-    // if we are not creating a directory, and the path is in the dirCache,
-    // then that means we are about to delete the directory we created
-    // previously, and it is no longer going to be a directory, and neither
-    // is any of its children.
-    // If a symbolic link is encountered, all bets are off.  There is no
-    // reasonable way to sanitize the cache in such a way we will be able to
-    // avoid having filesystem collisions.  If this happens with a non-symlink
-    // entry, it'll just fail to unpack, but a symlink to a directory, using an
-    // 8.3 shortname or certain unicode attacks, can evade detection and lead
-    // to arbitrary writes to anywhere on the system.
-    if (entry.type === 'SymbolicLink') {
-      dropCache(this.dirCache)
-    } else if (entry.type !== 'Directory') {
-      pruneCache(this.dirCache, entry.absolute)
-    }
-  }
-
-  [CHECKFS2] (entry, fullyDone) {
-    this[PRUNECACHE](entry)
-
-    const done = er => {
-      this[PRUNECACHE](entry)
-      fullyDone(er)
-    }
-
-    const checkCwd = () => {
-      this[MKDIR](this.cwd, this.dmode, er => {
-        if (er) {
-          this[ONERROR](er, entry)
-          done()
-          return
-        }
-        this[CHECKED_CWD] = true
-        start()
-      })
-    }
-
-    const start = () => {
-      if (entry.absolute !== this.cwd) {
-        const parent = normPath(path.dirname(entry.absolute))
-        if (parent !== this.cwd) {
-          return this[MKDIR](parent, this.dmode, er => {
-            if (er) {
-              this[ONERROR](er, entry)
-              done()
-              return
-            }
-            afterMakeParent()
-          })
-        }
-      }
-      afterMakeParent()
-    }
-
-    const afterMakeParent = () => {
-      fs.lstat(entry.absolute, (lstatEr, st) => {
-        if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
-          this[SKIP](entry)
-          done()
-          return
-        }
-        if (lstatEr || this[ISREUSABLE](entry, st)) {
-          return this[MAKEFS](null, entry, done)
-        }
-
-        if (st.isDirectory()) {
-          if (entry.type === 'Directory') {
-            const needChmod = !this.noChmod &&
-              entry.mode &&
-              (st.mode & 0o7777) !== entry.mode
-            const afterChmod = er => this[MAKEFS](er, entry, done)
-            if (!needChmod) {
-              return afterChmod()
-            }
-            return fs.chmod(entry.absolute, entry.mode, afterChmod)
-          }
-          // Not a dir entry, have to remove it.
-          // NB: the only way to end up with an entry that is the cwd
-          // itself, in such a way that == does not detect, is a
-          // tricky windows absolute path with UNC or 8.3 parts (and
-          // preservePaths:true, or else it will have been stripped).
-          // In that case, the user has opted out of path protections
-          // explicitly, so if they blow away the cwd, c'est la vie.
-          if (entry.absolute !== this.cwd) {
-            return fs.rmdir(entry.absolute, er =>
-              this[MAKEFS](er, entry, done))
-          }
-        }
-
-        // not a dir, and not reusable
-        // don't remove if the cwd, we want that error
-        if (entry.absolute === this.cwd) {
-          return this[MAKEFS](null, entry, done)
-        }
-
-        unlinkFile(entry.absolute, er =>
-          this[MAKEFS](er, entry, done))
-      })
-    }
-
-    if (this[CHECKED_CWD]) {
-      start()
-    } else {
-      checkCwd()
-    }
-  }
-
-  [MAKEFS] (er, entry, done) {
-    if (er) {
-      this[ONERROR](er, entry)
-      done()
-      return
-    }
-
-    switch (entry.type) {
-      case 'File':
-      case 'OldFile':
-      case 'ContiguousFile':
-        return this[FILE](entry, done)
-
-      case 'Link':
-        return this[HARDLINK](entry, done)
-
-      case 'SymbolicLink':
-        return this[SYMLINK](entry, done)
-
-      case 'Directory':
-      case 'GNUDumpDir':
-        return this[DIRECTORY](entry, done)
-    }
-  }
-
-  [LINK] (entry, linkpath, link, done) {
-    // XXX: get the type ('symlink' or 'junction') for windows
-    fs[link](linkpath, entry.absolute, er => {
-      if (er) {
-        this[ONERROR](er, entry)
-      } else {
-        this[UNPEND]()
-        entry.resume()
-      }
-      done()
-    })
-  }
-}
-
-const callSync = fn => {
-  try {
-    return [null, fn()]
-  } catch (er) {
-    return [er, null]
-  }
-}
-class UnpackSync extends Unpack {
-  [MAKEFS] (er, entry) {
-    return super[MAKEFS](er, entry, () => {})
-  }
-
-  [CHECKFS] (entry) {
-    this[PRUNECACHE](entry)
-
-    if (!this[CHECKED_CWD]) {
-      const er = this[MKDIR](this.cwd, this.dmode)
-      if (er) {
-        return this[ONERROR](er, entry)
-      }
-      this[CHECKED_CWD] = true
-    }
-
-    // don't bother to make the parent if the current entry is the cwd,
-    // we've already checked it.
-    if (entry.absolute !== this.cwd) {
-      const parent = normPath(path.dirname(entry.absolute))
-      if (parent !== this.cwd) {
-        const mkParent = this[MKDIR](parent, this.dmode)
-        if (mkParent) {
-          return this[ONERROR](mkParent, entry)
-        }
-      }
-    }
-
-    const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute))
-    if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
-      return this[SKIP](entry)
-    }
-
-    if (lstatEr || this[ISREUSABLE](entry, st)) {
-      return this[MAKEFS](null, entry)
-    }
-
-    if (st.isDirectory()) {
-      if (entry.type === 'Directory') {
-        const needChmod = !this.noChmod &&
-          entry.mode &&
-          (st.mode & 0o7777) !== entry.mode
-        const [er] = needChmod ? callSync(() => {
-          fs.chmodSync(entry.absolute, entry.mode)
-        }) : []
-        return this[MAKEFS](er, entry)
-      }
-      // not a dir entry, have to remove it
-      const [er] = callSync(() => fs.rmdirSync(entry.absolute))
-      this[MAKEFS](er, entry)
-    }
-
-    // not a dir, and not reusable.
-    // don't remove if it's the cwd, since we want that error.
-    const [er] = entry.absolute === this.cwd ? []
-      : callSync(() => unlinkFileSync(entry.absolute))
-    this[MAKEFS](er, entry)
-  }
-
-  [FILE] (entry, done) {
-    const mode = entry.mode & 0o7777 || this.fmode
-
-    const oner = er => {
-      let closeError
-      try {
-        fs.closeSync(fd)
-      } catch (e) {
-        closeError = e
-      }
-      if (er || closeError) {
-        this[ONERROR](er || closeError, entry)
-      }
-      done()
-    }
-
-    let fd
-    try {
-      fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
-    } catch (er) {
-      return oner(er)
-    }
-    const tx = this.transform ? this.transform(entry) || entry : entry
-    if (tx !== entry) {
-      tx.on('error', er => this[ONERROR](er, entry))
-      entry.pipe(tx)
-    }
-
-    tx.on('data', chunk => {
-      try {
-        fs.writeSync(fd, chunk, 0, chunk.length)
-      } catch (er) {
-        oner(er)
-      }
-    })
-
-    tx.on('end', _ => {
-      let er = null
-      // try both, falling futimes back to utimes
-      // if either fails, handle the first error
-      if (entry.mtime && !this.noMtime) {
-        const atime = entry.atime || new Date()
-        const mtime = entry.mtime
-        try {
-          fs.futimesSync(fd, atime, mtime)
-        } catch (futimeser) {
-          try {
-            fs.utimesSync(entry.absolute, atime, mtime)
-          } catch (utimeser) {
-            er = futimeser
-          }
-        }
-      }
-
-      if (this[DOCHOWN](entry)) {
-        const uid = this[UID](entry)
-        const gid = this[GID](entry)
-
-        try {
-          fs.fchownSync(fd, uid, gid)
-        } catch (fchowner) {
-          try {
-            fs.chownSync(entry.absolute, uid, gid)
-          } catch (chowner) {
-            er = er || fchowner
-          }
-        }
-      }
-
-      oner(er)
-    })
-  }
-
-  [DIRECTORY] (entry, done) {
-    const mode = entry.mode & 0o7777 || this.dmode
-    const er = this[MKDIR](entry.absolute, mode)
-    if (er) {
-      this[ONERROR](er, entry)
-      done()
-      return
-    }
-    if (entry.mtime && !this.noMtime) {
-      try {
-        fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
-      } catch (er) {}
-    }
-    if (this[DOCHOWN](entry)) {
-      try {
-        fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
-      } catch (er) {}
-    }
-    done()
-    entry.resume()
-  }
-
-  [MKDIR] (dir, mode) {
-    try {
-      return mkdir.sync(normPath(dir), {
-        uid: this.uid,
-        gid: this.gid,
-        processUid: this.processUid,
-        processGid: this.processGid,
-        umask: this.processUmask,
-        preserve: this.preservePaths,
-        unlink: this.unlink,
-        cache: this.dirCache,
-        cwd: this.cwd,
-        mode: mode,
-      })
-    } catch (er) {
-      return er
-    }
-  }
-
-  [LINK] (entry, linkpath, link, done) {
-    try {
-      fs[link + 'Sync'](linkpath, entry.absolute)
-      done()
-      entry.resume()
-    } catch (er) {
-      return this[ONERROR](er, entry)
-    }
-  }
-}
-
-Unpack.Sync = UnpackSync
-module.exports = Unpack
diff --git a/node_modules/tar/lib/update.js b/node_modules/tar/lib/update.js
deleted file mode 100644
index 4d328543b315e..0000000000000
--- a/node_modules/tar/lib/update.js
+++ /dev/null
@@ -1,40 +0,0 @@
-'use strict'
-
-// tar -u
-
-const hlo = require('./high-level-opt.js')
-const r = require('./replace.js')
-// just call tar.r with the filter and mtimeCache
-
-module.exports = (opt_, files, cb) => {
-  const opt = hlo(opt_)
-
-  if (!opt.file) {
-    throw new TypeError('file is required')
-  }
-
-  if (opt.gzip || opt.brotli || opt.file.endsWith('.br') || opt.file.endsWith('.tbr')) {
-    throw new TypeError('cannot append to compressed archives')
-  }
-
-  if (!files || !Array.isArray(files) || !files.length) {
-    throw new TypeError('no files or directories specified')
-  }
-
-  files = Array.from(files)
-
-  mtimeFilter(opt)
-  return r(opt, files, cb)
-}
-
-const mtimeFilter = opt => {
-  const filter = opt.filter
-
-  if (!opt.mtimeCache) {
-    opt.mtimeCache = new Map()
-  }
-
-  opt.filter = filter ? (path, stat) =>
-    filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
-    : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
-}
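
The `mtimeFilter` wrapper above is what separates `tar -u` from `tar -r`: an entry already present in the archive is only re-added when the copy on disk is newer than the mtime recorded in `mtimeCache`. A standalone sketch of the filter it installs (not importing the deleted module):

```js
// Standalone sketch of the filter mtimeFilter() builds above.
const makeMtimeFilter = (mtimeCache, userFilter) =>
  userFilter
    ? (p, stat) => userFilter(p, stat) && !(mtimeCache.get(p) > stat.mtime)
    : (p, stat) => !(mtimeCache.get(p) > stat.mtime)

const cache = new Map([['a.txt', new Date('2024-02-01')]])
const filter = makeMtimeFilter(cache)

console.log(filter('a.txt', { mtime: new Date('2024-01-01') })) // false: archived copy is newer
console.log(filter('a.txt', { mtime: new Date('2024-03-01') })) // true:  disk copy is newer
console.log(filter('b.txt', { mtime: new Date('2024-01-01') })) // true:  not in the archive yet
```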
diff --git a/node_modules/tar/lib/warn-mixin.js b/node_modules/tar/lib/warn-mixin.js
deleted file mode 100644
index a940639636133..0000000000000
--- a/node_modules/tar/lib/warn-mixin.js
+++ /dev/null
@@ -1,24 +0,0 @@
-'use strict'
-module.exports = Base => class extends Base {
-  warn (code, message, data = {}) {
-    if (this.file) {
-      data.file = this.file
-    }
-    if (this.cwd) {
-      data.cwd = this.cwd
-    }
-    data.code = message instanceof Error && message.code || code
-    data.tarCode = code
-    if (!this.strict && data.recoverable !== false) {
-      if (message instanceof Error) {
-        data = Object.assign(message, data)
-        message = message.message
-      }
-      this.emit('warn', data.tarCode, message, data)
-    } else if (message instanceof Error) {
-      this.emit('error', Object.assign(message, data))
-    } else {
-      this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
-    }
-  }
-}
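
A condensed sketch of how the mixin above behaves in strict vs. non-strict mode, mixed into a plain EventEmitter. The inline class is illustrative only and drops the `file`/`cwd` bookkeeping and the `Error`-message branches:

```js
// Illustrative condensed copy of warn() above, applied to an EventEmitter.
const EE = require('events')
const warner = Base => class extends Base {
  warn (code, message, data = {}) {
    data.code = code
    data.tarCode = code
    if (!this.strict && data.recoverable !== false) {
      this.emit('warn', data.tarCode, message, data)
    } else {
      this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
    }
  }
}

const t = new (warner(EE))()
t.on('warn', (code, msg) => console.log('warned:', code, msg))
t.on('error', er => console.log('errored:', er.message))

t.warn('TAR_ENTRY_INFO', 'non-strict call')  // warned: TAR_ENTRY_INFO non-strict call
t.strict = true
t.warn('TAR_ENTRY_INFO', 'strict call')      // errored: TAR_ENTRY_INFO: strict call
```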
diff --git a/node_modules/tar/lib/winchars.js b/node_modules/tar/lib/winchars.js
deleted file mode 100644
index ebcab4aed3e52..0000000000000
--- a/node_modules/tar/lib/winchars.js
+++ /dev/null
@@ -1,23 +0,0 @@
-'use strict'
-
-// When writing files on Windows, translate the characters to their
-// 0xf000 higher-encoded versions.
-
-const raw = [
-  '|',
-  '<',
-  '>',
-  '?',
-  ':',
-]
-
-const win = raw.map(char =>
-  String.fromCharCode(0xf000 + char.charCodeAt(0)))
-
-const toWin = new Map(raw.map((char, i) => [char, win[i]]))
-const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
-
-module.exports = {
-  encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
-  decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s),
-}
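
The mapping above shifts the five Windows-reserved characters into the Unicode private use area (`0xf000` plus the original code point) so they survive a round trip through a Windows file name. A small check, requiring a hypothetical local copy of the module (the patch removes the original path):

```js
// Hypothetical local copy of the module above, kept only for this example.
const wc = require('./winchars.js')

const enc = wc.encode('a:b?c')
console.log([...enc].map(c => c.charCodeAt(0).toString(16)))
// [ '61', 'f03a', '62', 'f03f', '63' ]
console.log(wc.decode(enc)) // 'a:b?c'
```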
diff --git a/node_modules/tar/lib/write-entry.js b/node_modules/tar/lib/write-entry.js
deleted file mode 100644
index 7d2f3eb1acc8c..0000000000000
--- a/node_modules/tar/lib/write-entry.js
+++ /dev/null
@@ -1,546 +0,0 @@
-'use strict'
-const { Minipass } = require('minipass')
-const Pax = require('./pax.js')
-const Header = require('./header.js')
-const fs = require('fs')
-const path = require('path')
-const normPath = require('./normalize-windows-path.js')
-const stripSlash = require('./strip-trailing-slashes.js')
-
-const prefixPath = (path, prefix) => {
-  if (!prefix) {
-    return normPath(path)
-  }
-  path = normPath(path).replace(/^\.(\/|$)/, '')
-  return stripSlash(prefix) + '/' + path
-}
-
-const maxReadSize = 16 * 1024 * 1024
-const PROCESS = Symbol('process')
-const FILE = Symbol('file')
-const DIRECTORY = Symbol('directory')
-const SYMLINK = Symbol('symlink')
-const HARDLINK = Symbol('hardlink')
-const HEADER = Symbol('header')
-const READ = Symbol('read')
-const LSTAT = Symbol('lstat')
-const ONLSTAT = Symbol('onlstat')
-const ONREAD = Symbol('onread')
-const ONREADLINK = Symbol('onreadlink')
-const OPENFILE = Symbol('openfile')
-const ONOPENFILE = Symbol('onopenfile')
-const CLOSE = Symbol('close')
-const MODE = Symbol('mode')
-const AWAITDRAIN = Symbol('awaitDrain')
-const ONDRAIN = Symbol('ondrain')
-const PREFIX = Symbol('prefix')
-const HAD_ERROR = Symbol('hadError')
-const warner = require('./warn-mixin.js')
-const winchars = require('./winchars.js')
-const stripAbsolutePath = require('./strip-absolute-path.js')
-
-const modeFix = require('./mode-fix.js')
-
-const WriteEntry = warner(class WriteEntry extends Minipass {
-  constructor (p, opt) {
-    opt = opt || {}
-    super(opt)
-    if (typeof p !== 'string') {
-      throw new TypeError('path is required')
-    }
-    this.path = normPath(p)
-    // suppress atime, ctime, uid, gid, uname, gname
-    this.portable = !!opt.portable
-    // until node has builtin pwnam functions, this'll have to do
-    this.myuid = process.getuid && process.getuid() || 0
-    this.myuser = process.env.USER || ''
-    this.maxReadSize = opt.maxReadSize || maxReadSize
-    this.linkCache = opt.linkCache || new Map()
-    this.statCache = opt.statCache || new Map()
-    this.preservePaths = !!opt.preservePaths
-    this.cwd = normPath(opt.cwd || process.cwd())
-    this.strict = !!opt.strict
-    this.noPax = !!opt.noPax
-    this.noMtime = !!opt.noMtime
-    this.mtime = opt.mtime || null
-    this.prefix = opt.prefix ? normPath(opt.prefix) : null
-
-    this.fd = null
-    this.blockLen = null
-    this.blockRemain = null
-    this.buf = null
-    this.offset = null
-    this.length = null
-    this.pos = null
-    this.remain = null
-
-    if (typeof opt.onwarn === 'function') {
-      this.on('warn', opt.onwarn)
-    }
-
-    let pathWarn = false
-    if (!this.preservePaths) {
-      const [root, stripped] = stripAbsolutePath(this.path)
-      if (root) {
-        this.path = stripped
-        pathWarn = root
-      }
-    }
-
-    this.win32 = !!opt.win32 || process.platform === 'win32'
-    if (this.win32) {
-      // force the \ to / normalization, since we might not *actually*
-      // be on windows, but want \ to be considered a path separator.
-      this.path = winchars.decode(this.path.replace(/\\/g, '/'))
-      p = p.replace(/\\/g, '/')
-    }
-
-    this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p))
-
-    if (this.path === '') {
-      this.path = './'
-    }
-
-    if (pathWarn) {
-      this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-        entry: this,
-        path: pathWarn + this.path,
-      })
-    }
-
-    if (this.statCache.has(this.absolute)) {
-      this[ONLSTAT](this.statCache.get(this.absolute))
-    } else {
-      this[LSTAT]()
-    }
-  }
-
-  emit (ev, ...data) {
-    if (ev === 'error') {
-      this[HAD_ERROR] = true
-    }
-    return super.emit(ev, ...data)
-  }
-
-  [LSTAT] () {
-    fs.lstat(this.absolute, (er, stat) => {
-      if (er) {
-        return this.emit('error', er)
-      }
-      this[ONLSTAT](stat)
-    })
-  }
-
-  [ONLSTAT] (stat) {
-    this.statCache.set(this.absolute, stat)
-    this.stat = stat
-    if (!stat.isFile()) {
-      stat.size = 0
-    }
-    this.type = getType(stat)
-    this.emit('stat', stat)
-    this[PROCESS]()
-  }
-
-  [PROCESS] () {
-    switch (this.type) {
-      case 'File': return this[FILE]()
-      case 'Directory': return this[DIRECTORY]()
-      case 'SymbolicLink': return this[SYMLINK]()
-      // unsupported types are ignored.
-      default: return this.end()
-    }
-  }
-
-  [MODE] (mode) {
-    return modeFix(mode, this.type === 'Directory', this.portable)
-  }
-
-  [PREFIX] (path) {
-    return prefixPath(path, this.prefix)
-  }
-
-  [HEADER] () {
-    if (this.type === 'Directory' && this.portable) {
-      this.noMtime = true
-    }
-
-    this.header = new Header({
-      path: this[PREFIX](this.path),
-      // only apply the prefix to hard links.
-      linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
-      : this.linkpath,
-      // only the permissions and setuid/setgid/sticky bitflags
-      // not the higher-order bits that specify file type
-      mode: this[MODE](this.stat.mode),
-      uid: this.portable ? null : this.stat.uid,
-      gid: this.portable ? null : this.stat.gid,
-      size: this.stat.size,
-      mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
-      type: this.type,
-      uname: this.portable ? null :
-      this.stat.uid === this.myuid ? this.myuser : '',
-      atime: this.portable ? null : this.stat.atime,
-      ctime: this.portable ? null : this.stat.ctime,
-    })
-
-    if (this.header.encode() && !this.noPax) {
-      super.write(new Pax({
-        atime: this.portable ? null : this.header.atime,
-        ctime: this.portable ? null : this.header.ctime,
-        gid: this.portable ? null : this.header.gid,
-        mtime: this.noMtime ? null : this.mtime || this.header.mtime,
-        path: this[PREFIX](this.path),
-        linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
-        : this.linkpath,
-        size: this.header.size,
-        uid: this.portable ? null : this.header.uid,
-        uname: this.portable ? null : this.header.uname,
-        dev: this.portable ? null : this.stat.dev,
-        ino: this.portable ? null : this.stat.ino,
-        nlink: this.portable ? null : this.stat.nlink,
-      }).encode())
-    }
-    super.write(this.header.block)
-  }
-
-  [DIRECTORY] () {
-    if (this.path.slice(-1) !== '/') {
-      this.path += '/'
-    }
-    this.stat.size = 0
-    this[HEADER]()
-    this.end()
-  }
-
-  [SYMLINK] () {
-    fs.readlink(this.absolute, (er, linkpath) => {
-      if (er) {
-        return this.emit('error', er)
-      }
-      this[ONREADLINK](linkpath)
-    })
-  }
-
-  [ONREADLINK] (linkpath) {
-    this.linkpath = normPath(linkpath)
-    this[HEADER]()
-    this.end()
-  }
-
-  [HARDLINK] (linkpath) {
-    this.type = 'Link'
-    this.linkpath = normPath(path.relative(this.cwd, linkpath))
-    this.stat.size = 0
-    this[HEADER]()
-    this.end()
-  }
-
-  [FILE] () {
-    if (this.stat.nlink > 1) {
-      const linkKey = this.stat.dev + ':' + this.stat.ino
-      if (this.linkCache.has(linkKey)) {
-        const linkpath = this.linkCache.get(linkKey)
-        if (linkpath.indexOf(this.cwd) === 0) {
-          return this[HARDLINK](linkpath)
-        }
-      }
-      this.linkCache.set(linkKey, this.absolute)
-    }
-
-    this[HEADER]()
-    if (this.stat.size === 0) {
-      return this.end()
-    }
-
-    this[OPENFILE]()
-  }
-
-  [OPENFILE] () {
-    fs.open(this.absolute, 'r', (er, fd) => {
-      if (er) {
-        return this.emit('error', er)
-      }
-      this[ONOPENFILE](fd)
-    })
-  }
-
-  [ONOPENFILE] (fd) {
-    this.fd = fd
-    if (this[HAD_ERROR]) {
-      return this[CLOSE]()
-    }
-
-    this.blockLen = 512 * Math.ceil(this.stat.size / 512)
-    this.blockRemain = this.blockLen
-    const bufLen = Math.min(this.blockLen, this.maxReadSize)
-    this.buf = Buffer.allocUnsafe(bufLen)
-    this.offset = 0
-    this.pos = 0
-    this.remain = this.stat.size
-    this.length = this.buf.length
-    this[READ]()
-  }
-
-  [READ] () {
-    const { fd, buf, offset, length, pos } = this
-    fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
-      if (er) {
-        // ignoring the error from close(2) is a bad practice, but at
-        // this point we already have an error, don't need another one
-        return this[CLOSE](() => this.emit('error', er))
-      }
-      this[ONREAD](bytesRead)
-    })
-  }
-
-  [CLOSE] (cb) {
-    fs.close(this.fd, cb)
-  }
-
-  [ONREAD] (bytesRead) {
-    if (bytesRead <= 0 && this.remain > 0) {
-      const er = new Error('encountered unexpected EOF')
-      er.path = this.absolute
-      er.syscall = 'read'
-      er.code = 'EOF'
-      return this[CLOSE](() => this.emit('error', er))
-    }
-
-    if (bytesRead > this.remain) {
-      const er = new Error('did not encounter expected EOF')
-      er.path = this.absolute
-      er.syscall = 'read'
-      er.code = 'EOF'
-      return this[CLOSE](() => this.emit('error', er))
-    }
-
-    // null out the rest of the buffer, if we could fit the block padding
-    // at the end of this loop, we've incremented bytesRead and this.remain
-    // to be incremented up to the blockRemain level, as if we had expected
-    // to get a null-padded file, and read it until the end.  then we will
-    // decrement both remain and blockRemain by bytesRead, and know that we
-    // reached the expected EOF, without any null buffer to append.
-    if (bytesRead === this.remain) {
-      for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
-        this.buf[i + this.offset] = 0
-        bytesRead++
-        this.remain++
-      }
-    }
-
-    const writeBuf = this.offset === 0 && bytesRead === this.buf.length ?
-      this.buf : this.buf.slice(this.offset, this.offset + bytesRead)
-
-    const flushed = this.write(writeBuf)
-    if (!flushed) {
-      this[AWAITDRAIN](() => this[ONDRAIN]())
-    } else {
-      this[ONDRAIN]()
-    }
-  }
-
-  [AWAITDRAIN] (cb) {
-    this.once('drain', cb)
-  }
-
-  write (writeBuf) {
-    if (this.blockRemain < writeBuf.length) {
-      const er = new Error('writing more data than expected')
-      er.path = this.absolute
-      return this.emit('error', er)
-    }
-    this.remain -= writeBuf.length
-    this.blockRemain -= writeBuf.length
-    this.pos += writeBuf.length
-    this.offset += writeBuf.length
-    return super.write(writeBuf)
-  }
-
-  [ONDRAIN] () {
-    if (!this.remain) {
-      if (this.blockRemain) {
-        super.write(Buffer.alloc(this.blockRemain))
-      }
-      return this[CLOSE](er => er ? this.emit('error', er) : this.end())
-    }
-
-    if (this.offset >= this.length) {
-      // if we only have a smaller bit left to read, alloc a smaller buffer
-      // otherwise, keep it the same length it was before.
-      this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length))
-      this.offset = 0
-    }
-    this.length = this.buf.length - this.offset
-    this[READ]()
-  }
-})
-
-class WriteEntrySync extends WriteEntry {
-  [LSTAT] () {
-    this[ONLSTAT](fs.lstatSync(this.absolute))
-  }
-
-  [SYMLINK] () {
-    this[ONREADLINK](fs.readlinkSync(this.absolute))
-  }
-
-  [OPENFILE] () {
-    this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
-  }
-
-  [READ] () {
-    let threw = true
-    try {
-      const { fd, buf, offset, length, pos } = this
-      const bytesRead = fs.readSync(fd, buf, offset, length, pos)
-      this[ONREAD](bytesRead)
-      threw = false
-    } finally {
-      // ignoring the error from close(2) is a bad practice, but at
-      // this point we already have an error, don't need another one
-      if (threw) {
-        try {
-          this[CLOSE](() => {})
-        } catch (er) {}
-      }
-    }
-  }
-
-  [AWAITDRAIN] (cb) {
-    cb()
-  }
-
-  [CLOSE] (cb) {
-    fs.closeSync(this.fd)
-    cb()
-  }
-}
-
-const WriteEntryTar = warner(class WriteEntryTar extends Minipass {
-  constructor (readEntry, opt) {
-    opt = opt || {}
-    super(opt)
-    this.preservePaths = !!opt.preservePaths
-    this.portable = !!opt.portable
-    this.strict = !!opt.strict
-    this.noPax = !!opt.noPax
-    this.noMtime = !!opt.noMtime
-
-    this.readEntry = readEntry
-    this.type = readEntry.type
-    if (this.type === 'Directory' && this.portable) {
-      this.noMtime = true
-    }
-
-    this.prefix = opt.prefix || null
-
-    this.path = normPath(readEntry.path)
-    this.mode = this[MODE](readEntry.mode)
-    this.uid = this.portable ? null : readEntry.uid
-    this.gid = this.portable ? null : readEntry.gid
-    this.uname = this.portable ? null : readEntry.uname
-    this.gname = this.portable ? null : readEntry.gname
-    this.size = readEntry.size
-    this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
-    this.atime = this.portable ? null : readEntry.atime
-    this.ctime = this.portable ? null : readEntry.ctime
-    this.linkpath = normPath(readEntry.linkpath)
-
-    if (typeof opt.onwarn === 'function') {
-      this.on('warn', opt.onwarn)
-    }
-
-    let pathWarn = false
-    if (!this.preservePaths) {
-      const [root, stripped] = stripAbsolutePath(this.path)
-      if (root) {
-        this.path = stripped
-        pathWarn = root
-      }
-    }
-
-    this.remain = readEntry.size
-    this.blockRemain = readEntry.startBlockSize
-
-    this.header = new Header({
-      path: this[PREFIX](this.path),
-      linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
-      : this.linkpath,
-      // only the permissions and setuid/setgid/sticky bitflags
-      // not the higher-order bits that specify file type
-      mode: this.mode,
-      uid: this.portable ? null : this.uid,
-      gid: this.portable ? null : this.gid,
-      size: this.size,
-      mtime: this.noMtime ? null : this.mtime,
-      type: this.type,
-      uname: this.portable ? null : this.uname,
-      atime: this.portable ? null : this.atime,
-      ctime: this.portable ? null : this.ctime,
-    })
-
-    if (pathWarn) {
-      this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
-        entry: this,
-        path: pathWarn + this.path,
-      })
-    }
-
-    if (this.header.encode() && !this.noPax) {
-      super.write(new Pax({
-        atime: this.portable ? null : this.atime,
-        ctime: this.portable ? null : this.ctime,
-        gid: this.portable ? null : this.gid,
-        mtime: this.noMtime ? null : this.mtime,
-        path: this[PREFIX](this.path),
-        linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
-        : this.linkpath,
-        size: this.size,
-        uid: this.portable ? null : this.uid,
-        uname: this.portable ? null : this.uname,
-        dev: this.portable ? null : this.readEntry.dev,
-        ino: this.portable ? null : this.readEntry.ino,
-        nlink: this.portable ? null : this.readEntry.nlink,
-      }).encode())
-    }
-
-    super.write(this.header.block)
-    readEntry.pipe(this)
-  }
-
-  [PREFIX] (path) {
-    return prefixPath(path, this.prefix)
-  }
-
-  [MODE] (mode) {
-    return modeFix(mode, this.type === 'Directory', this.portable)
-  }
-
-  write (data) {
-    const writeLen = data.length
-    if (writeLen > this.blockRemain) {
-      throw new Error('writing more to entry than is appropriate')
-    }
-    this.blockRemain -= writeLen
-    return super.write(data)
-  }
-
-  end () {
-    if (this.blockRemain) {
-      super.write(Buffer.alloc(this.blockRemain))
-    }
-    return super.end()
-  }
-})
-
-WriteEntry.Sync = WriteEntrySync
-WriteEntry.Tar = WriteEntryTar
-
-const getType = stat =>
-  stat.isFile() ? 'File'
-  : stat.isDirectory() ? 'Directory'
-  : stat.isSymbolicLink() ? 'SymbolicLink'
-  : 'Unsupported'
-
-module.exports = WriteEntry
diff --git a/node_modules/tar/node_modules/fs-minipass/LICENSE b/node_modules/tar/node_modules/fs-minipass/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/fs-minipass/index.js b/node_modules/tar/node_modules/fs-minipass/index.js
deleted file mode 100644
index 9b0779c80c55e..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/index.js
+++ /dev/null
@@ -1,422 +0,0 @@
-'use strict'
-const MiniPass = require('minipass')
-const EE = require('events').EventEmitter
-const fs = require('fs')
-
-let writev = fs.writev
-/* istanbul ignore next */
-if (!writev) {
-  // This entire block can be removed if support for earlier than Node.js
-  // 12.9.0 is not needed.
-  const binding = process.binding('fs')
-  const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
-
-  writev = (fd, iovec, pos, cb) => {
-    const done = (er, bw) => cb(er, bw, iovec)
-    const req = new FSReqWrap()
-    req.oncomplete = done
-    binding.writeBuffers(fd, iovec, pos, req)
-  }
-}
-
-const _autoClose = Symbol('_autoClose')
-const _close = Symbol('_close')
-const _ended = Symbol('_ended')
-const _fd = Symbol('_fd')
-const _finished = Symbol('_finished')
-const _flags = Symbol('_flags')
-const _flush = Symbol('_flush')
-const _handleChunk = Symbol('_handleChunk')
-const _makeBuf = Symbol('_makeBuf')
-const _mode = Symbol('_mode')
-const _needDrain = Symbol('_needDrain')
-const _onerror = Symbol('_onerror')
-const _onopen = Symbol('_onopen')
-const _onread = Symbol('_onread')
-const _onwrite = Symbol('_onwrite')
-const _open = Symbol('_open')
-const _path = Symbol('_path')
-const _pos = Symbol('_pos')
-const _queue = Symbol('_queue')
-const _read = Symbol('_read')
-const _readSize = Symbol('_readSize')
-const _reading = Symbol('_reading')
-const _remain = Symbol('_remain')
-const _size = Symbol('_size')
-const _write = Symbol('_write')
-const _writing = Symbol('_writing')
-const _defaultFlag = Symbol('_defaultFlag')
-const _errored = Symbol('_errored')
-
-class ReadStream extends MiniPass {
-  constructor (path, opt) {
-    opt = opt || {}
-    super(opt)
-
-    this.readable = true
-    this.writable = false
-
-    if (typeof path !== 'string')
-      throw new TypeError('path must be a string')
-
-    this[_errored] = false
-    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
-    this[_path] = path
-    this[_readSize] = opt.readSize || 16*1024*1024
-    this[_reading] = false
-    this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
-    this[_remain] = this[_size]
-    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
-      opt.autoClose : true
-
-    if (typeof this[_fd] === 'number')
-      this[_read]()
-    else
-      this[_open]()
-  }
-
-  get fd () { return this[_fd] }
-  get path () { return this[_path] }
-
-  write () {
-    throw new TypeError('this is a readable stream')
-  }
-
-  end () {
-    throw new TypeError('this is a readable stream')
-  }
-
-  [_open] () {
-    fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
-  }
-
-  [_onopen] (er, fd) {
-    if (er)
-      this[_onerror](er)
-    else {
-      this[_fd] = fd
-      this.emit('open', fd)
-      this[_read]()
-    }
-  }
-
-  [_makeBuf] () {
-    return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
-  }
-
-  [_read] () {
-    if (!this[_reading]) {
-      this[_reading] = true
-      const buf = this[_makeBuf]()
-      /* istanbul ignore if */
-      if (buf.length === 0)
-        return process.nextTick(() => this[_onread](null, 0, buf))
-      fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
-        this[_onread](er, br, buf))
-    }
-  }
-
-  [_onread] (er, br, buf) {
-    this[_reading] = false
-    if (er)
-      this[_onerror](er)
-    else if (this[_handleChunk](br, buf))
-      this[_read]()
-  }
-
-  [_close] () {
-    if (this[_autoClose] && typeof this[_fd] === 'number') {
-      const fd = this[_fd]
-      this[_fd] = null
-      fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
-    }
-  }
-
-  [_onerror] (er) {
-    this[_reading] = true
-    this[_close]()
-    this.emit('error', er)
-  }
-
-  [_handleChunk] (br, buf) {
-    let ret = false
-    // no effect if infinite
-    this[_remain] -= br
-    if (br > 0)
-      ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
-
-    if (br === 0 || this[_remain] <= 0) {
-      ret = false
-      this[_close]()
-      super.end()
-    }
-
-    return ret
-  }
-
-  emit (ev, data) {
-    switch (ev) {
-      case 'prefinish':
-      case 'finish':
-        break
-
-      case 'drain':
-        if (typeof this[_fd] === 'number')
-          this[_read]()
-        break
-
-      case 'error':
-        if (this[_errored])
-          return
-        this[_errored] = true
-        return super.emit(ev, data)
-
-      default:
-        return super.emit(ev, data)
-    }
-  }
-}
-
-class ReadStreamSync extends ReadStream {
-  [_open] () {
-    let threw = true
-    try {
-      this[_onopen](null, fs.openSync(this[_path], 'r'))
-      threw = false
-    } finally {
-      if (threw)
-        this[_close]()
-    }
-  }
-
-  [_read] () {
-    let threw = true
-    try {
-      if (!this[_reading]) {
-        this[_reading] = true
-        do {
-          const buf = this[_makeBuf]()
-          /* istanbul ignore next */
-          const br = buf.length === 0 ? 0
-            : fs.readSync(this[_fd], buf, 0, buf.length, null)
-          if (!this[_handleChunk](br, buf))
-            break
-        } while (true)
-        this[_reading] = false
-      }
-      threw = false
-    } finally {
-      if (threw)
-        this[_close]()
-    }
-  }
-
-  [_close] () {
-    if (this[_autoClose] && typeof this[_fd] === 'number') {
-      const fd = this[_fd]
-      this[_fd] = null
-      fs.closeSync(fd)
-      this.emit('close')
-    }
-  }
-}
-
-class WriteStream extends EE {
-  constructor (path, opt) {
-    opt = opt || {}
-    super(opt)
-    this.readable = false
-    this.writable = true
-    this[_errored] = false
-    this[_writing] = false
-    this[_ended] = false
-    this[_needDrain] = false
-    this[_queue] = []
-    this[_path] = path
-    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
-    this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
-    this[_pos] = typeof opt.start === 'number' ? opt.start : null
-    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
-      opt.autoClose : true
-
-    // truncating makes no sense when writing into the middle
-    const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
-    this[_defaultFlag] = opt.flags === undefined
-    this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
-
-    if (this[_fd] === null)
-      this[_open]()
-  }
-
-  emit (ev, data) {
-    if (ev === 'error') {
-      if (this[_errored])
-        return
-      this[_errored] = true
-    }
-    return super.emit(ev, data)
-  }
-
-
-  get fd () { return this[_fd] }
-  get path () { return this[_path] }
-
-  [_onerror] (er) {
-    this[_close]()
-    this[_writing] = true
-    this.emit('error', er)
-  }
-
-  [_open] () {
-    fs.open(this[_path], this[_flags], this[_mode],
-      (er, fd) => this[_onopen](er, fd))
-  }
-
-  [_onopen] (er, fd) {
-    if (this[_defaultFlag] &&
-        this[_flags] === 'r+' &&
-        er && er.code === 'ENOENT') {
-      this[_flags] = 'w'
-      this[_open]()
-    } else if (er)
-      this[_onerror](er)
-    else {
-      this[_fd] = fd
-      this.emit('open', fd)
-      this[_flush]()
-    }
-  }
-
-  end (buf, enc) {
-    if (buf)
-      this.write(buf, enc)
-
-    this[_ended] = true
-
-    // synthetic after-write logic, where drain/finish live
-    if (!this[_writing] && !this[_queue].length &&
-        typeof this[_fd] === 'number')
-      this[_onwrite](null, 0)
-    return this
-  }
-
-  write (buf, enc) {
-    if (typeof buf === 'string')
-      buf = Buffer.from(buf, enc)
-
-    if (this[_ended]) {
-      this.emit('error', new Error('write() after end()'))
-      return false
-    }
-
-    if (this[_fd] === null || this[_writing] || this[_queue].length) {
-      this[_queue].push(buf)
-      this[_needDrain] = true
-      return false
-    }
-
-    this[_writing] = true
-    this[_write](buf)
-    return true
-  }
-
-  [_write] (buf) {
-    fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
-      this[_onwrite](er, bw))
-  }
-
-  [_onwrite] (er, bw) {
-    if (er)
-      this[_onerror](er)
-    else {
-      if (this[_pos] !== null)
-        this[_pos] += bw
-      if (this[_queue].length)
-        this[_flush]()
-      else {
-        this[_writing] = false
-
-        if (this[_ended] && !this[_finished]) {
-          this[_finished] = true
-          this[_close]()
-          this.emit('finish')
-        } else if (this[_needDrain]) {
-          this[_needDrain] = false
-          this.emit('drain')
-        }
-      }
-    }
-  }
-
-  [_flush] () {
-    if (this[_queue].length === 0) {
-      if (this[_ended])
-        this[_onwrite](null, 0)
-    } else if (this[_queue].length === 1)
-      this[_write](this[_queue].pop())
-    else {
-      const iovec = this[_queue]
-      this[_queue] = []
-      writev(this[_fd], iovec, this[_pos],
-        (er, bw) => this[_onwrite](er, bw))
-    }
-  }
-
-  [_close] () {
-    if (this[_autoClose] && typeof this[_fd] === 'number') {
-      const fd = this[_fd]
-      this[_fd] = null
-      fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
-    }
-  }
-}
-
-class WriteStreamSync extends WriteStream {
-  [_open] () {
-    let fd
-    // only wrap in a try{} block if we know we'll retry, to avoid
-    // the rethrow obscuring the error's source frame in most cases.
-    if (this[_defaultFlag] && this[_flags] === 'r+') {
-      try {
-        fd = fs.openSync(this[_path], this[_flags], this[_mode])
-      } catch (er) {
-        if (er.code === 'ENOENT') {
-          this[_flags] = 'w'
-          return this[_open]()
-        } else
-          throw er
-      }
-    } else
-      fd = fs.openSync(this[_path], this[_flags], this[_mode])
-
-    this[_onopen](null, fd)
-  }
-
-  [_close] () {
-    if (this[_autoClose] && typeof this[_fd] === 'number') {
-      const fd = this[_fd]
-      this[_fd] = null
-      fs.closeSync(fd)
-      this.emit('close')
-    }
-  }
-
-  [_write] (buf) {
-    // throw the original, but try to close if it fails
-    let threw = true
-    try {
-      this[_onwrite](null,
-        fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
-      threw = false
-    } finally {
-      if (threw)
-        try { this[_close]() } catch (_) {}
-    }
-  }
-}
-
-exports.ReadStream = ReadStream
-exports.ReadStreamSync = ReadStreamSync
-
-exports.WriteStream = WriteStream
-exports.WriteStreamSync = WriteStreamSync
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/LICENSE b/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/LICENSE
deleted file mode 100644
index bf1dece2e1f12..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.js b/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.js
deleted file mode 100644
index e8797aab6cc27..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/index.js
+++ /dev/null
@@ -1,649 +0,0 @@
-'use strict'
-const proc = typeof process === 'object' && process ? process : {
-  stdout: null,
-  stderr: null,
-}
-const EE = require('events')
-const Stream = require('stream')
-const SD = require('string_decoder').StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-const DESTROYED = Symbol('destroyed')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_  !== '1'
-const ASYNCITERATOR = doIter && Symbol.asyncIterator
-  || Symbol('asyncIterator not implemented')
-const ITERATOR = doIter && Symbol.iterator
-  || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev =>
-  ev === 'end' ||
-  ev === 'finish' ||
-  ev === 'prefinish'
-
-const isArrayBuffer = b => b instanceof ArrayBuffer ||
-  typeof b === 'object' &&
-  b.constructor &&
-  b.constructor.name === 'ArrayBuffer' &&
-  b.byteLength >= 0
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor (src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe () {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors () {}
-  end () {
-    this.unpipe()
-    if (this.opts.end)
-      this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe () {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor (src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-module.exports = class Minipass extends Stream {
-  constructor (options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this.pipes = []
-    this.buffer = []
-    this[OBJECTMODE] = options && options.objectMode || false
-    if (this[OBJECTMODE])
-      this[ENCODING] = null
-    else
-      this[ENCODING] = options && options.encoding || null
-    if (this[ENCODING] === 'buffer')
-      this[ENCODING] = null
-    this[ASYNC] = options && !!options.async || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-  }
-
-  get bufferLength () { return this[BUFFERLENGTH] }
-
-  get encoding () { return this[ENCODING] }
-  set encoding (enc) {
-    if (this[OBJECTMODE])
-      throw new Error('cannot set encoding in objectMode')
-
-    if (this[ENCODING] && enc !== this[ENCODING] &&
-        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this.buffer.length)
-        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding (enc) {
-    this.encoding = enc
-  }
-
-  get objectMode () { return this[OBJECTMODE] }
-  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
-
-  get ['async'] () { return this[ASYNC] }
-  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
-
-  write (chunk, encoding, cb) {
-    if (this[EOF])
-      throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit('error', Object.assign(
-        new Error('Cannot call write after a stream was destroyed'),
-        { code: 'ERR_STREAM_DESTROYED' }
-      ))
-      return true
-    }
-
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-
-    if (!encoding)
-      encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk))
-        chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0)
-        this[FLUSH](true)
-
-      if (this.flowing)
-        this.emit('data', chunk)
-      else
-        this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0)
-        this.emit('readable')
-
-      if (cb)
-        fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0)
-        this.emit('readable')
-      if (cb)
-        fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (typeof chunk === 'string' &&
-        // unless it is a string already ready for us to use
-        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0)
-      this[FLUSH](true)
-
-    if (this.flowing)
-      this.emit('data', chunk)
-    else
-      this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0)
-      this.emit('readable')
-
-    if (cb)
-      fn(cb)
-
-    return this.flowing
-  }
-
-  read (n) {
-    if (this[DESTROYED])
-      return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE])
-      n = null
-
-    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding)
-        this.buffer = [this.buffer.join('')]
-      else
-        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this.buffer[0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ] (n, chunk) {
-    if (n === chunk.length || n === null)
-      this[BUFFERSHIFT]()
-    else {
-      this.buffer[0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this.buffer.length && !this[EOF])
-      this.emit('drain')
-
-    return chunk
-  }
-
-  end (chunk, encoding, cb) {
-    if (typeof chunk === 'function')
-      cb = chunk, chunk = null
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-    if (chunk)
-      this.write(chunk, encoding)
-    if (cb)
-      this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED])
-      this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME] () {
-    if (this[DESTROYED])
-      return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this.buffer.length)
-      this[FLUSH]()
-    else if (this[EOF])
-      this[MAYBE_EMIT_END]()
-    else
-      this.emit('drain')
-  }
-
-  resume () {
-    return this[RESUME]()
-  }
-
-  pause () {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed () {
-    return this[DESTROYED]
-  }
-
-  get flowing () {
-    return this[FLOWING]
-  }
-
-  get paused () {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH] (chunk) {
-    if (this[OBJECTMODE])
-      this[BUFFERLENGTH] += 1
-    else
-      this[BUFFERLENGTH] += chunk.length
-    this.buffer.push(chunk)
-  }
-
-  [BUFFERSHIFT] () {
-    if (this.buffer.length) {
-      if (this[OBJECTMODE])
-        this[BUFFERLENGTH] -= 1
-      else
-        this[BUFFERLENGTH] -= this.buffer[0].length
-    }
-    return this.buffer.shift()
-  }
-
-  [FLUSH] (noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
-
-    if (!noDrain && !this.buffer.length && !this[EOF])
-      this.emit('drain')
-  }
-
-  [FLUSHCHUNK] (chunk) {
-    return chunk ? (this.emit('data', chunk), this.flowing) : false
-  }
-
-  pipe (dest, opts) {
-    if (this[DESTROYED])
-      return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr)
-      opts.end = false
-    else
-      opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end)
-        dest.end()
-    } else {
-      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
-        : new PipeProxyErrors(this, dest, opts))
-      if (this[ASYNC])
-        defer(() => this[RESUME]())
-      else
-        this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe (dest) {
-    const p = this.pipes.find(p => p.dest === dest)
-    if (p) {
-      this.pipes.splice(this.pipes.indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener (ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on (ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this.pipes.length && !this.flowing)
-      this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC])
-        defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else
-        fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd () {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END] () {
-    if (!this[EMITTING_END] &&
-        !this[EMITTED_END] &&
-        !this[DESTROYED] &&
-        this.buffer.length === 0 &&
-        this[EOF]) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED])
-        this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit (ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !data ? false
-        : this[ASYNC] ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED])
-        return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      const ret = super.emit('error', data)
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA] (data) {
-    for (const p of this.pipes) {
-      if (p.dest.write(data) === false)
-        this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND] () {
-    if (this[EMITTED_END])
-      return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC])
-      defer(() => this[EMITEND2]())
-    else
-      this[EMITEND2]()
-  }
-
-  [EMITEND2] () {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this.pipes) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this.pipes) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect () {
-    const buf = []
-    if (!this[OBJECTMODE])
-      buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE])
-        buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat () {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise () {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR] () {
-    const next = () => {
-      const res = this.read()
-      if (res !== null)
-        return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF])
-        return Promise.resolve({ done: true })
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return { next }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR] () {
-    const next = () => {
-      const value = this.read()
-      const done = value === null
-      return { value, done }
-    }
-    return { next }
-  }
-
-  destroy (er) {
-    if (this[DESTROYED]) {
-      if (er)
-        this.emit('error', er)
-      else
-        this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this.buffer.length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED])
-      this.close()
-
-    if (er)
-      this.emit('error', er)
-    else // if no error to emit, still reject pending promises
-      this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream (s) {
-    return !!s && (s instanceof Minipass || s instanceof Stream ||
-      s instanceof EE && (
-        typeof s.pipe === 'function' || // readable
-        (typeof s.write === 'function' && typeof s.end === 'function') // writable
-      ))
-  }
-}
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/package.json b/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/package.json
deleted file mode 100644
index 548d03fa6d5d4..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/minipass/package.json
+++ /dev/null
@@ -1,56 +0,0 @@
-{
-  "name": "minipass",
-  "version": "3.3.6",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "index.js",
-  "types": "index.d.ts",
-  "dependencies": {
-    "yallist": "^4.0.0"
-  },
-  "devDependencies": {
-    "@types/node": "^17.0.41",
-    "end-of-stream": "^1.4.0",
-    "prettier": "^2.6.2",
-    "tap": "^16.2.0",
-    "through2": "^2.0.3",
-    "ts-node": "^10.8.1",
-    "typescript": "^4.7.3"
-  },
-  "scripts": {
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minipass.git"
-  },
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "files": [
-    "index.d.ts",
-    "index.js"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">=8"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js
deleted file mode 100644
index d41c97a19f984..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/iterator.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-module.exports = function (Yallist) {
-  Yallist.prototype[Symbol.iterator] = function* () {
-    for (let walker = this.head; walker; walker = walker.next) {
-      yield walker.value
-    }
-  }
-}
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json
deleted file mode 100644
index 8a083867d72e0..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/package.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-  "name": "yallist",
-  "version": "4.0.0",
-  "description": "Yet Another Linked List",
-  "main": "yallist.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "yallist.js",
-    "iterator.js"
-  ],
-  "dependencies": {},
-  "devDependencies": {
-    "tap": "^12.1.0"
-  },
-  "scripts": {
-    "test": "tap test/*.js --100",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --all; git push origin --tags"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/yallist.git"
-  },
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC"
-}
diff --git a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js b/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js
deleted file mode 100644
index 4e83ab1c542a5..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/node_modules/yallist/yallist.js
+++ /dev/null
@@ -1,426 +0,0 @@
-'use strict'
-module.exports = Yallist
-
-Yallist.Node = Node
-Yallist.create = Yallist
-
-function Yallist (list) {
-  var self = this
-  if (!(self instanceof Yallist)) {
-    self = new Yallist()
-  }
-
-  self.tail = null
-  self.head = null
-  self.length = 0
-
-  if (list && typeof list.forEach === 'function') {
-    list.forEach(function (item) {
-      self.push(item)
-    })
-  } else if (arguments.length > 0) {
-    for (var i = 0, l = arguments.length; i < l; i++) {
-      self.push(arguments[i])
-    }
-  }
-
-  return self
-}
-
-Yallist.prototype.removeNode = function (node) {
-  if (node.list !== this) {
-    throw new Error('removing node which does not belong to this list')
-  }
-
-  var next = node.next
-  var prev = node.prev
-
-  if (next) {
-    next.prev = prev
-  }
-
-  if (prev) {
-    prev.next = next
-  }
-
-  if (node === this.head) {
-    this.head = next
-  }
-  if (node === this.tail) {
-    this.tail = prev
-  }
-
-  node.list.length--
-  node.next = null
-  node.prev = null
-  node.list = null
-
-  return next
-}
-
-Yallist.prototype.unshiftNode = function (node) {
-  if (node === this.head) {
-    return
-  }
-
-  if (node.list) {
-    node.list.removeNode(node)
-  }
-
-  var head = this.head
-  node.list = this
-  node.next = head
-  if (head) {
-    head.prev = node
-  }
-
-  this.head = node
-  if (!this.tail) {
-    this.tail = node
-  }
-  this.length++
-}
-
-Yallist.prototype.pushNode = function (node) {
-  if (node === this.tail) {
-    return
-  }
-
-  if (node.list) {
-    node.list.removeNode(node)
-  }
-
-  var tail = this.tail
-  node.list = this
-  node.prev = tail
-  if (tail) {
-    tail.next = node
-  }
-
-  this.tail = node
-  if (!this.head) {
-    this.head = node
-  }
-  this.length++
-}
-
-Yallist.prototype.push = function () {
-  for (var i = 0, l = arguments.length; i < l; i++) {
-    push(this, arguments[i])
-  }
-  return this.length
-}
-
-Yallist.prototype.unshift = function () {
-  for (var i = 0, l = arguments.length; i < l; i++) {
-    unshift(this, arguments[i])
-  }
-  return this.length
-}
-
-Yallist.prototype.pop = function () {
-  if (!this.tail) {
-    return undefined
-  }
-
-  var res = this.tail.value
-  this.tail = this.tail.prev
-  if (this.tail) {
-    this.tail.next = null
-  } else {
-    this.head = null
-  }
-  this.length--
-  return res
-}
-
-Yallist.prototype.shift = function () {
-  if (!this.head) {
-    return undefined
-  }
-
-  var res = this.head.value
-  this.head = this.head.next
-  if (this.head) {
-    this.head.prev = null
-  } else {
-    this.tail = null
-  }
-  this.length--
-  return res
-}
-
-Yallist.prototype.forEach = function (fn, thisp) {
-  thisp = thisp || this
-  for (var walker = this.head, i = 0; walker !== null; i++) {
-    fn.call(thisp, walker.value, i, this)
-    walker = walker.next
-  }
-}
-
-Yallist.prototype.forEachReverse = function (fn, thisp) {
-  thisp = thisp || this
-  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
-    fn.call(thisp, walker.value, i, this)
-    walker = walker.prev
-  }
-}
-
-Yallist.prototype.get = function (n) {
-  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
-    // abort out of the list early if we hit a cycle
-    walker = walker.next
-  }
-  if (i === n && walker !== null) {
-    return walker.value
-  }
-}
-
-Yallist.prototype.getReverse = function (n) {
-  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
-    // abort out of the list early if we hit a cycle
-    walker = walker.prev
-  }
-  if (i === n && walker !== null) {
-    return walker.value
-  }
-}
-
-Yallist.prototype.map = function (fn, thisp) {
-  thisp = thisp || this
-  var res = new Yallist()
-  for (var walker = this.head; walker !== null;) {
-    res.push(fn.call(thisp, walker.value, this))
-    walker = walker.next
-  }
-  return res
-}
-
-Yallist.prototype.mapReverse = function (fn, thisp) {
-  thisp = thisp || this
-  var res = new Yallist()
-  for (var walker = this.tail; walker !== null;) {
-    res.push(fn.call(thisp, walker.value, this))
-    walker = walker.prev
-  }
-  return res
-}
-
-Yallist.prototype.reduce = function (fn, initial) {
-  var acc
-  var walker = this.head
-  if (arguments.length > 1) {
-    acc = initial
-  } else if (this.head) {
-    walker = this.head.next
-    acc = this.head.value
-  } else {
-    throw new TypeError('Reduce of empty list with no initial value')
-  }
-
-  for (var i = 0; walker !== null; i++) {
-    acc = fn(acc, walker.value, i)
-    walker = walker.next
-  }
-
-  return acc
-}
-
-Yallist.prototype.reduceReverse = function (fn, initial) {
-  var acc
-  var walker = this.tail
-  if (arguments.length > 1) {
-    acc = initial
-  } else if (this.tail) {
-    walker = this.tail.prev
-    acc = this.tail.value
-  } else {
-    throw new TypeError('Reduce of empty list with no initial value')
-  }
-
-  for (var i = this.length - 1; walker !== null; i--) {
-    acc = fn(acc, walker.value, i)
-    walker = walker.prev
-  }
-
-  return acc
-}
-
-Yallist.prototype.toArray = function () {
-  var arr = new Array(this.length)
-  for (var i = 0, walker = this.head; walker !== null; i++) {
-    arr[i] = walker.value
-    walker = walker.next
-  }
-  return arr
-}
-
-Yallist.prototype.toArrayReverse = function () {
-  var arr = new Array(this.length)
-  for (var i = 0, walker = this.tail; walker !== null; i++) {
-    arr[i] = walker.value
-    walker = walker.prev
-  }
-  return arr
-}
-
-Yallist.prototype.slice = function (from, to) {
-  to = to || this.length
-  if (to < 0) {
-    to += this.length
-  }
-  from = from || 0
-  if (from < 0) {
-    from += this.length
-  }
-  var ret = new Yallist()
-  if (to < from || to < 0) {
-    return ret
-  }
-  if (from < 0) {
-    from = 0
-  }
-  if (to > this.length) {
-    to = this.length
-  }
-  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
-    walker = walker.next
-  }
-  for (; walker !== null && i < to; i++, walker = walker.next) {
-    ret.push(walker.value)
-  }
-  return ret
-}
-
-Yallist.prototype.sliceReverse = function (from, to) {
-  to = to || this.length
-  if (to < 0) {
-    to += this.length
-  }
-  from = from || 0
-  if (from < 0) {
-    from += this.length
-  }
-  var ret = new Yallist()
-  if (to < from || to < 0) {
-    return ret
-  }
-  if (from < 0) {
-    from = 0
-  }
-  if (to > this.length) {
-    to = this.length
-  }
-  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
-    walker = walker.prev
-  }
-  for (; walker !== null && i > from; i--, walker = walker.prev) {
-    ret.push(walker.value)
-  }
-  return ret
-}
-
-Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
-  if (start > this.length) {
-    start = this.length - 1
-  }
-  if (start < 0) {
-    start = this.length + start;
-  }
-
-  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
-    walker = walker.next
-  }
-
-  var ret = []
-  for (var i = 0; walker && i < deleteCount; i++) {
-    ret.push(walker.value)
-    walker = this.removeNode(walker)
-  }
-  if (walker === null) {
-    walker = this.tail
-  }
-
-  if (walker !== this.head && walker !== this.tail) {
-    walker = walker.prev
-  }
-
-  for (var i = 0; i < nodes.length; i++) {
-    walker = insert(this, walker, nodes[i])
-  }
-  return ret;
-}
-
-Yallist.prototype.reverse = function () {
-  var head = this.head
-  var tail = this.tail
-  for (var walker = head; walker !== null; walker = walker.prev) {
-    var p = walker.prev
-    walker.prev = walker.next
-    walker.next = p
-  }
-  this.head = tail
-  this.tail = head
-  return this
-}
-
-function insert (self, node, value) {
-  var inserted = node === self.head ?
-    new Node(value, null, node, self) :
-    new Node(value, node, node.next, self)
-
-  if (inserted.next === null) {
-    self.tail = inserted
-  }
-  if (inserted.prev === null) {
-    self.head = inserted
-  }
-
-  self.length++
-
-  return inserted
-}
-
-function push (self, item) {
-  self.tail = new Node(item, self.tail, null, self)
-  if (!self.head) {
-    self.head = self.tail
-  }
-  self.length++
-}
-
-function unshift (self, item) {
-  self.head = new Node(item, null, self.head, self)
-  if (!self.tail) {
-    self.tail = self.head
-  }
-  self.length++
-}
-
-function Node (value, prev, next, list) {
-  if (!(this instanceof Node)) {
-    return new Node(value, prev, next, list)
-  }
-
-  this.list = list
-  this.value = value
-
-  if (prev) {
-    prev.next = this
-    this.prev = prev
-  } else {
-    this.prev = null
-  }
-
-  if (next) {
-    next.prev = this
-    this.next = next
-  } else {
-    this.next = null
-  }
-}
-
-try {
-  // add if support for Symbol.iterator is present
-  require('./iterator.js')(Yallist)
-} catch (er) {}
diff --git a/node_modules/tar/node_modules/fs-minipass/package.json b/node_modules/tar/node_modules/fs-minipass/package.json
deleted file mode 100644
index 2f2436cb5c3b1..0000000000000
--- a/node_modules/tar/node_modules/fs-minipass/package.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
-  "name": "fs-minipass",
-  "version": "2.1.0",
-  "main": "index.js",
-  "scripts": {
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags"
-  },
-  "keywords": [],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/npm/fs-minipass.git"
-  },
-  "bugs": {
-    "url": "https://github.com/npm/fs-minipass/issues"
-  },
-  "homepage": "https://github.com/npm/fs-minipass#readme",
-  "description": "fs read and write streams based on minipass",
-  "dependencies": {
-    "minipass": "^3.0.0"
-  },
-  "devDependencies": {
-    "mutate-fs": "^2.0.1",
-    "tap": "^14.6.4"
-  },
-  "files": [
-    "index.js"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">= 8"
-  }
-}
diff --git a/node_modules/tar/node_modules/minipass/LICENSE b/node_modules/tar/node_modules/minipass/LICENSE
deleted file mode 100644
index 97f8e32ed82e4..0000000000000
--- a/node_modules/tar/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2023 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/minipass/index.js b/node_modules/tar/node_modules/minipass/index.js
deleted file mode 100644
index ed07c17acd97b..0000000000000
--- a/node_modules/tar/node_modules/minipass/index.js
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-const EE = require('events')
-const Stream = require('stream')
-const stringdecoder = require('string_decoder')
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-exports.Minipass = Minipass
diff --git a/node_modules/tar/node_modules/minipass/index.mjs b/node_modules/tar/node_modules/minipass/index.mjs
deleted file mode 100644
index 6ef6cd8cf0703..0000000000000
--- a/node_modules/tar/node_modules/minipass/index.mjs
+++ /dev/null
@@ -1,702 +0,0 @@
-'use strict'
-const proc =
-  typeof process === 'object' && process
-    ? process
-    : {
-        stdout: null,
-        stderr: null,
-      }
-import EE from 'events'
-import Stream from 'stream'
-import stringdecoder from 'string_decoder'
-const SD = stringdecoder.StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFER = Symbol('buffer')
-const PIPES = Symbol('pipes')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-// internal event when stream is destroyed
-const DESTROYED = Symbol('destroyed')
-// internal event when stream has an error
-const ERROR = Symbol('error')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-const ABORT = Symbol('abort')
-const ABORTED = Symbol('aborted')
-const SIGNAL = Symbol('signal')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
-const ASYNCITERATOR =
-  (doIter && Symbol.asyncIterator) || Symbol('asyncIterator not implemented')
-const ITERATOR =
-  (doIter && Symbol.iterator) || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev => ev === 'end' || ev === 'finish' || ev === 'prefinish'
-
-const isArrayBuffer = b =>
-  b instanceof ArrayBuffer ||
-  (typeof b === 'object' &&
-    b.constructor &&
-    b.constructor.name === 'ArrayBuffer' &&
-    b.byteLength >= 0)
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor(src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe() {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors() {}
-  end() {
-    this.unpipe()
-    if (this.opts.end) this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe() {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor(src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-export class Minipass extends Stream {
-  constructor(options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this[PIPES] = []
-    this[BUFFER] = []
-    this[OBJECTMODE] = (options && options.objectMode) || false
-    if (this[OBJECTMODE]) this[ENCODING] = null
-    else this[ENCODING] = (options && options.encoding) || null
-    if (this[ENCODING] === 'buffer') this[ENCODING] = null
-    this[ASYNC] = (options && !!options.async) || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-    if (options && options.debugExposeBuffer === true) {
-      Object.defineProperty(this, 'buffer', { get: () => this[BUFFER] })
-    }
-    if (options && options.debugExposePipes === true) {
-      Object.defineProperty(this, 'pipes', { get: () => this[PIPES] })
-    }
-    this[SIGNAL] = options && options.signal
-    this[ABORTED] = false
-    if (this[SIGNAL]) {
-      this[SIGNAL].addEventListener('abort', () => this[ABORT]())
-      if (this[SIGNAL].aborted) {
-        this[ABORT]()
-      }
-    }
-  }
-
-  get bufferLength() {
-    return this[BUFFERLENGTH]
-  }
-
-  get encoding() {
-    return this[ENCODING]
-  }
-  set encoding(enc) {
-    if (this[OBJECTMODE]) throw new Error('cannot set encoding in objectMode')
-
-    if (
-      this[ENCODING] &&
-      enc !== this[ENCODING] &&
-      ((this[DECODER] && this[DECODER].lastNeed) || this[BUFFERLENGTH])
-    )
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this[BUFFER].length)
-        this[BUFFER] = this[BUFFER].map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding(enc) {
-    this.encoding = enc
-  }
-
-  get objectMode() {
-    return this[OBJECTMODE]
-  }
-  set objectMode(om) {
-    this[OBJECTMODE] = this[OBJECTMODE] || !!om
-  }
-
-  get ['async']() {
-    return this[ASYNC]
-  }
-  set ['async'](a) {
-    this[ASYNC] = this[ASYNC] || !!a
-  }
-
-  // drop everything and get out of the flow completely
-  [ABORT]() {
-    this[ABORTED] = true
-    this.emit('abort', this[SIGNAL].reason)
-    this.destroy(this[SIGNAL].reason)
-  }
-
-  get aborted() {
-    return this[ABORTED]
-  }
-  set aborted(_) {}
-
-  write(chunk, encoding, cb) {
-    if (this[ABORTED]) return false
-    if (this[EOF]) throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit(
-        'error',
-        Object.assign(
-          new Error('Cannot call write after a stream was destroyed'),
-          { code: 'ERR_STREAM_DESTROYED' }
-        )
-      )
-      return true
-    }
-
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-
-    if (!encoding) encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk)) chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-      if (this.flowing) this.emit('data', chunk)
-      else this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-      if (cb) fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-      if (cb) fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (
-      typeof chunk === 'string' &&
-      // unless it is a string already ready for us to use
-      !(encoding === this[ENCODING] && !this[DECODER].lastNeed)
-    ) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0) this[FLUSH](true)
-
-    if (this.flowing) this.emit('data', chunk)
-    else this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0) this.emit('readable')
-
-    if (cb) fn(cb)
-
-    return this.flowing
-  }
-
-  read(n) {
-    if (this[DESTROYED]) return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE]) n = null
-
-    if (this[BUFFER].length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding) this[BUFFER] = [this[BUFFER].join('')]
-      else this[BUFFER] = [Buffer.concat(this[BUFFER], this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this[BUFFER][0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ](n, chunk) {
-    if (n === chunk.length || n === null) this[BUFFERSHIFT]()
-    else {
-      this[BUFFER][0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this[BUFFER].length && !this[EOF]) this.emit('drain')
-
-    return chunk
-  }
-
-  end(chunk, encoding, cb) {
-    if (typeof chunk === 'function') (cb = chunk), (chunk = null)
-    if (typeof encoding === 'function') (cb = encoding), (encoding = 'utf8')
-    if (chunk) this.write(chunk, encoding)
-    if (cb) this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED]) this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME]() {
-    if (this[DESTROYED]) return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this[BUFFER].length) this[FLUSH]()
-    else if (this[EOF]) this[MAYBE_EMIT_END]()
-    else this.emit('drain')
-  }
-
-  resume() {
-    return this[RESUME]()
-  }
-
-  pause() {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed() {
-    return this[DESTROYED]
-  }
-
-  get flowing() {
-    return this[FLOWING]
-  }
-
-  get paused() {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH](chunk) {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] += 1
-    else this[BUFFERLENGTH] += chunk.length
-    this[BUFFER].push(chunk)
-  }
-
-  [BUFFERSHIFT]() {
-    if (this[OBJECTMODE]) this[BUFFERLENGTH] -= 1
-    else this[BUFFERLENGTH] -= this[BUFFER][0].length
-    return this[BUFFER].shift()
-  }
-
-  [FLUSH](noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()) && this[BUFFER].length)
-
-    if (!noDrain && !this[BUFFER].length && !this[EOF]) this.emit('drain')
-  }
-
-  [FLUSHCHUNK](chunk) {
-    this.emit('data', chunk)
-    return this.flowing
-  }
-
-  pipe(dest, opts) {
-    if (this[DESTROYED]) return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr) opts.end = false
-    else opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end) dest.end()
-    } else {
-      this[PIPES].push(
-        !opts.proxyErrors
-          ? new Pipe(this, dest, opts)
-          : new PipeProxyErrors(this, dest, opts)
-      )
-      if (this[ASYNC]) defer(() => this[RESUME]())
-      else this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe(dest) {
-    const p = this[PIPES].find(p => p.dest === dest)
-    if (p) {
-      this[PIPES].splice(this[PIPES].indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener(ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on(ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this[PIPES].length && !this.flowing) this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC]) defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd() {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END]() {
-    if (
-      !this[EMITTING_END] &&
-      !this[EMITTED_END] &&
-      !this[DESTROYED] &&
-      this[BUFFER].length === 0 &&
-      this[EOF]
-    ) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED]) this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit(ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !this[OBJECTMODE] && !data
-        ? false
-        : this[ASYNC]
-        ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED]) return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      super.emit(ERROR, data)
-      const ret =
-        !this[SIGNAL] || this.listeners('error').length
-          ? super.emit('error', data)
-          : false
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA](data) {
-    for (const p of this[PIPES]) {
-      if (p.dest.write(data) === false) this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND]() {
-    if (this[EMITTED_END]) return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC]) defer(() => this[EMITEND2]())
-    else this[EMITEND2]()
-  }
-
-  [EMITEND2]() {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this[PIPES]) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this[PIPES]) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect() {
-    const buf = []
-    if (!this[OBJECTMODE]) buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE]) buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat() {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING]
-            ? buf.join('')
-            : Buffer.concat(buf, buf.dataLength)
-        )
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise() {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      stopped = true
-      return Promise.resolve({ done: true })
-    }
-    const next = () => {
-      if (stopped) return stop()
-      const res = this.read()
-      if (res !== null) return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF]) return stop()
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.removeListener(DESTROYED, ondestroy)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        this.removeListener(DESTROYED, ondestroy)
-        stop()
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ASYNCITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR]() {
-    let stopped = false
-    const stop = () => {
-      this.pause()
-      this.removeListener(ERROR, stop)
-      this.removeListener(DESTROYED, stop)
-      this.removeListener('end', stop)
-      stopped = true
-      return { done: true }
-    }
-
-    const next = () => {
-      if (stopped) return stop()
-      const value = this.read()
-      return value === null ? stop() : { value }
-    }
-    this.once('end', stop)
-    this.once(ERROR, stop)
-    this.once(DESTROYED, stop)
-
-    return {
-      next,
-      throw: stop,
-      return: stop,
-      [ITERATOR]() {
-        return this
-      },
-    }
-  }
-
-  destroy(er) {
-    if (this[DESTROYED]) {
-      if (er) this.emit('error', er)
-      else this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this[BUFFER].length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED]) this.close()
-
-    if (er) this.emit('error', er)
-    // if no error to emit, still reject pending promises
-    else this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream(s) {
-    return (
-      !!s &&
-      (s instanceof Minipass ||
-        s instanceof Stream ||
-        (s instanceof EE &&
-          // readable
-          (typeof s.pipe === 'function' ||
-            // writable
-            (typeof s.write === 'function' && typeof s.end === 'function'))))
-    )
-  }
-}
-
-
diff --git a/node_modules/tar/node_modules/minipass/package.json b/node_modules/tar/node_modules/minipass/package.json
deleted file mode 100644
index 0e20e988047f2..0000000000000
--- a/node_modules/tar/node_modules/minipass/package.json
+++ /dev/null
@@ -1,76 +0,0 @@
-{
-  "name": "minipass",
-  "version": "5.0.0",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "./index.js",
-  "module": "./index.mjs",
-  "types": "./index.d.ts",
-  "exports": {
-    ".": {
-      "import": {
-        "types": "./index.d.ts",
-        "default": "./index.mjs"
-      },
-      "require": {
-        "types": "./index.d.ts",
-        "default": "./index.js"
-      }
-    },
-    "./package.json": "./package.json"
-  },
-  "devDependencies": {
-    "@types/node": "^17.0.41",
-    "end-of-stream": "^1.4.0",
-    "node-abort-controller": "^3.1.1",
-    "prettier": "^2.6.2",
-    "tap": "^16.2.0",
-    "through2": "^2.0.3",
-    "ts-node": "^10.8.1",
-    "typedoc": "^0.23.24",
-    "typescript": "^4.7.3"
-  },
-  "scripts": {
-    "pretest": "npm run prepare",
-    "presnap": "npm run prepare",
-    "prepare": "node ./scripts/transpile-to-esm.js",
-    "snap": "tap",
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags",
-    "typedoc": "typedoc ./index.d.ts",
-    "format": "prettier --write . --loglevel warn"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minipass.git"
-  },
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "files": [
-    "index.d.ts",
-    "index.js",
-    "index.mjs"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">=8"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/tar/node_modules/minizlib/LICENSE b/node_modules/tar/node_modules/minizlib/LICENSE
deleted file mode 100644
index ffce7383f53e7..0000000000000
--- a/node_modules/tar/node_modules/minizlib/LICENSE
+++ /dev/null
@@ -1,26 +0,0 @@
-Minizlib was created by Isaac Z. Schlueter.
-It is a derivative work of the Node.js project.
-
-"""
-Copyright Isaac Z. Schlueter and Contributors
-Copyright Node.js contributors. All rights reserved.
-Copyright Joyent, Inc. and other Node contributors. All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a
-copy of this software and associated documentation files (the "Software"),
-to deal in the Software without restriction, including without limitation
-the rights to use, copy, modify, merge, publish, distribute, sublicense,
-and/or sell copies of the Software, and to permit persons to whom the
-Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
-IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
-CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
-TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
-SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-"""
diff --git a/node_modules/tar/node_modules/minizlib/constants.js b/node_modules/tar/node_modules/minizlib/constants.js
deleted file mode 100644
index 641ebc73129bf..0000000000000
--- a/node_modules/tar/node_modules/minizlib/constants.js
+++ /dev/null
@@ -1,115 +0,0 @@
-// Update with any zlib constants that are added or changed in the future.
-// Node v6 didn't export this, so we just hard code the version and rely
-// on all the other hard-coded values from zlib v4736.  When node v6
-// support drops, we can just export the realZlibConstants object.
-const realZlibConstants = require('zlib').constants ||
-  /* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
-
-module.exports = Object.freeze(Object.assign(Object.create(null), {
-  Z_NO_FLUSH: 0,
-  Z_PARTIAL_FLUSH: 1,
-  Z_SYNC_FLUSH: 2,
-  Z_FULL_FLUSH: 3,
-  Z_FINISH: 4,
-  Z_BLOCK: 5,
-  Z_OK: 0,
-  Z_STREAM_END: 1,
-  Z_NEED_DICT: 2,
-  Z_ERRNO: -1,
-  Z_STREAM_ERROR: -2,
-  Z_DATA_ERROR: -3,
-  Z_MEM_ERROR: -4,
-  Z_BUF_ERROR: -5,
-  Z_VERSION_ERROR: -6,
-  Z_NO_COMPRESSION: 0,
-  Z_BEST_SPEED: 1,
-  Z_BEST_COMPRESSION: 9,
-  Z_DEFAULT_COMPRESSION: -1,
-  Z_FILTERED: 1,
-  Z_HUFFMAN_ONLY: 2,
-  Z_RLE: 3,
-  Z_FIXED: 4,
-  Z_DEFAULT_STRATEGY: 0,
-  DEFLATE: 1,
-  INFLATE: 2,
-  GZIP: 3,
-  GUNZIP: 4,
-  DEFLATERAW: 5,
-  INFLATERAW: 6,
-  UNZIP: 7,
-  BROTLI_DECODE: 8,
-  BROTLI_ENCODE: 9,
-  Z_MIN_WINDOWBITS: 8,
-  Z_MAX_WINDOWBITS: 15,
-  Z_DEFAULT_WINDOWBITS: 15,
-  Z_MIN_CHUNK: 64,
-  Z_MAX_CHUNK: Infinity,
-  Z_DEFAULT_CHUNK: 16384,
-  Z_MIN_MEMLEVEL: 1,
-  Z_MAX_MEMLEVEL: 9,
-  Z_DEFAULT_MEMLEVEL: 8,
-  Z_MIN_LEVEL: -1,
-  Z_MAX_LEVEL: 9,
-  Z_DEFAULT_LEVEL: -1,
-  BROTLI_OPERATION_PROCESS: 0,
-  BROTLI_OPERATION_FLUSH: 1,
-  BROTLI_OPERATION_FINISH: 2,
-  BROTLI_OPERATION_EMIT_METADATA: 3,
-  BROTLI_MODE_GENERIC: 0,
-  BROTLI_MODE_TEXT: 1,
-  BROTLI_MODE_FONT: 2,
-  BROTLI_DEFAULT_MODE: 0,
-  BROTLI_MIN_QUALITY: 0,
-  BROTLI_MAX_QUALITY: 11,
-  BROTLI_DEFAULT_QUALITY: 11,
-  BROTLI_MIN_WINDOW_BITS: 10,
-  BROTLI_MAX_WINDOW_BITS: 24,
-  BROTLI_LARGE_MAX_WINDOW_BITS: 30,
-  BROTLI_DEFAULT_WINDOW: 22,
-  BROTLI_MIN_INPUT_BLOCK_BITS: 16,
-  BROTLI_MAX_INPUT_BLOCK_BITS: 24,
-  BROTLI_PARAM_MODE: 0,
-  BROTLI_PARAM_QUALITY: 1,
-  BROTLI_PARAM_LGWIN: 2,
-  BROTLI_PARAM_LGBLOCK: 3,
-  BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
-  BROTLI_PARAM_SIZE_HINT: 5,
-  BROTLI_PARAM_LARGE_WINDOW: 6,
-  BROTLI_PARAM_NPOSTFIX: 7,
-  BROTLI_PARAM_NDIRECT: 8,
-  BROTLI_DECODER_RESULT_ERROR: 0,
-  BROTLI_DECODER_RESULT_SUCCESS: 1,
-  BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
-  BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
-  BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
-  BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
-  BROTLI_DECODER_NO_ERROR: 0,
-  BROTLI_DECODER_SUCCESS: 1,
-  BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
-  BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
-  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
-  BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
-  BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
-  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
-  BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
-  BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
-  BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
-  BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
-  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
-  BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
-  BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
-  BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
-  BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
-  BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
-  BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
-  BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
-  BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
-  BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
-  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
-  BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
-  BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
-  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
-  BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
-  BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
-  BROTLI_DECODER_ERROR_UNREACHABLE: -31,
-}, realZlibConstants))
diff --git a/node_modules/tar/node_modules/minizlib/index.js b/node_modules/tar/node_modules/minizlib/index.js
deleted file mode 100644
index fbaf69e19f209..0000000000000
--- a/node_modules/tar/node_modules/minizlib/index.js
+++ /dev/null
@@ -1,348 +0,0 @@
-'use strict'
-
-const assert = require('assert')
-const Buffer = require('buffer').Buffer
-const realZlib = require('zlib')
-
-const constants = exports.constants = require('./constants.js')
-const Minipass = require('minipass')
-
-const OriginalBufferConcat = Buffer.concat
-
-const _superWrite = Symbol('_superWrite')
-class ZlibError extends Error {
-  constructor (err) {
-    super('zlib: ' + err.message)
-    this.code = err.code
-    this.errno = err.errno
-    /* istanbul ignore if */
-    if (!this.code)
-      this.code = 'ZLIB_ERROR'
-
-    this.message = 'zlib: ' + err.message
-    Error.captureStackTrace(this, this.constructor)
-  }
-
-  get name () {
-    return 'ZlibError'
-  }
-}
-
-// the Zlib class they all inherit from
-// This thing manages the queue of requests, and returns
-// true or false if there is anything in the queue when
-// you call the .write() method.
-const _opts = Symbol('opts')
-const _flushFlag = Symbol('flushFlag')
-const _finishFlushFlag = Symbol('finishFlushFlag')
-const _fullFlushFlag = Symbol('fullFlushFlag')
-const _handle = Symbol('handle')
-const _onError = Symbol('onError')
-const _sawError = Symbol('sawError')
-const _level = Symbol('level')
-const _strategy = Symbol('strategy')
-const _ended = Symbol('ended')
-const _defaultFullFlush = Symbol('_defaultFullFlush')
-
-class ZlibBase extends Minipass {
-  constructor (opts, mode) {
-    if (!opts || typeof opts !== 'object')
-      throw new TypeError('invalid options for ZlibBase constructor')
-
-    super(opts)
-    this[_sawError] = false
-    this[_ended] = false
-    this[_opts] = opts
-
-    this[_flushFlag] = opts.flush
-    this[_finishFlushFlag] = opts.finishFlush
-    // this will throw if any options are invalid for the class selected
-    try {
-      this[_handle] = new realZlib[mode](opts)
-    } catch (er) {
-      // make sure that all errors get decorated properly
-      throw new ZlibError(er)
-    }
-
-    this[_onError] = (err) => {
-      // no sense raising multiple errors, since we abort on the first one.
-      if (this[_sawError])
-        return
-
-      this[_sawError] = true
-
-      // there is no way to cleanly recover.
-      // continuing only obscures problems.
-      this.close()
-      this.emit('error', err)
-    }
-
-    this[_handle].on('error', er => this[_onError](new ZlibError(er)))
-    this.once('end', () => this.close)
-  }
-
-  close () {
-    if (this[_handle]) {
-      this[_handle].close()
-      this[_handle] = null
-      this.emit('close')
-    }
-  }
-
-  reset () {
-    if (!this[_sawError]) {
-      assert(this[_handle], 'zlib binding closed')
-      return this[_handle].reset()
-    }
-  }
-
-  flush (flushFlag) {
-    if (this.ended)
-      return
-
-    if (typeof flushFlag !== 'number')
-      flushFlag = this[_fullFlushFlag]
-    this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
-  }
-
-  end (chunk, encoding, cb) {
-    if (chunk)
-      this.write(chunk, encoding)
-    this.flush(this[_finishFlushFlag])
-    this[_ended] = true
-    return super.end(null, null, cb)
-  }
-
-  get ended () {
-    return this[_ended]
-  }
-
-  write (chunk, encoding, cb) {
-    // process the chunk using the sync process
-    // then super.write() all the outputted chunks
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-
-    if (typeof chunk === 'string')
-      chunk = Buffer.from(chunk, encoding)
-
-    if (this[_sawError])
-      return
-    assert(this[_handle], 'zlib binding closed')
-
-    // _processChunk tries to .close() the native handle after it's done, so we
-    // intercept that by temporarily making it a no-op.
-    const nativeHandle = this[_handle]._handle
-    const originalNativeClose = nativeHandle.close
-    nativeHandle.close = () => {}
-    const originalClose = this[_handle].close
-    this[_handle].close = () => {}
-    // It also calls `Buffer.concat()` at the end, which may be convenient
-    // for some, but which we are not interested in as it slows us down.
-    Buffer.concat = (args) => args
-    let result
-    try {
-      const flushFlag = typeof chunk[_flushFlag] === 'number'
-        ? chunk[_flushFlag] : this[_flushFlag]
-      result = this[_handle]._processChunk(chunk, flushFlag)
-      // if we don't throw, reset it back how it was
-      Buffer.concat = OriginalBufferConcat
-    } catch (err) {
-      // or if we do, put Buffer.concat() back before we emit error
-      // Error events call into user code, which may call Buffer.concat()
-      Buffer.concat = OriginalBufferConcat
-      this[_onError](new ZlibError(err))
-    } finally {
-      if (this[_handle]) {
-        // Core zlib resets `_handle` to null after attempting to close the
-        // native handle. Our no-op handler prevented actual closure, but we
-        // need to restore the `._handle` property.
-        this[_handle]._handle = nativeHandle
-        nativeHandle.close = originalNativeClose
-        this[_handle].close = originalClose
-        // `_processChunk()` adds an 'error' listener. If we don't remove it
-        // after each call, these handlers start piling up.
-        this[_handle].removeAllListeners('error')
-        // make sure OUR error listener is still attached tho
-      }
-    }
-
-    if (this[_handle])
-      this[_handle].on('error', er => this[_onError](new ZlibError(er)))
-
-    let writeReturn
-    if (result) {
-      if (Array.isArray(result) && result.length > 0) {
-        // The first buffer is always `handle._outBuffer`, which would be
-        // re-used for later invocations; so, we always have to copy that one.
-        writeReturn = this[_superWrite](Buffer.from(result[0]))
-        for (let i = 1; i < result.length; i++) {
-          writeReturn = this[_superWrite](result[i])
-        }
-      } else {
-        writeReturn = this[_superWrite](Buffer.from(result))
-      }
-    }
-
-    if (cb)
-      cb()
-    return writeReturn
-  }
-
-  [_superWrite] (data) {
-    return super.write(data)
-  }
-}
-
-class Zlib extends ZlibBase {
-  constructor (opts, mode) {
-    opts = opts || {}
-
-    opts.flush = opts.flush || constants.Z_NO_FLUSH
-    opts.finishFlush = opts.finishFlush || constants.Z_FINISH
-    super(opts, mode)
-
-    this[_fullFlushFlag] = constants.Z_FULL_FLUSH
-    this[_level] = opts.level
-    this[_strategy] = opts.strategy
-  }
-
-  params (level, strategy) {
-    if (this[_sawError])
-      return
-
-    if (!this[_handle])
-      throw new Error('cannot switch params when binding is closed')
-
-    // no way to test this without also not supporting params at all
-    /* istanbul ignore if */
-    if (!this[_handle].params)
-      throw new Error('not supported in this implementation')
-
-    if (this[_level] !== level || this[_strategy] !== strategy) {
-      this.flush(constants.Z_SYNC_FLUSH)
-      assert(this[_handle], 'zlib binding closed')
-      // .params() calls .flush(), but the latter is always async in the
-      // core zlib. We override .flush() temporarily to intercept that and
-      // flush synchronously.
-      const origFlush = this[_handle].flush
-      this[_handle].flush = (flushFlag, cb) => {
-        this.flush(flushFlag)
-        cb()
-      }
-      try {
-        this[_handle].params(level, strategy)
-      } finally {
-        this[_handle].flush = origFlush
-      }
-      /* istanbul ignore else */
-      if (this[_handle]) {
-        this[_level] = level
-        this[_strategy] = strategy
-      }
-    }
-  }
-}
-
-// minimal 2-byte header
-class Deflate extends Zlib {
-  constructor (opts) {
-    super(opts, 'Deflate')
-  }
-}
-
-class Inflate extends Zlib {
-  constructor (opts) {
-    super(opts, 'Inflate')
-  }
-}
-
-// gzip - bigger header, same deflate compression
-const _portable = Symbol('_portable')
-class Gzip extends Zlib {
-  constructor (opts) {
-    super(opts, 'Gzip')
-    this[_portable] = opts && !!opts.portable
-  }
-
-  [_superWrite] (data) {
-    if (!this[_portable])
-      return super[_superWrite](data)
-
-    // we'll always get the header emitted in one first chunk
-    // overwrite the OS indicator byte with 0xFF
-    this[_portable] = false
-    data[9] = 255
-    return super[_superWrite](data)
-  }
-}
-
-class Gunzip extends Zlib {
-  constructor (opts) {
-    super(opts, 'Gunzip')
-  }
-}
-
-// raw - no header
-class DeflateRaw extends Zlib {
-  constructor (opts) {
-    super(opts, 'DeflateRaw')
-  }
-}
-
-class InflateRaw extends Zlib {
-  constructor (opts) {
-    super(opts, 'InflateRaw')
-  }
-}
-
-// auto-detect header.
-class Unzip extends Zlib {
-  constructor (opts) {
-    super(opts, 'Unzip')
-  }
-}
-
-class Brotli extends ZlibBase {
-  constructor (opts, mode) {
-    opts = opts || {}
-
-    opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
-    opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
-
-    super(opts, mode)
-
-    this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
-  }
-}
-
-class BrotliCompress extends Brotli {
-  constructor (opts) {
-    super(opts, 'BrotliCompress')
-  }
-}
-
-class BrotliDecompress extends Brotli {
-  constructor (opts) {
-    super(opts, 'BrotliDecompress')
-  }
-}
-
-exports.Deflate = Deflate
-exports.Inflate = Inflate
-exports.Gzip = Gzip
-exports.Gunzip = Gunzip
-exports.DeflateRaw = DeflateRaw
-exports.InflateRaw = InflateRaw
-exports.Unzip = Unzip
-/* istanbul ignore else */
-if (typeof realZlib.BrotliCompress === 'function') {
-  exports.BrotliCompress = BrotliCompress
-  exports.BrotliDecompress = BrotliDecompress
-} else {
-  exports.BrotliCompress = exports.BrotliDecompress = class {
-    constructor () {
-      throw new Error('Brotli is not supported in this version of Node.js')
-    }
-  }
-}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE b/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
deleted file mode 100644
index bf1dece2e1f12..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/minipass/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js b/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
deleted file mode 100644
index e8797aab6cc27..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/minipass/index.js
+++ /dev/null
@@ -1,649 +0,0 @@
-'use strict'
-const proc = typeof process === 'object' && process ? process : {
-  stdout: null,
-  stderr: null,
-}
-const EE = require('events')
-const Stream = require('stream')
-const SD = require('string_decoder').StringDecoder
-
-const EOF = Symbol('EOF')
-const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
-const EMITTED_END = Symbol('emittedEnd')
-const EMITTING_END = Symbol('emittingEnd')
-const EMITTED_ERROR = Symbol('emittedError')
-const CLOSED = Symbol('closed')
-const READ = Symbol('read')
-const FLUSH = Symbol('flush')
-const FLUSHCHUNK = Symbol('flushChunk')
-const ENCODING = Symbol('encoding')
-const DECODER = Symbol('decoder')
-const FLOWING = Symbol('flowing')
-const PAUSED = Symbol('paused')
-const RESUME = Symbol('resume')
-const BUFFERLENGTH = Symbol('bufferLength')
-const BUFFERPUSH = Symbol('bufferPush')
-const BUFFERSHIFT = Symbol('bufferShift')
-const OBJECTMODE = Symbol('objectMode')
-const DESTROYED = Symbol('destroyed')
-const EMITDATA = Symbol('emitData')
-const EMITEND = Symbol('emitEnd')
-const EMITEND2 = Symbol('emitEnd2')
-const ASYNC = Symbol('async')
-
-const defer = fn => Promise.resolve().then(fn)
-
-// TODO remove when Node v8 support drops
-const doIter = global._MP_NO_ITERATOR_SYMBOLS_  !== '1'
-const ASYNCITERATOR = doIter && Symbol.asyncIterator
-  || Symbol('asyncIterator not implemented')
-const ITERATOR = doIter && Symbol.iterator
-  || Symbol('iterator not implemented')
-
-// events that mean 'the stream is over'
-// these are treated specially, and re-emitted
-// if they are listened for after emitting.
-const isEndish = ev =>
-  ev === 'end' ||
-  ev === 'finish' ||
-  ev === 'prefinish'
-
-const isArrayBuffer = b => b instanceof ArrayBuffer ||
-  typeof b === 'object' &&
-  b.constructor &&
-  b.constructor.name === 'ArrayBuffer' &&
-  b.byteLength >= 0
-
-const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
-
-class Pipe {
-  constructor (src, dest, opts) {
-    this.src = src
-    this.dest = dest
-    this.opts = opts
-    this.ondrain = () => src[RESUME]()
-    dest.on('drain', this.ondrain)
-  }
-  unpipe () {
-    this.dest.removeListener('drain', this.ondrain)
-  }
-  // istanbul ignore next - only here for the prototype
-  proxyErrors () {}
-  end () {
-    this.unpipe()
-    if (this.opts.end)
-      this.dest.end()
-  }
-}
-
-class PipeProxyErrors extends Pipe {
-  unpipe () {
-    this.src.removeListener('error', this.proxyErrors)
-    super.unpipe()
-  }
-  constructor (src, dest, opts) {
-    super(src, dest, opts)
-    this.proxyErrors = er => dest.emit('error', er)
-    src.on('error', this.proxyErrors)
-  }
-}
-
-module.exports = class Minipass extends Stream {
-  constructor (options) {
-    super()
-    this[FLOWING] = false
-    // whether we're explicitly paused
-    this[PAUSED] = false
-    this.pipes = []
-    this.buffer = []
-    this[OBJECTMODE] = options && options.objectMode || false
-    if (this[OBJECTMODE])
-      this[ENCODING] = null
-    else
-      this[ENCODING] = options && options.encoding || null
-    if (this[ENCODING] === 'buffer')
-      this[ENCODING] = null
-    this[ASYNC] = options && !!options.async || false
-    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
-    this[EOF] = false
-    this[EMITTED_END] = false
-    this[EMITTING_END] = false
-    this[CLOSED] = false
-    this[EMITTED_ERROR] = null
-    this.writable = true
-    this.readable = true
-    this[BUFFERLENGTH] = 0
-    this[DESTROYED] = false
-  }
-
-  get bufferLength () { return this[BUFFERLENGTH] }
-
-  get encoding () { return this[ENCODING] }
-  set encoding (enc) {
-    if (this[OBJECTMODE])
-      throw new Error('cannot set encoding in objectMode')
-
-    if (this[ENCODING] && enc !== this[ENCODING] &&
-        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
-      throw new Error('cannot change encoding')
-
-    if (this[ENCODING] !== enc) {
-      this[DECODER] = enc ? new SD(enc) : null
-      if (this.buffer.length)
-        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
-    }
-
-    this[ENCODING] = enc
-  }
-
-  setEncoding (enc) {
-    this.encoding = enc
-  }
-
-  get objectMode () { return this[OBJECTMODE] }
-  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
-
-  get ['async'] () { return this[ASYNC] }
-  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
-
-  write (chunk, encoding, cb) {
-    if (this[EOF])
-      throw new Error('write after end')
-
-    if (this[DESTROYED]) {
-      this.emit('error', Object.assign(
-        new Error('Cannot call write after a stream was destroyed'),
-        { code: 'ERR_STREAM_DESTROYED' }
-      ))
-      return true
-    }
-
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-
-    if (!encoding)
-      encoding = 'utf8'
-
-    const fn = this[ASYNC] ? defer : f => f()
-
-    // convert array buffers and typed array views into buffers
-    // at some point in the future, we may want to do the opposite!
-    // leave strings and buffers as-is
-    // anything else switches us into object mode
-    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
-      if (isArrayBufferView(chunk))
-        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
-      else if (isArrayBuffer(chunk))
-        chunk = Buffer.from(chunk)
-      else if (typeof chunk !== 'string')
-        // use the setter so we throw if we have encoding set
-        this.objectMode = true
-    }
-
-    // handle object mode up front, since it's simpler
-    // this yields better performance, fewer checks later.
-    if (this[OBJECTMODE]) {
-      /* istanbul ignore if - maybe impossible? */
-      if (this.flowing && this[BUFFERLENGTH] !== 0)
-        this[FLUSH](true)
-
-      if (this.flowing)
-        this.emit('data', chunk)
-      else
-        this[BUFFERPUSH](chunk)
-
-      if (this[BUFFERLENGTH] !== 0)
-        this.emit('readable')
-
-      if (cb)
-        fn(cb)
-
-      return this.flowing
-    }
-
-    // at this point the chunk is a buffer or string
-    // don't buffer it up or send it to the decoder
-    if (!chunk.length) {
-      if (this[BUFFERLENGTH] !== 0)
-        this.emit('readable')
-      if (cb)
-        fn(cb)
-      return this.flowing
-    }
-
-    // fast-path writing strings of same encoding to a stream with
-    // an empty buffer, skipping the buffer/decoder dance
-    if (typeof chunk === 'string' &&
-        // unless it is a string already ready for us to use
-        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
-      chunk = Buffer.from(chunk, encoding)
-    }
-
-    if (Buffer.isBuffer(chunk) && this[ENCODING])
-      chunk = this[DECODER].write(chunk)
-
-    // Note: flushing CAN potentially switch us into not-flowing mode
-    if (this.flowing && this[BUFFERLENGTH] !== 0)
-      this[FLUSH](true)
-
-    if (this.flowing)
-      this.emit('data', chunk)
-    else
-      this[BUFFERPUSH](chunk)
-
-    if (this[BUFFERLENGTH] !== 0)
-      this.emit('readable')
-
-    if (cb)
-      fn(cb)
-
-    return this.flowing
-  }
-
-  read (n) {
-    if (this[DESTROYED])
-      return null
-
-    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
-      this[MAYBE_EMIT_END]()
-      return null
-    }
-
-    if (this[OBJECTMODE])
-      n = null
-
-    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
-      if (this.encoding)
-        this.buffer = [this.buffer.join('')]
-      else
-        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
-    }
-
-    const ret = this[READ](n || null, this.buffer[0])
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [READ] (n, chunk) {
-    if (n === chunk.length || n === null)
-      this[BUFFERSHIFT]()
-    else {
-      this.buffer[0] = chunk.slice(n)
-      chunk = chunk.slice(0, n)
-      this[BUFFERLENGTH] -= n
-    }
-
-    this.emit('data', chunk)
-
-    if (!this.buffer.length && !this[EOF])
-      this.emit('drain')
-
-    return chunk
-  }
-
-  end (chunk, encoding, cb) {
-    if (typeof chunk === 'function')
-      cb = chunk, chunk = null
-    if (typeof encoding === 'function')
-      cb = encoding, encoding = 'utf8'
-    if (chunk)
-      this.write(chunk, encoding)
-    if (cb)
-      this.once('end', cb)
-    this[EOF] = true
-    this.writable = false
-
-    // if we haven't written anything, then go ahead and emit,
-    // even if we're not reading.
-    // we'll re-emit if a new 'end' listener is added anyway.
-    // This makes MP more suitable to write-only use cases.
-    if (this.flowing || !this[PAUSED])
-      this[MAYBE_EMIT_END]()
-    return this
-  }
-
-  // don't let the internal resume be overwritten
-  [RESUME] () {
-    if (this[DESTROYED])
-      return
-
-    this[PAUSED] = false
-    this[FLOWING] = true
-    this.emit('resume')
-    if (this.buffer.length)
-      this[FLUSH]()
-    else if (this[EOF])
-      this[MAYBE_EMIT_END]()
-    else
-      this.emit('drain')
-  }
-
-  resume () {
-    return this[RESUME]()
-  }
-
-  pause () {
-    this[FLOWING] = false
-    this[PAUSED] = true
-  }
-
-  get destroyed () {
-    return this[DESTROYED]
-  }
-
-  get flowing () {
-    return this[FLOWING]
-  }
-
-  get paused () {
-    return this[PAUSED]
-  }
-
-  [BUFFERPUSH] (chunk) {
-    if (this[OBJECTMODE])
-      this[BUFFERLENGTH] += 1
-    else
-      this[BUFFERLENGTH] += chunk.length
-    this.buffer.push(chunk)
-  }
-
-  [BUFFERSHIFT] () {
-    if (this.buffer.length) {
-      if (this[OBJECTMODE])
-        this[BUFFERLENGTH] -= 1
-      else
-        this[BUFFERLENGTH] -= this.buffer[0].length
-    }
-    return this.buffer.shift()
-  }
-
-  [FLUSH] (noDrain) {
-    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
-
-    if (!noDrain && !this.buffer.length && !this[EOF])
-      this.emit('drain')
-  }
-
-  [FLUSHCHUNK] (chunk) {
-    return chunk ? (this.emit('data', chunk), this.flowing) : false
-  }
-
-  pipe (dest, opts) {
-    if (this[DESTROYED])
-      return
-
-    const ended = this[EMITTED_END]
-    opts = opts || {}
-    if (dest === proc.stdout || dest === proc.stderr)
-      opts.end = false
-    else
-      opts.end = opts.end !== false
-    opts.proxyErrors = !!opts.proxyErrors
-
-    // piping an ended stream ends immediately
-    if (ended) {
-      if (opts.end)
-        dest.end()
-    } else {
-      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
-        : new PipeProxyErrors(this, dest, opts))
-      if (this[ASYNC])
-        defer(() => this[RESUME]())
-      else
-        this[RESUME]()
-    }
-
-    return dest
-  }
-
-  unpipe (dest) {
-    const p = this.pipes.find(p => p.dest === dest)
-    if (p) {
-      this.pipes.splice(this.pipes.indexOf(p), 1)
-      p.unpipe()
-    }
-  }
-
-  addListener (ev, fn) {
-    return this.on(ev, fn)
-  }
-
-  on (ev, fn) {
-    const ret = super.on(ev, fn)
-    if (ev === 'data' && !this.pipes.length && !this.flowing)
-      this[RESUME]()
-    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
-      super.emit('readable')
-    else if (isEndish(ev) && this[EMITTED_END]) {
-      super.emit(ev)
-      this.removeAllListeners(ev)
-    } else if (ev === 'error' && this[EMITTED_ERROR]) {
-      if (this[ASYNC])
-        defer(() => fn.call(this, this[EMITTED_ERROR]))
-      else
-        fn.call(this, this[EMITTED_ERROR])
-    }
-    return ret
-  }
-
-  get emittedEnd () {
-    return this[EMITTED_END]
-  }
-
-  [MAYBE_EMIT_END] () {
-    if (!this[EMITTING_END] &&
-        !this[EMITTED_END] &&
-        !this[DESTROYED] &&
-        this.buffer.length === 0 &&
-        this[EOF]) {
-      this[EMITTING_END] = true
-      this.emit('end')
-      this.emit('prefinish')
-      this.emit('finish')
-      if (this[CLOSED])
-        this.emit('close')
-      this[EMITTING_END] = false
-    }
-  }
-
-  emit (ev, data, ...extra) {
-    // error and close are only events allowed after calling destroy()
-    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
-      return
-    else if (ev === 'data') {
-      return !data ? false
-        : this[ASYNC] ? defer(() => this[EMITDATA](data))
-        : this[EMITDATA](data)
-    } else if (ev === 'end') {
-      return this[EMITEND]()
-    } else if (ev === 'close') {
-      this[CLOSED] = true
-      // don't emit close before 'end' and 'finish'
-      if (!this[EMITTED_END] && !this[DESTROYED])
-        return
-      const ret = super.emit('close')
-      this.removeAllListeners('close')
-      return ret
-    } else if (ev === 'error') {
-      this[EMITTED_ERROR] = data
-      const ret = super.emit('error', data)
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'resume') {
-      const ret = super.emit('resume')
-      this[MAYBE_EMIT_END]()
-      return ret
-    } else if (ev === 'finish' || ev === 'prefinish') {
-      const ret = super.emit(ev)
-      this.removeAllListeners(ev)
-      return ret
-    }
-
-    // Some other unknown event
-    const ret = super.emit(ev, data, ...extra)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITDATA] (data) {
-    for (const p of this.pipes) {
-      if (p.dest.write(data) === false)
-        this.pause()
-    }
-    const ret = super.emit('data', data)
-    this[MAYBE_EMIT_END]()
-    return ret
-  }
-
-  [EMITEND] () {
-    if (this[EMITTED_END])
-      return
-
-    this[EMITTED_END] = true
-    this.readable = false
-    if (this[ASYNC])
-      defer(() => this[EMITEND2]())
-    else
-      this[EMITEND2]()
-  }
-
-  [EMITEND2] () {
-    if (this[DECODER]) {
-      const data = this[DECODER].end()
-      if (data) {
-        for (const p of this.pipes) {
-          p.dest.write(data)
-        }
-        super.emit('data', data)
-      }
-    }
-
-    for (const p of this.pipes) {
-      p.end()
-    }
-    const ret = super.emit('end')
-    this.removeAllListeners('end')
-    return ret
-  }
-
-  // const all = await stream.collect()
-  collect () {
-    const buf = []
-    if (!this[OBJECTMODE])
-      buf.dataLength = 0
-    // set the promise first, in case an error is raised
-    // by triggering the flow here.
-    const p = this.promise()
-    this.on('data', c => {
-      buf.push(c)
-      if (!this[OBJECTMODE])
-        buf.dataLength += c.length
-    })
-    return p.then(() => buf)
-  }
-
-  // const data = await stream.concat()
-  concat () {
-    return this[OBJECTMODE]
-      ? Promise.reject(new Error('cannot concat in objectMode'))
-      : this.collect().then(buf =>
-          this[OBJECTMODE]
-            ? Promise.reject(new Error('cannot concat in objectMode'))
-            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
-  }
-
-  // stream.promise().then(() => done, er => emitted error)
-  promise () {
-    return new Promise((resolve, reject) => {
-      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
-      this.on('error', er => reject(er))
-      this.on('end', () => resolve())
-    })
-  }
-
-  // for await (let chunk of stream)
-  [ASYNCITERATOR] () {
-    const next = () => {
-      const res = this.read()
-      if (res !== null)
-        return Promise.resolve({ done: false, value: res })
-
-      if (this[EOF])
-        return Promise.resolve({ done: true })
-
-      let resolve = null
-      let reject = null
-      const onerr = er => {
-        this.removeListener('data', ondata)
-        this.removeListener('end', onend)
-        reject(er)
-      }
-      const ondata = value => {
-        this.removeListener('error', onerr)
-        this.removeListener('end', onend)
-        this.pause()
-        resolve({ value: value, done: !!this[EOF] })
-      }
-      const onend = () => {
-        this.removeListener('error', onerr)
-        this.removeListener('data', ondata)
-        resolve({ done: true })
-      }
-      const ondestroy = () => onerr(new Error('stream destroyed'))
-      return new Promise((res, rej) => {
-        reject = rej
-        resolve = res
-        this.once(DESTROYED, ondestroy)
-        this.once('error', onerr)
-        this.once('end', onend)
-        this.once('data', ondata)
-      })
-    }
-
-    return { next }
-  }
-
-  // for (let chunk of stream)
-  [ITERATOR] () {
-    const next = () => {
-      const value = this.read()
-      const done = value === null
-      return { value, done }
-    }
-    return { next }
-  }
-
-  destroy (er) {
-    if (this[DESTROYED]) {
-      if (er)
-        this.emit('error', er)
-      else
-        this.emit(DESTROYED)
-      return this
-    }
-
-    this[DESTROYED] = true
-
-    // throw away all buffered data, it's never coming out
-    this.buffer.length = 0
-    this[BUFFERLENGTH] = 0
-
-    if (typeof this.close === 'function' && !this[CLOSED])
-      this.close()
-
-    if (er)
-      this.emit('error', er)
-    else // if no error to emit, still reject pending promises
-      this.emit(DESTROYED)
-
-    return this
-  }
-
-  static isStream (s) {
-    return !!s && (s instanceof Minipass || s instanceof Stream ||
-      s instanceof EE && (
-        typeof s.pipe === 'function' || // readable
-        (typeof s.write === 'function' && typeof s.end === 'function') // writable
-      ))
-  }
-}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json b/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
deleted file mode 100644
index 548d03fa6d5d4..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/minipass/package.json
+++ /dev/null
@@ -1,56 +0,0 @@
-{
-  "name": "minipass",
-  "version": "3.3.6",
-  "description": "minimal implementation of a PassThrough stream",
-  "main": "index.js",
-  "types": "index.d.ts",
-  "dependencies": {
-    "yallist": "^4.0.0"
-  },
-  "devDependencies": {
-    "@types/node": "^17.0.41",
-    "end-of-stream": "^1.4.0",
-    "prettier": "^2.6.2",
-    "tap": "^16.2.0",
-    "through2": "^2.0.3",
-    "ts-node": "^10.8.1",
-    "typescript": "^4.7.3"
-  },
-  "scripts": {
-    "test": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minipass.git"
-  },
-  "keywords": [
-    "passthrough",
-    "stream"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC",
-  "files": [
-    "index.d.ts",
-    "index.js"
-  ],
-  "tap": {
-    "check-coverage": true
-  },
-  "engines": {
-    "node": ">=8"
-  },
-  "prettier": {
-    "semi": false,
-    "printWidth": 80,
-    "tabWidth": 2,
-    "useTabs": false,
-    "singleQuote": true,
-    "jsxSingleQuote": false,
-    "bracketSameLine": true,
-    "arrowParens": "avoid",
-    "endOfLine": "lf"
-  }
-}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE b/node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE
deleted file mode 100644
index 19129e315fe59..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/yallist/LICENSE
+++ /dev/null
@@ -1,15 +0,0 @@
-The ISC License
-
-Copyright (c) Isaac Z. Schlueter and Contributors
-
-Permission to use, copy, modify, and/or distribute this software for any
-purpose with or without fee is hereby granted, provided that the above
-copyright notice and this permission notice appear in all copies.
-
-THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
-WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
-ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
-WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
-ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
-IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js b/node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js
deleted file mode 100644
index d41c97a19f984..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/yallist/iterator.js
+++ /dev/null
@@ -1,8 +0,0 @@
-'use strict'
-module.exports = function (Yallist) {
-  Yallist.prototype[Symbol.iterator] = function* () {
-    for (let walker = this.head; walker; walker = walker.next) {
-      yield walker.value
-    }
-  }
-}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json b/node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json
deleted file mode 100644
index 8a083867d72e0..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/yallist/package.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
-  "name": "yallist",
-  "version": "4.0.0",
-  "description": "Yet Another Linked List",
-  "main": "yallist.js",
-  "directories": {
-    "test": "test"
-  },
-  "files": [
-    "yallist.js",
-    "iterator.js"
-  ],
-  "dependencies": {},
-  "devDependencies": {
-    "tap": "^12.1.0"
-  },
-  "scripts": {
-    "test": "tap test/*.js --100",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --all; git push origin --tags"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/yallist.git"
-  },
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "ISC"
-}
diff --git a/node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js b/node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js
deleted file mode 100644
index 4e83ab1c542a5..0000000000000
--- a/node_modules/tar/node_modules/minizlib/node_modules/yallist/yallist.js
+++ /dev/null
@@ -1,426 +0,0 @@
-'use strict'
-module.exports = Yallist
-
-Yallist.Node = Node
-Yallist.create = Yallist
-
-function Yallist (list) {
-  var self = this
-  if (!(self instanceof Yallist)) {
-    self = new Yallist()
-  }
-
-  self.tail = null
-  self.head = null
-  self.length = 0
-
-  if (list && typeof list.forEach === 'function') {
-    list.forEach(function (item) {
-      self.push(item)
-    })
-  } else if (arguments.length > 0) {
-    for (var i = 0, l = arguments.length; i < l; i++) {
-      self.push(arguments[i])
-    }
-  }
-
-  return self
-}
-
-Yallist.prototype.removeNode = function (node) {
-  if (node.list !== this) {
-    throw new Error('removing node which does not belong to this list')
-  }
-
-  var next = node.next
-  var prev = node.prev
-
-  if (next) {
-    next.prev = prev
-  }
-
-  if (prev) {
-    prev.next = next
-  }
-
-  if (node === this.head) {
-    this.head = next
-  }
-  if (node === this.tail) {
-    this.tail = prev
-  }
-
-  node.list.length--
-  node.next = null
-  node.prev = null
-  node.list = null
-
-  return next
-}
-
-Yallist.prototype.unshiftNode = function (node) {
-  if (node === this.head) {
-    return
-  }
-
-  if (node.list) {
-    node.list.removeNode(node)
-  }
-
-  var head = this.head
-  node.list = this
-  node.next = head
-  if (head) {
-    head.prev = node
-  }
-
-  this.head = node
-  if (!this.tail) {
-    this.tail = node
-  }
-  this.length++
-}
-
-Yallist.prototype.pushNode = function (node) {
-  if (node === this.tail) {
-    return
-  }
-
-  if (node.list) {
-    node.list.removeNode(node)
-  }
-
-  var tail = this.tail
-  node.list = this
-  node.prev = tail
-  if (tail) {
-    tail.next = node
-  }
-
-  this.tail = node
-  if (!this.head) {
-    this.head = node
-  }
-  this.length++
-}
-
-Yallist.prototype.push = function () {
-  for (var i = 0, l = arguments.length; i < l; i++) {
-    push(this, arguments[i])
-  }
-  return this.length
-}
-
-Yallist.prototype.unshift = function () {
-  for (var i = 0, l = arguments.length; i < l; i++) {
-    unshift(this, arguments[i])
-  }
-  return this.length
-}
-
-Yallist.prototype.pop = function () {
-  if (!this.tail) {
-    return undefined
-  }
-
-  var res = this.tail.value
-  this.tail = this.tail.prev
-  if (this.tail) {
-    this.tail.next = null
-  } else {
-    this.head = null
-  }
-  this.length--
-  return res
-}
-
-Yallist.prototype.shift = function () {
-  if (!this.head) {
-    return undefined
-  }
-
-  var res = this.head.value
-  this.head = this.head.next
-  if (this.head) {
-    this.head.prev = null
-  } else {
-    this.tail = null
-  }
-  this.length--
-  return res
-}
-
-Yallist.prototype.forEach = function (fn, thisp) {
-  thisp = thisp || this
-  for (var walker = this.head, i = 0; walker !== null; i++) {
-    fn.call(thisp, walker.value, i, this)
-    walker = walker.next
-  }
-}
-
-Yallist.prototype.forEachReverse = function (fn, thisp) {
-  thisp = thisp || this
-  for (var walker = this.tail, i = this.length - 1; walker !== null; i--) {
-    fn.call(thisp, walker.value, i, this)
-    walker = walker.prev
-  }
-}
-
-Yallist.prototype.get = function (n) {
-  for (var i = 0, walker = this.head; walker !== null && i < n; i++) {
-    // abort out of the list early if we hit a cycle
-    walker = walker.next
-  }
-  if (i === n && walker !== null) {
-    return walker.value
-  }
-}
-
-Yallist.prototype.getReverse = function (n) {
-  for (var i = 0, walker = this.tail; walker !== null && i < n; i++) {
-    // abort out of the list early if we hit a cycle
-    walker = walker.prev
-  }
-  if (i === n && walker !== null) {
-    return walker.value
-  }
-}
-
-Yallist.prototype.map = function (fn, thisp) {
-  thisp = thisp || this
-  var res = new Yallist()
-  for (var walker = this.head; walker !== null;) {
-    res.push(fn.call(thisp, walker.value, this))
-    walker = walker.next
-  }
-  return res
-}
-
-Yallist.prototype.mapReverse = function (fn, thisp) {
-  thisp = thisp || this
-  var res = new Yallist()
-  for (var walker = this.tail; walker !== null;) {
-    res.push(fn.call(thisp, walker.value, this))
-    walker = walker.prev
-  }
-  return res
-}
-
-Yallist.prototype.reduce = function (fn, initial) {
-  var acc
-  var walker = this.head
-  if (arguments.length > 1) {
-    acc = initial
-  } else if (this.head) {
-    walker = this.head.next
-    acc = this.head.value
-  } else {
-    throw new TypeError('Reduce of empty list with no initial value')
-  }
-
-  for (var i = 0; walker !== null; i++) {
-    acc = fn(acc, walker.value, i)
-    walker = walker.next
-  }
-
-  return acc
-}
-
-Yallist.prototype.reduceReverse = function (fn, initial) {
-  var acc
-  var walker = this.tail
-  if (arguments.length > 1) {
-    acc = initial
-  } else if (this.tail) {
-    walker = this.tail.prev
-    acc = this.tail.value
-  } else {
-    throw new TypeError('Reduce of empty list with no initial value')
-  }
-
-  for (var i = this.length - 1; walker !== null; i--) {
-    acc = fn(acc, walker.value, i)
-    walker = walker.prev
-  }
-
-  return acc
-}
-
-Yallist.prototype.toArray = function () {
-  var arr = new Array(this.length)
-  for (var i = 0, walker = this.head; walker !== null; i++) {
-    arr[i] = walker.value
-    walker = walker.next
-  }
-  return arr
-}
-
-Yallist.prototype.toArrayReverse = function () {
-  var arr = new Array(this.length)
-  for (var i = 0, walker = this.tail; walker !== null; i++) {
-    arr[i] = walker.value
-    walker = walker.prev
-  }
-  return arr
-}
-
-Yallist.prototype.slice = function (from, to) {
-  to = to || this.length
-  if (to < 0) {
-    to += this.length
-  }
-  from = from || 0
-  if (from < 0) {
-    from += this.length
-  }
-  var ret = new Yallist()
-  if (to < from || to < 0) {
-    return ret
-  }
-  if (from < 0) {
-    from = 0
-  }
-  if (to > this.length) {
-    to = this.length
-  }
-  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
-    walker = walker.next
-  }
-  for (; walker !== null && i < to; i++, walker = walker.next) {
-    ret.push(walker.value)
-  }
-  return ret
-}
-
-Yallist.prototype.sliceReverse = function (from, to) {
-  to = to || this.length
-  if (to < 0) {
-    to += this.length
-  }
-  from = from || 0
-  if (from < 0) {
-    from += this.length
-  }
-  var ret = new Yallist()
-  if (to < from || to < 0) {
-    return ret
-  }
-  if (from < 0) {
-    from = 0
-  }
-  if (to > this.length) {
-    to = this.length
-  }
-  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
-    walker = walker.prev
-  }
-  for (; walker !== null && i > from; i--, walker = walker.prev) {
-    ret.push(walker.value)
-  }
-  return ret
-}
-
-Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
-  if (start > this.length) {
-    start = this.length - 1
-  }
-  if (start < 0) {
-    start = this.length + start;
-  }
-
-  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
-    walker = walker.next
-  }
-
-  var ret = []
-  for (var i = 0; walker && i < deleteCount; i++) {
-    ret.push(walker.value)
-    walker = this.removeNode(walker)
-  }
-  if (walker === null) {
-    walker = this.tail
-  }
-
-  if (walker !== this.head && walker !== this.tail) {
-    walker = walker.prev
-  }
-
-  for (var i = 0; i < nodes.length; i++) {
-    walker = insert(this, walker, nodes[i])
-  }
-  return ret;
-}
-
-Yallist.prototype.reverse = function () {
-  var head = this.head
-  var tail = this.tail
-  for (var walker = head; walker !== null; walker = walker.prev) {
-    var p = walker.prev
-    walker.prev = walker.next
-    walker.next = p
-  }
-  this.head = tail
-  this.tail = head
-  return this
-}
-
-function insert (self, node, value) {
-  var inserted = node === self.head ?
-    new Node(value, null, node, self) :
-    new Node(value, node, node.next, self)
-
-  if (inserted.next === null) {
-    self.tail = inserted
-  }
-  if (inserted.prev === null) {
-    self.head = inserted
-  }
-
-  self.length++
-
-  return inserted
-}
-
-function push (self, item) {
-  self.tail = new Node(item, self.tail, null, self)
-  if (!self.head) {
-    self.head = self.tail
-  }
-  self.length++
-}
-
-function unshift (self, item) {
-  self.head = new Node(item, null, self.head, self)
-  if (!self.tail) {
-    self.tail = self.head
-  }
-  self.length++
-}
-
-function Node (value, prev, next, list) {
-  if (!(this instanceof Node)) {
-    return new Node(value, prev, next, list)
-  }
-
-  this.list = list
-  this.value = value
-
-  if (prev) {
-    prev.next = this
-    this.prev = prev
-  } else {
-    this.prev = null
-  }
-
-  if (next) {
-    next.prev = this
-    this.next = next
-  } else {
-    this.next = null
-  }
-}
-
-try {
-  // add if support for Symbol.iterator is present
-  require('./iterator.js')(Yallist)
-} catch (er) {}
diff --git a/node_modules/tar/node_modules/minizlib/package.json b/node_modules/tar/node_modules/minizlib/package.json
deleted file mode 100644
index 98825a549f3fd..0000000000000
--- a/node_modules/tar/node_modules/minizlib/package.json
+++ /dev/null
@@ -1,42 +0,0 @@
-{
-  "name": "minizlib",
-  "version": "2.1.2",
-  "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.",
-  "main": "index.js",
-  "dependencies": {
-    "minipass": "^3.0.0",
-    "yallist": "^4.0.0"
-  },
-  "scripts": {
-    "test": "tap test/*.js --100 -J",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --all; git push origin --tags"
-  },
-  "repository": {
-    "type": "git",
-    "url": "git+https://github.com/isaacs/minizlib.git"
-  },
-  "keywords": [
-    "zlib",
-    "gzip",
-    "gunzip",
-    "deflate",
-    "inflate",
-    "compression",
-    "zip",
-    "unzip"
-  ],
-  "author": "Isaac Z. Schlueter  (http://blog.izs.me/)",
-  "license": "MIT",
-  "devDependencies": {
-    "tap": "^14.6.9"
-  },
-  "files": [
-    "index.js",
-    "constants.js"
-  ],
-  "engines": {
-    "node": ">= 8"
-  }
-}
diff --git a/node_modules/tar/node_modules/mkdirp/LICENSE b/node_modules/tar/node_modules/mkdirp/LICENSE
deleted file mode 100644
index 13fcd15f0e0be..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/LICENSE
+++ /dev/null
@@ -1,21 +0,0 @@
-Copyright James Halliday (mail@substack.net) and Isaac Z. Schlueter (i@izs.me)
-
-This project is free software released under the MIT license:
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/node_modules/tar/node_modules/mkdirp/bin/cmd.js b/node_modules/tar/node_modules/mkdirp/bin/cmd.js
deleted file mode 100755
index 6e0aa8dc4667b..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/bin/cmd.js
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env node
-
-const usage = () => `
-usage: mkdirp [DIR1,DIR2..] {OPTIONS}
-
-  Create each supplied directory including any necessary parent directories
-  that don't yet exist.
-
-  If the directory already exists, do nothing.
-
-OPTIONS are:
-
-  -m       If a directory needs to be created, set the mode as an octal
-  --mode=  permission string.
-
-  -v --version   Print the mkdirp version number
-
-  -h --help      Print this helpful banner
-
-  -p --print     Print the first directories created for each path provided
-
-  --manual       Use manual implementation, even if native is available
-`
-
-const dirs = []
-const opts = {}
-let print = false
-let dashdash = false
-let manual = false
-for (const arg of process.argv.slice(2)) {
-  if (dashdash)
-    dirs.push(arg)
-  else if (arg === '--')
-    dashdash = true
-  else if (arg === '--manual')
-    manual = true
-  else if (/^-h/.test(arg) || /^--help/.test(arg)) {
-    console.log(usage())
-    process.exit(0)
-  } else if (arg === '-v' || arg === '--version') {
-    console.log(require('../package.json').version)
-    process.exit(0)
-  } else if (arg === '-p' || arg === '--print') {
-    print = true
-  } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {
-    const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)
-    if (isNaN(mode)) {
-      console.error(`invalid mode argument: ${arg}\nMust be an octal number.`)
-      process.exit(1)
-    }
-    opts.mode = mode
-  } else
-    dirs.push(arg)
-}
-
-const mkdirp = require('../')
-const impl = manual ? mkdirp.manual : mkdirp
-if (dirs.length === 0)
-  console.error(usage())
-
-Promise.all(dirs.map(dir => impl(dir, opts)))
-  .then(made => print ? made.forEach(m => m && console.log(m)) : null)
-  .catch(er => {
-    console.error(er.message)
-    if (er.code)
-      console.error('  code: ' + er.code)
-    process.exit(1)
-  })
diff --git a/node_modules/tar/node_modules/mkdirp/index.js b/node_modules/tar/node_modules/mkdirp/index.js
deleted file mode 100644
index ad7a16c9f45d9..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/index.js
+++ /dev/null
@@ -1,31 +0,0 @@
-const optsArg = require('./lib/opts-arg.js')
-const pathArg = require('./lib/path-arg.js')
-
-const {mkdirpNative, mkdirpNativeSync} = require('./lib/mkdirp-native.js')
-const {mkdirpManual, mkdirpManualSync} = require('./lib/mkdirp-manual.js')
-const {useNative, useNativeSync} = require('./lib/use-native.js')
-
-
-const mkdirp = (path, opts) => {
-  path = pathArg(path)
-  opts = optsArg(opts)
-  return useNative(opts)
-    ? mkdirpNative(path, opts)
-    : mkdirpManual(path, opts)
-}
-
-const mkdirpSync = (path, opts) => {
-  path = pathArg(path)
-  opts = optsArg(opts)
-  return useNativeSync(opts)
-    ? mkdirpNativeSync(path, opts)
-    : mkdirpManualSync(path, opts)
-}
-
-mkdirp.sync = mkdirpSync
-mkdirp.native = (path, opts) => mkdirpNative(pathArg(path), optsArg(opts))
-mkdirp.manual = (path, opts) => mkdirpManual(pathArg(path), optsArg(opts))
-mkdirp.nativeSync = (path, opts) => mkdirpNativeSync(pathArg(path), optsArg(opts))
-mkdirp.manualSync = (path, opts) => mkdirpManualSync(pathArg(path), optsArg(opts))
-
-module.exports = mkdirp
diff --git a/node_modules/tar/node_modules/mkdirp/lib/find-made.js b/node_modules/tar/node_modules/mkdirp/lib/find-made.js
deleted file mode 100644
index 022e492c085da..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/lib/find-made.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const {dirname} = require('path')
-
-const findMade = (opts, parent, path = undefined) => {
-  // we never want the 'made' return value to be a root directory
-  if (path === parent)
-    return Promise.resolve()
-
-  return opts.statAsync(parent).then(
-    st => st.isDirectory() ? path : undefined, // will fail later
-    er => er.code === 'ENOENT'
-      ? findMade(opts, dirname(parent), parent)
-      : undefined
-  )
-}
-
-const findMadeSync = (opts, parent, path = undefined) => {
-  if (path === parent)
-    return undefined
-
-  try {
-    return opts.statSync(parent).isDirectory() ? path : undefined
-  } catch (er) {
-    return er.code === 'ENOENT'
-      ? findMadeSync(opts, dirname(parent), parent)
-      : undefined
-  }
-}
-
-module.exports = {findMade, findMadeSync}
diff --git a/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js b/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
deleted file mode 100644
index 2eb18cd64eb79..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/lib/mkdirp-manual.js
+++ /dev/null
@@ -1,64 +0,0 @@
-const {dirname} = require('path')
-
-const mkdirpManual = (path, opts, made) => {
-  opts.recursive = false
-  const parent = dirname(path)
-  if (parent === path) {
-    return opts.mkdirAsync(path, opts).catch(er => {
-      // swallowed by recursive implementation on posix systems
-      // any other error is a failure
-      if (er.code !== 'EISDIR')
-        throw er
-    })
-  }
-
-  return opts.mkdirAsync(path, opts).then(() => made || path, er => {
-    if (er.code === 'ENOENT')
-      return mkdirpManual(parent, opts)
-        .then(made => mkdirpManual(path, opts, made))
-    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
-      throw er
-    return opts.statAsync(path).then(st => {
-      if (st.isDirectory())
-        return made
-      else
-        throw er
-    }, () => { throw er })
-  })
-}
-
-const mkdirpManualSync = (path, opts, made) => {
-  const parent = dirname(path)
-  opts.recursive = false
-
-  if (parent === path) {
-    try {
-      return opts.mkdirSync(path, opts)
-    } catch (er) {
-      // swallowed by recursive implementation on posix systems
-      // any other error is a failure
-      if (er.code !== 'EISDIR')
-        throw er
-      else
-        return
-    }
-  }
-
-  try {
-    opts.mkdirSync(path, opts)
-    return made || path
-  } catch (er) {
-    if (er.code === 'ENOENT')
-      return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))
-    if (er.code !== 'EEXIST' && er.code !== 'EROFS')
-      throw er
-    try {
-      if (!opts.statSync(path).isDirectory())
-        throw er
-    } catch (_) {
-      throw er
-    }
-  }
-}
-
-module.exports = {mkdirpManual, mkdirpManualSync}
diff --git a/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js b/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
deleted file mode 100644
index c7a6b69800f62..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/lib/mkdirp-native.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const {dirname} = require('path')
-const {findMade, findMadeSync} = require('./find-made.js')
-const {mkdirpManual, mkdirpManualSync} = require('./mkdirp-manual.js')
-
-const mkdirpNative = (path, opts) => {
-  opts.recursive = true
-  const parent = dirname(path)
-  if (parent === path)
-    return opts.mkdirAsync(path, opts)
-
-  return findMade(opts, path).then(made =>
-    opts.mkdirAsync(path, opts).then(() => made)
-    .catch(er => {
-      if (er.code === 'ENOENT')
-        return mkdirpManual(path, opts)
-      else
-        throw er
-    }))
-}
-
-const mkdirpNativeSync = (path, opts) => {
-  opts.recursive = true
-  const parent = dirname(path)
-  if (parent === path)
-    return opts.mkdirSync(path, opts)
-
-  const made = findMadeSync(opts, path)
-  try {
-    opts.mkdirSync(path, opts)
-    return made
-  } catch (er) {
-    if (er.code === 'ENOENT')
-      return mkdirpManualSync(path, opts)
-    else
-      throw er
-  }
-}
-
-module.exports = {mkdirpNative, mkdirpNativeSync}
diff --git a/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js b/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
deleted file mode 100644
index 2fa4833faacc7..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/lib/opts-arg.js
+++ /dev/null
@@ -1,23 +0,0 @@
-const { promisify } = require('util')
-const fs = require('fs')
-const optsArg = opts => {
-  if (!opts)
-    opts = { mode: 0o777, fs }
-  else if (typeof opts === 'object')
-    opts = { mode: 0o777, fs, ...opts }
-  else if (typeof opts === 'number')
-    opts = { mode: opts, fs }
-  else if (typeof opts === 'string')
-    opts = { mode: parseInt(opts, 8), fs }
-  else
-    throw new TypeError('invalid options argument')
-
-  opts.mkdir = opts.mkdir || opts.fs.mkdir || fs.mkdir
-  opts.mkdirAsync = promisify(opts.mkdir)
-  opts.stat = opts.stat || opts.fs.stat || fs.stat
-  opts.statAsync = promisify(opts.stat)
-  opts.statSync = opts.statSync || opts.fs.statSync || fs.statSync
-  opts.mkdirSync = opts.mkdirSync || opts.fs.mkdirSync || fs.mkdirSync
-  return opts
-}
-module.exports = optsArg
diff --git a/node_modules/tar/node_modules/mkdirp/lib/path-arg.js b/node_modules/tar/node_modules/mkdirp/lib/path-arg.js
deleted file mode 100644
index cc07de5a6f992..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/lib/path-arg.js
+++ /dev/null
@@ -1,29 +0,0 @@
-const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform
-const { resolve, parse } = require('path')
-const pathArg = path => {
-  if (/\0/.test(path)) {
-    // simulate same failure that node raises
-    throw Object.assign(
-      new TypeError('path must be a string without null bytes'),
-      {
-        path,
-        code: 'ERR_INVALID_ARG_VALUE',
-      }
-    )
-  }
-
-  path = resolve(path)
-  if (platform === 'win32') {
-    const badWinChars = /[*|"<>?:]/
-    const {root} = parse(path)
-    if (badWinChars.test(path.substr(root.length))) {
-      throw Object.assign(new Error('Illegal characters in path.'), {
-        path,
-        code: 'EINVAL',
-      })
-    }
-  }
-
-  return path
-}
-module.exports = pathArg
diff --git a/node_modules/tar/node_modules/mkdirp/lib/use-native.js b/node_modules/tar/node_modules/mkdirp/lib/use-native.js
deleted file mode 100644
index 079361de19fd8..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/lib/use-native.js
+++ /dev/null
@@ -1,10 +0,0 @@
-const fs = require('fs')
-
-const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version
-const versArr = version.replace(/^v/, '').split('.')
-const hasNative = +versArr[0] > 10 || +versArr[0] === 10 && +versArr[1] >= 12
-
-const useNative = !hasNative ? () => false : opts => opts.mkdir === fs.mkdir
-const useNativeSync = !hasNative ? () => false : opts => opts.mkdirSync === fs.mkdirSync
-
-module.exports = {useNative, useNativeSync}
diff --git a/node_modules/tar/node_modules/mkdirp/package.json b/node_modules/tar/node_modules/mkdirp/package.json
deleted file mode 100644
index 2913ed09bddd6..0000000000000
--- a/node_modules/tar/node_modules/mkdirp/package.json
+++ /dev/null
@@ -1,44 +0,0 @@
-{
-  "name": "mkdirp",
-  "description": "Recursively mkdir, like `mkdir -p`",
-  "version": "1.0.4",
-  "main": "index.js",
-  "keywords": [
-    "mkdir",
-    "directory",
-    "make dir",
-    "make",
-    "dir",
-    "recursive",
-    "native"
-  ],
-  "repository": {
-    "type": "git",
-    "url": "https://github.com/isaacs/node-mkdirp.git"
-  },
-  "scripts": {
-    "test": "tap",
-    "snap": "tap",
-    "preversion": "npm test",
-    "postversion": "npm publish",
-    "postpublish": "git push origin --follow-tags"
-  },
-  "tap": {
-    "check-coverage": true,
-    "coverage-map": "map.js"
-  },
-  "devDependencies": {
-    "require-inject": "^1.4.4",
-    "tap": "^14.10.7"
-  },
-  "bin": "bin/cmd.js",
-  "license": "MIT",
-  "engines": {
-    "node": ">=10"
-  },
-  "files": [
-    "bin",
-    "lib",
-    "index.js"
-  ]
-}
diff --git a/node_modules/node-gyp/node_modules/yallist/LICENSE.md b/node_modules/tar/node_modules/yallist/LICENSE.md
similarity index 100%
rename from node_modules/node-gyp/node_modules/yallist/LICENSE.md
rename to node_modules/tar/node_modules/yallist/LICENSE.md
diff --git a/node_modules/node-gyp/node_modules/yallist/dist/commonjs/index.js b/node_modules/tar/node_modules/yallist/dist/commonjs/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/yallist/dist/commonjs/index.js
rename to node_modules/tar/node_modules/yallist/dist/commonjs/index.js
diff --git a/node_modules/node-gyp/node_modules/yallist/dist/commonjs/package.json b/node_modules/tar/node_modules/yallist/dist/commonjs/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/yallist/dist/commonjs/package.json
rename to node_modules/tar/node_modules/yallist/dist/commonjs/package.json
diff --git a/node_modules/node-gyp/node_modules/yallist/dist/esm/index.js b/node_modules/tar/node_modules/yallist/dist/esm/index.js
similarity index 100%
rename from node_modules/node-gyp/node_modules/yallist/dist/esm/index.js
rename to node_modules/tar/node_modules/yallist/dist/esm/index.js
diff --git a/node_modules/node-gyp/node_modules/tar/dist/esm/package.json b/node_modules/tar/node_modules/yallist/dist/esm/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/tar/dist/esm/package.json
rename to node_modules/tar/node_modules/yallist/dist/esm/package.json
diff --git a/node_modules/node-gyp/node_modules/yallist/package.json b/node_modules/tar/node_modules/yallist/package.json
similarity index 100%
rename from node_modules/node-gyp/node_modules/yallist/package.json
rename to node_modules/tar/node_modules/yallist/package.json
diff --git a/node_modules/tar/package.json b/node_modules/tar/package.json
index f84a41cca5af5..be0f1e8fd8000 100644
--- a/node_modules/tar/package.json
+++ b/node_modules/tar/package.json
@@ -1,8 +1,8 @@
 {
-  "author": "GitHub Inc.",
+  "author": "Isaac Z. Schlueter",
   "name": "tar",
   "description": "tar for node",
-  "version": "6.2.1",
+  "version": "7.5.1",
   "repository": {
     "type": "git",
     "url": "https://github.com/isaacs/node-tar.git"
@@ -10,61 +10,317 @@
   "scripts": {
     "genparse": "node scripts/generate-parse-fixtures.js",
     "snap": "tap",
-    "test": "tap"
+    "test": "tap",
+    "pretest": "npm run prepare",
+    "presnap": "npm run prepare",
+    "prepare": "tshy",
+    "preversion": "npm test",
+    "postversion": "npm publish",
+    "prepublishOnly": "git push origin --follow-tags",
+    "format": "prettier --write . --log-level warn",
+    "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts"
   },
   "dependencies": {
-    "chownr": "^2.0.0",
-    "fs-minipass": "^2.0.0",
-    "minipass": "^5.0.0",
-    "minizlib": "^2.1.1",
-    "mkdirp": "^1.0.3",
-    "yallist": "^4.0.0"
+    "@isaacs/fs-minipass": "^4.0.0",
+    "chownr": "^3.0.0",
+    "minipass": "^7.1.2",
+    "minizlib": "^3.1.0",
+    "yallist": "^5.0.0"
   },
   "devDependencies": {
-    "@npmcli/eslint-config": "^4.0.0",
-    "@npmcli/template-oss": "4.11.0",
+    "@types/node": "^22.15.29",
     "chmodr": "^1.2.0",
     "end-of-stream": "^1.4.3",
     "events-to-array": "^2.0.3",
     "mutate-fs": "^2.1.1",
-    "nock": "^13.2.9",
-    "rimraf": "^3.0.2",
-    "tap": "^16.0.1"
+    "nock": "^13.5.4",
+    "prettier": "^3.2.5",
+    "rimraf": "^5.0.5",
+    "tap": "^18.7.2",
+    "tshy": "^1.13.1",
+    "typedoc": "^0.25.13"
   },
   "license": "ISC",
   "engines": {
-    "node": ">=10"
+    "node": ">=18"
   },
   "files": [
-    "bin/",
-    "lib/",
-    "index.js"
+    "dist"
   ],
   "tap": {
     "coverage-map": "map.js",
     "timeout": 0,
-    "nyc-arg": [
-      "--exclude",
-      "tap-snapshots/**"
-    ]
+    "typecheck": true
   },
-  "templateOSS": {
-    "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",
-    "version": "4.11.0",
-    "content": "scripts/template-oss",
-    "engines": ">=10",
-    "distPaths": [
-      "index.js"
-    ],
-    "allowPaths": [
-      "/index.js"
-    ],
-    "ciVersions": [
-      "10.x",
-      "12.x",
-      "14.x",
-      "16.x",
-      "18.x"
-    ]
-  }
+  "prettier": {
+    "experimentalTernaries": true,
+    "semi": false,
+    "printWidth": 70,
+    "tabWidth": 2,
+    "useTabs": false,
+    "singleQuote": true,
+    "jsxSingleQuote": false,
+    "bracketSameLine": true,
+    "arrowParens": "avoid",
+    "endOfLine": "lf"
+  },
+  "tshy": {
+    "exports": {
+      "./package.json": "./package.json",
+      ".": "./src/index.ts",
+      "./c": "./src/create.ts",
+      "./create": "./src/create.ts",
+      "./replace": "./src/create.ts",
+      "./r": "./src/create.ts",
+      "./list": "./src/list.ts",
+      "./t": "./src/list.ts",
+      "./update": "./src/update.ts",
+      "./u": "./src/update.ts",
+      "./extract": "./src/extract.ts",
+      "./x": "./src/extract.ts",
+      "./pack": "./src/pack.ts",
+      "./unpack": "./src/unpack.ts",
+      "./parse": "./src/parse.ts",
+      "./read-entry": "./src/read-entry.ts",
+      "./write-entry": "./src/write-entry.ts",
+      "./header": "./src/header.ts",
+      "./pax": "./src/pax.ts",
+      "./types": "./src/types.ts"
+    }
+  },
+  "exports": {
+    "./package.json": "./package.json",
+    ".": {
+      "import": {
+        "source": "./src/index.ts",
+        "types": "./dist/esm/index.d.ts",
+        "default": "./dist/esm/index.js"
+      },
+      "require": {
+        "source": "./src/index.ts",
+        "types": "./dist/commonjs/index.d.ts",
+        "default": "./dist/commonjs/index.js"
+      }
+    },
+    "./c": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./create": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./replace": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./r": {
+      "import": {
+        "source": "./src/create.ts",
+        "types": "./dist/esm/create.d.ts",
+        "default": "./dist/esm/create.js"
+      },
+      "require": {
+        "source": "./src/create.ts",
+        "types": "./dist/commonjs/create.d.ts",
+        "default": "./dist/commonjs/create.js"
+      }
+    },
+    "./list": {
+      "import": {
+        "source": "./src/list.ts",
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "source": "./src/list.ts",
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
+    "./t": {
+      "import": {
+        "source": "./src/list.ts",
+        "types": "./dist/esm/list.d.ts",
+        "default": "./dist/esm/list.js"
+      },
+      "require": {
+        "source": "./src/list.ts",
+        "types": "./dist/commonjs/list.d.ts",
+        "default": "./dist/commonjs/list.js"
+      }
+    },
+    "./update": {
+      "import": {
+        "source": "./src/update.ts",
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "source": "./src/update.ts",
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
+    "./u": {
+      "import": {
+        "source": "./src/update.ts",
+        "types": "./dist/esm/update.d.ts",
+        "default": "./dist/esm/update.js"
+      },
+      "require": {
+        "source": "./src/update.ts",
+        "types": "./dist/commonjs/update.d.ts",
+        "default": "./dist/commonjs/update.js"
+      }
+    },
+    "./extract": {
+      "import": {
+        "source": "./src/extract.ts",
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "source": "./src/extract.ts",
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
+    "./x": {
+      "import": {
+        "source": "./src/extract.ts",
+        "types": "./dist/esm/extract.d.ts",
+        "default": "./dist/esm/extract.js"
+      },
+      "require": {
+        "source": "./src/extract.ts",
+        "types": "./dist/commonjs/extract.d.ts",
+        "default": "./dist/commonjs/extract.js"
+      }
+    },
+    "./pack": {
+      "import": {
+        "source": "./src/pack.ts",
+        "types": "./dist/esm/pack.d.ts",
+        "default": "./dist/esm/pack.js"
+      },
+      "require": {
+        "source": "./src/pack.ts",
+        "types": "./dist/commonjs/pack.d.ts",
+        "default": "./dist/commonjs/pack.js"
+      }
+    },
+    "./unpack": {
+      "import": {
+        "source": "./src/unpack.ts",
+        "types": "./dist/esm/unpack.d.ts",
+        "default": "./dist/esm/unpack.js"
+      },
+      "require": {
+        "source": "./src/unpack.ts",
+        "types": "./dist/commonjs/unpack.d.ts",
+        "default": "./dist/commonjs/unpack.js"
+      }
+    },
+    "./parse": {
+      "import": {
+        "source": "./src/parse.ts",
+        "types": "./dist/esm/parse.d.ts",
+        "default": "./dist/esm/parse.js"
+      },
+      "require": {
+        "source": "./src/parse.ts",
+        "types": "./dist/commonjs/parse.d.ts",
+        "default": "./dist/commonjs/parse.js"
+      }
+    },
+    "./read-entry": {
+      "import": {
+        "source": "./src/read-entry.ts",
+        "types": "./dist/esm/read-entry.d.ts",
+        "default": "./dist/esm/read-entry.js"
+      },
+      "require": {
+        "source": "./src/read-entry.ts",
+        "types": "./dist/commonjs/read-entry.d.ts",
+        "default": "./dist/commonjs/read-entry.js"
+      }
+    },
+    "./write-entry": {
+      "import": {
+        "source": "./src/write-entry.ts",
+        "types": "./dist/esm/write-entry.d.ts",
+        "default": "./dist/esm/write-entry.js"
+      },
+      "require": {
+        "source": "./src/write-entry.ts",
+        "types": "./dist/commonjs/write-entry.d.ts",
+        "default": "./dist/commonjs/write-entry.js"
+      }
+    },
+    "./header": {
+      "import": {
+        "source": "./src/header.ts",
+        "types": "./dist/esm/header.d.ts",
+        "default": "./dist/esm/header.js"
+      },
+      "require": {
+        "source": "./src/header.ts",
+        "types": "./dist/commonjs/header.d.ts",
+        "default": "./dist/commonjs/header.js"
+      }
+    },
+    "./pax": {
+      "import": {
+        "source": "./src/pax.ts",
+        "types": "./dist/esm/pax.d.ts",
+        "default": "./dist/esm/pax.js"
+      },
+      "require": {
+        "source": "./src/pax.ts",
+        "types": "./dist/commonjs/pax.d.ts",
+        "default": "./dist/commonjs/pax.js"
+      }
+    },
+    "./types": {
+      "import": {
+        "source": "./src/types.ts",
+        "types": "./dist/esm/types.d.ts",
+        "default": "./dist/esm/types.js"
+      },
+      "require": {
+        "source": "./src/types.ts",
+        "types": "./dist/commonjs/types.d.ts",
+        "default": "./dist/commonjs/types.js"
+      }
+    }
+  },
+  "type": "module",
+  "main": "./dist/commonjs/index.js",
+  "types": "./dist/commonjs/index.d.ts",
+  "module": "./dist/esm/index.js"
 }
diff --git a/package-lock.json b/package-lock.json
index 25b4d10c29f37..7eac7aabe2124 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -144,7 +144,7 @@
         "spdx-expression-parse": "^4.0.0",
         "ssri": "^12.0.0",
         "supports-color": "^10.2.2",
-        "tar": "^6.2.1",
+        "tar": "^7.5.1",
         "text-table": "~0.2.0",
         "tiny-relative-date": "^2.0.2",
         "treeverse": "^3.0.0",
@@ -3081,11 +3081,13 @@
       }
     },
     "node_modules/chownr": {
-      "version": "2.0.0",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz",
+      "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==",
       "inBundle": true,
-      "license": "ISC",
+      "license": "BlueOak-1.0.0",
       "engines": {
-        "node": ">=10"
+        "node": ">=18"
       }
     },
     "node_modules/ci-info": {
@@ -8060,7 +8062,9 @@
       }
     },
     "node_modules/minizlib": {
-      "version": "3.0.2",
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz",
+      "integrity": "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw==",
       "inBundle": true,
       "license": "MIT",
       "dependencies": {
@@ -8070,20 +8074,6 @@
         "node": ">= 18"
       }
     },
-    "node_modules/mkdirp": {
-      "version": "3.0.1",
-      "inBundle": true,
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "dist/cjs/src/bin.js"
-      },
-      "engines": {
-        "node": ">=10"
-      },
-      "funding": {
-        "url": "https://github.com/sponsors/isaacs"
-      }
-    },
     "node_modules/modify-values": {
       "version": "1.0.1",
       "dev": true,
@@ -8249,14 +8239,6 @@
         "node": "^18.17.0 || >=20.5.0"
       }
     },
-    "node_modules/node-gyp/node_modules/chownr": {
-      "version": "3.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "node_modules/node-gyp/node_modules/glob": {
       "version": "10.4.5",
       "inBundle": true,
@@ -8345,30 +8327,6 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
-    "node_modules/node-gyp/node_modules/tar": {
-      "version": "7.4.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/fs-minipass": "^4.0.0",
-        "chownr": "^3.0.0",
-        "minipass": "^7.1.2",
-        "minizlib": "^3.0.1",
-        "mkdirp": "^3.0.1",
-        "yallist": "^5.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/node-gyp/node_modules/yallist": {
-      "version": "5.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "node_modules/node-html-parser": {
       "version": "6.1.13",
       "dev": true,
@@ -9049,38 +9007,6 @@
         "node": "^20.17.0 || >=22.9.0"
       }
     },
-    "node_modules/pacote/node_modules/chownr": {
-      "version": "3.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/pacote/node_modules/tar": {
-      "version": "7.4.3",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "@isaacs/fs-minipass": "^4.0.0",
-        "chownr": "^3.0.0",
-        "minipass": "^7.1.2",
-        "minizlib": "^3.0.1",
-        "mkdirp": "^3.0.1",
-        "yallist": "^5.0.0"
-      },
-      "engines": {
-        "node": ">=18"
-      }
-    },
-    "node_modules/pacote/node_modules/yallist": {
-      "version": "5.0.0",
-      "inBundle": true,
-      "license": "BlueOak-1.0.0",
-      "engines": {
-        "node": ">=18"
-      }
-    },
     "node_modules/parent-module": {
       "version": "1.0.1",
       "dev": true,
@@ -13279,19 +13205,20 @@
       }
     },
     "node_modules/tar": {
-      "version": "6.2.1",
+      "version": "7.5.1",
+      "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.1.tgz",
+      "integrity": "sha512-nlGpxf+hv0v7GkWBK2V9spgactGOp0qvfWRxUMjqHyzrt3SgwE48DIv/FhqPHJYLHpgW1opq3nERbz5Anq7n1g==",
       "inBundle": true,
       "license": "ISC",
       "dependencies": {
-        "chownr": "^2.0.0",
-        "fs-minipass": "^2.0.0",
-        "minipass": "^5.0.0",
-        "minizlib": "^2.1.1",
-        "mkdirp": "^1.0.3",
-        "yallist": "^4.0.0"
+        "@isaacs/fs-minipass": "^4.0.0",
+        "chownr": "^3.0.0",
+        "minipass": "^7.1.2",
+        "minizlib": "^3.1.0",
+        "yallist": "^5.0.0"
       },
       "engines": {
-        "node": ">=10"
+        "node": ">=18"
       }
     },
     "node_modules/tar-stream": {
@@ -13304,78 +13231,14 @@
         "streamx": "^2.15.0"
       }
     },
-    "node_modules/tar/node_modules/fs-minipass": {
-      "version": "2.1.0",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "minipass": "^3.0.0"
-      },
-      "engines": {
-        "node": ">= 8"
-      }
-    },
-    "node_modules/tar/node_modules/fs-minipass/node_modules/minipass": {
-      "version": "3.3.6",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/tar/node_modules/fs-minipass/node_modules/yallist": {
-      "version": "4.0.0",
-      "inBundle": true,
-      "license": "ISC"
-    },
-    "node_modules/tar/node_modules/minipass": {
+    "node_modules/tar/node_modules/yallist": {
       "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
+      "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==",
       "inBundle": true,
-      "license": "ISC",
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/tar/node_modules/minizlib": {
-      "version": "2.1.2",
-      "inBundle": true,
-      "license": "MIT",
-      "dependencies": {
-        "minipass": "^3.0.0",
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">= 8"
-      }
-    },
-    "node_modules/tar/node_modules/minizlib/node_modules/minipass": {
-      "version": "3.3.6",
-      "inBundle": true,
-      "license": "ISC",
-      "dependencies": {
-        "yallist": "^4.0.0"
-      },
-      "engines": {
-        "node": ">=8"
-      }
-    },
-    "node_modules/tar/node_modules/minizlib/node_modules/yallist": {
-      "version": "4.0.0",
-      "inBundle": true,
-      "license": "ISC"
-    },
-    "node_modules/tar/node_modules/mkdirp": {
-      "version": "1.0.4",
-      "inBundle": true,
-      "license": "MIT",
-      "bin": {
-        "mkdirp": "bin/cmd.js"
-      },
+      "license": "BlueOak-1.0.0",
       "engines": {
-        "node": ">=10"
+        "node": ">=18"
       }
     },
     "node_modules/tcompare": {
@@ -14638,7 +14501,7 @@
         "minimatch": "^10.0.3",
         "npm-package-arg": "^13.0.0",
         "pacote": "^21.0.2",
-        "tar": "^6.2.1"
+        "tar": "^7.5.1"
       },
       "devDependencies": {
         "@npmcli/eslint-config": "^5.0.1",
diff --git a/package.json b/package.json
index e6f1a95c142a9..5576e5502207f 100644
--- a/package.json
+++ b/package.json
@@ -111,7 +111,7 @@
     "spdx-expression-parse": "^4.0.0",
     "ssri": "^12.0.0",
     "supports-color": "^10.2.2",
-    "tar": "^6.2.1",
+    "tar": "^7.5.1",
     "text-table": "~0.2.0",
     "tiny-relative-date": "^2.0.2",
     "treeverse": "^3.0.0",
diff --git a/workspaces/libnpmdiff/lib/untar.js b/workspaces/libnpmdiff/lib/untar.js
index 341ae27d1e826..6bbecd8a59ce0 100644
--- a/workspaces/libnpmdiff/lib/untar.js
+++ b/workspaces/libnpmdiff/lib/untar.js
@@ -37,7 +37,6 @@ const untar = ({ files, refs }, { filterFiles, item, prefix }) => {
         // should skip reading file when using --name-only option
         let content
         try {
-          entry.setEncoding('utf8')
           content = entry.concat()
         } catch (e) {
           /* istanbul ignore next */
@@ -80,11 +79,12 @@ const readTarballs = async (tarballs, opts = {}) => {
   }
 
   // await to read all content from included files
+  // TODO this feels like it could be done in one pass instead of three (values, map, forEach)
   const allRefs = [...refs.values()]
   const contents = await Promise.all(allRefs.map(async ref => ref.content))
 
   contents.forEach((content, index) => {
-    allRefs[index].content = content
+    allRefs[index].content = content.toString('utf8')
   })
 
   return {
diff --git a/workspaces/libnpmdiff/package.json b/workspaces/libnpmdiff/package.json
index 605f1691d95b5..59c9b354229cf 100644
--- a/workspaces/libnpmdiff/package.json
+++ b/workspaces/libnpmdiff/package.json
@@ -54,7 +54,7 @@
     "minimatch": "^10.0.3",
     "npm-package-arg": "^13.0.0",
     "pacote": "^21.0.2",
-    "tar": "^6.2.1"
+    "tar": "^7.5.1"
   },
   "templateOSS": {
     "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.",